Merge branch 'master' of github.com:MongoEngine/mongoengine into add_validation_to_doc
@@ -1,4 +0,0 @@
from .all_warnings import AllWarnings
from .document import *
from .queryset import *
from .fields import *
@@ -1,42 +0,0 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure its imported into the
top level and called first by the test suite.
"""
import unittest
import warnings

from mongoengine import *


__all__ = ('AllWarnings', )


class AllWarnings(unittest.TestCase):

    def setUp(self):
        connect(db='mongoenginetest')
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message,
                                  "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):

        class NonAbstractBase(Document):
            meta = {'allow_inheritance': True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {'collection': 'fail'}

        warning = self.warning_list[0]
        self.assertEqual(SyntaxWarning, warning["category"])
        self.assertEqual('non_abstract_base',
                         InheritedDocumentFailTest._get_collection_name())
tests/all_warnings/test_warnings.py (new file, 35 lines)
@@ -0,0 +1,35 @@
"""
This test has been put into a module. This is because it tests warnings that
only get triggered on first hit. This way we can ensure its imported into the
top level and called first by the test suite.
"""
import unittest
import warnings

from mongoengine import *


class TestAllWarnings(unittest.TestCase):
    def setUp(self):
        connect(db="mongoenginetest")
        self.warning_list = []
        self.showwarning_default = warnings.showwarning
        warnings.showwarning = self.append_to_warning_list

    def append_to_warning_list(self, message, category, *args):
        self.warning_list.append({"message": message, "category": category})

    def tearDown(self):
        # restore default handling of warnings
        warnings.showwarning = self.showwarning_default

    def test_document_collection_syntax_warning(self):
        class NonAbstractBase(Document):
            meta = {"allow_inheritance": True}

        class InheritedDocumentFailTest(NonAbstractBase):
            meta = {"collection": "fail"}

        warning = self.warning_list[0]
        assert SyntaxWarning == warning["category"]
        assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name()
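The module docstring above notes that these warnings "only get triggered on first hit". That is standard Python behaviour: with the default warning filter, a given (message, category, source location) is reported only once per run, which is why this test has to be the first thing the suite imports. A minimal sketch of that deduplication, independent of MongoEngine (the noisy() helper is hypothetical):

import warnings

def noisy():
    # Same message, category and source line on every call.
    warnings.warn("collection ignored", SyntaxWarning)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("default")  # report each warning once per location
    noisy()
    noisy()

assert len(caught) == 1  # the second call is swallowed by the warning registry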
@@ -1,13 +0,0 @@
import unittest

from .class_methods import *
from .delta import *
from .dynamic import *
from .indexes import *
from .inheritance import *
from .instance import *
from .json_serialisation import *
from .validation import *

if __name__ == '__main__':
    unittest.main()
@@ -1,864 +0,0 @@
# -*- coding: utf-8 -*-
import unittest

from bson import SON
from mongoengine import *
from mongoengine.pymongo_support import list_collection_names
from tests.utils import MongoDBTestCase


class DeltaTest(MongoDBTestCase):

    def setUp(self):
        super(DeltaTest, self).setUp()

        class Person(Document):
            name = StringField()
            age = IntField()

            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_delta(self):
        self.delta(Document)
        self.delta(DynamicDocument)

    def delta(self, DocClass):

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
    def test_delta_recursive(self):
        self.delta_recursive(Document, EmbeddedDocument)
        self.delta_recursive(DynamicDocument, EmbeddedDocument)
        self.delta_recursive(Document, DynamicEmbeddedDocument)
        self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'id': "010101",
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.dict_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])

        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2.string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'list_field.2.string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        }, {}))
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del doc.embedded_field.list_field[2].list_field[2]['hello']
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'].string_field = 'Hello World'
        self.assertEqual(doc._get_changed_fields(),
                         ['dict_field.Embedded.string_field'])
        self.assertEqual(doc._delta(),
                         ({'dict_field.Embedded.string_field': 'Hello World'}, {}))
    def test_circular_reference_deltas(self):
        self.circular_reference_deltas(Document, Document)
        self.circular_reference_deltas(Document, DynamicDocument)
        self.circular_reference_deltas(DynamicDocument, Document)
        self.circular_reference_deltas(DynamicDocument, DynamicDocument)

    def circular_reference_deltas(self, DocClass1, DocClass2):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization'))

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person')

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        person.save()
        organization.save()

        p = Person.objects[0].select_related()
        o = Organization.objects.first()
        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)

    def test_circular_reference_deltas_2(self):
        self.circular_reference_deltas_2(Document, Document)
        self.circular_reference_deltas_2(Document, DynamicDocument)
        self.circular_reference_deltas_2(DynamicDocument, Document)
        self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)

    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization', dbref=dbref))
            employer = ReferenceField('Organization', dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person', dbref=dbref)
            employees = ListField(ReferenceField('Person', dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)
        self.assertEqual(e.employer, o)

        return person, organization, employee
    def test_delta_db_field(self):
        self.delta_db_field(Document)
        self.delta_db_field(DynamicDocument)

    def delta_db_field(self, DocClass):

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
        self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
        self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = 'hello'
        doc.int_field = 1
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.string_field, 'hello')
        self.assertEqual(doc.int_field, 1)
        self.assertEqual(doc.dict_field, {'hello': 'world'})
        self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])
    def test_delta_recursive_db_field(self):
        self.delta_recursive_db_field(Document, EmbeddedDocument)
        self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
        self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)

    def delta_recursive_db_field(self, DocClass, EmbeddedClass):

        class Embedded(EmbeddedClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')
            embedded_field = EmbeddedDocumentField(Embedded,
                                                   db_field='db_embedded_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['db_embedded_field'])

        embedded_delta = {
            'db_string_field': 'hello',
            'db_int_field': 1,
            'db_dict_field': {'hello': 'world'},
            'db_list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_dict_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_dict_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({}, {'db_list_field': 1}))
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field.2.db_string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'db_list_field.2.db_string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_string_field': 'world'},
                          {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': {
            '_cls': 'Embedded',
            'db_string_field': 'hello world',
            'db_int_field': 1,
            'db_list_field': ['1', 2, {'hello': 'world'}],
            'db_dict_field': {'hello': 'world'}}}, {}))
        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field.2': {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}
        }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                          [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                          [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        del doc.embedded_field.list_field[2].list_field[2]['hello']
        self.assertEqual(doc._delta(),
                         ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        self.assertEqual(doc._delta(), ({},
                         {'db_embedded_field.db_list_field.2.db_list_field': 1}))
    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p.doc = 123
        del p.doc
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p = Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, Person.objects(age=24).count())

    def test_dynamic_delta(self):

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
    def test_delta_with_dbref_true(self):
        person, organization, employee = self.circular_reference_deltas_2(Document, Document, True)
        employee.name = 'test'

        self.assertEqual(organization._get_changed_fields(), [])

        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertEqual({}, updates)

        organization.employees.append(person)
        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertIn('employees', updates)

    def test_delta_with_dbref_false(self):
        person, organization, employee = self.circular_reference_deltas_2(Document, Document, False)
        employee.name = 'test'

        self.assertEqual(organization._get_changed_fields(), [])

        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertEqual({}, updates)

        organization.employees.append(person)
        updates, removals = organization._delta()
        self.assertEqual({}, removals)
        self.assertIn('employees', updates)

    def test_nested_nested_fields_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
            name = StringField()

        MyDoc.drop_collection()

        mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs['a']['b']
        subdoc.name = 'bar'

        self.assertEqual(["name"], subdoc._get_changed_fields())
        self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields())

        mydoc._clear_changed_fields()
        self.assertEqual([], mydoc._get_changed_fields())

    def test_lower_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc().save()

        mydoc = MyDoc.objects.first()
        mydoc.subs['a'] = EmbeddedDoc()
        self.assertEqual(["subs.a"], mydoc._get_changed_fields())

        subdoc = mydoc.subs['a']
        subdoc.name = 'bar'

        self.assertEqual(["name"], subdoc._get_changed_fields())
        self.assertEqual(["subs.a"], mydoc._get_changed_fields())
        mydoc.save()

        mydoc._clear_changed_fields()
        self.assertEqual([], mydoc._get_changed_fields())

    def test_upper_level_mark_as_changed(self):
        class EmbeddedDoc(EmbeddedDocument):
            name = StringField()

        class MyDoc(Document):
            subs = MapField(EmbeddedDocumentField(EmbeddedDoc))

        MyDoc.drop_collection()

        MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save()

        mydoc = MyDoc.objects.first()
        subdoc = mydoc.subs['a']
        subdoc.name = 'bar'

        self.assertEqual(["name"], subdoc._get_changed_fields())
        self.assertEqual(["subs.a.name"], mydoc._get_changed_fields())

        mydoc.subs['a'] = EmbeddedDoc()
        self.assertEqual(["subs.a"], mydoc._get_changed_fields())
        mydoc.save()

        mydoc._clear_changed_fields()
        self.assertEqual([], mydoc._get_changed_fields())
    def test_referenced_object_changed_attributes(self):
        """Ensures that when you save a new reference to a field, the referenced object isn't altered"""

        class Organization(Document):
            name = StringField()

        class User(Document):
            name = StringField()
            org = ReferenceField('Organization', required=True)

        Organization.drop_collection()
        User.drop_collection()

        org1 = Organization(name='Org 1')
        org1.save()

        org2 = Organization(name='Org 2')
        org2.save()

        user = User(name='Fred', org=org1)
        user.save()

        org1.reload()
        org2.reload()
        user.reload()
        self.assertEqual(org1.name, 'Org 1')
        self.assertEqual(org2.name, 'Org 2')
        self.assertEqual(user.name, 'Fred')

        user.name = 'Harold'
        user.org = org2

        org2.name = 'New Org 2'
        self.assertEqual(org2.name, 'New Org 2')

        user.save()
        org2.save()

        self.assertEqual(org2.name, 'New Org 2')
        org2.reload()
        self.assertEqual(org2.name, 'New Org 2')

    def test_delta_for_nested_map_fields(self):
        class UInfoDocument(Document):
            phone = StringField()

        class EmbeddedRole(EmbeddedDocument):
            type = StringField()

        class EmbeddedUser(EmbeddedDocument):
            name = StringField()
            roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
            rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
            info = ReferenceField(UInfoDocument)

        class Doc(Document):
            users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
            num = IntField(default=-1)

        Doc.drop_collection()

        doc = Doc(num=1)
        doc.users["007"] = EmbeddedUser(name="Agent007")
        doc.save()

        uinfo = UInfoDocument(phone="79089269066")
        uinfo.save()

        d = Doc.objects(num=1).first()
        d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
        d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
        d.users["007"]["info"] = uinfo
        delta = d._delta()
        self.assertEqual(True, "users.007.roles.666" in delta[0])
        self.assertEqual(True, "users.007.rolist" in delta[0])
        self.assertEqual(True, "users.007.info" in delta[0])
        self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"])
        self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"])
        self.assertEqual(uinfo.id, delta[0]["users.007.info"])


if __name__ == '__main__':
    unittest.main()
@@ -1,564 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
import warnings

from six import iteritems

from mongoengine import (BooleanField, Document, EmbeddedDocument,
                         EmbeddedDocumentField, GenericReferenceField,
                         IntField, ReferenceField, StringField)
from mongoengine.pymongo_support import list_collection_names
from tests.utils import MongoDBTestCase
from tests.fixtures import Base

__all__ = ('InheritanceTest', )


class InheritanceTest(MongoDBTestCase):

    def tearDown(self):
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_constructor_cls(self):
        # Ensures _cls is properly set during construction
        # and when object gets reloaded (prevent regression of #1950)
        class EmbedData(EmbeddedDocument):
            data = StringField()
            meta = {'allow_inheritance': True}

        class DataDoc(Document):
            name = StringField()
            embed = EmbeddedDocumentField(EmbedData)
            meta = {'allow_inheritance': True}

        test_doc = DataDoc(name='test', embed=EmbedData(data='data'))
        self.assertEqual(test_doc._cls, 'DataDoc')
        self.assertEqual(test_doc.embed._cls, 'EmbedData')
        test_doc.save()
        saved_doc = DataDoc.objects.with_id(test_doc.id)
        self.assertEqual(test_doc._cls, saved_doc._cls)
        self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls)
        test_doc.delete()
    def test_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Fish._superclasses, ('Animal',))
        self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Animal',))
        self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal'))

    def test_external_superclasses(self):
        """Ensure that the correct list of super classes is assembled when
        importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._superclasses, ('Base', ))
        self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Fish'))
        self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',))
        self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal',
                                             'Base.Animal.Mammal'))
        self.assertEqual(Human._superclasses, ('Base', 'Base.Animal',
                                               'Base.Animal.Mammal'))

    def test_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Animal',
                                              'Animal.Fish',
                                              'Animal.Fish.Guppy',
                                              'Animal.Mammal',
                                              'Animal.Mammal.Dog',
                                              'Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',
                                            'Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Animal.Mammal',
                                              'Animal.Mammal.Dog',
                                              'Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',))

    def test_external_subclasses(self):
        """Ensure that the correct list of _subclasses (subclasses) is
        assembled when importing part of the model.
        """
        class Animal(Base): pass
        class Fish(Animal): pass
        class Guppy(Fish): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        self.assertEqual(Animal._subclasses, ('Base.Animal',
                                              'Base.Animal.Fish',
                                              'Base.Animal.Fish.Guppy',
                                              'Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Fish._subclasses, ('Base.Animal.Fish',
                                            'Base.Animal.Fish.Guppy',))
        self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',))
        self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal',
                                              'Base.Animal.Mammal.Dog',
                                              'Base.Animal.Mammal.Human'))
        self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',))
    def test_dynamic_declarations(self):
        """Test that declaring an extra class updates meta data"""

        class Animal(Document):
            meta = {'allow_inheritance': True}

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal',))

        # Test dynamically adding a class changes the meta data
        class Fish(Animal):
            pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish'))

        self.assertEqual(Fish._superclasses, ('Animal', ))
        self.assertEqual(Fish._subclasses, ('Animal.Fish',))

        # Test dynamically adding an inherited class changes the meta data
        class Pike(Fish):
            pass

        self.assertEqual(Animal._superclasses, ())
        self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish',
                                              'Animal.Fish.Pike'))

        self.assertEqual(Fish._superclasses, ('Animal', ))
        self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike'))

        self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish'))
        self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',))

    def test_inheritance_meta_data(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {'allow_inheritance': True}

        class Employee(Person):
            salary = IntField()

        self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())

    def test_inheritance_to_mongo_keys(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Person(Document):
            name = StringField()
            age = IntField()

            meta = {'allow_inheritance': True}

        class Employee(Person):
            salary = IntField()

        self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
                         ['_cls', 'name', 'age'])
        self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(),
                         ['_cls', 'name', 'age', 'salary'])
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())
    def test_indexes_and_multiple_inheritance(self):
        """ Ensure that all of the indexes are created for a document with
        multiple inheritance.
        """

        class A(Document):
            a = StringField()

            meta = {
                'allow_inheritance': True,
                'indexes': ['a']
            }

        class B(Document):
            b = StringField()

            meta = {
                'allow_inheritance': True,
                'indexes': ['b']
            }

        class C(A, B):
            pass

        A.drop_collection()
        B.drop_collection()
        C.drop_collection()

        C.ensure_indexes()

        self.assertEqual(
            sorted([idx['key'] for idx in C._get_collection().index_information().values()]),
            sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]])
        )

    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query
        """

        class Animal(Document):
            meta = {'allow_inheritance': True}
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Dog(Mammal): pass
        class Human(Mammal): pass

        Animal.drop_collection()

        Animal().save()
        Fish().save()
        Mammal().save()
        Dog().save()
        Human().save()

        classes = [obj.__class__ for obj in Animal.objects]
        self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Mammal.objects]
        self.assertEqual(classes, [Mammal, Dog, Human])

        classes = [obj.__class__ for obj in Human.objects]
        self.assertEqual(classes, [Human])
    def test_allow_inheritance(self):
        """Ensure that inheritance is disabled by default on simple
        classes and that _cls will not be used.
        """
        class Animal(Document):
            name = StringField()

        # can't inherit because Animal didn't explicitly allow inheritance
        with self.assertRaises(ValueError) as cm:
            class Dog(Animal):
                pass
        self.assertIn("Document Animal may not be subclassed", str(cm.exception))

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog').save()
        self.assertEqual(dog.to_mongo().keys(), ['_id', 'name'])

        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        self.assertNotIn('_cls', obj)

    def test_cant_turn_off_inheritance_on_subclass(self):
        """Ensure if inheritance is on in a subclass you cant turn it off.
        """
        class Animal(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        with self.assertRaises(ValueError) as cm:
            class Mammal(Animal):
                meta = {'allow_inheritance': False}
        self.assertEqual(str(cm.exception), 'Only direct subclasses of Document may set "allow_inheritance" to False')

    def test_allow_inheritance_abstract_document(self):
        """Ensure that abstract documents can set inheritance rules and that
        _cls will not be used.
        """
        class FinalDocument(Document):
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class Animal(FinalDocument):
            name = StringField()

        with self.assertRaises(ValueError) as cm:
            class Mammal(Animal):
                pass

        # Check that _cls isn't present in simple documents
        doc = Animal(name='dog')
        self.assertNotIn('_cls', doc.to_mongo())
    def test_using_abstract_class_in_reference_field(self):
        # Ensures no regression of #1920
        class AbstractHuman(Document):
            meta = {'abstract': True}

        class Dad(AbstractHuman):
            name = StringField()

        class Home(Document):
            dad = ReferenceField(AbstractHuman)  # Referencing the abstract class
            address = StringField()

        dad = Dad(name='5').save()
        Home(dad=dad, address='street').save()

        home = Home.objects.first()
        home.address = 'garbage'
        home.save()  # Was failing with ValidationError

    def test_abstract_class_referencing_self(self):
        # Ensures no regression of #1920
        class Human(Document):
            meta = {'abstract': True}
            creator = ReferenceField('self', dbref=True)

        class User(Human):
            name = StringField()

        user = User(name='John').save()
        user2 = User(name='Foo', creator=user).save()

        user2 = User.objects.with_id(user2.id)
        user2.name = 'Bar'
        user2.save()  # Was failing with ValidationError

    def test_abstract_handle_ids_in_metaclass_properly(self):

        class City(Document):
            continent = StringField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 3)
        self.assertEqual(berlin._fields_ordered[0], 'id')

    def test_auto_id_not_set_if_specific_in_parent_class(self):

        class City(Document):
            continent = StringField()
            city_id = IntField(primary_key=True)
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 3)
        self.assertEqual(berlin._fields_ordered[0], 'city_id')

    def test_auto_id_vs_non_pk_id_field(self):

        class City(Document):
            continent = StringField()
            id = IntField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        class EuropeanCity(City):
            name = StringField()

        berlin = EuropeanCity(name='Berlin', continent='Europe')
        self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered))
        self.assertEqual(len(berlin._fields_ordered), 4)
        self.assertEqual(berlin._fields_ordered[0], 'auto_id_0')
        berlin.save()
        self.assertEqual(berlin.pk, berlin.auto_id_0)
    def test_abstract_document_creation_does_not_fail(self):
        class City(Document):
            continent = StringField()
            meta = {'abstract': True,
                    'allow_inheritance': False}

        city = City(continent='asia')
        self.assertEqual(None, city.pk)
        # TODO: expected error? Shouldn't we create a new error type?
        with self.assertRaises(KeyError):
            setattr(city, 'pk', 1)

    def test_allow_inheritance_embedded_document(self):
        """Ensure embedded documents respect inheritance."""
        class Comment(EmbeddedDocument):
            content = StringField()

        with self.assertRaises(ValueError):
            class SpecialComment(Comment):
                pass

        doc = Comment(content='test')
        self.assertNotIn('_cls', doc.to_mongo())

        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': True}

        doc = Comment(content='test')
        self.assertIn('_cls', doc.to_mongo())

    def test_document_inheritance(self):
        """Ensure mutliple inheritance of abstract documents
        """
        class DateCreatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        class DateUpdatedDocument(Document):
            meta = {
                'allow_inheritance': True,
                'abstract': True,
            }

        try:
            class MyDocument(DateCreatedDocument, DateUpdatedDocument):
                pass
        except Exception:
            self.assertTrue(False, "Couldn't create MyDocument class")

    def test_abstract_documents(self):
        """Ensure that a document superclass can be marked as abstract
        thereby not using it as the name for the collection."""

        defaults = {'index_background': True,
                    'index_drop_dups': True,
                    'index_opts': {'hello': 'world'},
                    'allow_inheritance': True,
                    'queryset_class': 'QuerySet',
                    'db_alias': 'myDB',
                    'shard_key': ('hello', 'world')}

        meta_settings = {'abstract': True}
        meta_settings.update(defaults)

        class Animal(Document):
            name = StringField()
            meta = meta_settings

        class Fish(Animal): pass
        class Guppy(Fish): pass

        class Mammal(Animal):
            meta = {'abstract': True}
        class Human(Mammal): pass

        for k, v in iteritems(defaults):
            for cls in [Animal, Fish, Guppy]:
                self.assertEqual(cls._meta[k], v)

        self.assertNotIn('collection', Animal._meta)
        self.assertNotIn('collection', Mammal._meta)

        self.assertEqual(Animal._get_collection_name(), None)
        self.assertEqual(Mammal._get_collection_name(), None)

        self.assertEqual(Fish._get_collection_name(), 'fish')
        self.assertEqual(Guppy._get_collection_name(), 'fish')
        self.assertEqual(Human._get_collection_name(), 'human')

        # ensure that a subclass of a non-abstract class can't be abstract
        with self.assertRaises(ValueError):
            class EvilHuman(Human):
                evil = BooleanField(default=True)
                meta = {'abstract': True}
    def test_abstract_embedded_documents(self):
        # 789: EmbeddedDocument shouldn't inherit abstract
        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        self.assertFalse(B._meta["abstract"])

    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            warnings.simplefilter("error")

            class AcloholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)


if __name__ == '__main__':
    unittest.main()
@@ -2,18 +2,14 @@
import unittest

from mongoengine import *
from mongoengine.pymongo_support import list_collection_names

from mongoengine.queryset import NULLIFY, PULL
from mongoengine.connection import get_db

__all__ = ("ClassMethodsTest", )
from mongoengine.pymongo_support import list_collection_names
from mongoengine.queryset import NULLIFY, PULL


class ClassMethodsTest(unittest.TestCase):

class TestClassMethods(unittest.TestCase):
    def setUp(self):
        connect(db='mongoenginetest')
        connect(db="mongoenginetest")
        self.db = get_db()

        class Person(Document):
@@ -33,54 +29,53 @@ class ClassMethodsTest(unittest.TestCase):
    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        self.assertEqual(['_cls', 'age', 'id', 'name'],
                         sorted(self.Person._fields.keys()))
        self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
                         sorted([x.__class__.__name__ for x in
                                 self.Person._fields.values()]))
        assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
        assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
            [x.__class__.__name__ for x in self.Person._fields.values()]
        )

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        self.assertEqual(self.db, db)
        assert self.db == db

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = 'person'
        self.assertEqual(collection_name, self.Person._get_collection_name())
        collection_name = "person"
        assert collection_name == self.Person._get_collection_name()

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = 'person'
        collection_name = "person"
        collection = self.Person._get_collection()
        self.assertEqual(self.db[collection_name], collection)
        assert self.db[collection_name] == collection

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = 'person'
        self.Person(name='Test').save()
        self.assertIn(collection_name, list_collection_names(self.db))
        collection_name = "person"
        self.Person(name="Test").save()
        assert collection_name in list_collection_names(self.db)

        self.Person.drop_collection()
        self.assertNotIn(collection_name, list_collection_names(self.db))
        assert collection_name not in list_collection_names(self.db)

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """

        class Job(Document):
            employee = ReferenceField(self.Person)

        self.assertEqual(self.Person._meta.get('delete_rules'), None)
        assert self.Person._meta.get("delete_rules") is None

        self.Person.register_delete_rule(Job, 'employee', NULLIFY)
        self.assertEqual(self.Person._meta['delete_rules'],
                         {(Job, 'employee'): NULLIFY})
        self.Person.register_delete_rule(Job, "employee", NULLIFY)
        assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}

    def test_compare_indexes(self):
        """ Ensure that the indexes are properly created and that
@@ -93,23 +88,27 @@ class ClassMethodsTest(unittest.TestCase):
description = StringField()
tags = StringField()

meta = {
'indexes': [('author', 'title')]
}
meta = {"indexes": [("author", "title")]}

BlogPost.drop_collection()

BlogPost.ensure_indexes()
self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

BlogPost.ensure_index(['author', 'description'])
self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('author', 1), ('description', 1)]]})
BlogPost.ensure_index(["author", "description"])
assert BlogPost.compare_indexes() == {
"missing": [],
"extra": [[("author", 1), ("description", 1)]],
}

BlogPost._get_collection().drop_index('author_1_description_1')
self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
BlogPost._get_collection().drop_index("author_1_description_1")
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

BlogPost._get_collection().drop_index('author_1_title_1')
self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('author', 1), ('title', 1)]], 'extra': []})
BlogPost._get_collection().drop_index("author_1_title_1")
assert BlogPost.compare_indexes() == {
"missing": [[("author", 1), ("title", 1)]],
"extra": [],
}

def test_compare_indexes_inheritance(self):
""" Ensure that the indexes are properly created and that
@@ -122,32 +121,34 @@ class ClassMethodsTest(unittest.TestCase):
title = StringField()
description = StringField()

meta = {
'allow_inheritance': True
}
meta = {"allow_inheritance": True}

class BlogPostWithTags(BlogPost):
tags = StringField()
tag_list = ListField(StringField())

meta = {
'indexes': [('author', 'tags')]
}
meta = {"indexes": [("author", "tags")]}

BlogPost.drop_collection()

BlogPost.ensure_indexes()
BlogPostWithTags.ensure_indexes()
self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

BlogPostWithTags.ensure_index(['author', 'tag_list'])
self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]]})
BlogPostWithTags.ensure_index(["author", "tag_list"])
assert BlogPost.compare_indexes() == {
"missing": [],
"extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]],
}

BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1')
self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1')
self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': []})
BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
assert BlogPost.compare_indexes() == {
"missing": [[("_cls", 1), ("author", 1), ("tags", 1)]],
"extra": [],
}

def test_compare_indexes_multiple_subclasses(self):
""" Ensure that compare_indexes behaves correctly if called from a
@@ -159,32 +160,26 @@ class ClassMethodsTest(unittest.TestCase):
title = StringField()
description = StringField()

meta = {
'allow_inheritance': True
}
meta = {"allow_inheritance": True}

class BlogPostWithTags(BlogPost):
tags = StringField()
tag_list = ListField(StringField())

meta = {
'indexes': [('author', 'tags')]
}
meta = {"indexes": [("author", "tags")]}

class BlogPostWithCustomField(BlogPost):
custom = DictField()

meta = {
'indexes': [('author', 'custom')]
}
meta = {"indexes": [("author", "custom")]}

BlogPost.ensure_indexes()
BlogPostWithTags.ensure_indexes()
BlogPostWithCustomField.ensure_indexes()

self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []})
self.assertEqual(BlogPostWithTags.compare_indexes(), {'missing': [], 'extra': []})
self.assertEqual(BlogPostWithCustomField.compare_indexes(), {'missing': [], 'extra': []})
assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []}
assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}

def test_compare_indexes_for_text_indexes(self):
""" Ensure that compare_indexes behaves correctly for text indexes """
@@ -192,18 +187,21 @@ class ClassMethodsTest(unittest.TestCase):
class Doc(Document):
a = StringField()
b = StringField()
meta = {'indexes': [
{'fields': ['$a', "$b"],
'default_language': 'english',
'weights': {'a': 10, 'b': 2}
}
]}
meta = {
"indexes": [
{
"fields": ["$a", "$b"],
"default_language": "english",
"weights": {"a": 10, "b": 2},
}
]
}

Doc.drop_collection()
Doc.ensure_indexes()
actual = Doc.compare_indexes()
expected = {'missing': [], 'extra': []}
self.assertEqual(actual, expected)
expected = {"missing": [], "extra": []}
assert actual == expected

def test_list_indexes_inheritance(self):
""" ensure that all of the indexes are listed regardless of the super-
@@ -215,23 +213,17 @@ class ClassMethodsTest(unittest.TestCase):
title = StringField()
description = StringField()

meta = {
'allow_inheritance': True
}
meta = {"allow_inheritance": True}

class BlogPostWithTags(BlogPost):
tags = StringField()

meta = {
'indexes': [('author', 'tags')]
}
meta = {"indexes": [("author", "tags")]}

class BlogPostWithTagsAndExtraText(BlogPostWithTags):
extra_text = StringField()

meta = {
'indexes': [('author', 'tags', 'extra_text')]
}
meta = {"indexes": [("author", "tags", "extra_text")]}

BlogPost.drop_collection()

@@ -239,17 +231,16 @@ class ClassMethodsTest(unittest.TestCase):
BlogPostWithTags.ensure_indexes()
BlogPostWithTagsAndExtraText.ensure_indexes()

self.assertEqual(BlogPost.list_indexes(),
BlogPostWithTags.list_indexes())
self.assertEqual(BlogPost.list_indexes(),
BlogPostWithTagsAndExtraText.list_indexes())
self.assertEqual(BlogPost.list_indexes(),
[[('_cls', 1), ('author', 1), ('tags', 1)],
[('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)],
[(u'_id', 1)], [('_cls', 1)]])
assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes()
assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes()
assert BlogPost.list_indexes() == [
[("_cls", 1), ("author", 1), ("tags", 1)],
[("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
[(u"_id", 1)],
[("_cls", 1)],
]

def test_register_delete_rule_inherited(self):

class Vaccine(Document):
name = StringField(required=True)
@@ -257,15 +248,17 @@ class ClassMethodsTest(unittest.TestCase):

class Animal(Document):
family = StringField(required=True)
vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL))
vaccine_made = ListField(
ReferenceField("Vaccine", reverse_delete_rule=PULL)
)

meta = {"allow_inheritance": True, "indexes": ["family"]}

class Cat(Animal):
name = StringField(required=True)

self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL)
self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL)
assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL
assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL

def test_collection_naming(self):
"""Ensure that a collection with a specified name may be used.
@@ -273,80 +266,76 @@ class ClassMethodsTest(unittest.TestCase):

class DefaultNamingTest(Document):
pass
self.assertEqual('default_naming_test',
DefaultNamingTest._get_collection_name())

assert "default_naming_test" == DefaultNamingTest._get_collection_name()

class CustomNamingTest(Document):
meta = {'collection': 'pimp_my_collection'}
meta = {"collection": "pimp_my_collection"}

self.assertEqual('pimp_my_collection',
CustomNamingTest._get_collection_name())
assert "pimp_my_collection" == CustomNamingTest._get_collection_name()

class DynamicNamingTest(Document):
meta = {'collection': lambda c: "DYNAMO"}
self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())
meta = {"collection": lambda c: "DYNAMO"}

assert "DYNAMO" == DynamicNamingTest._get_collection_name()

# Use Abstract class to handle backwards compatibility
class BaseDocument(Document):
meta = {
'abstract': True,
'collection': lambda c: c.__name__.lower()
}
meta = {"abstract": True, "collection": lambda c: c.__name__.lower()}

class OldNamingConvention(BaseDocument):
pass
self.assertEqual('oldnamingconvention',
OldNamingConvention._get_collection_name())

assert "oldnamingconvention" == OldNamingConvention._get_collection_name()

class InheritedAbstractNamingTest(BaseDocument):
meta = {'collection': 'wibble'}
self.assertEqual('wibble',
InheritedAbstractNamingTest._get_collection_name())
meta = {"collection": "wibble"}

assert "wibble" == InheritedAbstractNamingTest._get_collection_name()

# Mixin tests
class BaseMixin(object):
meta = {
'collection': lambda c: c.__name__.lower()
}
meta = {"collection": lambda c: c.__name__.lower()}

class OldMixinNamingConvention(Document, BaseMixin):
pass
self.assertEqual('oldmixinnamingconvention',
OldMixinNamingConvention._get_collection_name())

assert (
"oldmixinnamingconvention"
== OldMixinNamingConvention._get_collection_name()
)

class BaseMixin(object):
meta = {
'collection': lambda c: c.__name__.lower()
}
meta = {"collection": lambda c: c.__name__.lower()}

class BaseDocument(Document, BaseMixin):
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class MyDocument(BaseDocument):
pass

self.assertEqual('basedocument', MyDocument._get_collection_name())
assert "basedocument" == MyDocument._get_collection_name()

def test_custom_collection_name_operations(self):
"""Ensure that a collection with a specified name is used as expected.
"""
collection_name = 'personCollTest'
collection_name = "personCollTest"

class Person(Document):
name = StringField()
meta = {'collection': collection_name}
meta = {"collection": collection_name}

Person(name="Test User").save()
self.assertIn(collection_name, list_collection_names(self.db))
assert collection_name in list_collection_names(self.db)

user_obj = self.db[collection_name].find_one()
self.assertEqual(user_obj['name'], "Test User")
assert user_obj["name"] == "Test User"

user_obj = Person.objects[0]
self.assertEqual(user_obj.name, "Test User")
assert user_obj.name == "Test User"

Person.drop_collection()
self.assertNotIn(collection_name, list_collection_names(self.db))
assert collection_name not in list_collection_names(self.db)

def test_collection_name_and_primary(self):
"""Ensure that a collection with a specified name may be used.
@@ -354,15 +343,15 @@ class ClassMethodsTest(unittest.TestCase):

class Person(Document):
name = StringField(primary_key=True)
meta = {'collection': 'app'}
meta = {"collection": "app"}

Person(name="Test User").save()

user_obj = Person.objects.first()
self.assertEqual(user_obj.name, "Test User")
assert user_obj.name == "Test User"

Person.drop_collection()


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
926
tests/document/test_delta.py
Normal file
@@ -0,0 +1,926 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
|
||||
from bson import SON
|
||||
from mongoengine import *
|
||||
from mongoengine.pymongo_support import list_collection_names
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestDelta(MongoDBTestCase):
|
||||
def setUp(self):
|
||||
super(TestDelta, self).setUp()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
non_field = True
|
||||
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
self.Person = Person
|
||||
|
||||
def tearDown(self):
|
||||
for collection in list_collection_names(self.db):
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def test_delta(self):
|
||||
self.delta(Document)
|
||||
self.delta(DynamicDocument)
|
||||
|
||||
@staticmethod
|
||||
def delta(DocClass):
|
||||
class Doc(DocClass):
|
||||
string_field = StringField()
|
||||
int_field = IntField()
|
||||
dict_field = DictField()
|
||||
list_field = ListField()
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
doc.string_field = "hello"
|
||||
assert doc._get_changed_fields() == ["string_field"]
|
||||
assert doc._delta() == ({"string_field": "hello"}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.int_field = 1
|
||||
assert doc._get_changed_fields() == ["int_field"]
|
||||
assert doc._delta() == ({"int_field": 1}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
dict_value = {"hello": "world", "ping": "pong"}
|
||||
doc.dict_field = dict_value
|
||||
assert doc._get_changed_fields() == ["dict_field"]
|
||||
assert doc._delta() == ({"dict_field": dict_value}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
list_value = ["1", 2, {"hello": "world"}]
|
||||
doc.list_field = list_value
|
||||
assert doc._get_changed_fields() == ["list_field"]
|
||||
assert doc._delta() == ({"list_field": list_value}, {})
|
||||
|
||||
# Test unsetting
|
||||
doc._changed_fields = []
|
||||
doc.dict_field = {}
|
||||
assert doc._get_changed_fields() == ["dict_field"]
|
||||
assert doc._delta() == ({}, {"dict_field": 1})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.list_field = []
|
||||
assert doc._get_changed_fields() == ["list_field"]
|
||||
assert doc._delta() == ({}, {"list_field": 1})
|
||||
|
||||
def test_delta_recursive(self):
|
||||
self.delta_recursive(Document, EmbeddedDocument)
|
||||
self.delta_recursive(DynamicDocument, EmbeddedDocument)
|
||||
self.delta_recursive(Document, DynamicEmbeddedDocument)
|
||||
self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument)
|
||||
|
||||
def delta_recursive(self, DocClass, EmbeddedClass):
|
||||
class Embedded(EmbeddedClass):
|
||||
id = StringField()
|
||||
string_field = StringField()
|
||||
int_field = IntField()
|
||||
dict_field = DictField()
|
||||
list_field = ListField()
|
||||
|
||||
class Doc(DocClass):
|
||||
string_field = StringField()
|
||||
int_field = IntField()
|
||||
dict_field = DictField()
|
||||
list_field = ListField()
|
||||
embedded_field = EmbeddedDocumentField(Embedded)
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.id = "010101"
|
||||
embedded_1.string_field = "hello"
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {"hello": "world"}
|
||||
embedded_1.list_field = ["1", 2, {"hello": "world"}]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
assert doc._get_changed_fields() == ["embedded_field"]
|
||||
|
||||
embedded_delta = {
|
||||
"id": "010101",
|
||||
"string_field": "hello",
|
||||
"int_field": 1,
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
}
|
||||
assert doc.embedded_field._delta() == (embedded_delta, {})
|
||||
assert doc._delta() == ({"embedded_field": embedded_delta}, {})
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.dict_field = {}
|
||||
assert doc._get_changed_fields() == ["embedded_field.dict_field"]
|
||||
assert doc.embedded_field._delta() == ({}, {"dict_field": 1})
|
||||
assert doc._delta() == ({}, {"embedded_field.dict_field": 1})
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.dict_field == {}
|
||||
|
||||
doc.embedded_field.list_field = []
|
||||
assert doc._get_changed_fields() == ["embedded_field.list_field"]
|
||||
assert doc.embedded_field._delta() == ({}, {"list_field": 1})
|
||||
assert doc._delta() == ({}, {"embedded_field.list_field": 1})
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field == []
|
||||
|
||||
embedded_2 = Embedded()
|
||||
embedded_2.string_field = "hello"
|
||||
embedded_2.int_field = 1
|
||||
embedded_2.dict_field = {"hello": "world"}
|
||||
embedded_2.list_field = ["1", 2, {"hello": "world"}]
|
||||
|
||||
doc.embedded_field.list_field = ["1", 2, embedded_2]
|
||||
assert doc._get_changed_fields() == ["embedded_field.list_field"]
|
||||
|
||||
assert doc.embedded_field._delta() == (
|
||||
{
|
||||
"list_field": [
|
||||
"1",
|
||||
2,
|
||||
{
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"dict_field": {"hello": "world"},
|
||||
"int_field": 1,
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
{},
|
||||
)
|
||||
|
||||
assert doc._delta() == (
|
||||
{
|
||||
"embedded_field.list_field": [
|
||||
"1",
|
||||
2,
|
||||
{
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"dict_field": {"hello": "world"},
|
||||
"int_field": 1,
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
assert doc.embedded_field.list_field[0] == "1"
|
||||
assert doc.embedded_field.list_field[1] == 2
|
||||
for k in doc.embedded_field.list_field[2]._fields:
|
||||
assert doc.embedded_field.list_field[2][k] == embedded_2[k]
|
||||
|
||||
doc.embedded_field.list_field[2].string_field = "world"
|
||||
assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"]
|
||||
assert doc.embedded_field._delta() == (
|
||||
{"list_field.2.string_field": "world"},
|
||||
{},
|
||||
)
|
||||
assert doc._delta() == (
|
||||
{"embedded_field.list_field.2.string_field": "world"},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field[2].string_field == "world"
|
||||
|
||||
# Test multiple assignments
|
||||
doc.embedded_field.list_field[2].string_field = "hello world"
|
||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
||||
assert doc._get_changed_fields() == ["embedded_field.list_field.2"]
|
||||
assert doc.embedded_field._delta() == (
|
||||
{
|
||||
"list_field.2": {
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello world",
|
||||
"int_field": 1,
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
"dict_field": {"hello": "world"},
|
||||
}
|
||||
},
|
||||
{},
|
||||
)
|
||||
assert doc._delta() == (
|
||||
{
|
||||
"embedded_field.list_field.2": {
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello world",
|
||||
"int_field": 1,
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
"dict_field": {"hello": "world"},
|
||||
}
|
||||
},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field[2].string_field == "hello world"
|
||||
|
||||
# Test list native methods
|
||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
||||
assert doc._delta() == (
|
||||
{"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.append(1)
|
||||
assert doc._delta() == (
|
||||
{"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field[2]["hello"]
|
||||
assert doc._delta() == (
|
||||
{},
|
||||
{"embedded_field.list_field.2.list_field.2.hello": 1},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field
|
||||
assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1})
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.dict_field["Embedded"] = embedded_1
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.dict_field["Embedded"].string_field = "Hello World"
|
||||
assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"]
|
||||
assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {})
|
||||
|
||||
def test_circular_reference_deltas(self):
|
||||
self.circular_reference_deltas(Document, Document)
|
||||
self.circular_reference_deltas(Document, DynamicDocument)
|
||||
self.circular_reference_deltas(DynamicDocument, Document)
|
||||
self.circular_reference_deltas(DynamicDocument, DynamicDocument)
|
||||
|
||||
def circular_reference_deltas(self, DocClass1, DocClass2):
|
||||
class Person(DocClass1):
|
||||
name = StringField()
|
||||
owns = ListField(ReferenceField("Organization"))
|
||||
|
||||
class Organization(DocClass2):
|
||||
name = StringField()
|
||||
owner = ReferenceField("Person")
|
||||
|
||||
Person.drop_collection()
|
||||
Organization.drop_collection()
|
||||
|
||||
person = Person(name="owner").save()
|
||||
organization = Organization(name="company").save()
|
||||
|
||||
person.owns.append(organization)
|
||||
organization.owner = person
|
||||
|
||||
person.save()
|
||||
organization.save()
|
||||
|
||||
p = Person.objects[0].select_related()
|
||||
o = Organization.objects.first()
|
||||
assert p.owns[0] == o
|
||||
assert o.owner == p
|
||||
|
||||
def test_circular_reference_deltas_2(self):
|
||||
self.circular_reference_deltas_2(Document, Document)
|
||||
self.circular_reference_deltas_2(Document, DynamicDocument)
|
||||
self.circular_reference_deltas_2(DynamicDocument, Document)
|
||||
self.circular_reference_deltas_2(DynamicDocument, DynamicDocument)
|
||||
|
||||
def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
|
||||
class Person(DocClass1):
|
||||
name = StringField()
|
||||
owns = ListField(ReferenceField("Organization", dbref=dbref))
|
||||
employer = ReferenceField("Organization", dbref=dbref)
|
||||
|
||||
class Organization(DocClass2):
|
||||
name = StringField()
|
||||
owner = ReferenceField("Person", dbref=dbref)
|
||||
employees = ListField(ReferenceField("Person", dbref=dbref))
|
||||
|
||||
Person.drop_collection()
|
||||
Organization.drop_collection()
|
||||
|
||||
person = Person(name="owner").save()
|
||||
employee = Person(name="employee").save()
|
||||
organization = Organization(name="company").save()
|
||||
|
||||
person.owns.append(organization)
|
||||
organization.owner = person
|
||||
|
||||
organization.employees.append(employee)
|
||||
employee.employer = organization
|
||||
|
||||
person.save()
|
||||
organization.save()
|
||||
employee.save()
|
||||
|
||||
p = Person.objects.get(name="owner")
|
||||
e = Person.objects.get(name="employee")
|
||||
o = Organization.objects.first()
|
||||
|
||||
assert p.owns[0] == o
|
||||
assert o.owner == p
|
||||
assert e.employer == o
|
||||
|
||||
return person, organization, employee
|
||||
|
||||
def test_delta_db_field(self):
|
||||
self.delta_db_field(Document)
|
||||
self.delta_db_field(DynamicDocument)
|
||||
|
||||
def delta_db_field(self, DocClass):
|
||||
class Doc(DocClass):
|
||||
string_field = StringField(db_field="db_string_field")
|
||||
int_field = IntField(db_field="db_int_field")
|
||||
dict_field = DictField(db_field="db_dict_field")
|
||||
list_field = ListField(db_field="db_list_field")
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
doc.string_field = "hello"
|
||||
assert doc._get_changed_fields() == ["db_string_field"]
|
||||
assert doc._delta() == ({"db_string_field": "hello"}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.int_field = 1
|
||||
assert doc._get_changed_fields() == ["db_int_field"]
|
||||
assert doc._delta() == ({"db_int_field": 1}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
dict_value = {"hello": "world", "ping": "pong"}
|
||||
doc.dict_field = dict_value
|
||||
assert doc._get_changed_fields() == ["db_dict_field"]
|
||||
assert doc._delta() == ({"db_dict_field": dict_value}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
list_value = ["1", 2, {"hello": "world"}]
|
||||
doc.list_field = list_value
|
||||
assert doc._get_changed_fields() == ["db_list_field"]
|
||||
assert doc._delta() == ({"db_list_field": list_value}, {})
|
||||
|
||||
# Test unsetting
|
||||
doc._changed_fields = []
|
||||
doc.dict_field = {}
|
||||
assert doc._get_changed_fields() == ["db_dict_field"]
|
||||
assert doc._delta() == ({}, {"db_dict_field": 1})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.list_field = []
|
||||
assert doc._get_changed_fields() == ["db_list_field"]
|
||||
assert doc._delta() == ({}, {"db_list_field": 1})
|
||||
|
||||
# Test it saves that data
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc.string_field = "hello"
|
||||
doc.int_field = 1
|
||||
doc.dict_field = {"hello": "world"}
|
||||
doc.list_field = ["1", 2, {"hello": "world"}]
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
assert doc.string_field == "hello"
|
||||
assert doc.int_field == 1
|
||||
assert doc.dict_field == {"hello": "world"}
|
||||
assert doc.list_field == ["1", 2, {"hello": "world"}]
|
||||
|
||||
def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
|
||||
self.delta_recursive_db_field(Document, EmbeddedDocument)
|
||||
|
||||
def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
|
||||
self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
|
||||
|
||||
def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
|
||||
self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
|
||||
|
||||
def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
|
||||
self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
|
||||
|
||||
@staticmethod
|
||||
def delta_recursive_db_field(DocClass, EmbeddedClass):
|
||||
class Embedded(EmbeddedClass):
|
||||
string_field = StringField(db_field="db_string_field")
|
||||
int_field = IntField(db_field="db_int_field")
|
||||
dict_field = DictField(db_field="db_dict_field")
|
||||
list_field = ListField(db_field="db_list_field")
|
||||
|
||||
class Doc(DocClass):
|
||||
string_field = StringField(db_field="db_string_field")
|
||||
int_field = IntField(db_field="db_int_field")
|
||||
dict_field = DictField(db_field="db_dict_field")
|
||||
list_field = ListField(db_field="db_list_field")
|
||||
embedded_field = EmbeddedDocumentField(
|
||||
Embedded, db_field="db_embedded_field"
|
||||
)
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.string_field = "hello"
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {"hello": "world"}
|
||||
embedded_1.list_field = ["1", 2, {"hello": "world"}]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
assert doc._get_changed_fields() == ["db_embedded_field"]
|
||||
|
||||
embedded_delta = {
|
||||
"db_string_field": "hello",
|
||||
"db_int_field": 1,
|
||||
"db_dict_field": {"hello": "world"},
|
||||
"db_list_field": ["1", 2, {"hello": "world"}],
|
||||
}
|
||||
assert doc.embedded_field._delta() == (embedded_delta, {})
|
||||
assert doc._delta() == ({"db_embedded_field": embedded_delta}, {})
|
||||
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.dict_field = {}
|
||||
assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"]
|
||||
assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1})
|
||||
assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1})
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.dict_field == {}
|
||||
|
||||
assert doc._get_changed_fields() == []
|
||||
doc.embedded_field.list_field = []
|
||||
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
|
||||
assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
|
||||
assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1})
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field == []
|
||||
|
||||
embedded_2 = Embedded()
|
||||
embedded_2.string_field = "hello"
|
||||
embedded_2.int_field = 1
|
||||
embedded_2.dict_field = {"hello": "world"}
|
||||
embedded_2.list_field = ["1", 2, {"hello": "world"}]
|
||||
|
||||
doc.embedded_field.list_field = ["1", 2, embedded_2]
|
||||
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
|
||||
assert doc.embedded_field._delta() == (
|
||||
{
|
||||
"db_list_field": [
|
||||
"1",
|
||||
2,
|
||||
{
|
||||
"_cls": "Embedded",
|
||||
"db_string_field": "hello",
|
||||
"db_dict_field": {"hello": "world"},
|
||||
"db_int_field": 1,
|
||||
"db_list_field": ["1", 2, {"hello": "world"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
{},
|
||||
)
|
||||
|
||||
assert doc._delta() == (
|
||||
{
|
||||
"db_embedded_field.db_list_field": [
|
||||
"1",
|
||||
2,
|
||||
{
|
||||
"_cls": "Embedded",
|
||||
"db_string_field": "hello",
|
||||
"db_dict_field": {"hello": "world"},
|
||||
"db_int_field": 1,
|
||||
"db_list_field": ["1", 2, {"hello": "world"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
assert doc.embedded_field.list_field[0] == "1"
|
||||
assert doc.embedded_field.list_field[1] == 2
|
||||
for k in doc.embedded_field.list_field[2]._fields:
|
||||
assert doc.embedded_field.list_field[2][k] == embedded_2[k]
|
||||
|
||||
doc.embedded_field.list_field[2].string_field = "world"
|
||||
assert doc._get_changed_fields() == [
|
||||
"db_embedded_field.db_list_field.2.db_string_field"
|
||||
]
|
||||
assert doc.embedded_field._delta() == (
|
||||
{"db_list_field.2.db_string_field": "world"},
|
||||
{},
|
||||
)
|
||||
assert doc._delta() == (
|
||||
{"db_embedded_field.db_list_field.2.db_string_field": "world"},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field[2].string_field == "world"
|
||||
|
||||
# Test multiple assignments
|
||||
doc.embedded_field.list_field[2].string_field = "hello world"
|
||||
doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
|
||||
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"]
|
||||
assert doc.embedded_field._delta() == (
|
||||
{
|
||||
"db_list_field.2": {
|
||||
"_cls": "Embedded",
|
||||
"db_string_field": "hello world",
|
||||
"db_int_field": 1,
|
||||
"db_list_field": ["1", 2, {"hello": "world"}],
|
||||
"db_dict_field": {"hello": "world"},
|
||||
}
|
||||
},
|
||||
{},
|
||||
)
|
||||
assert doc._delta() == (
|
||||
{
|
||||
"db_embedded_field.db_list_field.2": {
|
||||
"_cls": "Embedded",
|
||||
"db_string_field": "hello world",
|
||||
"db_int_field": 1,
|
||||
"db_list_field": ["1", 2, {"hello": "world"}],
|
||||
"db_dict_field": {"hello": "world"},
|
||||
}
|
||||
},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field[2].string_field == "hello world"
|
||||
|
||||
# Test list native methods
|
||||
doc.embedded_field.list_field[2].list_field.pop(0)
|
||||
assert doc._delta() == (
|
||||
{
|
||||
"db_embedded_field.db_list_field.2.db_list_field": [
|
||||
2,
|
||||
{"hello": "world"},
|
||||
]
|
||||
},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.append(1)
|
||||
assert doc._delta() == (
|
||||
{
|
||||
"db_embedded_field.db_list_field.2.db_list_field": [
|
||||
2,
|
||||
{"hello": "world"},
|
||||
1,
|
||||
]
|
||||
},
|
||||
{},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]
|
||||
|
||||
doc.embedded_field.list_field[2].list_field.sort(key=str)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]
|
||||
|
||||
del doc.embedded_field.list_field[2].list_field[2]["hello"]
|
||||
assert doc._delta() == (
|
||||
{},
|
||||
{"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1},
|
||||
)
|
||||
doc.save()
|
||||
doc = doc.reload(10)
|
||||
|
||||
assert doc._delta() == ({}, {},)
|
||||
del doc.embedded_field.list_field[2].list_field
|
||||
assert doc._delta() == (
|
||||
{},
|
||||
{"db_embedded_field.db_list_field.2.db_list_field": 1},
|
||||
)
|
||||
|
||||
def test_delta_for_dynamic_documents(self):
|
||||
class Person(DynamicDocument):
|
||||
name = StringField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p = Person(name="James", age=34)
|
||||
assert p._delta() == (
|
||||
SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
|
||||
{},
|
||||
)
|
||||
|
||||
p.doc = 123
|
||||
del p.doc
|
||||
assert p._delta() == (
|
||||
SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
|
||||
{},
|
||||
)
|
||||
|
||||
p = Person()
|
||||
p.name = "Dean"
|
||||
p.age = 22
|
||||
p.save()
|
||||
|
||||
p.age = 24
|
||||
assert p.age == 24
|
||||
assert p._get_changed_fields() == ["age"]
|
||||
assert p._delta() == ({"age": 24}, {})
|
||||
|
||||
p = Person.objects(age=22).get()
|
||||
p.age = 24
|
||||
assert p.age == 24
|
||||
assert p._get_changed_fields() == ["age"]
|
||||
assert p._delta() == ({"age": 24}, {})
|
||||
|
||||
p.save()
|
||||
assert 1 == Person.objects(age=24).count()
|
||||
|
||||
def test_dynamic_delta(self):
|
||||
class Doc(DynamicDocument):
|
||||
pass
|
||||
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
assert doc._get_changed_fields() == []
|
||||
assert doc._delta() == ({}, {})
|
||||
|
||||
doc.string_field = "hello"
|
||||
assert doc._get_changed_fields() == ["string_field"]
|
||||
assert doc._delta() == ({"string_field": "hello"}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.int_field = 1
|
||||
assert doc._get_changed_fields() == ["int_field"]
|
||||
assert doc._delta() == ({"int_field": 1}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
dict_value = {"hello": "world", "ping": "pong"}
|
||||
doc.dict_field = dict_value
|
||||
assert doc._get_changed_fields() == ["dict_field"]
|
||||
assert doc._delta() == ({"dict_field": dict_value}, {})
|
||||
|
||||
doc._changed_fields = []
|
||||
list_value = ["1", 2, {"hello": "world"}]
|
||||
doc.list_field = list_value
|
||||
assert doc._get_changed_fields() == ["list_field"]
|
||||
assert doc._delta() == ({"list_field": list_value}, {})
|
||||
|
||||
# Test unsetting
|
||||
doc._changed_fields = []
|
||||
doc.dict_field = {}
|
||||
assert doc._get_changed_fields() == ["dict_field"]
|
||||
assert doc._delta() == ({}, {"dict_field": 1})
|
||||
|
||||
doc._changed_fields = []
|
||||
doc.list_field = []
|
||||
assert doc._get_changed_fields() == ["list_field"]
|
||||
assert doc._delta() == ({}, {"list_field": 1})
|
||||
|
||||
def test_delta_with_dbref_true(self):
|
||||
person, organization, employee = self.circular_reference_deltas_2(
|
||||
Document, Document, True
|
||||
)
|
||||
employee.name = "test"
|
||||
|
||||
assert organization._get_changed_fields() == []
|
||||
|
||||
updates, removals = organization._delta()
|
||||
assert removals == {}
|
||||
assert updates == {}
|
||||
|
||||
organization.employees.append(person)
|
||||
updates, removals = organization._delta()
|
||||
assert removals == {}
|
||||
assert "employees" in updates
|
||||
|
||||
def test_delta_with_dbref_false(self):
|
||||
person, organization, employee = self.circular_reference_deltas_2(
|
||||
Document, Document, False
|
||||
)
|
||||
employee.name = "test"
|
||||
|
||||
assert organization._get_changed_fields() == []
|
||||
|
||||
updates, removals = organization._delta()
|
||||
assert removals == {}
|
||||
assert updates == {}
|
||||
|
||||
organization.employees.append(person)
|
||||
updates, removals = organization._delta()
|
||||
assert removals == {}
|
||||
assert "employees" in updates
|
||||
|
||||
def test_nested_nested_fields_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc)))
|
||||
name = StringField()
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
mydoc = MyDoc(
|
||||
name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}
|
||||
).save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
subdoc = mydoc.subs["a"]["b"]
|
||||
subdoc.name = "bar"
|
||||
|
||||
assert subdoc._get_changed_fields() == ["name"]
|
||||
assert mydoc._get_changed_fields() == ["subs.a.b.name"]
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
assert mydoc._get_changed_fields() == []
|
||||
|
||||
def test_lower_level_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
MyDoc().save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
mydoc.subs["a"] = EmbeddedDoc()
|
||||
assert mydoc._get_changed_fields() == ["subs.a"]
|
||||
|
||||
subdoc = mydoc.subs["a"]
|
||||
subdoc.name = "bar"
|
||||
|
||||
assert subdoc._get_changed_fields() == ["name"]
|
||||
assert mydoc._get_changed_fields() == ["subs.a"]
|
||||
mydoc.save()
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
assert mydoc._get_changed_fields() == []
|
||||
|
||||
def test_upper_level_mark_as_changed(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
class MyDoc(Document):
|
||||
subs = MapField(EmbeddedDocumentField(EmbeddedDoc))
|
||||
|
||||
MyDoc.drop_collection()
|
||||
|
||||
MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save()
|
||||
|
||||
mydoc = MyDoc.objects.first()
|
||||
subdoc = mydoc.subs["a"]
|
||||
subdoc.name = "bar"
|
||||
|
||||
assert subdoc._get_changed_fields() == ["name"]
|
||||
assert mydoc._get_changed_fields() == ["subs.a.name"]
|
||||
|
||||
mydoc.subs["a"] = EmbeddedDoc()
|
||||
assert mydoc._get_changed_fields() == ["subs.a"]
|
||||
mydoc.save()
|
||||
|
||||
mydoc._clear_changed_fields()
|
||||
assert mydoc._get_changed_fields() == []
|
||||
|
||||
def test_referenced_object_changed_attributes(self):
|
||||
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""
|
||||
|
||||
class Organization(Document):
|
||||
name = StringField()
|
||||
|
||||
class User(Document):
|
||||
name = StringField()
|
||||
org = ReferenceField("Organization", required=True)
|
||||
|
||||
Organization.drop_collection()
|
||||
User.drop_collection()
|
||||
|
||||
org1 = Organization(name="Org 1")
|
||||
org1.save()
|
||||
|
||||
org2 = Organization(name="Org 2")
|
||||
org2.save()
|
||||
|
||||
user = User(name="Fred", org=org1)
|
||||
user.save()
|
||||
|
||||
org1.reload()
|
||||
org2.reload()
|
||||
user.reload()
|
||||
assert org1.name == "Org 1"
|
||||
assert org2.name == "Org 2"
|
||||
assert user.name == "Fred"
|
||||
|
||||
user.name = "Harold"
|
||||
user.org = org2
|
||||
|
||||
org2.name = "New Org 2"
|
||||
assert org2.name == "New Org 2"
|
||||
|
||||
user.save()
|
||||
org2.save()
|
||||
|
||||
assert org2.name == "New Org 2"
|
||||
org2.reload()
|
||||
assert org2.name == "New Org 2"
|
||||
|
||||
def test_delta_for_nested_map_fields(self):
|
||||
class UInfoDocument(Document):
|
||||
phone = StringField()
|
||||
|
||||
class EmbeddedRole(EmbeddedDocument):
|
||||
type = StringField()
|
||||
|
||||
class EmbeddedUser(EmbeddedDocument):
|
||||
name = StringField()
|
||||
roles = MapField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||
rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole))
|
||||
info = ReferenceField(UInfoDocument)
|
||||
|
||||
class Doc(Document):
|
||||
users = MapField(field=EmbeddedDocumentField(EmbeddedUser))
|
||||
num = IntField(default=-1)
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
doc = Doc(num=1)
|
||||
doc.users["007"] = EmbeddedUser(name="Agent007")
|
||||
doc.save()
|
||||
|
||||
uinfo = UInfoDocument(phone="79089269066")
|
||||
uinfo.save()
|
||||
|
||||
d = Doc.objects(num=1).first()
|
||||
d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin")
|
||||
d.users["007"]["rolist"].append(EmbeddedRole(type="oops"))
|
||||
d.users["007"]["info"] = uinfo
|
||||
delta = d._delta()
|
||||
assert True == ("users.007.roles.666" in delta[0])
|
||||
assert True == ("users.007.rolist" in delta[0])
|
||||
assert True == ("users.007.info" in delta[0])
|
||||
assert "superadmin" == delta[0]["users.007.roles.666"]["type"]
|
||||
assert "oops" == delta[0]["users.007.rolist"][0]["type"]
|
||||
assert uinfo.id == delta[0]["users.007.info"]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,19 +1,20 @@
|
||||
import unittest
|
||||
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
__all__ = ("TestDynamicDocument", )
|
||||
__all__ = ("TestDynamicDocument",)
|
||||
|
||||
|
||||
class TestDynamicDocument(MongoDBTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestDynamicDocument, self).setUp()
|
||||
|
||||
class Person(DynamicDocument):
|
||||
name = StringField()
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
@@ -26,16 +27,15 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
p.name = "James"
|
||||
p.age = 34
|
||||
|
||||
self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James",
|
||||
"age": 34})
|
||||
self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"])
|
||||
assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34}
|
||||
assert p.to_mongo().keys() == ["_cls", "name", "age"]
|
||||
p.save()
|
||||
self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"])
|
||||
assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"]
|
||||
|
||||
self.assertEqual(self.Person.objects.first().age, 34)
|
||||
assert self.Person.objects.first().age == 34
|
||||
|
||||
# Confirm no changes to self.Person
|
||||
self.assertFalse(hasattr(self.Person, 'age'))
|
||||
assert not hasattr(self.Person, "age")
|
||||
|
||||
def test_change_scope_of_variable(self):
|
||||
"""Test changing the scope of a dynamic field has no adverse effects"""
|
||||
@@ -45,11 +45,11 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
p.save()
|
||||
|
||||
p = self.Person.objects.get()
|
||||
p.misc = {'hello': 'world'}
|
||||
p.misc = {"hello": "world"}
|
||||
p.save()
|
||||
|
||||
p = self.Person.objects.get()
|
||||
self.assertEqual(p.misc, {'hello': 'world'})
|
||||
assert p.misc == {"hello": "world"}
|
||||
|
||||
def test_delete_dynamic_field(self):
|
||||
"""Test deleting a dynamic field works"""
|
||||
@@ -60,23 +60,23 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
p.save()
|
||||
|
||||
p = self.Person.objects.get()
|
||||
p.misc = {'hello': 'world'}
|
||||
p.misc = {"hello": "world"}
|
||||
p.save()
|
||||
|
||||
p = self.Person.objects.get()
|
||||
self.assertEqual(p.misc, {'hello': 'world'})
|
||||
assert p.misc == {"hello": "world"}
|
||||
collection = self.db[self.Person._get_collection_name()]
|
||||
obj = collection.find_one()
|
||||
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
|
||||
assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"]
|
||||
|
||||
del p.misc
|
||||
p.save()
|
||||
|
||||
p = self.Person.objects.get()
|
||||
self.assertFalse(hasattr(p, 'misc'))
|
||||
assert not hasattr(p, "misc")
|
||||
|
||||
obj = collection.find_one()
|
||||
self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name'])
|
||||
assert sorted(obj.keys()) == ["_cls", "_id", "name"]
|
||||
|
||||
def test_reload_after_unsetting(self):
|
||||
p = self.Person()
|
||||
@@ -90,78 +90,55 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
p = self.Person.objects.create()
|
||||
p.update(age=1)
|
||||
|
||||
self.assertEqual(len(p._data), 3)
|
||||
self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name'])
|
||||
assert len(p._data) == 3
|
||||
assert sorted(p._data.keys()) == ["_cls", "id", "name"]
|
||||
|
||||
p.reload()
|
||||
self.assertEqual(len(p._data), 4)
|
||||
self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name'])
|
||||
assert len(p._data) == 4
|
||||
assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"]
|
||||
|
||||
def test_fields_without_underscore(self):
|
||||
"""Ensure we can query dynamic fields"""
|
||||
Person = self.Person
|
||||
|
||||
p = self.Person(name='Dean')
|
||||
p = self.Person(name="Dean")
|
||||
p.save()
|
||||
|
||||
raw_p = Person.objects.as_pymongo().get(id=p.id)
|
||||
self.assertEqual(
|
||||
raw_p,
|
||||
{
|
||||
'_cls': u'Person',
|
||||
'_id': p.id,
|
||||
'name': u'Dean'
|
||||
}
|
||||
)
|
||||
assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"}
|
||||
|
||||
p.name = 'OldDean'
|
||||
p.newattr = 'garbage'
|
||||
p.name = "OldDean"
|
||||
p.newattr = "garbage"
|
||||
p.save()
|
||||
raw_p = Person.objects.as_pymongo().get(id=p.id)
|
||||
self.assertEqual(
|
||||
raw_p,
|
||||
{
|
||||
'_cls': u'Person',
|
||||
'_id': p.id,
|
||||
'name': 'OldDean',
|
||||
'newattr': u'garbage'
|
||||
}
|
||||
)
|
||||
assert raw_p == {
|
||||
"_cls": u"Person",
|
||||
"_id": p.id,
|
||||
"name": "OldDean",
|
||||
"newattr": u"garbage",
|
||||
}
|
||||
|
||||
def test_fields_containing_underscore(self):
|
||||
"""Ensure we can query dynamic fields"""
|
||||
|
||||
class WeirdPerson(DynamicDocument):
|
||||
name = StringField()
|
||||
_name = StringField()
|
||||
|
||||
WeirdPerson.drop_collection()
|
||||
|
||||
p = WeirdPerson(name='Dean', _name='Dean')
|
||||
p = WeirdPerson(name="Dean", _name="Dean")
|
||||
p.save()
|
||||
|
||||
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
|
||||
self.assertEqual(
|
||||
raw_p,
|
||||
{
|
||||
'_id': p.id,
|
||||
'_name': u'Dean',
|
||||
'name': u'Dean'
|
||||
}
|
||||
)
|
||||
assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"}
|
||||
|
||||
p.name = 'OldDean'
|
||||
p._name = 'NewDean'
|
||||
p._newattr1 = 'garbage' # Unknown fields won't be added
|
||||
p.name = "OldDean"
|
||||
p._name = "NewDean"
|
||||
p._newattr1 = "garbage" # Unknown fields won't be added
|
||||
p.save()
|
||||
raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id)
|
||||
self.assertEqual(
|
||||
raw_p,
|
||||
{
|
||||
'_id': p.id,
|
||||
'_name': u'NewDean',
|
||||
'name': u'OldDean',
|
||||
}
|
||||
)
|
||||
assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"}
|
||||
|
||||
def test_dynamic_document_queries(self):
|
||||
"""Ensure we can query dynamic fields"""
|
||||
@@ -170,10 +147,10 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
p.age = 22
|
||||
p.save()
|
||||
|
||||
self.assertEqual(1, self.Person.objects(age=22).count())
|
||||
assert 1 == self.Person.objects(age=22).count()
|
||||
p = self.Person.objects(age=22)
|
||||
p = p.get()
|
||||
self.assertEqual(22, p.age)
|
||||
assert 22 == p.age
|
||||
|
||||
def test_complex_dynamic_document_queries(self):
|
||||
class Person(DynamicDocument):
|
||||
@@ -193,26 +170,25 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
p2.age = 10
|
||||
p2.save()
|
||||
|
||||
self.assertEqual(Person.objects(age__icontains='ten').count(), 2)
|
||||
self.assertEqual(Person.objects(age__gte=10).count(), 1)
|
||||
assert Person.objects(age__icontains="ten").count() == 2
|
||||
assert Person.objects(age__gte=10).count() == 1
|
||||
|
||||
def test_complex_data_lookups(self):
|
||||
"""Ensure you can query dynamic document dynamic fields"""
|
||||
p = self.Person()
|
||||
p.misc = {'hello': 'world'}
|
||||
p.misc = {"hello": "world"}
|
||||
p.save()
|
||||
|
||||
self.assertEqual(1, self.Person.objects(misc__hello='world').count())
|
||||
assert 1 == self.Person.objects(misc__hello="world").count()
|
||||
|
||||
def test_three_level_complex_data_lookups(self):
|
||||
"""Ensure you can query three level document dynamic fields"""
|
||||
p = self.Person.objects.create(
|
||||
misc={'hello': {'hello2': 'world'}}
|
||||
)
|
||||
self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count())
|
||||
self.Person.objects.create(misc={"hello": {"hello2": "world"}})
|
||||
assert 1 == self.Person.objects(misc__hello__hello2="world").count()
|
||||
|
||||
def test_complex_embedded_document_validation(self):
|
||||
"""Ensure embedded dynamic documents may be validated"""
|
||||
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
content = URLField()
|
||||
|
||||
@@ -222,27 +198,29 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
Doc.drop_collection()
|
||||
doc = Doc()
|
||||
|
||||
embedded_doc_1 = Embedded(content='http://mongoengine.org')
|
||||
embedded_doc_1 = Embedded(content="http://mongoengine.org")
|
||||
embedded_doc_1.validate()
|
||||
|
||||
embedded_doc_2 = Embedded(content='this is not a url')
|
||||
self.assertRaises(ValidationError, embedded_doc_2.validate)
|
||||
embedded_doc_2 = Embedded(content="this is not a url")
|
||||
with pytest.raises(ValidationError):
|
||||
embedded_doc_2.validate()
|
||||
|
||||
doc.embedded_field_1 = embedded_doc_1
|
||||
doc.embedded_field_2 = embedded_doc_2
|
||||
self.assertRaises(ValidationError, doc.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
doc.validate()
|
||||
|
||||
def test_inheritance(self):
|
||||
"""Ensure that dynamic document plays nice with inheritance"""
|
||||
|
||||
class Employee(self.Person):
|
||||
salary = IntField()
|
||||
|
||||
Employee.drop_collection()
|
||||
|
||||
self.assertIn('name', Employee._fields)
|
||||
self.assertIn('salary', Employee._fields)
|
||||
self.assertEqual(Employee._get_collection_name(),
|
||||
self.Person._get_collection_name())
|
||||
assert "name" in Employee._fields
|
||||
assert "salary" in Employee._fields
|
||||
assert Employee._get_collection_name() == self.Person._get_collection_name()
|
||||
|
||||
joe_bloggs = Employee()
|
||||
joe_bloggs.name = "Joe Bloggs"
|
||||
@@ -250,14 +228,15 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
joe_bloggs.age = 20
|
||||
joe_bloggs.save()
|
||||
|
||||
self.assertEqual(1, self.Person.objects(age=20).count())
|
||||
self.assertEqual(1, Employee.objects(age=20).count())
|
||||
assert 1 == self.Person.objects(age=20).count()
|
||||
assert 1 == Employee.objects(age=20).count()
|
||||
|
||||
joe_bloggs = self.Person.objects.first()
|
||||
self.assertIsInstance(joe_bloggs, Employee)
|
||||
assert isinstance(joe_bloggs, Employee)
|
||||
|
||||
def test_embedded_dynamic_document(self):
|
||||
"""Test dynamic embedded documents"""
|
||||
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
pass
|
||||
|
||||
@@ -268,33 +247,33 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
doc = Doc()
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.string_field = 'hello'
|
||||
embedded_1.string_field = "hello"
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {'hello': 'world'}
|
||||
embedded_1.list_field = ['1', 2, {'hello': 'world'}]
|
||||
embedded_1.dict_field = {"hello": "world"}
|
||||
embedded_1.list_field = ["1", 2, {"hello": "world"}]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
self.assertEqual(doc.to_mongo(), {
|
||||
assert doc.to_mongo() == {
|
||||
"embedded_field": {
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"int_field": 1,
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ['1', 2, {'hello': 'world'}]
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
}
|
||||
})
|
||||
}
|
||||
doc.save()
|
||||
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc.embedded_field.__class__, Embedded)
|
||||
self.assertEqual(doc.embedded_field.string_field, "hello")
|
||||
self.assertEqual(doc.embedded_field.int_field, 1)
|
||||
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
|
||||
self.assertEqual(doc.embedded_field.list_field,
|
||||
['1', 2, {'hello': 'world'}])
|
||||
assert doc.embedded_field.__class__ == Embedded
|
||||
assert doc.embedded_field.string_field == "hello"
|
||||
assert doc.embedded_field.int_field == 1
|
||||
assert doc.embedded_field.dict_field == {"hello": "world"}
|
||||
assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}]
|
||||
|
||||
def test_complex_embedded_documents(self):
|
||||
"""Test complex dynamic embedded documents setups"""
|
||||
|
||||
class Embedded(DynamicEmbeddedDocument):
|
||||
pass
|
||||
|
||||
@@ -305,51 +284,54 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
doc = Doc()
|
||||
|
||||
embedded_1 = Embedded()
|
||||
embedded_1.string_field = 'hello'
|
||||
embedded_1.string_field = "hello"
|
||||
embedded_1.int_field = 1
|
||||
embedded_1.dict_field = {'hello': 'world'}
|
||||
embedded_1.dict_field = {"hello": "world"}
|
||||
|
||||
embedded_2 = Embedded()
|
||||
embedded_2.string_field = 'hello'
|
||||
embedded_2.string_field = "hello"
|
||||
embedded_2.int_field = 1
|
||||
embedded_2.dict_field = {'hello': 'world'}
|
||||
embedded_2.list_field = ['1', 2, {'hello': 'world'}]
|
||||
embedded_2.dict_field = {"hello": "world"}
|
||||
embedded_2.list_field = ["1", 2, {"hello": "world"}]
|
||||
|
||||
embedded_1.list_field = ['1', 2, embedded_2]
|
||||
embedded_1.list_field = ["1", 2, embedded_2]
|
||||
doc.embedded_field = embedded_1
|
||||
|
||||
self.assertEqual(doc.to_mongo(), {
|
||||
assert doc.to_mongo() == {
|
||||
"embedded_field": {
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"int_field": 1,
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ['1', 2,
|
||||
{"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"int_field": 1,
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ['1', 2, {'hello': 'world'}]}
|
||||
]
|
||||
"list_field": [
|
||||
"1",
|
||||
2,
|
||||
{
|
||||
"_cls": "Embedded",
|
||||
"string_field": "hello",
|
||||
"int_field": 1,
|
||||
"dict_field": {"hello": "world"},
|
||||
"list_field": ["1", 2, {"hello": "world"}],
|
||||
},
|
||||
],
|
||||
}
|
||||
})
|
||||
}
|
||||
doc.save()
|
||||
doc = Doc.objects.first()
|
||||
self.assertEqual(doc.embedded_field.__class__, Embedded)
|
||||
self.assertEqual(doc.embedded_field.string_field, "hello")
|
||||
self.assertEqual(doc.embedded_field.int_field, 1)
|
||||
self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'})
|
||||
self.assertEqual(doc.embedded_field.list_field[0], '1')
|
||||
self.assertEqual(doc.embedded_field.list_field[1], 2)
|
||||
assert doc.embedded_field.__class__ == Embedded
|
||||
assert doc.embedded_field.string_field == "hello"
|
||||
assert doc.embedded_field.int_field == 1
|
||||
assert doc.embedded_field.dict_field == {"hello": "world"}
|
||||
assert doc.embedded_field.list_field[0] == "1"
|
||||
assert doc.embedded_field.list_field[1] == 2
|
||||
|
||||
embedded_field = doc.embedded_field.list_field[2]
|
||||
|
||||
self.assertEqual(embedded_field.__class__, Embedded)
|
||||
self.assertEqual(embedded_field.string_field, "hello")
|
||||
self.assertEqual(embedded_field.int_field, 1)
|
||||
self.assertEqual(embedded_field.dict_field, {'hello': 'world'})
|
||||
self.assertEqual(embedded_field.list_field, ['1', 2,
|
||||
{'hello': 'world'}])
|
||||
assert embedded_field.__class__ == Embedded
|
||||
assert embedded_field.string_field == "hello"
|
||||
assert embedded_field.int_field == 1
|
||||
assert embedded_field.dict_field == {"hello": "world"}
|
||||
assert embedded_field.list_field == ["1", 2, {"hello": "world"}]
|
||||
|
||||
def test_dynamic_and_embedded(self):
|
||||
"""Ensure embedded documents play nicely"""
|
||||
@@ -368,18 +350,18 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
person.address.city = "Lundenne"
|
||||
person.save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Lundenne")
|
||||
assert Person.objects.first().address.city == "Lundenne"
|
||||
|
||||
person = Person.objects.first()
|
||||
person.address = Address(city="Londinium")
|
||||
person.save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Londinium")
|
||||
assert Person.objects.first().address.city == "Londinium"
|
||||
|
||||
person = Person.objects.first()
|
||||
person.age = 35
|
||||
person.save()
|
||||
self.assertEqual(Person.objects.first().age, 35)
|
||||
assert Person.objects.first().age == 35
|
||||
|
||||
def test_dynamic_embedded_works_with_only(self):
|
||||
"""Ensure custom fieldnames on a dynamic embedded document are found by qs.only()"""
|
||||
@@ -392,10 +374,15 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save()
|
||||
Person(
|
||||
name="Eric", address=Address(city="San Francisco", street_number="1337")
|
||||
).save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.street_number, '1337')
|
||||
self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337')
|
||||
assert Person.objects.first().address.street_number == "1337"
|
||||
assert (
|
||||
Person.objects.only("address__street_number").first().address.street_number
|
||||
== "1337"
|
||||
)
|
||||
|
||||
def test_dynamic_and_embedded_dict_access(self):
|
||||
"""Ensure embedded dynamic documents work with dict[] style access"""
|
||||
@@ -419,21 +406,21 @@ class TestDynamicDocument(MongoDBTestCase):
|
||||
person["address"]["city"] = "Lundenne"
|
||||
person.save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Lundenne")
|
||||
assert Person.objects.first().address.city == "Lundenne"
|
||||
|
||||
self.assertEqual(Person.objects.first().phone, "555-1212")
|
||||
assert Person.objects.first().phone == "555-1212"
|
||||
|
||||
person = Person.objects.first()
|
||||
person.address = Address(city="Londinium")
|
||||
person.save()
|
||||
|
||||
self.assertEqual(Person.objects.first().address.city, "Londinium")
|
||||
assert Person.objects.first().address.city == "Londinium"
|
||||
|
||||
person = Person.objects.first()
|
||||
person["age"] = 35
|
||||
person.save()
|
||||
self.assertEqual(Person.objects.first().age, 35)
|
||||
assert Person.objects.first().age == 35
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
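The dynamic-document tests above depend on DynamicEmbeddedDocument accepting attributes that were never declared as fields. A minimal sketch of that behaviour, using hypothetical Profile/Person models and assuming a local test database:

from mongoengine import (
    DynamicDocument,
    DynamicEmbeddedDocument,
    EmbeddedDocumentField,
    connect,
)

connect(db="mongoenginetest")  # assumed local test database

class Profile(DynamicEmbeddedDocument):
    pass  # no fields declared; attributes are accepted dynamically

class Person(DynamicDocument):
    profile = EmbeddedDocumentField(Profile)

Person.drop_collection()

person = Person(profile=Profile())
person.profile.nickname = "joe"                      # undeclared field, stored as-is
person.profile.scores = ["1", 2, {"hello": "world"}]
person.save()

assert Person.objects.first().profile.nickname == "joe"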
File diff suppressed because it is too large
631
tests/document/test_inheritance.py
Normal file
@@ -0,0 +1,631 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
import pytest
|
||||
|
||||
from mongoengine import (
|
||||
BooleanField,
|
||||
Document,
|
||||
EmbeddedDocument,
|
||||
EmbeddedDocumentField,
|
||||
GenericReferenceField,
|
||||
IntField,
|
||||
ReferenceField,
|
||||
StringField,
|
||||
)
|
||||
from mongoengine.pymongo_support import list_collection_names
|
||||
from tests.fixtures import Base
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestInheritance(MongoDBTestCase):
|
||||
def tearDown(self):
|
||||
for collection in list_collection_names(self.db):
|
||||
self.db.drop_collection(collection)
|
||||
|
||||
def test_constructor_cls(self):
|
||||
# Ensures _cls is properly set during construction
|
||||
# and when object gets reloaded (prevent regression of #1950)
|
||||
class EmbedData(EmbeddedDocument):
|
||||
data = StringField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class DataDoc(Document):
|
||||
name = StringField()
|
||||
embed = EmbeddedDocumentField(EmbedData)
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
|
||||
assert test_doc._cls == "DataDoc"
|
||||
assert test_doc.embed._cls == "EmbedData"
|
||||
test_doc.save()
|
||||
saved_doc = DataDoc.objects.with_id(test_doc.id)
|
||||
assert test_doc._cls == saved_doc._cls
|
||||
assert test_doc.embed._cls == saved_doc.embed._cls
|
||||
test_doc.delete()
|
||||
|
||||
def test_superclasses(self):
|
||||
"""Ensure that the correct list of superclasses is assembled.
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
class Guppy(Fish):
|
||||
pass
|
||||
|
||||
class Mammal(Animal):
|
||||
pass
|
||||
|
||||
class Dog(Mammal):
|
||||
pass
|
||||
|
||||
class Human(Mammal):
|
||||
pass
|
||||
|
||||
assert Animal._superclasses == ()
|
||||
assert Fish._superclasses == ("Animal",)
|
||||
assert Guppy._superclasses == ("Animal", "Animal.Fish")
|
||||
assert Mammal._superclasses == ("Animal",)
|
||||
assert Dog._superclasses == ("Animal", "Animal.Mammal")
|
||||
assert Human._superclasses == ("Animal", "Animal.Mammal")
|
||||
|
||||
def test_external_superclasses(self):
|
||||
"""Ensure that the correct list of super classes is assembled when
|
||||
importing part of the model.
|
||||
"""
|
||||
|
||||
class Animal(Base):
|
||||
pass
|
||||
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
class Guppy(Fish):
|
||||
pass
|
||||
|
||||
class Mammal(Animal):
|
||||
pass
|
||||
|
||||
class Dog(Mammal):
|
||||
pass
|
||||
|
||||
class Human(Mammal):
|
||||
pass
|
||||
|
||||
assert Animal._superclasses == ("Base",)
|
||||
assert Fish._superclasses == ("Base", "Base.Animal")
|
||||
assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish")
|
||||
assert Mammal._superclasses == ("Base", "Base.Animal")
|
||||
assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
|
||||
assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal")
|
||||
|
||||
def test_subclasses(self):
|
||||
"""Ensure that the correct list of _subclasses (subclasses) is
|
||||
assembled.
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
class Guppy(Fish):
|
||||
pass
|
||||
|
||||
class Mammal(Animal):
|
||||
pass
|
||||
|
||||
class Dog(Mammal):
|
||||
pass
|
||||
|
||||
class Human(Mammal):
|
||||
pass
|
||||
|
||||
assert Animal._subclasses == (
|
||||
"Animal",
|
||||
"Animal.Fish",
|
||||
"Animal.Fish.Guppy",
|
||||
"Animal.Mammal",
|
||||
"Animal.Mammal.Dog",
|
||||
"Animal.Mammal.Human",
|
||||
)
|
||||
assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy")
|
||||
assert Guppy._subclasses == ("Animal.Fish.Guppy",)
|
||||
assert Mammal._subclasses == (
|
||||
"Animal.Mammal",
|
||||
"Animal.Mammal.Dog",
|
||||
"Animal.Mammal.Human",
|
||||
)
|
||||
assert Human._subclasses == ("Animal.Mammal.Human",)
|
||||
|
||||
def test_external_subclasses(self):
|
||||
"""Ensure that the correct list of _subclasses (subclasses) is
|
||||
assembled when importing part of the model.
|
||||
"""
|
||||
|
||||
class Animal(Base):
|
||||
pass
|
||||
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
class Guppy(Fish):
|
||||
pass
|
||||
|
||||
class Mammal(Animal):
|
||||
pass
|
||||
|
||||
class Dog(Mammal):
|
||||
pass
|
||||
|
||||
class Human(Mammal):
|
||||
pass
|
||||
|
||||
assert Animal._subclasses == (
|
||||
"Base.Animal",
|
||||
"Base.Animal.Fish",
|
||||
"Base.Animal.Fish.Guppy",
|
||||
"Base.Animal.Mammal",
|
||||
"Base.Animal.Mammal.Dog",
|
||||
"Base.Animal.Mammal.Human",
|
||||
)
|
||||
assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy")
|
||||
assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",)
|
||||
assert Mammal._subclasses == (
|
||||
"Base.Animal.Mammal",
|
||||
"Base.Animal.Mammal.Dog",
|
||||
"Base.Animal.Mammal.Human",
|
||||
)
|
||||
assert Human._subclasses == ("Base.Animal.Mammal.Human",)
|
||||
|
||||
def test_dynamic_declarations(self):
|
||||
"""Test that declaring an extra class updates meta data"""
|
||||
|
||||
class Animal(Document):
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
assert Animal._superclasses == ()
|
||||
assert Animal._subclasses == ("Animal",)
|
||||
|
||||
# Test dynamically adding a class changes the meta data
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
assert Animal._superclasses == ()
|
||||
assert Animal._subclasses == ("Animal", "Animal.Fish")
|
||||
|
||||
assert Fish._superclasses == ("Animal",)
|
||||
assert Fish._subclasses == ("Animal.Fish",)
|
||||
|
||||
# Test dynamically adding an inherited class changes the meta data
|
||||
class Pike(Fish):
|
||||
pass
|
||||
|
||||
assert Animal._superclasses == ()
|
||||
assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike")
|
||||
|
||||
assert Fish._superclasses == ("Animal",)
|
||||
assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike")
|
||||
|
||||
assert Pike._superclasses == ("Animal", "Animal.Fish")
|
||||
assert Pike._subclasses == ("Animal.Fish.Pike",)
|
||||
|
||||
def test_inheritance_meta_data(self):
|
||||
"""Ensure that document may inherit fields from a superclass document.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
assert ["_cls", "age", "id", "name", "salary"] == sorted(
|
||||
Employee._fields.keys()
|
||||
)
|
||||
assert Employee._get_collection_name() == Person._get_collection_name()
|
||||
|
||||
def test_inheritance_to_mongo_keys(self):
|
||||
"""Ensure that document may inherit fields from a superclass document.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Employee(Person):
|
||||
salary = IntField()
|
||||
|
||||
assert ["_cls", "age", "id", "name", "salary"] == sorted(
|
||||
Employee._fields.keys()
|
||||
)
|
||||
assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"]
|
||||
assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [
|
||||
"_cls",
|
||||
"name",
|
||||
"age",
|
||||
"salary",
|
||||
]
|
||||
assert Employee._get_collection_name() == Person._get_collection_name()
|
||||
|
||||
def test_indexes_and_multiple_inheritance(self):
|
||||
""" Ensure that all of the indexes are created for a document with
|
||||
multiple inheritance.
|
||||
"""
|
||||
|
||||
class A(Document):
|
||||
a = StringField()
|
||||
|
||||
meta = {"allow_inheritance": True, "indexes": ["a"]}
|
||||
|
||||
class B(Document):
|
||||
b = StringField()
|
||||
|
||||
meta = {"allow_inheritance": True, "indexes": ["b"]}
|
||||
|
||||
class C(A, B):
|
||||
pass
|
||||
|
||||
A.drop_collection()
|
||||
B.drop_collection()
|
||||
C.drop_collection()
|
||||
|
||||
C.ensure_indexes()
|
||||
|
||||
assert sorted(
|
||||
[idx["key"] for idx in C._get_collection().index_information().values()]
|
||||
) == sorted(
|
||||
[[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]]
|
||||
)
|
||||
|
||||
def test_polymorphic_queries(self):
|
||||
"""Ensure that the correct subclasses are returned from a query
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
class Mammal(Animal):
|
||||
pass
|
||||
|
||||
class Dog(Mammal):
|
||||
pass
|
||||
|
||||
class Human(Mammal):
|
||||
pass
|
||||
|
||||
Animal.drop_collection()
|
||||
|
||||
Animal().save()
|
||||
Fish().save()
|
||||
Mammal().save()
|
||||
Dog().save()
|
||||
Human().save()
|
||||
|
||||
classes = [obj.__class__ for obj in Animal.objects]
|
||||
assert classes == [Animal, Fish, Mammal, Dog, Human]
|
||||
|
||||
classes = [obj.__class__ for obj in Mammal.objects]
|
||||
assert classes == [Mammal, Dog, Human]
|
||||
|
||||
classes = [obj.__class__ for obj in Human.objects]
|
||||
assert classes == [Human]
|
||||
|
||||
def test_allow_inheritance(self):
|
||||
"""Ensure that inheritance is disabled by default on simple
|
||||
classes and that _cls will not be used.
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
|
||||
# can't inherit because Animal didn't explicitly allow inheritance
|
||||
with pytest.raises(ValueError, match="Document Animal may not be subclassed"):
|
||||
|
||||
class Dog(Animal):
|
||||
pass
|
||||
|
||||
# Check that _cls etc aren't present on simple documents
|
||||
dog = Animal(name="dog").save()
|
||||
assert dog.to_mongo().keys() == ["_id", "name"]
|
||||
|
||||
collection = self.db[Animal._get_collection_name()]
|
||||
obj = collection.find_one()
|
||||
assert "_cls" not in obj
|
||||
|
||||
def test_cant_turn_off_inheritance_on_subclass(self):
|
||||
"""Ensure if inheritance is on in a subclass you cant turn it off.
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
with pytest.raises(ValueError) as exc_info:
|
||||
|
||||
class Mammal(Animal):
|
||||
meta = {"allow_inheritance": False}
|
||||
|
||||
assert (
|
||||
str(exc_info.value)
|
||||
== 'Only direct subclasses of Document may set "allow_inheritance" to False'
|
||||
)
|
||||
|
||||
def test_allow_inheritance_abstract_document(self):
|
||||
"""Ensure that abstract documents can set inheritance rules and that
|
||||
_cls will not be used.
|
||||
"""
|
||||
|
||||
class FinalDocument(Document):
|
||||
meta = {"abstract": True, "allow_inheritance": False}
|
||||
|
||||
class Animal(FinalDocument):
|
||||
name = StringField()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
|
||||
class Mammal(Animal):
|
||||
pass
|
||||
|
||||
# Check that _cls isn't present in simple documents
|
||||
doc = Animal(name="dog")
|
||||
assert "_cls" not in doc.to_mongo()
|
||||
|
||||
def test_using_abstract_class_in_reference_field(self):
|
||||
# Ensures no regression of #1920
|
||||
class AbstractHuman(Document):
|
||||
meta = {"abstract": True}
|
||||
|
||||
class Dad(AbstractHuman):
|
||||
name = StringField()
|
||||
|
||||
class Home(Document):
|
||||
dad = ReferenceField(AbstractHuman) # Referencing the abstract class
|
||||
address = StringField()
|
||||
|
||||
dad = Dad(name="5").save()
|
||||
Home(dad=dad, address="street").save()
|
||||
|
||||
home = Home.objects.first()
|
||||
home.address = "garbage"
|
||||
home.save() # Was failing with ValidationError
|
||||
|
||||
def test_abstract_class_referencing_self(self):
|
||||
# Ensures no regression of #1920
|
||||
class Human(Document):
|
||||
meta = {"abstract": True}
|
||||
creator = ReferenceField("self", dbref=True)
|
||||
|
||||
class User(Human):
|
||||
name = StringField()
|
||||
|
||||
user = User(name="John").save()
|
||||
user2 = User(name="Foo", creator=user).save()
|
||||
|
||||
user2 = User.objects.with_id(user2.id)
|
||||
user2.name = "Bar"
|
||||
user2.save() # Was failing with ValidationError
|
||||
|
||||
def test_abstract_handle_ids_in_metaclass_properly(self):
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
meta = {"abstract": True, "allow_inheritance": False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name="Berlin", continent="Europe")
|
||||
assert len(berlin._db_field_map) == len(berlin._fields_ordered)
|
||||
assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
|
||||
assert len(berlin._fields_ordered) == 3
|
||||
assert berlin._fields_ordered[0] == "id"
|
||||
|
||||
def test_auto_id_not_set_if_specific_in_parent_class(self):
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
city_id = IntField(primary_key=True)
|
||||
meta = {"abstract": True, "allow_inheritance": False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name="Berlin", continent="Europe")
|
||||
assert len(berlin._db_field_map) == len(berlin._fields_ordered)
|
||||
assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
|
||||
assert len(berlin._fields_ordered) == 3
|
||||
assert berlin._fields_ordered[0] == "city_id"
|
||||
|
||||
def test_auto_id_vs_non_pk_id_field(self):
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
id = IntField()
|
||||
meta = {"abstract": True, "allow_inheritance": False}
|
||||
|
||||
class EuropeanCity(City):
|
||||
name = StringField()
|
||||
|
||||
berlin = EuropeanCity(name="Berlin", continent="Europe")
|
||||
assert len(berlin._db_field_map) == len(berlin._fields_ordered)
|
||||
assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered)
|
||||
assert len(berlin._fields_ordered) == 4
|
||||
assert berlin._fields_ordered[0] == "auto_id_0"
|
||||
berlin.save()
|
||||
assert berlin.pk == berlin.auto_id_0
|
||||
|
||||
def test_abstract_document_creation_does_not_fail(self):
|
||||
class City(Document):
|
||||
continent = StringField()
|
||||
meta = {"abstract": True, "allow_inheritance": False}
|
||||
|
||||
city = City(continent="asia")
|
||||
assert city.pk is None
|
||||
# TODO: expected error? Shouldn't we create a new error type?
|
||||
with pytest.raises(KeyError):
|
||||
setattr(city, "pk", 1)
|
||||
|
||||
def test_allow_inheritance_embedded_document(self):
|
||||
"""Ensure embedded documents respect inheritance."""
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
content = StringField()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
|
||||
class SpecialComment(Comment):
|
||||
pass
|
||||
|
||||
doc = Comment(content="test")
|
||||
assert "_cls" not in doc.to_mongo()
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
content = StringField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
doc = Comment(content="test")
|
||||
assert "_cls" in doc.to_mongo()
|
||||
|
||||
def test_document_inheritance(self):
|
||||
"""Ensure mutliple inheritance of abstract documents
|
||||
"""
|
||||
|
||||
class DateCreatedDocument(Document):
|
||||
meta = {"allow_inheritance": True, "abstract": True}
|
||||
|
||||
class DateUpdatedDocument(Document):
|
||||
meta = {"allow_inheritance": True, "abstract": True}
|
||||
|
||||
try:
|
||||
|
||||
class MyDocument(DateCreatedDocument, DateUpdatedDocument):
|
||||
pass
|
||||
|
||||
except Exception:
|
||||
assert False, "Couldn't create MyDocument class"
|
||||
|
||||
def test_abstract_documents(self):
|
||||
"""Ensure that a document superclass can be marked as abstract
|
||||
thereby not using it as the name for the collection."""
|
||||
|
||||
defaults = {
|
||||
"index_background": True,
|
||||
"index_opts": {"hello": "world"},
|
||||
"allow_inheritance": True,
|
||||
"queryset_class": "QuerySet",
|
||||
"db_alias": "myDB",
|
||||
"shard_key": ("hello", "world"),
|
||||
}
|
||||
|
||||
meta_settings = {"abstract": True}
|
||||
meta_settings.update(defaults)
|
||||
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
meta = meta_settings
|
||||
|
||||
class Fish(Animal):
|
||||
pass
|
||||
|
||||
class Guppy(Fish):
|
||||
pass
|
||||
|
||||
class Mammal(Animal):
|
||||
meta = {"abstract": True}
|
||||
|
||||
class Human(Mammal):
|
||||
pass
|
||||
|
||||
for k, v in defaults.items():
|
||||
for cls in [Animal, Fish, Guppy]:
|
||||
assert cls._meta[k] == v
|
||||
|
||||
assert "collection" not in Animal._meta
|
||||
assert "collection" not in Mammal._meta
|
||||
|
||||
assert Animal._get_collection_name() is None
|
||||
assert Mammal._get_collection_name() is None
|
||||
|
||||
assert Fish._get_collection_name() == "fish"
|
||||
assert Guppy._get_collection_name() == "fish"
|
||||
assert Human._get_collection_name() == "human"
|
||||
|
||||
# ensure that a subclass of a non-abstract class can't be abstract
|
||||
with pytest.raises(ValueError):
|
||||
|
||||
class EvilHuman(Human):
|
||||
evil = BooleanField(default=True)
|
||||
meta = {"abstract": True}
|
||||
|
||||
def test_abstract_embedded_documents(self):
|
||||
# 789: EmbeddedDocument shouldn't inherit abstract
|
||||
class A(EmbeddedDocument):
|
||||
meta = {"abstract": True}
|
||||
|
||||
class B(A):
|
||||
pass
|
||||
|
||||
assert not B._meta["abstract"]
|
||||
|
||||
def test_inherited_collections(self):
|
||||
"""Ensure that subclassed documents don't override parents'
|
||||
collections
|
||||
"""
|
||||
|
||||
class Drink(Document):
|
||||
name = StringField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Drinker(Document):
|
||||
drink = GenericReferenceField()
|
||||
|
||||
try:
|
||||
warnings.simplefilter("error")
|
||||
|
||||
class AcloholicDrink(Drink):
|
||||
meta = {"collection": "booze"}
|
||||
|
||||
except SyntaxWarning:
|
||||
warnings.simplefilter("ignore")
|
||||
|
||||
class AlcoholicDrink(Drink):
|
||||
meta = {"collection": "booze"}
|
||||
|
||||
else:
|
||||
raise AssertionError("SyntaxWarning should be triggered")
|
||||
|
||||
warnings.resetwarnings()
|
||||
|
||||
Drink.drop_collection()
|
||||
AlcoholicDrink.drop_collection()
|
||||
Drinker.drop_collection()
|
||||
|
||||
red_bull = Drink(name="Red Bull")
|
||||
red_bull.save()
|
||||
|
||||
programmer = Drinker(drink=red_bull)
|
||||
programmer.save()
|
||||
|
||||
beer = AlcoholicDrink(name="Beer")
|
||||
beer.save()
|
||||
real_person = Drinker(drink=beer)
|
||||
real_person.save()
|
||||
|
||||
assert Drinker.objects[0].drink.name == red_bull.name
|
||||
assert Drinker.objects[1].drink.name == beer.name
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
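The inheritance tests above hinge on two behaviours: subclasses reuse the parent's collection, and every saved document carries a _cls discriminator that makes polymorphic queries work. A minimal sketch with hypothetical Animal/Dog models, assuming a local test database:

from mongoengine import Document, StringField, connect

connect(db="mongoenginetest")  # assumed local test database

class Animal(Document):
    name = StringField()
    meta = {"allow_inheritance": True}

class Dog(Animal):
    pass

Animal.drop_collection()
Dog(name="Rex").save()

# Subclasses share the parent's collection and store a _cls marker,
# so querying the parent class returns subclass instances.
assert Dog._get_collection_name() == Animal._get_collection_name()
assert [obj.__class__ for obj in Animal.objects] == [Dog]
assert Dog.objects.first().to_mongo()["_cls"] == "Animal.Dog"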
File diff suppressed because it is too large
@@ -1,22 +1,14 @@
|
||||
import unittest
|
||||
import uuid
|
||||
|
||||
from nose.plugins.skip import SkipTest
|
||||
from datetime import datetime
|
||||
from bson import ObjectId
|
||||
|
||||
import pymongo
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
__all__ = ("TestJson",)
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestJson(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
class TestJson(MongoDBTestCase):
|
||||
def test_json_names(self):
|
||||
"""
|
||||
Going to test reported issue:
|
||||
@@ -25,22 +17,24 @@ class TestJson(unittest.TestCase):
|
||||
a to_json with the original class names and not the abbreviated
|
||||
mongodb document keys
|
||||
"""
|
||||
|
||||
class Embedded(EmbeddedDocument):
|
||||
string = StringField(db_field='s')
|
||||
string = StringField(db_field="s")
|
||||
|
||||
class Doc(Document):
|
||||
string = StringField(db_field='s')
|
||||
embedded = EmbeddedDocumentField(Embedded, db_field='e')
|
||||
string = StringField(db_field="s")
|
||||
embedded = EmbeddedDocumentField(Embedded, db_field="e")
|
||||
|
||||
doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
|
||||
doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':'))
|
||||
doc_json = doc.to_json(
|
||||
sort_keys=True, use_db_field=False, separators=(",", ":")
|
||||
)
|
||||
|
||||
expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""
|
||||
|
||||
self.assertEqual(doc_json, expected_json)
|
||||
assert doc_json == expected_json
|
||||
|
||||
def test_json_simple(self):
|
||||
|
||||
class Embedded(EmbeddedDocument):
|
||||
string = StringField()
|
||||
|
||||
@@ -49,16 +43,18 @@ class TestJson(unittest.TestCase):
|
||||
embedded_field = EmbeddedDocumentField(Embedded)
|
||||
|
||||
def __eq__(self, other):
|
||||
return (self.string == other.string and
|
||||
self.embedded_field == other.embedded_field)
|
||||
return (
|
||||
self.string == other.string
|
||||
and self.embedded_field == other.embedded_field
|
||||
)
|
||||
|
||||
doc = Doc(string="Hi", embedded_field=Embedded(string="Hi"))
|
||||
|
||||
doc_json = doc.to_json(sort_keys=True, separators=(',', ':'))
|
||||
doc_json = doc.to_json(sort_keys=True, separators=(",", ":"))
|
||||
expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}"""
|
||||
self.assertEqual(doc_json, expected_json)
|
||||
assert doc_json == expected_json
|
||||
|
||||
self.assertEqual(doc, Doc.from_json(doc.to_json()))
|
||||
assert doc == Doc.from_json(doc.to_json())
|
||||
|
||||
def test_json_complex(self):
|
||||
class EmbeddedDoc(EmbeddedDocument):
|
||||
@@ -68,41 +64,43 @@ class TestJson(unittest.TestCase):
|
||||
pass
|
||||
|
||||
class Doc(Document):
|
||||
string_field = StringField(default='1')
|
||||
string_field = StringField(default="1")
|
||||
int_field = IntField(default=1)
|
||||
float_field = FloatField(default=1.1)
|
||||
boolean_field = BooleanField(default=True)
|
||||
datetime_field = DateTimeField(default=datetime.now)
|
||||
embedded_document_field = EmbeddedDocumentField(EmbeddedDoc,
|
||||
default=lambda: EmbeddedDoc())
|
||||
embedded_document_field = EmbeddedDocumentField(
|
||||
EmbeddedDoc, default=lambda: EmbeddedDoc()
|
||||
)
|
||||
list_field = ListField(default=lambda: [1, 2, 3])
|
||||
dict_field = DictField(default=lambda: {"hello": "world"})
|
||||
objectid_field = ObjectIdField(default=ObjectId)
|
||||
reference_field = ReferenceField(Simple, default=lambda:
|
||||
Simple().save())
|
||||
reference_field = ReferenceField(Simple, default=lambda: Simple().save())
|
||||
map_field = MapField(IntField(), default=lambda: {"simple": 1})
|
||||
decimal_field = DecimalField(default=1.0)
|
||||
complex_datetime_field = ComplexDateTimeField(default=datetime.now)
|
||||
url_field = URLField(default="http://mongoengine.org")
|
||||
dynamic_field = DynamicField(default=1)
|
||||
generic_reference_field = GenericReferenceField(
|
||||
default=lambda: Simple().save())
|
||||
sorted_list_field = SortedListField(IntField(),
|
||||
default=lambda: [1, 2, 3])
|
||||
default=lambda: Simple().save()
|
||||
)
|
||||
sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3])
|
||||
email_field = EmailField(default="ross@example.com")
|
||||
geo_point_field = GeoPointField(default=lambda: [1, 2])
|
||||
sequence_field = SequenceField()
|
||||
uuid_field = UUIDField(default=uuid.uuid4)
|
||||
generic_embedded_document_field = GenericEmbeddedDocumentField(
|
||||
default=lambda: EmbeddedDoc())
|
||||
default=lambda: EmbeddedDoc()
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
import json
|
||||
|
||||
return json.loads(self.to_json()) == json.loads(other.to_json())
|
||||
|
||||
doc = Doc()
|
||||
self.assertEqual(doc, Doc.from_json(doc.to_json()))
|
||||
assert doc == Doc.from_json(doc.to_json())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
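For the JSON tests above: to_json serialises with the db_field aliases by default, while use_db_field=False keeps the declared Python field names. A small sketch with a hypothetical Doc model (no database access is needed for serialisation):

import json

from mongoengine import Document, StringField

class Doc(Document):
    string = StringField(db_field="s")

doc = Doc(string="Hello")

# default: MongoDB-level key names (the db_field aliases)
assert json.loads(doc.to_json()) == {"s": "Hello"}
# use_db_field=False: the declared field names
assert json.loads(doc.to_json(use_db_field=False)) == {"string": "Hello"}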
@@ -2,55 +2,60 @@
|
||||
import unittest
|
||||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
__all__ = ("ValidatorErrorTest",)
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class ValidatorErrorTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
class TestValidatorError(MongoDBTestCase):
|
||||
def test_to_dict(self):
|
||||
"""Ensure a ValidationError handles error to_dict correctly.
|
||||
"""
|
||||
error = ValidationError('root')
|
||||
self.assertEqual(error.to_dict(), {})
|
||||
error = ValidationError("root")
|
||||
assert error.to_dict() == {}
|
||||
|
||||
# 1st level error schema
|
||||
error.errors = {'1st': ValidationError('bad 1st'), }
|
||||
self.assertIn('1st', error.to_dict())
|
||||
self.assertEqual(error.to_dict()['1st'], 'bad 1st')
|
||||
error.errors = {"1st": ValidationError("bad 1st")}
|
||||
assert "1st" in error.to_dict()
|
||||
assert error.to_dict()["1st"] == "bad 1st"
|
||||
|
||||
# 2nd level error schema
|
||||
error.errors = {'1st': ValidationError('bad 1st', errors={
|
||||
'2nd': ValidationError('bad 2nd'),
|
||||
})}
|
||||
self.assertIn('1st', error.to_dict())
|
||||
self.assertIsInstance(error.to_dict()['1st'], dict)
|
||||
self.assertIn('2nd', error.to_dict()['1st'])
|
||||
self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd')
|
||||
error.errors = {
|
||||
"1st": ValidationError(
|
||||
"bad 1st", errors={"2nd": ValidationError("bad 2nd")}
|
||||
)
|
||||
}
|
||||
assert "1st" in error.to_dict()
|
||||
assert isinstance(error.to_dict()["1st"], dict)
|
||||
assert "2nd" in error.to_dict()["1st"]
|
||||
assert error.to_dict()["1st"]["2nd"] == "bad 2nd"
|
||||
|
||||
# moar levels
|
||||
error.errors = {'1st': ValidationError('bad 1st', errors={
|
||||
'2nd': ValidationError('bad 2nd', errors={
|
||||
'3rd': ValidationError('bad 3rd', errors={
|
||||
'4th': ValidationError('Inception'),
|
||||
}),
|
||||
}),
|
||||
})}
|
||||
self.assertIn('1st', error.to_dict())
|
||||
self.assertIn('2nd', error.to_dict()['1st'])
|
||||
self.assertIn('3rd', error.to_dict()['1st']['2nd'])
|
||||
self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd'])
|
||||
self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'],
|
||||
'Inception')
|
||||
error.errors = {
|
||||
"1st": ValidationError(
|
||||
"bad 1st",
|
||||
errors={
|
||||
"2nd": ValidationError(
|
||||
"bad 2nd",
|
||||
errors={
|
||||
"3rd": ValidationError(
|
||||
"bad 3rd", errors={"4th": ValidationError("Inception")}
|
||||
)
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
assert "1st" in error.to_dict()
|
||||
assert "2nd" in error.to_dict()["1st"]
|
||||
assert "3rd" in error.to_dict()["1st"]["2nd"]
|
||||
assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"]
|
||||
assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception"
|
||||
|
||||
self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])")
|
||||
assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])"
|
||||
|
||||
def test_model_validation(self):
|
||||
|
||||
class User(Document):
|
||||
username = StringField(primary_key=True)
|
||||
name = StringField(required=True)
|
||||
@@ -58,67 +63,69 @@ class ValidatorErrorTest(unittest.TestCase):
|
||||
try:
|
||||
User().validate()
|
||||
except ValidationError as e:
|
||||
self.assertIn("User:None", e.message)
|
||||
self.assertEqual(e.to_dict(), {
|
||||
'username': 'Field is required',
|
||||
'name': 'Field is required'})
|
||||
assert "User:None" in e.message
|
||||
assert e.to_dict() == {
|
||||
"username": "Field is required",
|
||||
"name": "Field is required",
|
||||
}
|
||||
|
||||
user = User(username="RossC0", name="Ross").save()
|
||||
user.name = None
|
||||
try:
|
||||
user.save()
|
||||
except ValidationError as e:
|
||||
self.assertIn("User:RossC0", e.message)
|
||||
self.assertEqual(e.to_dict(), {
|
||||
'name': 'Field is required'})
|
||||
assert "User:RossC0" in e.message
|
||||
assert e.to_dict() == {"name": "Field is required"}
|
||||
|
||||
def test_fields_rewrite(self):
|
||||
class BasePerson(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
meta = {'abstract': True}
|
||||
meta = {"abstract": True}
|
||||
|
||||
class Person(BasePerson):
|
||||
name = StringField(required=True)
|
||||
|
||||
p = Person(age=15)
|
||||
self.assertRaises(ValidationError, p.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
p.validate()
|
||||
|
||||
def test_embedded_document_validation(self):
|
||||
"""Ensure that embedded documents may be validated.
|
||||
"""
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
date = DateTimeField()
|
||||
content = StringField(required=True)
|
||||
|
||||
comment = Comment()
|
||||
self.assertRaises(ValidationError, comment.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
comment.validate()
|
||||
|
||||
comment.content = 'test'
|
||||
comment.content = "test"
|
||||
comment.validate()
|
||||
|
||||
comment.date = 4
|
||||
self.assertRaises(ValidationError, comment.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
comment.validate()
|
||||
|
||||
comment.date = datetime.now()
|
||||
comment.validate()
|
||||
self.assertEqual(comment._instance, None)
|
||||
assert comment._instance is None
|
||||
|
||||
def test_embedded_db_field_validate(self):
|
||||
|
||||
class SubDoc(EmbeddedDocument):
|
||||
val = IntField(required=True)
|
||||
|
||||
class Doc(Document):
|
||||
id = StringField(primary_key=True)
|
||||
e = EmbeddedDocumentField(SubDoc, db_field='eb')
|
||||
e = EmbeddedDocumentField(SubDoc, db_field="eb")
|
||||
|
||||
try:
|
||||
Doc(id="bad").validate()
|
||||
except ValidationError as e:
|
||||
self.assertIn("SubDoc:None", e.message)
|
||||
self.assertEqual(e.to_dict(), {
|
||||
"e": {'val': 'OK could not be converted to int'}})
|
||||
assert "SubDoc:None" in e.message
|
||||
assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
@@ -126,25 +133,23 @@ class ValidatorErrorTest(unittest.TestCase):
|
||||
|
||||
doc = Doc.objects.first()
|
||||
keys = doc._data.keys()
|
||||
self.assertEqual(2, len(keys))
|
||||
self.assertIn('e', keys)
|
||||
self.assertIn('id', keys)
|
||||
assert 2 == len(keys)
|
||||
assert "e" in keys
|
||||
assert "id" in keys
|
||||
|
||||
doc.e.val = "OK"
|
||||
try:
|
||||
doc.save()
|
||||
except ValidationError as e:
|
||||
self.assertIn("Doc:test", e.message)
|
||||
self.assertEqual(e.to_dict(), {
|
||||
"e": {'val': 'OK could not be converted to int'}})
|
||||
assert "Doc:test" in e.message
|
||||
assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}}
|
||||
|
||||
def test_embedded_weakref(self):
|
||||
|
||||
class SubDoc(EmbeddedDocument):
|
||||
val = IntField(required=True)
|
||||
|
||||
class Doc(Document):
|
||||
e = EmbeddedDocumentField(SubDoc, db_field='eb')
|
||||
e = EmbeddedDocumentField(SubDoc, db_field="eb")
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
@@ -153,23 +158,26 @@ class ValidatorErrorTest(unittest.TestCase):
|
||||
|
||||
s = SubDoc()
|
||||
|
||||
self.assertRaises(ValidationError, s.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
s.validate()
|
||||
|
||||
d1.e = s
|
||||
d2.e = s
|
||||
|
||||
del d1
|
||||
|
||||
self.assertRaises(ValidationError, d2.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
d2.validate()
|
||||
|
||||
def test_parent_reference_in_child_document(self):
|
||||
"""
|
||||
Test to ensure a ReferenceField can store a reference to a parent
|
||||
class when inherited. Issue #954.
|
||||
"""
|
||||
|
||||
class Parent(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
reference = ReferenceField('self')
|
||||
meta = {"allow_inheritance": True}
|
||||
reference = ReferenceField("self")
|
||||
|
||||
class Child(Parent):
|
||||
pass
|
||||
@@ -190,9 +198,10 @@ class ValidatorErrorTest(unittest.TestCase):
|
||||
Test to ensure a ReferenceField can store a reference to a parent
|
||||
class when inherited and when set via attribute. Issue #954.
|
||||
"""
|
||||
|
||||
class Parent(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
reference = ReferenceField('self')
|
||||
meta = {"allow_inheritance": True}
|
||||
reference = ReferenceField("self")
|
||||
|
||||
class Child(Parent):
|
||||
pass
|
||||
@@ -210,5 +219,5 @@ class ValidatorErrorTest(unittest.TestCase):
|
||||
self.fail("ValidationError raised: %s" % e.message)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
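The conversion pattern applied throughout this commit: self.assertEqual and self.assertRaises become plain asserts and pytest.raises context managers. A minimal illustration with a hypothetical User model (validate() needs no database access):

import pytest

from mongoengine import Document, StringField, ValidationError

class User(Document):
    name = StringField(required=True)

# before: self.assertRaises(ValidationError, User().validate)
with pytest.raises(ValidationError):
    User().validate()

# before: self.assertEqual(User(name="Ross").name, "Ross")
assert User(name="Ross").name == "Ross"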
@@ -1,3 +0,0 @@
|
||||
from .fields import *
|
||||
from .file_tests import *
|
||||
from .geo import *
|
||||
|
||||
@@ -1,27 +1,28 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import uuid
|
||||
|
||||
from nose.plugins.skip import SkipTest
|
||||
import six
|
||||
|
||||
from bson import Binary
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
BIN_VALUE = six.b('\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5')
|
||||
BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
|
||||
"latin-1"
|
||||
)
|
||||
|
||||
|
||||
class TestBinaryField(MongoDBTestCase):
|
||||
def test_binary_fields(self):
|
||||
"""Ensure that binary fields can be stored and retrieved.
|
||||
"""
|
||||
|
||||
class Attachment(Document):
|
||||
content_type = StringField()
|
||||
blob = BinaryField()
|
||||
|
||||
BLOB = six.b('\xe6\x00\xc4\xff\x07')
|
||||
MIME_TYPE = 'application/octet-stream'
|
||||
BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1")
|
||||
MIME_TYPE = "application/octet-stream"
|
||||
|
||||
Attachment.drop_collection()
|
||||
|
||||
@@ -29,12 +30,13 @@ class TestBinaryField(MongoDBTestCase):
|
||||
attachment.save()
|
||||
|
||||
attachment_1 = Attachment.objects().first()
|
||||
self.assertEqual(MIME_TYPE, attachment_1.content_type)
|
||||
self.assertEqual(BLOB, six.binary_type(attachment_1.blob))
|
||||
assert MIME_TYPE == attachment_1.content_type
|
||||
assert BLOB == bytes(attachment_1.blob)
|
||||
|
||||
def test_validation_succeeds(self):
|
||||
"""Ensure that valid values can be assigned to binary fields.
|
||||
"""
|
||||
|
||||
class AttachmentRequired(Document):
|
||||
blob = BinaryField(required=True)
|
||||
|
||||
@@ -42,13 +44,15 @@ class TestBinaryField(MongoDBTestCase):
|
||||
blob = BinaryField(max_bytes=4)
|
||||
|
||||
attachment_required = AttachmentRequired()
|
||||
self.assertRaises(ValidationError, attachment_required.validate)
|
||||
attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07'))
|
||||
with pytest.raises(ValidationError):
|
||||
attachment_required.validate()
|
||||
attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1"))
|
||||
attachment_required.validate()
|
||||
|
||||
_5_BYTES = six.b('\xe6\x00\xc4\xff\x07')
|
||||
_4_BYTES = six.b('\xe6\x00\xc4\xff')
|
||||
self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate)
|
||||
_5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1")
|
||||
_4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1")
|
||||
with pytest.raises(ValidationError):
|
||||
AttachmentSizeLimit(blob=_5_BYTES).validate()
|
||||
AttachmentSizeLimit(blob=_4_BYTES).validate()
|
||||
|
||||
def test_validation_fails(self):
|
||||
@@ -57,8 +61,9 @@ class TestBinaryField(MongoDBTestCase):
|
||||
class Attachment(Document):
|
||||
blob = BinaryField()
|
||||
|
||||
for invalid_data in (2, u'Im_a_unicode', ['some_str']):
|
||||
self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate)
|
||||
for invalid_data in (2, u"Im_a_unicode", ["some_str"]):
|
||||
with pytest.raises(ValidationError):
|
||||
Attachment(blob=invalid_data).validate()
|
||||
|
||||
def test__primary(self):
|
||||
class Attachment(Document):
|
||||
@@ -67,23 +72,21 @@ class TestBinaryField(MongoDBTestCase):
|
||||
Attachment.drop_collection()
|
||||
binary_id = uuid.uuid4().bytes
|
||||
att = Attachment(id=binary_id).save()
|
||||
self.assertEqual(1, Attachment.objects.count())
|
||||
self.assertEqual(1, Attachment.objects.filter(id=att.id).count())
|
||||
assert 1 == Attachment.objects.count()
|
||||
assert 1 == Attachment.objects.filter(id=att.id).count()
|
||||
att.delete()
|
||||
self.assertEqual(0, Attachment.objects.count())
|
||||
assert 0 == Attachment.objects.count()
|
||||
|
||||
def test_primary_filter_by_binary_pk_as_str(self):
|
||||
raise SkipTest("Querying by id as string is not currently supported")
|
||||
|
||||
class Attachment(Document):
|
||||
id = BinaryField(primary_key=True)
|
||||
|
||||
Attachment.drop_collection()
|
||||
binary_id = uuid.uuid4().bytes
|
||||
att = Attachment(id=binary_id).save()
|
||||
self.assertEqual(1, Attachment.objects.filter(id=binary_id).count())
|
||||
assert 1 == Attachment.objects.filter(id=binary_id).count()
|
||||
att.delete()
|
||||
self.assertEqual(0, Attachment.objects.count())
|
||||
assert 0 == Attachment.objects.count()
|
||||
|
||||
def test_match_querying_with_bytes(self):
|
||||
class MyDocument(Document):
|
||||
@@ -93,7 +96,7 @@ class TestBinaryField(MongoDBTestCase):
|
||||
|
||||
doc = MyDocument(bin_field=BIN_VALUE).save()
|
||||
matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first()
|
||||
self.assertEqual(matched_doc.id, doc.id)
|
||||
assert matched_doc.id == doc.id
|
||||
|
||||
def test_match_querying_with_binary(self):
|
||||
class MyDocument(Document):
|
||||
@@ -104,40 +107,37 @@ class TestBinaryField(MongoDBTestCase):
|
||||
doc = MyDocument(bin_field=BIN_VALUE).save()
|
||||
|
||||
matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first()
|
||||
self.assertEqual(matched_doc.id, doc.id)
|
||||
assert matched_doc.id == doc.id
|
||||
|
||||
def test_modify_operation__set(self):
|
||||
"""Ensures no regression of bug #1127"""
|
||||
|
||||
class MyDocument(Document):
|
||||
some_field = StringField()
|
||||
bin_field = BinaryField()
|
||||
|
||||
MyDocument.drop_collection()
|
||||
|
||||
doc = MyDocument.objects(some_field='test').modify(
|
||||
upsert=True, new=True,
|
||||
set__bin_field=BIN_VALUE
|
||||
doc = MyDocument.objects(some_field="test").modify(
|
||||
upsert=True, new=True, set__bin_field=BIN_VALUE
|
||||
)
|
||||
self.assertEqual(doc.some_field, 'test')
|
||||
if six.PY3:
|
||||
self.assertEqual(doc.bin_field, BIN_VALUE)
|
||||
else:
|
||||
self.assertEqual(doc.bin_field, Binary(BIN_VALUE))
|
||||
assert doc.some_field == "test"
|
||||
assert doc.bin_field == BIN_VALUE
|
||||
|
||||
def test_update_one(self):
|
||||
"""Ensures no regression of bug #1127"""
|
||||
|
||||
class MyDocument(Document):
|
||||
bin_field = BinaryField()
|
||||
|
||||
MyDocument.drop_collection()
|
||||
|
||||
bin_data = six.b('\xe6\x00\xc4\xff\x07')
|
||||
bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1")
|
||||
doc = MyDocument(bin_field=bin_data).save()
|
||||
|
||||
n_updated = MyDocument.objects(bin_field=bin_data).update_one(bin_field=BIN_VALUE)
|
||||
self.assertEqual(n_updated, 1)
|
||||
n_updated = MyDocument.objects(bin_field=bin_data).update_one(
|
||||
bin_field=BIN_VALUE
|
||||
)
|
||||
assert n_updated == 1
|
||||
fetched = MyDocument.objects.with_id(doc.id)
|
||||
if six.PY3:
|
||||
self.assertEqual(fetched.bin_field, BIN_VALUE)
|
||||
else:
|
||||
self.assertEqual(fetched.bin_field, Binary(BIN_VALUE))
|
||||
assert fetched.bin_field == BIN_VALUE
|
||||
|
||||
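Context for the binary-field changes above: on Python 3, six.b() simply latin-1 encodes its argument, so the removed six.b(...) literals, the new .encode("latin-1") calls, and plain bytes literals all produce the same value:

BLOB = b"\xe6\x00\xc4\xff\x07"  # same bytes as the former six.b('\xe6\x00\xc4\xff\x07')
assert BLOB == "\xe6\x00\xc4\xff\x07".encode("latin-1")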
@@ -1,6 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from mongoengine import *
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase, get_as_pymongo
|
||||
|
||||
|
||||
@@ -11,15 +12,13 @@ class TestBooleanField(MongoDBTestCase):
|
||||
|
||||
person = Person(admin=True)
|
||||
person.save()
|
||||
self.assertEqual(
|
||||
get_as_pymongo(person),
|
||||
{'_id': person.id,
|
||||
'admin': True})
|
||||
assert get_as_pymongo(person) == {"_id": person.id, "admin": True}
|
||||
|
||||
def test_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to boolean
|
||||
fields.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
admin = BooleanField()
|
||||
|
||||
@@ -28,22 +27,26 @@ class TestBooleanField(MongoDBTestCase):
|
||||
person.validate()
|
||||
|
||||
person.admin = 2
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person.admin = 'Yes'
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person.admin = 'False'
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
person.admin = "Yes"
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
person.admin = "False"
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
|
||||
def test_weirdness_constructor(self):
|
||||
"""When attribute is set in contructor, it gets cast into a bool
|
||||
which causes some weird behavior. We dont necessarily want to maintain this behavior
|
||||
but its a known issue
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
admin = BooleanField()
|
||||
|
||||
new_person = Person(admin='False')
|
||||
self.assertTrue(new_person.admin)
|
||||
new_person = Person(admin="False")
|
||||
assert new_person.admin
|
||||
|
||||
new_person = Person(admin='0')
|
||||
self.assertTrue(new_person.admin)
|
||||
new_person = Person(admin="0")
|
||||
assert new_person.admin
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from decimal import Decimal
|
||||
|
||||
from mongoengine import *
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestCachedReferenceField(MongoDBTestCase):
|
||||
|
||||
def test_get_and_save(self):
|
||||
"""
|
||||
Tests #1047: CachedReferenceField creates DBRefs on to_python,
|
||||
but can't save them on to_mongo.
|
||||
"""
|
||||
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
tag = StringField()
|
||||
@@ -24,10 +25,11 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
Animal.drop_collection()
|
||||
Ocorrence.drop_collection()
|
||||
|
||||
Ocorrence(person="testte",
|
||||
animal=Animal(name="Leopard", tag="heavy").save()).save()
|
||||
Ocorrence(
|
||||
person="testte", animal=Animal(name="Leopard", tag="heavy").save()
|
||||
).save()
|
||||
p = Ocorrence.objects.get()
|
||||
p.person = 'new_testte'
|
||||
p.person = "new_testte"
|
||||
p.save()
|
||||
|
||||
def test_general_things(self):
|
||||
@@ -37,8 +39,7 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
|
||||
class Ocorrence(Document):
|
||||
person = StringField()
|
||||
animal = CachedReferenceField(
|
||||
Animal, fields=['tag'])
|
||||
animal = CachedReferenceField(Animal, fields=["tag"])
|
||||
|
||||
Animal.drop_collection()
|
||||
Ocorrence.drop_collection()
|
||||
@@ -46,30 +47,29 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
a = Animal(name="Leopard", tag="heavy")
|
||||
a.save()
|
||||
|
||||
self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal])
|
||||
assert Animal._cached_reference_fields == [Ocorrence.animal]
|
||||
o = Ocorrence(person="teste", animal=a)
|
||||
o.save()
|
||||
|
||||
p = Ocorrence(person="Wilson")
|
||||
p.save()
|
||||
|
||||
self.assertEqual(Ocorrence.objects(animal=None).count(), 1)
|
||||
assert Ocorrence.objects(animal=None).count() == 1
|
||||
|
||||
self.assertEqual(
|
||||
a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk})
|
||||
assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk}
|
||||
|
||||
self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
|
||||
assert o.to_mongo()["animal"]["tag"] == "heavy"
|
||||
|
||||
# counts
|
||||
Ocorrence(person="teste 2").save()
|
||||
Ocorrence(person="teste 3").save()
|
||||
|
||||
count = Ocorrence.objects(animal__tag='heavy').count()
|
||||
self.assertEqual(count, 1)
|
||||
count = Ocorrence.objects(animal__tag="heavy").count()
|
||||
assert count == 1
|
||||
|
||||
ocorrence = Ocorrence.objects(animal__tag='heavy').first()
|
||||
self.assertEqual(ocorrence.person, "teste")
|
||||
self.assertIsInstance(ocorrence.animal, Animal)
|
||||
ocorrence = Ocorrence.objects(animal__tag="heavy").first()
|
||||
assert ocorrence.person == "teste"
|
||||
assert isinstance(ocorrence.animal, Animal)
|
||||
|
||||
def test_with_decimal(self):
|
||||
class PersonAuto(Document):
|
||||
@@ -78,28 +78,22 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
|
||||
class SocialTest(Document):
|
||||
group = StringField()
|
||||
person = CachedReferenceField(
|
||||
PersonAuto,
|
||||
fields=('salary',))
|
||||
person = CachedReferenceField(PersonAuto, fields=("salary",))
|
||||
|
||||
PersonAuto.drop_collection()
|
||||
SocialTest.drop_collection()
|
||||
|
||||
p = PersonAuto(name="Alberto", salary=Decimal('7000.00'))
|
||||
p = PersonAuto(name="Alberto", salary=Decimal("7000.00"))
|
||||
p.save()
|
||||
|
||||
s = SocialTest(group="dev", person=p)
|
||||
s.save()
|
||||
|
||||
self.assertEqual(
|
||||
SocialTest.objects._collection.find_one({'person.salary': 7000.00}), {
|
||||
'_id': s.pk,
|
||||
'group': s.group,
|
||||
'person': {
|
||||
'_id': p.pk,
|
||||
'salary': 7000.00
|
||||
}
|
||||
})
|
||||
assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == {
|
||||
"_id": s.pk,
|
||||
"group": s.group,
|
||||
"person": {"_id": p.pk, "salary": 7000.00},
|
||||
}
|
||||
|
||||
def test_cached_reference_field_reference(self):
|
||||
class Group(Document):
|
||||
@@ -111,17 +105,14 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
|
||||
class SocialData(Document):
|
||||
obs = StringField()
|
||||
tags = ListField(
|
||||
StringField())
|
||||
person = CachedReferenceField(
|
||||
Person,
|
||||
fields=('group',))
|
||||
tags = ListField(StringField())
|
||||
person = CachedReferenceField(Person, fields=("group",))
|
||||
|
||||
Group.drop_collection()
|
||||
Person.drop_collection()
|
||||
SocialData.drop_collection()
|
||||
|
||||
g1 = Group(name='dev')
|
||||
g1 = Group(name="dev")
|
||||
g1.save()
|
||||
|
||||
g2 = Group(name="designers")
|
||||
@@ -136,25 +127,21 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
p3 = Person(name="Afro design", group=g2)
|
||||
p3.save()
|
||||
|
||||
s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2'])
|
||||
s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"])
|
||||
s1.save()
|
||||
|
||||
s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4'])
|
||||
s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"])
|
||||
s2.save()
|
||||
|
||||
self.assertEqual(SocialData.objects._collection.find_one(
|
||||
{'tags': 'tag2'}), {
|
||||
'_id': s1.pk,
|
||||
'obs': 'testing 123',
|
||||
'tags': ['tag1', 'tag2'],
|
||||
'person': {
|
||||
'_id': p1.pk,
|
||||
'group': g1.pk
|
||||
}
|
||||
})
|
||||
assert SocialData.objects._collection.find_one({"tags": "tag2"}) == {
|
||||
"_id": s1.pk,
|
||||
"obs": "testing 123",
|
||||
"tags": ["tag1", "tag2"],
|
||||
"person": {"_id": p1.pk, "group": g1.pk},
|
||||
}
|
||||
|
||||
self.assertEqual(SocialData.objects(person__group=g2).count(), 1)
|
||||
self.assertEqual(SocialData.objects(person__group=g2).first(), s2)
|
||||
assert SocialData.objects(person__group=g2).count() == 1
|
||||
assert SocialData.objects(person__group=g2).first() == s2
|
||||
|
||||
def test_cached_reference_field_push_with_fields(self):
|
||||
class Product(Document):
|
||||
@@ -163,185 +150,136 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
Product.drop_collection()
|
||||
|
||||
class Basket(Document):
|
||||
products = ListField(CachedReferenceField(Product, fields=['name']))
|
||||
products = ListField(CachedReferenceField(Product, fields=["name"]))
|
||||
|
||||
Basket.drop_collection()
|
||||
product1 = Product(name='abc').save()
|
||||
product2 = Product(name='def').save()
|
||||
product1 = Product(name="abc").save()
|
||||
product2 = Product(name="def").save()
|
||||
basket = Basket(products=[product1]).save()
|
||||
self.assertEqual(
|
||||
Basket.objects._collection.find_one(),
|
||||
{
|
||||
'_id': basket.pk,
|
||||
'products': [
|
||||
{
|
||||
'_id': product1.pk,
|
||||
'name': product1.name
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
assert Basket.objects._collection.find_one() == {
|
||||
"_id": basket.pk,
|
||||
"products": [{"_id": product1.pk, "name": product1.name}],
|
||||
}
|
||||
# push to list
|
||||
basket.update(push__products=product2)
|
||||
basket.reload()
|
||||
self.assertEqual(
|
||||
Basket.objects._collection.find_one(),
|
||||
{
|
||||
'_id': basket.pk,
|
||||
'products': [
|
||||
{
|
||||
'_id': product1.pk,
|
||||
'name': product1.name
|
||||
},
|
||||
{
|
||||
'_id': product2.pk,
|
||||
'name': product2.name
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
assert Basket.objects._collection.find_one() == {
|
||||
"_id": basket.pk,
|
||||
"products": [
|
||||
{"_id": product1.pk, "name": product1.name},
|
||||
{"_id": product2.pk, "name": product2.name},
|
||||
],
|
||||
}
|
||||
|
||||
def test_cached_reference_field_update_all(self):
|
||||
class Person(Document):
|
||||
TYPES = (
|
||||
('pf', "PF"),
|
||||
('pj', "PJ")
|
||||
)
|
||||
TYPES = (("pf", "PF"), ("pj", "PJ"))
|
||||
name = StringField()
|
||||
tp = StringField(choices=TYPES)
|
||||
father = CachedReferenceField('self', fields=('tp',))
|
||||
father = CachedReferenceField("self", fields=("tp",))
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
a1 = Person(name="Wilson Father", tp="pj")
|
||||
a1.save()
|
||||
|
||||
a2 = Person(name='Wilson Junior', tp='pf', father=a1)
|
||||
a2 = Person(name="Wilson Junior", tp="pf", father=a1)
|
||||
a2.save()
|
||||
|
||||
a2 = Person.objects.with_id(a2.id)
|
||||
self.assertEqual(a2.father.tp, a1.tp)
|
||||
assert a2.father.tp == a1.tp
|
||||
|
||||
self.assertEqual(dict(a2.to_mongo()), {
|
||||
assert dict(a2.to_mongo()) == {
|
||||
"_id": a2.pk,
|
||||
"name": u"Wilson Junior",
|
||||
"tp": u"pf",
|
||||
"father": {
|
||||
"_id": a1.pk,
|
||||
"tp": u"pj"
|
||||
}
|
||||
})
|
||||
"father": {"_id": a1.pk, "tp": u"pj"},
|
||||
}
|
||||
|
||||
self.assertEqual(Person.objects(father=a1)._query, {
|
||||
'father._id': a1.pk
|
||||
})
|
||||
self.assertEqual(Person.objects(father=a1).count(), 1)
|
||||
assert Person.objects(father=a1)._query == {"father._id": a1.pk}
|
||||
assert Person.objects(father=a1).count() == 1
|
||||
|
||||
Person.objects.update(set__tp="pf")
|
||||
Person.father.sync_all()
|
||||
|
||||
a2.reload()
|
||||
self.assertEqual(dict(a2.to_mongo()), {
|
||||
assert dict(a2.to_mongo()) == {
|
||||
"_id": a2.pk,
|
||||
"name": u"Wilson Junior",
|
||||
"tp": u"pf",
|
||||
"father": {
|
||||
"_id": a1.pk,
|
||||
"tp": u"pf"
|
||||
}
|
||||
})
|
||||
"father": {"_id": a1.pk, "tp": u"pf"},
|
||||
}
|
||||
|
||||
def test_cached_reference_fields_on_embedded_documents(self):
|
||||
with self.assertRaises(InvalidDocumentError):
|
||||
with pytest.raises(InvalidDocumentError):
|
||||
|
||||
class Test(Document):
|
||||
name = StringField()
|
||||
|
||||
type('WrongEmbeddedDocument', (
|
||||
EmbeddedDocument,), {
|
||||
'test': CachedReferenceField(Test)
|
||||
})
|
||||
type(
|
||||
"WrongEmbeddedDocument",
|
||||
(EmbeddedDocument,),
|
||||
{"test": CachedReferenceField(Test)},
|
||||
)

    def test_cached_reference_auto_sync(self):
        class Person(Document):
            TYPES = (
                ('pf', "PF"),
                ('pj', "PJ")
            )
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(
                choices=TYPES
            )
            tp = StringField(choices=TYPES)

            father = CachedReferenceField('self', fields=('tp',))
            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name='Wilson Junior', tp='pf', father=a1)
        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = 'pf'
        a1.tp = "pf"
        a1.save()
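        # With the default auto_sync=True, saving the referenced document also refreshes the
        # cached "tp" copy stored on a2, as the reload below confirms.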

        a2.reload()
        self.assertEqual(dict(a2.to_mongo()), {
            '_id': a2.pk,
            'name': 'Wilson Junior',
            'tp': 'pf',
            'father': {
                '_id': a1.pk,
                'tp': 'pf'
            }
        })
        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pf"},
        }

    def test_cached_reference_auto_sync_disabled(self):
        class Persone(Document):
            TYPES = (
                ('pf', "PF"),
                ('pj', "PJ")
            )
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(
                choices=TYPES
            )
            tp = StringField(choices=TYPES)

            father = CachedReferenceField(
                'self', fields=('tp',), auto_sync=False)
            father = CachedReferenceField("self", fields=("tp",), auto_sync=False)

        Persone.drop_collection()

        a1 = Persone(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Persone(name='Wilson Junior', tp='pf', father=a1)
        a2 = Persone(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        a1.tp = 'pf'
        a1.tp = "pf"
        a1.save()
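        # auto_sync=False: saving a1 does not touch the cached copy on a2, so the raw
        # document below still carries the stale "pj" value.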

        self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), {
            '_id': a2.pk,
            'name': 'Wilson Junior',
            'tp': 'pf',
            'father': {
                '_id': a1.pk,
                'tp': 'pj'
            }
        })
        assert Persone.objects._collection.find_one({"_id": a2.pk}) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pj"},
        }

    def test_cached_reference_embedded_fields(self):
        class Owner(EmbeddedDocument):
            TPS = (
                ('n', "Normal"),
                ('u', "Urgent")
            )
            TPS = (("n", "Normal"), ("u", "Urgent"))
            name = StringField()
            tp = StringField(
                verbose_name="Type",
                db_field="t",
                choices=TPS)
            tp = StringField(verbose_name="Type", db_field="t", choices=TPS)
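            # db_field="t" means the cached copy is persisted as "owner.t" in MongoDB, even
            # though the queries later in this test still use the owner__tp syntax.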

        class Animal(Document):
            name = StringField()
@@ -351,45 +289,41 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
|
||||
class Ocorrence(Document):
|
||||
person = StringField()
|
||||
animal = CachedReferenceField(
|
||||
Animal, fields=['tag', 'owner.tp'])
|
||||
animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"])
|
||||
|
||||
Animal.drop_collection()
|
||||
Ocorrence.drop_collection()
|
||||
|
||||
a = Animal(name="Leopard", tag="heavy",
|
||||
owner=Owner(tp='u', name="Wilson Júnior")
|
||||
)
|
||||
a = Animal(
|
||||
name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior")
|
||||
)
|
||||
a.save()
|
||||
|
||||
o = Ocorrence(person="teste", animal=a)
|
||||
o.save()
|
||||
self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), {
|
||||
'_id': a.pk,
|
||||
'tag': 'heavy',
|
||||
'owner': {
|
||||
't': 'u'
|
||||
}
|
||||
})
|
||||
self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
|
||||
self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u')
|
||||
assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == {
|
||||
"_id": a.pk,
|
||||
"tag": "heavy",
|
||||
"owner": {"t": "u"},
|
||||
}
|
||||
assert o.to_mongo()["animal"]["tag"] == "heavy"
|
||||
assert o.to_mongo()["animal"]["owner"]["t"] == "u"
|
||||
|
||||
# Check to_mongo with fields
|
||||
self.assertNotIn('animal', o.to_mongo(fields=['person']))
|
||||
assert "animal" not in o.to_mongo(fields=["person"])
|
||||
|
||||
# counts
|
||||
Ocorrence(person="teste 2").save()
|
||||
Ocorrence(person="teste 3").save()
|
||||
|
||||
count = Ocorrence.objects(
|
||||
animal__tag='heavy', animal__owner__tp='u').count()
|
||||
self.assertEqual(count, 1)
|
||||
count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count()
|
||||
assert count == 1
|
||||
|
||||
ocorrence = Ocorrence.objects(
|
||||
animal__tag='heavy',
|
||||
animal__owner__tp='u').first()
|
||||
self.assertEqual(ocorrence.person, "teste")
|
||||
self.assertIsInstance(ocorrence.animal, Animal)
|
||||
animal__tag="heavy", animal__owner__tp="u"
|
||||
).first()
|
||||
assert ocorrence.person == "teste"
|
||||
assert isinstance(ocorrence.animal, Animal)
|
||||
|
||||
def test_cached_reference_embedded_list_fields(self):
|
||||
class Owner(EmbeddedDocument):
|
||||
@@ -404,43 +338,40 @@ class TestCachedReferenceField(MongoDBTestCase):
|
||||
|
||||
class Ocorrence(Document):
|
||||
person = StringField()
|
||||
animal = CachedReferenceField(
|
||||
Animal, fields=['tag', 'owner.tags'])
|
||||
animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"])
|
||||
|
||||
Animal.drop_collection()
|
||||
Ocorrence.drop_collection()
|
||||
|
||||
a = Animal(name="Leopard", tag="heavy",
|
||||
owner=Owner(tags=['cool', 'funny'],
|
||||
name="Wilson Júnior")
|
||||
)
|
||||
a = Animal(
|
||||
name="Leopard",
|
||||
tag="heavy",
|
||||
owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"),
|
||||
)
|
||||
a.save()
|
||||
|
||||
o = Ocorrence(person="teste 2", animal=a)
|
||||
o.save()
|
||||
self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), {
|
||||
'_id': a.pk,
|
||||
'tag': 'heavy',
|
||||
'owner': {
|
||||
'tags': ['cool', 'funny']
|
||||
}
|
||||
})
|
||||
assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == {
|
||||
"_id": a.pk,
|
||||
"tag": "heavy",
|
||||
"owner": {"tags": ["cool", "funny"]},
|
||||
}
|
||||
|
||||
self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
|
||||
self.assertEqual(o.to_mongo()['animal']['owner']['tags'],
|
||||
['cool', 'funny'])
|
||||
assert o.to_mongo()["animal"]["tag"] == "heavy"
|
||||
assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"]
|
||||
|
||||
# counts
|
||||
Ocorrence(person="teste 2").save()
|
||||
Ocorrence(person="teste 3").save()
|
||||
|
||||
query = Ocorrence.objects(
|
||||
animal__tag='heavy', animal__owner__tags='cool')._query
|
||||
self.assertEqual(
|
||||
query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'})
|
||||
animal__tag="heavy", animal__owner__tags="cool"
|
||||
)._query
|
||||
assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"}
|
||||
|
||||
ocorrence = Ocorrence.objects(
|
||||
animal__tag='heavy',
|
||||
animal__owner__tags='cool').first()
|
||||
self.assertEqual(ocorrence.person, "teste 2")
|
||||
self.assertIsInstance(ocorrence.animal, Animal)
|
||||
animal__tag="heavy", animal__owner__tags="cool"
|
||||
).first()
|
||||
assert ocorrence.person == "teste 2"
|
||||
assert isinstance(ocorrence.animal, Animal)
|
||||
|
||||
@@ -1,9 +1,11 @@
# -*- coding: utf-8 -*-
import datetime
import math
import itertools
import math
import re

import pytest

from mongoengine import *

from tests.utils import MongoDBTestCase
|
||||
@@ -14,9 +16,10 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
||||
"""Tests for complex datetime fields - which can handle
|
||||
microseconds without rounding.
|
||||
"""
|
||||
|
||||
class LogEntry(Document):
|
||||
date = ComplexDateTimeField()
|
||||
date_with_dots = ComplexDateTimeField(separator='.')
|
||||
date_with_dots = ComplexDateTimeField(separator=".")
|
||||
|
||||
LogEntry.drop_collection()
|
||||
|
||||
@@ -27,7 +30,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date, d1)
|
||||
assert log.date == d1
|
||||
|
||||
# Post UTC - microseconds are rounded (down) nearest millisecond - with
|
||||
# default datetimefields
|
||||
@@ -35,7 +38,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date, d1)
|
||||
assert log.date == d1
|
||||
|
||||
# Pre UTC dates microseconds below 1000 are dropped - with default
|
||||
# datetimefields
|
||||
@@ -43,7 +46,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date, d1)
|
||||
assert log.date == d1
|
||||
|
||||
# Pre UTC microseconds above 1000 is wonky - with default datetimefields
|
||||
# log.date has an invalid microsecond value so I can't construct
|
||||
@@ -53,26 +56,34 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date, d1)
|
||||
assert log.date == d1
|
||||
log1 = LogEntry.objects.get(date=d1)
|
||||
self.assertEqual(log, log1)
|
||||
assert log == log1
|
||||
|
||||
        # Test string padding
        microsecond = map(int, [math.pow(10, x) for x in range(6)])
        mm = dd = hh = ii = ss = [1, 10]

        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date']
            self.assertTrue(re.match('^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored) is not None)
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
            assert (
                re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
                is not None
            )
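            # ComplexDateTimeField stores datetimes as zero-padded "YYYY,MM,DD,HH,MM,SS,UUUUUU"
            # strings, so microseconds survive intact and string ordering matches chronological order.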
|
||||
|
||||
        # Test separator
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots']
        self.assertTrue(re.match('^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$', stored) is not None)
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[
            "date_with_dots"
        ]
        assert (
            re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
        )
|
||||
|
||||
def test_complexdatetime_usage(self):
|
||||
"""Tests for complex datetime fields - which can handle
|
||||
microseconds without rounding.
|
||||
"""
|
||||
|
||||
class LogEntry(Document):
|
||||
date = ComplexDateTimeField()
|
||||
|
||||
@@ -84,62 +95,61 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
||||
log.save()
|
||||
|
||||
log1 = LogEntry.objects.get(date=d1)
|
||||
self.assertEqual(log, log1)
|
||||
assert log == log1
|
||||
|
||||
# create extra 59 log entries for a total of 60
|
||||
for i in range(1951, 2010):
|
||||
d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
|
||||
LogEntry(date=d).save()
|
||||
|
||||
self.assertEqual(LogEntry.objects.count(), 60)
|
||||
assert LogEntry.objects.count() == 60
|
||||
|
||||
# Test ordering
|
||||
logs = LogEntry.objects.order_by("date")
|
||||
i = 0
|
||||
while i < 59:
|
||||
self.assertTrue(logs[i].date <= logs[i + 1].date)
|
||||
assert logs[i].date <= logs[i + 1].date
|
||||
i += 1
|
||||
|
||||
logs = LogEntry.objects.order_by("-date")
|
||||
i = 0
|
||||
while i < 59:
|
||||
self.assertTrue(logs[i].date >= logs[i + 1].date)
|
||||
assert logs[i].date >= logs[i + 1].date
|
||||
i += 1
|
||||
|
||||
# Test searching
|
||||
logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
|
||||
self.assertEqual(logs.count(), 30)
|
||||
assert logs.count() == 30
|
||||
|
||||
logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
|
||||
self.assertEqual(logs.count(), 30)
|
||||
assert logs.count() == 30
|
||||
|
||||
logs = LogEntry.objects.filter(
|
||||
date__lte=datetime.datetime(2011, 1, 1),
|
||||
date__gte=datetime.datetime(2000, 1, 1),
|
||||
)
|
||||
self.assertEqual(logs.count(), 10)
|
||||
assert logs.count() == 10
|
||||
|
||||
LogEntry.drop_collection()
|
||||
|
||||
        # Test microsecond-level ordering/filtering
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(
                date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)
            ).save()
            LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save()
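            # All four microsecond values round-trip exactly; a plain DateTimeField would lose
            # sub-millisecond precision because BSON datetimes only keep milliseconds.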
|
||||
|
||||
logs = list(LogEntry.objects.order_by('date'))
|
||||
logs = list(LogEntry.objects.order_by("date"))
|
||||
for next_idx, log in enumerate(logs[:-1], start=1):
|
||||
next_log = logs[next_idx]
|
||||
self.assertTrue(log.date < next_log.date)
|
||||
assert log.date < next_log.date
|
||||
|
||||
logs = list(LogEntry.objects.order_by('-date'))
|
||||
logs = list(LogEntry.objects.order_by("-date"))
|
||||
for next_idx, log in enumerate(logs[:-1], start=1):
|
||||
next_log = logs[next_idx]
|
||||
self.assertTrue(log.date > next_log.date)
|
||||
assert log.date > next_log.date
|
||||
|
||||
logs = LogEntry.objects.filter(
|
||||
date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000))
|
||||
self.assertEqual(logs.count(), 4)
|
||||
date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)
|
||||
)
|
||||
assert logs.count() == 4
|
||||
|
||||
def test_no_default_value(self):
|
||||
class Log(Document):
|
||||
@@ -148,25 +158,26 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
||||
Log.drop_collection()
|
||||
|
||||
log = Log()
|
||||
self.assertIsNone(log.timestamp)
|
||||
assert log.timestamp is None
|
||||
log.save()
|
||||
|
||||
fetched_log = Log.objects.with_id(log.id)
|
||||
self.assertIsNone(fetched_log.timestamp)
|
||||
assert fetched_log.timestamp is None
|
||||
|
||||
def test_default_static_value(self):
|
||||
NOW = datetime.datetime.utcnow()
|
||||
|
||||
class Log(Document):
|
||||
timestamp = ComplexDateTimeField(default=NOW)
|
||||
|
||||
Log.drop_collection()
|
||||
|
||||
log = Log()
|
||||
self.assertEqual(log.timestamp, NOW)
|
||||
assert log.timestamp == NOW
|
||||
log.save()
|
||||
|
||||
fetched_log = Log.objects.with_id(log.id)
|
||||
self.assertEqual(fetched_log.timestamp, NOW)
|
||||
assert fetched_log.timestamp == NOW
|
||||
|
||||
def test_default_callable(self):
|
||||
NOW = datetime.datetime.utcnow()
|
||||
@@ -177,8 +188,23 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
|
||||
Log.drop_collection()
|
||||
|
||||
log = Log()
|
||||
self.assertGreaterEqual(log.timestamp, NOW)
|
||||
assert log.timestamp >= NOW
|
||||
log.save()
|
||||
|
||||
fetched_log = Log.objects.with_id(log.id)
|
||||
self.assertGreaterEqual(fetched_log.timestamp, NOW)
|
||||
assert fetched_log.timestamp >= NOW
|
||||
|
||||
def test_setting_bad_value_does_not_raise_unless_validate_is_called(self):
|
||||
# test regression of #2253
|
||||
|
||||
class Log(Document):
|
||||
timestamp = ComplexDateTimeField()
|
||||
|
||||
Log.drop_collection()
|
||||
|
||||
log = Log(timestamp="garbage")
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
|
||||
with pytest.raises(ValidationError):
|
||||
log.save()
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
import six
|
||||
|
||||
import pytest
|
||||
|
||||
try:
|
||||
import dateutil
|
||||
@@ -8,7 +9,6 @@ except ImportError:
|
||||
dateutil = None
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
@@ -18,41 +18,47 @@ class TestDateField(MongoDBTestCase):
|
||||
Ensure an exception is raised when trying to
|
||||
cast an empty string to datetime.
|
||||
"""
|
||||
|
||||
class MyDoc(Document):
|
||||
dt = DateField()
|
||||
|
||||
md = MyDoc(dt='')
|
||||
self.assertRaises(ValidationError, md.save)
|
||||
md = MyDoc(dt="")
|
||||
with pytest.raises(ValidationError):
|
||||
md.save()
|
||||
|
||||
def test_date_from_whitespace_string(self):
|
||||
"""
|
||||
Ensure an exception is raised when trying to
|
||||
cast a whitespace-only string to datetime.
|
||||
"""
|
||||
|
||||
class MyDoc(Document):
|
||||
dt = DateField()
|
||||
|
||||
md = MyDoc(dt=' ')
|
||||
self.assertRaises(ValidationError, md.save)
|
||||
md = MyDoc(dt=" ")
|
||||
with pytest.raises(ValidationError):
|
||||
md.save()
|
||||
|
||||
def test_default_values_today(self):
|
||||
"""Ensure that default field values are used when creating
|
||||
a document.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
day = DateField(default=datetime.date.today)
|
||||
|
||||
person = Person()
|
||||
person.validate()
|
||||
self.assertEqual(person.day, person.day)
|
||||
self.assertEqual(person.day, datetime.date.today())
|
||||
self.assertEqual(person._data['day'], person.day)
|
||||
assert person.day == person.day
|
||||
assert person.day == datetime.date.today()
|
||||
assert person._data["day"] == person.day
|
||||
|
||||
def test_date(self):
|
||||
"""Tests showing pymongo date fields
|
||||
|
||||
See: http://api.mongodb.org/python/current/api/bson/son.html#dt
|
||||
"""
|
||||
|
||||
class LogEntry(Document):
|
||||
date = DateField()
|
||||
|
||||
@@ -63,7 +69,7 @@ class TestDateField(MongoDBTestCase):
|
||||
log.date = datetime.date.today()
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date, datetime.date.today())
|
||||
assert log.date == datetime.date.today()
|
||||
|
||||
d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
|
||||
d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
|
||||
@@ -71,30 +77,20 @@ class TestDateField(MongoDBTestCase):
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date, d1.date())
|
||||
self.assertEqual(log.date, d2.date())
|
||||
assert log.date == d1.date()
|
||||
assert log.date == d2.date()
|
||||
|
||||
d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
|
||||
d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date, d1.date())
|
||||
self.assertEqual(log.date, d2.date())
|
||||
|
||||
if not six.PY3:
|
||||
# Pre UTC dates microseconds below 1000 are dropped
|
||||
# This does not seem to be true in PY3
|
||||
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
|
||||
d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date, d1.date())
|
||||
self.assertEqual(log.date, d2.date())
|
||||
assert log.date == d1.date()
|
||||
assert log.date == d2.date()
|
||||
|
||||
def test_regular_usage(self):
|
||||
"""Tests for regular datetime fields"""
|
||||
|
||||
class LogEntry(Document):
|
||||
date = DateField()
|
||||
|
||||
@@ -106,42 +102,43 @@ class TestDateField(MongoDBTestCase):
|
||||
log.validate()
|
||||
log.save()
|
||||
|
||||
for query in (d1, d1.isoformat(' ')):
|
||||
for query in (d1, d1.isoformat(" ")):
|
||||
log1 = LogEntry.objects.get(date=query)
|
||||
self.assertEqual(log, log1)
|
||||
assert log == log1
|
||||
|
||||
if dateutil:
|
||||
log1 = LogEntry.objects.get(date=d1.isoformat('T'))
|
||||
self.assertEqual(log, log1)
|
||||
log1 = LogEntry.objects.get(date=d1.isoformat("T"))
|
||||
assert log == log1
|
||||
|
||||
# create additional 19 log entries for a total of 20
|
||||
for i in range(1971, 1990):
|
||||
d = datetime.datetime(i, 1, 1, 0, 0, 1)
|
||||
LogEntry(date=d).save()
|
||||
|
||||
self.assertEqual(LogEntry.objects.count(), 20)
|
||||
assert LogEntry.objects.count() == 20
|
||||
|
||||
# Test ordering
|
||||
logs = LogEntry.objects.order_by("date")
|
||||
i = 0
|
||||
while i < 19:
|
||||
self.assertTrue(logs[i].date <= logs[i + 1].date)
|
||||
assert logs[i].date <= logs[i + 1].date
|
||||
i += 1
|
||||
|
||||
logs = LogEntry.objects.order_by("-date")
|
||||
i = 0
|
||||
while i < 19:
|
||||
self.assertTrue(logs[i].date >= logs[i + 1].date)
|
||||
assert logs[i].date >= logs[i + 1].date
|
||||
i += 1
|
||||
|
||||
# Test searching
|
||||
logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
|
||||
self.assertEqual(logs.count(), 10)
|
||||
assert logs.count() == 10
|
||||
|
||||
def test_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to datetime
|
||||
fields.
|
||||
"""
|
||||
|
||||
class LogEntry(Document):
|
||||
time = DateField()
|
||||
|
||||
@@ -152,14 +149,16 @@ class TestDateField(MongoDBTestCase):
|
||||
log.time = datetime.date.today()
|
||||
log.validate()
|
||||
|
||||
log.time = datetime.datetime.now().isoformat(' ')
|
||||
log.time = datetime.datetime.now().isoformat(" ")
|
||||
log.validate()
|
||||
|
||||
if dateutil:
|
||||
log.time = datetime.datetime.now().isoformat('T')
|
||||
log.time = datetime.datetime.now().isoformat("T")
|
||||
log.validate()
|
||||
|
||||
log.time = -1
|
||||
self.assertRaises(ValidationError, log.validate)
|
||||
log.time = 'ABC'
|
||||
self.assertRaises(ValidationError, log.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
log.time = "ABC"
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime as dt
|
||||
import six
|
||||
|
||||
import pytest
|
||||
|
||||
try:
|
||||
import dateutil
|
||||
@@ -19,27 +20,32 @@ class TestDateTimeField(MongoDBTestCase):
|
||||
Ensure an exception is raised when trying to
|
||||
cast an empty string to datetime.
|
||||
"""
|
||||
|
||||
class MyDoc(Document):
|
||||
dt = DateTimeField()
|
||||
|
||||
md = MyDoc(dt='')
|
||||
self.assertRaises(ValidationError, md.save)
|
||||
md = MyDoc(dt="")
|
||||
with pytest.raises(ValidationError):
|
||||
md.save()
|
||||
|
||||
def test_datetime_from_whitespace_string(self):
|
||||
"""
|
||||
Ensure an exception is raised when trying to
|
||||
cast a whitespace-only string to datetime.
|
||||
"""
|
||||
|
||||
class MyDoc(Document):
|
||||
dt = DateTimeField()
|
||||
|
||||
md = MyDoc(dt=' ')
|
||||
self.assertRaises(ValidationError, md.save)
|
||||
md = MyDoc(dt=" ")
|
||||
with pytest.raises(ValidationError):
|
||||
md.save()
|
||||
|
||||
def test_default_value_utcnow(self):
|
||||
"""Ensure that default field values are used when creating
|
||||
a document.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
created = DateTimeField(default=dt.datetime.utcnow)
|
||||
|
||||
@@ -47,9 +53,9 @@ class TestDateTimeField(MongoDBTestCase):
|
||||
person = Person()
|
||||
person.validate()
|
||||
person_created_t0 = person.created
|
||||
self.assertLess(person.created - utcnow, dt.timedelta(seconds=1))
|
||||
self.assertEqual(person_created_t0, person.created) # make sure it does not change
|
||||
self.assertEqual(person._data['created'], person.created)
|
||||
assert person.created - utcnow < dt.timedelta(seconds=1)
|
||||
assert person_created_t0 == person.created # make sure it does not change
|
||||
assert person._data["created"] == person.created
|
||||
|
||||
def test_handling_microseconds(self):
|
||||
"""Tests showing pymongo datetime fields handling of microseconds.
|
||||
@@ -58,6 +64,7 @@ class TestDateTimeField(MongoDBTestCase):
|
||||
|
||||
See: http://api.mongodb.org/python/current/api/bson/son.html#dt
|
||||
"""
|
||||
|
||||
class LogEntry(Document):
|
||||
date = DateTimeField()
|
||||
|
||||
@@ -68,7 +75,7 @@ class TestDateTimeField(MongoDBTestCase):
|
||||
log.date = dt.date.today()
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertEqual(log.date.date(), dt.date.today())
|
||||
assert log.date.date() == dt.date.today()
|
||||
|
||||
# Post UTC - microseconds are rounded (down) nearest millisecond and
|
||||
# dropped
|
||||
@@ -78,8 +85,8 @@ class TestDateTimeField(MongoDBTestCase):
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertNotEqual(log.date, d1)
|
||||
self.assertEqual(log.date, d2)
|
||||
assert log.date != d1
|
||||
assert log.date == d2
|
||||
|
||||
# Post UTC - microseconds are rounded (down) nearest millisecond
|
||||
d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
|
||||
@@ -87,22 +94,12 @@ class TestDateTimeField(MongoDBTestCase):
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertNotEqual(log.date, d1)
|
||||
self.assertEqual(log.date, d2)
|
||||
|
||||
if not six.PY3:
|
||||
# Pre UTC dates microseconds below 1000 are dropped
|
||||
# This does not seem to be true in PY3
|
||||
d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
|
||||
d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
|
||||
log.date = d1
|
||||
log.save()
|
||||
log.reload()
|
||||
self.assertNotEqual(log.date, d1)
|
||||
self.assertEqual(log.date, d2)
|
||||
assert log.date != d1
|
||||
assert log.date == d2
|
||||
|
||||
def test_regular_usage(self):
|
||||
"""Tests for regular datetime fields"""
|
||||
|
||||
class LogEntry(Document):
|
||||
date = DateTimeField()
|
||||
|
||||
@@ -114,51 +111,51 @@ class TestDateTimeField(MongoDBTestCase):
|
||||
log.validate()
|
||||
log.save()
|
||||
|
||||
for query in (d1, d1.isoformat(' ')):
|
||||
for query in (d1, d1.isoformat(" ")):
|
||||
log1 = LogEntry.objects.get(date=query)
|
||||
self.assertEqual(log, log1)
|
||||
assert log == log1
|
||||
|
||||
if dateutil:
|
||||
log1 = LogEntry.objects.get(date=d1.isoformat('T'))
|
||||
self.assertEqual(log, log1)
|
||||
log1 = LogEntry.objects.get(date=d1.isoformat("T"))
|
||||
assert log == log1
|
||||
|
||||
# create additional 19 log entries for a total of 20
|
||||
for i in range(1971, 1990):
|
||||
d = dt.datetime(i, 1, 1, 0, 0, 1)
|
||||
LogEntry(date=d).save()
|
||||
|
||||
self.assertEqual(LogEntry.objects.count(), 20)
|
||||
assert LogEntry.objects.count() == 20
|
||||
|
||||
# Test ordering
|
||||
logs = LogEntry.objects.order_by("date")
|
||||
i = 0
|
||||
while i < 19:
|
||||
self.assertTrue(logs[i].date <= logs[i + 1].date)
|
||||
assert logs[i].date <= logs[i + 1].date
|
||||
i += 1
|
||||
|
||||
logs = LogEntry.objects.order_by("-date")
|
||||
i = 0
|
||||
while i < 19:
|
||||
self.assertTrue(logs[i].date >= logs[i + 1].date)
|
||||
assert logs[i].date >= logs[i + 1].date
|
||||
i += 1
|
||||
|
||||
# Test searching
|
||||
logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1))
|
||||
self.assertEqual(logs.count(), 10)
|
||||
assert logs.count() == 10
|
||||
|
||||
logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1))
|
||||
self.assertEqual(logs.count(), 10)
|
||||
assert logs.count() == 10
|
||||
|
||||
logs = LogEntry.objects.filter(
|
||||
date__lte=dt.datetime(1980, 1, 1),
|
||||
date__gte=dt.datetime(1975, 1, 1),
|
||||
date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1)
|
||||
)
|
||||
self.assertEqual(logs.count(), 5)
|
||||
assert logs.count() == 5
|
||||
|
||||
def test_datetime_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to datetime
|
||||
fields.
|
||||
"""
|
||||
|
||||
class LogEntry(Document):
|
||||
time = DateTimeField()
|
||||
|
||||
@@ -169,45 +166,51 @@ class TestDateTimeField(MongoDBTestCase):
|
||||
log.time = dt.date.today()
|
||||
log.validate()
|
||||
|
||||
log.time = dt.datetime.now().isoformat(' ')
|
||||
log.time = dt.datetime.now().isoformat(" ")
|
||||
log.validate()
|
||||
|
||||
log.time = '2019-05-16 21:42:57.897847'
|
||||
log.time = "2019-05-16 21:42:57.897847"
|
||||
log.validate()
|
||||
|
||||
if dateutil:
|
||||
log.time = dt.datetime.now().isoformat('T')
|
||||
log.time = dt.datetime.now().isoformat("T")
|
||||
log.validate()
|
||||
|
||||
log.time = -1
|
||||
self.assertRaises(ValidationError, log.validate)
|
||||
log.time = 'ABC'
|
||||
self.assertRaises(ValidationError, log.validate)
|
||||
log.time = '2019-05-16 21:GARBAGE:12'
|
||||
self.assertRaises(ValidationError, log.validate)
|
||||
log.time = '2019-05-16 21:42:57.GARBAGE'
|
||||
self.assertRaises(ValidationError, log.validate)
|
||||
log.time = '2019-05-16 21:42:57.123.456'
|
||||
self.assertRaises(ValidationError, log.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
log.time = "ABC"
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
log.time = "2019-05-16 21:GARBAGE:12"
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
log.time = "2019-05-16 21:42:57.GARBAGE"
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
log.time = "2019-05-16 21:42:57.123.456"
|
||||
with pytest.raises(ValidationError):
|
||||
log.validate()
|
||||
|
||||
def test_parse_datetime_as_str(self):
|
||||
class DTDoc(Document):
|
||||
date = DateTimeField()
|
||||
|
||||
date_str = '2019-03-02 22:26:01'
|
||||
date_str = "2019-03-02 22:26:01"
|
||||
|
||||
# make sure that passing a parsable datetime works
|
||||
dtd = DTDoc()
|
||||
dtd.date = date_str
|
||||
self.assertIsInstance(dtd.date, six.string_types)
|
||||
assert isinstance(dtd.date, str)
|
||||
dtd.save()
|
||||
dtd.reload()
|
||||
|
||||
self.assertIsInstance(dtd.date, dt.datetime)
|
||||
self.assertEqual(str(dtd.date), date_str)
|
||||
assert isinstance(dtd.date, dt.datetime)
|
||||
assert str(dtd.date) == date_str
|
||||
|
||||
dtd.date = 'January 1st, 9999999999'
|
||||
self.assertRaises(ValidationError, dtd.validate)
|
||||
dtd.date = "January 1st, 9999999999"
|
||||
with pytest.raises(ValidationError):
|
||||
dtd.validate()
|
||||
|
||||
|
||||
class TestDateTimeTzAware(MongoDBTestCase):
|
||||
@@ -217,7 +220,7 @@ class TestDateTimeTzAware(MongoDBTestCase):
|
||||
connection._connections = {}
|
||||
connection._dbs = {}
|
||||
|
||||
connect(db='mongoenginetest', tz_aware=True)
|
||||
connect(db="mongoenginetest", tz_aware=True)
|
||||
|
||||
class LogEntry(Document):
|
||||
time = DateTimeField()
|
||||
@@ -228,4 +231,4 @@ class TestDateTimeTzAware(MongoDBTestCase):
|
||||
|
||||
log = LogEntry.objects.first()
|
||||
log.time = dt.datetime(2013, 1, 1, 0, 0, 0)
|
||||
self.assertEqual(['time'], log._changed_fields)
|
||||
assert ["time"] == log._changed_fields
|
||||
|
||||
@@ -1,39 +1,44 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from decimal import Decimal
|
||||
|
||||
from mongoengine import *
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestDecimalField(MongoDBTestCase):
|
||||
|
||||
def test_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to decimal fields.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
height = DecimalField(min_value=Decimal('0.1'),
|
||||
max_value=Decimal('3.5'))
|
||||
height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
Person(height=Decimal('1.89')).save()
|
||||
Person(height=Decimal("1.89")).save()
|
||||
person = Person.objects.first()
|
||||
self.assertEqual(person.height, Decimal('1.89'))
|
||||
assert person.height == Decimal("1.89")
|
||||
|
||||
person.height = '2.0'
|
||||
person.height = "2.0"
|
||||
person.save()
|
||||
person.height = 0.01
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person.height = Decimal('0.01')
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person.height = Decimal('4.0')
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person.height = 'something invalid'
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
person.height = Decimal("0.01")
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
person.height = Decimal("4.0")
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
person.height = "something invalid"
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
|
||||
person_2 = Person(height='something invalid')
|
||||
self.assertRaises(ValidationError, person_2.validate)
|
||||
person_2 = Person(height="something invalid")
|
||||
with pytest.raises(ValidationError):
|
||||
person_2.validate()
|
||||
|
||||
def test_comparison(self):
|
||||
class Person(Document):
|
||||
@@ -46,11 +51,11 @@ class TestDecimalField(MongoDBTestCase):
|
||||
Person(money=8).save()
|
||||
Person(money=10).save()
|
||||
|
||||
self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count())
|
||||
self.assertEqual(2, Person.objects(money__gt=7).count())
|
||||
self.assertEqual(2, Person.objects(money__gt="7").count())
|
||||
assert 2 == Person.objects(money__gt=Decimal("7")).count()
|
||||
assert 2 == Person.objects(money__gt=7).count()
|
||||
assert 2 == Person.objects(money__gt="7").count()
|
||||
|
||||
self.assertEqual(3, Person.objects(money__gte="7").count())
|
||||
assert 3 == Person.objects(money__gte="7").count()
|
||||
|
||||
def test_storage(self):
|
||||
class Person(Document):
|
||||
@@ -58,7 +63,14 @@ class TestDecimalField(MongoDBTestCase):
|
||||
string_value = DecimalField(precision=4, force_string=True)
|
||||
|
||||
Person.drop_collection()
|
||||
        values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"), Decimal("10.11111")]
        values_to_store = [
            10,
            10.1,
            10.11,
            "10.111",
            Decimal("10.1111"),
            Decimal("10.11111"),
        ]
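        # precision=4 rounds each value to four decimal places before storage; force_string=True
        # then stores that rounded value as a string rather than a float (see the expected documents below).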
|
||||
for store_at_creation in [True, False]:
|
||||
for value in values_to_store:
|
||||
# to_python is called explicitly if values were sent in the kwargs of __init__
|
||||
@@ -72,20 +84,27 @@ class TestDecimalField(MongoDBTestCase):
|
||||
|
||||
# How its stored
|
||||
expected = [
|
||||
{'float_value': 10.0, 'string_value': '10.0000'},
|
||||
{'float_value': 10.1, 'string_value': '10.1000'},
|
||||
{'float_value': 10.11, 'string_value': '10.1100'},
|
||||
{'float_value': 10.111, 'string_value': '10.1110'},
|
||||
{'float_value': 10.1111, 'string_value': '10.1111'},
|
||||
{'float_value': 10.1111, 'string_value': '10.1111'}]
|
||||
{"float_value": 10.0, "string_value": "10.0000"},
|
||||
{"float_value": 10.1, "string_value": "10.1000"},
|
||||
{"float_value": 10.11, "string_value": "10.1100"},
|
||||
{"float_value": 10.111, "string_value": "10.1110"},
|
||||
{"float_value": 10.1111, "string_value": "10.1111"},
|
||||
{"float_value": 10.1111, "string_value": "10.1111"},
|
||||
]
|
||||
expected.extend(expected)
|
||||
actual = list(Person.objects.exclude('id').as_pymongo())
|
||||
self.assertEqual(expected, actual)
|
||||
actual = list(Person.objects.exclude("id").as_pymongo())
|
||||
assert expected == actual
|
||||
|
||||
# How it comes out locally
|
||||
expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'),
|
||||
Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')]
|
||||
expected = [
|
||||
Decimal("10.0000"),
|
||||
Decimal("10.1000"),
|
||||
Decimal("10.1100"),
|
||||
Decimal("10.1110"),
|
||||
Decimal("10.1111"),
|
||||
Decimal("10.1111"),
|
||||
]
|
||||
expected.extend(expected)
|
||||
for field_name in ['float_value', 'string_value']:
|
||||
for field_name in ["float_value", "string_value"]:
|
||||
actual = list(Person.objects().scalar(field_name))
|
||||
self.assertEqual(expected, actual)
|
||||
assert expected == actual
|
||||
|
||||
@@ -1,100 +1,123 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from bson import InvalidDocument
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.base import BaseDict
|
||||
from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version
|
||||
|
||||
from tests.utils import MongoDBTestCase, get_as_pymongo
|
||||
|
||||
|
||||
class TestDictField(MongoDBTestCase):
|
||||
|
||||
def test_storage(self):
|
||||
class BlogPost(Document):
|
||||
info = DictField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
info = {'testkey': 'testvalue'}
|
||||
info = {"testkey": "testvalue"}
|
||||
post = BlogPost(info=info).save()
|
||||
self.assertEqual(
|
||||
get_as_pymongo(post),
|
||||
{
|
||||
'_id': post.id,
|
||||
'info': info
|
||||
}
|
||||
)
|
||||
assert get_as_pymongo(post) == {"_id": post.id, "info": info}
|
||||
|
||||
def test_general_things(self):
|
||||
"""Ensure that dict types work as expected."""
|
||||
def test_validate_invalid_type(self):
|
||||
class BlogPost(Document):
|
||||
info = DictField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
invalid_infos = ["my post", ["test", "test"], {1: "test"}]
|
||||
for invalid_info in invalid_infos:
|
||||
with pytest.raises(ValidationError):
|
||||
BlogPost(info=invalid_info).validate()
|
||||
|
||||
def test_keys_with_dots_or_dollars(self):
|
||||
class BlogPost(Document):
|
||||
info = DictField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost()
|
||||
post.info = 'my post'
|
||||
self.assertRaises(ValidationError, post.validate)
|
||||
|
||||
post.info = ['test', 'test']
|
||||
self.assertRaises(ValidationError, post.validate)
|
||||
post.info = {"$title": "test"}
|
||||
with pytest.raises(ValidationError):
|
||||
post.validate()
|
||||
|
||||
post.info = {'$title': 'test'}
|
||||
self.assertRaises(ValidationError, post.validate)
|
||||
post.info = {"nested": {"$title": "test"}}
|
||||
with pytest.raises(ValidationError):
|
||||
post.validate()
|
||||
|
||||
post.info = {'nested': {'$title': 'test'}}
|
||||
self.assertRaises(ValidationError, post.validate)
|
||||
post.info = {"$title.test": "test"}
|
||||
with pytest.raises(ValidationError):
|
||||
post.validate()
|
||||
|
||||
post.info = {'the.title': 'test'}
|
||||
self.assertRaises(ValidationError, post.validate)
|
||||
post.info = {"nested": {"the.title": "test"}}
|
||||
if get_mongodb_version() < MONGODB_36:
|
||||
# MongoDB < 3.6 rejects dots
|
||||
# To avoid checking the mongodb version from the DictField class
|
||||
# we rely on MongoDB to reject the data during the save
|
||||
post.validate()
|
||||
with pytest.raises(InvalidDocument):
|
||||
post.save()
|
||||
else:
|
||||
post.validate()
|
||||
|
||||
post.info = {'nested': {'the.title': 'test'}}
|
||||
self.assertRaises(ValidationError, post.validate)
|
||||
post.info = {"dollar_and_dot": {"te$st.test": "test"}}
|
||||
if get_mongodb_version() < MONGODB_36:
|
||||
post.validate()
|
||||
with pytest.raises(InvalidDocument):
|
||||
post.save()
|
||||
else:
|
||||
post.validate()
|
||||
|
||||
post.info = {1: 'test'}
|
||||
self.assertRaises(ValidationError, post.validate)
|
||||
def test_general_things(self):
|
||||
"""Ensure that dict types work as expected."""
|
||||
|
||||
post.info = {'title': 'test'}
|
||||
class BlogPost(Document):
|
||||
info = DictField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(info={"title": "test"})
|
||||
post.save()
|
||||
|
||||
post = BlogPost()
|
||||
post.info = {'title': 'dollar_sign', 'details': {'te$t': 'test'}}
|
||||
post.info = {"title": "dollar_sign", "details": {"te$t": "test"}}
|
||||
post.save()
|
||||
|
||||
post = BlogPost()
|
||||
post.info = {'details': {'test': 'test'}}
|
||||
post.info = {"details": {"test": "test"}}
|
||||
post.save()
|
||||
|
||||
post = BlogPost()
|
||||
post.info = {'details': {'test': 3}}
|
||||
post.info = {"details": {"test": 3}}
|
||||
post.save()
|
||||
|
||||
self.assertEqual(BlogPost.objects.count(), 4)
|
||||
self.assertEqual(
|
||||
BlogPost.objects.filter(info__title__exact='test').count(), 1)
|
||||
self.assertEqual(
|
||||
BlogPost.objects.filter(info__details__test__exact='test').count(), 1)
|
||||
assert BlogPost.objects.count() == 4
|
||||
assert BlogPost.objects.filter(info__title__exact="test").count() == 1
|
||||
assert BlogPost.objects.filter(info__details__test__exact="test").count() == 1
|
||||
|
||||
post = BlogPost.objects.filter(info__title__exact='dollar_sign').first()
|
||||
self.assertIn('te$t', post['info']['details'])
|
||||
post = BlogPost.objects.filter(info__title__exact="dollar_sign").first()
|
||||
assert "te$t" in post["info"]["details"]
|
||||
|
||||
# Confirm handles non strings or non existing keys
|
||||
self.assertEqual(
|
||||
BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
|
||||
self.assertEqual(
|
||||
BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
|
||||
assert BlogPost.objects.filter(info__details__test__exact=5).count() == 0
|
||||
assert BlogPost.objects.filter(info__made_up__test__exact="test").count() == 0
|
||||
|
||||
post = BlogPost.objects.create(info={'title': 'original'})
|
||||
post.info.update({'title': 'updated'})
|
||||
post = BlogPost.objects.create(info={"title": "original"})
|
||||
post.info.update({"title": "updated"})
|
||||
post.save()
|
||||
post.reload()
|
||||
self.assertEqual('updated', post.info['title'])
|
||||
assert "updated" == post.info["title"]
|
||||
|
||||
post.info.setdefault('authors', [])
|
||||
post.info.setdefault("authors", [])
|
||||
post.save()
|
||||
post.reload()
|
||||
self.assertEqual([], post.info['authors'])
|
||||
assert post.info["authors"] == []
|
||||
|
||||
def test_dictfield_dump_document(self):
|
||||
"""Ensure a DictField can handle another document's dump."""
|
||||
|
||||
class Doc(Document):
|
||||
field = DictField()
|
||||
|
||||
@@ -106,51 +129,60 @@ class TestDictField(MongoDBTestCase):
|
||||
id = IntField(primary_key=True, default=1)
|
||||
recursive = DictField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class ToEmbedChild(ToEmbedParent):
|
||||
pass
|
||||
|
||||
to_embed_recursive = ToEmbed(id=1).save()
|
||||
to_embed = ToEmbed(
|
||||
id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
|
||||
id=2, recursive=to_embed_recursive.to_mongo().to_dict()
|
||||
).save()
|
||||
doc = Doc(field=to_embed.to_mongo().to_dict())
|
||||
doc.save()
|
||||
self.assertIsInstance(doc.field, dict)
|
||||
self.assertEqual(doc.field, {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}})
|
||||
assert isinstance(doc.field, dict)
|
||||
assert doc.field == {"_id": 2, "recursive": {"_id": 1, "recursive": {}}}
|
||||
# Same thing with a Document with a _cls field
|
||||
to_embed_recursive = ToEmbedChild(id=1).save()
|
||||
to_embed_child = ToEmbedChild(
|
||||
id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
|
||||
id=2, recursive=to_embed_recursive.to_mongo().to_dict()
|
||||
).save()
|
||||
doc = Doc(field=to_embed_child.to_mongo().to_dict())
|
||||
doc.save()
|
||||
self.assertIsInstance(doc.field, dict)
|
||||
assert isinstance(doc.field, dict)
|
||||
expected = {
|
||||
'_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild',
|
||||
'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}}
|
||||
"_id": 2,
|
||||
"_cls": "ToEmbedParent.ToEmbedChild",
|
||||
"recursive": {
|
||||
"_id": 1,
|
||||
"_cls": "ToEmbedParent.ToEmbedChild",
|
||||
"recursive": {},
|
||||
},
|
||||
}
|
||||
self.assertEqual(doc.field, expected)
|
||||
assert doc.field == expected
|
||||
|
||||
    def test_dictfield_strict(self):
        """Ensure that dict field handles validation if provided a strict field type."""

        class Simple(Document):
            mapping = DictField(field=IntField())
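            # field=IntField() makes every value in the dict validate as an integer, so the
            # string assignment later in the test must raise ValidationError.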
|
||||
|
||||
Simple.drop_collection()
|
||||
|
||||
e = Simple()
|
||||
e.mapping['someint'] = 1
|
||||
e.mapping["someint"] = 1
|
||||
e.save()
|
||||
|
||||
# try creating an invalid mapping
|
||||
with self.assertRaises(ValidationError):
|
||||
e.mapping['somestring'] = "abc"
|
||||
with pytest.raises(ValidationError):
|
||||
e.mapping["somestring"] = "abc"
|
||||
e.save()
|
||||
|
||||
def test_dictfield_complex(self):
|
||||
"""Ensure that the dict field can handle the complex types."""
|
||||
|
||||
class SettingBase(EmbeddedDocument):
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class StringSetting(SettingBase):
|
||||
value = StringField()
|
||||
@@ -164,73 +196,76 @@ class TestDictField(MongoDBTestCase):
|
||||
Simple.drop_collection()
|
||||
|
||||
e = Simple()
|
||||
e.mapping['somestring'] = StringSetting(value='foo')
|
||||
e.mapping['someint'] = IntegerSetting(value=42)
|
||||
e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!',
|
||||
'float': 1.001,
|
||||
'complex': IntegerSetting(value=42),
|
||||
'list': [IntegerSetting(value=42),
|
||||
StringSetting(value='foo')]}
|
||||
e.mapping["somestring"] = StringSetting(value="foo")
|
||||
e.mapping["someint"] = IntegerSetting(value=42)
|
||||
e.mapping["nested_dict"] = {
|
||||
"number": 1,
|
||||
"string": "Hi!",
|
||||
"float": 1.001,
|
||||
"complex": IntegerSetting(value=42),
|
||||
"list": [IntegerSetting(value=42), StringSetting(value="foo")],
|
||||
}
|
||||
e.save()
|
||||
|
||||
e2 = Simple.objects.get(id=e.id)
|
||||
self.assertIsInstance(e2.mapping['somestring'], StringSetting)
|
||||
self.assertIsInstance(e2.mapping['someint'], IntegerSetting)
|
||||
assert isinstance(e2.mapping["somestring"], StringSetting)
|
||||
assert isinstance(e2.mapping["someint"], IntegerSetting)
|
||||
|
||||
# Test querying
|
||||
self.assertEqual(
|
||||
Simple.objects.filter(mapping__someint__value=42).count(), 1)
|
||||
self.assertEqual(
|
||||
Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
|
||||
self.assertEqual(
|
||||
Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
|
||||
self.assertEqual(
|
||||
Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
|
||||
self.assertEqual(
|
||||
Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)
|
||||
assert Simple.objects.filter(mapping__someint__value=42).count() == 1
|
||||
assert Simple.objects.filter(mapping__nested_dict__number=1).count() == 1
|
||||
assert (
|
||||
Simple.objects.filter(mapping__nested_dict__complex__value=42).count() == 1
|
||||
)
|
||||
assert (
|
||||
Simple.objects.filter(mapping__nested_dict__list__0__value=42).count() == 1
|
||||
)
|
||||
assert (
|
||||
Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
|
||||
== 1
|
||||
)
|
||||
|
||||
# Confirm can update
|
||||
Simple.objects().update(set__mapping={"someint": IntegerSetting(value=10)})
|
||||
Simple.objects().update(
|
||||
set__mapping={"someint": IntegerSetting(value=10)})
|
||||
Simple.objects().update(
|
||||
set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
|
||||
self.assertEqual(
|
||||
Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
|
||||
self.assertEqual(
|
||||
Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)
|
||||
set__mapping__nested_dict__list__1=StringSetting(value="Boo")
|
||||
)
|
||||
assert (
|
||||
Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
|
||||
== 0
|
||||
)
|
||||
assert (
|
||||
Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count()
|
||||
== 1
|
||||
)
|
||||
|
||||
def test_push_dict(self):
|
||||
class MyModel(Document):
|
||||
events = ListField(DictField())
|
||||
|
||||
doc = MyModel(events=[{'a': 1}]).save()
|
||||
doc = MyModel(events=[{"a": 1}]).save()
|
||||
raw_doc = get_as_pymongo(doc)
|
||||
expected_raw_doc = {
|
||||
'_id': doc.id,
|
||||
'events': [{'a': 1}]
|
||||
}
|
||||
self.assertEqual(raw_doc, expected_raw_doc)
|
||||
expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]}
|
||||
assert raw_doc == expected_raw_doc
|
||||
|
||||
MyModel.objects(id=doc.id).update(push__events={})
|
||||
raw_doc = get_as_pymongo(doc)
|
||||
expected_raw_doc = {
|
||||
'_id': doc.id,
|
||||
'events': [{'a': 1}, {}]
|
||||
}
|
||||
self.assertEqual(raw_doc, expected_raw_doc)
|
||||
expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]}
|
||||
assert raw_doc == expected_raw_doc
|
||||
|
||||
def test_ensure_unique_default_instances(self):
|
||||
"""Ensure that every field has it's own unique default instance."""
|
||||
|
||||
class D(Document):
|
||||
data = DictField()
|
||||
data2 = DictField(default=lambda: {})
|
||||
|
||||
d1 = D()
|
||||
d1.data['foo'] = 'bar'
|
||||
d1.data2['foo'] = 'bar'
|
||||
d1.data["foo"] = "bar"
|
||||
d1.data2["foo"] = "bar"
|
||||
d2 = D()
|
||||
self.assertEqual(d2.data, {})
|
||||
self.assertEqual(d2.data2, {})
|
||||
assert d2.data == {}
|
||||
assert d2.data2 == {}
|
||||
|
||||
def test_dict_field_invalid_dict_value(self):
|
||||
class DictFieldTest(Document):
|
||||
@@ -240,11 +275,13 @@ class TestDictField(MongoDBTestCase):
|
||||
|
||||
test = DictFieldTest(dictionary=None)
|
||||
test.dictionary # Just access to test getter
|
||||
self.assertRaises(ValidationError, test.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
test.validate()
|
||||
|
||||
test = DictFieldTest(dictionary=False)
|
||||
test.dictionary # Just access to test getter
|
||||
self.assertRaises(ValidationError, test.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
test.validate()
|
||||
|
||||
def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
|
||||
class DictFieldTest(Document):
|
||||
@@ -255,31 +292,34 @@ class TestDictField(MongoDBTestCase):
|
||||
class Embedded(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
embed = Embedded(name='garbage')
|
||||
embed = Embedded(name="garbage")
|
||||
doc = DictFieldTest(dictionary=embed)
|
||||
with self.assertRaises(ValidationError) as ctx_err:
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
doc.validate()
|
||||
self.assertIn("'dictionary'", str(ctx_err.exception))
|
||||
self.assertIn('Only dictionaries may be used in a DictField', str(ctx_err.exception))
|
||||
|
||||
error_msg = str(exc_info.value)
|
||||
assert "'dictionary'" in error_msg
|
||||
assert "Only dictionaries may be used in a DictField" in error_msg
|
||||
|
||||
def test_atomic_update_dict_field(self):
|
||||
"""Ensure that the entire DictField can be atomically updated."""
|
||||
|
||||
class Simple(Document):
|
||||
mapping = DictField(field=ListField(IntField(required=True)))
|
||||
|
||||
Simple.drop_collection()
|
||||
|
||||
e = Simple()
|
||||
e.mapping['someints'] = [1, 2]
|
||||
e.mapping["someints"] = [1, 2]
|
||||
e.save()
|
||||
e.update(set__mapping={"ints": [3, 4]})
|
||||
e.reload()
|
||||
self.assertEqual(BaseDict, type(e.mapping))
|
||||
self.assertEqual({"ints": [3, 4]}, e.mapping)
|
||||
assert isinstance(e.mapping, BaseDict)
|
||||
assert {"ints": [3, 4]} == e.mapping
|
||||
|
||||
# try creating an invalid mapping
|
||||
with self.assertRaises(ValueError):
|
||||
e.update(set__mapping={"somestrings": ["foo", "bar", ]})
|
||||
with pytest.raises(ValueError):
|
||||
e.update(set__mapping={"somestrings": ["foo", "bar"]})
|
||||
|
||||
def test_dictfield_with_referencefield_complex_nesting_cases(self):
|
||||
"""Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)"""
|
||||
@@ -296,29 +336,33 @@ class TestDictField(MongoDBTestCase):
|
||||
mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False)))
|
||||
mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True))))
|
||||
mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False))))
|
||||
mapping8 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=True)))))
|
||||
mapping9 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=False)))))
|
||||
mapping8 = DictField(
|
||||
ListField(DictField(ListField(ReferenceField(Doc, dbref=True))))
|
||||
)
|
||||
mapping9 = DictField(
|
||||
ListField(DictField(ListField(ReferenceField(Doc, dbref=False))))
|
||||
)
|
||||
|
||||
Doc.drop_collection()
|
||||
Simple.drop_collection()
|
||||
|
||||
d = Doc(s='aa').save()
|
||||
d = Doc(s="aa").save()
|
||||
e = Simple()
|
||||
e.mapping0['someint'] = e.mapping1['someint'] = d
|
||||
e.mapping2['someint'] = e.mapping3['someint'] = [d]
|
||||
e.mapping4['someint'] = e.mapping5['someint'] = {'d': d}
|
||||
e.mapping6['someint'] = e.mapping7['someint'] = [{'d': d}]
|
||||
e.mapping8['someint'] = e.mapping9['someint'] = [{'d': [d]}]
|
||||
e.mapping0["someint"] = e.mapping1["someint"] = d
|
||||
e.mapping2["someint"] = e.mapping3["someint"] = [d]
|
||||
e.mapping4["someint"] = e.mapping5["someint"] = {"d": d}
|
||||
e.mapping6["someint"] = e.mapping7["someint"] = [{"d": d}]
|
||||
e.mapping8["someint"] = e.mapping9["someint"] = [{"d": [d]}]
|
||||
e.save()
|
||||
|
||||
s = Simple.objects.first()
|
||||
self.assertIsInstance(s.mapping0['someint'], Doc)
|
||||
self.assertIsInstance(s.mapping1['someint'], Doc)
|
||||
self.assertIsInstance(s.mapping2['someint'][0], Doc)
|
||||
self.assertIsInstance(s.mapping3['someint'][0], Doc)
|
||||
self.assertIsInstance(s.mapping4['someint']['d'], Doc)
|
||||
self.assertIsInstance(s.mapping5['someint']['d'], Doc)
|
||||
self.assertIsInstance(s.mapping6['someint'][0]['d'], Doc)
|
||||
self.assertIsInstance(s.mapping7['someint'][0]['d'], Doc)
|
||||
self.assertIsInstance(s.mapping8['someint'][0]['d'][0], Doc)
|
||||
self.assertIsInstance(s.mapping9['someint'][0]['d'][0], Doc)
|
||||
assert isinstance(s.mapping0["someint"], Doc)
|
||||
assert isinstance(s.mapping1["someint"], Doc)
|
||||
assert isinstance(s.mapping2["someint"][0], Doc)
|
||||
assert isinstance(s.mapping3["someint"][0], Doc)
|
||||
assert isinstance(s.mapping4["someint"]["d"], Doc)
|
||||
assert isinstance(s.mapping5["someint"]["d"], Doc)
|
||||
assert isinstance(s.mapping6["someint"][0]["d"], Doc)
|
||||
assert isinstance(s.mapping7["someint"][0]["d"], Doc)
|
||||
assert isinstance(s.mapping8["someint"][0]["d"][0], Doc)
|
||||
assert isinstance(s.mapping9["someint"][0]["d"][0], Doc)
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
from unittest import SkipTest
|
||||
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
@@ -12,52 +12,52 @@ class TestEmailField(MongoDBTestCase):
|
||||
class User(Document):
|
||||
email = EmailField()
|
||||
|
||||
user = User(email='ross@example.com')
|
||||
user = User(email="ross@example.com")
|
||||
user.validate()
|
||||
|
||||
user = User(email='ross@example.co.uk')
|
||||
user = User(email="ross@example.co.uk")
|
||||
user.validate()
|
||||
|
||||
user = User(email=('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S'
|
||||
'aJIazqqWkm7.net'))
|
||||
user = User(
|
||||
email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5SaJIazqqWkm7.net")
|
||||
)
|
||||
user.validate()
|
||||
|
||||
user = User(email='new-tld@example.technology')
|
||||
user = User(email="new-tld@example.technology")
|
||||
user.validate()
|
||||
|
||||
user = User(email='ross@example.com.')
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
user = User(email="ross@example.com.")
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
# unicode domain
|
||||
user = User(email=u'user@пример.рф')
|
||||
user = User(email=u"user@пример.рф")
|
||||
user.validate()
|
||||
|
||||
# invalid unicode domain
|
||||
user = User(email=u'user@пример')
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
user = User(email=u"user@пример")
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
# invalid data type
|
||||
user = User(email=123)
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
def test_email_field_unicode_user(self):
|
||||
# Don't run this test on pypy3, which doesn't support unicode regex:
|
||||
# https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode
|
||||
if sys.version_info[:2] == (3, 2):
|
||||
raise SkipTest('unicode email addresses are not supported on PyPy 3')
|
||||
|
||||
class User(Document):
|
||||
email = EmailField()
|
||||
|
||||
# unicode user shouldn't validate by default...
|
||||
user = User(email=u'Dörte@Sörensen.example.com')
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
user = User(email=u"Dörte@Sörensen.example.com")
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
# ...but it should be fine with allow_utf8_user set to True
|
||||
class User(Document):
|
||||
email = EmailField(allow_utf8_user=True)
|
||||
|
||||
user = User(email=u'Dörte@Sörensen.example.com')
|
||||
user = User(email=u"Dörte@Sörensen.example.com")
|
||||
user.validate()
|
||||
|
||||
def test_email_field_domain_whitelist(self):
|
||||
@@ -65,43 +65,48 @@ class TestEmailField(MongoDBTestCase):
|
||||
email = EmailField()
|
||||
|
||||
# localhost domain shouldn't validate by default...
|
||||
user = User(email='me@localhost')
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
user = User(email="me@localhost")
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
# ...but it should be fine if it's whitelisted
|
||||
class User(Document):
|
||||
email = EmailField(domain_whitelist=['localhost'])
|
||||
email = EmailField(domain_whitelist=["localhost"])
|
||||
|
||||
user = User(email='me@localhost')
|
||||
user = User(email="me@localhost")
|
||||
user.validate()
|
||||
|
||||
def test_email_domain_validation_fails_if_invalid_idn(self):
|
||||
class User(Document):
|
||||
email = EmailField()
|
||||
|
||||
invalid_idn = '.google.com'
|
||||
user = User(email='me@%s' % invalid_idn)
|
||||
with self.assertRaises(ValidationError) as ctx_err:
|
||||
invalid_idn = ".google.com"
|
||||
user = User(email="me@%s" % invalid_idn)
|
||||
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
user.validate()
|
||||
self.assertIn("domain failed IDN encoding", str(ctx_err.exception))
|
||||
assert "domain failed IDN encoding" in str(exc_info.value)
|
||||
|
||||
def test_email_field_ip_domain(self):
|
||||
class User(Document):
|
||||
email = EmailField()
|
||||
|
||||
valid_ipv4 = 'email@[127.0.0.1]'
|
||||
valid_ipv6 = 'email@[2001:dB8::1]'
|
||||
invalid_ip = 'email@[324.0.0.1]'
|
||||
valid_ipv4 = "email@[127.0.0.1]"
|
||||
valid_ipv6 = "email@[2001:dB8::1]"
|
||||
invalid_ip = "email@[324.0.0.1]"
|
||||
|
||||
# IP address as a domain shouldn't validate by default...
|
||||
user = User(email=valid_ipv4)
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
user = User(email=valid_ipv6)
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
user = User(email=invalid_ip)
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
# ...but it should be fine with allow_ip_domain set to True
|
||||
class User(Document):
|
||||
@@ -115,16 +120,18 @@ class TestEmailField(MongoDBTestCase):
|
||||
|
||||
# invalid IP should still fail validation
|
||||
user = User(email=invalid_ip)
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
def test_email_field_honors_regex(self):
|
||||
class User(Document):
|
||||
email = EmailField(regex=r'\w+@example.com')
|
||||
email = EmailField(regex=r"\w+@example.com")
|
||||
|
||||
# Fails regex validation
|
||||
user = User(email='me@foo.com')
|
||||
self.assertRaises(ValidationError, user.validate)
|
||||
user = User(email="me@foo.com")
|
||||
with pytest.raises(ValidationError):
|
||||
user.validate()
|
||||
|
||||
# Passes regex validation
|
||||
user = User(email='me@example.com')
|
||||
self.assertIsNone(user.validate())
|
||||
user = User(email="me@example.com")
|
||||
assert user.validate() is None
|
||||
|
||||
@@ -1,7 +1,18 @@
# -*- coding: utf-8 -*-
from mongoengine import Document, StringField, ValidationError, EmbeddedDocument, EmbeddedDocumentField, \
InvalidQueryError, LookUpError, IntField, GenericEmbeddedDocumentField, ListField, EmbeddedDocumentListField, \
ReferenceField
import pytest

from mongoengine import (
Document,
EmbeddedDocument,
EmbeddedDocumentField,
GenericEmbeddedDocumentField,
IntField,
InvalidQueryError,
ListField,
LookUpError,
StringField,
ValidationError,
)

from tests.utils import MongoDBTestCase

@@ -12,37 +23,41 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
|
||||
name = StringField()
|
||||
|
||||
field = EmbeddedDocumentField(MyDoc)
|
||||
self.assertEqual(field.document_type_obj, MyDoc)
|
||||
assert field.document_type_obj == MyDoc
|
||||
|
||||
field2 = EmbeddedDocumentField('MyDoc')
|
||||
self.assertEqual(field2.document_type_obj, 'MyDoc')
|
||||
field2 = EmbeddedDocumentField("MyDoc")
|
||||
assert field2.document_type_obj == "MyDoc"
|
||||
|
||||
def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self):
|
||||
with self.assertRaises(ValidationError):
|
||||
with pytest.raises(ValidationError):
|
||||
EmbeddedDocumentField(dict)
|
||||
|
||||
def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self):
|
||||
|
||||
class MyDoc(Document):
|
||||
name = StringField()
|
||||
|
||||
emb = EmbeddedDocumentField('MyDoc')
|
||||
with self.assertRaises(ValidationError) as ctx:
|
||||
emb = EmbeddedDocumentField("MyDoc")
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
emb.document_type
|
||||
self.assertIn('Invalid embedded document class provided to an EmbeddedDocumentField', str(ctx.exception))
|
||||
assert (
|
||||
"Invalid embedded document class provided to an EmbeddedDocumentField"
|
||||
in str(exc_info.value)
|
||||
)
|
||||
|
||||
def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self):
|
||||
# Relates to #1661
|
||||
class MyDoc(Document):
|
||||
name = StringField()
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
with pytest.raises(ValidationError):
|
||||
|
||||
class MyFailingDoc(Document):
|
||||
emb = EmbeddedDocumentField(MyDoc)
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
with pytest.raises(ValidationError):
|
||||
|
||||
class MyFailingdoc2(Document):
|
||||
emb = EmbeddedDocumentField('MyDoc')
|
||||
emb = EmbeddedDocumentField("MyDoc")
|
||||
|
||||
def test_query_embedded_document_attribute(self):
|
||||
class AdminSettings(EmbeddedDocument):
|
||||
@@ -55,34 +70,31 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p = Person(
|
||||
settings=AdminSettings(foo1='bar1', foo2='bar2'),
|
||||
name='John',
|
||||
).save()
|
||||
p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save()
|
||||
|
||||
# Test non exiting attribute
|
||||
with self.assertRaises(InvalidQueryError) as ctx_err:
|
||||
Person.objects(settings__notexist='bar').first()
|
||||
self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
|
||||
with pytest.raises(InvalidQueryError) as exc_info:
|
||||
Person.objects(settings__notexist="bar").first()
|
||||
assert str(exc_info.value) == u'Cannot resolve field "notexist"'
|
||||
|
||||
with self.assertRaises(LookUpError):
|
||||
Person.objects.only('settings.notexist')
|
||||
with pytest.raises(LookUpError):
|
||||
Person.objects.only("settings.notexist")
|
||||
|
||||
# Test existing attribute
|
||||
self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p.id)
|
||||
only_p = Person.objects.only('settings.foo1').first()
|
||||
self.assertEqual(only_p.settings.foo1, p.settings.foo1)
|
||||
self.assertIsNone(only_p.settings.foo2)
|
||||
self.assertIsNone(only_p.name)
|
||||
assert Person.objects(settings__foo1="bar1").first().id == p.id
|
||||
only_p = Person.objects.only("settings.foo1").first()
|
||||
assert only_p.settings.foo1 == p.settings.foo1
|
||||
assert only_p.settings.foo2 is None
|
||||
assert only_p.name is None
|
||||
|
||||
exclude_p = Person.objects.exclude('settings.foo1').first()
|
||||
self.assertIsNone(exclude_p.settings.foo1)
|
||||
self.assertEqual(exclude_p.settings.foo2, p.settings.foo2)
|
||||
self.assertEqual(exclude_p.name, p.name)
|
||||
exclude_p = Person.objects.exclude("settings.foo1").first()
|
||||
assert exclude_p.settings.foo1 is None
|
||||
assert exclude_p.settings.foo2 == p.settings.foo2
|
||||
assert exclude_p.name == p.name
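
The test above is the reference for querying into an EmbeddedDocumentField. A condensed sketch of the same patterns follows (double-underscore filters, dotted projections, and the two error types); it assumes a reachable MongoDB and uses illustrative class names:

import pytest
from mongoengine import (
    Document,
    EmbeddedDocument,
    EmbeddedDocumentField,
    InvalidQueryError,
    LookUpError,
    StringField,
    connect,
)

connect(db="mongoenginetest")


class Settings(EmbeddedDocument):
    foo1 = StringField()
    foo2 = StringField()


class Person(Document):
    name = StringField()
    settings = EmbeddedDocumentField(Settings)


Person.drop_collection()
p = Person(name="John", settings=Settings(foo1="bar1", foo2="bar2")).save()

assert Person.objects(settings__foo1="bar1").first().id == p.id            # filter on a nested field
assert Person.objects.only("settings.foo1").first().settings.foo2 is None  # projection drops foo2

with pytest.raises(InvalidQueryError):
    Person.objects(settings__notexist="bar").first()   # unknown field in a filter

with pytest.raises(LookUpError):
    Person.objects.only("settings.notexist")            # unknown field in a projection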
|
||||
|
||||
def test_query_embedded_document_attribute_with_inheritance(self):
|
||||
class BaseSettings(EmbeddedDocument):
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
base_foo = StringField()
|
||||
|
||||
class AdminSettings(BaseSettings):
|
||||
@@ -93,26 +105,26 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo'))
|
||||
p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
|
||||
p.save()
|
||||
|
||||
# Test non exiting attribute
|
||||
with self.assertRaises(InvalidQueryError) as ctx_err:
|
||||
self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id)
|
||||
self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
|
||||
with pytest.raises(InvalidQueryError) as exc_info:
|
||||
assert Person.objects(settings__notexist="bar").first().id == p.id
|
||||
assert str(exc_info.value) == u'Cannot resolve field "notexist"'
|
||||
|
||||
# Test existing attribute
|
||||
self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id)
|
||||
self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id)
|
||||
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
|
||||
assert Person.objects(settings__sub_foo="subfoo").first().id == p.id
|
||||
|
||||
only_p = Person.objects.only('settings.base_foo', 'settings._cls').first()
|
||||
self.assertEqual(only_p.settings.base_foo, 'basefoo')
|
||||
self.assertIsNone(only_p.settings.sub_foo)
|
||||
only_p = Person.objects.only("settings.base_foo", "settings._cls").first()
|
||||
assert only_p.settings.base_foo == "basefoo"
|
||||
assert only_p.settings.sub_foo is None
|
||||
|
||||
def test_query_list_embedded_document_with_inheritance(self):
|
||||
class Post(EmbeddedDocument):
|
||||
title = StringField(max_length=120, required=True)
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class TextPost(Post):
|
||||
content = StringField()
|
||||
@@ -123,22 +135,21 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
|
||||
class Record(Document):
|
||||
posts = ListField(EmbeddedDocumentField(Post))
|
||||
|
||||
record_movie = Record(posts=[MoviePost(author='John', title='foo')]).save()
|
||||
record_text = Record(posts=[TextPost(content='a', title='foo')]).save()
|
||||
record_movie = Record(posts=[MoviePost(author="John", title="foo")]).save()
|
||||
record_text = Record(posts=[TextPost(content="a", title="foo")]).save()
|
||||
|
||||
records = list(Record.objects(posts__author=record_movie.posts[0].author))
|
||||
self.assertEqual(len(records), 1)
|
||||
self.assertEqual(records[0].id, record_movie.id)
|
||||
assert len(records) == 1
|
||||
assert records[0].id == record_movie.id
|
||||
|
||||
records = list(Record.objects(posts__content=record_text.posts[0].content))
|
||||
self.assertEqual(len(records), 1)
|
||||
self.assertEqual(records[0].id, record_text.id)
|
||||
assert len(records) == 1
|
||||
assert records[0].id == record_text.id
|
||||
|
||||
self.assertEqual(Record.objects(posts__title='foo').count(), 2)
|
||||
assert Record.objects(posts__title="foo").count() == 2
|
||||
|
||||
|
||||
class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
|
||||
def test_generic_embedded_document(self):
|
||||
class Car(EmbeddedDocument):
|
||||
name = StringField()
|
||||
@@ -153,21 +164,22 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
person = Person(name='Test User')
|
||||
person.like = Car(name='Fiat')
|
||||
person = Person(name="Test User")
|
||||
person.like = Car(name="Fiat")
|
||||
person.save()
|
||||
|
||||
person = Person.objects.first()
|
||||
self.assertIsInstance(person.like, Car)
|
||||
assert isinstance(person.like, Car)
|
||||
|
||||
person.like = Dish(food="arroz", number=15)
|
||||
person.save()
|
||||
|
||||
person = Person.objects.first()
|
||||
self.assertIsInstance(person.like, Dish)
|
||||
assert isinstance(person.like, Dish)
|
||||
|
||||
def test_generic_embedded_document_choices(self):
|
||||
"""Ensure you can limit GenericEmbeddedDocument choices."""
|
||||
|
||||
class Car(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
@@ -181,20 +193,22 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
person = Person(name='Test User')
|
||||
person.like = Car(name='Fiat')
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person = Person(name="Test User")
|
||||
person.like = Car(name="Fiat")
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
|
||||
person.like = Dish(food="arroz", number=15)
|
||||
person.save()
|
||||
|
||||
person = Person.objects.first()
|
||||
self.assertIsInstance(person.like, Dish)
|
||||
assert isinstance(person.like, Dish)
|
||||
|
||||
def test_generic_list_embedded_document_choices(self):
|
||||
"""Ensure you can limit GenericEmbeddedDocument choices inside
|
||||
a list field.
|
||||
"""
|
||||
|
||||
class Car(EmbeddedDocument):
|
||||
name = StringField()
|
||||
|
||||
@@ -208,39 +222,38 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
person = Person(name='Test User')
|
||||
person.likes = [Car(name='Fiat')]
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person = Person(name="Test User")
|
||||
person.likes = [Car(name="Fiat")]
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
|
||||
person.likes = [Dish(food="arroz", number=15)]
|
||||
person.save()
|
||||
|
||||
person = Person.objects.first()
|
||||
self.assertIsInstance(person.likes[0], Dish)
|
||||
assert isinstance(person.likes[0], Dish)
|
||||
|
||||
def test_choices_validation_documents(self):
|
||||
"""
|
||||
Ensure fields with document choices validate given a valid choice.
|
||||
"""
|
||||
|
||||
class UserComments(EmbeddedDocument):
|
||||
author = StringField()
|
||||
message = StringField()
|
||||
|
||||
class BlogPost(Document):
|
||||
comments = ListField(
|
||||
GenericEmbeddedDocumentField(choices=(UserComments,))
|
||||
)
|
||||
comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))
|
||||
|
||||
# Ensure Validation Passes
|
||||
BlogPost(comments=[
|
||||
UserComments(author='user2', message='message2'),
|
||||
]).save()
|
||||
BlogPost(comments=[UserComments(author="user2", message="message2")]).save()
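
For reference, the choices restriction demonstrated above reduces to the following sketch; a MongoDB connection is assumed and the embedded classes are illustrative:

import pytest
from mongoengine import (
    Document,
    EmbeddedDocument,
    GenericEmbeddedDocumentField,
    ListField,
    StringField,
    ValidationError,
    connect,
)

connect(db="mongoenginetest")


class UserComments(EmbeddedDocument):
    author = StringField()
    message = StringField()


class OtherComments(EmbeddedDocument):
    author = StringField()


class BlogPost(Document):
    comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))


BlogPost.drop_collection()
BlogPost(comments=[UserComments(author="user2", message="message2")]).save()  # allowed choice

with pytest.raises(ValidationError):
    BlogPost(comments=[OtherComments(author="mod1")]).save()  # embedded type not in choices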
|
||||
|
||||
def test_choices_validation_documents_invalid(self):
|
||||
"""
|
||||
Ensure fields with document choices validate given an invalid choice.
|
||||
This should throw a ValidationError exception.
|
||||
"""
|
||||
|
||||
class UserComments(EmbeddedDocument):
|
||||
author = StringField()
|
||||
message = StringField()
|
||||
@@ -250,31 +263,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
message = StringField()
|
||||
|
||||
class BlogPost(Document):
|
||||
comments = ListField(
|
||||
GenericEmbeddedDocumentField(choices=(UserComments,))
|
||||
)
|
||||
comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))
|
||||
|
||||
# Single Entry Failure
|
||||
post = BlogPost(comments=[
|
||||
ModeratorComments(author='mod1', message='message1'),
|
||||
])
|
||||
self.assertRaises(ValidationError, post.save)
|
||||
post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")])
|
||||
with pytest.raises(ValidationError):
|
||||
post.save()
|
||||
|
||||
# Mixed Entry Failure
|
||||
post = BlogPost(comments=[
|
||||
ModeratorComments(author='mod1', message='message1'),
|
||||
UserComments(author='user2', message='message2'),
|
||||
])
|
||||
self.assertRaises(ValidationError, post.save)
|
||||
post = BlogPost(
|
||||
comments=[
|
||||
ModeratorComments(author="mod1", message="message1"),
|
||||
UserComments(author="user2", message="message2"),
|
||||
]
|
||||
)
|
||||
with pytest.raises(ValidationError):
|
||||
post.save()
|
||||
|
||||
def test_choices_validation_documents_inheritance(self):
|
||||
"""
|
||||
Ensure fields with document choices validate given subclass of choice.
|
||||
"""
|
||||
|
||||
class Comments(EmbeddedDocument):
|
||||
meta = {
|
||||
'abstract': True
|
||||
}
|
||||
meta = {"abstract": True}
|
||||
author = StringField()
|
||||
message = StringField()
|
||||
|
||||
@@ -282,14 +294,10 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
pass
|
||||
|
||||
class BlogPost(Document):
|
||||
comments = ListField(
|
||||
GenericEmbeddedDocumentField(choices=(Comments,))
|
||||
)
|
||||
comments = ListField(GenericEmbeddedDocumentField(choices=(Comments,)))
|
||||
|
||||
# Save Valid EmbeddedDocument Type
|
||||
BlogPost(comments=[
|
||||
UserComments(author='user2', message='message2'),
|
||||
]).save()
|
||||
BlogPost(comments=[UserComments(author="user2", message="message2")]).save()
|
||||
|
||||
def test_query_generic_embedded_document_attribute(self):
|
||||
class AdminSettings(EmbeddedDocument):
|
||||
@@ -299,28 +307,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
foo2 = StringField()
|
||||
|
||||
class Person(Document):
|
||||
settings = GenericEmbeddedDocumentField(choices=(AdminSettings, NonAdminSettings))
|
||||
settings = GenericEmbeddedDocumentField(
|
||||
choices=(AdminSettings, NonAdminSettings)
|
||||
)
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(settings=AdminSettings(foo1='bar1')).save()
|
||||
p2 = Person(settings=NonAdminSettings(foo2='bar2')).save()
|
||||
p1 = Person(settings=AdminSettings(foo1="bar1")).save()
|
||||
p2 = Person(settings=NonAdminSettings(foo2="bar2")).save()
|
||||
|
||||
# Test non exiting attribute
|
||||
with self.assertRaises(InvalidQueryError) as ctx_err:
|
||||
Person.objects(settings__notexist='bar').first()
|
||||
self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
|
||||
with pytest.raises(InvalidQueryError) as exc_info:
|
||||
Person.objects(settings__notexist="bar").first()
|
||||
assert str(exc_info.value) == u'Cannot resolve field "notexist"'
|
||||
|
||||
with self.assertRaises(LookUpError):
|
||||
Person.objects.only('settings.notexist')
|
||||
with pytest.raises(LookUpError):
|
||||
Person.objects.only("settings.notexist")
|
||||
|
||||
# Test existing attribute
|
||||
self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p1.id)
|
||||
self.assertEqual(Person.objects(settings__foo2='bar2').first().id, p2.id)
|
||||
assert Person.objects(settings__foo1="bar1").first().id == p1.id
|
||||
assert Person.objects(settings__foo2="bar2").first().id == p2.id
|
||||
|
||||
def test_query_generic_embedded_document_attribute_with_inheritance(self):
|
||||
class BaseSettings(EmbeddedDocument):
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
base_foo = StringField()
|
||||
|
||||
class AdminSettings(BaseSettings):
|
||||
@@ -331,14 +341,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo'))
|
||||
p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
|
||||
p.save()
|
||||
|
||||
# Test non exiting attribute
|
||||
with self.assertRaises(InvalidQueryError) as ctx_err:
|
||||
self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id)
|
||||
self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"')
|
||||
with pytest.raises(InvalidQueryError) as exc_info:
|
||||
assert Person.objects(settings__notexist="bar").first().id == p.id
|
||||
assert str(exc_info.value) == u'Cannot resolve field "notexist"'
|
||||
|
||||
# Test existing attribute
|
||||
self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id)
|
||||
self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id)
|
||||
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
|
||||
assert Person.objects(settings__sub_foo="subfoo").first().id == p.id
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,49 +1,51 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import copy
|
||||
import os
|
||||
import unittest
|
||||
import tempfile
|
||||
import unittest
|
||||
from io import BytesIO

import gridfs
import six
import pytest

from nose.plugins.skip import SkipTest
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import StringIO

try:
from PIL import Image

HAS_PIL = True
except ImportError:
HAS_PIL = False

from tests.utils import MongoDBTestCase

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png')
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png')
require_pil = pytest.mark.skipif(not HAS_PIL, reason="PIL not installed")

TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png")
TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png")


def get_file(path):
"""Use a BytesIO instead of a file to allow
to have a one-liner and avoid leaving the file open"""
bytes_io = StringIO()
with open(path, 'rb') as f:
bytes_io = BytesIO()
with open(path, "rb") as f:
bytes_io.write(f.read())
bytes_io.seek(0)
return bytes_io
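
The `require_pil` marker defined above replaces the in-body `raise SkipTest(...)` guards used further down in this file. A small self-contained sketch of that skipif pattern; the Pillow test body is illustrative, and `tmp_path` is pytest's built-in temporary-directory fixture:

import pytest

try:
    from PIL import Image

    HAS_PIL = True
except ImportError:
    HAS_PIL = False

require_pil = pytest.mark.skipif(not HAS_PIL, reason="PIL not installed")


class TestNeedsPIL:
    @require_pil
    def test_open_image(self, tmp_path):
        # Only runs when Pillow is importable; otherwise reported as skipped.
        img = Image.new("RGB", (4, 4))
        path = tmp_path / "tiny.png"
        img.save(path)
        assert Image.open(path).size == (4, 4)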
|
||||
|
||||
|
||||
class FileTest(MongoDBTestCase):
|
||||
|
||||
class TestFileField(MongoDBTestCase):
|
||||
def tearDown(self):
|
||||
self.db.drop_collection('fs.files')
|
||||
self.db.drop_collection('fs.chunks')
|
||||
self.db.drop_collection("fs.files")
|
||||
self.db.drop_collection("fs.chunks")
|
||||
|
||||
def test_file_field_optional(self):
|
||||
# Make sure FileField is optional and not required
|
||||
class DemoFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
DemoFile.objects.create()
|
||||
|
||||
def test_file_fields(self):
|
||||
@@ -55,18 +57,21 @@ class FileTest(MongoDBTestCase):
|
||||
|
||||
PutFile.drop_collection()
|
||||
|
||||
text = six.b('Hello, World!')
|
||||
content_type = 'text/plain'
|
||||
text = "Hello, World!".encode("latin-1")
|
||||
content_type = "text/plain"
|
||||
|
||||
putfile = PutFile()
|
||||
putfile.the_file.put(text, content_type=content_type, filename="hello")
|
||||
putfile.save()
|
||||
|
||||
result = PutFile.objects.first()
|
||||
self.assertEqual(putfile, result)
|
||||
self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello (%s)>" % result.the_file.grid_id)
|
||||
self.assertEqual(result.the_file.read(), text)
|
||||
self.assertEqual(result.the_file.content_type, content_type)
|
||||
assert putfile == result
|
||||
assert (
|
||||
"%s" % result.the_file
|
||||
== "<GridFSProxy: hello (%s)>" % result.the_file.grid_id
|
||||
)
|
||||
assert result.the_file.read() == text
|
||||
assert result.the_file.content_type == content_type
|
||||
result.the_file.delete() # Remove file from GridFS
|
||||
PutFile.objects.delete()
|
||||
|
||||
@@ -74,29 +79,30 @@ class FileTest(MongoDBTestCase):
|
||||
PutFile.drop_collection()
|
||||
|
||||
putfile = PutFile()
|
||||
putstring = StringIO()
|
||||
putstring = BytesIO()
|
||||
putstring.write(text)
|
||||
putstring.seek(0)
|
||||
putfile.the_file.put(putstring, content_type=content_type)
|
||||
putfile.save()
|
||||
|
||||
result = PutFile.objects.first()
|
||||
self.assertEqual(putfile, result)
|
||||
self.assertEqual(result.the_file.read(), text)
|
||||
self.assertEqual(result.the_file.content_type, content_type)
|
||||
assert putfile == result
|
||||
assert result.the_file.read() == text
|
||||
assert result.the_file.content_type == content_type
|
||||
result.the_file.delete()
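
The put/read/delete round trip in this test is the core FileField workflow. A trimmed, standalone sketch of it (MongoDB connection assumed, class name borrowed from the test):

from mongoengine import Document, FileField, connect

connect(db="mongoenginetest")


class PutFile(Document):
    the_file = FileField()


PutFile.drop_collection()

text = b"Hello, World!"
putfile = PutFile()
putfile.the_file.put(text, content_type="text/plain", filename="hello")
putfile.save()

result = PutFile.objects.first()
assert result.the_file.read() == text
assert result.the_file.content_type == "text/plain"
result.the_file.delete()  # removes the underlying GridFS file as well as the reference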
|
||||
|
||||
def test_file_fields_stream(self):
|
||||
"""Ensure that file fields can be written to and their data retrieved
|
||||
"""
|
||||
|
||||
class StreamFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
StreamFile.drop_collection()
|
||||
|
||||
text = six.b('Hello, World!')
|
||||
more_text = six.b('Foo Bar')
|
||||
content_type = 'text/plain'
|
||||
text = "Hello, World!".encode("latin-1")
|
||||
more_text = "Foo Bar".encode("latin-1")
|
||||
content_type = "text/plain"
|
||||
|
||||
streamfile = StreamFile()
|
||||
streamfile.the_file.new_file(content_type=content_type)
|
||||
@@ -106,32 +112,32 @@ class FileTest(MongoDBTestCase):
|
||||
streamfile.save()
|
||||
|
||||
result = StreamFile.objects.first()
|
||||
self.assertEqual(streamfile, result)
|
||||
self.assertEqual(result.the_file.read(), text + more_text)
|
||||
self.assertEqual(result.the_file.content_type, content_type)
|
||||
assert streamfile == result
|
||||
assert result.the_file.read() == text + more_text
|
||||
assert result.the_file.content_type == content_type
|
||||
result.the_file.seek(0)
|
||||
self.assertEqual(result.the_file.tell(), 0)
|
||||
self.assertEqual(result.the_file.read(len(text)), text)
|
||||
self.assertEqual(result.the_file.tell(), len(text))
|
||||
self.assertEqual(result.the_file.read(len(more_text)), more_text)
|
||||
self.assertEqual(result.the_file.tell(), len(text + more_text))
|
||||
assert result.the_file.tell() == 0
|
||||
assert result.the_file.read(len(text)) == text
|
||||
assert result.the_file.tell() == len(text)
|
||||
assert result.the_file.read(len(more_text)) == more_text
|
||||
assert result.the_file.tell() == len(text + more_text)
|
||||
result.the_file.delete()
|
||||
|
||||
# Ensure deleted file returns None
|
||||
self.assertTrue(result.the_file.read() is None)
|
||||
assert result.the_file.read() is None
|
||||
|
||||
def test_file_fields_stream_after_none(self):
|
||||
"""Ensure that a file field can be written to after it has been saved as
|
||||
None
|
||||
"""
|
||||
|
||||
class StreamFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
StreamFile.drop_collection()
|
||||
|
||||
text = six.b('Hello, World!')
|
||||
more_text = six.b('Foo Bar')
|
||||
content_type = 'text/plain'
|
||||
text = "Hello, World!".encode("latin-1")
|
||||
more_text = "Foo Bar".encode("latin-1")
|
||||
|
||||
streamfile = StreamFile()
|
||||
streamfile.save()
|
||||
@@ -142,27 +148,26 @@ class FileTest(MongoDBTestCase):
|
||||
streamfile.save()
|
||||
|
||||
result = StreamFile.objects.first()
|
||||
self.assertEqual(streamfile, result)
|
||||
self.assertEqual(result.the_file.read(), text + more_text)
|
||||
# self.assertEqual(result.the_file.content_type, content_type)
|
||||
assert streamfile == result
|
||||
assert result.the_file.read() == text + more_text
|
||||
# assert result.the_file.content_type == content_type
|
||||
result.the_file.seek(0)
|
||||
self.assertEqual(result.the_file.tell(), 0)
|
||||
self.assertEqual(result.the_file.read(len(text)), text)
|
||||
self.assertEqual(result.the_file.tell(), len(text))
|
||||
self.assertEqual(result.the_file.read(len(more_text)), more_text)
|
||||
self.assertEqual(result.the_file.tell(), len(text + more_text))
|
||||
assert result.the_file.tell() == 0
|
||||
assert result.the_file.read(len(text)) == text
|
||||
assert result.the_file.tell() == len(text)
|
||||
assert result.the_file.read(len(more_text)) == more_text
|
||||
assert result.the_file.tell() == len(text + more_text)
|
||||
result.the_file.delete()
|
||||
|
||||
# Ensure deleted file returns None
|
||||
self.assertTrue(result.the_file.read() is None)
|
||||
assert result.the_file.read() is None
|
||||
|
||||
def test_file_fields_set(self):
|
||||
|
||||
class SetFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
text = six.b('Hello, World!')
|
||||
more_text = six.b('Foo Bar')
|
||||
text = "Hello, World!".encode("latin-1")
|
||||
more_text = "Foo Bar".encode("latin-1")
|
||||
|
||||
SetFile.drop_collection()
|
||||
|
||||
@@ -171,27 +176,26 @@ class FileTest(MongoDBTestCase):
|
||||
setfile.save()
|
||||
|
||||
result = SetFile.objects.first()
|
||||
self.assertEqual(setfile, result)
|
||||
self.assertEqual(result.the_file.read(), text)
|
||||
assert setfile == result
|
||||
assert result.the_file.read() == text
|
||||
|
||||
# Try replacing file with new one
|
||||
result.the_file.replace(more_text)
|
||||
result.save()
|
||||
|
||||
result = SetFile.objects.first()
|
||||
self.assertEqual(setfile, result)
|
||||
self.assertEqual(result.the_file.read(), more_text)
|
||||
assert setfile == result
|
||||
assert result.the_file.read() == more_text
|
||||
result.the_file.delete()
|
||||
|
||||
def test_file_field_no_default(self):
|
||||
|
||||
class GridDocument(Document):
|
||||
the_file = FileField()
|
||||
|
||||
GridDocument.drop_collection()
|
||||
|
||||
with tempfile.TemporaryFile() as f:
|
||||
f.write(six.b("Hello World!"))
|
||||
f.write("Hello World!".encode("latin-1"))
|
||||
f.flush()
|
||||
|
||||
# Test without default
|
||||
@@ -199,34 +203,35 @@ class FileTest(MongoDBTestCase):
|
||||
doc_a.save()
|
||||
|
||||
doc_b = GridDocument.objects.with_id(doc_a.id)
|
||||
doc_b.the_file.replace(f, filename='doc_b')
|
||||
doc_b.the_file.replace(f, filename="doc_b")
|
||||
doc_b.save()
|
||||
self.assertNotEqual(doc_b.the_file.grid_id, None)
|
||||
assert doc_b.the_file.grid_id is not None
|
||||
|
||||
# Test it matches
|
||||
doc_c = GridDocument.objects.with_id(doc_b.id)
|
||||
self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id)
|
||||
assert doc_b.the_file.grid_id == doc_c.the_file.grid_id
|
||||
|
||||
# Test with default
|
||||
doc_d = GridDocument(the_file=six.b(''))
|
||||
doc_d = GridDocument(the_file="".encode("latin-1"))
|
||||
doc_d.save()
|
||||
|
||||
doc_e = GridDocument.objects.with_id(doc_d.id)
|
||||
self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id)
|
||||
assert doc_d.the_file.grid_id == doc_e.the_file.grid_id
|
||||
|
||||
doc_e.the_file.replace(f, filename='doc_e')
|
||||
doc_e.the_file.replace(f, filename="doc_e")
|
||||
doc_e.save()
|
||||
|
||||
doc_f = GridDocument.objects.with_id(doc_e.id)
|
||||
self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id)
|
||||
assert doc_e.the_file.grid_id == doc_f.the_file.grid_id
|
||||
|
||||
db = GridDocument._get_db()
|
||||
grid_fs = gridfs.GridFS(db)
|
||||
self.assertEqual(['doc_b', 'doc_e'], grid_fs.list())
|
||||
assert ["doc_b", "doc_e"] == grid_fs.list()
|
||||
|
||||
def test_file_uniqueness(self):
|
||||
"""Ensure that each instance of a FileField is unique
|
||||
"""
|
||||
|
||||
class TestFile(Document):
|
||||
name = StringField()
|
||||
the_file = FileField()
|
||||
@@ -234,15 +239,15 @@ class FileTest(MongoDBTestCase):
|
||||
# First instance
|
||||
test_file = TestFile()
|
||||
test_file.name = "Hello, World!"
|
||||
test_file.the_file.put(six.b('Hello, World!'))
|
||||
test_file.the_file.put("Hello, World!".encode("latin-1"))
|
||||
test_file.save()
|
||||
|
||||
# Second instance
|
||||
test_file_dupe = TestFile()
|
||||
data = test_file_dupe.the_file.read() # Should be None
|
||||
|
||||
self.assertNotEqual(test_file.name, test_file_dupe.name)
|
||||
self.assertNotEqual(test_file.the_file.read(), data)
|
||||
assert test_file.name != test_file_dupe.name
|
||||
assert test_file.the_file.read() != data
|
||||
|
||||
TestFile.drop_collection()
|
||||
|
||||
@@ -255,61 +260,68 @@ class FileTest(MongoDBTestCase):
|
||||
photo = FileField()
|
||||
|
||||
Animal.drop_collection()
|
||||
marmot = Animal(genus='Marmota', family='Sciuridae')
|
||||
marmot = Animal(genus="Marmota", family="Sciuridae")
|
||||
|
||||
marmot_photo_content = get_file(TEST_IMAGE_PATH) # Retrieve a photo from disk
|
||||
marmot.photo.put(marmot_photo_content, content_type='image/jpeg', foo='bar')
|
||||
marmot.photo.put(marmot_photo_content, content_type="image/jpeg", foo="bar")
|
||||
marmot.photo.close()
|
||||
marmot.save()
|
||||
|
||||
marmot = Animal.objects.get()
|
||||
self.assertEqual(marmot.photo.content_type, 'image/jpeg')
|
||||
self.assertEqual(marmot.photo.foo, 'bar')
|
||||
assert marmot.photo.content_type == "image/jpeg"
|
||||
assert marmot.photo.foo == "bar"
|
||||
|
||||
def test_file_reassigning(self):
|
||||
class TestFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
TestFile.drop_collection()
|
||||
|
||||
test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
|
||||
self.assertEqual(test_file.the_file.get().length, 8313)
|
||||
assert test_file.the_file.get().length == 8313
|
||||
|
||||
test_file = TestFile.objects.first()
|
||||
test_file.the_file = get_file(TEST_IMAGE2_PATH)
|
||||
test_file.save()
|
||||
self.assertEqual(test_file.the_file.get().length, 4971)
|
||||
assert test_file.the_file.get().length == 4971
|
||||
|
||||
def test_file_boolean(self):
|
||||
"""Ensure that a boolean test of a FileField indicates its presence
|
||||
"""
|
||||
|
||||
class TestFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
TestFile.drop_collection()
|
||||
|
||||
test_file = TestFile()
|
||||
self.assertFalse(bool(test_file.the_file))
|
||||
test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain')
|
||||
assert not bool(test_file.the_file)
|
||||
test_file.the_file.put(
|
||||
"Hello, World!".encode("latin-1"), content_type="text/plain"
|
||||
)
|
||||
test_file.save()
|
||||
self.assertTrue(bool(test_file.the_file))
|
||||
assert bool(test_file.the_file)
|
||||
|
||||
test_file = TestFile.objects.first()
|
||||
self.assertEqual(test_file.the_file.content_type, "text/plain")
|
||||
assert test_file.the_file.content_type == "text/plain"
|
||||
|
||||
def test_file_cmp(self):
|
||||
"""Test comparing against other types"""
|
||||
|
||||
class TestFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
test_file = TestFile()
|
||||
self.assertNotIn(test_file.the_file, [{"test": 1}])
|
||||
assert test_file.the_file not in [{"test": 1}]
|
||||
|
||||
def test_file_disk_space(self):
|
||||
""" Test disk space usage when we delete/replace a file """
|
||||
|
||||
class TestFile(Document):
|
||||
the_file = FileField()
|
||||
|
||||
text = six.b('Hello, World!')
|
||||
content_type = 'text/plain'
|
||||
text = "Hello, World!".encode("latin-1")
|
||||
content_type = "text/plain"
|
||||
|
||||
testfile = TestFile()
|
||||
testfile.the_file.put(text, content_type=content_type, filename="hello")
|
||||
@@ -320,16 +332,16 @@ class FileTest(MongoDBTestCase):
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEqual(len(list(files)), 1)
|
||||
self.assertEqual(len(list(chunks)), 1)
|
||||
assert len(list(files)) == 1
|
||||
assert len(list(chunks)) == 1
|
||||
|
||||
# Deleting the document should delete the files
|
||||
testfile.delete()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEqual(len(list(files)), 0)
|
||||
self.assertEqual(len(list(chunks)), 0)
|
||||
assert len(list(files)) == 0
|
||||
assert len(list(chunks)) == 0
|
||||
|
||||
# Test case where we don't store a file in the first place
|
||||
testfile = TestFile()
|
||||
@@ -337,48 +349,46 @@ class FileTest(MongoDBTestCase):
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEqual(len(list(files)), 0)
|
||||
self.assertEqual(len(list(chunks)), 0)
|
||||
assert len(list(files)) == 0
|
||||
assert len(list(chunks)) == 0
|
||||
|
||||
testfile.delete()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEqual(len(list(files)), 0)
|
||||
self.assertEqual(len(list(chunks)), 0)
|
||||
assert len(list(files)) == 0
|
||||
assert len(list(chunks)) == 0
|
||||
|
||||
# Test case where we overwrite the file
|
||||
testfile = TestFile()
|
||||
testfile.the_file.put(text, content_type=content_type, filename="hello")
|
||||
testfile.save()
|
||||
|
||||
text = six.b('Bonjour, World!')
|
||||
text = "Bonjour, World!".encode("latin-1")
|
||||
testfile.the_file.replace(text, content_type=content_type, filename="hello")
|
||||
testfile.save()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEqual(len(list(files)), 1)
|
||||
self.assertEqual(len(list(chunks)), 1)
|
||||
assert len(list(files)) == 1
|
||||
assert len(list(chunks)) == 1
|
||||
|
||||
testfile.delete()
|
||||
|
||||
files = db.fs.files.find()
|
||||
chunks = db.fs.chunks.find()
|
||||
self.assertEqual(len(list(files)), 0)
|
||||
self.assertEqual(len(list(chunks)), 0)
|
||||
assert len(list(files)) == 0
|
||||
assert len(list(chunks)) == 0
|
||||
|
||||
@require_pil
|
||||
def test_image_field(self):
|
||||
if not HAS_PIL:
|
||||
raise SkipTest('PIL not installed')
|
||||
|
||||
class TestImage(Document):
|
||||
image = ImageField()
|
||||
|
||||
TestImage.drop_collection()
|
||||
|
||||
with tempfile.TemporaryFile() as f:
|
||||
f.write(six.b("Hello World!"))
|
||||
f.write("Hello World!".encode("latin-1"))
|
||||
f.flush()
|
||||
|
||||
t = TestImage()
|
||||
@@ -386,7 +396,7 @@ class FileTest(MongoDBTestCase):
|
||||
t.image.put(f)
|
||||
self.fail("Should have raised an invalidation error")
|
||||
except ValidationError as e:
|
||||
self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f)
|
||||
assert "%s" % e == "Invalid image: cannot identify image file %s" % f
|
||||
|
||||
t = TestImage()
|
||||
t.image.put(get_file(TEST_IMAGE_PATH))
|
||||
@@ -394,34 +404,31 @@ class FileTest(MongoDBTestCase):
|
||||
|
||||
t = TestImage.objects.first()
|
||||
|
||||
self.assertEqual(t.image.format, 'PNG')
|
||||
assert t.image.format == "PNG"
|
||||
|
||||
w, h = t.image.size
|
||||
self.assertEqual(w, 371)
|
||||
self.assertEqual(h, 76)
|
||||
assert w == 371
|
||||
assert h == 76
|
||||
|
||||
t.image.delete()
|
||||
|
||||
@require_pil
|
||||
def test_image_field_reassigning(self):
|
||||
if not HAS_PIL:
|
||||
raise SkipTest('PIL not installed')
|
||||
|
||||
class TestFile(Document):
|
||||
the_file = ImageField()
|
||||
|
||||
TestFile.drop_collection()
|
||||
|
||||
test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save()
|
||||
self.assertEqual(test_file.the_file.size, (371, 76))
|
||||
assert test_file.the_file.size == (371, 76)
|
||||
|
||||
test_file = TestFile.objects.first()
|
||||
test_file.the_file = get_file(TEST_IMAGE2_PATH)
|
||||
test_file.save()
|
||||
self.assertEqual(test_file.the_file.size, (45, 101))
|
||||
assert test_file.the_file.size == (45, 101)
|
||||
|
||||
@require_pil
|
||||
def test_image_field_resize(self):
|
||||
if not HAS_PIL:
|
||||
raise SkipTest('PIL not installed')
|
||||
|
||||
class TestImage(Document):
|
||||
image = ImageField(size=(185, 37))
|
||||
|
||||
@@ -433,18 +440,16 @@ class FileTest(MongoDBTestCase):
|
||||
|
||||
t = TestImage.objects.first()
|
||||
|
||||
self.assertEqual(t.image.format, 'PNG')
|
||||
assert t.image.format == "PNG"
|
||||
w, h = t.image.size
|
||||
|
||||
self.assertEqual(w, 185)
|
||||
self.assertEqual(h, 37)
|
||||
assert w == 185
|
||||
assert h == 37
|
||||
|
||||
t.image.delete()
|
||||
|
||||
@require_pil
|
||||
def test_image_field_resize_force(self):
|
||||
if not HAS_PIL:
|
||||
raise SkipTest('PIL not installed')
|
||||
|
||||
class TestImage(Document):
|
||||
image = ImageField(size=(185, 37, True))
|
||||
|
||||
@@ -456,18 +461,16 @@ class FileTest(MongoDBTestCase):
|
||||
|
||||
t = TestImage.objects.first()
|
||||
|
||||
self.assertEqual(t.image.format, 'PNG')
|
||||
assert t.image.format == "PNG"
|
||||
w, h = t.image.size
|
||||
|
||||
self.assertEqual(w, 185)
|
||||
self.assertEqual(h, 37)
|
||||
assert w == 185
|
||||
assert h == 37
|
||||
|
||||
t.image.delete()
|
||||
|
||||
@require_pil
|
||||
def test_image_field_thumbnail(self):
|
||||
if not HAS_PIL:
|
||||
raise SkipTest('PIL not installed')
|
||||
|
||||
class TestImage(Document):
|
||||
image = ImageField(thumbnail_size=(92, 18))
|
||||
|
||||
@@ -479,19 +482,18 @@ class FileTest(MongoDBTestCase):
|
||||
|
||||
t = TestImage.objects.first()
|
||||
|
||||
self.assertEqual(t.image.thumbnail.format, 'PNG')
|
||||
self.assertEqual(t.image.thumbnail.width, 92)
|
||||
self.assertEqual(t.image.thumbnail.height, 18)
|
||||
assert t.image.thumbnail.format == "PNG"
|
||||
assert t.image.thumbnail.width == 92
|
||||
assert t.image.thumbnail.height == 18
|
||||
|
||||
t.image.delete()
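
The three image tests above combine into the following sketch of ImageField's `size` and `thumbnail_size` options; Pillow and a MongoDB connection are assumed, and the source PNG path is a placeholder:

from mongoengine import Document, ImageField, connect

connect(db="mongoenginetest")


class ResizedImage(Document):
    # size=(width, height, force) rescales the stored image;
    # thumbnail_size stores a thumbnail alongside it.
    image = ImageField(size=(185, 37, True), thumbnail_size=(92, 18))


ResizedImage.drop_collection()

doc = ResizedImage()
with open("some_image.png", "rb") as f:  # placeholder path to any PNG
    doc.image.put(f)
doc.save()

doc = ResizedImage.objects.first()
print(doc.image.format, doc.image.size)                       # e.g. PNG (185, 37)
print(doc.image.thumbnail.width, doc.image.thumbnail.height)  # e.g. 92 18
doc.image.delete()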
|
||||
|
||||
def test_file_multidb(self):
|
||||
register_connection('test_files', 'test_files')
|
||||
register_connection("test_files", "test_files")
|
||||
|
||||
class TestFile(Document):
|
||||
name = StringField()
|
||||
the_file = FileField(db_alias="test_files",
|
||||
collection_name="macumba")
|
||||
the_file = FileField(db_alias="test_files", collection_name="macumba")
|
||||
|
||||
TestFile.drop_collection()
|
||||
|
||||
@@ -502,23 +504,21 @@ class FileTest(MongoDBTestCase):
|
||||
# First instance
|
||||
test_file = TestFile()
|
||||
test_file.name = "Hello, World!"
|
||||
test_file.the_file.put(six.b('Hello, World!'),
|
||||
name="hello.txt")
|
||||
test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt")
|
||||
test_file.save()
|
||||
|
||||
data = get_db("test_files").macumba.files.find_one()
|
||||
self.assertEqual(data.get('name'), 'hello.txt')
|
||||
assert data.get("name") == "hello.txt"
|
||||
|
||||
test_file = TestFile.objects.first()
|
||||
self.assertEqual(test_file.the_file.read(), six.b('Hello, World!'))
|
||||
assert test_file.the_file.read() == "Hello, World!".encode("latin-1")
|
||||
|
||||
test_file = TestFile.objects.first()
|
||||
test_file.the_file = six.b('HELLO, WORLD!')
|
||||
test_file.the_file = "Hello, World!".encode("latin-1")
|
||||
test_file.save()
|
||||
|
||||
test_file = TestFile.objects.first()
|
||||
self.assertEqual(test_file.the_file.read(),
|
||||
six.b('HELLO, WORLD!'))
|
||||
assert test_file.the_file.read() == "Hello, World!".encode("latin-1")
|
||||
|
||||
def test_copyable(self):
|
||||
class PutFile(Document):
|
||||
@@ -526,8 +526,8 @@ class FileTest(MongoDBTestCase):
|
||||
|
||||
PutFile.drop_collection()
|
||||
|
||||
text = six.b('Hello, World!')
|
||||
content_type = 'text/plain'
|
||||
text = "Hello, World!".encode("latin-1")
|
||||
content_type = "text/plain"
|
||||
|
||||
putfile = PutFile()
|
||||
putfile.the_file.put(text, content_type=content_type)
|
||||
@@ -536,14 +536,11 @@ class FileTest(MongoDBTestCase):
|
||||
class TestFile(Document):
|
||||
name = StringField()
|
||||
|
||||
self.assertEqual(putfile, copy.copy(putfile))
|
||||
self.assertEqual(putfile, copy.deepcopy(putfile))
|
||||
assert putfile == copy.copy(putfile)
|
||||
assert putfile == copy.deepcopy(putfile)
|
||||
|
||||
@require_pil
|
||||
def test_get_image_by_grid_id(self):
|
||||
|
||||
if not HAS_PIL:
|
||||
raise SkipTest('PIL not installed')
|
||||
|
||||
class TestImage(Document):
|
||||
|
||||
image1 = ImageField()
|
||||
@@ -559,8 +556,7 @@ class FileTest(MongoDBTestCase):
|
||||
test = TestImage.objects.first()
|
||||
grid_id = test.image1.grid_id
|
||||
|
||||
self.assertEqual(1, TestImage.objects(Q(image1=grid_id)
|
||||
or Q(image2=grid_id)).count())
|
||||
assert 1 == TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count()
|
||||
|
||||
def test_complex_field_filefield(self):
|
||||
"""Ensure you can add meta data to file"""
|
||||
@@ -571,21 +567,21 @@ class FileTest(MongoDBTestCase):
|
||||
photos = ListField(FileField())
|
||||
|
||||
Animal.drop_collection()
|
||||
marmot = Animal(genus='Marmota', family='Sciuridae')
|
||||
marmot = Animal(genus="Marmota", family="Sciuridae")
|
||||
|
||||
with open(TEST_IMAGE_PATH, 'rb') as marmot_photo: # Retrieve a photo from disk
|
||||
photos_field = marmot._fields['photos'].field
|
||||
new_proxy = photos_field.get_proxy_obj('photos', marmot)
|
||||
new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar')
|
||||
with open(TEST_IMAGE_PATH, "rb") as marmot_photo: # Retrieve a photo from disk
|
||||
photos_field = marmot._fields["photos"].field
|
||||
new_proxy = photos_field.get_proxy_obj("photos", marmot)
|
||||
new_proxy.put(marmot_photo, content_type="image/jpeg", foo="bar")
|
||||
|
||||
marmot.photos.append(new_proxy)
|
||||
marmot.save()
|
||||
|
||||
marmot = Animal.objects.get()
|
||||
self.assertEqual(marmot.photos[0].content_type, 'image/jpeg')
|
||||
self.assertEqual(marmot.photos[0].foo, 'bar')
|
||||
self.assertEqual(marmot.photos[0].get().length, 8313)
|
||||
assert marmot.photos[0].content_type == "image/jpeg"
|
||||
assert marmot.photos[0].foo == "bar"
|
||||
assert marmot.photos[0].get().length == 8313
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import six
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
@@ -7,7 +7,6 @@ from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestFloatField(MongoDBTestCase):
|
||||
|
||||
def test_float_ne_operator(self):
|
||||
class TestDocument(Document):
|
||||
float_fld = FloatField()
|
||||
@@ -17,12 +16,13 @@ class TestFloatField(MongoDBTestCase):
|
||||
TestDocument(float_fld=None).save()
|
||||
TestDocument(float_fld=1).save()
|
||||
|
||||
self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
|
||||
self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count())
|
||||
assert 1 == TestDocument.objects(float_fld__ne=None).count()
|
||||
assert 1 == TestDocument.objects(float_fld__ne=1).count()
|
||||
|
||||
def test_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to float fields.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
height = FloatField(min_value=0.1, max_value=3.5)
|
||||
|
||||
@@ -33,26 +33,30 @@ class TestFloatField(MongoDBTestCase):
|
||||
person.height = 1.89
|
||||
person.validate()
|
||||
|
||||
person.height = '2.0'
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person.height = "2.0"
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
|
||||
person.height = 0.01
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
|
||||
person.height = 4.0
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
|
||||
person_2 = Person(height='something invalid')
|
||||
self.assertRaises(ValidationError, person_2.validate)
|
||||
person_2 = Person(height="something invalid")
|
||||
with pytest.raises(ValidationError):
|
||||
person_2.validate()
|
||||
|
||||
big_person = BigPerson()
|
||||
|
||||
for value, value_type in enumerate(six.integer_types):
|
||||
big_person.height = value_type(value)
|
||||
big_person.validate()
|
||||
big_person.height = int(0)
|
||||
big_person.validate()
|
||||
|
||||
big_person.height = 2 ** 500
|
||||
big_person.validate()
|
||||
|
||||
big_person.height = 2 ** 100000 # Too big for a float value
|
||||
self.assertRaises(ValidationError, big_person.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
big_person.validate()
|
||||
|
||||
@@ -2,30 +2,23 @@
import unittest

from mongoengine import *
from mongoengine.connection import get_db

__all__ = ("GeoFieldTest", )
from tests.utils import MongoDBTestCase


class GeoFieldTest(unittest.TestCase):

def setUp(self):
connect(db='mongoenginetest')
self.db = get_db()

class TestGeoField(MongoDBTestCase):
def _test_for_expected_error(self, Cls, loc, expected):
|
||||
try:
|
||||
Cls(loc=loc).validate()
|
||||
self.fail('Should not validate the location {0}'.format(loc))
|
||||
self.fail("Should not validate the location {0}".format(loc))
|
||||
except ValidationError as e:
|
||||
self.assertEqual(expected, e.to_dict()['loc'])
|
||||
assert expected == e.to_dict()["loc"]
|
||||
|
||||
def test_geopoint_validation(self):
|
||||
class Location(Document):
|
||||
loc = GeoPointField()
|
||||
|
||||
invalid_coords = [{"x": 1, "y": 2}, 5, "a"]
|
||||
expected = 'GeoPointField can only accept tuples or lists of (x, y)'
|
||||
expected = "GeoPointField can only accept tuples or lists of (x, y)"
|
||||
|
||||
for coord in invalid_coords:
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
@@ -40,7 +33,7 @@ class GeoFieldTest(unittest.TestCase):
|
||||
expected = "Both values (%s) in point must be float or int" % repr(coord)
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
invalid_coords = [21, 4, 'a']
|
||||
invalid_coords = [21, 4, "a"]
|
||||
for coord in invalid_coords:
|
||||
expected = "GeoPointField can only accept tuples or lists of (x, y)"
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
@@ -50,7 +43,9 @@ class GeoFieldTest(unittest.TestCase):
|
||||
loc = PointField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
expected = (
|
||||
"PointField can only accept a valid GeoJson dictionary or lists of (x, y)"
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": []}
|
||||
@@ -77,19 +72,16 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[1, 2]).validate()
|
||||
Location(loc={
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
81.4471435546875,
|
||||
23.61432859499169
|
||||
]}).validate()
|
||||
Location(
|
||||
loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]}
|
||||
).validate()
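
As the assertions above show, PointField accepts either a bare [x, y] pair or a GeoJSON Point dictionary and rejects anything else. A compact sketch, following the suite's connection convention:

import pytest
from mongoengine import Document, PointField, ValidationError, connect

connect(db="mongoenginetest")


class Location(Document):
    loc = PointField()


Location(loc=[1, 2]).validate()
Location(
    loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]}
).validate()

with pytest.raises(ValidationError):
    Location(loc={"x": 1, "y": 2}).validate()  # neither GeoJSON nor an (x, y) list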
|
||||
|
||||
def test_linestring_validation(self):
|
||||
class Location(Document):
|
||||
loc = LineStringField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
expected = "LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
@@ -97,7 +89,9 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]}
|
||||
expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
expected = (
|
||||
"Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [5, "a"]
|
||||
@@ -105,16 +99,25 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[1]]
|
||||
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
|
||||
expected = (
|
||||
"Invalid LineString:\nValue (%s) must be a two-dimensional point"
|
||||
% repr(invalid_coords[0])
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[1, 2, 3]]
|
||||
expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0])
|
||||
expected = (
|
||||
"Invalid LineString:\nValue (%s) must be a two-dimensional point"
|
||||
% repr(invalid_coords[0])
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[{}, {}]], [("a", "b")]]
|
||||
for coord in invalid_coords:
|
||||
expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
|
||||
expected = (
|
||||
"Invalid LineString:\nBoth values (%s) in point must be float or int"
|
||||
% repr(coord[0])
|
||||
)
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
|
||||
@@ -124,7 +127,9 @@ class GeoFieldTest(unittest.TestCase):
|
||||
loc = PolygonField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
expected = (
|
||||
"PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)"
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
@@ -136,7 +141,9 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[5, "a"]]]
|
||||
expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
|
||||
expected = (
|
||||
"Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int"
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[]]]
|
||||
@@ -162,7 +169,7 @@ class GeoFieldTest(unittest.TestCase):
|
||||
loc = MultiPointField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
expected = "MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
@@ -188,19 +195,19 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[[1, 2]]).validate()
|
||||
Location(loc={
|
||||
"type": "MultiPoint",
|
||||
"coordinates": [
|
||||
[1, 2],
|
||||
[81.4471435546875, 23.61432859499169]
|
||||
]}).validate()
|
||||
Location(
|
||||
loc={
|
||||
"type": "MultiPoint",
|
||||
"coordinates": [[1, 2], [81.4471435546875, 23.61432859499169]],
|
||||
}
|
||||
).validate()
|
||||
|
||||
def test_multilinestring_validation(self):
|
||||
class Location(Document):
|
||||
loc = MultiLineStringField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
expected = "MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
@@ -216,16 +223,25 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[1]]]
|
||||
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
|
||||
expected = (
|
||||
"Invalid MultiLineString:\nValue (%s) must be a two-dimensional point"
|
||||
% repr(invalid_coords[0][0])
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[1, 2, 3]]]
|
||||
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
|
||||
expected = (
|
||||
"Invalid MultiLineString:\nValue (%s) must be a two-dimensional point"
|
||||
% repr(invalid_coords[0][0])
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
|
||||
for coord in invalid_coords:
|
||||
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
|
||||
expected = (
|
||||
"Invalid MultiLineString:\nBoth values (%s) in point must be float or int"
|
||||
% repr(coord[0][0])
|
||||
)
|
||||
self._test_for_expected_error(Location, coord, expected)
|
||||
|
||||
Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
|
||||
@@ -235,7 +251,7 @@ class GeoFieldTest(unittest.TestCase):
|
||||
loc = MultiPolygonField()
|
||||
|
||||
invalid_coords = {"x": 1, "y": 2}
|
||||
expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
|
||||
expected = "MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)"
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
|
||||
@@ -243,7 +259,9 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]}
|
||||
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
expected = (
|
||||
"Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[5, "a"]]]]
|
||||
@@ -255,7 +273,9 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[1, 2, 3]]]]
|
||||
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
expected = (
|
||||
"Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
|
||||
@@ -263,7 +283,9 @@ class GeoFieldTest(unittest.TestCase):
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
invalid_coords = [[[[1, 2], [3, 4]]]]
|
||||
expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
|
||||
expected = (
|
||||
"Invalid MultiPolygon:\nLineStrings must start and end at the same point"
|
||||
)
|
||||
self._test_for_expected_error(Location, invalid_coords, expected)
|
||||
|
||||
Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()
|
||||
@@ -271,17 +293,19 @@ class GeoFieldTest(unittest.TestCase):
|
||||
def test_indexes_geopoint(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||
"""
|
||||
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
location = GeoPointField()
|
||||
|
||||
geo_indicies = Event._geo_indices()
|
||||
self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}])
|
||||
assert geo_indicies == [{"fields": [("location", "2d")]}]
|
||||
|
||||
def test_geopoint_embedded_indexes(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields on
|
||||
embedded documents.
|
||||
"""
|
||||
|
||||
class Venue(EmbeddedDocument):
|
||||
location = GeoPointField()
|
||||
name = StringField()
|
||||
@@ -291,11 +315,12 @@ class GeoFieldTest(unittest.TestCase):
|
||||
venue = EmbeddedDocumentField(Venue)
|
||||
|
||||
geo_indicies = Event._geo_indices()
|
||||
self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}])
|
||||
assert geo_indicies == [{"fields": [("venue.location", "2d")]}]
|
||||
|
||||
def test_indexes_2dsphere(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||
"""
|
||||
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
point = PointField()
|
||||
@@ -303,13 +328,14 @@ class GeoFieldTest(unittest.TestCase):
|
||||
polygon = PolygonField()
|
||||
|
||||
geo_indicies = Event._geo_indices()
|
||||
self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies)
|
||||
self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies)
|
||||
self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies)
|
||||
assert {"fields": [("line", "2dsphere")]} in geo_indicies
|
||||
assert {"fields": [("polygon", "2dsphere")]} in geo_indicies
|
||||
assert {"fields": [("point", "2dsphere")]} in geo_indicies
|
||||
|
||||
def test_indexes_2dsphere_embedded(self):
|
||||
"""Ensure that indexes are created automatically for GeoPointFields.
|
||||
"""
|
||||
|
||||
class Venue(EmbeddedDocument):
|
||||
name = StringField()
|
||||
point = PointField()
|
||||
@@ -321,12 +347,11 @@ class GeoFieldTest(unittest.TestCase):
|
||||
venue = EmbeddedDocumentField(Venue)
|
||||
|
||||
geo_indicies = Event._geo_indices()
|
||||
self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies)
|
||||
self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies)
|
||||
self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies)
|
||||
assert {"fields": [("venue.line", "2dsphere")]} in geo_indicies
|
||||
assert {"fields": [("venue.polygon", "2dsphere")]} in geo_indicies
|
||||
assert {"fields": [("venue.point", "2dsphere")]} in geo_indicies
|
||||
|
||||
def test_geo_indexes_recursion(self):
|
||||
|
||||
class Location(Document):
|
||||
name = StringField()
|
||||
location = GeoPointField()
|
||||
@@ -338,14 +363,14 @@ class GeoFieldTest(unittest.TestCase):
|
||||
Location.drop_collection()
|
||||
Parent.drop_collection()
|
||||
|
||||
Parent(name='Berlin').save()
|
||||
Parent(name="Berlin").save()
|
||||
info = Parent._get_collection().index_information()
|
||||
self.assertNotIn('location_2d', info)
|
||||
assert "location_2d" not in info
|
||||
info = Location._get_collection().index_information()
|
||||
self.assertIn('location_2d', info)
|
||||
assert "location_2d" in info
|
||||
|
||||
self.assertEqual(len(Parent._geo_indices()), 0)
|
||||
self.assertEqual(len(Location._geo_indices()), 1)
|
||||
assert len(Parent._geo_indices()) == 0
|
||||
assert len(Location._geo_indices()) == 1
|
||||
|
||||
def test_geo_indexes_auto_index(self):
|
||||
|
||||
@@ -354,18 +379,18 @@ class GeoFieldTest(unittest.TestCase):
|
||||
location = PointField(auto_index=False)
|
||||
datetime = DateTimeField()
|
||||
|
||||
meta = {
|
||||
'indexes': [[("location", "2dsphere"), ("datetime", 1)]]
|
||||
}
|
||||
meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}
|
||||
|
||||
self.assertEqual([], Log._geo_indices())
|
||||
assert Log._geo_indices() == []
|
||||
|
||||
Log.drop_collection()
|
||||
Log.ensure_indexes()
|
||||
|
||||
info = Log._get_collection().index_information()
|
||||
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
|
||||
[('location', '2dsphere'), ('datetime', 1)])
|
||||
assert info["location_2dsphere_datetime_1"]["key"] == [
|
||||
("location", "2dsphere"),
|
||||
("datetime", 1),
|
||||
]
|
||||
|
||||
# Test listing explicitly
|
||||
class Log(Document):
|
||||
@@ -373,20 +398,20 @@ class GeoFieldTest(unittest.TestCase):
|
||||
datetime = DateTimeField()
|
||||
|
||||
meta = {
|
||||
'indexes': [
|
||||
{'fields': [("location", "2dsphere"), ("datetime", 1)]}
|
||||
]
|
||||
"indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}]
|
||||
}
|
||||
|
||||
self.assertEqual([], Log._geo_indices())
|
||||
assert Log._geo_indices() == []
|
||||
|
||||
Log.drop_collection()
|
||||
Log.ensure_indexes()
|
||||
|
||||
info = Log._get_collection().index_information()
|
||||
self.assertEqual(info["location_2dsphere_datetime_1"]["key"],
|
||||
[('location', '2dsphere'), ('datetime', 1)])
|
||||
assert info["location_2dsphere_datetime_1"]["key"] == [
|
||||
("location", "2dsphere"),
|
||||
("datetime", 1),
|
||||
]
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
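# A minimal, self-contained sketch of the behaviour the auto-index tests above
# rely on: a compound ("2dsphere", 1) index declared in `meta` is created by
# ensure_indexes() and is visible through PyMongo's index_information().
# PingLog is a hypothetical document; assumes a local "mongoenginetest" database.
from mongoengine import DateTimeField, Document, PointField, connect

connect(db="mongoenginetest")


class PingLog(Document):
    location = PointField(auto_index=False)
    datetime = DateTimeField()
    meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}


PingLog.drop_collection()
PingLog.ensure_indexes()
info = PingLog._get_collection().index_information()
assert info["location_2dsphere_datetime_1"]["key"] == [
    ("location", "2dsphere"),
    ("datetime", 1),
]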
|
||||
@@ -1,14 +1,16 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestIntField(MongoDBTestCase):
|
||||
|
||||
def test_int_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to int fields.
|
||||
"""
|
||||
|
||||
class Person(Document):
|
||||
age = IntField(min_value=0, max_value=110)
|
||||
|
||||
@@ -23,11 +25,14 @@ class TestIntField(MongoDBTestCase):
|
||||
person.validate()
|
||||
|
||||
person.age = -1
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
person.age = 120
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
person.age = 'ten'
|
||||
self.assertRaises(ValidationError, person.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
person.age = "ten"
|
||||
with pytest.raises(ValidationError):
|
||||
person.validate()
|
||||
|
||||
def test_ne_operator(self):
|
||||
class TestDocument(Document):
|
||||
@@ -38,5 +43,5 @@ class TestIntField(MongoDBTestCase):
|
||||
TestDocument(int_fld=None).save()
|
||||
TestDocument(int_fld=1).save()
|
||||
|
||||
self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
|
||||
self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count())
|
||||
assert 1 == TestDocument.objects(int_fld__ne=None).count()
|
||||
assert 1 == TestDocument.objects(int_fld__ne=1).count()
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from bson import DBRef, ObjectId
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.base import LazyReference
|
||||
@@ -11,7 +12,8 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
def test_lazy_reference_config(self):
|
||||
# Make sure LazyReferenceField only accepts a document class or a string
|
||||
# with a document class name.
|
||||
self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument)
|
||||
with pytest.raises(ValidationError):
|
||||
LazyReferenceField(EmbeddedDocument)
|
||||
|
||||
def test___repr__(self):
|
||||
class Animal(Document):
|
||||
@@ -25,7 +27,7 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
|
||||
animal = Animal()
|
||||
oc = Ocurrence(animal=animal)
|
||||
self.assertIn('LazyReference', repr(oc.animal))
|
||||
assert "LazyReference" in repr(oc.animal)
|
||||
|
||||
def test___getattr___unknown_attr_raises_attribute_error(self):
|
||||
class Animal(Document):
|
||||
@@ -39,7 +41,7 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
|
||||
animal = Animal().save()
|
||||
oc = Ocurrence(animal=animal)
|
||||
with self.assertRaises(AttributeError):
|
||||
with pytest.raises(AttributeError):
|
||||
oc.animal.not_exist
|
||||
|
||||
def test_lazy_reference_simple(self):
|
||||
@@ -57,19 +59,19 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
animal = Animal(name="Leopard", tag="heavy").save()
|
||||
Ocurrence(person="test", animal=animal).save()
|
||||
p = Ocurrence.objects.get()
|
||||
self.assertIsInstance(p.animal, LazyReference)
|
||||
assert isinstance(p.animal, LazyReference)
|
||||
fetched_animal = p.animal.fetch()
|
||||
self.assertEqual(fetched_animal, animal)
|
||||
assert fetched_animal == animal
|
||||
# `fetch` keeps the referenced document cached by default...
|
||||
animal.tag = "not so heavy"
|
||||
animal.save()
|
||||
double_fetch = p.animal.fetch()
|
||||
self.assertIs(fetched_animal, double_fetch)
|
||||
self.assertEqual(double_fetch.tag, "heavy")
|
||||
assert fetched_animal is double_fetch
|
||||
assert double_fetch.tag == "heavy"
|
||||
# ...unless specified otherwise
|
||||
fetch_force = p.animal.fetch(force=True)
|
||||
self.assertIsNot(fetch_force, fetched_animal)
|
||||
self.assertEqual(fetch_force.tag, "not so heavy")
|
||||
assert fetch_force is not fetched_animal
|
||||
assert fetch_force.tag == "not so heavy"
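# Sketch of the caching behaviour asserted above: fetch() hits the database once
# and caches the referenced document, while fetch(force=True) re-queries it.
# Critter/Sighting are hypothetical documents; assumes a local "mongoenginetest" database.
from mongoengine import Document, LazyReferenceField, StringField, connect
from mongoengine.base import LazyReference

connect(db="mongoenginetest")


class Critter(Document):
    name = StringField()


class Sighting(Document):
    critter = LazyReferenceField(Critter)


critter = Critter(name="Leopard").save()
Sighting(critter=critter).save()

sighting = Sighting.objects.get()
assert isinstance(sighting.critter, LazyReference)
first = sighting.critter.fetch()  # queries MongoDB and caches the result
second = sighting.critter.fetch()  # served from the cache: the same object
assert first is second
fresh = sighting.critter.fetch(force=True)  # force=True bypasses the cache
assert fresh is not first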
|
||||
|
||||
def test_lazy_reference_fetch_invalid_ref(self):
|
||||
class Animal(Document):
|
||||
@@ -87,13 +89,13 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
Ocurrence(person="test", animal=animal).save()
|
||||
animal.delete()
|
||||
p = Ocurrence.objects.get()
|
||||
self.assertIsInstance(p.animal, LazyReference)
|
||||
with self.assertRaises(DoesNotExist):
|
||||
assert isinstance(p.animal, LazyReference)
|
||||
with pytest.raises(DoesNotExist):
|
||||
p.animal.fetch()
|
||||
|
||||
def test_lazy_reference_set(self):
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
name = StringField()
|
||||
tag = StringField()
|
||||
@@ -109,21 +111,20 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
nick = StringField()
|
||||
|
||||
animal = Animal(name="Leopard", tag="heavy").save()
|
||||
sub_animal = SubAnimal(nick='doggo', name='dog').save()
|
||||
sub_animal = SubAnimal(nick="doggo", name="dog").save()
|
||||
for ref in (
|
||||
animal,
|
||||
animal.pk,
|
||||
DBRef(animal._get_collection_name(), animal.pk),
|
||||
LazyReference(Animal, animal.pk),
|
||||
|
||||
sub_animal,
|
||||
sub_animal.pk,
|
||||
DBRef(sub_animal._get_collection_name(), sub_animal.pk),
|
||||
LazyReference(SubAnimal, sub_animal.pk),
|
||||
):
|
||||
animal,
|
||||
animal.pk,
|
||||
DBRef(animal._get_collection_name(), animal.pk),
|
||||
LazyReference(Animal, animal.pk),
|
||||
sub_animal,
|
||||
sub_animal.pk,
|
||||
DBRef(sub_animal._get_collection_name(), sub_animal.pk),
|
||||
LazyReference(SubAnimal, sub_animal.pk),
|
||||
):
|
||||
p = Ocurrence(person="test", animal=ref).save()
|
||||
p.reload()
|
||||
self.assertIsInstance(p.animal, LazyReference)
|
||||
assert isinstance(p.animal, LazyReference)
|
||||
p.animal.fetch()
|
||||
|
||||
def test_lazy_reference_bad_set(self):
|
||||
@@ -144,19 +145,20 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
animal = Animal(name="Leopard", tag="heavy").save()
|
||||
baddoc = BadDoc().save()
|
||||
for bad in (
|
||||
42,
|
||||
'foo',
|
||||
baddoc,
|
||||
DBRef(baddoc._get_collection_name(), animal.pk),
|
||||
LazyReference(BadDoc, animal.pk)
|
||||
):
|
||||
with self.assertRaises(ValidationError):
|
||||
p = Ocurrence(person="test", animal=bad).save()
|
||||
42,
|
||||
"foo",
|
||||
baddoc,
|
||||
DBRef(baddoc._get_collection_name(), animal.pk),
|
||||
LazyReference(BadDoc, animal.pk),
|
||||
):
|
||||
with pytest.raises(ValidationError):
|
||||
Ocurrence(person="test", animal=bad).save()
|
||||
|
||||
def test_lazy_reference_query_conversion(self):
|
||||
"""Ensure that LazyReferenceFields can be queried using objects and values
|
||||
of the type of the primary key of the referenced object.
|
||||
"""
|
||||
|
||||
class Member(Document):
|
||||
user_num = IntField(primary_key=True)
|
||||
|
||||
@@ -172,26 +174,27 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
m2 = Member(user_num=2)
|
||||
m2.save()
|
||||
|
||||
post1 = BlogPost(title='post 1', author=m1)
|
||||
post1 = BlogPost(title="post 1", author=m1)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(title='post 2', author=m2)
|
||||
post2 = BlogPost(title="post 2", author=m2)
|
||||
post2.save()
|
||||
|
||||
post = BlogPost.objects(author=m1).first()
|
||||
self.assertEqual(post.id, post1.id)
|
||||
assert post.id == post1.id
|
||||
|
||||
post = BlogPost.objects(author=m2).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
# Same thing by passing a LazyReference instance
|
||||
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
def test_lazy_reference_query_conversion_dbref(self):
|
||||
"""Ensure that LazyReferenceFields can be queried using objects and values
|
||||
of the type of the primary key of the referenced object.
|
||||
"""
|
||||
|
||||
class Member(Document):
|
||||
user_num = IntField(primary_key=True)
|
||||
|
||||
@@ -207,21 +210,21 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
m2 = Member(user_num=2)
|
||||
m2.save()
|
||||
|
||||
post1 = BlogPost(title='post 1', author=m1)
|
||||
post1 = BlogPost(title="post 1", author=m1)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(title='post 2', author=m2)
|
||||
post2 = BlogPost(title="post 2", author=m2)
|
||||
post2.save()
|
||||
|
||||
post = BlogPost.objects(author=m1).first()
|
||||
self.assertEqual(post.id, post1.id)
|
||||
assert post.id == post1.id
|
||||
|
||||
post = BlogPost.objects(author=m2).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
# Same thing by passing a LazyReference instance
|
||||
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
def test_lazy_reference_passthrough(self):
|
||||
class Animal(Document):
|
||||
@@ -238,21 +241,21 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
animal = Animal(name="Leopard", tag="heavy").save()
|
||||
Ocurrence(animal=animal, animal_passthrough=animal).save()
|
||||
p = Ocurrence.objects.get()
|
||||
self.assertIsInstance(p.animal, LazyReference)
|
||||
with self.assertRaises(KeyError):
|
||||
p.animal['name']
|
||||
with self.assertRaises(AttributeError):
|
||||
assert isinstance(p.animal, LazyReference)
|
||||
with pytest.raises(KeyError):
|
||||
p.animal["name"]
|
||||
with pytest.raises(AttributeError):
|
||||
p.animal.name
|
||||
self.assertEqual(p.animal.pk, animal.pk)
|
||||
assert p.animal.pk == animal.pk
|
||||
|
||||
self.assertEqual(p.animal_passthrough.name, "Leopard")
|
||||
self.assertEqual(p.animal_passthrough['name'], "Leopard")
|
||||
assert p.animal_passthrough.name == "Leopard"
|
||||
assert p.animal_passthrough["name"] == "Leopard"
|
||||
|
||||
# Should not be able to access referenced document's methods
|
||||
with self.assertRaises(AttributeError):
|
||||
with pytest.raises(AttributeError):
|
||||
p.animal.save
|
||||
with self.assertRaises(KeyError):
|
||||
p.animal['save']
|
||||
with pytest.raises(KeyError):
|
||||
p.animal["save"]
|
||||
|
||||
def test_lazy_reference_not_set(self):
|
||||
class Animal(Document):
|
||||
@@ -266,9 +269,9 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
Animal.drop_collection()
|
||||
Ocurrence.drop_collection()
|
||||
|
||||
Ocurrence(person='foo').save()
|
||||
Ocurrence(person="foo").save()
|
||||
p = Ocurrence.objects.get()
|
||||
self.assertIs(p.animal, None)
|
||||
assert p.animal is None
|
||||
|
||||
def test_lazy_reference_equality(self):
|
||||
class Animal(Document):
|
||||
@@ -279,12 +282,12 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
|
||||
animal = Animal(name="Leopard", tag="heavy").save()
|
||||
animalref = LazyReference(Animal, animal.pk)
|
||||
self.assertEqual(animal, animalref)
|
||||
self.assertEqual(animalref, animal)
|
||||
assert animal == animalref
|
||||
assert animalref == animal
|
||||
|
||||
other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
|
||||
self.assertNotEqual(animal, other_animalref)
|
||||
self.assertNotEqual(other_animalref, animal)
|
||||
assert animal != other_animalref
|
||||
assert other_animalref != animal
|
||||
|
||||
def test_lazy_reference_embedded(self):
|
||||
class Animal(Document):
|
||||
@@ -303,21 +306,21 @@ class TestLazyReferenceField(MongoDBTestCase):
|
||||
Animal.drop_collection()
|
||||
Ocurrence.drop_collection()
|
||||
|
||||
animal1 = Animal('doggo').save()
|
||||
animal2 = Animal('cheeta').save()
|
||||
animal1 = Animal(name="doggo").save()
|
||||
animal2 = Animal(name="cheeta").save()
|
||||
|
||||
def check_fields_type(occ):
|
||||
self.assertIsInstance(occ.direct, LazyReference)
|
||||
assert isinstance(occ.direct, LazyReference)
|
||||
for elem in occ.in_list:
|
||||
self.assertIsInstance(elem, LazyReference)
|
||||
self.assertIsInstance(occ.in_embedded.direct, LazyReference)
|
||||
assert isinstance(elem, LazyReference)
|
||||
assert isinstance(occ.in_embedded.direct, LazyReference)
|
||||
for elem in occ.in_embedded.in_list:
|
||||
self.assertIsInstance(elem, LazyReference)
|
||||
assert isinstance(elem, LazyReference)
|
||||
|
||||
occ = Ocurrence(
|
||||
in_list=[animal1, animal2],
|
||||
in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
|
||||
direct=animal1
|
||||
in_embedded={"in_list": [animal1, animal2], "direct": animal1},
|
||||
direct=animal1,
|
||||
).save()
|
||||
check_fields_type(occ)
|
||||
occ.reload()
|
||||
@@ -345,19 +348,19 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
animal = Animal(name="Leopard", tag="heavy").save()
|
||||
Ocurrence(person="test", animal=animal).save()
|
||||
p = Ocurrence.objects.get()
|
||||
self.assertIsInstance(p.animal, LazyReference)
|
||||
assert isinstance(p.animal, LazyReference)
|
||||
fetched_animal = p.animal.fetch()
|
||||
self.assertEqual(fetched_animal, animal)
|
||||
assert fetched_animal == animal
|
||||
# `fetch` keeps the referenced document cached by default...
|
||||
animal.tag = "not so heavy"
|
||||
animal.save()
|
||||
double_fetch = p.animal.fetch()
|
||||
self.assertIs(fetched_animal, double_fetch)
|
||||
self.assertEqual(double_fetch.tag, "heavy")
|
||||
assert fetched_animal is double_fetch
|
||||
assert double_fetch.tag == "heavy"
|
||||
# ...unless specified otherwise
|
||||
fetch_force = p.animal.fetch(force=True)
|
||||
self.assertIsNot(fetch_force, fetched_animal)
|
||||
self.assertEqual(fetch_force.tag, "not so heavy")
|
||||
assert fetch_force is not fetched_animal
|
||||
assert fetch_force.tag == "not so heavy"
|
||||
|
||||
def test_generic_lazy_reference_choices(self):
|
||||
class Animal(Document):
|
||||
@@ -383,14 +386,14 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
mineral = Mineral(name="Granite").save()
|
||||
|
||||
occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
|
||||
occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
|
||||
with self.assertRaises(ValidationError):
|
||||
_ = Ocurrence(living_thing=vegetal, thing=vegetal).save()
|
||||
with pytest.raises(ValidationError):
|
||||
Ocurrence(living_thing=mineral).save()
|
||||
|
||||
occ = Ocurrence.objects.get(living_thing=animal)
|
||||
self.assertEqual(occ, occ_animal)
|
||||
self.assertIsInstance(occ.thing, LazyReference)
|
||||
self.assertIsInstance(occ.living_thing, LazyReference)
|
||||
assert occ == occ_animal
|
||||
assert isinstance(occ.thing, LazyReference)
|
||||
assert isinstance(occ.living_thing, LazyReference)
|
||||
|
||||
occ.thing = vegetal
|
||||
occ.living_thing = vegetal
|
||||
@@ -398,12 +401,12 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
|
||||
occ.thing = mineral
|
||||
occ.living_thing = mineral
|
||||
with self.assertRaises(ValidationError):
|
||||
with pytest.raises(ValidationError):
|
||||
occ.save()
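# Sketch of the `choices` rule exercised above: a GenericLazyReferenceField
# restricted to certain document classes accepts references to those classes
# and raises ValidationError for anything else at save time. Plant/Rock/Sample
# are hypothetical documents; assumes a local "mongoenginetest" database.
import pytest
from mongoengine import (
    Document,
    GenericLazyReferenceField,
    StringField,
    ValidationError,
    connect,
)

connect(db="mongoenginetest")


class Plant(Document):
    name = StringField()


class Rock(Document):
    name = StringField()


class Sample(Document):
    thing = GenericLazyReferenceField(choices=["Plant"])


fern = Plant(name="Fern").save()
granite = Rock(name="Granite").save()

Sample(thing=fern).save()  # allowed: "Plant" is listed in choices
with pytest.raises(ValidationError):
    Sample(thing=granite).save()  # rejected: Rock is not in choices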
|
||||
|
||||
def test_generic_lazy_reference_set(self):
|
||||
class Animal(Document):
|
||||
meta = {'allow_inheritance': True}
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
name = StringField()
|
||||
tag = StringField()
|
||||
@@ -419,19 +422,21 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
nick = StringField()
|
||||
|
||||
animal = Animal(name="Leopard", tag="heavy").save()
|
||||
sub_animal = SubAnimal(nick='doggo', name='dog').save()
|
||||
sub_animal = SubAnimal(nick="doggo", name="dog").save()
|
||||
for ref in (
|
||||
animal,
|
||||
LazyReference(Animal, animal.pk),
|
||||
{'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)},
|
||||
|
||||
sub_animal,
|
||||
LazyReference(SubAnimal, sub_animal.pk),
|
||||
{'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)},
|
||||
):
|
||||
animal,
|
||||
LazyReference(Animal, animal.pk),
|
||||
{"_cls": "Animal", "_ref": DBRef(animal._get_collection_name(), animal.pk)},
|
||||
sub_animal,
|
||||
LazyReference(SubAnimal, sub_animal.pk),
|
||||
{
|
||||
"_cls": "SubAnimal",
|
||||
"_ref": DBRef(sub_animal._get_collection_name(), sub_animal.pk),
|
||||
},
|
||||
):
|
||||
p = Ocurrence(person="test", animal=ref).save()
|
||||
p.reload()
|
||||
self.assertIsInstance(p.animal, (LazyReference, Document))
|
||||
assert isinstance(p.animal, (LazyReference, Document))
|
||||
p.animal.fetch()
|
||||
|
||||
def test_generic_lazy_reference_bad_set(self):
|
||||
@@ -441,7 +446,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
|
||||
class Ocurrence(Document):
|
||||
person = StringField()
|
||||
animal = GenericLazyReferenceField(choices=['Animal'])
|
||||
animal = GenericLazyReferenceField(choices=["Animal"])
|
||||
|
||||
Animal.drop_collection()
|
||||
Ocurrence.drop_collection()
|
||||
@@ -451,14 +456,9 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
|
||||
animal = Animal(name="Leopard", tag="heavy").save()
|
||||
baddoc = BadDoc().save()
|
||||
for bad in (
|
||||
42,
|
||||
'foo',
|
||||
baddoc,
|
||||
LazyReference(BadDoc, animal.pk)
|
||||
):
|
||||
with self.assertRaises(ValidationError):
|
||||
p = Ocurrence(person="test", animal=bad).save()
|
||||
for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)):
|
||||
with pytest.raises(ValidationError):
|
||||
Ocurrence(person="test", animal=bad).save()
|
||||
|
||||
def test_generic_lazy_reference_query_conversion(self):
|
||||
class Member(Document):
|
||||
@@ -476,21 +476,21 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
m2 = Member(user_num=2)
|
||||
m2.save()
|
||||
|
||||
post1 = BlogPost(title='post 1', author=m1)
|
||||
post1 = BlogPost(title="post 1", author=m1)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(title='post 2', author=m2)
|
||||
post2 = BlogPost(title="post 2", author=m2)
|
||||
post2.save()
|
||||
|
||||
post = BlogPost.objects(author=m1).first()
|
||||
self.assertEqual(post.id, post1.id)
|
||||
assert post.id == post1.id
|
||||
|
||||
post = BlogPost.objects(author=m2).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
# Same thing by passing a LazyReference instance
|
||||
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
def test_generic_lazy_reference_not_set(self):
|
||||
class Animal(Document):
|
||||
@@ -504,9 +504,9 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
Animal.drop_collection()
|
||||
Ocurrence.drop_collection()
|
||||
|
||||
Ocurrence(person='foo').save()
|
||||
Ocurrence(person="foo").save()
|
||||
p = Ocurrence.objects.get()
|
||||
self.assertIs(p.animal, None)
|
||||
assert p.animal is None
|
||||
|
||||
def test_generic_lazy_reference_accepts_string_instead_of_class(self):
|
||||
class Animal(Document):
|
||||
@@ -515,7 +515,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
|
||||
class Ocurrence(Document):
|
||||
person = StringField()
|
||||
animal = GenericLazyReferenceField('Animal')
|
||||
animal = GenericLazyReferenceField("Animal")
|
||||
|
||||
Animal.drop_collection()
|
||||
Ocurrence.drop_collection()
|
||||
@@ -523,7 +523,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
animal = Animal().save()
|
||||
Ocurrence(animal=animal).save()
|
||||
p = Ocurrence.objects.get()
|
||||
self.assertEqual(p.animal, animal)
|
||||
assert p.animal == animal
|
||||
|
||||
def test_generic_lazy_reference_embedded(self):
|
||||
class Animal(Document):
|
||||
@@ -542,27 +542,33 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
|
||||
Animal.drop_collection()
|
||||
Ocurrence.drop_collection()
|
||||
|
||||
animal1 = Animal('doggo').save()
|
||||
animal2 = Animal('cheeta').save()
|
||||
animal1 = Animal(name="doggo").save()
|
||||
animal2 = Animal(name="cheeta").save()
|
||||
|
||||
def check_fields_type(occ):
|
||||
self.assertIsInstance(occ.direct, LazyReference)
|
||||
assert isinstance(occ.direct, LazyReference)
|
||||
for elem in occ.in_list:
|
||||
self.assertIsInstance(elem, LazyReference)
|
||||
self.assertIsInstance(occ.in_embedded.direct, LazyReference)
|
||||
assert isinstance(elem, LazyReference)
|
||||
assert isinstance(occ.in_embedded.direct, LazyReference)
|
||||
for elem in occ.in_embedded.in_list:
|
||||
self.assertIsInstance(elem, LazyReference)
|
||||
assert isinstance(elem, LazyReference)
|
||||
|
||||
occ = Ocurrence(
|
||||
in_list=[animal1, animal2],
|
||||
in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
|
||||
direct=animal1
|
||||
in_embedded={"in_list": [animal1, animal2], "direct": animal1},
|
||||
direct=animal1,
|
||||
).save()
|
||||
check_fields_type(occ)
|
||||
occ.reload()
|
||||
check_fields_type(occ)
|
||||
animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)}
|
||||
animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)}
|
||||
animal1_ref = {
|
||||
"_cls": "Animal",
|
||||
"_ref": DBRef(animal1._get_collection_name(), animal1.pk),
|
||||
}
|
||||
animal2_ref = {
|
||||
"_cls": "Animal",
|
||||
"_ref": DBRef(animal2._get_collection_name(), animal2.pk),
|
||||
}
|
||||
occ.direct = animal1_ref
|
||||
occ.in_list = [animal1_ref, animal2_ref]
|
||||
occ.in_embedded.direct = animal1_ref
|
||||
|
||||
@@ -1,10 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import six
|
||||
|
||||
try:
|
||||
from bson.int64 import Int64
|
||||
except ImportError:
|
||||
Int64 = long
|
||||
from bson.int64 import Int64
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.connection import get_db
|
||||
@@ -13,23 +8,26 @@ from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestLongField(MongoDBTestCase):
|
||||
|
||||
def test_long_field_is_considered_as_int64(self):
|
||||
"""
|
||||
Tests that long fields are stored as long in mongo, even if long
|
||||
value is small enough to be an int.
|
||||
"""
|
||||
|
||||
class TestLongFieldConsideredAsInt64(Document):
|
||||
some_long = LongField()
|
||||
|
||||
doc = TestLongFieldConsideredAsInt64(some_long=42).save()
|
||||
db = get_db()
|
||||
self.assertIsInstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64)
|
||||
self.assertIsInstance(doc.some_long, six.integer_types)
|
||||
assert isinstance(
|
||||
db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
|
||||
)
|
||||
assert isinstance(doc.some_long, int)
|
||||
|
||||
def test_long_validation(self):
|
||||
"""Ensure that invalid values cannot be assigned to long fields.
|
||||
"""
|
||||
|
||||
class TestDocument(Document):
|
||||
value = LongField(min_value=0, max_value=110)
|
||||
|
||||
@@ -38,11 +36,14 @@ class TestLongField(MongoDBTestCase):
|
||||
doc.validate()
|
||||
|
||||
doc.value = -1
|
||||
self.assertRaises(ValidationError, doc.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
doc.validate()
|
||||
doc.value = 120
|
||||
self.assertRaises(ValidationError, doc.validate)
|
||||
doc.value = 'ten'
|
||||
self.assertRaises(ValidationError, doc.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
doc.validate()
|
||||
doc.value = "ten"
|
||||
with pytest.raises(ValidationError):
|
||||
doc.validate()
|
||||
|
||||
def test_long_ne_operator(self):
|
||||
class TestDocument(Document):
|
||||
@@ -53,4 +54,4 @@ class TestLongField(MongoDBTestCase):
|
||||
TestDocument(long_fld=None).save()
|
||||
TestDocument(long_fld=1).save()
|
||||
|
||||
self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count())
|
||||
assert 1 == TestDocument.objects(long_fld__ne=None).count()
|
||||
|
||||
@@ -1,29 +1,31 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
|
||||
from mongoengine import *
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestMapField(MongoDBTestCase):
|
||||
|
||||
def test_mapfield(self):
|
||||
"""Ensure that the MapField handles the declared type."""
|
||||
|
||||
class Simple(Document):
|
||||
mapping = MapField(IntField())
|
||||
|
||||
Simple.drop_collection()
|
||||
|
||||
e = Simple()
|
||||
e.mapping['someint'] = 1
|
||||
e.mapping["someint"] = 1
|
||||
e.save()
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
e.mapping['somestring'] = "abc"
|
||||
with pytest.raises(ValidationError):
|
||||
e.mapping["somestring"] = "abc"
|
||||
e.save()
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
with pytest.raises(ValidationError):
|
||||
|
||||
class NoDeclaredType(Document):
|
||||
mapping = MapField()
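# Sketch of the two MapField behaviours used in the tests above and below:
# values are validated against the declared field type, and map keys can be
# queried with the usual double-underscore syntax. Scoreboard is a hypothetical
# document; assumes a local "mongoenginetest" database.
import pytest
from mongoengine import Document, IntField, MapField, ValidationError, connect

connect(db="mongoenginetest")


class Scoreboard(Document):
    scores = MapField(IntField())


Scoreboard.drop_collection()
board = Scoreboard(scores={"alice": 3})
board.save()

with pytest.raises(ValidationError):
    board.scores["bob"] = "not an int"  # wrong value type for IntField()
    board.save()  # validation fails here

assert Scoreboard.objects(scores__alice=3).count() == 1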
|
||||
|
||||
@@ -45,38 +47,37 @@ class TestMapField(MongoDBTestCase):
|
||||
Extensible.drop_collection()
|
||||
|
||||
e = Extensible()
|
||||
e.mapping['somestring'] = StringSetting(value='foo')
|
||||
e.mapping['someint'] = IntegerSetting(value=42)
|
||||
e.mapping["somestring"] = StringSetting(value="foo")
|
||||
e.mapping["someint"] = IntegerSetting(value=42)
|
||||
e.save()
|
||||
|
||||
e2 = Extensible.objects.get(id=e.id)
|
||||
self.assertIsInstance(e2.mapping['somestring'], StringSetting)
|
||||
self.assertIsInstance(e2.mapping['someint'], IntegerSetting)
|
||||
assert isinstance(e2.mapping["somestring"], StringSetting)
|
||||
assert isinstance(e2.mapping["someint"], IntegerSetting)
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
e.mapping['someint'] = 123
|
||||
with pytest.raises(ValidationError):
|
||||
e.mapping["someint"] = 123
|
||||
e.save()
|
||||
|
||||
def test_embedded_mapfield_db_field(self):
|
||||
class Embedded(EmbeddedDocument):
|
||||
number = IntField(default=0, db_field='i')
|
||||
number = IntField(default=0, db_field="i")
|
||||
|
||||
class Test(Document):
|
||||
my_map = MapField(field=EmbeddedDocumentField(Embedded),
|
||||
db_field='x')
|
||||
my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field="x")
|
||||
|
||||
Test.drop_collection()
|
||||
|
||||
test = Test()
|
||||
test.my_map['DICTIONARY_KEY'] = Embedded(number=1)
|
||||
test.my_map["DICTIONARY_KEY"] = Embedded(number=1)
|
||||
test.save()
|
||||
|
||||
Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)
|
||||
|
||||
test = Test.objects.get()
|
||||
self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2)
|
||||
assert test.my_map["DICTIONARY_KEY"].number == 2
|
||||
doc = self.db.test.find_one()
|
||||
self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)
|
||||
assert doc["x"]["DICTIONARY_KEY"]["i"] == 2
|
||||
|
||||
def test_mapfield_numerical_index(self):
|
||||
"""Ensure that MapField accept numeric strings as indexes."""
|
||||
@@ -90,9 +91,9 @@ class TestMapField(MongoDBTestCase):
|
||||
Test.drop_collection()
|
||||
|
||||
test = Test()
|
||||
test.my_map['1'] = Embedded(name='test')
|
||||
test.my_map["1"] = Embedded(name="test")
|
||||
test.save()
|
||||
test.my_map['1'].name = 'test updated'
|
||||
test.my_map["1"].name = "test updated"
|
||||
test.save()
|
||||
|
||||
def test_map_field_lookup(self):
|
||||
@@ -110,15 +111,20 @@ class TestMapField(MongoDBTestCase):
|
||||
actions = MapField(EmbeddedDocumentField(Action))
|
||||
|
||||
Log.drop_collection()
|
||||
Log(name="wilson", visited={'friends': datetime.datetime.now()},
|
||||
actions={'friends': Action(operation='drink', object='beer')}).save()
|
||||
Log(
|
||||
name="wilson",
|
||||
visited={"friends": datetime.datetime.now()},
|
||||
actions={"friends": Action(operation="drink", object="beer")},
|
||||
).save()
|
||||
|
||||
self.assertEqual(1, Log.objects(
|
||||
visited__friends__exists=True).count())
|
||||
assert 1 == Log.objects(visited__friends__exists=True).count()
|
||||
|
||||
self.assertEqual(1, Log.objects(
|
||||
actions__friends__operation='drink',
|
||||
actions__friends__object='beer').count())
|
||||
assert (
|
||||
1
|
||||
== Log.objects(
|
||||
actions__friends__operation="drink", actions__friends__object="beer"
|
||||
).count()
|
||||
)
|
||||
|
||||
def test_map_field_unicode(self):
|
||||
class Info(EmbeddedDocument):
|
||||
@@ -130,15 +136,11 @@ class TestMapField(MongoDBTestCase):
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
tree = BlogPost(info_dict={
|
||||
u"éééé": {
|
||||
'description': u"VALUE: éééé"
|
||||
}
|
||||
})
|
||||
tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}})
|
||||
|
||||
tree.save()
|
||||
|
||||
self.assertEqual(
|
||||
BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description,
|
||||
u"VALUE: éééé"
|
||||
assert (
|
||||
BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description
|
||||
== u"VALUE: éééé"
|
||||
)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from bson import SON, DBRef
|
||||
from bson import DBRef, SON
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
@@ -24,19 +24,22 @@ class TestReferenceField(MongoDBTestCase):
|
||||
|
||||
# Make sure ReferenceField only accepts a document class or a string
|
||||
# with a document class name.
|
||||
self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)
|
||||
with pytest.raises(ValidationError):
|
||||
ReferenceField(EmbeddedDocument)
|
||||
|
||||
user = User(name='Test User')
|
||||
user = User(name="Test User")
|
||||
|
||||
# Ensure that the referenced object must have been saved
|
||||
post1 = BlogPost(content='Chips and gravy taste good.')
|
||||
post1 = BlogPost(content="Chips and gravy taste good.")
|
||||
post1.author = user
|
||||
self.assertRaises(ValidationError, post1.save)
|
||||
with pytest.raises(ValidationError):
|
||||
post1.save()
|
||||
|
||||
# Check that an invalid object type cannot be used
|
||||
post2 = BlogPost(content='Chips and chilli taste good.')
|
||||
post2 = BlogPost(content="Chips and chilli taste good.")
|
||||
post1.author = post2
|
||||
self.assertRaises(ValidationError, post1.validate)
|
||||
with pytest.raises(ValidationError):
|
||||
post1.validate()
|
||||
|
||||
# Ensure ObjectID's are accepted as references
|
||||
user_object_id = user.pk
|
||||
@@ -52,42 +55,27 @@ class TestReferenceField(MongoDBTestCase):
|
||||
# Make sure referencing a saved document of the *wrong* type fails
|
||||
post2.save()
|
||||
post1.author = post2
|
||||
self.assertRaises(ValidationError, post1.validate)
|
||||
|
||||
def test_objectid_reference_fields(self):
|
||||
"""Make sure storing Object ID references works."""
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
parent = ReferenceField('self')
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="John").save()
|
||||
Person(name="Ross", parent=p1.pk).save()
|
||||
|
||||
p = Person.objects.get(name="Ross")
|
||||
self.assertEqual(p.parent, p1)
|
||||
with pytest.raises(ValidationError):
|
||||
post1.validate()
|
||||
|
||||
def test_dbref_reference_fields(self):
|
||||
"""Make sure storing references as bson.dbref.DBRef works."""
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
parent = ReferenceField('self', dbref=True)
|
||||
parent = ReferenceField("self", dbref=True)
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="John").save()
|
||||
Person(name="Ross", parent=p1).save()
|
||||
|
||||
self.assertEqual(
|
||||
Person._get_collection().find_one({'name': 'Ross'})['parent'],
|
||||
DBRef('person', p1.pk)
|
||||
assert Person._get_collection().find_one({"name": "Ross"})["parent"] == DBRef(
|
||||
"person", p1.pk
|
||||
)
|
||||
|
||||
p = Person.objects.get(name="Ross")
|
||||
self.assertEqual(p.parent, p1)
|
||||
assert p.parent == p1
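# Sketch contrasting the two storage modes checked above: with dbref=True the
# raw document holds a bson DBRef, with dbref=False it holds just the
# referenced ObjectId. Employee is a hypothetical document; assumes a local
# "mongoenginetest" database.
from bson import DBRef
from mongoengine import Document, ReferenceField, StringField, connect

connect(db="mongoenginetest")


class Employee(Document):
    name = StringField()
    boss = ReferenceField("self", dbref=True)
    mentor = ReferenceField("self", dbref=False)


Employee.drop_collection()
alice = Employee(name="Alice").save()
Employee(name="Bob", boss=alice, mentor=alice).save()

raw = Employee._get_collection().find_one({"name": "Bob"})
assert raw["boss"] == DBRef("employee", alice.pk)  # stored as a DBRef
assert raw["mentor"] == alice.pk  # stored as a bare ObjectId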
|
||||
|
||||
def test_dbref_to_mongo(self):
|
||||
"""Make sure that calling to_mongo on a ReferenceField which
|
||||
@@ -97,21 +85,15 @@ class TestReferenceField(MongoDBTestCase):
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
parent = ReferenceField('self', dbref=False)
|
||||
parent = ReferenceField("self", dbref=False)
|
||||
|
||||
p = Person(
|
||||
name='Steve',
|
||||
parent=DBRef('person', 'abcdefghijklmnop')
|
||||
)
|
||||
self.assertEqual(p.to_mongo(), SON([
|
||||
('name', u'Steve'),
|
||||
('parent', 'abcdefghijklmnop')
|
||||
]))
|
||||
p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop"))
|
||||
assert p.to_mongo() == SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")])
|
||||
|
||||
def test_objectid_reference_fields(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
parent = ReferenceField('self', dbref=False)
|
||||
parent = ReferenceField("self", dbref=False)
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
@@ -119,18 +101,19 @@ class TestReferenceField(MongoDBTestCase):
|
||||
Person(name="Ross", parent=p1).save()
|
||||
|
||||
col = Person._get_collection()
|
||||
data = col.find_one({'name': 'Ross'})
|
||||
self.assertEqual(data['parent'], p1.pk)
|
||||
data = col.find_one({"name": "Ross"})
|
||||
assert data["parent"] == p1.pk
|
||||
|
||||
p = Person.objects.get(name="Ross")
|
||||
self.assertEqual(p.parent, p1)
|
||||
assert p.parent == p1
|
||||
|
||||
def test_undefined_reference(self):
|
||||
"""Ensure that ReferenceFields may reference undefined Documents.
|
||||
"""
|
||||
|
||||
class Product(Document):
|
||||
name = StringField()
|
||||
company = ReferenceField('Company')
|
||||
company = ReferenceField("Company")
|
||||
|
||||
class Company(Document):
|
||||
name = StringField()
|
||||
@@ -138,28 +121,29 @@ class TestReferenceField(MongoDBTestCase):
|
||||
Product.drop_collection()
|
||||
Company.drop_collection()
|
||||
|
||||
ten_gen = Company(name='10gen')
|
||||
ten_gen = Company(name="10gen")
|
||||
ten_gen.save()
|
||||
mongodb = Product(name='MongoDB', company=ten_gen)
|
||||
mongodb = Product(name="MongoDB", company=ten_gen)
|
||||
mongodb.save()
|
||||
|
||||
me = Product(name='MongoEngine')
|
||||
me = Product(name="MongoEngine")
|
||||
me.save()
|
||||
|
||||
obj = Product.objects(company=ten_gen).first()
|
||||
self.assertEqual(obj, mongodb)
|
||||
self.assertEqual(obj.company, ten_gen)
|
||||
assert obj == mongodb
|
||||
assert obj.company == ten_gen
|
||||
|
||||
obj = Product.objects(company=None).first()
|
||||
self.assertEqual(obj, me)
|
||||
assert obj == me
|
||||
|
||||
obj = Product.objects.get(company=None)
|
||||
self.assertEqual(obj, me)
|
||||
assert obj == me
|
||||
|
||||
def test_reference_query_conversion(self):
|
||||
"""Ensure that ReferenceFields can be queried using objects and values
|
||||
of the type of the primary key of the referenced object.
|
||||
"""
|
||||
|
||||
class Member(Document):
|
||||
user_num = IntField(primary_key=True)
|
||||
|
||||
@@ -175,22 +159,23 @@ class TestReferenceField(MongoDBTestCase):
|
||||
m2 = Member(user_num=2)
|
||||
m2.save()
|
||||
|
||||
post1 = BlogPost(title='post 1', author=m1)
|
||||
post1 = BlogPost(title="post 1", author=m1)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(title='post 2', author=m2)
|
||||
post2 = BlogPost(title="post 2", author=m2)
|
||||
post2.save()
|
||||
|
||||
post = BlogPost.objects(author=m1).first()
|
||||
self.assertEqual(post.id, post1.id)
|
||||
assert post.id == post1.id
|
||||
|
||||
post = BlogPost.objects(author=m2).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
def test_reference_query_conversion_dbref(self):
|
||||
"""Ensure that ReferenceFields can be queried using objects and values
|
||||
of the type of the primary key of the referenced object.
|
||||
"""
|
||||
|
||||
class Member(Document):
|
||||
user_num = IntField(primary_key=True)
|
||||
|
||||
@@ -206,14 +191,14 @@ class TestReferenceField(MongoDBTestCase):
|
||||
m2 = Member(user_num=2)
|
||||
m2.save()
|
||||
|
||||
post1 = BlogPost(title='post 1', author=m1)
|
||||
post1 = BlogPost(title="post 1", author=m1)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(title='post 2', author=m2)
|
||||
post2 = BlogPost(title="post 2", author=m2)
|
||||
post2.save()
|
||||
|
||||
post = BlogPost.objects(author=m1).first()
|
||||
self.assertEqual(post.id, post1.id)
|
||||
assert post.id == post1.id
|
||||
|
||||
post = BlogPost.objects(author=m2).first()
|
||||
self.assertEqual(post.id, post2.id)
|
||||
assert post.id == post2.id
|
||||
|
||||
@@ -11,79 +11,79 @@ class TestSequenceField(MongoDBTestCase):
|
||||
id = SequenceField(primary_key=True)
|
||||
name = StringField()
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Person.drop_collection()
|
||||
|
||||
for x in range(10):
|
||||
Person(name="Person %s" % x).save()
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
ids = [i.id for i in Person.objects]
|
||||
self.assertEqual(ids, range(1, 11))
|
||||
assert ids == list(range(1, 11))
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
Person.id.set_next_value(1000)
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 1000)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 1000
|
||||
|
||||
def test_sequence_field_get_next_value(self):
|
||||
class Person(Document):
|
||||
id = SequenceField(primary_key=True)
|
||||
name = StringField()
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Person.drop_collection()
|
||||
|
||||
for x in range(10):
|
||||
Person(name="Person %s" % x).save()
|
||||
|
||||
self.assertEqual(Person.id.get_next_value(), 11)
|
||||
self.db['mongoengine.counters'].drop()
|
||||
assert Person.id.get_next_value() == 11
|
||||
self.db["mongoengine.counters"].drop()
|
||||
|
||||
self.assertEqual(Person.id.get_next_value(), 1)
|
||||
assert Person.id.get_next_value() == 1
|
||||
|
||||
class Person(Document):
|
||||
id = SequenceField(primary_key=True, value_decorator=str)
|
||||
name = StringField()
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Person.drop_collection()
|
||||
|
||||
for x in range(10):
|
||||
Person(name="Person %s" % x).save()
|
||||
|
||||
self.assertEqual(Person.id.get_next_value(), '11')
|
||||
self.db['mongoengine.counters'].drop()
|
||||
assert Person.id.get_next_value() == "11"
|
||||
self.db["mongoengine.counters"].drop()
|
||||
|
||||
self.assertEqual(Person.id.get_next_value(), '1')
|
||||
assert Person.id.get_next_value() == "1"
|
||||
|
||||
def test_sequence_field_sequence_name(self):
|
||||
class Person(Document):
|
||||
id = SequenceField(primary_key=True, sequence_name='jelly')
|
||||
id = SequenceField(primary_key=True, sequence_name="jelly")
|
||||
name = StringField()
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Person.drop_collection()
|
||||
|
||||
for x in range(10):
|
||||
Person(name="Person %s" % x).save()
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
ids = [i.id for i in Person.objects]
|
||||
self.assertEqual(ids, range(1, 11))
|
||||
assert ids == list(range(1, 11))
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
Person.id.set_next_value(1000)
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
|
||||
self.assertEqual(c['next'], 1000)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
|
||||
assert c["next"] == 1000
|
||||
|
||||
def test_multiple_sequence_fields(self):
|
||||
class Person(Document):
|
||||
@@ -91,56 +91,56 @@ class TestSequenceField(MongoDBTestCase):
|
||||
counter = SequenceField()
|
||||
name = StringField()
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Person.drop_collection()
|
||||
|
||||
for x in range(10):
|
||||
Person(name="Person %s" % x).save()
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
ids = [i.id for i in Person.objects]
|
||||
self.assertEqual(ids, range(1, 11))
|
||||
assert ids == list(range(1, 11))
|
||||
|
||||
counters = [i.counter for i in Person.objects]
|
||||
self.assertEqual(counters, range(1, 11))
|
||||
assert counters == list(range(1, 11))
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
Person.id.set_next_value(1000)
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 1000)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 1000
|
||||
|
||||
Person.counter.set_next_value(999)
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'})
|
||||
self.assertEqual(c['next'], 999)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"})
|
||||
assert c["next"] == 999
|
||||
|
||||
def test_sequence_fields_reload(self):
|
||||
class Animal(Document):
|
||||
counter = SequenceField()
|
||||
name = StringField()
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Animal.drop_collection()
|
||||
|
||||
a = Animal(name="Boi").save()
|
||||
|
||||
self.assertEqual(a.counter, 1)
|
||||
assert a.counter == 1
|
||||
a.reload()
|
||||
self.assertEqual(a.counter, 1)
|
||||
assert a.counter == 1
|
||||
|
||||
a.counter = None
|
||||
self.assertEqual(a.counter, 2)
|
||||
assert a.counter == 2
|
||||
a.save()
|
||||
|
||||
self.assertEqual(a.counter, 2)
|
||||
assert a.counter == 2
|
||||
|
||||
a = Animal.objects.first()
|
||||
self.assertEqual(a.counter, 2)
|
||||
assert a.counter == 2
|
||||
a.reload()
|
||||
self.assertEqual(a.counter, 2)
|
||||
assert a.counter == 2
|
||||
|
||||
def test_multiple_sequence_fields_on_docs(self):
|
||||
class Animal(Document):
|
||||
@@ -151,7 +151,7 @@ class TestSequenceField(MongoDBTestCase):
|
||||
id = SequenceField(primary_key=True)
|
||||
name = StringField()
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Animal.drop_collection()
|
||||
Person.drop_collection()
|
||||
|
||||
@@ -159,44 +159,44 @@ class TestSequenceField(MongoDBTestCase):
|
||||
Animal(name="Animal %s" % x).save()
|
||||
Person(name="Person %s" % x).save()
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
ids = [i.id for i in Person.objects]
|
||||
self.assertEqual(ids, range(1, 11))
|
||||
assert ids == list(range(1, 11))
|
||||
|
||||
id = [i.id for i in Animal.objects]
|
||||
self.assertEqual(id, range(1, 11))
|
||||
assert id == list(range(1, 11))
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
def test_sequence_field_value_decorator(self):
|
||||
class Person(Document):
|
||||
id = SequenceField(primary_key=True, value_decorator=str)
|
||||
name = StringField()
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Person.drop_collection()
|
||||
|
||||
for x in range(10):
|
||||
p = Person(name="Person %s" % x)
|
||||
p.save()
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
ids = [i.id for i in Person.objects]
|
||||
self.assertEqual(ids, map(str, range(1, 11)))
|
||||
assert ids == [str(i) for i in range(1, 11)]
|
||||
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
|
||||
self.assertEqual(c['next'], 10)
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
|
||||
assert c["next"] == 10
|
||||
|
||||
def test_embedded_sequence_field(self):
|
||||
class Comment(EmbeddedDocument):
|
||||
@@ -207,23 +207,27 @@ class TestSequenceField(MongoDBTestCase):
|
||||
title = StringField(required=True)
|
||||
comments = ListField(EmbeddedDocumentField(Comment))
|
||||
|
||||
self.db['mongoengine.counters'].drop()
|
||||
self.db["mongoengine.counters"].drop()
|
||||
Post.drop_collection()
|
||||
|
||||
Post(title="MongoEngine",
|
||||
comments=[Comment(content="NoSQL Rocks"),
|
||||
Comment(content="MongoEngine Rocks")]).save()
|
||||
c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'})
|
||||
self.assertEqual(c['next'], 2)
|
||||
Post(
|
||||
title="MongoEngine",
|
||||
comments=[
|
||||
Comment(content="NoSQL Rocks"),
|
||||
Comment(content="MongoEngine Rocks"),
|
||||
],
|
||||
).save()
|
||||
c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"})
|
||||
assert c["next"] == 2
|
||||
post = Post.objects.first()
|
||||
self.assertEqual(1, post.comments[0].id)
|
||||
self.assertEqual(2, post.comments[1].id)
|
||||
assert 1 == post.comments[0].id
|
||||
assert 2 == post.comments[1].id
|
||||
|
||||
def test_inherited_sequencefield(self):
|
||||
class Base(Document):
|
||||
name = StringField()
|
||||
counter = SequenceField()
|
||||
meta = {'abstract': True}
|
||||
meta = {"abstract": True}
|
||||
|
||||
class Foo(Base):
|
||||
pass
|
||||
@@ -231,24 +235,25 @@ class TestSequenceField(MongoDBTestCase):
|
||||
class Bar(Base):
|
||||
pass
|
||||
|
||||
bar = Bar(name='Bar')
|
||||
bar = Bar(name="Bar")
|
||||
bar.save()
|
||||
|
||||
foo = Foo(name='Foo')
|
||||
foo = Foo(name="Foo")
|
||||
foo.save()
|
||||
|
||||
self.assertTrue('base.counter' in
|
||||
self.db['mongoengine.counters'].find().distinct('_id'))
|
||||
self.assertFalse(('foo.counter' or 'bar.counter') in
|
||||
self.db['mongoengine.counters'].find().distinct('_id'))
|
||||
self.assertNotEqual(foo.counter, bar.counter)
|
||||
self.assertEqual(foo._fields['counter'].owner_document, Base)
|
||||
self.assertEqual(bar._fields['counter'].owner_document, Base)
|
||||
assert "base.counter" in self.db["mongoengine.counters"].find().distinct("_id")
|
||||
existing_counters = self.db["mongoengine.counters"].find().distinct("_id")
assert "foo.counter" not in existing_counters
assert "bar.counter" not in existing_counters
|
||||
assert foo.counter != bar.counter
|
||||
assert foo._fields["counter"].owner_document == Base
|
||||
assert bar._fields["counter"].owner_document == Base
|
||||
|
||||
def test_no_inherited_sequencefield(self):
|
||||
class Base(Document):
|
||||
name = StringField()
|
||||
meta = {'abstract': True}
|
||||
meta = {"abstract": True}
|
||||
|
||||
class Foo(Base):
|
||||
counter = SequenceField()
|
||||
@@ -256,16 +261,18 @@ class TestSequenceField(MongoDBTestCase):
|
||||
class Bar(Base):
|
||||
counter = SequenceField()
|
||||
|
||||
bar = Bar(name='Bar')
|
||||
bar = Bar(name="Bar")
|
||||
bar.save()
|
||||
|
||||
foo = Foo(name='Foo')
|
||||
foo = Foo(name="Foo")
|
||||
foo.save()
|
||||
|
||||
self.assertFalse('base.counter' in
|
||||
self.db['mongoengine.counters'].find().distinct('_id'))
|
||||
self.assertTrue(('foo.counter' and 'bar.counter') in
|
||||
self.db['mongoengine.counters'].find().distinct('_id'))
|
||||
self.assertEqual(foo.counter, bar.counter)
|
||||
self.assertEqual(foo._fields['counter'].owner_document, Foo)
|
||||
self.assertEqual(bar._fields['counter'].owner_document, Bar)
|
||||
assert "base.counter" not in self.db["mongoengine.counters"].find().distinct(
|
||||
"_id"
|
||||
)
|
||||
existing_counters = self.db["mongoengine.counters"].find().distinct("_id")
|
||||
assert "foo.counter" in existing_counters
|
||||
assert "bar.counter" in existing_counters
|
||||
assert foo.counter == bar.counter
|
||||
assert foo._fields["counter"].owner_document == Foo
|
||||
assert bar._fields["counter"].owner_document == Bar
|
||||
|
||||
@@ -1,53 +1,60 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from mongoengine import *
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestURLField(MongoDBTestCase):
|
||||
|
||||
def test_validation(self):
|
||||
"""Ensure that URLFields validate urls properly."""
|
||||
|
||||
class Link(Document):
|
||||
url = URLField()
|
||||
|
||||
link = Link()
|
||||
link.url = 'google'
|
||||
self.assertRaises(ValidationError, link.validate)
|
||||
link.url = "google"
|
||||
with pytest.raises(ValidationError):
|
||||
link.validate()
|
||||
|
||||
link.url = 'http://www.google.com:8080'
|
||||
link.url = "http://www.google.com:8080"
|
||||
link.validate()
|
||||
|
||||
def test_unicode_url_validation(self):
|
||||
"""Ensure unicode URLs are validated properly."""
|
||||
|
||||
class Link(Document):
|
||||
url = URLField()
|
||||
|
||||
link = Link()
|
||||
link.url = u'http://привет.com'
|
||||
link.url = u"http://привет.com"
|
||||
|
||||
# TODO fix URL validation - this *IS* a valid URL
|
||||
# For now we just want to make sure that the error message is correct
|
||||
with self.assertRaises(ValidationError) as ctx_err:
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
link.validate()
|
||||
self.assertEqual(unicode(ctx_err.exception),
|
||||
u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])")
|
||||
assert (
|
||||
str(exc_info.value)
|
||||
== u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
|
||||
)
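# Sketch of the pytest.raises pattern used above for checking error text: the
# context manager yields an ExceptionInfo object whose .value attribute is the
# raised exception. Bookmark is a hypothetical document; assumes a local
# "mongoenginetest" database.
import pytest
from mongoengine import Document, URLField, ValidationError, connect

connect(db="mongoenginetest")


class Bookmark(Document):
    url = URLField()


bookmark = Bookmark(url="not-a-url")
with pytest.raises(ValidationError) as exc_info:
    bookmark.validate()
assert "Invalid URL" in str(exc_info.value)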
|
||||
|
||||
def test_url_scheme_validation(self):
|
||||
"""Ensure that URLFields validate urls with specific schemes properly.
|
||||
"""
|
||||
|
||||
class Link(Document):
|
||||
url = URLField()
|
||||
|
||||
class SchemeLink(Document):
|
||||
url = URLField(schemes=['ws', 'irc'])
|
||||
url = URLField(schemes=["ws", "irc"])
|
||||
|
||||
link = Link()
|
||||
link.url = 'ws://google.com'
|
||||
self.assertRaises(ValidationError, link.validate)
|
||||
link.url = "ws://google.com"
|
||||
with pytest.raises(ValidationError):
|
||||
link.validate()
|
||||
|
||||
scheme_link = SchemeLink()
|
||||
scheme_link.url = 'ws://google.com'
|
||||
scheme_link.url = "ws://google.com"
|
||||
scheme_link.validate()
|
||||
|
||||
def test_underscore_allowed_in_domains_names(self):
|
||||
@@ -55,5 +62,5 @@ class TestURLField(MongoDBTestCase):
|
||||
url = URLField()
|
||||
|
||||
link = Link()
|
||||
link.url = 'https://san_leandro-ca.geebo.com'
|
||||
link.url = "https://san_leandro-ca.geebo.com"
|
||||
link.validate()
|
||||
|
||||
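A short sketch of the assertion style this changeset converts to: unittest's assertRaises/assertEqual calls become plain asserts and pytest.raises context managers. The Link document is the one from the test above; nothing new is assumed beyond pytest itself.

    import pytest
    from mongoengine import Document, URLField, ValidationError

    class Link(Document):
        url = URLField()

    def test_bad_url_raises():
        link = Link(url="google")
        # Old style: self.assertRaises(ValidationError, link.validate)
        # New style: the context manager also exposes the exception instance.
        with pytest.raises(ValidationError) as exc_info:
            link.validate()
        assert "Invalid URL" in str(exc_info.value)
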
@@ -1,8 +1,9 @@
# -*- coding: utf-8 -*-
import uuid

from mongoengine import *
import pytest

from mongoengine import *
from tests.utils import MongoDBTestCase, get_as_pymongo


@@ -14,12 +15,7 @@ class TestUUIDField(MongoDBTestCase):
    def test_storage(self):
        uid = uuid.uuid4()
        person = Person(api_key=uid).save()
        self.assertEqual(
            get_as_pymongo(person),
            {'_id': person.id,
             'api_key': str(uid)
             }
        )
        assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)}

    def test_field_string(self):
        """Test UUID fields storing as String
@@ -28,8 +24,8 @@ class TestUUIDField(MongoDBTestCase):

        uu = uuid.uuid4()
        Person(api_key=uu).save()
        self.assertEqual(1, Person.objects(api_key=uu).count())
        self.assertEqual(uu, Person.objects.first().api_key)
        assert 1 == Person.objects(api_key=uu).count()
        assert uu == Person.objects.first().api_key

        person = Person()
        valid = (uuid.uuid4(), uuid.uuid1())
@@ -37,11 +33,14 @@ class TestUUIDField(MongoDBTestCase):
            person.api_key = api_key
            person.validate()

        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
                   '9d159858-549b-4975-9f98-dd2f987c113')
        invalid = (
            "9d159858-549b-4975-9f98-dd2f987c113g",
            "9d159858-549b-4975-9f98-dd2f987c113",
        )
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)
            with pytest.raises(ValidationError):
                person.validate()

    def test_field_binary(self):
        """Test UUID fields storing as Binary object."""
@@ -49,8 +48,8 @@ class TestUUIDField(MongoDBTestCase):

        uu = uuid.uuid4()
        Person(api_key=uu).save()
        self.assertEqual(1, Person.objects(api_key=uu).count())
        self.assertEqual(uu, Person.objects.first().api_key)
        assert 1 == Person.objects(api_key=uu).count()
        assert uu == Person.objects.first().api_key

        person = Person()
        valid = (uuid.uuid4(), uuid.uuid1())
@@ -58,8 +57,11 @@ class TestUUIDField(MongoDBTestCase):
            person.api_key = api_key
            person.validate()

        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
                   '9d159858-549b-4975-9f98-dd2f987c113')
        invalid = (
            "9d159858-549b-4975-9f98-dd2f987c113g",
            "9d159858-549b-4975-9f98-dd2f987c113",
        )
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)
            with pytest.raises(ValidationError):
                person.validate()

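The two tests above exercise UUID storage modes. As a hedged sketch (the Person fixture definitions sit outside these hunks, and the `binary` keyword shown here is an assumption about how those fixtures toggle the behaviour), the difference looks roughly like this:

    import uuid
    from mongoengine import Document, UUIDField, connect

    connect(db="mongoenginetest")

    class StringKeyed(Document):
        # assumed: binary=False stores the UUID as its string form
        api_key = UUIDField(binary=False)

    class BinaryKeyed(Document):
        # assumed: the default stores the UUID as a BSON Binary value
        api_key = UUIDField(binary=True)

    uid = uuid.uuid4()
    StringKeyed(api_key=uid).save()
    BinaryKeyed(api_key=uid).save()
    # Both round-trip to the same uuid.UUID on read; only the raw
    # representation stored in MongoDB differs.
    assert StringKeyed.objects.first().api_key == uid
    assert BinaryKeyed.objects.first().api_key == uid
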
@@ -11,7 +11,7 @@ class PickleEmbedded(EmbeddedDocument):

class PickleTest(Document):
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    string = StringField(choices=(("One", "1"), ("Two", "2")))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())
    photo = FileField()
@@ -19,7 +19,7 @@ class PickleTest(Document):

class NewDocumentPickleTest(Document):
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    string = StringField(choices=(("One", "1"), ("Two", "2")))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())
    photo = FileField()
@@ -36,17 +36,17 @@ class PickleDynamicTest(DynamicDocument):

class PickleSignalsTest(Document):
    number = IntField()
    string = StringField(choices=(('One', '1'), ('Two', '2')))
    string = StringField(choices=(("One", "1"), ("Two", "2")))
    embedded = EmbeddedDocumentField(PickleEmbedded)
    lists = ListField(StringField())

    @classmethod
    def post_save(self, sender, document, created, **kwargs):
        pickled = pickle.dumps(document)
        pickle.dumps(document)

    @classmethod
    def post_delete(self, sender, document, **kwargs):
        pickled = pickle.dumps(document)
        pickle.dumps(document)


signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest)
@@ -58,4 +58,4 @@ class Mixin(object):


class Base(Document):
    meta = {'allow_inheritance': True}
    meta = {"allow_inheritance": True}

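These fixtures exist to check that documents survive a pickle round trip, including from inside save/delete signal handlers. A minimal sketch of the property being exercised (PickleTest is the fixture defined above; only a running test database is assumed):

    import pickle

    doc = PickleTest(number=1, string="One", lists=["a", "b"]).save()
    # Serialising and restoring should preserve the field values.
    restored = pickle.loads(pickle.dumps(doc))
    assert restored.number == 1
    assert restored.string == "One"
    assert restored.lists == ["a", "b"]
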
@@ -1,6 +0,0 @@
from .transform import *
from .field_list import *
from .queryset import *
from .visitor import *
from .geo import *
from .modify import *

@@ -1,440 +0,0 @@
|
||||
import unittest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.queryset import QueryFieldList
|
||||
|
||||
__all__ = ("QueryFieldListTest", "OnlyExcludeAllTest")
|
||||
|
||||
|
||||
class QueryFieldListTest(unittest.TestCase):
|
||||
|
||||
def test_empty(self):
|
||||
q = QueryFieldList()
|
||||
self.assertFalse(q)
|
||||
|
||||
q = QueryFieldList(always_include=['_cls'])
|
||||
self.assertFalse(q)
|
||||
|
||||
def test_include_include(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True)
|
||||
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1})
|
||||
|
||||
def test_include_exclude(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'a': 1, 'b': 1})
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
|
||||
self.assertEqual(q.as_dict(), {'a': 1})
|
||||
|
||||
def test_exclude_exclude(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
|
||||
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE)
|
||||
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0})
|
||||
|
||||
def test_exclude_include(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE)
|
||||
self.assertEqual(q.as_dict(), {'a': 0, 'b': 0})
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'c': 1})
|
||||
|
||||
def test_always_include(self):
|
||||
q = QueryFieldList(always_include=['x', 'y'])
|
||||
q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
|
||||
|
||||
def test_reset(self):
|
||||
q = QueryFieldList(always_include=['x', 'y'])
|
||||
q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE)
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1})
|
||||
q.reset()
|
||||
self.assertFalse(q)
|
||||
q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY)
|
||||
self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1})
|
||||
|
||||
def test_using_a_slice(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=['a'], value={"$slice": 5})
|
||||
self.assertEqual(q.as_dict(), {'a': {"$slice": 5}})
|
||||
|
||||
|
||||
class OnlyExcludeAllTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
Person.drop_collection()
|
||||
self.Person = Person
|
||||
|
||||
def test_mixing_only_exclude(self):
|
||||
|
||||
class MyDoc(Document):
|
||||
a = StringField()
|
||||
b = StringField()
|
||||
c = StringField()
|
||||
d = StringField()
|
||||
e = StringField()
|
||||
f = StringField()
|
||||
|
||||
include = ['a', 'b', 'c', 'd', 'e']
|
||||
exclude = ['d', 'e']
|
||||
only = ['b', 'c']
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1})
|
||||
qs = qs.only(*only)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
|
||||
qs = qs.exclude(*exclude)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
qs = qs.exclude(*exclude)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
|
||||
qs = qs.only(*only)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
|
||||
|
||||
qs = MyDoc.objects.exclude(*exclude)
|
||||
qs = qs.fields(**{i: 1 for i in include})
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
|
||||
qs = qs.only(*only)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
|
||||
|
||||
def test_slicing(self):
|
||||
|
||||
class MyDoc(Document):
|
||||
a = ListField()
|
||||
b = ListField()
|
||||
c = ListField()
|
||||
d = ListField()
|
||||
e = ListField()
|
||||
f = ListField()
|
||||
|
||||
include = ['a', 'b', 'c', 'd', 'e']
|
||||
exclude = ['d', 'e']
|
||||
only = ['b', 'c']
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
qs = qs.exclude(*exclude)
|
||||
qs = qs.only(*only)
|
||||
qs = qs.fields(slice__b=5)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'b': {'$slice': 5}, 'c': 1})
|
||||
|
||||
qs = qs.fields(slice__c=[5, 1])
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}})
|
||||
|
||||
qs = qs.exclude('c')
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'b': {'$slice': 5}})
|
||||
|
||||
def test_mix_slice_with_other_fields(self):
|
||||
class MyDoc(Document):
|
||||
a = ListField()
|
||||
b = ListField()
|
||||
c = ListField()
|
||||
|
||||
qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
|
||||
self.assertEqual(qs._loaded_fields.as_dict(),
|
||||
{'c': {'$slice': 2}, 'a': 1})
|
||||
|
||||
def test_only(self):
|
||||
"""Ensure that QuerySet.only only returns the requested fields.
|
||||
"""
|
||||
person = self.Person(name='test', age=25)
|
||||
person.save()
|
||||
|
||||
obj = self.Person.objects.only('name').get()
|
||||
self.assertEqual(obj.name, person.name)
|
||||
self.assertEqual(obj.age, None)
|
||||
|
||||
obj = self.Person.objects.only('age').get()
|
||||
self.assertEqual(obj.name, None)
|
||||
self.assertEqual(obj.age, person.age)
|
||||
|
||||
obj = self.Person.objects.only('name', 'age').get()
|
||||
self.assertEqual(obj.name, person.name)
|
||||
self.assertEqual(obj.age, person.age)
|
||||
|
||||
obj = self.Person.objects.only(*('id', 'name',)).get()
|
||||
self.assertEqual(obj.name, person.name)
|
||||
self.assertEqual(obj.age, None)
|
||||
|
||||
# Check polymorphism still works
|
||||
class Employee(self.Person):
|
||||
salary = IntField(db_field='wage')
|
||||
|
||||
employee = Employee(name='test employee', age=40, salary=30000)
|
||||
employee.save()
|
||||
|
||||
obj = self.Person.objects(id=employee.id).only('age').get()
|
||||
self.assertIsInstance(obj, Employee)
|
||||
|
||||
# Check field names are looked up properly
|
||||
obj = Employee.objects(id=employee.id).only('salary').get()
|
||||
self.assertEqual(obj.salary, employee.salary)
|
||||
self.assertEqual(obj.name, None)
|
||||
|
||||
def test_only_with_subfields(self):
|
||||
class User(EmbeddedDocument):
|
||||
name = StringField()
|
||||
email = StringField()
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
title = StringField()
|
||||
text = StringField()
|
||||
|
||||
class VariousData(EmbeddedDocument):
|
||||
some = BooleanField()
|
||||
|
||||
class BlogPost(Document):
|
||||
content = StringField()
|
||||
author = EmbeddedDocumentField(User)
|
||||
comments = ListField(EmbeddedDocumentField(Comment))
|
||||
various = MapField(field=EmbeddedDocumentField(VariousData))
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(content='Had a good coffee today...', various={'test_dynamic': {'some': True}})
|
||||
post.author = User(name='Test User')
|
||||
post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
|
||||
post.save()
|
||||
|
||||
obj = BlogPost.objects.only('author.name',).get()
|
||||
self.assertEqual(obj.content, None)
|
||||
self.assertEqual(obj.author.email, None)
|
||||
self.assertEqual(obj.author.name, 'Test User')
|
||||
self.assertEqual(obj.comments, [])
|
||||
|
||||
obj = BlogPost.objects.only('various.test_dynamic.some').get()
|
||||
self.assertEqual(obj.various["test_dynamic"].some, True)
|
||||
|
||||
obj = BlogPost.objects.only('content', 'comments.title',).get()
|
||||
self.assertEqual(obj.content, 'Had a good coffee today...')
|
||||
self.assertEqual(obj.author, None)
|
||||
self.assertEqual(obj.comments[0].title, 'I aggree')
|
||||
self.assertEqual(obj.comments[1].title, 'Coffee')
|
||||
self.assertEqual(obj.comments[0].text, None)
|
||||
self.assertEqual(obj.comments[1].text, None)
|
||||
|
||||
obj = BlogPost.objects.only('comments',).get()
|
||||
self.assertEqual(obj.content, None)
|
||||
self.assertEqual(obj.author, None)
|
||||
self.assertEqual(obj.comments[0].title, 'I aggree')
|
||||
self.assertEqual(obj.comments[1].title, 'Coffee')
|
||||
self.assertEqual(obj.comments[0].text, 'Great post!')
|
||||
self.assertEqual(obj.comments[1].text, 'I hate coffee')
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_exclude(self):
|
||||
class User(EmbeddedDocument):
|
||||
name = StringField()
|
||||
email = StringField()
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
title = StringField()
|
||||
text = StringField()
|
||||
|
||||
class BlogPost(Document):
|
||||
content = StringField()
|
||||
author = EmbeddedDocumentField(User)
|
||||
comments = ListField(EmbeddedDocumentField(Comment))
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(content='Had a good coffee today...')
|
||||
post.author = User(name='Test User')
|
||||
post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
|
||||
post.save()
|
||||
|
||||
obj = BlogPost.objects.exclude('author', 'comments.text').get()
|
||||
self.assertEqual(obj.author, None)
|
||||
self.assertEqual(obj.content, 'Had a good coffee today...')
|
||||
self.assertEqual(obj.comments[0].title, 'I aggree')
|
||||
self.assertEqual(obj.comments[0].text, None)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_exclude_only_combining(self):
|
||||
class Attachment(EmbeddedDocument):
|
||||
name = StringField()
|
||||
content = StringField()
|
||||
|
||||
class Email(Document):
|
||||
sender = StringField()
|
||||
to = StringField()
|
||||
subject = StringField()
|
||||
body = StringField()
|
||||
content_type = StringField()
|
||||
attachments = ListField(EmbeddedDocumentField(Attachment))
|
||||
|
||||
Email.drop_collection()
|
||||
email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
|
||||
email.attachments = [
|
||||
Attachment(name='file1.doc', content='ABC'),
|
||||
Attachment(name='file2.doc', content='XYZ'),
|
||||
]
|
||||
email.save()
|
||||
|
||||
obj = Email.objects.exclude('content_type').exclude('body').get()
|
||||
self.assertEqual(obj.sender, 'me')
|
||||
self.assertEqual(obj.to, 'you')
|
||||
self.assertEqual(obj.subject, 'From Russia with Love')
|
||||
self.assertEqual(obj.body, None)
|
||||
self.assertEqual(obj.content_type, None)
|
||||
|
||||
obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get()
|
||||
self.assertEqual(obj.sender, None)
|
||||
self.assertEqual(obj.to, 'you')
|
||||
self.assertEqual(obj.subject, None)
|
||||
self.assertEqual(obj.body, None)
|
||||
self.assertEqual(obj.content_type, None)
|
||||
|
||||
obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get()
|
||||
self.assertEqual(obj.attachments[0].name, 'file1.doc')
|
||||
self.assertEqual(obj.attachments[0].content, None)
|
||||
self.assertEqual(obj.sender, None)
|
||||
self.assertEqual(obj.to, 'you')
|
||||
self.assertEqual(obj.subject, None)
|
||||
self.assertEqual(obj.body, None)
|
||||
self.assertEqual(obj.content_type, None)
|
||||
|
||||
Email.drop_collection()
|
||||
|
||||
def test_all_fields(self):
|
||||
|
||||
class Email(Document):
|
||||
sender = StringField()
|
||||
to = StringField()
|
||||
subject = StringField()
|
||||
body = StringField()
|
||||
content_type = StringField()
|
||||
|
||||
Email.drop_collection()
|
||||
|
||||
email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
|
||||
email.save()
|
||||
|
||||
obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get()
|
||||
self.assertEqual(obj.sender, 'me')
|
||||
self.assertEqual(obj.to, 'you')
|
||||
self.assertEqual(obj.subject, 'From Russia with Love')
|
||||
self.assertEqual(obj.body, 'Hello!')
|
||||
self.assertEqual(obj.content_type, 'text/plain')
|
||||
|
||||
Email.drop_collection()
|
||||
|
||||
def test_slicing_fields(self):
|
||||
"""Ensure that query slicing an array works.
|
||||
"""
|
||||
class Numbers(Document):
|
||||
n = ListField(IntField())
|
||||
|
||||
Numbers.drop_collection()
|
||||
|
||||
numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
|
||||
numbers.save()
|
||||
|
||||
# first three
|
||||
numbers = Numbers.objects.fields(slice__n=3).get()
|
||||
self.assertEqual(numbers.n, [0, 1, 2])
|
||||
|
||||
# last three
|
||||
numbers = Numbers.objects.fields(slice__n=-3).get()
|
||||
self.assertEqual(numbers.n, [-3, -2, -1])
|
||||
|
||||
# skip 2, limit 3
|
||||
numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
|
||||
self.assertEqual(numbers.n, [2, 3, 4])
|
||||
|
||||
# skip to fifth from last, limit 4
|
||||
numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
|
||||
self.assertEqual(numbers.n, [-5, -4, -3, -2])
|
||||
|
||||
# skip to fifth from last, limit 10
|
||||
numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
|
||||
self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])
|
||||
|
||||
# skip to fifth from last, limit 10 dict method
|
||||
numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
|
||||
self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])
|
||||
|
||||
def test_slicing_nested_fields(self):
|
||||
"""Ensure that query slicing an embedded array works.
|
||||
"""
|
||||
|
||||
class EmbeddedNumber(EmbeddedDocument):
|
||||
n = ListField(IntField())
|
||||
|
||||
class Numbers(Document):
|
||||
embedded = EmbeddedDocumentField(EmbeddedNumber)
|
||||
|
||||
Numbers.drop_collection()
|
||||
|
||||
numbers = Numbers()
|
||||
numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
|
||||
numbers.save()
|
||||
|
||||
# first three
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=3).get()
|
||||
self.assertEqual(numbers.embedded.n, [0, 1, 2])
|
||||
|
||||
# last three
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
|
||||
self.assertEqual(numbers.embedded.n, [-3, -2, -1])
|
||||
|
||||
# skip 2, limit 3
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
|
||||
self.assertEqual(numbers.embedded.n, [2, 3, 4])
|
||||
|
||||
# skip to fifth from last, limit 4
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
|
||||
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2])
|
||||
|
||||
# skip to fifth from last, limit 10
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
|
||||
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
|
||||
|
||||
# skip to fifth from last, limit 10 dict method
|
||||
numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
|
||||
self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])
|
||||
|
||||
def test_exclude_from_subclasses_docs(self):
|
||||
|
||||
class Base(Document):
|
||||
username = StringField()
|
||||
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
class Anon(Base):
|
||||
anon = BooleanField()
|
||||
|
||||
class User(Base):
|
||||
password = StringField()
|
||||
wibble = StringField()
|
||||
|
||||
Base.drop_collection()
|
||||
User(username="mongodb", password="secret").save()
|
||||
|
||||
user = Base.objects().exclude("password", "wibble").first()
|
||||
self.assertEqual(user.password, None)
|
||||
|
||||
self.assertRaises(LookUpError, Base.objects.exclude, "made_up")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
467
tests/queryset/test_field_list.py
Normal file
467
tests/queryset/test_field_list.py
Normal file
@@ -0,0 +1,467 @@
|
||||
import unittest
|
||||
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.queryset import QueryFieldList
|
||||
|
||||
|
||||
class TestQueryFieldList:
|
||||
def test_empty(self):
|
||||
q = QueryFieldList()
|
||||
assert not q
|
||||
|
||||
q = QueryFieldList(always_include=["_cls"])
|
||||
assert not q
|
||||
|
||||
def test_include_include(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(
|
||||
fields=["a", "b"], value=QueryFieldList.ONLY, _only_called=True
|
||||
)
|
||||
assert q.as_dict() == {"a": 1, "b": 1}
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"a": 1, "b": 1, "c": 1}
|
||||
|
||||
def test_include_exclude(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"a": 1, "b": 1}
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE)
|
||||
assert q.as_dict() == {"a": 1}
|
||||
|
||||
def test_exclude_exclude(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE)
|
||||
assert q.as_dict() == {"a": 0, "b": 0}
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE)
|
||||
assert q.as_dict() == {"a": 0, "b": 0, "c": 0}
|
||||
|
||||
def test_exclude_include(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE)
|
||||
assert q.as_dict() == {"a": 0, "b": 0}
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"c": 1}
|
||||
|
||||
def test_always_include(self):
|
||||
q = QueryFieldList(always_include=["x", "y"])
|
||||
q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE)
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"x": 1, "y": 1, "c": 1}
|
||||
|
||||
def test_reset(self):
|
||||
q = QueryFieldList(always_include=["x", "y"])
|
||||
q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE)
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"x": 1, "y": 1, "c": 1}
|
||||
q.reset()
|
||||
assert not q
|
||||
q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
|
||||
assert q.as_dict() == {"x": 1, "y": 1, "b": 1, "c": 1}
|
||||
|
||||
def test_using_a_slice(self):
|
||||
q = QueryFieldList()
|
||||
q += QueryFieldList(fields=["a"], value={"$slice": 5})
|
||||
assert q.as_dict() == {"a": {"$slice": 5}}
|
||||
|
||||
|
||||
class TestOnlyExcludeAll(unittest.TestCase):
|
||||
def setUp(self):
|
||||
connect(db="mongoenginetest")
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
Person.drop_collection()
|
||||
self.Person = Person
|
||||
|
||||
def test_mixing_only_exclude(self):
|
||||
class MyDoc(Document):
|
||||
a = StringField()
|
||||
b = StringField()
|
||||
c = StringField()
|
||||
d = StringField()
|
||||
e = StringField()
|
||||
f = StringField()
|
||||
|
||||
include = ["a", "b", "c", "d", "e"]
|
||||
exclude = ["d", "e"]
|
||||
only = ["b", "c"]
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1, "d": 1, "e": 1}
|
||||
qs = qs.only(*only)
|
||||
assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1}
|
||||
qs = qs.exclude(*exclude)
|
||||
assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1}
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
qs = qs.exclude(*exclude)
|
||||
assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1}
|
||||
qs = qs.only(*only)
|
||||
assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1}
|
||||
|
||||
qs = MyDoc.objects.exclude(*exclude)
|
||||
qs = qs.fields(**{i: 1 for i in include})
|
||||
assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1}
|
||||
qs = qs.only(*only)
|
||||
assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1}
|
||||
|
||||
def test_slicing(self):
|
||||
class MyDoc(Document):
|
||||
a = ListField()
|
||||
b = ListField()
|
||||
c = ListField()
|
||||
d = ListField()
|
||||
e = ListField()
|
||||
f = ListField()
|
||||
|
||||
include = ["a", "b", "c", "d", "e"]
|
||||
exclude = ["d", "e"]
|
||||
only = ["b", "c"]
|
||||
|
||||
qs = MyDoc.objects.fields(**{i: 1 for i in include})
|
||||
qs = qs.exclude(*exclude)
|
||||
qs = qs.only(*only)
|
||||
qs = qs.fields(slice__b=5)
|
||||
assert qs._loaded_fields.as_dict() == {"b": {"$slice": 5}, "c": 1}
|
||||
|
||||
qs = qs.fields(slice__c=[5, 1])
|
||||
assert qs._loaded_fields.as_dict() == {
|
||||
"b": {"$slice": 5},
|
||||
"c": {"$slice": [5, 1]},
|
||||
}
|
||||
|
||||
qs = qs.exclude("c")
|
||||
assert qs._loaded_fields.as_dict() == {"b": {"$slice": 5}}
|
||||
|
||||
def test_mix_slice_with_other_fields(self):
|
||||
class MyDoc(Document):
|
||||
a = ListField()
|
||||
b = ListField()
|
||||
c = ListField()
|
||||
|
||||
qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
|
||||
assert qs._loaded_fields.as_dict() == {"c": {"$slice": 2}, "a": 1}
|
||||
|
||||
def test_only(self):
|
||||
"""Ensure that QuerySet.only only returns the requested fields.
|
||||
"""
|
||||
person = self.Person(name="test", age=25)
|
||||
person.save()
|
||||
|
||||
obj = self.Person.objects.only("name").get()
|
||||
assert obj.name == person.name
|
||||
assert obj.age is None
|
||||
|
||||
obj = self.Person.objects.only("age").get()
|
||||
assert obj.name is None
|
||||
assert obj.age == person.age
|
||||
|
||||
obj = self.Person.objects.only("name", "age").get()
|
||||
assert obj.name == person.name
|
||||
assert obj.age == person.age
|
||||
|
||||
obj = self.Person.objects.only(*("id", "name")).get()
|
||||
assert obj.name == person.name
|
||||
assert obj.age is None
|
||||
|
||||
# Check polymorphism still works
|
||||
class Employee(self.Person):
|
||||
salary = IntField(db_field="wage")
|
||||
|
||||
employee = Employee(name="test employee", age=40, salary=30000)
|
||||
employee.save()
|
||||
|
||||
obj = self.Person.objects(id=employee.id).only("age").get()
|
||||
assert isinstance(obj, Employee)
|
||||
|
||||
# Check field names are looked up properly
|
||||
obj = Employee.objects(id=employee.id).only("salary").get()
|
||||
assert obj.salary == employee.salary
|
||||
assert obj.name is None
|
||||
|
||||
def test_only_with_subfields(self):
|
||||
class User(EmbeddedDocument):
|
||||
name = StringField()
|
||||
email = StringField()
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
title = StringField()
|
||||
text = StringField()
|
||||
|
||||
class VariousData(EmbeddedDocument):
|
||||
some = BooleanField()
|
||||
|
||||
class BlogPost(Document):
|
||||
content = StringField()
|
||||
author = EmbeddedDocumentField(User)
|
||||
comments = ListField(EmbeddedDocumentField(Comment))
|
||||
various = MapField(field=EmbeddedDocumentField(VariousData))
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(
|
||||
content="Had a good coffee today...",
|
||||
various={"test_dynamic": {"some": True}},
|
||||
)
|
||||
post.author = User(name="Test User")
|
||||
post.comments = [
|
||||
Comment(title="I aggree", text="Great post!"),
|
||||
Comment(title="Coffee", text="I hate coffee"),
|
||||
]
|
||||
post.save()
|
||||
|
||||
obj = BlogPost.objects.only("author.name").get()
|
||||
assert obj.content is None
|
||||
assert obj.author.email is None
|
||||
assert obj.author.name == "Test User"
|
||||
assert obj.comments == []
|
||||
|
||||
obj = BlogPost.objects.only("various.test_dynamic.some").get()
|
||||
assert obj.various["test_dynamic"].some is True
|
||||
|
||||
obj = BlogPost.objects.only("content", "comments.title").get()
|
||||
assert obj.content == "Had a good coffee today..."
|
||||
assert obj.author is None
|
||||
assert obj.comments[0].title == "I aggree"
|
||||
assert obj.comments[1].title == "Coffee"
|
||||
assert obj.comments[0].text is None
|
||||
assert obj.comments[1].text is None
|
||||
|
||||
obj = BlogPost.objects.only("comments").get()
|
||||
assert obj.content is None
|
||||
assert obj.author is None
|
||||
assert obj.comments[0].title == "I aggree"
|
||||
assert obj.comments[1].title == "Coffee"
|
||||
assert obj.comments[0].text == "Great post!"
|
||||
assert obj.comments[1].text == "I hate coffee"
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_exclude(self):
|
||||
class User(EmbeddedDocument):
|
||||
name = StringField()
|
||||
email = StringField()
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
title = StringField()
|
||||
text = StringField()
|
||||
|
||||
class BlogPost(Document):
|
||||
content = StringField()
|
||||
author = EmbeddedDocumentField(User)
|
||||
comments = ListField(EmbeddedDocumentField(Comment))
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post = BlogPost(content="Had a good coffee today...")
|
||||
post.author = User(name="Test User")
|
||||
post.comments = [
|
||||
Comment(title="I aggree", text="Great post!"),
|
||||
Comment(title="Coffee", text="I hate coffee"),
|
||||
]
|
||||
post.save()
|
||||
|
||||
obj = BlogPost.objects.exclude("author", "comments.text").get()
|
||||
assert obj.author is None
|
||||
assert obj.content == "Had a good coffee today..."
|
||||
assert obj.comments[0].title == "I aggree"
|
||||
assert obj.comments[0].text is None
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_exclude_only_combining(self):
|
||||
class Attachment(EmbeddedDocument):
|
||||
name = StringField()
|
||||
content = StringField()
|
||||
|
||||
class Email(Document):
|
||||
sender = StringField()
|
||||
to = StringField()
|
||||
subject = StringField()
|
||||
body = StringField()
|
||||
content_type = StringField()
|
||||
attachments = ListField(EmbeddedDocumentField(Attachment))
|
||||
|
||||
Email.drop_collection()
|
||||
email = Email(
|
||||
sender="me",
|
||||
to="you",
|
||||
subject="From Russia with Love",
|
||||
body="Hello!",
|
||||
content_type="text/plain",
|
||||
)
|
||||
email.attachments = [
|
||||
Attachment(name="file1.doc", content="ABC"),
|
||||
Attachment(name="file2.doc", content="XYZ"),
|
||||
]
|
||||
email.save()
|
||||
|
||||
obj = Email.objects.exclude("content_type").exclude("body").get()
|
||||
assert obj.sender == "me"
|
||||
assert obj.to == "you"
|
||||
assert obj.subject == "From Russia with Love"
|
||||
assert obj.body is None
|
||||
assert obj.content_type is None
|
||||
|
||||
obj = Email.objects.only("sender", "to").exclude("body", "sender").get()
|
||||
assert obj.sender is None
|
||||
assert obj.to == "you"
|
||||
assert obj.subject is None
|
||||
assert obj.body is None
|
||||
assert obj.content_type is None
|
||||
|
||||
obj = (
|
||||
Email.objects.exclude("attachments.content")
|
||||
.exclude("body")
|
||||
.only("to", "attachments.name")
|
||||
.get()
|
||||
)
|
||||
assert obj.attachments[0].name == "file1.doc"
|
||||
assert obj.attachments[0].content is None
|
||||
assert obj.sender is None
|
||||
assert obj.to == "you"
|
||||
assert obj.subject is None
|
||||
assert obj.body is None
|
||||
assert obj.content_type is None
|
||||
|
||||
Email.drop_collection()
|
||||
|
||||
def test_all_fields(self):
|
||||
class Email(Document):
|
||||
sender = StringField()
|
||||
to = StringField()
|
||||
subject = StringField()
|
||||
body = StringField()
|
||||
content_type = StringField()
|
||||
|
||||
Email.drop_collection()
|
||||
|
||||
email = Email(
|
||||
sender="me",
|
||||
to="you",
|
||||
subject="From Russia with Love",
|
||||
body="Hello!",
|
||||
content_type="text/plain",
|
||||
)
|
||||
email.save()
|
||||
|
||||
obj = (
|
||||
Email.objects.exclude("content_type", "body")
|
||||
.only("to", "body")
|
||||
.all_fields()
|
||||
.get()
|
||||
)
|
||||
assert obj.sender == "me"
|
||||
assert obj.to == "you"
|
||||
assert obj.subject == "From Russia with Love"
|
||||
assert obj.body == "Hello!"
|
||||
assert obj.content_type == "text/plain"
|
||||
|
||||
Email.drop_collection()
|
||||
|
||||
def test_slicing_fields(self):
|
||||
"""Ensure that query slicing an array works.
|
||||
"""
|
||||
|
||||
class Numbers(Document):
|
||||
n = ListField(IntField())
|
||||
|
||||
Numbers.drop_collection()
|
||||
|
||||
numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
|
||||
numbers.save()
|
||||
|
||||
# first three
|
||||
numbers = Numbers.objects.fields(slice__n=3).get()
|
||||
assert numbers.n == [0, 1, 2]
|
||||
|
||||
# last three
|
||||
numbers = Numbers.objects.fields(slice__n=-3).get()
|
||||
assert numbers.n == [-3, -2, -1]
|
||||
|
||||
# skip 2, limit 3
|
||||
numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
|
||||
assert numbers.n == [2, 3, 4]
|
||||
|
||||
# skip to fifth from last, limit 4
|
||||
numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
|
||||
assert numbers.n == [-5, -4, -3, -2]
|
||||
|
||||
# skip to fifth from last, limit 10
|
||||
numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
|
||||
assert numbers.n == [-5, -4, -3, -2, -1]
|
||||
|
||||
# skip to fifth from last, limit 10 dict method
|
||||
numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
|
||||
assert numbers.n == [-5, -4, -3, -2, -1]
|
||||
|
||||
def test_slicing_nested_fields(self):
|
||||
"""Ensure that query slicing an embedded array works.
|
||||
"""
|
||||
|
||||
class EmbeddedNumber(EmbeddedDocument):
|
||||
n = ListField(IntField())
|
||||
|
||||
class Numbers(Document):
|
||||
embedded = EmbeddedDocumentField(EmbeddedNumber)
|
||||
|
||||
Numbers.drop_collection()
|
||||
|
||||
numbers = Numbers()
|
||||
numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
|
||||
numbers.save()
|
||||
|
||||
# first three
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=3).get()
|
||||
assert numbers.embedded.n == [0, 1, 2]
|
||||
|
||||
# last three
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
|
||||
assert numbers.embedded.n == [-3, -2, -1]
|
||||
|
||||
# skip 2, limit 3
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
|
||||
assert numbers.embedded.n == [2, 3, 4]
|
||||
|
||||
# skip to fifth from last, limit 4
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
|
||||
assert numbers.embedded.n == [-5, -4, -3, -2]
|
||||
|
||||
# skip to fifth from last, limit 10
|
||||
numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
|
||||
assert numbers.embedded.n == [-5, -4, -3, -2, -1]
|
||||
|
||||
# skip to fifth from last, limit 10 dict method
|
||||
numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
|
||||
assert numbers.embedded.n == [-5, -4, -3, -2, -1]
|
||||
|
||||
def test_exclude_from_subclasses_docs(self):
|
||||
class Base(Document):
|
||||
username = StringField()
|
||||
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
class Anon(Base):
|
||||
anon = BooleanField()
|
||||
|
||||
class User(Base):
|
||||
password = StringField()
|
||||
wibble = StringField()
|
||||
|
||||
Base.drop_collection()
|
||||
User(username="mongodb", password="secret").save()
|
||||
|
||||
user = Base.objects().exclude("password", "wibble").first()
|
||||
assert user.password is None
|
||||
|
||||
with pytest.raises(LookUpError):
|
||||
Base.objects.exclude("made_up")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
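The file above exercises queryset projections (only/exclude/fields and the $slice modifier). A compact sketch of the behaviour those tests pin down, using a hypothetical BlogEntry document (only a test database is assumed):

    from mongoengine import Document, StringField, ListField, connect

    connect(db="mongoenginetest")

    class BlogEntry(Document):
        title = StringField()
        body = StringField()
        tags = ListField(StringField())

    BlogEntry(title="hi", body="long text", tags=["a", "b", "c"]).save()

    # only() loads just the named fields; the rest come back unset (None).
    entry = BlogEntry.objects.only("title").first()
    assert entry.title == "hi" and entry.body is None

    # exclude() is the complement of only().
    entry = BlogEntry.objects.exclude("body").first()
    assert entry.body is None and entry.title == "hi"

    # fields(slice__...) maps to MongoDB's $slice projection operator.
    entry = BlogEntry.objects.fields(slice__tags=2).first()
    assert entry.tags == ["a", "b"]
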
@@ -6,13 +6,10 @@ from mongoengine import *
from tests.utils import MongoDBTestCase


__all__ = ("GeoQueriesTest",)


class GeoQueriesTest(MongoDBTestCase):

class TestGeoQueries(MongoDBTestCase):
    def _create_event_data(self, point_field_class=GeoPointField):
        """Create some sample data re-used in many of the tests below."""

        class Event(Document):
            title = StringField()
            date = DateTimeField()
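The geo tests that follow revolve around a handful of query operators (near, within_distance, geo_within_box, and so on). A minimal sketch of the core idiom, using a stripped-down Venue document with a GeoPointField, which is a legacy 2d-index point stored as [longitude, latitude]:

    from mongoengine import Document, GeoPointField, StringField, connect

    connect(db="mongoenginetest")

    class Venue(Document):
        name = StringField()
        location = GeoPointField()  # [lng, lat]

    Venue(name="Double Door", location=[-87.677137, 41.909889]).save()
    Venue(name="Bottom of the Hill", location=[-122.4194155, 37.7749295]).save()

    # "near" sorts by distance; "max_distance" (in degrees for a 2d index)
    # narrows the result set, mirroring the assertions in the tests below.
    nearby = Venue.objects(location__near=[-87.67892, 41.9120459],
                           location__max_distance=10)
    assert nearby.count() == 1
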
@@ -28,15 +25,18 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
event1 = Event.objects.create(
|
||||
title="Coltrane Motion @ Double Door",
|
||||
date=datetime.datetime.now() - datetime.timedelta(days=1),
|
||||
location=[-87.677137, 41.909889])
|
||||
location=[-87.677137, 41.909889],
|
||||
)
|
||||
event2 = Event.objects.create(
|
||||
title="Coltrane Motion @ Bottom of the Hill",
|
||||
date=datetime.datetime.now() - datetime.timedelta(days=10),
|
||||
location=[-122.4194155, 37.7749295])
|
||||
location=[-122.4194155, 37.7749295],
|
||||
)
|
||||
event3 = Event.objects.create(
|
||||
title="Coltrane Motion @ Empty Bottle",
|
||||
date=datetime.datetime.now(),
|
||||
location=[-87.686638, 41.900474])
|
||||
location=[-87.686638, 41.900474],
|
||||
)
|
||||
|
||||
return event1, event2, event3
|
||||
|
||||
@@ -48,14 +48,14 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
# note that "near" will show the san francisco event, too,
|
||||
# although it sorts to last.
|
||||
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
|
||||
self.assertEqual(events.count(), 3)
|
||||
self.assertEqual(list(events), [event1, event3, event2])
|
||||
assert events.count() == 3
|
||||
assert list(events) == [event1, event3, event2]
|
||||
|
||||
# ensure ordering is respected by "near"
|
||||
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
|
||||
events = events.order_by("-date")
|
||||
self.assertEqual(events.count(), 3)
|
||||
self.assertEqual(list(events), [event3, event1, event2])
|
||||
assert events.count() == 3
|
||||
assert list(events) == [event3, event1, event2]
|
||||
|
||||
def test_near_and_max_distance(self):
|
||||
"""Ensure the "max_distance" operator works alongside the "near"
|
||||
@@ -65,10 +65,9 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
|
||||
# find events within 10 degrees of san francisco
|
||||
point = [-122.415579, 37.7566023]
|
||||
events = self.Event.objects(location__near=point,
|
||||
location__max_distance=10)
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0], event2)
|
||||
events = self.Event.objects(location__near=point, location__max_distance=10)
|
||||
assert events.count() == 1
|
||||
assert events[0] == event2
|
||||
|
||||
def test_near_and_min_distance(self):
|
||||
"""Ensure the "min_distance" operator works alongside the "near"
|
||||
@@ -78,9 +77,8 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
|
||||
# find events at least 10 degrees away of san francisco
|
||||
point = [-122.415579, 37.7566023]
|
||||
events = self.Event.objects(location__near=point,
|
||||
location__min_distance=10)
|
||||
self.assertEqual(events.count(), 2)
|
||||
events = self.Event.objects(location__near=point, location__min_distance=10)
|
||||
assert events.count() == 2
|
||||
|
||||
def test_within_distance(self):
|
||||
"""Make sure the "within_distance" operator works."""
|
||||
@@ -88,34 +86,30 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
|
||||
# find events within 5 degrees of pitchfork office, chicago
|
||||
point_and_distance = [[-87.67892, 41.9120459], 5]
|
||||
events = self.Event.objects(
|
||||
location__within_distance=point_and_distance)
|
||||
self.assertEqual(events.count(), 2)
|
||||
events = self.Event.objects(location__within_distance=point_and_distance)
|
||||
assert events.count() == 2
|
||||
events = list(events)
|
||||
self.assertNotIn(event2, events)
|
||||
self.assertIn(event1, events)
|
||||
self.assertIn(event3, events)
|
||||
assert event2 not in events
|
||||
assert event1 in events
|
||||
assert event3 in events
|
||||
|
||||
# find events within 10 degrees of san francisco
|
||||
point_and_distance = [[-122.415579, 37.7566023], 10]
|
||||
events = self.Event.objects(
|
||||
location__within_distance=point_and_distance)
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0], event2)
|
||||
events = self.Event.objects(location__within_distance=point_and_distance)
|
||||
assert events.count() == 1
|
||||
assert events[0] == event2
|
||||
|
||||
# find events within 1 degree of greenpoint, broolyn, nyc, ny
|
||||
point_and_distance = [[-73.9509714, 40.7237134], 1]
|
||||
events = self.Event.objects(
|
||||
location__within_distance=point_and_distance)
|
||||
self.assertEqual(events.count(), 0)
|
||||
events = self.Event.objects(location__within_distance=point_and_distance)
|
||||
assert events.count() == 0
|
||||
|
||||
# ensure ordering is respected by "within_distance"
|
||||
point_and_distance = [[-87.67892, 41.9120459], 10]
|
||||
events = self.Event.objects(
|
||||
location__within_distance=point_and_distance)
|
||||
events = self.Event.objects(location__within_distance=point_and_distance)
|
||||
events = events.order_by("-date")
|
||||
self.assertEqual(events.count(), 2)
|
||||
self.assertEqual(events[0], event3)
|
||||
assert events.count() == 2
|
||||
assert events[0] == event3
|
||||
|
||||
def test_within_box(self):
|
||||
"""Ensure the "within_box" operator works."""
|
||||
@@ -124,8 +118,8 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
# check that within_box works
|
||||
box = [(-125.0, 35.0), (-100.0, 40.0)]
|
||||
events = self.Event.objects(location__within_box=box)
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0].id, event2.id)
|
||||
assert events.count() == 1
|
||||
assert events[0].id == event2.id
|
||||
|
||||
def test_within_polygon(self):
|
||||
"""Ensure the "within_polygon" operator works."""
|
||||
@@ -139,87 +133,78 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
(-87.656164, 41.898061),
|
||||
]
|
||||
events = self.Event.objects(location__within_polygon=polygon)
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0].id, event1.id)
|
||||
assert events.count() == 1
|
||||
assert events[0].id == event1.id
|
||||
|
||||
polygon2 = [
|
||||
(-1.742249, 54.033586),
|
||||
(-1.225891, 52.792797),
|
||||
(-4.40094, 53.389881)
|
||||
(-4.40094, 53.389881),
|
||||
]
|
||||
events = self.Event.objects(location__within_polygon=polygon2)
|
||||
self.assertEqual(events.count(), 0)
|
||||
assert events.count() == 0
|
||||
|
||||
def test_2dsphere_near(self):
|
||||
"""Make sure the "near" operator works with a PointField, which
|
||||
corresponds to a 2dsphere index.
|
||||
"""
|
||||
event1, event2, event3 = self._create_event_data(
|
||||
point_field_class=PointField
|
||||
)
|
||||
event1, event2, event3 = self._create_event_data(point_field_class=PointField)
|
||||
|
||||
# find all events "near" pitchfork office, chicago.
|
||||
# note that "near" will show the san francisco event, too,
|
||||
# although it sorts to last.
|
||||
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
|
||||
self.assertEqual(events.count(), 3)
|
||||
self.assertEqual(list(events), [event1, event3, event2])
|
||||
assert events.count() == 3
|
||||
assert list(events) == [event1, event3, event2]
|
||||
|
||||
# ensure ordering is respected by "near"
|
||||
events = self.Event.objects(location__near=[-87.67892, 41.9120459])
|
||||
events = events.order_by("-date")
|
||||
self.assertEqual(events.count(), 3)
|
||||
self.assertEqual(list(events), [event3, event1, event2])
|
||||
assert events.count() == 3
|
||||
assert list(events) == [event3, event1, event2]
|
||||
|
||||
def test_2dsphere_near_and_max_distance(self):
|
||||
"""Ensure the "max_distance" operator works alongside the "near"
|
||||
operator with a 2dsphere index.
|
||||
"""
|
||||
event1, event2, event3 = self._create_event_data(
|
||||
point_field_class=PointField
|
||||
)
|
||||
event1, event2, event3 = self._create_event_data(point_field_class=PointField)
|
||||
|
||||
# find events within 10km of san francisco
|
||||
point = [-122.415579, 37.7566023]
|
||||
events = self.Event.objects(location__near=point,
|
||||
location__max_distance=10000)
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0], event2)
|
||||
events = self.Event.objects(location__near=point, location__max_distance=10000)
|
||||
assert events.count() == 1
|
||||
assert events[0] == event2
|
||||
|
||||
# find events within 1km of greenpoint, broolyn, nyc, ny
|
||||
events = self.Event.objects(location__near=[-73.9509714, 40.7237134],
|
||||
location__max_distance=1000)
|
||||
self.assertEqual(events.count(), 0)
|
||||
events = self.Event.objects(
|
||||
location__near=[-73.9509714, 40.7237134], location__max_distance=1000
|
||||
)
|
||||
assert events.count() == 0
|
||||
|
||||
# ensure ordering is respected by "near"
|
||||
events = self.Event.objects(
|
||||
location__near=[-87.67892, 41.9120459],
|
||||
location__max_distance=10000
|
||||
location__near=[-87.67892, 41.9120459], location__max_distance=10000
|
||||
).order_by("-date")
|
||||
self.assertEqual(events.count(), 2)
|
||||
self.assertEqual(events[0], event3)
|
||||
assert events.count() == 2
|
||||
assert events[0] == event3
|
||||
|
||||
def test_2dsphere_geo_within_box(self):
|
||||
"""Ensure the "geo_within_box" operator works with a 2dsphere
|
||||
index.
|
||||
"""
|
||||
event1, event2, event3 = self._create_event_data(
|
||||
point_field_class=PointField
|
||||
)
|
||||
event1, event2, event3 = self._create_event_data(point_field_class=PointField)
|
||||
|
||||
# check that within_box works
|
||||
box = [(-125.0, 35.0), (-100.0, 40.0)]
|
||||
events = self.Event.objects(location__geo_within_box=box)
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0].id, event2.id)
|
||||
assert events.count() == 1
|
||||
assert events[0].id == event2.id
|
||||
|
||||
def test_2dsphere_geo_within_polygon(self):
|
||||
"""Ensure the "geo_within_polygon" operator works with a
|
||||
2dsphere index.
|
||||
"""
|
||||
event1, event2, event3 = self._create_event_data(
|
||||
point_field_class=PointField
|
||||
)
|
||||
event1, event2, event3 = self._create_event_data(point_field_class=PointField)
|
||||
|
||||
polygon = [
|
||||
(-87.694445, 41.912114),
|
||||
@@ -229,64 +214,59 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
(-87.656164, 41.898061),
|
||||
]
|
||||
events = self.Event.objects(location__geo_within_polygon=polygon)
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0].id, event1.id)
|
||||
assert events.count() == 1
|
||||
assert events[0].id == event1.id
|
||||
|
||||
polygon2 = [
|
||||
(-1.742249, 54.033586),
|
||||
(-1.225891, 52.792797),
|
||||
(-4.40094, 53.389881)
|
||||
(-4.40094, 53.389881),
|
||||
]
|
||||
events = self.Event.objects(location__geo_within_polygon=polygon2)
|
||||
self.assertEqual(events.count(), 0)
|
||||
assert events.count() == 0
|
||||
|
||||
def test_2dsphere_near_and_min_max_distance(self):
|
||||
"""Ensure "min_distace" and "max_distance" operators work well
|
||||
together with the "near" operator in a 2dsphere index.
|
||||
"""
|
||||
event1, event2, event3 = self._create_event_data(
|
||||
point_field_class=PointField
|
||||
)
|
||||
event1, event2, event3 = self._create_event_data(point_field_class=PointField)
|
||||
|
||||
# ensure min_distance and max_distance combine well
|
||||
events = self.Event.objects(
|
||||
location__near=[-87.67892, 41.9120459],
|
||||
location__min_distance=1000,
|
||||
location__max_distance=10000
|
||||
location__max_distance=10000,
|
||||
).order_by("-date")
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0], event3)
|
||||
assert events.count() == 1
|
||||
assert events[0] == event3
|
||||
|
||||
# ensure ordering is respected by "near" with "min_distance"
|
||||
events = self.Event.objects(
|
||||
location__near=[-87.67892, 41.9120459],
|
||||
location__min_distance=10000
|
||||
location__near=[-87.67892, 41.9120459], location__min_distance=10000
|
||||
).order_by("-date")
|
||||
self.assertEqual(events.count(), 1)
|
||||
self.assertEqual(events[0], event2)
|
||||
assert events.count() == 1
|
||||
assert events[0] == event2
|
||||
|
||||
def test_2dsphere_geo_within_center(self):
|
||||
"""Make sure the "geo_within_center" operator works with a
|
||||
2dsphere index.
|
||||
"""
|
||||
event1, event2, event3 = self._create_event_data(
|
||||
point_field_class=PointField
|
||||
)
|
||||
event1, event2, event3 = self._create_event_data(point_field_class=PointField)
|
||||
|
||||
# find events within 5 degrees of pitchfork office, chicago
|
||||
point_and_distance = [[-87.67892, 41.9120459], 2]
|
||||
events = self.Event.objects(
|
||||
location__geo_within_center=point_and_distance)
|
||||
self.assertEqual(events.count(), 2)
|
||||
events = self.Event.objects(location__geo_within_center=point_and_distance)
|
||||
assert events.count() == 2
|
||||
events = list(events)
|
||||
self.assertNotIn(event2, events)
|
||||
self.assertIn(event1, events)
|
||||
self.assertIn(event3, events)
|
||||
assert event2 not in events
|
||||
assert event1 in events
|
||||
assert event3 in events
|
||||
|
||||
def _test_embedded(self, point_field_class):
|
||||
"""Helper test method ensuring given point field class works
|
||||
well in an embedded document.
|
||||
"""
|
||||
|
||||
class Venue(EmbeddedDocument):
|
||||
location = point_field_class()
|
||||
name = StringField()
|
||||
@@ -300,19 +280,18 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889])
|
||||
venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295])
|
||||
|
||||
event1 = Event(title="Coltrane Motion @ Double Door",
|
||||
venue=venue1).save()
|
||||
event2 = Event(title="Coltrane Motion @ Bottom of the Hill",
|
||||
venue=venue2).save()
|
||||
event3 = Event(title="Coltrane Motion @ Empty Bottle",
|
||||
venue=venue1).save()
|
||||
event1 = Event(title="Coltrane Motion @ Double Door", venue=venue1).save()
|
||||
event2 = Event(
|
||||
title="Coltrane Motion @ Bottom of the Hill", venue=venue2
|
||||
).save()
|
||||
event3 = Event(title="Coltrane Motion @ Empty Bottle", venue=venue1).save()
|
||||
|
||||
# find all events "near" pitchfork office, chicago.
|
||||
# note that "near" will show the san francisco event, too,
|
||||
# although it sorts to last.
|
||||
events = Event.objects(venue__location__near=[-87.67892, 41.9120459])
|
||||
self.assertEqual(events.count(), 3)
|
||||
self.assertEqual(list(events), [event1, event3, event2])
|
||||
assert events.count() == 3
|
||||
assert list(events) == [event1, event3, event2]
|
||||
|
||||
def test_geo_spatial_embedded(self):
|
||||
"""Make sure GeoPointField works properly in an embedded document."""
|
||||
@@ -324,6 +303,7 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
|
||||
def test_spherical_geospatial_operators(self):
|
||||
"""Ensure that spherical geospatial queries are working."""
|
||||
|
||||
class Point(Document):
|
||||
location = GeoPointField()
|
||||
|
||||
@@ -339,58 +319,55 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
# Finds both points because they are within 60 km of the reference
|
||||
# point equidistant between them.
|
||||
points = Point.objects(location__near_sphere=[-122, 37.5])
|
||||
self.assertEqual(points.count(), 2)
|
||||
assert points.count() == 2
|
||||
|
||||
# Same behavior for _within_spherical_distance
|
||||
points = Point.objects(
|
||||
location__within_spherical_distance=[
|
||||
[-122, 37.5],
|
||||
60 / earth_radius
|
||||
]
|
||||
location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius]
|
||||
)
|
||||
self.assertEqual(points.count(), 2)
|
||||
assert points.count() == 2
|
||||
|
||||
points = Point.objects(location__near_sphere=[-122, 37.5],
|
||||
location__max_distance=60 / earth_radius)
|
||||
self.assertEqual(points.count(), 2)
|
||||
points = Point.objects(
|
||||
location__near_sphere=[-122, 37.5], location__max_distance=60 / earth_radius
|
||||
)
|
||||
assert points.count() == 2
|
||||
|
||||
# Test query works with max_distance, being farer from one point
|
||||
points = Point.objects(location__near_sphere=[-122, 37.8],
|
||||
location__max_distance=60 / earth_radius)
|
||||
points = Point.objects(
|
||||
location__near_sphere=[-122, 37.8], location__max_distance=60 / earth_radius
|
||||
)
|
||||
close_point = points.first()
|
||||
self.assertEqual(points.count(), 1)
|
||||
assert points.count() == 1
|
||||
|
||||
# Test query works with min_distance, being farer from one point
|
||||
points = Point.objects(location__near_sphere=[-122, 37.8],
|
||||
location__min_distance=60 / earth_radius)
|
||||
self.assertEqual(points.count(), 1)
|
||||
points = Point.objects(
|
||||
location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius
|
||||
)
|
||||
assert points.count() == 1
|
||||
far_point = points.first()
|
||||
self.assertNotEqual(close_point, far_point)
|
||||
assert close_point != far_point
|
||||
|
||||
# Finds both points, but orders the north point first because it's
|
||||
# closer to the reference point to the north.
|
||||
points = Point.objects(location__near_sphere=[-122, 38.5])
|
||||
self.assertEqual(points.count(), 2)
|
||||
self.assertEqual(points[0].id, north_point.id)
|
||||
self.assertEqual(points[1].id, south_point.id)
|
||||
assert points.count() == 2
|
||||
assert points[0].id == north_point.id
|
||||
assert points[1].id == south_point.id
|
||||
|
||||
# Finds both points, but orders the south point first because it's
|
||||
# closer to the reference point to the south.
|
||||
points = Point.objects(location__near_sphere=[-122, 36.5])
|
||||
self.assertEqual(points.count(), 2)
|
||||
self.assertEqual(points[0].id, south_point.id)
|
||||
self.assertEqual(points[1].id, north_point.id)
|
||||
assert points.count() == 2
|
||||
assert points[0].id == south_point.id
|
||||
assert points[1].id == north_point.id
|
||||
|
||||
# Finds only one point because only the first point is within 60km of
|
||||
# the reference point to the south.
|
||||
points = Point.objects(
|
||||
location__within_spherical_distance=[
|
||||
[-122, 36.5],
|
||||
60 / earth_radius
|
||||
]
|
||||
location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius]
|
||||
)
|
||||
self.assertEqual(points.count(), 1)
|
||||
self.assertEqual(points[0].id, south_point.id)
|
||||
assert points.count() == 1
|
||||
assert points[0].id == south_point.id
|
||||
|
||||
def test_linestring(self):
|
||||
class Road(Document):
|
||||
@@ -404,48 +381,51 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
# near
|
||||
point = {"type": "Point", "coordinates": [40, 5]}
|
||||
roads = Road.objects.filter(line__near=point["coordinates"]).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(line__near=point).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(line__near={"$geometry": point}).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
# Within
|
||||
polygon = {"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
|
||||
polygon = {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
|
||||
}
|
||||
roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(line__geo_within=polygon).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
# Intersects
|
||||
line = {"type": "LineString",
|
||||
"coordinates": [[40, 5], [40, 6]]}
|
||||
line = {"type": "LineString", "coordinates": [[40, 5], [40, 6]]}
|
||||
roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(line__geo_intersects=line).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
polygon = {"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
|
||||
polygon = {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
|
||||
}
|
||||
roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(line__geo_intersects=polygon).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
def test_polygon(self):
|
||||
class Road(Document):
|
||||
@@ -459,66 +439,66 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
# near
|
||||
point = {"type": "Point", "coordinates": [40, 5]}
|
||||
roads = Road.objects.filter(poly__near=point["coordinates"]).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(poly__near=point).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(poly__near={"$geometry": point}).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
# Within
|
||||
polygon = {"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
|
||||
polygon = {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
|
||||
}
|
||||
roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(poly__geo_within=polygon).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
# Intersects
|
||||
line = {"type": "LineString",
|
||||
"coordinates": [[40, 5], [41, 6]]}
|
||||
line = {"type": "LineString", "coordinates": [[40, 5], [41, 6]]}
|
||||
roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(poly__geo_intersects=line).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
polygon = {"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]}
|
||||
polygon = {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]],
|
||||
}
|
||||
roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(poly__geo_intersects=polygon).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count()
|
||||
self.assertEqual(1, roads)
|
||||
assert 1 == roads
|
||||
|
||||
def test_aspymongo_with_only(self):
|
||||
"""Ensure as_pymongo works with only"""
|
||||
|
||||
class Place(Document):
|
||||
location = PointField()
|
||||
|
||||
Place.drop_collection()
|
||||
p = Place(location=[24.946861267089844, 60.16311983618494])
|
||||
p.save()
|
||||
qs = Place.objects().only('location')
|
||||
self.assertDictEqual(
|
||||
qs.as_pymongo()[0]['location'],
|
||||
{u'type': u'Point',
|
||||
u'coordinates': [
|
||||
24.946861267089844,
|
||||
60.16311983618494]
|
||||
}
|
||||
)
|
||||
qs = Place.objects().only("location")
|
||||
assert qs.as_pymongo()[0]["location"] == {
|
||||
u"type": u"Point",
|
||||
u"coordinates": [24.946861267089844, 60.16311983618494],
|
||||
}
|
||||
|
||||
def test_2dsphere_point_sets_correctly(self):
|
||||
class Location(Document):
|
||||
@@ -528,11 +508,11 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
|
||||
Location(loc=[1, 2]).save()
|
||||
loc = Location.objects.as_pymongo()[0]
|
||||
self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]})
|
||||
assert loc["loc"] == {"type": "Point", "coordinates": [1, 2]}
|
||||
|
||||
Location.objects.update(set__loc=[2, 1])
|
||||
loc = Location.objects.as_pymongo()[0]
|
||||
self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]})
|
||||
assert loc["loc"] == {"type": "Point", "coordinates": [2, 1]}
|
||||
|
||||
def test_2dsphere_linestring_sets_correctly(self):
|
||||
class Location(Document):
|
||||
@@ -542,11 +522,11 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
|
||||
Location(line=[[1, 2], [2, 2]]).save()
|
||||
loc = Location.objects.as_pymongo()[0]
|
||||
self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]})
|
||||
assert loc["line"] == {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}
|
||||
|
||||
Location.objects.update(set__line=[[2, 1], [1, 2]])
|
||||
loc = Location.objects.as_pymongo()[0]
|
||||
self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]})
|
||||
assert loc["line"] == {"type": "LineString", "coordinates": [[2, 1], [1, 2]]}
|
||||
|
||||
def test_geojson_PolygonField(self):
|
||||
class Location(Document):
|
||||
@@ -556,12 +536,18 @@ class GeoQueriesTest(MongoDBTestCase):
|
||||
|
||||
Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save()
|
||||
loc = Location.objects.as_pymongo()[0]
|
||||
self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
|
||||
assert loc["poly"] == {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]],
|
||||
}
|
||||
|
||||
Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]])
|
||||
loc = Location.objects.as_pymongo()[0]
|
||||
self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]})
|
||||
assert loc["poly"] == {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]],
|
||||
}
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,8 +1,6 @@
|
||||
import unittest
|
||||
|
||||
from mongoengine import connect, Document, IntField, StringField, ListField
|
||||
|
||||
__all__ = ("FindAndModifyTest",)
|
||||
from mongoengine import Document, IntField, ListField, StringField, connect
|
||||
|
||||
|
||||
class Doc(Document):
|
||||
@@ -10,21 +8,20 @@ class Doc(Document):
|
||||
value = IntField()
|
||||
|
||||
|
||||
class FindAndModifyTest(unittest.TestCase):
|
||||
|
||||
class TestFindAndModify(unittest.TestCase):
|
||||
def setUp(self):
|
||||
connect(db="mongoenginetest")
|
||||
Doc.drop_collection()
|
||||
|
||||
def assertDbEqual(self, docs):
|
||||
self.assertEqual(list(Doc._collection.find().sort("id")), docs)
|
||||
assert list(Doc._collection.find().sort("id")) == docs
|
||||
|
||||
def test_modify(self):
|
||||
Doc(id=0, value=0).save()
|
||||
doc = Doc(id=1, value=1).save()
|
||||
|
||||
old_doc = Doc.objects(id=1).modify(set__value=-1)
|
||||
self.assertEqual(old_doc.to_json(), doc.to_json())
|
||||
assert old_doc.to_json() == doc.to_json()
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
||||
|
||||
def test_modify_with_new(self):
|
||||
@@ -33,18 +30,18 @@ class FindAndModifyTest(unittest.TestCase):
|
||||
|
||||
new_doc = Doc.objects(id=1).modify(set__value=-1, new=True)
|
||||
doc.value = -1
|
||||
self.assertEqual(new_doc.to_json(), doc.to_json())
|
||||
assert new_doc.to_json() == doc.to_json()
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
||||
|
||||
def test_modify_not_existing(self):
|
||||
Doc(id=0, value=0).save()
|
||||
self.assertEqual(Doc.objects(id=1).modify(set__value=-1), None)
|
||||
assert Doc.objects(id=1).modify(set__value=-1) is None
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}])
|
||||
|
||||
def test_modify_with_upsert(self):
|
||||
Doc(id=0, value=0).save()
|
||||
old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True)
|
||||
self.assertEqual(old_doc, None)
|
||||
assert old_doc is None
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
|
||||
|
||||
def test_modify_with_upsert_existing(self):
|
||||
@@ -52,13 +49,13 @@ class FindAndModifyTest(unittest.TestCase):
|
||||
doc = Doc(id=1, value=1).save()
|
||||
|
||||
old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True)
|
||||
self.assertEqual(old_doc.to_json(), doc.to_json())
|
||||
assert old_doc.to_json() == doc.to_json()
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
||||
|
||||
def test_modify_with_upsert_with_new(self):
|
||||
Doc(id=0, value=0).save()
|
||||
new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1)
|
||||
self.assertEqual(new_doc.to_mongo(), {"_id": 1, "value": 1})
|
||||
assert new_doc.to_mongo() == {"_id": 1, "value": 1}
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}])
|
||||
|
||||
def test_modify_with_remove(self):
|
||||
@@ -66,12 +63,12 @@ class FindAndModifyTest(unittest.TestCase):
|
||||
doc = Doc(id=1, value=1).save()
|
||||
|
||||
old_doc = Doc.objects(id=1).modify(remove=True)
|
||||
self.assertEqual(old_doc.to_json(), doc.to_json())
|
||||
assert old_doc.to_json() == doc.to_json()
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}])
|
||||
|
||||
def test_find_and_modify_with_remove_not_existing(self):
|
||||
Doc(id=0, value=0).save()
|
||||
self.assertEqual(Doc.objects(id=1).modify(remove=True), None)
|
||||
assert Doc.objects(id=1).modify(remove=True) is None
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}])
|
||||
|
||||
def test_modify_with_order_by(self):
|
||||
@@ -81,17 +78,22 @@ class FindAndModifyTest(unittest.TestCase):
|
||||
doc = Doc(id=3, value=0).save()
|
||||
|
||||
old_doc = Doc.objects().order_by("-id").modify(set__value=-1)
|
||||
self.assertEqual(old_doc.to_json(), doc.to_json())
|
||||
self.assertDbEqual([
|
||||
{"_id": 0, "value": 3}, {"_id": 1, "value": 2},
|
||||
{"_id": 2, "value": 1}, {"_id": 3, "value": -1}])
|
||||
assert old_doc.to_json() == doc.to_json()
|
||||
self.assertDbEqual(
|
||||
[
|
||||
{"_id": 0, "value": 3},
|
||||
{"_id": 1, "value": 2},
|
||||
{"_id": 2, "value": 1},
|
||||
{"_id": 3, "value": -1},
|
||||
]
|
||||
)
|
||||
|
||||
def test_modify_with_fields(self):
|
||||
Doc(id=0, value=0).save()
|
||||
Doc(id=1, value=1).save()
|
||||
|
||||
old_doc = Doc.objects(id=1).only("id").modify(set__value=-1)
|
||||
self.assertEqual(old_doc.to_mongo(), {"_id": 1})
|
||||
assert old_doc.to_mongo() == {"_id": 1}
|
||||
self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}])
|
||||
|
||||
def test_modify_with_push(self):
|
||||
@@ -103,27 +105,25 @@ class FindAndModifyTest(unittest.TestCase):
|
||||
blog = BlogPost.objects.create()
|
||||
|
||||
# Push a new tag via modify with new=False (default).
|
||||
BlogPost(id=blog.id).modify(push__tags='code')
|
||||
self.assertEqual(blog.tags, [])
|
||||
BlogPost(id=blog.id).modify(push__tags="code")
|
||||
assert blog.tags == []
|
||||
blog.reload()
|
||||
self.assertEqual(blog.tags, ['code'])
|
||||
assert blog.tags == ["code"]
|
||||
|
||||
# Push a new tag via modify with new=True.
|
||||
blog = BlogPost.objects(id=blog.id).modify(push__tags='java', new=True)
|
||||
self.assertEqual(blog.tags, ['code', 'java'])
|
||||
blog = BlogPost.objects(id=blog.id).modify(push__tags="java", new=True)
|
||||
assert blog.tags == ["code", "java"]
|
||||
|
||||
# Push a new tag with a positional argument.
|
||||
blog = BlogPost.objects(id=blog.id).modify(
|
||||
push__tags__0='python',
|
||||
new=True)
|
||||
self.assertEqual(blog.tags, ['python', 'code', 'java'])
|
||||
blog = BlogPost.objects(id=blog.id).modify(push__tags__0="python", new=True)
|
||||
assert blog.tags == ["python", "code", "java"]
|
||||
|
||||
# Push multiple new tags with a positional argument.
|
||||
blog = BlogPost.objects(id=blog.id).modify(
|
||||
push__tags__1=['go', 'rust'],
|
||||
new=True)
|
||||
self.assertEqual(blog.tags, ['python', 'go', 'rust', 'code', 'java'])
|
||||
push__tags__1=["go", "rust"], new=True
|
||||
)
|
||||
assert blog.tags == ["python", "go", "rust", "code", "java"]
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,10 +1,9 @@
|
||||
import pickle
|
||||
import unittest
|
||||
from pymongo.mongo_client import MongoClient
|
||||
from mongoengine import Document, StringField, IntField
|
||||
from mongoengine.connection import connect
|
||||
|
||||
__author__ = 'stas'
|
||||
from mongoengine import Document, IntField, StringField
|
||||
from mongoengine.connection import connect
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class Person(Document):
|
||||
@@ -12,22 +11,15 @@ class Person(Document):
|
||||
age = IntField()
|
||||
|
||||
|
||||
class TestQuerysetPickable(unittest.TestCase):
|
||||
class TestQuerysetPickable(MongoDBTestCase):
|
||||
"""
|
||||
Test for adding pickling support for QuerySet instances
|
||||
See issue https://github.com/MongoEngine/mongoengine/issues/442
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
super(TestQuerysetPickable, self).setUp()
|
||||
|
||||
connection = connect(db="test") # type: pymongo.mongo_client.MongoClient
|
||||
|
||||
connection.drop_database("test")
|
||||
|
||||
self.john = Person.objects.create(
|
||||
name="John",
|
||||
age=21
|
||||
)
|
||||
self.john = Person.objects.create(name="John", age=21)
|
||||
|
||||
def test_picke_simple_qs(self):
|
||||
|
||||
@@ -45,35 +37,23 @@ class TestQuerysetPickable(unittest.TestCase):
|
||||
|
||||
loadedQs = self._get_loaded(qs)
|
||||
|
||||
self.assertEqual(qs.count(), loadedQs.count())
|
||||
assert qs.count() == loadedQs.count()
|
||||
|
||||
# can update loadedQs
|
||||
loadedQs.update(age=23)
|
||||
|
||||
# check
|
||||
self.assertEqual(Person.objects.first().age, 23)
|
||||
assert Person.objects.first().age == 23
|
||||
|
||||
def test_pickle_support_filtration(self):
|
||||
Person.objects.create(
|
||||
name="Alice",
|
||||
age=22
|
||||
)
|
||||
Person.objects.create(name="Alice", age=22)
|
||||
|
||||
Person.objects.create(
|
||||
name="Bob",
|
||||
age=23
|
||||
)
|
||||
Person.objects.create(name="Bob", age=23)
|
||||
|
||||
qs = Person.objects.filter(age__gte=22)
|
||||
self.assertEqual(qs.count(), 2)
|
||||
assert qs.count() == 2
|
||||
|
||||
loaded = self._get_loaded(qs)
|
||||
|
||||
self.assertEqual(loaded.count(), 2)
|
||||
self.assertEqual(loaded.filter(name="Bob").first().age, 23)
|
||||
assert loaded.count() == 2
|
||||
assert loaded.filter(name="Bob").first().age == 23
|
||||
File diff suppressed because it is too large
255
tests/queryset/test_queryset_aggregation.py
Normal file
@@ -0,0 +1,255 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
from pymongo.read_preferences import ReadPreference
|
||||
|
||||
from mongoengine import *
|
||||
from tests.utils import MongoDBTestCase
|
||||
|
||||
|
||||
class TestQuerysetAggregate(MongoDBTestCase):
|
||||
def test_read_preference_aggregation_framework(self):
|
||||
class Bar(Document):
|
||||
txt = StringField()
|
||||
|
||||
meta = {"indexes": ["txt"]}
|
||||
|
||||
# Aggregates with read_preference
|
||||
pipeline = []
|
||||
bars = Bar.objects.read_preference(
|
||||
ReadPreference.SECONDARY_PREFERRED
|
||||
).aggregate(pipeline)
|
||||
assert (
|
||||
bars._CommandCursor__collection.read_preference
|
||||
== ReadPreference.SECONDARY_PREFERRED
|
||||
)
|
||||
|
||||
def test_queryset_aggregation_framework(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna", age=16)
|
||||
p2 = Person(name="Wilson Junior", age=21)
|
||||
p3 = Person(name="Sandra Mara", age=37)
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = Person.objects(age__lte=22).aggregate(pipeline)
|
||||
|
||||
assert list(data) == [
|
||||
{"_id": p1.pk, "name": "ISABELLA LUANNA"},
|
||||
{"_id": p2.pk, "name": "WILSON JUNIOR"},
|
||||
]
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = Person.objects(age__lte=22).order_by("-name").aggregate(pipeline)
|
||||
|
||||
assert list(data) == [
|
||||
{"_id": p2.pk, "name": "WILSON JUNIOR"},
|
||||
{"_id": p1.pk, "name": "ISABELLA LUANNA"},
|
||||
]
|
||||
|
||||
pipeline = [
|
||||
{"$group": {"_id": None, "total": {"$sum": 1}, "avg": {"$avg": "$age"}}}
|
||||
]
|
||||
data = (
|
||||
Person.objects(age__gte=17, age__lte=40)
|
||||
.order_by("-age")
|
||||
.aggregate(pipeline)
|
||||
)
|
||||
assert list(data) == [{"_id": None, "avg": 29, "total": 2}]
|
||||
|
||||
pipeline = [{"$match": {"name": "Isabella Luanna"}}]
|
||||
data = Person.objects().aggregate(pipeline)
|
||||
assert list(data) == [{u"_id": p1.pk, u"age": 16, u"name": u"Isabella Luanna"}]
|
||||
|
||||
def test_queryset_aggregation_with_skip(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna", age=16)
|
||||
p2 = Person(name="Wilson Junior", age=21)
|
||||
p3 = Person(name="Sandra Mara", age=37)
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = Person.objects.skip(1).aggregate(pipeline)
|
||||
|
||||
assert list(data) == [
|
||||
{"_id": p2.pk, "name": "WILSON JUNIOR"},
|
||||
{"_id": p3.pk, "name": "SANDRA MARA"},
|
||||
]
|
||||
|
||||
def test_queryset_aggregation_with_limit(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna", age=16)
|
||||
p2 = Person(name="Wilson Junior", age=21)
|
||||
p3 = Person(name="Sandra Mara", age=37)
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = Person.objects.limit(1).aggregate(pipeline)
|
||||
|
||||
assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]
|
||||
|
||||
def test_queryset_aggregation_with_sort(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna", age=16)
|
||||
p2 = Person(name="Wilson Junior", age=21)
|
||||
p3 = Person(name="Sandra Mara", age=37)
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = Person.objects.order_by("name").aggregate(pipeline)
|
||||
|
||||
assert list(data) == [
|
||||
{"_id": p1.pk, "name": "ISABELLA LUANNA"},
|
||||
{"_id": p3.pk, "name": "SANDRA MARA"},
|
||||
{"_id": p2.pk, "name": "WILSON JUNIOR"},
|
||||
]
|
||||
|
||||
def test_queryset_aggregation_with_skip_with_limit(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna", age=16)
|
||||
p2 = Person(name="Wilson Junior", age=21)
|
||||
p3 = Person(name="Sandra Mara", age=37)
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = list(Person.objects.skip(1).limit(1).aggregate(pipeline))
|
||||
|
||||
assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}]
|
||||
|
||||
# Make sure limit/skip chaining order has no impact
|
||||
data2 = Person.objects.limit(1).skip(1).aggregate(pipeline)
|
||||
|
||||
assert data == list(data2)
|
||||
|
||||
def test_queryset_aggregation_with_sort_with_limit(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna", age=16)
|
||||
p2 = Person(name="Wilson Junior", age=21)
|
||||
p3 = Person(name="Sandra Mara", age=37)
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = Person.objects.order_by("name").limit(2).aggregate(pipeline)
|
||||
|
||||
assert list(data) == [
|
||||
{"_id": p1.pk, "name": "ISABELLA LUANNA"},
|
||||
{"_id": p3.pk, "name": "SANDRA MARA"},
|
||||
]
|
||||
|
||||
# Verify adding limit/skip steps works as expected
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}]
|
||||
data = Person.objects.order_by("name").limit(2).aggregate(pipeline)
|
||||
|
||||
assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]
|
||||
|
||||
pipeline = [
|
||||
{"$project": {"name": {"$toUpper": "$name"}}},
|
||||
{"$skip": 1},
|
||||
{"$limit": 1},
|
||||
]
|
||||
data = Person.objects.order_by("name").limit(2).aggregate(pipeline)
|
||||
|
||||
assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}]
|
||||
|
||||
def test_queryset_aggregation_with_sort_with_skip(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna", age=16)
|
||||
p2 = Person(name="Wilson Junior", age=21)
|
||||
p3 = Person(name="Sandra Mara", age=37)
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = Person.objects.order_by("name").skip(2).aggregate(pipeline)
|
||||
|
||||
assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}]
|
||||
|
||||
def test_queryset_aggregation_with_sort_with_skip_with_limit(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna", age=16)
|
||||
p2 = Person(name="Wilson Junior", age=21)
|
||||
p3 = Person(name="Sandra Mara", age=37)
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
data = Person.objects.order_by("name").skip(1).limit(1).aggregate(pipeline)
|
||||
|
||||
assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}]
|
||||
|
||||
def test_queryset_aggregation_deprecated_interface(self):
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
p1 = Person(name="Isabella Luanna")
|
||||
p2 = Person(name="Wilson Junior")
|
||||
p3 = Person(name="Sandra Mara")
|
||||
Person.objects.insert([p1, p2, p3])
|
||||
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}]
|
||||
|
||||
# Make sure a warning is emitted
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("error", DeprecationWarning)
|
||||
with self.assertRaises(DeprecationWarning):
|
||||
Person.objects.order_by("name").limit(2).aggregate(*pipeline)
|
||||
|
||||
# Make sure old interface works as expected with a 1-step pipeline
|
||||
data = Person.objects.order_by("name").limit(2).aggregate(*pipeline)
|
||||
|
||||
assert list(data) == [
|
||||
{"_id": p1.pk, "name": "ISABELLA LUANNA"},
|
||||
{"_id": p3.pk, "name": "SANDRA MARA"},
|
||||
]
|
||||
|
||||
# Make sure old interface works as expected with a 2-step pipeline
|
||||
pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}]
|
||||
data = Person.objects.order_by("name").limit(2).aggregate(*pipeline)
|
||||
|
||||
assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
374
tests/queryset/test_transform.py
Normal file
@@ -0,0 +1,374 @@
|
||||
import unittest
|
||||
|
||||
from bson.son import SON
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.queryset import Q, transform
|
||||
|
||||
|
||||
class TestTransform(unittest.TestCase):
|
||||
def setUp(self):
|
||||
connect(db="mongoenginetest")
|
||||
|
||||
def test_transform_query(self):
|
||||
"""Ensure that the _transform_query function operates correctly.
|
||||
"""
|
||||
assert transform.query(name="test", age=30) == {"name": "test", "age": 30}
|
||||
assert transform.query(age__lt=30) == {"age": {"$lt": 30}}
|
||||
assert transform.query(age__gt=20, age__lt=50) == {
|
||||
"age": {"$gt": 20, "$lt": 50}
|
||||
}
|
||||
assert transform.query(age=20, age__gt=50) == {
|
||||
"$and": [{"age": {"$gt": 50}}, {"age": 20}]
|
||||
}
|
||||
assert transform.query(friend__age__gte=30) == {"friend.age": {"$gte": 30}}
|
||||
assert transform.query(name__exists=True) == {"name": {"$exists": True}}
|
||||
assert transform.query(name=["Mark"], __raw__={"name": {"$in": "Tom"}}) == {
|
||||
"$and": [{"name": ["Mark"]}, {"name": {"$in": "Tom"}}]
|
||||
}
|
||||
assert transform.query(name__in=["Tom"], __raw__={"name": "Mark"}) == {
|
||||
"$and": [{"name": {"$in": ["Tom"]}}, {"name": "Mark"}]
|
||||
}
|
||||
|
||||
def test_transform_update(self):
|
||||
class LisDoc(Document):
|
||||
foo = ListField(StringField())
|
||||
|
||||
class DicDoc(Document):
|
||||
dictField = DictField()
|
||||
|
||||
class Doc(Document):
|
||||
pass
|
||||
|
||||
LisDoc.drop_collection()
|
||||
DicDoc.drop_collection()
|
||||
Doc.drop_collection()
|
||||
|
||||
DicDoc().save()
|
||||
doc = Doc().save()
|
||||
|
||||
for k, v in (
|
||||
("set", "$set"),
|
||||
("set_on_insert", "$setOnInsert"),
|
||||
("push", "$push"),
|
||||
):
|
||||
update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})
|
||||
assert isinstance(update[v]["dictField.test"], dict)
|
||||
|
||||
# Update special cases
|
||||
update = transform.update(DicDoc, unset__dictField__test=doc)
|
||||
assert update["$unset"]["dictField.test"] == 1
|
||||
|
||||
update = transform.update(DicDoc, pull__dictField__test=doc)
|
||||
assert isinstance(update["$pull"]["dictField"]["test"], dict)
|
||||
|
||||
update = transform.update(LisDoc, pull__foo__in=["a"])
|
||||
assert update == {"$pull": {"foo": {"$in": ["a"]}}}
|
||||
|
||||
def test_transform_update_push(self):
|
||||
"""Ensure the differences in behvaior between 'push' and 'push_all'"""
|
||||
|
||||
class BlogPost(Document):
|
||||
tags = ListField(StringField())
|
||||
|
||||
update = transform.update(BlogPost, push__tags=["mongo", "db"])
|
||||
assert update == {"$push": {"tags": ["mongo", "db"]}}
|
||||
|
||||
update = transform.update(BlogPost, push_all__tags=["mongo", "db"])
|
||||
assert update == {"$push": {"tags": {"$each": ["mongo", "db"]}}}
|
||||
|
||||
def test_transform_update_no_operator_default_to_set(self):
|
||||
"""Ensure the differences in behvaior between 'push' and 'push_all'"""
|
||||
|
||||
class BlogPost(Document):
|
||||
tags = ListField(StringField())
|
||||
|
||||
update = transform.update(BlogPost, tags=["mongo", "db"])
|
||||
assert update == {"$set": {"tags": ["mongo", "db"]}}
|
||||
|
||||
def test_query_field_name(self):
|
||||
"""Ensure that the correct field name is used when querying.
|
||||
"""
|
||||
|
||||
class Comment(EmbeddedDocument):
|
||||
content = StringField(db_field="commentContent")
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField(db_field="postTitle")
|
||||
comments = ListField(
|
||||
EmbeddedDocumentField(Comment), db_field="postComments"
|
||||
)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
data = {"title": "Post 1", "comments": [Comment(content="test")]}
|
||||
post = BlogPost(**data)
|
||||
post.save()
|
||||
|
||||
assert "postTitle" in BlogPost.objects(title=data["title"])._query
|
||||
assert not ("title" in BlogPost.objects(title=data["title"])._query)
|
||||
assert BlogPost.objects(title=data["title"]).count() == 1
|
||||
|
||||
assert "_id" in BlogPost.objects(pk=post.id)._query
|
||||
assert BlogPost.objects(pk=post.id).count() == 1
|
||||
|
||||
assert (
|
||||
"postComments.commentContent"
|
||||
in BlogPost.objects(comments__content="test")._query
|
||||
)
|
||||
assert BlogPost.objects(comments__content="test").count() == 1
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_query_pk_field_name(self):
|
||||
"""Ensure that the correct "primary key" field name is used when
|
||||
querying
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField(primary_key=True, db_field="postTitle")
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
data = {"title": "Post 1"}
|
||||
post = BlogPost(**data)
|
||||
post.save()
|
||||
|
||||
assert "_id" in BlogPost.objects(pk=data["title"])._query
|
||||
assert "_id" in BlogPost.objects(title=data["title"])._query
|
||||
assert BlogPost.objects(pk=data["title"]).count() == 1
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_chaining(self):
|
||||
class A(Document):
|
||||
pass
|
||||
|
||||
class B(Document):
|
||||
a = ReferenceField(A)
|
||||
|
||||
A.drop_collection()
|
||||
B.drop_collection()
|
||||
|
||||
a1 = A().save()
|
||||
a2 = A().save()
|
||||
|
||||
B(a=a1).save()
|
||||
|
||||
# Works
|
||||
q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query
|
||||
|
||||
# Doesn't work
|
||||
q2 = B.objects.filter(a__in=[a1, a2])
|
||||
q2 = q2.filter(a=a1)._query
|
||||
|
||||
assert q1 == q2
|
||||
|
||||
def test_raw_query_and_Q_objects(self):
|
||||
"""
|
||||
Test that __raw__ queries play nicely with Q objects
|
||||
"""
|
||||
|
||||
class Foo(Document):
|
||||
name = StringField()
|
||||
a = StringField()
|
||||
b = StringField()
|
||||
c = StringField()
|
||||
|
||||
meta = {"allow_inheritance": False}
|
||||
|
||||
query = Foo.objects(__raw__={"$nor": [{"name": "bar"}]})._query
|
||||
assert query == {"$nor": [{"name": "bar"}]}
|
||||
|
||||
q1 = {"$or": [{"a": 1}, {"b": 1}]}
|
||||
query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
|
||||
assert query == {"$or": [{"a": 1}, {"b": 1}], "c": 1}
|
||||
|
||||
def test_raw_and_merging(self):
|
||||
class Doc(Document):
|
||||
meta = {"allow_inheritance": False}
|
||||
|
||||
raw_query = Doc.objects(
|
||||
__raw__={
|
||||
"deleted": False,
|
||||
"scraped": "yes",
|
||||
"$nor": [
|
||||
{"views.extracted": "no"},
|
||||
{"attachments.views.extracted": "no"},
|
||||
],
|
||||
}
|
||||
)._query
|
||||
|
||||
assert raw_query == {
|
||||
"deleted": False,
|
||||
"scraped": "yes",
|
||||
"$nor": [{"views.extracted": "no"}, {"attachments.views.extracted": "no"}],
|
||||
}
|
||||
|
||||
def test_geojson_PointField(self):
|
||||
class Location(Document):
|
||||
loc = PointField()
|
||||
|
||||
update = transform.update(Location, set__loc=[1, 2])
|
||||
assert update == {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}}
|
||||
|
||||
update = transform.update(
|
||||
Location, set__loc={"type": "Point", "coordinates": [1, 2]}
|
||||
)
|
||||
assert update == {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}}
|
||||
|
||||
def test_geojson_LineStringField(self):
|
||||
class Location(Document):
|
||||
line = LineStringField()
|
||||
|
||||
update = transform.update(Location, set__line=[[1, 2], [2, 2]])
|
||||
assert update == {
|
||||
"$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}
|
||||
}
|
||||
|
||||
update = transform.update(
|
||||
Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]}
|
||||
)
|
||||
assert update == {
|
||||
"$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}
|
||||
}
|
||||
|
||||
def test_geojson_PolygonField(self):
|
||||
class Location(Document):
|
||||
poly = PolygonField()
|
||||
|
||||
update = transform.update(
|
||||
Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]
|
||||
)
|
||||
assert update == {
|
||||
"$set": {
|
||||
"poly": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
update = transform.update(
|
||||
Location,
|
||||
set__poly={
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]],
|
||||
},
|
||||
)
|
||||
assert update == {
|
||||
"$set": {
|
||||
"poly": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def test_type(self):
|
||||
class Doc(Document):
|
||||
df = DynamicField()
|
||||
|
||||
Doc(df=True).save()
|
||||
Doc(df=7).save()
|
||||
Doc(df="df").save()
|
||||
assert Doc.objects(df__type=1).count() == 0 # double
|
||||
assert Doc.objects(df__type=8).count() == 1 # bool
|
||||
assert Doc.objects(df__type=2).count() == 1 # str
|
||||
assert Doc.objects(df__type=16).count() == 1 # int
|
||||
|
||||
def test_last_field_name_like_operator(self):
|
||||
class EmbeddedItem(EmbeddedDocument):
|
||||
type = StringField()
|
||||
name = StringField()
|
||||
|
||||
class Doc(Document):
|
||||
item = EmbeddedDocumentField(EmbeddedItem)
|
||||
|
||||
Doc.drop_collection()
|
||||
|
||||
doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe"))
|
||||
doc.save()
|
||||
|
||||
assert 1 == Doc.objects(item__type__="axe").count()
|
||||
assert 1 == Doc.objects(item__name__="Heroic axe").count()
|
||||
|
||||
Doc.objects(id=doc.id).update(set__item__type__="sword")
|
||||
assert 1 == Doc.objects(item__type__="sword").count()
|
||||
assert 0 == Doc.objects(item__type__="axe").count()
|
||||
|
||||
def test_understandable_error_raised(self):
|
||||
class Event(Document):
|
||||
title = StringField()
|
||||
location = GeoPointField()
|
||||
|
||||
box = [(35.0, -125.0), (40.0, -100.0)]
|
||||
# I *meant* to execute location__within_box=box
|
||||
events = Event.objects(location__within=box)
|
||||
with pytest.raises(InvalidQueryError):
|
||||
events.count()
|
||||
|
||||
def test_update_pull_for_list_fields(self):
|
||||
"""
|
||||
Test added to check pull operation in update for
|
||||
EmbeddedDocumentListField which is inside an EmbeddedDocumentField
|
||||
"""
|
||||
|
||||
class Word(EmbeddedDocument):
|
||||
word = StringField()
|
||||
index = IntField()
|
||||
|
||||
class SubDoc(EmbeddedDocument):
|
||||
heading = ListField(StringField())
|
||||
text = EmbeddedDocumentListField(Word)
|
||||
|
||||
class MainDoc(Document):
|
||||
title = StringField()
|
||||
content = EmbeddedDocumentField(SubDoc)
|
||||
|
||||
word = Word(word="abc", index=1)
|
||||
update = transform.update(MainDoc, pull__content__text=word)
|
||||
assert update == {
|
||||
"$pull": {"content.text": SON([("word", u"abc"), ("index", 1)])}
|
||||
}
|
||||
|
||||
update = transform.update(MainDoc, pull__content__heading="xyz")
|
||||
assert update == {"$pull": {"content.heading": "xyz"}}
|
||||
|
||||
update = transform.update(MainDoc, pull__content__text__word__in=["foo", "bar"])
|
||||
assert update == {"$pull": {"content.text": {"word": {"$in": ["foo", "bar"]}}}}
|
||||
|
||||
update = transform.update(
|
||||
MainDoc, pull__content__text__word__nin=["foo", "bar"]
|
||||
)
|
||||
assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}}
|
||||
|
||||
def test_transform_embedded_document_list_fields(self):
|
||||
"""
|
||||
Test added to check filtering
|
||||
EmbeddedDocumentListField which is inside an EmbeddedDocumentField
|
||||
"""
|
||||
|
||||
class Drink(EmbeddedDocument):
|
||||
id = StringField()
|
||||
meta = {"strict": False}
|
||||
|
||||
class Shop(Document):
|
||||
drinks = EmbeddedDocumentListField(Drink)
|
||||
|
||||
Shop.drop_collection()
|
||||
drinks = [Drink(id="drink_1"), Drink(id="drink_2")]
|
||||
Shop.objects.create(drinks=drinks)
|
||||
q_obj = transform.query(
|
||||
Shop, drinks__all=[{"$elemMatch": {"_id": x.id}} for x in drinks]
|
||||
)
|
||||
assert q_obj == {
|
||||
"drinks": {"$all": [{"$elemMatch": {"_id": x.id}} for x in drinks]}
|
||||
}
|
||||
|
||||
Shop.drop_collection()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
423
tests/queryset/test_visitor.py
Normal file
@@ -0,0 +1,423 @@
|
||||
import datetime
|
||||
import re
|
||||
import unittest
|
||||
|
||||
from bson import ObjectId
|
||||
import pytest
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.errors import InvalidQueryError
|
||||
from mongoengine.queryset import Q
|
||||
|
||||
|
||||
class TestQ(unittest.TestCase):
|
||||
def setUp(self):
|
||||
connect(db="mongoenginetest")
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
meta = {"allow_inheritance": True}
|
||||
|
||||
Person.drop_collection()
|
||||
self.Person = Person
|
||||
|
||||
def test_empty_q(self):
|
||||
"""Ensure that empty Q objects won't hurt.
|
||||
"""
|
||||
q1 = Q()
|
||||
q2 = Q(age__gte=18)
|
||||
q3 = Q()
|
||||
q4 = Q(name="test")
|
||||
q5 = Q()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
query = {"$or": [{"age": {"$gte": 18}}, {"name": "test"}]}
|
||||
assert (q1 | q2 | q3 | q4 | q5).to_query(Person) == query
|
||||
|
||||
query = {"age": {"$gte": 18}, "name": "test"}
|
||||
assert (q1 & q2 & q3 & q4 & q5).to_query(Person) == query
|
||||
|
||||
def test_q_with_dbref(self):
|
||||
"""Ensure Q objects handle DBRefs correctly"""
|
||||
connect(db="mongoenginetest")
|
||||
|
||||
class User(Document):
|
||||
pass
|
||||
|
||||
class Post(Document):
|
||||
created_user = ReferenceField(User)
|
||||
|
||||
user = User.objects.create()
|
||||
Post.objects.create(created_user=user)
|
||||
|
||||
assert Post.objects.filter(created_user=user).count() == 1
|
||||
assert Post.objects.filter(Q(created_user=user)).count() == 1
|
||||
|
||||
def test_and_combination(self):
|
||||
"""Ensure that Q-objects correctly AND together.
|
||||
"""
|
||||
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
y = StringField()
|
||||
|
||||
query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
|
||||
assert query == {"$and": [{"x": {"$lt": 7}}, {"x": {"$lt": 3}}]}
|
||||
|
||||
query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
|
||||
assert query == {"$and": [{"y": "a"}, {"x": {"$lt": 7}}, {"x": {"$lt": 3}}]}
|
||||
|
||||
# Check normal cases work without an error
|
||||
query = Q(x__lt=7) & Q(x__gt=3)
|
||||
|
||||
q1 = Q(x__lt=7)
|
||||
q2 = Q(x__gt=3)
|
||||
query = (q1 & q2).to_query(TestDoc)
|
||||
assert query == {"x": {"$lt": 7, "$gt": 3}}
|
||||
|
||||
# More complex nested example
|
||||
query = Q(x__lt=100) & Q(y__ne="NotMyString")
|
||||
query &= Q(y__in=["a", "b", "c"]) & Q(x__gt=-100)
|
||||
mongo_query = {
|
||||
"x": {"$lt": 100, "$gt": -100},
|
||||
"y": {"$ne": "NotMyString", "$in": ["a", "b", "c"]},
|
||||
}
|
||||
assert query.to_query(TestDoc) == mongo_query
|
||||
|
||||
def test_or_combination(self):
|
||||
"""Ensure that Q-objects correctly OR together.
|
||||
"""
|
||||
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
|
||||
q1 = Q(x__lt=3)
|
||||
q2 = Q(x__gt=7)
|
||||
query = (q1 | q2).to_query(TestDoc)
|
||||
assert query == {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]}
|
||||
|
||||
def test_and_or_combination(self):
|
||||
"""Ensure that Q-objects handle ANDing ORed components.
|
||||
"""
|
||||
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
y = BooleanField()
|
||||
|
||||
TestDoc.drop_collection()
|
||||
|
||||
query = Q(x__gt=0) | Q(x__exists=False)
|
||||
query &= Q(x__lt=100)
|
||||
assert query.to_query(TestDoc) == {
|
||||
"$and": [
|
||||
{"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]},
|
||||
{"x": {"$lt": 100}},
|
||||
]
|
||||
}
|
||||
|
||||
q1 = Q(x__gt=0) | Q(x__exists=False)
|
||||
q2 = Q(x__lt=100) | Q(y=True)
|
||||
query = (q1 & q2).to_query(TestDoc)
|
||||
|
||||
TestDoc(x=101).save()
|
||||
TestDoc(x=10).save()
|
||||
TestDoc(y=True).save()
|
||||
|
||||
assert query == {
|
||||
"$and": [
|
||||
{"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]},
|
||||
{"$or": [{"x": {"$lt": 100}}, {"y": True}]},
|
||||
]
|
||||
}
|
||||
assert 2 == TestDoc.objects(q1 & q2).count()
|
||||
|
||||
def test_or_and_or_combination(self):
|
||||
"""Ensure that Q-objects handle ORing ANDed ORed components. :)
|
||||
"""
|
||||
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
y = BooleanField()
|
||||
|
||||
TestDoc.drop_collection()
|
||||
TestDoc(x=-1, y=True).save()
|
||||
TestDoc(x=101, y=True).save()
|
||||
TestDoc(x=99, y=False).save()
|
||||
TestDoc(x=101, y=False).save()
|
||||
|
||||
q1 = Q(x__gt=0) & (Q(y=True) | Q(y__exists=False))
|
||||
q2 = Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))
|
||||
query = (q1 | q2).to_query(TestDoc)
|
||||
|
||||
assert query == {
|
||||
"$or": [
|
||||
{
|
||||
"$and": [
|
||||
{"x": {"$gt": 0}},
|
||||
{"$or": [{"y": True}, {"y": {"$exists": False}}]},
|
||||
]
|
||||
},
|
||||
{
|
||||
"$and": [
|
||||
{"x": {"$lt": 100}},
|
||||
{"$or": [{"y": False}, {"y": {"$exists": False}}]},
|
||||
]
|
||||
},
|
||||
]
|
||||
}
|
||||
assert 2 == TestDoc.objects(q1 | q2).count()
|
||||
|
||||
def test_multiple_occurence_in_field(self):
|
||||
class Test(Document):
|
||||
name = StringField(max_length=40)
|
||||
title = StringField(max_length=40)
|
||||
|
||||
q1 = Q(name__contains="te") | Q(title__contains="te")
|
||||
q2 = Q(name__contains="12") | Q(title__contains="12")
|
||||
|
||||
q3 = q1 & q2
|
||||
|
||||
query = q3.to_query(Test)
|
||||
assert query["$and"][0] == q1.to_query(Test)
|
||||
assert query["$and"][1] == q2.to_query(Test)
|
||||
|
||||
def test_q_clone(self):
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
|
||||
TestDoc.drop_collection()
|
||||
for i in range(1, 101):
|
||||
t = TestDoc(x=i)
|
||||
t.save()
|
||||
|
||||
# Check normal cases work without an error
|
||||
test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3))
|
||||
|
||||
assert test.count() == 3
|
||||
|
||||
test2 = test.clone()
|
||||
assert test2.count() == 3
|
||||
assert test2 != test
|
||||
|
||||
test3 = test2.filter(x=6)
|
||||
assert test3.count() == 1
|
||||
assert test.count() == 3
|
||||
|
||||
def test_q(self):
|
||||
"""Ensure that Q objects may be used to query for documents.
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
publish_date = DateTimeField()
|
||||
published = BooleanField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post1 = BlogPost(
|
||||
title="Test 1", publish_date=datetime.datetime(2010, 1, 8), published=False
|
||||
)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(
|
||||
title="Test 2", publish_date=datetime.datetime(2010, 1, 15), published=True
|
||||
)
|
||||
post2.save()
|
||||
|
||||
post3 = BlogPost(title="Test 3", published=True)
|
||||
post3.save()
|
||||
|
||||
post4 = BlogPost(title="Test 4", publish_date=datetime.datetime(2010, 1, 8))
|
||||
post4.save()
|
||||
|
||||
post5 = BlogPost(title="Test 1", publish_date=datetime.datetime(2010, 1, 15))
|
||||
post5.save()
|
||||
|
||||
post6 = BlogPost(title="Test 1", published=False)
|
||||
post6.save()
|
||||
|
||||
# Check ObjectId lookup works
|
||||
obj = BlogPost.objects(id=post1.id).first()
|
||||
assert obj == post1
|
||||
|
||||
# Check Q object combination where one query matches no documents
|
||||
q = BlogPost.objects(Q(title="Test 5") | Q(published=True))
|
||||
posts = [post.id for post in q]
|
||||
|
||||
published_posts = (post2, post3)
|
||||
assert all(obj.id in posts for obj in published_posts)
|
||||
|
||||
q = BlogPost.objects(Q(title="Test 1") | Q(published=True))
|
||||
posts = [post.id for post in q]
|
||||
published_posts = (post1, post2, post3, post5, post6)
|
||||
assert all(obj.id in posts for obj in published_posts)
|
||||
|
||||
# Check Q object combination
|
||||
date = datetime.datetime(2010, 1, 10)
|
||||
q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
|
||||
posts = [post.id for post in q]
|
||||
|
||||
published_posts = (post1, post2, post3, post4)
|
||||
assert all(obj.id in posts for obj in published_posts)
|
||||
|
||||
assert not any(obj.id in posts for obj in [post5, post6])
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
# Check the 'in' operator
|
||||
self.Person(name="user1", age=20).save()
|
||||
self.Person(name="user2", age=20).save()
|
||||
self.Person(name="user3", age=30).save()
|
||||
self.Person(name="user4", age=40).save()
|
||||
|
||||
assert self.Person.objects(Q(age__in=[20])).count() == 2
|
||||
assert self.Person.objects(Q(age__in=[20, 30])).count() == 3
|
||||
|
||||
# Test invalid query objs
|
||||
with pytest.raises(InvalidQueryError):
|
||||
self.Person.objects("user1")
|
||||
|
||||
# filter should fail, too
|
||||
with pytest.raises(InvalidQueryError):
|
||||
self.Person.objects.filter("user1")
|
||||
|
||||
def test_q_regex(self):
|
||||
"""Ensure that Q objects can be queried using regexes.
|
||||
"""
|
||||
person = self.Person(name="Guido van Rossum")
|
||||
person.save()
|
||||
|
||||
obj = self.Person.objects(Q(name=re.compile("^Gui"))).first()
|
||||
assert obj == person
|
||||
obj = self.Person.objects(Q(name=re.compile("^gui"))).first()
|
||||
assert obj is None
|
||||
|
||||
obj = self.Person.objects(Q(name=re.compile("^gui", re.I))).first()
|
||||
assert obj == person
|
||||
|
||||
obj = self.Person.objects(Q(name__not=re.compile("^bob"))).first()
|
||||
assert obj == person
|
||||
|
||||
obj = self.Person.objects(Q(name__not=re.compile("^Gui"))).first()
|
||||
assert obj is None
|
||||
|
||||
def test_q_repr(self):
|
||||
assert repr(Q()) == "Q(**{})"
|
||||
assert repr(Q(name="test")) == "Q(**{'name': 'test'})"
|
||||
|
||||
assert (
|
||||
repr(Q(name="test") & Q(age__gte=18))
|
||||
== "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))"
|
||||
)
|
||||
|
||||
assert (
|
||||
repr(Q(name="test") | Q(age__gte=18))
|
||||
== "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))"
|
||||
)
|
||||
|
||||
def test_q_lists(self):
|
||||
"""Ensure that Q objects query ListFields correctly.
|
||||
"""
|
||||
|
||||
class BlogPost(Document):
|
||||
tags = ListField(StringField())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
BlogPost(tags=["python", "mongo"]).save()
|
||||
BlogPost(tags=["python"]).save()
|
||||
|
||||
assert BlogPost.objects(Q(tags="mongo")).count() == 1
|
||||
assert BlogPost.objects(Q(tags="python")).count() == 2
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_q_merge_queries_edge_case(self):
|
||||
class User(Document):
|
||||
email = EmailField(required=False)
|
||||
name = StringField()
|
||||
|
||||
User.drop_collection()
|
||||
pk = ObjectId()
|
||||
User(email="example@example.com", pk=pk).save()
|
||||
|
||||
assert (
|
||||
1
|
||||
== User.objects.filter(Q(email="example@example.com") | Q(name="John Doe"))
|
||||
.limit(2)
|
||||
.filter(pk=pk)
|
||||
.count()
|
||||
)
|
||||
|
||||
def test_chained_q_or_filtering(self):
|
||||
class Post(EmbeddedDocument):
|
||||
name = StringField(required=True)
|
||||
|
||||
class Item(Document):
|
||||
postables = ListField(EmbeddedDocumentField(Post))
|
||||
|
||||
Item.drop_collection()
|
||||
|
||||
Item(postables=[Post(name="a"), Post(name="b")]).save()
|
||||
Item(postables=[Post(name="a"), Post(name="c")]).save()
|
||||
Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save()
|
||||
|
||||
assert (
|
||||
Item.objects(Q(postables__name="a") & Q(postables__name="b")).count() == 2
|
||||
)
|
||||
assert (
|
||||
Item.objects.filter(postables__name="a").filter(postables__name="b").count()
|
||||
== 2
|
||||
)
|
||||
|
||||
def test_equality(self):
|
||||
assert Q(name="John") == Q(name="John")
|
||||
assert Q() == Q()
|
||||
|
||||
def test_inequality(self):
|
||||
assert Q(name="John") != Q(name="Ralph")
|
||||
|
||||
def test_operation_equality(self):
|
||||
q1 = Q(name="John") | Q(title="Sir") & Q(surname="Paul")
|
||||
q2 = Q(name="John") | Q(title="Sir") & Q(surname="Paul")
|
||||
assert q1 == q2
|
||||
|
||||
def test_operation_inequality(self):
|
||||
q1 = Q(name="John") | Q(title="Sir")
|
||||
q2 = Q(title="Sir") | Q(name="John")
|
||||
assert q1 != q2
|
||||
|
||||
def test_combine_and_empty(self):
|
||||
q = Q(x=1)
|
||||
assert q & Q() == q
|
||||
assert Q() & q == q
|
||||
|
||||
def test_combine_and_both_empty(self):
|
||||
assert Q() & Q() == Q()
|
||||
|
||||
def test_combine_or_empty(self):
|
||||
q = Q(x=1)
|
||||
assert q | Q() == q
|
||||
assert Q() | q == q
|
||||
|
||||
def test_combine_or_both_empty(self):
|
||||
assert Q() | Q() == Q()
|
||||
|
||||
def test_q_bool(self):
|
||||
assert Q(name="John")
|
||||
assert not Q()
|
||||
|
||||
def test_combine_bool(self):
|
||||
assert not Q() & Q()
|
||||
assert Q() & Q(name="John")
|
||||
assert Q(name="John") & Q()
|
||||
assert Q() | Q(name="John")
|
||||
assert Q(name="John") | Q()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -1,301 +0,0 @@
|
||||
import unittest
|
||||
|
||||
from bson.son import SON
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.queryset import Q, transform
|
||||
|
||||
__all__ = ("TransformTest",)
|
||||
|
||||
|
||||
class TransformTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
def test_transform_query(self):
|
||||
"""Ensure that the _transform_query function operates correctly.
|
||||
"""
|
||||
self.assertEqual(transform.query(name='test', age=30),
|
||||
{'name': 'test', 'age': 30})
|
||||
self.assertEqual(transform.query(age__lt=30),
|
||||
{'age': {'$lt': 30}})
|
||||
self.assertEqual(transform.query(age__gt=20, age__lt=50),
|
||||
{'age': {'$gt': 20, '$lt': 50}})
|
||||
self.assertEqual(transform.query(age=20, age__gt=50),
|
||||
{'$and': [{'age': {'$gt': 50}}, {'age': 20}]})
|
||||
self.assertEqual(transform.query(friend__age__gte=30),
|
||||
{'friend.age': {'$gte': 30}})
|
||||
self.assertEqual(transform.query(name__exists=True),
|
||||
{'name': {'$exists': True}})
|
||||
|
||||
def test_transform_update(self):
|
||||
class LisDoc(Document):
|
||||
foo = ListField(StringField())
|
||||
|
||||
class DicDoc(Document):
|
||||
dictField = DictField()
|
||||
|
||||
class Doc(Document):
|
||||
pass
|
||||
|
||||
LisDoc.drop_collection()
|
||||
DicDoc.drop_collection()
|
||||
Doc.drop_collection()
|
||||
|
||||
DicDoc().save()
|
||||
doc = Doc().save()
|
||||
|
||||
for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")):
|
||||
update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})
|
||||
self.assertIsInstance(update[v]["dictField.test"], dict)
|
||||
|
||||
# Update special cases
|
||||
update = transform.update(DicDoc, unset__dictField__test=doc)
|
||||
self.assertEqual(update["$unset"]["dictField.test"], 1)
|
||||
|
||||
update = transform.update(DicDoc, pull__dictField__test=doc)
|
||||
self.assertIsInstance(update["$pull"]["dictField"]["test"], dict)
|
||||
|
||||
update = transform.update(LisDoc, pull__foo__in=['a'])
|
||||
self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}})
|
||||
|
||||
def test_transform_update_push(self):
|
||||
"""Ensure the differences in behvaior between 'push' and 'push_all'"""
|
||||
class BlogPost(Document):
|
||||
tags = ListField(StringField())
|
||||
|
||||
update = transform.update(BlogPost, push__tags=['mongo', 'db'])
|
||||
self.assertEqual(update, {'$push': {'tags': ['mongo', 'db']}})
|
||||
|
||||
update = transform.update(BlogPost, push_all__tags=['mongo', 'db'])
|
||||
self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}})
|
||||
|
||||
def test_transform_update_no_operator_default_to_set(self):
|
||||
"""Ensure the differences in behvaior between 'push' and 'push_all'"""
|
||||
class BlogPost(Document):
|
||||
tags = ListField(StringField())
|
||||
|
||||
update = transform.update(BlogPost, tags=['mongo', 'db'])
|
||||
self.assertEqual(update, {'$set': {'tags': ['mongo', 'db']}})
|
||||
|
||||
def test_query_field_name(self):
|
||||
"""Ensure that the correct field name is used when querying.
|
||||
"""
|
||||
class Comment(EmbeddedDocument):
|
||||
content = StringField(db_field='commentContent')
|
||||
|
||||
class BlogPost(Document):
|
||||
title = StringField(db_field='postTitle')
|
||||
comments = ListField(EmbeddedDocumentField(Comment),
|
||||
db_field='postComments')
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
        data = {'title': 'Post 1', 'comments': [Comment(content='test')]}
        post = BlogPost(**data)
        post.save()

        self.assertIn('postTitle', BlogPost.objects(title=data['title'])._query)
        self.assertFalse('title' in
                         BlogPost.objects(title=data['title'])._query)
        self.assertEqual(BlogPost.objects(title=data['title']).count(), 1)

        self.assertIn('_id', BlogPost.objects(pk=post.id)._query)
        self.assertEqual(BlogPost.objects(pk=post.id).count(), 1)

        self.assertIn('postComments.commentContent', BlogPost.objects(comments__content='test')._query)
        self.assertEqual(BlogPost.objects(comments__content='test').count(), 1)

        BlogPost.drop_collection()

    def test_query_pk_field_name(self):
        """Ensure that the correct "primary key" field name is used when
        querying
        """
        class BlogPost(Document):
            title = StringField(primary_key=True, db_field='postTitle')

        BlogPost.drop_collection()

        data = {'title': 'Post 1'}
        post = BlogPost(**data)
        post.save()

        self.assertIn('_id', BlogPost.objects(pk=data['title'])._query)
        self.assertIn('_id', BlogPost.objects(title=data['title'])._query)
        self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1)

        BlogPost.drop_collection()

    def test_chaining(self):
        class A(Document):
            pass

        class B(Document):
            a = ReferenceField(A)

        A.drop_collection()
        B.drop_collection()

        a1 = A().save()
        a2 = A().save()

        B(a=a1).save()

        # Works
        q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query

        # Doesn't work
        q2 = B.objects.filter(a__in=[a1, a2])
        q2 = q2.filter(a=a1)._query

        self.assertEqual(q1, q2)

    def test_raw_query_and_Q_objects(self):
        """
        Test raw plays nicely
        """
        class Foo(Document):
            name = StringField()
            a = StringField()
            b = StringField()
            c = StringField()

            meta = {
                'allow_inheritance': False
            }

        query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query
        self.assertEqual(query, {'$nor': [{'name': 'bar'}]})

        q1 = {'$or': [{'a': 1}, {'b': 1}]}
        query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query
        self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1})

    def test_raw_and_merging(self):
        class Doc(Document):
            meta = {'allow_inheritance': False}

        raw_query = Doc.objects(__raw__={
            'deleted': False,
            'scraped': 'yes',
            '$nor': [
                {'views.extracted': 'no'},
                {'attachments.views.extracted': 'no'}
            ]
        })._query

        self.assertEqual(raw_query, {
            'deleted': False,
            'scraped': 'yes',
            '$nor': [
                {'views.extracted': 'no'},
                {'attachments.views.extracted': 'no'}
            ]
        })

    def test_geojson_PointField(self):
        class Location(Document):
            loc = PointField()

        update = transform.update(Location, set__loc=[1, 2])
        self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})

        update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]})
        self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})

    def test_geojson_LineStringField(self):
        class Location(Document):
            line = LineStringField()

        update = transform.update(Location, set__line=[[1, 2], [2, 2]])
        self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}})

        update = transform.update(Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]})
        self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}})

    def test_geojson_PolygonField(self):
        class Location(Document):
            poly = PolygonField()

        update = transform.update(Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]])
        self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}})

        update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]})
        self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}})

    def test_type(self):
        class Doc(Document):
            df = DynamicField()
        Doc(df=True).save()
        Doc(df=7).save()
        Doc(df="df").save()
        self.assertEqual(Doc.objects(df__type=1).count(), 0)  # double
        self.assertEqual(Doc.objects(df__type=8).count(), 1)  # bool
        self.assertEqual(Doc.objects(df__type=2).count(), 1)  # str
        self.assertEqual(Doc.objects(df__type=16).count(), 1)  # int

    def test_last_field_name_like_operator(self):
        class EmbeddedItem(EmbeddedDocument):
            type = StringField()
            name = StringField()

        class Doc(Document):
            item = EmbeddedDocumentField(EmbeddedItem)

        Doc.drop_collection()

        doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe"))
        doc.save()

        self.assertEqual(1, Doc.objects(item__type__="axe").count())
        self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count())

        Doc.objects(id=doc.id).update(set__item__type__='sword')
        self.assertEqual(1, Doc.objects(item__type__="sword").count())
        self.assertEqual(0, Doc.objects(item__type__="axe").count())

    def test_understandable_error_raised(self):
        class Event(Document):
            title = StringField()
            location = GeoPointField()

        box = [(35.0, -125.0), (40.0, -100.0)]
        # I *meant* to execute location__within_box=box
        events = Event.objects(location__within=box)
        with self.assertRaises(InvalidQueryError):
            events.count()

    def test_update_pull_for_list_fields(self):
        """
        Test added to check pull operation in update for
        EmbeddedDocumentListField which is inside a EmbeddedDocumentField
        """
        class Word(EmbeddedDocument):
            word = StringField()
            index = IntField()

        class SubDoc(EmbeddedDocument):
            heading = ListField(StringField())
            text = EmbeddedDocumentListField(Word)

        class MainDoc(Document):
            title = StringField()
            content = EmbeddedDocumentField(SubDoc)

        word = Word(word='abc', index=1)
        update = transform.update(MainDoc, pull__content__text=word)
        self.assertEqual(update, {'$pull': {'content.text': SON([('word', u'abc'), ('index', 1)])}})

        update = transform.update(MainDoc, pull__content__heading='xyz')
        self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}})

        update = transform.update(MainDoc, pull__content__text__word__in=['foo', 'bar'])
        self.assertEqual(update, {'$pull': {'content.text': {'word': {'$in': ['foo', 'bar']}}}})

        update = transform.update(MainDoc, pull__content__text__word__nin=['foo', 'bar'])
        self.assertEqual(update, {'$pull': {'content.text': {'word': {'$nin': ['foo', 'bar']}}}})

if __name__ == '__main__':
    unittest.main()
@@ -1,358 +0,0 @@
|
||||
import datetime
|
||||
import re
|
||||
import unittest
|
||||
|
||||
from bson import ObjectId
|
||||
|
||||
from mongoengine import *
|
||||
from mongoengine.errors import InvalidQueryError
|
||||
from mongoengine.queryset import Q
|
||||
|
||||
__all__ = ("QTest",)
|
||||
|
||||
|
||||
class QTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
meta = {'allow_inheritance': True}
|
||||
|
||||
Person.drop_collection()
|
||||
self.Person = Person
|
||||
|
||||
def test_empty_q(self):
|
||||
"""Ensure that empty Q objects won't hurt.
|
||||
"""
|
||||
q1 = Q()
|
||||
q2 = Q(age__gte=18)
|
||||
q3 = Q()
|
||||
q4 = Q(name='test')
|
||||
q5 = Q()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
age = IntField()
|
||||
|
||||
query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]}
|
||||
self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query)
|
||||
|
||||
query = {'age': {'$gte': 18}, 'name': 'test'}
|
||||
self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query)
|
||||
|
||||
def test_q_with_dbref(self):
|
||||
"""Ensure Q objects handle DBRefs correctly"""
|
||||
connect(db='mongoenginetest')
|
||||
|
||||
class User(Document):
|
||||
pass
|
||||
|
||||
class Post(Document):
|
||||
created_user = ReferenceField(User)
|
||||
|
||||
user = User.objects.create()
|
||||
Post.objects.create(created_user=user)
|
||||
|
||||
self.assertEqual(Post.objects.filter(created_user=user).count(), 1)
|
||||
self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1)
|
||||
|
||||
def test_and_combination(self):
|
||||
"""Ensure that Q-objects correctly AND together.
|
||||
"""
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
y = StringField()
|
||||
|
||||
query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
|
||||
self.assertEqual(query, {'$and': [{'x': {'$lt': 7}}, {'x': {'$lt': 3}}]})
|
||||
|
||||
query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc)
|
||||
self.assertEqual(query, {'$and': [{'y': "a"}, {'x': {'$lt': 7}}, {'x': {'$lt': 3}}]})
|
||||
|
||||
# Check normal cases work without an error
|
||||
query = Q(x__lt=7) & Q(x__gt=3)
|
||||
|
||||
q1 = Q(x__lt=7)
|
||||
q2 = Q(x__gt=3)
|
||||
query = (q1 & q2).to_query(TestDoc)
|
||||
self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}})
|
||||
|
||||
# More complex nested example
|
||||
query = Q(x__lt=100) & Q(y__ne='NotMyString')
|
||||
query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100)
|
||||
mongo_query = {
|
||||
'x': {'$lt': 100, '$gt': -100},
|
||||
'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']},
|
||||
}
|
||||
self.assertEqual(query.to_query(TestDoc), mongo_query)
|
||||
|
||||
def test_or_combination(self):
|
||||
"""Ensure that Q-objects correctly OR together.
|
||||
"""
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
|
||||
q1 = Q(x__lt=3)
|
||||
q2 = Q(x__gt=7)
|
||||
query = (q1 | q2).to_query(TestDoc)
|
||||
self.assertEqual(query, {
|
||||
'$or': [
|
||||
{'x': {'$lt': 3}},
|
||||
{'x': {'$gt': 7}},
|
||||
]
|
||||
})
|
||||
|
||||
def test_and_or_combination(self):
|
||||
"""Ensure that Q-objects handle ANDing ORed components.
|
||||
"""
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
y = BooleanField()
|
||||
|
||||
TestDoc.drop_collection()
|
||||
|
||||
query = (Q(x__gt=0) | Q(x__exists=False))
|
||||
query &= Q(x__lt=100)
|
||||
self.assertEqual(query.to_query(TestDoc), {'$and': [
|
||||
{'$or': [{'x': {'$gt': 0}},
|
||||
{'x': {'$exists': False}}]},
|
||||
{'x': {'$lt': 100}}]
|
||||
})
|
||||
|
||||
q1 = (Q(x__gt=0) | Q(x__exists=False))
|
||||
q2 = (Q(x__lt=100) | Q(y=True))
|
||||
query = (q1 & q2).to_query(TestDoc)
|
||||
|
||||
TestDoc(x=101).save()
|
||||
TestDoc(x=10).save()
|
||||
TestDoc(y=True).save()
|
||||
|
||||
self.assertEqual(query, {
|
||||
'$and': [
|
||||
{'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
|
||||
{'$or': [{'x': {'$lt': 100}}, {'y': True}]}
|
||||
]
|
||||
})
|
||||
self.assertEqual(2, TestDoc.objects(q1 & q2).count())
|
||||
|
||||
def test_or_and_or_combination(self):
|
||||
"""Ensure that Q-objects handle ORing ANDed ORed components. :)
|
||||
"""
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
y = BooleanField()
|
||||
|
||||
TestDoc.drop_collection()
|
||||
TestDoc(x=-1, y=True).save()
|
||||
TestDoc(x=101, y=True).save()
|
||||
TestDoc(x=99, y=False).save()
|
||||
TestDoc(x=101, y=False).save()
|
||||
|
||||
q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False)))
|
||||
q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)))
|
||||
query = (q1 | q2).to_query(TestDoc)
|
||||
|
||||
self.assertEqual(query, {
|
||||
'$or': [
|
||||
{'$and': [{'x': {'$gt': 0}},
|
||||
{'$or': [{'y': True}, {'y': {'$exists': False}}]}]},
|
||||
{'$and': [{'x': {'$lt': 100}},
|
||||
{'$or': [{'y': False}, {'y': {'$exists': False}}]}]}
|
||||
]
|
||||
})
|
||||
self.assertEqual(2, TestDoc.objects(q1 | q2).count())
|
||||
|
||||
def test_multiple_occurence_in_field(self):
|
||||
class Test(Document):
|
||||
name = StringField(max_length=40)
|
||||
title = StringField(max_length=40)
|
||||
|
||||
q1 = Q(name__contains='te') | Q(title__contains='te')
|
||||
q2 = Q(name__contains='12') | Q(title__contains='12')
|
||||
|
||||
q3 = q1 & q2
|
||||
|
||||
query = q3.to_query(Test)
|
||||
self.assertEqual(query["$and"][0], q1.to_query(Test))
|
||||
self.assertEqual(query["$and"][1], q2.to_query(Test))
|
||||
|
||||
def test_q_clone(self):
|
||||
|
||||
class TestDoc(Document):
|
||||
x = IntField()
|
||||
|
||||
TestDoc.drop_collection()
|
||||
for i in range(1, 101):
|
||||
t = TestDoc(x=i)
|
||||
t.save()
|
||||
|
||||
# Check normal cases work without an error
|
||||
test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3))
|
||||
|
||||
self.assertEqual(test.count(), 3)
|
||||
|
||||
test2 = test.clone()
|
||||
self.assertEqual(test2.count(), 3)
|
||||
self.assertNotEqual(test2, test)
|
||||
|
||||
test3 = test2.filter(x=6)
|
||||
self.assertEqual(test3.count(), 1)
|
||||
self.assertEqual(test.count(), 3)
|
||||
|
||||
def test_q(self):
|
||||
"""Ensure that Q objects may be used to query for documents.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
title = StringField()
|
||||
publish_date = DateTimeField()
|
||||
published = BooleanField()
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False)
|
||||
post1.save()
|
||||
|
||||
post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True)
|
||||
post2.save()
|
||||
|
||||
post3 = BlogPost(title='Test 3', published=True)
|
||||
post3.save()
|
||||
|
||||
post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8))
|
||||
post4.save()
|
||||
|
||||
post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15))
|
||||
post5.save()
|
||||
|
||||
post6 = BlogPost(title='Test 1', published=False)
|
||||
post6.save()
|
||||
|
||||
# Check ObjectId lookup works
|
||||
obj = BlogPost.objects(id=post1.id).first()
|
||||
self.assertEqual(obj, post1)
|
||||
|
||||
# Check Q object combination with one does not exist
|
||||
q = BlogPost.objects(Q(title='Test 5') | Q(published=True))
|
||||
posts = [post.id for post in q]
|
||||
|
||||
published_posts = (post2, post3)
|
||||
self.assertTrue(all(obj.id in posts for obj in published_posts))
|
||||
|
||||
q = BlogPost.objects(Q(title='Test 1') | Q(published=True))
|
||||
posts = [post.id for post in q]
|
||||
published_posts = (post1, post2, post3, post5, post6)
|
||||
self.assertTrue(all(obj.id in posts for obj in published_posts))
|
||||
|
||||
# Check Q object combination
|
||||
date = datetime.datetime(2010, 1, 10)
|
||||
q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
|
||||
posts = [post.id for post in q]
|
||||
|
||||
published_posts = (post1, post2, post3, post4)
|
||||
self.assertTrue(all(obj.id in posts for obj in published_posts))
|
||||
|
||||
self.assertFalse(any(obj.id in posts for obj in [post5, post6]))
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
# Check the 'in' operator
|
||||
self.Person(name='user1', age=20).save()
|
||||
self.Person(name='user2', age=20).save()
|
||||
self.Person(name='user3', age=30).save()
|
||||
self.Person(name='user4', age=40).save()
|
||||
|
||||
self.assertEqual(self.Person.objects(Q(age__in=[20])).count(), 2)
|
||||
self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3)
|
||||
|
||||
# Test invalid query objs
|
||||
with self.assertRaises(InvalidQueryError):
|
||||
self.Person.objects('user1')
|
||||
|
||||
# filter should fail, too
|
||||
with self.assertRaises(InvalidQueryError):
|
||||
self.Person.objects.filter('user1')
|
||||
|
||||
def test_q_regex(self):
|
||||
"""Ensure that Q objects can be queried using regexes.
|
||||
"""
|
||||
person = self.Person(name='Guido van Rossum')
|
||||
person.save()
|
||||
|
||||
obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
|
||||
self.assertEqual(obj, person)
|
||||
obj = self.Person.objects(Q(name=re.compile('^gui'))).first()
|
||||
self.assertEqual(obj, None)
|
||||
|
||||
obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first()
|
||||
self.assertEqual(obj, person)
|
||||
|
||||
obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first()
|
||||
self.assertEqual(obj, person)
|
||||
|
||||
obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first()
|
||||
self.assertEqual(obj, None)
|
||||
|
||||
def test_q_repr(self):
|
||||
self.assertEqual(repr(Q()), 'Q(**{})')
|
||||
self.assertEqual(repr(Q(name='test')), "Q(**{'name': 'test'})")
|
||||
|
||||
self.assertEqual(
|
||||
repr(Q(name='test') & Q(age__gte=18)),
|
||||
"(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))")
|
||||
|
||||
self.assertEqual(
|
||||
repr(Q(name='test') | Q(age__gte=18)),
|
||||
"(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))")
|
||||
|
||||
def test_q_lists(self):
|
||||
"""Ensure that Q objects query ListFields correctly.
|
||||
"""
|
||||
class BlogPost(Document):
|
||||
tags = ListField(StringField())
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
BlogPost(tags=['python', 'mongo']).save()
|
||||
BlogPost(tags=['python']).save()
|
||||
|
||||
self.assertEqual(BlogPost.objects(Q(tags='mongo')).count(), 1)
|
||||
self.assertEqual(BlogPost.objects(Q(tags='python')).count(), 2)
|
||||
|
||||
BlogPost.drop_collection()
|
||||
|
||||
def test_q_merge_queries_edge_case(self):
|
||||
|
||||
class User(Document):
|
||||
email = EmailField(required=False)
|
||||
name = StringField()
|
||||
|
||||
User.drop_collection()
|
||||
pk = ObjectId()
|
||||
User(email='example@example.com', pk=pk).save()
|
||||
|
||||
self.assertEqual(1, User.objects.filter(Q(email='example@example.com') |
|
||||
Q(name='John Doe')).limit(2).filter(pk=pk).count())
|
||||
|
||||
def test_chained_q_or_filtering(self):
|
||||
|
||||
class Post(EmbeddedDocument):
|
||||
name = StringField(required=True)
|
||||
|
||||
class Item(Document):
|
||||
postables = ListField(EmbeddedDocumentField(Post))
|
||||
|
||||
Item.drop_collection()
|
||||
|
||||
Item(postables=[Post(name="a"), Post(name="b")]).save()
|
||||
Item(postables=[Post(name="a"), Post(name="c")]).save()
|
||||
Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save()
|
||||
|
||||
self.assertEqual(Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2)
|
||||
self.assertEqual(Item.objects.filter(postables__name="a").filter(postables__name="b").count(), 2)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
9
tests/test_ci.py
Normal file
@@ -0,0 +1,9 @@
def test_ci_placeholder():
    # This empty test is used within the CI to
    # setup the tox venv without running the test suite
    # if we simply skip all test with pytest -k=wrong_pattern
    # pytest command would return with exit_code=5 (i.e "no tests run")
    # making travis fail
    # this empty test is the recommended way to handle this
    # as described in https://github.com/pytest-dev/pytest/issues/2393
    pass
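The placeholder above exists because pytest exits with code 5 when no tests are collected (for example when `-k` matches nothing), and CI systems treat any non-zero exit code as a failure. As a rough illustration only, not part of this changeset, a CI wrapper script could instead map that exit code to success; the file name and helper below are hypothetical:

# ci_run_pytest.py -- illustrative sketch, not part of the MongoEngine repo.
# Runs pytest and treats exit code 5 ("no tests collected") as a success,
# which is the behaviour the empty placeholder test works around.
import subprocess
import sys


def run_pytest(args):
    result = subprocess.run([sys.executable, "-m", "pytest"] + list(args))
    if result.returncode == 5:  # pytest: no tests were collected
        return 0
    return result.returncode


if __name__ == "__main__":
    sys.exit(run_pytest(sys.argv[1:]))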
@@ -1,15 +1,16 @@
import unittest

from mongoengine.common import _import_class
import pytest

from mongoengine import Document
from mongoengine.common import _import_class


class TestCommon(unittest.TestCase):

class TestCommon:
    def test__import_class(self):
        doc_cls = _import_class("Document")
        self.assertIs(doc_cls, Document)
        assert doc_cls is Document

    def test__import_class_raise_if_not_known(self):
        with self.assertRaises(ValueError):
        with pytest.raises(ValueError):
            _import_class("UnknownClass")

@@ -1,33 +1,52 @@
|
||||
import datetime
|
||||
|
||||
from pymongo import MongoClient
|
||||
from pymongo.errors import OperationFailure, InvalidName
|
||||
from pymongo import ReadPreference
|
||||
from bson.tz_util import utc
|
||||
import pymongo
|
||||
|
||||
from pymongo import MongoClient, ReadPreference
|
||||
from pymongo.errors import InvalidName, OperationFailure
|
||||
import pytest
|
||||
|
||||
try:
|
||||
import unittest2 as unittest
|
||||
except ImportError:
|
||||
import unittest
|
||||
from nose.plugins.skip import SkipTest
|
||||
|
||||
import pymongo
|
||||
from bson.tz_util import utc
|
||||
|
||||
from mongoengine import (
|
||||
connect, register_connection,
|
||||
Document, DateTimeField,
|
||||
disconnect_all, StringField)
|
||||
import mongoengine.connection
|
||||
from mongoengine.connection import (MongoEngineConnectionError, get_db,
|
||||
get_connection, disconnect, DEFAULT_DATABASE_NAME)
|
||||
from mongoengine import (
|
||||
DateTimeField,
|
||||
Document,
|
||||
StringField,
|
||||
connect,
|
||||
disconnect_all,
|
||||
register_connection,
|
||||
)
|
||||
from mongoengine.connection import (
|
||||
ConnectionFailure,
|
||||
DEFAULT_DATABASE_NAME,
|
||||
disconnect,
|
||||
get_connection,
|
||||
get_db,
|
||||
)
|
||||
|
||||
|
||||
def get_tz_awareness(connection):
|
||||
return connection.codec_options.tz_aware
|
||||
|
||||
|
||||
class ConnectionTest(unittest.TestCase):
|
||||
try:
|
||||
import mongomock
|
||||
|
||||
MONGOMOCK_INSTALLED = True
|
||||
except ImportError:
|
||||
MONGOMOCK_INSTALLED = False
|
||||
|
||||
require_mongomock = pytest.mark.skipif(
|
||||
not MONGOMOCK_INSTALLED, reason="you need mongomock installed to run this testcase"
|
||||
)
|
||||
|
||||
|
||||
class ConnectionTest(unittest.TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
disconnect_all()
|
||||
@@ -43,58 +62,58 @@ class ConnectionTest(unittest.TestCase):
|
||||
|
||||
def test_connect(self):
|
||||
"""Ensure that the connect() method works properly."""
|
||||
connect('mongoenginetest')
|
||||
connect("mongoenginetest")
|
||||
|
||||
conn = get_connection()
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
db = get_db()
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'mongoenginetest')
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "mongoenginetest"
|
||||
|
||||
connect('mongoenginetest2', alias='testdb')
|
||||
conn = get_connection('testdb')
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
connect("mongoenginetest2", alias="testdb")
|
||||
conn = get_connection("testdb")
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
def test_connect_disconnect_works_properly(self):
|
||||
class History1(Document):
|
||||
name = StringField()
|
||||
meta = {'db_alias': 'db1'}
|
||||
meta = {"db_alias": "db1"}
|
||||
|
||||
class History2(Document):
|
||||
name = StringField()
|
||||
meta = {'db_alias': 'db2'}
|
||||
meta = {"db_alias": "db2"}
|
||||
|
||||
connect('db1', alias='db1')
|
||||
connect('db2', alias='db2')
|
||||
connect("db1", alias="db1")
|
||||
connect("db2", alias="db2")
|
||||
|
||||
History1.drop_collection()
|
||||
History2.drop_collection()
|
||||
|
||||
h = History1(name='default').save()
|
||||
h1 = History2(name='db1').save()
|
||||
h = History1(name="default").save()
|
||||
h1 = History2(name="db1").save()
|
||||
|
||||
self.assertEqual(list(History1.objects().as_pymongo()),
|
||||
[{'_id': h.id, 'name': 'default'}])
|
||||
self.assertEqual(list(History2.objects().as_pymongo()),
|
||||
[{'_id': h1.id, 'name': 'db1'}])
|
||||
assert list(History1.objects().as_pymongo()) == [
|
||||
{"_id": h.id, "name": "default"}
|
||||
]
|
||||
assert list(History2.objects().as_pymongo()) == [{"_id": h1.id, "name": "db1"}]
|
||||
|
||||
disconnect('db1')
|
||||
disconnect('db2')
|
||||
disconnect("db1")
|
||||
disconnect("db2")
|
||||
|
||||
with self.assertRaises(MongoEngineConnectionError):
|
||||
with pytest.raises(ConnectionFailure):
|
||||
list(History1.objects().as_pymongo())
|
||||
|
||||
with self.assertRaises(MongoEngineConnectionError):
|
||||
with pytest.raises(ConnectionFailure):
|
||||
list(History2.objects().as_pymongo())
|
||||
|
||||
connect('db1', alias='db1')
|
||||
connect('db2', alias='db2')
|
||||
connect("db1", alias="db1")
|
||||
connect("db2", alias="db2")
|
||||
|
||||
self.assertEqual(list(History1.objects().as_pymongo()),
|
||||
[{'_id': h.id, 'name': 'default'}])
|
||||
self.assertEqual(list(History2.objects().as_pymongo()),
|
||||
[{'_id': h1.id, 'name': 'db1'}])
|
||||
assert list(History1.objects().as_pymongo()) == [
|
||||
{"_id": h.id, "name": "default"}
|
||||
]
|
||||
assert list(History2.objects().as_pymongo()) == [{"_id": h1.id, "name": "db1"}]
|
||||
|
||||
def test_connect_different_documents_to_different_database(self):
|
||||
class History(Document):
|
||||
@@ -102,170 +121,207 @@ class ConnectionTest(unittest.TestCase):
|
||||
|
||||
class History1(Document):
|
||||
name = StringField()
|
||||
meta = {'db_alias': 'db1'}
|
||||
meta = {"db_alias": "db1"}
|
||||
|
||||
class History2(Document):
|
||||
name = StringField()
|
||||
meta = {'db_alias': 'db2'}
|
||||
meta = {"db_alias": "db2"}
|
||||
|
||||
connect()
|
||||
connect('db1', alias='db1')
|
||||
connect('db2', alias='db2')
|
||||
connect("db1", alias="db1")
|
||||
connect("db2", alias="db2")
|
||||
|
||||
History.drop_collection()
|
||||
History1.drop_collection()
|
||||
History2.drop_collection()
|
||||
|
||||
h = History(name='default').save()
|
||||
h1 = History1(name='db1').save()
|
||||
h2 = History2(name='db2').save()
|
||||
h = History(name="default").save()
|
||||
h1 = History1(name="db1").save()
|
||||
h2 = History2(name="db2").save()
|
||||
|
||||
self.assertEqual(History._collection.database.name, DEFAULT_DATABASE_NAME)
|
||||
self.assertEqual(History1._collection.database.name, 'db1')
|
||||
self.assertEqual(History2._collection.database.name, 'db2')
|
||||
assert History._collection.database.name == DEFAULT_DATABASE_NAME
|
||||
assert History1._collection.database.name == "db1"
|
||||
assert History2._collection.database.name == "db2"
|
||||
|
||||
self.assertEqual(list(History.objects().as_pymongo()),
|
||||
[{'_id': h.id, 'name': 'default'}])
|
||||
self.assertEqual(list(History1.objects().as_pymongo()),
|
||||
[{'_id': h1.id, 'name': 'db1'}])
|
||||
self.assertEqual(list(History2.objects().as_pymongo()),
|
||||
[{'_id': h2.id, 'name': 'db2'}])
|
||||
assert list(History.objects().as_pymongo()) == [
|
||||
{"_id": h.id, "name": "default"}
|
||||
]
|
||||
assert list(History1.objects().as_pymongo()) == [{"_id": h1.id, "name": "db1"}]
|
||||
assert list(History2.objects().as_pymongo()) == [{"_id": h2.id, "name": "db2"}]
|
||||
|
||||
def test_connect_fails_if_connect_2_times_with_default_alias(self):
|
||||
connect('mongoenginetest')
|
||||
connect("mongoenginetest")
|
||||
|
||||
with self.assertRaises(MongoEngineConnectionError) as ctx_err:
|
||||
connect('mongoenginetest2')
|
||||
self.assertEqual("A different connection with alias `default` was already registered. Use disconnect() first", str(ctx_err.exception))
|
||||
with pytest.raises(ConnectionFailure) as exc_info:
|
||||
connect("mongoenginetest2")
|
||||
assert (
|
||||
"A different connection with alias `default` was already registered. Use disconnect() first"
|
||||
== str(exc_info.value)
|
||||
)
|
||||
|
||||
def test_connect_fails_if_connect_2_times_with_custom_alias(self):
|
||||
connect('mongoenginetest', alias='alias1')
|
||||
connect("mongoenginetest", alias="alias1")
|
||||
|
||||
with self.assertRaises(MongoEngineConnectionError) as ctx_err:
|
||||
connect('mongoenginetest2', alias='alias1')
|
||||
with pytest.raises(ConnectionFailure) as exc_info:
|
||||
connect("mongoenginetest2", alias="alias1")
|
||||
|
||||
self.assertEqual("A different connection with alias `alias1` was already registered. Use disconnect() first", str(ctx_err.exception))
|
||||
assert (
|
||||
"A different connection with alias `alias1` was already registered. Use disconnect() first"
|
||||
== str(exc_info.value)
|
||||
)
|
||||
|
||||
def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way(self):
|
||||
def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way(
|
||||
self,
|
||||
):
|
||||
"""Intended to keep the detecton function simple but robust"""
|
||||
db_name = 'mongoenginetest'
|
||||
db_alias = 'alias1'
|
||||
connect(db=db_name, alias=db_alias, host='localhost', port=27017)
|
||||
db_name = "mongoenginetest"
|
||||
db_alias = "alias1"
|
||||
connect(db=db_name, alias=db_alias, host="localhost", port=27017)
|
||||
|
||||
with self.assertRaises(MongoEngineConnectionError):
|
||||
connect(host='mongodb://localhost:27017/%s' % db_name, alias=db_alias)
|
||||
with pytest.raises(ConnectionFailure):
|
||||
connect(host="mongodb://localhost:27017/%s" % db_name, alias=db_alias)
|
||||
|
||||
def test_connect_passes_silently_connect_multiple_times_with_same_config(self):
|
||||
# test default connection to `test`
|
||||
connect()
|
||||
connect()
|
||||
self.assertEqual(len(mongoengine.connection._connections), 1)
|
||||
connect('test01', alias='test01')
|
||||
connect('test01', alias='test01')
|
||||
self.assertEqual(len(mongoengine.connection._connections), 2)
|
||||
connect(host='mongodb://localhost:27017/mongoenginetest02', alias='test02')
|
||||
connect(host='mongodb://localhost:27017/mongoenginetest02', alias='test02')
|
||||
self.assertEqual(len(mongoengine.connection._connections), 3)
|
||||
assert len(mongoengine.connection._connections) == 1
|
||||
connect("test01", alias="test01")
|
||||
connect("test01", alias="test01")
|
||||
assert len(mongoengine.connection._connections) == 2
|
||||
connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02")
|
||||
connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02")
|
||||
assert len(mongoengine.connection._connections) == 3
|
||||
|
||||
def test_connect_with_invalid_db_name(self):
|
||||
"""Ensure that connect() method fails fast if db name is invalid
|
||||
"""
|
||||
with self.assertRaises(InvalidName):
|
||||
connect('mongomock://localhost')
|
||||
with pytest.raises(InvalidName):
|
||||
connect("mongomock://localhost")
|
||||
|
||||
def test_connect_with_db_name_external(self):
|
||||
"""Ensure that connect() works if db name is $external
|
||||
"""
|
||||
"""Ensure that the connect() method works properly."""
|
||||
connect('$external')
|
||||
connect("$external")
|
||||
|
||||
conn = get_connection()
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
db = get_db()
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, '$external')
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "$external"
|
||||
|
||||
connect('$external', alias='testdb')
|
||||
conn = get_connection('testdb')
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
connect("$external", alias="testdb")
|
||||
conn = get_connection("testdb")
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
def test_connect_with_invalid_db_name_type(self):
|
||||
"""Ensure that connect() method fails fast if db name has invalid type
|
||||
"""
|
||||
with self.assertRaises(TypeError):
|
||||
non_string_db_name = ['e. g. list instead of a string']
|
||||
with pytest.raises(TypeError):
|
||||
non_string_db_name = ["e. g. list instead of a string"]
|
||||
connect(non_string_db_name)
|
||||
|
||||
@require_mongomock
|
||||
def test_connect_in_mocking(self):
|
||||
"""Ensure that the connect() method works properly in mocking.
|
||||
"""
|
||||
try:
|
||||
import mongomock
|
||||
except ImportError:
|
||||
raise SkipTest('you need mongomock installed to run this testcase')
|
||||
|
||||
connect('mongoenginetest', host='mongomock://localhost')
|
||||
connect("mongoenginetest", host="mongomock://localhost")
|
||||
conn = get_connection()
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2')
|
||||
conn = get_connection('testdb2')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect("mongoenginetest2", host="mongomock://localhost", alias="testdb2")
|
||||
conn = get_connection("testdb2")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3')
|
||||
conn = get_connection('testdb3')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(
|
||||
"mongoenginetest3",
|
||||
host="mongodb://localhost",
|
||||
is_mock=True,
|
||||
alias="testdb3",
|
||||
)
|
||||
conn = get_connection("testdb3")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect('mongoenginetest4', is_mock=True, alias='testdb4')
|
||||
conn = get_connection('testdb4')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect("mongoenginetest4", is_mock=True, alias="testdb4")
|
||||
conn = get_connection("testdb4")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5')
|
||||
conn = get_connection('testdb5')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(
|
||||
host="mongodb://localhost:27017/mongoenginetest5",
|
||||
is_mock=True,
|
||||
alias="testdb5",
|
||||
)
|
||||
conn = get_connection("testdb5")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6')
|
||||
conn = get_connection('testdb6')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(host="mongomock://localhost:27017/mongoenginetest6", alias="testdb6")
|
||||
conn = get_connection("testdb6")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7')
|
||||
conn = get_connection('testdb7')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(
|
||||
host="mongomock://localhost:27017/mongoenginetest7",
|
||||
is_mock=True,
|
||||
alias="testdb7",
|
||||
)
|
||||
conn = get_connection("testdb7")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
@require_mongomock
|
||||
def test_default_database_with_mocking(self):
|
||||
"""Ensure that the default database is correctly set when using mongomock.
|
||||
"""
|
||||
disconnect_all()
|
||||
|
||||
class SomeDocument(Document):
|
||||
pass
|
||||
|
||||
conn = connect(host="mongomock://localhost:27017/mongoenginetest")
|
||||
some_document = SomeDocument()
|
||||
# database won't exist until we save a document
|
||||
some_document.save()
|
||||
assert conn.get_default_database().name == "mongoenginetest"
|
||||
assert conn.list_database_names()[0] == "mongoenginetest"
|
||||
|
||||
@require_mongomock
|
||||
def test_connect_with_host_list(self):
|
||||
"""Ensure that the connect() method works when host is a list
|
||||
|
||||
Uses mongomock to test w/o needing multiple mongod/mongos processes
|
||||
"""
|
||||
try:
|
||||
import mongomock
|
||||
except ImportError:
|
||||
raise SkipTest('you need mongomock installed to run this testcase')
|
||||
|
||||
connect(host=['mongomock://localhost'])
|
||||
connect(host=["mongomock://localhost"])
|
||||
conn = get_connection()
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2')
|
||||
conn = get_connection('testdb2')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(host=["mongodb://localhost"], is_mock=True, alias="testdb2")
|
||||
conn = get_connection("testdb2")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['localhost'], is_mock=True, alias='testdb3')
|
||||
conn = get_connection('testdb3')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(host=["localhost"], is_mock=True, alias="testdb3")
|
||||
conn = get_connection("testdb3")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4')
|
||||
conn = get_connection('testdb4')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(
|
||||
host=["mongomock://localhost:27017", "mongomock://localhost:27018"],
|
||||
alias="testdb4",
|
||||
)
|
||||
conn = get_connection("testdb4")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5')
|
||||
conn = get_connection('testdb5')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(
|
||||
host=["mongodb://localhost:27017", "mongodb://localhost:27018"],
|
||||
is_mock=True,
|
||||
alias="testdb5",
|
||||
)
|
||||
conn = get_connection("testdb5")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6')
|
||||
conn = get_connection('testdb6')
|
||||
self.assertIsInstance(conn, mongomock.MongoClient)
|
||||
connect(
|
||||
host=["localhost:27017", "localhost:27018"], is_mock=True, alias="testdb6"
|
||||
)
|
||||
conn = get_connection("testdb6")
|
||||
assert isinstance(conn, mongomock.MongoClient)
|
||||
|
||||
def test_disconnect_cleans_globals(self):
|
||||
"""Ensure that the disconnect() method cleans the globals objects"""
|
||||
@@ -273,52 +329,52 @@ class ConnectionTest(unittest.TestCase):
|
||||
dbs = mongoengine.connection._dbs
|
||||
connection_settings = mongoengine.connection._connection_settings
|
||||
|
||||
connect('mongoenginetest')
|
||||
connect("mongoenginetest")
|
||||
|
||||
self.assertEqual(len(connections), 1)
|
||||
self.assertEqual(len(dbs), 0)
|
||||
self.assertEqual(len(connection_settings), 1)
|
||||
assert len(connections) == 1
|
||||
assert len(dbs) == 0
|
||||
assert len(connection_settings) == 1
|
||||
|
||||
class TestDoc(Document):
|
||||
pass
|
||||
|
||||
TestDoc.drop_collection() # triggers the db
|
||||
self.assertEqual(len(dbs), 1)
|
||||
assert len(dbs) == 1
|
||||
|
||||
disconnect()
|
||||
self.assertEqual(len(connections), 0)
|
||||
self.assertEqual(len(dbs), 0)
|
||||
self.assertEqual(len(connection_settings), 0)
|
||||
assert len(connections) == 0
|
||||
assert len(dbs) == 0
|
||||
assert len(connection_settings) == 0
|
||||
|
||||
def test_disconnect_cleans_cached_collection_attribute_in_document(self):
|
||||
"""Ensure that the disconnect() method works properly"""
|
||||
conn1 = connect('mongoenginetest')
|
||||
connect("mongoenginetest")
|
||||
|
||||
class History(Document):
|
||||
pass
|
||||
|
||||
self.assertIsNone(History._collection)
|
||||
assert History._collection is None
|
||||
|
||||
History.drop_collection()
|
||||
|
||||
History.objects.first() # will trigger the caching of _collection attribute
|
||||
self.assertIsNotNone(History._collection)
|
||||
History.objects.first() # will trigger the caching of _collection attribute
|
||||
assert History._collection is not None
|
||||
|
||||
disconnect()
|
||||
|
||||
self.assertIsNone(History._collection)
|
||||
assert History._collection is None
|
||||
|
||||
with self.assertRaises(MongoEngineConnectionError) as ctx_err:
|
||||
with pytest.raises(ConnectionFailure) as exc_info:
|
||||
History.objects.first()
|
||||
self.assertEqual("You have not defined a default connection", str(ctx_err.exception))
|
||||
assert "You have not defined a default connection" == str(exc_info.value)
|
||||
|
||||
def test_connect_disconnect_works_on_same_document(self):
|
||||
"""Ensure that the connect/disconnect works properly with a single Document"""
|
||||
db1 = 'db1'
|
||||
db2 = 'db2'
|
||||
db1 = "db1"
|
||||
db2 = "db2"
|
||||
|
||||
# Ensure freshness of the 2 databases through pymongo
|
||||
client = MongoClient('localhost', 27017)
|
||||
client = MongoClient("localhost", 27017)
|
||||
client.drop_database(db1)
|
||||
client.drop_database(db2)
|
||||
|
||||
@@ -328,68 +384,68 @@ class ConnectionTest(unittest.TestCase):
|
||||
class User(Document):
|
||||
name = StringField(required=True)
|
||||
|
||||
user1 = User(name='John is in db1').save()
|
||||
user1 = User(name="John is in db1").save()
|
||||
disconnect()
|
||||
|
||||
# Make sure save doesn't work at this stage
|
||||
with self.assertRaises(MongoEngineConnectionError):
|
||||
User(name='Wont work').save()
|
||||
with pytest.raises(ConnectionFailure):
|
||||
User(name="Wont work").save()
|
||||
|
||||
# Save in db2
|
||||
connect(db2)
|
||||
user2 = User(name='Bob is in db2').save()
|
||||
user2 = User(name="Bob is in db2").save()
|
||||
disconnect()
|
||||
|
||||
db1_users = list(client[db1].user.find())
|
||||
self.assertEqual(db1_users, [{'_id': user1.id, 'name': 'John is in db1'}])
|
||||
assert db1_users == [{"_id": user1.id, "name": "John is in db1"}]
|
||||
db2_users = list(client[db2].user.find())
|
||||
self.assertEqual(db2_users, [{'_id': user2.id, 'name': 'Bob is in db2'}])
|
||||
assert db2_users == [{"_id": user2.id, "name": "Bob is in db2"}]
|
||||
|
||||
def test_disconnect_silently_pass_if_alias_does_not_exist(self):
|
||||
connections = mongoengine.connection._connections
|
||||
self.assertEqual(len(connections), 0)
|
||||
disconnect(alias='not_exist')
|
||||
assert len(connections) == 0
|
||||
disconnect(alias="not_exist")
|
||||
|
||||
def test_disconnect_all(self):
|
||||
connections = mongoengine.connection._connections
|
||||
dbs = mongoengine.connection._dbs
|
||||
connection_settings = mongoengine.connection._connection_settings
|
||||
|
||||
connect('mongoenginetest')
|
||||
connect('mongoenginetest2', alias='db1')
|
||||
connect("mongoenginetest")
|
||||
connect("mongoenginetest2", alias="db1")
|
||||
|
||||
class History(Document):
|
||||
pass
|
||||
|
||||
class History1(Document):
|
||||
name = StringField()
|
||||
meta = {'db_alias': 'db1'}
|
||||
meta = {"db_alias": "db1"}
|
||||
|
||||
History.drop_collection() # will trigger the caching of _collection attribute
|
||||
History.drop_collection() # will trigger the caching of _collection attribute
|
||||
History.objects.first()
|
||||
History1.drop_collection()
|
||||
History1.objects.first()
|
||||
|
||||
self.assertIsNotNone(History._collection)
|
||||
self.assertIsNotNone(History1._collection)
|
||||
assert History._collection is not None
|
||||
assert History1._collection is not None
|
||||
|
||||
self.assertEqual(len(connections), 2)
|
||||
self.assertEqual(len(dbs), 2)
|
||||
self.assertEqual(len(connection_settings), 2)
|
||||
assert len(connections) == 2
|
||||
assert len(dbs) == 2
|
||||
assert len(connection_settings) == 2
|
||||
|
||||
disconnect_all()
|
||||
|
||||
self.assertIsNone(History._collection)
|
||||
self.assertIsNone(History1._collection)
|
||||
assert History._collection is None
|
||||
assert History1._collection is None
|
||||
|
||||
self.assertEqual(len(connections), 0)
|
||||
self.assertEqual(len(dbs), 0)
|
||||
self.assertEqual(len(connection_settings), 0)
|
||||
assert len(connections) == 0
|
||||
assert len(dbs) == 0
|
||||
assert len(connection_settings) == 0
|
||||
|
||||
with self.assertRaises(MongoEngineConnectionError):
|
||||
with pytest.raises(ConnectionFailure):
|
||||
History.objects.first()
|
||||
|
||||
with self.assertRaises(MongoEngineConnectionError):
|
||||
with pytest.raises(ConnectionFailure):
|
||||
History1.objects.first()
|
||||
|
||||
def test_disconnect_all_silently_pass_if_no_connection_exist(self):
|
||||
@@ -398,19 +454,19 @@ class ConnectionTest(unittest.TestCase):
|
||||
def test_sharing_connections(self):
|
||||
"""Ensure that connections are shared when the connection settings are exactly the same
|
||||
"""
|
||||
connect('mongoenginetests', alias='testdb1')
|
||||
expected_connection = get_connection('testdb1')
|
||||
connect("mongoenginetests", alias="testdb1")
|
||||
expected_connection = get_connection("testdb1")
|
||||
|
||||
connect('mongoenginetests', alias='testdb2')
|
||||
actual_connection = get_connection('testdb2')
|
||||
connect("mongoenginetests", alias="testdb2")
|
||||
actual_connection = get_connection("testdb2")
|
||||
|
||||
expected_connection.server_info()
|
||||
|
||||
self.assertEqual(expected_connection, actual_connection)
|
||||
assert expected_connection == actual_connection
|
||||
|
||||
def test_connect_uri(self):
|
||||
"""Ensure that the connect() method works properly with URIs."""
|
||||
c = connect(db='mongoenginetest', alias='admin')
|
||||
c = connect(db="mongoenginetest", alias="admin")
|
||||
c.admin.system.users.delete_many({})
|
||||
c.mongoenginetest.system.users.delete_many({})
|
||||
|
||||
@@ -418,14 +474,16 @@ class ConnectionTest(unittest.TestCase):
|
||||
c.admin.authenticate("admin", "password")
|
||||
c.admin.command("createUser", "username", pwd="password", roles=["dbOwner"])
|
||||
|
||||
connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest')
|
||||
connect(
|
||||
"testdb_uri", host="mongodb://username:password@localhost/mongoenginetest"
|
||||
)
|
||||
|
||||
conn = get_connection()
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
db = get_db()
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'mongoenginetest')
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "mongoenginetest"
|
||||
|
||||
c.admin.system.users.delete_many({})
|
||||
c.mongoenginetest.system.users.delete_many({})
|
||||
@@ -434,67 +492,73 @@ class ConnectionTest(unittest.TestCase):
|
||||
"""Ensure connect() method works properly if the URI doesn't
|
||||
include a database name.
|
||||
"""
|
||||
connect("mongoenginetest", host='mongodb://localhost/')
|
||||
connect("mongoenginetest", host="mongodb://localhost/")
|
||||
|
||||
conn = get_connection()
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
db = get_db()
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'mongoenginetest')
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "mongoenginetest"
|
||||
|
||||
def test_connect_uri_default_db(self):
|
||||
"""Ensure connect() defaults to the right database name if
|
||||
the URI and the database_name don't explicitly specify it.
|
||||
"""
|
||||
connect(host='mongodb://localhost/')
|
||||
connect(host="mongodb://localhost/")
|
||||
|
||||
conn = get_connection()
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
db = get_db()
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'test')
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "test"
|
||||
|
||||
def test_uri_without_credentials_doesnt_override_conn_settings(self):
|
||||
"""Ensure connect() uses the username & password params if the URI
|
||||
doesn't explicitly specify them.
|
||||
"""
|
||||
c = connect(host='mongodb://localhost/mongoenginetest',
|
||||
username='user',
|
||||
password='pass')
|
||||
connect(
|
||||
host="mongodb://localhost/mongoenginetest", username="user", password="pass"
|
||||
)
|
||||
|
||||
# OperationFailure means that mongoengine attempted authentication
|
||||
# w/ the provided username/password and failed - that's the desired
|
||||
# behavior. If the MongoDB URI would override the credentials
|
||||
self.assertRaises(OperationFailure, get_db)
|
||||
with pytest.raises(OperationFailure):
|
||||
get_db()
|
||||
|
||||
def test_connect_uri_with_authsource(self):
|
||||
"""Ensure that the connect() method works well with `authSource`
|
||||
option in the URI.
|
||||
"""
|
||||
# Create users
|
||||
c = connect('mongoenginetest')
|
||||
c = connect("mongoenginetest")
|
||||
|
||||
c.admin.system.users.delete_many({})
|
||||
c.admin.command("createUser", "username2", pwd="password", roles=["dbOwner"])
|
||||
|
||||
# Authentication fails without "authSource"
|
||||
test_conn = connect(
|
||||
'mongoenginetest', alias='test1',
|
||||
host='mongodb://username2:password@localhost/mongoenginetest'
|
||||
"mongoenginetest",
|
||||
alias="test1",
|
||||
host="mongodb://username2:password@localhost/mongoenginetest",
|
||||
)
|
||||
self.assertRaises(OperationFailure, test_conn.server_info)
|
||||
with pytest.raises(OperationFailure):
|
||||
test_conn.server_info()
|
||||
|
||||
# Authentication succeeds with "authSource"
|
||||
authd_conn = connect(
|
||||
'mongoenginetest', alias='test2',
|
||||
host=('mongodb://username2:password@localhost/'
|
||||
'mongoenginetest?authSource=admin')
|
||||
"mongoenginetest",
|
||||
alias="test2",
|
||||
host=(
|
||||
"mongodb://username2:password@localhost/"
|
||||
"mongoenginetest?authSource=admin"
|
||||
),
|
||||
)
|
||||
db = get_db('test2')
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'mongoenginetest')
|
||||
db = get_db("test2")
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "mongoenginetest"
|
||||
|
||||
# Clear all users
|
||||
authd_conn.admin.system.users.delete_many({})
|
||||
@@ -502,82 +566,89 @@ class ConnectionTest(unittest.TestCase):
|
||||
def test_register_connection(self):
|
||||
"""Ensure that connections with different aliases may be registered.
|
||||
"""
|
||||
register_connection('testdb', 'mongoenginetest2')
|
||||
register_connection("testdb", "mongoenginetest2")
|
||||
|
||||
self.assertRaises(MongoEngineConnectionError, get_connection)
|
||||
conn = get_connection('testdb')
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
with pytest.raises(ConnectionFailure):
|
||||
get_connection()
|
||||
conn = get_connection("testdb")
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
db = get_db('testdb')
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'mongoenginetest2')
|
||||
db = get_db("testdb")
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "mongoenginetest2"
|
||||
|
||||
def test_register_connection_defaults(self):
|
||||
"""Ensure that defaults are used when the host and port are None.
|
||||
"""
|
||||
register_connection('testdb', 'mongoenginetest', host=None, port=None)
|
||||
register_connection("testdb", "mongoenginetest", host=None, port=None)
|
||||
|
||||
conn = get_connection('testdb')
|
||||
self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
|
||||
conn = get_connection("testdb")
|
||||
assert isinstance(conn, pymongo.mongo_client.MongoClient)
|
||||
|
||||
def test_connection_kwargs(self):
|
||||
"""Ensure that connection kwargs get passed to pymongo."""
|
||||
connect('mongoenginetest', alias='t1', tz_aware=True)
|
||||
conn = get_connection('t1')
|
||||
connect("mongoenginetest", alias="t1", tz_aware=True)
|
||||
conn = get_connection("t1")
|
||||
|
||||
self.assertTrue(get_tz_awareness(conn))
|
||||
assert get_tz_awareness(conn)
|
||||
|
||||
connect('mongoenginetest2', alias='t2')
|
||||
conn = get_connection('t2')
|
||||
self.assertFalse(get_tz_awareness(conn))
|
||||
connect("mongoenginetest2", alias="t2")
|
||||
conn = get_connection("t2")
|
||||
assert not get_tz_awareness(conn)
|
||||
|
||||
def test_connection_pool_via_kwarg(self):
|
||||
"""Ensure we can specify a max connection pool size using
|
||||
a connection kwarg.
|
||||
"""
|
||||
pool_size_kwargs = {'maxpoolsize': 100}
|
||||
pool_size_kwargs = {"maxpoolsize": 100}
|
||||
|
||||
conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs)
|
||||
self.assertEqual(conn.max_pool_size, 100)
|
||||
conn = connect(
|
||||
"mongoenginetest", alias="max_pool_size_via_kwarg", **pool_size_kwargs
|
||||
)
|
||||
assert conn.max_pool_size == 100
|
||||
|
||||
def test_connection_pool_via_uri(self):
|
||||
"""Ensure we can specify a max connection pool size using
|
||||
an option in a connection URI.
|
||||
"""
|
||||
conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri')
|
||||
self.assertEqual(conn.max_pool_size, 100)
|
||||
conn = connect(
|
||||
host="mongodb://localhost/test?maxpoolsize=100",
|
||||
alias="max_pool_size_via_uri",
|
||||
)
|
||||
assert conn.max_pool_size == 100
|
||||
|
||||
def test_write_concern(self):
|
||||
"""Ensure write concern can be specified in connect() via
|
||||
a kwarg or as part of the connection URI.
|
||||
"""
|
||||
conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true')
|
||||
conn2 = connect('testing', alias='conn2', w=1, j=True)
|
||||
self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True})
|
||||
self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True})
|
||||
conn1 = connect(
|
||||
alias="conn1", host="mongodb://localhost/testing?w=1&journal=true"
|
||||
)
|
||||
conn2 = connect("testing", alias="conn2", w=1, journal=True)
|
||||
assert conn1.write_concern.document == {"w": 1, "j": True}
|
||||
assert conn2.write_concern.document == {"w": 1, "j": True}
|
||||
|
||||
def test_connect_with_replicaset_via_uri(self):
|
||||
"""Ensure connect() works when specifying a replicaSet via the
|
||||
MongoDB URI.
|
||||
"""
|
||||
c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
|
||||
connect(host="mongodb://localhost/test?replicaSet=local-rs")
|
||||
db = get_db()
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'test')
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "test"
|
||||
|
||||
def test_connect_with_replicaset_via_kwargs(self):
|
||||
"""Ensure connect() works when specifying a replicaSet via the
|
||||
connection kwargs
|
||||
"""
|
||||
c = connect(replicaset='local-rs')
|
||||
self.assertEqual(c._MongoClient__options.replica_set_name,
|
||||
'local-rs')
|
||||
c = connect(replicaset="local-rs")
|
||||
assert c._MongoClient__options.replica_set_name == "local-rs"
|
||||
db = get_db()
|
||||
self.assertIsInstance(db, pymongo.database.Database)
|
||||
self.assertEqual(db.name, 'test')
|
||||
assert isinstance(db, pymongo.database.Database)
|
||||
assert db.name == "test"
|
||||
|
||||
def test_connect_tz_aware(self):
|
||||
connect('mongoenginetest', tz_aware=True)
|
||||
connect("mongoenginetest", tz_aware=True)
|
||||
d = datetime.datetime(2010, 5, 5, tzinfo=utc)
|
||||
|
||||
class DateDoc(Document):
|
||||
@@ -587,40 +658,42 @@ class ConnectionTest(unittest.TestCase):
|
||||
DateDoc(the_date=d).save()
|
||||
|
||||
date_doc = DateDoc.objects.first()
|
||||
self.assertEqual(d, date_doc.the_date)
|
||||
assert d == date_doc.the_date
|
||||
|
||||
def test_read_preference_from_parse(self):
|
||||
conn = connect(host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred")
|
||||
self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED)
|
||||
conn = connect(
|
||||
host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred"
|
||||
)
|
||||
assert conn.read_preference == ReadPreference.SECONDARY_PREFERRED
|
||||
|
||||
def test_multiple_connection_settings(self):
|
||||
connect('mongoenginetest', alias='t1', host="localhost")
|
||||
connect("mongoenginetest", alias="t1", host="localhost")
|
||||
|
||||
connect('mongoenginetest2', alias='t2', host="127.0.0.1")
|
||||
connect("mongoenginetest2", alias="t2", host="127.0.0.1")
|
||||
|
||||
mongo_connections = mongoengine.connection._connections
|
||||
self.assertEqual(len(mongo_connections.items()), 2)
|
||||
self.assertIn('t1', mongo_connections.keys())
|
||||
self.assertIn('t2', mongo_connections.keys())
|
||||
assert len(mongo_connections.items()) == 2
|
||||
assert "t1" in mongo_connections.keys()
|
||||
assert "t2" in mongo_connections.keys()
|
||||
|
||||
# Handle PyMongo 3+ Async Connection
|
||||
# Ensure we are connected, throws ServerSelectionTimeoutError otherwise.
|
||||
# Purposely not catching exception to fail test if thrown.
|
||||
mongo_connections['t1'].server_info()
|
||||
mongo_connections['t2'].server_info()
|
||||
self.assertEqual(mongo_connections['t1'].address[0], 'localhost')
|
||||
self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1')
|
||||
mongo_connections["t1"].server_info()
|
||||
mongo_connections["t2"].server_info()
|
||||
assert mongo_connections["t1"].address[0] == "localhost"
|
||||
assert mongo_connections["t2"].address[0] == "127.0.0.1"
|
||||
|
||||
def test_connect_2_databases_uses_same_client_if_only_dbname_differs(self):
|
||||
c1 = connect(alias='testdb1', db='testdb1')
|
||||
c2 = connect(alias='testdb2', db='testdb2')
|
||||
self.assertIs(c1, c2)
|
||||
c1 = connect(alias="testdb1", db="testdb1")
|
||||
c2 = connect(alias="testdb2", db="testdb2")
|
||||
assert c1 is c2
|
||||
|
||||
def test_connect_2_databases_uses_different_client_if_different_parameters(self):
|
||||
c1 = connect(alias='testdb1', db='testdb1', username='u1')
|
||||
c2 = connect(alias='testdb2', db='testdb2', username='u2')
|
||||
self.assertIsNot(c1, c2)
|
||||
c1 = connect(alias="testdb1", db="testdb1", username="u1")
|
||||
c2 = connect(alias="testdb2", db="testdb2", username="u2")
|
||||
assert c1 is not c2
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
@@ -1,18 +1,71 @@
import unittest

import pytest

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import (switch_db, switch_collection,
no_sub_classes, no_dereference,
query_counter)
from mongoengine.context_managers import (
no_dereference,
no_sub_classes,
query_counter,
set_read_write_concern,
set_write_concern,
switch_collection,
switch_db,
)
from mongoengine.pymongo_support import count_documents


class ContextManagersTest(unittest.TestCase):
class TestContextManagers:
def test_set_write_concern(self):
connect("mongoenginetest")

class User(Document):
name = StringField()

collection = User._get_collection()
original_write_concern = collection.write_concern

with set_write_concern(
collection, {"w": "majority", "j": True, "wtimeout": 1234}
) as updated_collection:
assert updated_collection.write_concern.document == {
"w": "majority",
"j": True,
"wtimeout": 1234,
}

assert original_write_concern.document == collection.write_concern.document

def test_set_read_write_concern(self):
connect("mongoenginetest")

class User(Document):
name = StringField()

collection = User._get_collection()

original_read_concern = collection.read_concern
original_write_concern = collection.write_concern

with set_read_write_concern(
collection,
{"w": "majority", "j": True, "wtimeout": 1234},
{"level": "local"},
) as update_collection:
assert update_collection.read_concern.document == {"level": "local"}
assert update_collection.write_concern.document == {
"w": "majority",
"j": True,
"wtimeout": 1234,
}

assert original_read_concern.document == collection.read_concern.document
assert original_write_concern.document == collection.write_concern.document
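For context, a short usage sketch of set_write_concern outside the test suite, assuming a local mongod; the LogEntry document is a hypothetical example:

from mongoengine import Document, StringField, connect
from mongoengine.context_managers import set_write_concern

connect("mongoenginetest")

class LogEntry(Document):
    message = StringField()

coll = LogEntry._get_collection()
with set_write_concern(coll, {"w": "majority", "wtimeout": 5000}) as acked:
    # `acked` is a copy of the collection carrying the stricter write concern;
    # the original collection object is left untouched, as the test verifies.
    acked.insert_one({"message": "flushed with majority ack"})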
def test_switch_db_context_manager(self):
connect('mongoenginetest')
register_connection('testdb-1', 'mongoenginetest2')
connect("mongoenginetest")
register_connection("testdb-1", "mongoenginetest2")

class Group(Document):
name = StringField()
@@ -20,53 +73,53 @@ class ContextManagersTest(unittest.TestCase):
Group.drop_collection()

Group(name="hello - default").save()
self.assertEqual(1, Group.objects.count())
assert 1 == Group.objects.count()

with switch_db(Group, 'testdb-1') as Group:
with switch_db(Group, "testdb-1") as Group:

self.assertEqual(0, Group.objects.count())
assert 0 == Group.objects.count()

Group(name="hello").save()

self.assertEqual(1, Group.objects.count())
assert 1 == Group.objects.count()

Group.drop_collection()
self.assertEqual(0, Group.objects.count())
assert 0 == Group.objects.count()

self.assertEqual(1, Group.objects.count())
assert 1 == Group.objects.count()

def test_switch_collection_context_manager(self):
connect('mongoenginetest')
register_connection(alias='testdb-1', db='mongoenginetest2')
connect("mongoenginetest")
register_connection(alias="testdb-1", db="mongoenginetest2")

class Group(Document):
name = StringField()

Group.drop_collection() # drops in default
Group.drop_collection() # drops in default

with switch_collection(Group, 'group1') as Group:
Group.drop_collection() # drops in group1
with switch_collection(Group, "group1") as Group:
Group.drop_collection() # drops in group1

Group(name="hello - group").save()
self.assertEqual(1, Group.objects.count())
assert 1 == Group.objects.count()

with switch_collection(Group, 'group1') as Group:
with switch_collection(Group, "group1") as Group:

self.assertEqual(0, Group.objects.count())
assert 0 == Group.objects.count()

Group(name="hello - group1").save()

self.assertEqual(1, Group.objects.count())
assert 1 == Group.objects.count()

Group.drop_collection()
self.assertEqual(0, Group.objects.count())
assert 0 == Group.objects.count()

self.assertEqual(1, Group.objects.count())
assert 1 == Group.objects.count()

def test_no_dereference_context_manager_object_id(self):
"""Ensure that DBRef items in ListFields aren't dereferenced.
"""
connect('mongoenginetest')
connect("mongoenginetest")

class User(Document):
name = StringField()
@@ -80,31 +133,31 @@ class ContextManagersTest(unittest.TestCase):
Group.drop_collection()

for i in range(1, 51):
User(name='user %s' % i).save()
User(name="user %s" % i).save()

user = User.objects.first()
Group(ref=user, members=User.objects, generic=user).save()

with no_dereference(Group) as NoDeRefGroup:
self.assertTrue(Group._fields['members']._auto_dereference)
self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)
assert Group._fields["members"]._auto_dereference
assert not NoDeRefGroup._fields["members"]._auto_dereference

with no_dereference(Group) as Group:
group = Group.objects.first()
for m in group.members:
self.assertNotIsInstance(m, User)
self.assertNotIsInstance(group.ref, User)
self.assertNotIsInstance(group.generic, User)
assert not isinstance(m, User)
assert not isinstance(group.ref, User)
assert not isinstance(group.generic, User)

for m in group.members:
self.assertIsInstance(m, User)
self.assertIsInstance(group.ref, User)
self.assertIsInstance(group.generic, User)
assert isinstance(m, User)
assert isinstance(group.ref, User)
assert isinstance(group.generic, User)

def test_no_dereference_context_manager_dbref(self):
"""Ensure that DBRef items in ListFields aren't dereferenced.
"""
connect('mongoenginetest')
connect("mongoenginetest")

class User(Document):
name = StringField()
@@ -118,31 +171,29 @@ class ContextManagersTest(unittest.TestCase):
Group.drop_collection()

for i in range(1, 51):
User(name='user %s' % i).save()
User(name="user %s" % i).save()

user = User.objects.first()
Group(ref=user, members=User.objects, generic=user).save()

with no_dereference(Group) as NoDeRefGroup:
self.assertTrue(Group._fields['members']._auto_dereference)
self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference)
assert Group._fields["members"]._auto_dereference
assert not NoDeRefGroup._fields["members"]._auto_dereference

with no_dereference(Group) as Group:
group = Group.objects.first()
self.assertTrue(all([not isinstance(m, User)
for m in group.members]))
self.assertNotIsInstance(group.ref, User)
self.assertNotIsInstance(group.generic, User)
assert all([not isinstance(m, User) for m in group.members])
assert not isinstance(group.ref, User)
assert not isinstance(group.generic, User)

self.assertTrue(all([isinstance(m, User)
for m in group.members]))
self.assertIsInstance(group.ref, User)
self.assertIsInstance(group.generic, User)
assert all([isinstance(m, User) for m in group.members])
assert isinstance(group.ref, User)
assert isinstance(group.generic, User)

def test_no_sub_classes(self):
class A(Document):
x = IntField()
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class B(A):
z = IntField()
@@ -158,84 +209,85 @@ class ContextManagersTest(unittest.TestCase):
B(x=30).save()
C(x=40).save()

self.assertEqual(A.objects.count(), 5)
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.count(), 1)
assert A.objects.count() == 5
assert B.objects.count() == 3
assert C.objects.count() == 1

with no_sub_classes(A):
self.assertEqual(A.objects.count(), 2)
assert A.objects.count() == 2

for obj in A.objects:
self.assertEqual(obj.__class__, A)
assert obj.__class__ == A

with no_sub_classes(B):
self.assertEqual(B.objects.count(), 2)
assert B.objects.count() == 2

for obj in B.objects:
self.assertEqual(obj.__class__, B)
assert obj.__class__ == B

with no_sub_classes(C):
self.assertEqual(C.objects.count(), 1)
assert C.objects.count() == 1

for obj in C.objects:
self.assertEqual(obj.__class__, C)
assert obj.__class__ == C

# Confirm context manager exit correctly
self.assertEqual(A.objects.count(), 5)
self.assertEqual(B.objects.count(), 3)
self.assertEqual(C.objects.count(), 1)
assert A.objects.count() == 5
assert B.objects.count() == 3
assert C.objects.count() == 1

def test_no_sub_classes_modification_to_document_class_are_temporary(self):
class A(Document):
x = IntField()
meta = {'allow_inheritance': True}
meta = {"allow_inheritance": True}

class B(A):
z = IntField()

self.assertEqual(A._subclasses, ('A', 'A.B'))
assert A._subclasses == ("A", "A.B")
with no_sub_classes(A):
self.assertEqual(A._subclasses, ('A',))
self.assertEqual(A._subclasses, ('A', 'A.B'))
assert A._subclasses == ("A",)
assert A._subclasses == ("A", "A.B")

self.assertEqual(B._subclasses, ('A.B',))
assert B._subclasses == ("A.B",)
with no_sub_classes(B):
self.assertEqual(B._subclasses, ('A.B',))
self.assertEqual(B._subclasses, ('A.B',))
assert B._subclasses == ("A.B",)
assert B._subclasses == ("A.B",)

def test_no_subclass_context_manager_does_not_swallow_exception(self):
class User(Document):
name = StringField()

with self.assertRaises(TypeError):
with pytest.raises(TypeError):
with no_sub_classes(User):
raise TypeError()

def test_query_counter_does_not_swallow_exception(self):

with self.assertRaises(TypeError):
with query_counter() as q:
with pytest.raises(TypeError):
with query_counter():
raise TypeError()

def test_query_counter_temporarily_modifies_profiling_level(self):
connect('mongoenginetest')
connect("mongoenginetest")
db = get_db()

initial_profiling_level = db.profiling_level()

try:
NEW_LEVEL = 1
db.set_profiling_level(NEW_LEVEL)
self.assertEqual(db.profiling_level(), NEW_LEVEL)
with query_counter() as q:
self.assertEqual(db.profiling_level(), 2)
self.assertEqual(db.profiling_level(), NEW_LEVEL)
new_level = 1
db.set_profiling_level(new_level)
assert db.profiling_level() == new_level
with query_counter():
assert db.profiling_level() == 2
assert db.profiling_level() == new_level
except Exception:
db.set_profiling_level(initial_profiling_level) # Ensures it gets reseted no matter the outcome of the test
db.set_profiling_level(
initial_profiling_level
) # Ensures it gets reseted no matter the outcome of the test
raise

def test_query_counter(self):
connect('mongoenginetest')
connect("mongoenginetest")
db = get_db()

collection = db.query_counter
@@ -245,73 +297,123 @@ class ContextManagersTest(unittest.TestCase):
count_documents(collection, {})

def issue_1_insert_query():
collection.insert_one({'test': 'garbage'})
collection.insert_one({"test": "garbage"})

def issue_1_find_query():
collection.find_one()

counter = 0
with query_counter() as q:
self.assertEqual(q, counter)
self.assertEqual(q, counter) # Ensures previous count query did not get counted
assert q == counter
assert q == counter # Ensures previous count query did not get counted

for _ in range(10):
issue_1_insert_query()
counter += 1
self.assertEqual(q, counter)
assert q == counter

for _ in range(4):
issue_1_find_query()
counter += 1
self.assertEqual(q, counter)
assert q == counter

for _ in range(3):
issue_1_count_query()
counter += 1
self.assertEqual(q, counter)
assert q == counter

self.assertEqual(int(q), counter) # test __int__
self.assertEqual(repr(q), str(int(q))) # test __repr__
self.assertGreater(q, -1) # test __gt__
self.assertGreaterEqual(q, int(q)) # test __gte__
self.assertNotEqual(q, -1)
self.assertLess(q, 1000)
self.assertLessEqual(q, int(q))
assert int(q) == counter # test __int__
assert repr(q) == str(int(q)) # test __repr__
assert q > -1 # test __gt__
assert q >= int(q) # test __gte__
assert q != -1
assert q < 1000
assert q <= int(q)

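A minimal sketch of query_counter in application code, assuming a local mongod; the Person document is illustrative:

from mongoengine import Document, StringField, connect
from mongoengine.context_managers import query_counter

connect("mongoenginetest")

class Person(Document):
    name = StringField()

with query_counter() as q:
    assert q == 0
    Person(name="Ada").save()   # one insert counted
    Person.objects.first()      # one find counted
    assert int(q) == 2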
def test_query_counter_alias(self):
"""query_counter works properly with db aliases?"""
# Register a connection with db_alias testdb-1
register_connection("testdb-1", "mongoenginetest2")

class A(Document):
"""Uses default db_alias"""

name = StringField()

class B(Document):
"""Uses testdb-1 db_alias"""

name = StringField()
meta = {"db_alias": "testdb-1"}

A.drop_collection()
B.drop_collection()

with query_counter() as q:
assert q == 0
A.objects.create(name="A")
assert q == 1
a = A.objects.first()
assert q == 2
a.name = "Test A"
a.save()
assert q == 3
# querying the other db should'nt alter the counter
B.objects().first()
assert q == 3

with query_counter(alias="testdb-1") as q:
assert q == 0
B.objects.create(name="B")
assert q == 1
b = B.objects.first()
assert q == 2
b.name = "Test B"
b.save()
assert b.name == "Test B"
assert q == 3
# querying the other db should'nt alter the counter
A.objects().first()
assert q == 3

def test_query_counter_counts_getmore_queries(self):
connect('mongoenginetest')
connect("mongoenginetest")
db = get_db()

collection = db.query_counter
collection.drop()

many_docs = [{'test': 'garbage %s' % i} for i in range(150)]
collection.insert_many(many_docs) # first batch of documents contains 101 documents
many_docs = [{"test": "garbage %s" % i} for i in range(150)]
collection.insert_many(
many_docs
) # first batch of documents contains 101 documents

with query_counter() as q:
self.assertEqual(q, 0)
assert q == 0
list(collection.find())
self.assertEqual(q, 2) # 1st select + 1 getmore
assert q == 2 # 1st select + 1 getmore

def test_query_counter_ignores_particular_queries(self):
connect('mongoenginetest')
connect("mongoenginetest")
db = get_db()

collection = db.query_counter
collection.insert_many([{'test': 'garbage %s' % i} for i in range(10)])
collection.insert_many([{"test": "garbage %s" % i} for i in range(10)])

with query_counter() as q:
self.assertEqual(q, 0)
assert q == 0
cursor = collection.find()
self.assertEqual(q, 0) # cursor wasn't opened yet
_ = next(cursor) # opens the cursor and fires the find query
self.assertEqual(q, 1)
assert q == 0 # cursor wasn't opened yet
_ = next(cursor) # opens the cursor and fires the find query
assert q == 1

cursor.close() # issues a `killcursors` query that is ignored by the context
self.assertEqual(q, 1)
_ = db.system.indexes.find_one() # queries on db.system.indexes are ignored as well
self.assertEqual(q, 1)
cursor.close() # issues a `killcursors` query that is ignored by the context
assert q == 1
_ = (
db.system.indexes.find_one()
) # queries on db.system.indexes are ignored as well
assert q == 1


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
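A short usage sketch of switch_db, mirroring the test above; the alias "archive" and the second database name are assumptions for illustration:

from mongoengine import Document, StringField, connect, register_connection
from mongoengine.context_managers import switch_db

connect("mongoenginetest")
register_connection("archive", db="mongoenginetest2")

class Group(Document):
    name = StringField()

Group(name="live").save()                  # stored in the default database
with switch_db(Group, "archive") as Group:
    Group(name="archived").save()          # stored in mongoenginetest2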
@@ -1,161 +1,168 @@
import unittest
from six import iterkeys

import pytest

from mongoengine import Document
from mongoengine.base.datastructures import StrictDict, BaseList, BaseDict
from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict


class DocumentStub(object):
def __init__(self):
self._changed_fields = []
self._unset_fields = []

def _mark_as_changed(self, key):
self._changed_fields.append(key)

def _mark_as_unset(self, key):
self._unset_fields.append(key)

class TestBaseDict(unittest.TestCase):

class TestBaseDict:
@staticmethod
def _get_basedict(dict_items):
"""Get a BaseList bound to a fake document instance"""
fake_doc = DocumentStub()
base_list = BaseDict(dict_items, instance=None, name='my_name')
base_list._instance = fake_doc # hack to inject the mock, it does not work in the constructor
base_list = BaseDict(dict_items, instance=None, name="my_name")
base_list._instance = (
fake_doc # hack to inject the mock, it does not work in the constructor
)
return base_list

def test___init___(self):
class MyDoc(Document):
pass

dict_items = {'k': 'v'}
dict_items = {"k": "v"}
doc = MyDoc()
base_dict = BaseDict(dict_items, instance=doc, name='my_name')
self.assertIsInstance(base_dict._instance, Document)
self.assertEqual(base_dict._name, 'my_name')
self.assertEqual(base_dict, dict_items)
base_dict = BaseDict(dict_items, instance=doc, name="my_name")
assert isinstance(base_dict._instance, Document)
assert base_dict._name == "my_name"
assert base_dict == dict_items

def test_setdefault_calls_mark_as_changed(self):
base_dict = self._get_basedict({})
base_dict.setdefault('k', 'v')
self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])
base_dict.setdefault("k", "v")
assert base_dict._instance._changed_fields == [base_dict._name]

def test_popitems_calls_mark_as_changed(self):
base_dict = self._get_basedict({'k': 'v'})
self.assertEqual(base_dict.popitem(), ('k', 'v'))
self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])
self.assertFalse(base_dict)
base_dict = self._get_basedict({"k": "v"})
assert base_dict.popitem() == ("k", "v")
assert base_dict._instance._changed_fields == [base_dict._name]
assert not base_dict

def test_pop_calls_mark_as_changed(self):
base_dict = self._get_basedict({'k': 'v'})
self.assertEqual(base_dict.pop('k'), 'v')
self.assertEqual(base_dict._instance._changed_fields, [base_dict._name])
self.assertFalse(base_dict)
base_dict = self._get_basedict({"k": "v"})
assert base_dict.pop("k") == "v"
assert base_dict._instance._changed_fields == [base_dict._name]
assert not base_dict

def test_pop_calls_does_not_mark_as_changed_when_it_fails(self):
base_dict = self._get_basedict({'k': 'v'})
with self.assertRaises(KeyError):
base_dict.pop('X')
self.assertFalse(base_dict._instance._changed_fields)
base_dict = self._get_basedict({"k": "v"})
with pytest.raises(KeyError):
base_dict.pop("X")
assert not base_dict._instance._changed_fields

def test_clear_calls_mark_as_changed(self):
base_dict = self._get_basedict({'k': 'v'})
base_dict = self._get_basedict({"k": "v"})
base_dict.clear()
self.assertEqual(base_dict._instance._changed_fields, ['my_name'])
self.assertEqual(base_dict, {})
assert base_dict._instance._changed_fields == ["my_name"]
assert base_dict == {}

def test___delitem___calls_mark_as_changed(self):
base_dict = self._get_basedict({'k': 'v'})
del base_dict['k']
self.assertEqual(base_dict._instance._changed_fields, ['my_name.k'])
self.assertEqual(base_dict, {})
base_dict = self._get_basedict({"k": "v"})
del base_dict["k"]
assert base_dict._instance._changed_fields == ["my_name.k"]
assert base_dict == {}

def test___getitem____KeyError(self):
base_dict = self._get_basedict({})
with self.assertRaises(KeyError):
base_dict['new']
with pytest.raises(KeyError):
base_dict["new"]

def test___getitem____simple_value(self):
base_dict = self._get_basedict({'k': 'v'})
base_dict['k'] = 'v'
base_dict = self._get_basedict({"k": "v"})
base_dict["k"] = "v"

def test___getitem____sublist_gets_converted_to_BaseList(self):
base_dict = self._get_basedict({'k': [0, 1, 2]})
sub_list = base_dict['k']
self.assertEqual(sub_list, [0, 1, 2])
self.assertIsInstance(sub_list, BaseList)
self.assertIs(sub_list._instance, base_dict._instance)
self.assertEqual(sub_list._name, 'my_name.k')
self.assertEqual(base_dict._instance._changed_fields, [])
base_dict = self._get_basedict({"k": [0, 1, 2]})
sub_list = base_dict["k"]
assert sub_list == [0, 1, 2]
assert isinstance(sub_list, BaseList)
assert sub_list._instance is base_dict._instance
assert sub_list._name == "my_name.k"
assert base_dict._instance._changed_fields == []

# Challenge mark_as_changed from sublist
sub_list[1] = None
self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.1'])
assert base_dict._instance._changed_fields == ["my_name.k.1"]

def test___getitem____subdict_gets_converted_to_BaseDict(self):
base_dict = self._get_basedict({'k': {'subk': 'subv'}})
sub_dict = base_dict['k']
self.assertEqual(sub_dict, {'subk': 'subv'})
self.assertIsInstance(sub_dict, BaseDict)
self.assertIs(sub_dict._instance, base_dict._instance)
self.assertEqual(sub_dict._name, 'my_name.k')
self.assertEqual(base_dict._instance._changed_fields, [])
base_dict = self._get_basedict({"k": {"subk": "subv"}})
sub_dict = base_dict["k"]
assert sub_dict == {"subk": "subv"}
assert isinstance(sub_dict, BaseDict)
assert sub_dict._instance is base_dict._instance
assert sub_dict._name == "my_name.k"
assert base_dict._instance._changed_fields == []

# Challenge mark_as_changed from subdict
sub_dict['subk'] = None
self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.subk'])
sub_dict["subk"] = None
assert base_dict._instance._changed_fields == ["my_name.k.subk"]

def test_get_sublist_gets_converted_to_BaseList_just_like__getitem__(self):
base_dict = self._get_basedict({'k': [0, 1, 2]})
sub_list = base_dict.get('k')
self.assertEqual(sub_list, [0, 1, 2])
self.assertIsInstance(sub_list, BaseList)
base_dict = self._get_basedict({"k": [0, 1, 2]})
sub_list = base_dict.get("k")
assert sub_list == [0, 1, 2]
assert isinstance(sub_list, BaseList)

def test_get_returns_the_same_as___getitem__(self):
base_dict = self._get_basedict({'k': [0, 1, 2]})
get_ = base_dict.get('k')
getitem_ = base_dict['k']
self.assertEqual(get_, getitem_)
base_dict = self._get_basedict({"k": [0, 1, 2]})
get_ = base_dict.get("k")
getitem_ = base_dict["k"]
assert get_ == getitem_

def test_get_default(self):
base_dict = self._get_basedict({})
sentinel = object()
self.assertEqual(base_dict.get('new'), None)
self.assertIs(base_dict.get('new', sentinel), sentinel)
assert base_dict.get("new") is None
assert base_dict.get("new", sentinel) is sentinel

def test___setitem___calls_mark_as_changed(self):
base_dict = self._get_basedict({})
base_dict['k'] = 'v'
self.assertEqual(base_dict._instance._changed_fields, ['my_name.k'])
self.assertEqual(base_dict, {'k': 'v'})
base_dict["k"] = "v"
assert base_dict._instance._changed_fields == ["my_name.k"]
assert base_dict == {"k": "v"}

def test_update_calls_mark_as_changed(self):
base_dict = self._get_basedict({})
base_dict.update({'k': 'v'})
self.assertEqual(base_dict._instance._changed_fields, ['my_name'])
base_dict.update({"k": "v"})
assert base_dict._instance._changed_fields == ["my_name"]

def test___setattr____not_tracked_by_changes(self):
base_dict = self._get_basedict({})
base_dict.a_new_attr = 'test'
self.assertEqual(base_dict._instance._changed_fields, [])
base_dict.a_new_attr = "test"
assert base_dict._instance._changed_fields == []

def test___delattr____tracked_by_changes(self):
# This is probably a bug as __setattr__ is not tracked
# This is even bad because it could be that there is an attribute
# with the same name as a key
base_dict = self._get_basedict({})
base_dict.a_new_attr = 'test'
base_dict.a_new_attr = "test"
del base_dict.a_new_attr
self.assertEqual(base_dict._instance._changed_fields, ['my_name.a_new_attr'])
assert base_dict._instance._changed_fields == ["my_name.a_new_attr"]

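A compact sketch of the change-tracking behaviour these tests exercise, using the same stub-injection trick as above; the Owner class and the field name "settings" are illustrative assumptions:

from mongoengine.base.datastructures import BaseDict

class Owner:
    """Minimal stand-in for a Document: records what gets marked dirty."""
    def __init__(self):
        self._changed_fields = []
    def _mark_as_changed(self, key):
        self._changed_fields.append(key)

owner = Owner()
d = BaseDict({"k": "v"}, instance=None, name="settings")
d._instance = owner          # injected afterwards, as in the tests above
d["k"] = "new"               # itemwise change is tracked as "settings.k"
d.update({"extra": 1})       # bulk change is tracked as the whole field
assert owner._changed_fields == ["settings.k", "settings"]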
class TestBaseList(unittest.TestCase):

class TestBaseList:
@staticmethod
def _get_baselist(list_items):
"""Get a BaseList bound to a fake document instance"""
fake_doc = DocumentStub()
base_list = BaseList(list_items, instance=None, name='my_name')
base_list._instance = fake_doc # hack to inject the mock, it does not work in the constructor
base_list = BaseList(list_items, instance=None, name="my_name")
base_list._instance = (
fake_doc # hack to inject the mock, it does not work in the constructor
)
return base_list

def test___init___(self):
@@ -164,28 +171,28 @@ class TestBaseList(unittest.TestCase):

list_items = [True]
doc = MyDoc()
base_list = BaseList(list_items, instance=doc, name='my_name')
self.assertIsInstance(base_list._instance, Document)
self.assertEqual(base_list._name, 'my_name')
self.assertEqual(base_list, list_items)
base_list = BaseList(list_items, instance=doc, name="my_name")
assert isinstance(base_list._instance, Document)
assert base_list._name == "my_name"
assert base_list == list_items

def test___iter__(self):
values = [True, False, True, False]
base_list = BaseList(values, instance=None, name='my_name')
self.assertEqual(values, list(base_list))
base_list = BaseList(values, instance=None, name="my_name")
assert values == list(base_list)

def test___iter___allow_modification_while_iterating_withou_error(self):
# regular list allows for this, thus this subclass must comply to that
base_list = BaseList([True, False, True, False], instance=None, name='my_name')
base_list = BaseList([True, False, True, False], instance=None, name="my_name")
for idx, val in enumerate(base_list):
if val:
base_list.pop(idx)

def test_append_calls_mark_as_changed(self):
base_list = self._get_baselist([])
self.assertFalse(base_list._instance._changed_fields)
assert not base_list._instance._changed_fields
base_list.append(True)
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test_subclass_append(self):
# Due to the way mark_as_changed_wrapper is implemented
@@ -193,206 +200,203 @@ class TestBaseList(unittest.TestCase):
class SubBaseList(BaseList):
pass

base_list = SubBaseList([], instance=None, name='my_name')
base_list = SubBaseList([], instance=None, name="my_name")
base_list.append(True)

def test___getitem__using_simple_index(self):
base_list = self._get_baselist([0, 1, 2])
self.assertEqual(base_list[0], 0)
self.assertEqual(base_list[1], 1)
self.assertEqual(base_list[-1], 2)
assert base_list[0] == 0
assert base_list[1] == 1
assert base_list[-1] == 2

def test___getitem__using_slice(self):
base_list = self._get_baselist([0, 1, 2])
self.assertEqual(base_list[1:3], [1, 2])
self.assertEqual(base_list[0:3:2], [0, 2])
assert base_list[1:3] == [1, 2]
assert base_list[0:3:2] == [0, 2]

def test___getitem___using_slice_returns_list(self):
# Bug: using slice does not properly handles the instance
# and mark_as_changed behaviour.
base_list = self._get_baselist([0, 1, 2])
sliced = base_list[1:3]
self.assertEqual(sliced, [1, 2])
self.assertIsInstance(sliced, list)
self.assertEqual(base_list._instance._changed_fields, [])
assert sliced == [1, 2]
assert isinstance(sliced, list)
assert base_list._instance._changed_fields == []

def test___getitem__sublist_returns_BaseList_bound_to_instance(self):
base_list = self._get_baselist(
[
[1, 2],
[3, 4]
]
)
base_list = self._get_baselist([[1, 2], [3, 4]])
sub_list = base_list[0]
self.assertEqual(sub_list, [1, 2])
self.assertIsInstance(sub_list, BaseList)
self.assertIs(sub_list._instance, base_list._instance)
self.assertEqual(sub_list._name, 'my_name.0')
self.assertEqual(base_list._instance._changed_fields, [])
assert sub_list == [1, 2]
assert isinstance(sub_list, BaseList)
assert sub_list._instance is base_list._instance
assert sub_list._name == "my_name.0"
assert base_list._instance._changed_fields == []

# Challenge mark_as_changed from sublist
sub_list[1] = None
self.assertEqual(base_list._instance._changed_fields, ['my_name.0.1'])
assert base_list._instance._changed_fields == ["my_name.0.1"]

def test___getitem__subdict_returns_BaseList_bound_to_instance(self):
base_list = self._get_baselist(
[
{'subk': 'subv'}
]
)
base_list = self._get_baselist([{"subk": "subv"}])
sub_dict = base_list[0]
self.assertEqual(sub_dict, {'subk': 'subv'})
self.assertIsInstance(sub_dict, BaseDict)
self.assertIs(sub_dict._instance, base_list._instance)
self.assertEqual(sub_dict._name, 'my_name.0')
self.assertEqual(base_list._instance._changed_fields, [])
assert sub_dict == {"subk": "subv"}
assert isinstance(sub_dict, BaseDict)
assert sub_dict._instance is base_list._instance
assert sub_dict._name == "my_name.0"
assert base_list._instance._changed_fields == []

# Challenge mark_as_changed from subdict
sub_dict['subk'] = None
self.assertEqual(base_list._instance._changed_fields, ['my_name.0.subk'])
sub_dict["subk"] = None
assert base_list._instance._changed_fields == ["my_name.0.subk"]

def test_extend_calls_mark_as_changed(self):
base_list = self._get_baselist([])
base_list.extend([True])
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test_insert_calls_mark_as_changed(self):
base_list = self._get_baselist([])
base_list.insert(0, True)
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test_remove_calls_mark_as_changed(self):
base_list = self._get_baselist([True])
base_list.remove(True)
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test_remove_not_mark_as_changed_when_it_fails(self):
base_list = self._get_baselist([True])
with self.assertRaises(ValueError):
with pytest.raises(ValueError):
base_list.remove(False)
self.assertFalse(base_list._instance._changed_fields)
assert not base_list._instance._changed_fields

def test_pop_calls_mark_as_changed(self):
base_list = self._get_baselist([True])
base_list.pop()
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test_reverse_calls_mark_as_changed(self):
base_list = self._get_baselist([True, False])
base_list.reverse()
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test___delitem___calls_mark_as_changed(self):
base_list = self._get_baselist([True])
del base_list[0]
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test___setitem___calls_with_full_slice_mark_as_changed(self):
base_list = self._get_baselist([])
base_list[:] = [0, 1] # Will use __setslice__ under py2 and __setitem__ under py3
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
self.assertEqual(base_list, [0, 1])
base_list[:] = [
0,
1,
]
assert base_list._instance._changed_fields == ["my_name"]
assert base_list == [0, 1]

def test___setitem___calls_with_partial_slice_mark_as_changed(self):
base_list = self._get_baselist([0, 1, 2])
base_list[0:2] = [1, 0] # Will use __setslice__ under py2 and __setitem__ under py3
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
self.assertEqual(base_list, [1, 0, 2])
base_list[0:2] = [
1,
0,
]
assert base_list._instance._changed_fields == ["my_name"]
assert base_list == [1, 0, 2]

def test___setitem___calls_with_step_slice_mark_as_changed(self):
base_list = self._get_baselist([0, 1, 2])
base_list[0:3:2] = [-1, -2] # uses __setitem__ in both py2 & 3
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
self.assertEqual(base_list, [-1, 1, -2])
base_list[0:3:2] = [-1, -2] # uses __setitem__
assert base_list._instance._changed_fields == ["my_name"]
assert base_list == [-1, 1, -2]

def test___setitem___with_slice(self):
base_list = self._get_baselist([0, 1, 2, 3, 4, 5])
base_list[0:6:2] = [None, None, None]
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
self.assertEqual(base_list, [None, 1, None, 3, None, 5])
assert base_list._instance._changed_fields == ["my_name"]
assert base_list == [None, 1, None, 3, None, 5]

def test___setitem___item_0_calls_mark_as_changed(self):
base_list = self._get_baselist([True])
base_list[0] = False
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
self.assertEqual(base_list, [False])
assert base_list._instance._changed_fields == ["my_name.0"]
assert base_list == [False]

def test___setitem___item_1_calls_mark_as_changed(self):
base_list = self._get_baselist([True, True])
base_list[1] = False
self.assertEqual(base_list._instance._changed_fields, ['my_name.1'])
self.assertEqual(base_list, [True, False])
assert base_list._instance._changed_fields == ["my_name.1"]
assert base_list == [True, False]

def test___delslice___calls_mark_as_changed(self):
base_list = self._get_baselist([0, 1])
del base_list[0:1]
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
self.assertEqual(base_list, [1])
assert base_list._instance._changed_fields == ["my_name"]
assert base_list == [1]

def test___iadd___calls_mark_as_changed(self):
base_list = self._get_baselist([True])
base_list += [False]
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test___imul___calls_mark_as_changed(self):
base_list = self._get_baselist([True])
self.assertEqual(base_list._instance._changed_fields, [])
assert base_list._instance._changed_fields == []
base_list *= 2
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test_sort_calls_not_marked_as_changed_when_it_fails(self):
base_list = self._get_baselist([True])
with self.assertRaises(TypeError):
with pytest.raises(TypeError):
base_list.sort(key=1)

self.assertEqual(base_list._instance._changed_fields, [])
assert base_list._instance._changed_fields == []

def test_sort_calls_mark_as_changed(self):
base_list = self._get_baselist([True, False])
base_list.sort()
self.assertEqual(base_list._instance._changed_fields, ['my_name'])
assert base_list._instance._changed_fields == ["my_name"]

def test_sort_calls_with_key(self):
base_list = self._get_baselist([1, 2, 11])
base_list.sort(key=lambda i: str(i))
self.assertEqual(base_list, [1, 11, 2])
assert base_list == [1, 11, 2]


class TestStrictDict(unittest.TestCase):
def strict_dict_class(self, *args, **kwargs):
return StrictDict.create(*args, **kwargs)

def setUp(self):
self.dtype = self.strict_dict_class(("a", "b", "c"))

def strict_dict_class(self, *args, **kwargs):
return StrictDict.create(*args, **kwargs)

def test_init(self):
d = self.dtype(a=1, b=1, c=1)
self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
assert (d.a, d.b, d.c) == (1, 1, 1)

def test_iterkeys(self):
d = self.dtype(a=1)
self.assertEqual(list(iterkeys(d)), ['a'])
assert list(d.keys()) == ["a"]

def test_len(self):
d = self.dtype(a=1)
self.assertEqual(len(d), 1)
assert len(d) == 1

def test_pop(self):
d = self.dtype(a=1)
self.assertIn('a', d)
d.pop('a')
self.assertNotIn('a', d)
assert "a" in d
d.pop("a")
assert "a" not in d

def test_repr(self):
d = self.dtype(a=1, b=2, c=3)
self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}')
assert repr(d) == '{"a": 1, "b": 2, "c": 3}'

# make sure quotes are escaped properly
d = self.dtype(a='"', b="'", c="")
self.assertEqual(repr(d), '{"a": \'"\', "b": "\'", "c": \'\'}')
assert repr(d) == '{"a": \'"\', "b": "\'", "c": \'\'}'

def test_init_fails_on_nonexisting_attrs(self):
with self.assertRaises(AttributeError):
with pytest.raises(AttributeError):
self.dtype(a=1, b=2, d=3)

def test_eq(self):
@@ -404,46 +408,47 @@ class TestStrictDict(unittest.TestCase):
h = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1)
i = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2)

self.assertEqual(d, dd)
self.assertNotEqual(d, e)
self.assertNotEqual(d, f)
self.assertNotEqual(d, g)
self.assertNotEqual(f, d)
self.assertEqual(d, h)
self.assertNotEqual(d, i)
assert d == dd
assert d != e
assert d != f
assert d != g
assert f != d
assert d == h
assert d != i

def test_setattr_getattr(self):
d = self.dtype()
d.a = 1
self.assertEqual(d.a, 1)
self.assertRaises(AttributeError, getattr, d, 'b')
assert d.a == 1
with pytest.raises(AttributeError):
getattr(d, "b")

def test_setattr_raises_on_nonexisting_attr(self):
d = self.dtype()
with self.assertRaises(AttributeError):
with pytest.raises(AttributeError):
d.x = 1

def test_setattr_getattr_special(self):
d = self.strict_dict_class(["items"])
d.items = 1
self.assertEqual(d.items, 1)
assert d.items == 1

def test_get(self):
d = self.dtype(a=1)
self.assertEqual(d.get('a'), 1)
self.assertEqual(d.get('b', 'bla'), 'bla')
assert d.get("a") == 1
assert d.get("b", "bla") == "bla"

def test_items(self):
d = self.dtype(a=1)
self.assertEqual(d.items(), [('a', 1)])
assert d.items() == [("a", 1)]
d = self.dtype(a=1, b=2)
self.assertEqual(d.items(), [('a', 1), ('b', 2)])
assert d.items() == [("a", 1), ("b", 2)]

def test_mappings_protocol(self):
d = self.dtype(a=1, b=2)
self.assertEqual(dict(d), {'a': 1, 'b': 2})
self.assertEqual(dict(**d), {'a': 1, 'b': 2})
assert dict(d) == {"a": 1, "b": 2}
assert dict(**d) == {"a": 1, "b": 2}


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
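A brief sketch of the StrictDict.create factory these tests exercise; the Point name and its keys are chosen here purely for illustration:

from mongoengine.base.datastructures import StrictDict

# Build a dict-like class restricted to a fixed set of keys.
Point = StrictDict.create(("x", "y"))

p = Point(x=1, y=2)
assert (p.x, p.y) == (1, 2)
assert dict(p) == {"x": 1, "y": 2}

try:
    p.z = 3                      # not part of the allowed keys
except AttributeError:
    pass                         # rejected, as the tests above assert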
File diff suppressed because it is too large
@@ -1,10 +1,9 @@
import unittest

from pymongo import ReadPreference
from pymongo import MongoClient
from pymongo import MongoClient, ReadPreference

import mongoengine
from mongoengine.connection import MongoEngineConnectionError
from mongoengine.connection import ConnectionFailure


CONN_CLASS = MongoClient
@@ -12,7 +11,6 @@ READ_PREF = ReadPreference.SECONDARY


class ConnectionTest(unittest.TestCase):

def setUp(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
@@ -26,20 +24,21 @@ class ConnectionTest(unittest.TestCase):
def test_replicaset_uri_passes_read_preference(self):
"""Requires a replica set called "rs" on port 27017
"""

try:
conn = mongoengine.connect(db='mongoenginetest',
host="mongodb://localhost/mongoenginetest?replicaSet=rs",
read_preference=READ_PREF)
except MongoEngineConnectionError as e:
conn = mongoengine.connect(
db="mongoenginetest",
host="mongodb://localhost/mongoenginetest?replicaSet=rs",
read_preference=READ_PREF,
)
except ConnectionFailure:
return

if not isinstance(conn, CONN_CLASS):
# really???
return

self.assertEqual(conn.read_preference, READ_PREF)
assert conn.read_preference == READ_PREF


if __name__ == '__main__':
if __name__ == "__main__":
unittest.main()
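The recurring conversion pattern in this commit, shown once in isolation: unittest's assertRaises context manager becomes pytest.raises. A minimal sketch (a plain dict stands in for the real objects used in the tests):

import pytest

# Before: with self.assertRaises(KeyError): ...
# After:
def test_pop_missing_key_raises():
    with pytest.raises(KeyError):
        {}.pop("X")  # the real tests exercise BaseDict/StrictDict here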
@@ -7,7 +7,7 @@ from mongoengine import signals
|
||||
signal_output = []
|
||||
|
||||
|
||||
class SignalTests(unittest.TestCase):
|
||||
class TestSignal(unittest.TestCase):
|
||||
"""
|
||||
Testing signals before/after saving and deleting.
|
||||
"""
|
||||
@@ -20,7 +20,7 @@ class SignalTests(unittest.TestCase):
|
||||
return signal_output
|
||||
|
||||
def setUp(self):
|
||||
connect(db='mongoenginetest')
|
||||
connect(db="mongoenginetest")
|
||||
|
||||
class Author(Document):
|
||||
# Make the id deterministic for easier testing
|
||||
@@ -32,60 +32,65 @@ class SignalTests(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def pre_init(cls, sender, document, *args, **kwargs):
|
||||
signal_output.append('pre_init signal, %s' % cls.__name__)
|
||||
signal_output.append(kwargs['values'])
|
||||
signal_output.append("pre_init signal, %s" % cls.__name__)
|
||||
signal_output.append(kwargs["values"])
|
||||
|
||||
@classmethod
|
||||
def post_init(cls, sender, document, **kwargs):
|
||||
signal_output.append('post_init signal, %s, document._created = %s' % (document, document._created))
|
||||
signal_output.append(
|
||||
"post_init signal, %s, document._created = %s"
|
||||
% (document, document._created)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def pre_save(cls, sender, document, **kwargs):
|
||||
signal_output.append('pre_save signal, %s' % document)
|
||||
signal_output.append("pre_save signal, %s" % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def pre_save_post_validation(cls, sender, document, **kwargs):
|
||||
signal_output.append('pre_save_post_validation signal, %s' % document)
|
||||
if kwargs.pop('created', False):
|
||||
signal_output.append('Is created')
|
||||
signal_output.append("pre_save_post_validation signal, %s" % document)
|
||||
if kwargs.pop("created", False):
|
||||
signal_output.append("Is created")
|
||||
else:
|
||||
signal_output.append('Is updated')
|
||||
signal_output.append("Is updated")
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_save(cls, sender, document, **kwargs):
|
||||
dirty_keys = document._delta()[0].keys() + document._delta()[1].keys()
|
||||
signal_output.append('post_save signal, %s' % document)
|
||||
signal_output.append('post_save dirty keys, %s' % dirty_keys)
|
||||
if kwargs.pop('created', False):
|
||||
signal_output.append('Is created')
|
||||
dirty_keys = list(document._delta()[0].keys()) + list(
|
||||
document._delta()[1].keys()
|
||||
)
|
||||
signal_output.append("post_save signal, %s" % document)
|
||||
signal_output.append("post_save dirty keys, %s" % dirty_keys)
|
||||
if kwargs.pop("created", False):
|
||||
signal_output.append("Is created")
|
||||
else:
|
||||
signal_output.append('Is updated')
|
||||
signal_output.append("Is updated")
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def pre_delete(cls, sender, document, **kwargs):
|
||||
signal_output.append('pre_delete signal, %s' % document)
|
||||
signal_output.append("pre_delete signal, %s" % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_delete(cls, sender, document, **kwargs):
|
||||
signal_output.append('post_delete signal, %s' % document)
|
||||
signal_output.append("post_delete signal, %s" % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def pre_bulk_insert(cls, sender, documents, **kwargs):
|
||||
signal_output.append('pre_bulk_insert signal, %s' % documents)
|
||||
signal_output.append("pre_bulk_insert signal, %s" % documents)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_bulk_insert(cls, sender, documents, **kwargs):
|
||||
signal_output.append('post_bulk_insert signal, %s' % documents)
|
||||
if kwargs.pop('loaded', False):
|
||||
signal_output.append('Is loaded')
|
||||
signal_output.append("post_bulk_insert signal, %s" % documents)
|
||||
if kwargs.pop("loaded", False):
|
||||
signal_output.append("Is loaded")
|
||||
else:
|
||||
signal_output.append('Not loaded')
|
||||
signal_output.append("Not loaded")
|
||||
signal_output.append(kwargs)
|
||||
|
||||
self.Author = Author
|
||||
@@ -101,12 +106,12 @@ class SignalTests(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def pre_delete(cls, sender, document, **kwargs):
|
||||
signal_output.append('pre_delete signal, %s' % document)
|
||||
signal_output.append("pre_delete signal, %s" % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
@classmethod
|
||||
def post_delete(cls, sender, document, **kwargs):
|
||||
signal_output.append('post_delete signal, %s' % document)
|
||||
signal_output.append("post_delete signal, %s" % document)
|
||||
signal_output.append(kwargs)
|
||||
|
||||
self.Another = Another
|
||||
@@ -117,11 +122,11 @@ class SignalTests(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def post_save(cls, sender, document, **kwargs):
|
||||
if 'created' in kwargs:
|
||||
if kwargs['created']:
|
||||
signal_output.append('Is created')
|
||||
if "created" in kwargs:
|
||||
if kwargs["created"]:
|
||||
signal_output.append("Is created")
|
||||
else:
|
||||
signal_output.append('Is updated')
|
||||
signal_output.append("Is updated")
|
||||
|
||||
self.ExplicitId = ExplicitId
|
||||
ExplicitId.drop_collection()
|
||||
@@ -136,9 +141,13 @@ class SignalTests(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def pre_bulk_insert(cls, sender, documents, **kwargs):
|
||||
signal_output.append('pre_bulk_insert signal, %s' %
|
||||
[(doc, {'active': documents[n].active})
|
||||
for n, doc in enumerate(documents)])
|
||||
signal_output.append(
|
||||
"pre_bulk_insert signal, %s"
|
||||
% [
|
||||
(doc, {"active": documents[n].active})
|
||||
for n, doc in enumerate(documents)
|
||||
]
|
||||
)
|
||||
|
||||
# make changes here, this is just an example -
|
||||
# it could be anything that needs pre-validation or looks-ups before bulk bulk inserting
|
||||
@@ -149,13 +158,17 @@ class SignalTests(unittest.TestCase):
|
||||
|
||||
@classmethod
|
||||
def post_bulk_insert(cls, sender, documents, **kwargs):
|
||||
signal_output.append('post_bulk_insert signal, %s' %
|
||||
[(doc, {'active': documents[n].active})
|
||||
for n, doc in enumerate(documents)])
|
||||
if kwargs.pop('loaded', False):
|
||||
signal_output.append('Is loaded')
|
||||
signal_output.append(
|
||||
"post_bulk_insert signal, %s"
|
||||
% [
|
||||
(doc, {"active": documents[n].active})
|
||||
for n, doc in enumerate(documents)
|
||||
]
|
||||
)
|
||||
if kwargs.pop("loaded", False):
|
||||
signal_output.append("Is loaded")
|
||||
else:
|
||||
signal_output.append('Not loaded')
|
||||
signal_output.append("Not loaded")
|
||||
signal_output.append(kwargs)
|
||||
|
||||
self.Post = Post
|
||||
@@ -178,7 +191,9 @@ class SignalTests(unittest.TestCase):
|
||||
signals.pre_init.connect(Author.pre_init, sender=Author)
|
||||
signals.post_init.connect(Author.post_init, sender=Author)
|
||||
signals.pre_save.connect(Author.pre_save, sender=Author)
|
||||
signals.pre_save_post_validation.connect(Author.pre_save_post_validation, sender=Author)
|
||||
signals.pre_save_post_validation.connect(
|
||||
Author.pre_save_post_validation, sender=Author
|
||||
)
|
||||
signals.post_save.connect(Author.post_save, sender=Author)
|
||||
signals.pre_delete.connect(Author.pre_delete, sender=Author)
|
||||
signals.post_delete.connect(Author.post_delete, sender=Author)
|
||||
@@ -199,7 +214,9 @@ class SignalTests(unittest.TestCase):
|
||||
signals.post_delete.disconnect(self.Author.post_delete)
|
||||
signals.pre_delete.disconnect(self.Author.pre_delete)
|
||||
signals.post_save.disconnect(self.Author.post_save)
|
||||
signals.pre_save_post_validation.disconnect(self.Author.pre_save_post_validation)
|
||||
signals.pre_save_post_validation.disconnect(
|
||||
self.Author.pre_save_post_validation
|
||||
)
|
||||
signals.pre_save.disconnect(self.Author.pre_save)
|
||||
signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
|
||||
signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)
|
||||
@@ -230,36 +247,36 @@ class SignalTests(unittest.TestCase):
|
||||
# Note that there is a chance that the following assert fails in case
|
||||
# some receivers (eventually created in other tests)
|
||||
# gets garbage collected (https://pythonhosted.org/blinker/#blinker.base.Signal.connect)
|
||||
self.assertEqual(self.pre_signals, post_signals)
|
||||
assert self.pre_signals == post_signals
|
||||
|
||||
def test_model_signals(self):
|
||||
""" Model saves should throw some signals. """
|
||||
|
||||
def create_author():
|
||||
self.Author(name='Bill Shakespeare')
|
||||
self.Author(name="Bill Shakespeare")
|
||||
|
||||
def bulk_create_author_with_load():
|
||||
a1 = self.Author(name='Bill Shakespeare')
|
||||
a1 = self.Author(name="Bill Shakespeare")
|
||||
self.Author.objects.insert([a1], load_bulk=True)
|
||||
|
||||
def bulk_create_author_without_load():
|
||||
a1 = self.Author(name='Bill Shakespeare')
|
||||
a1 = self.Author(name="Bill Shakespeare")
|
||||
self.Author.objects.insert([a1], load_bulk=False)
|
||||
|
||||
def load_existing_author():
|
||||
a = self.Author(name='Bill Shakespeare')
|
||||
a = self.Author(name="Bill Shakespeare")
|
||||
a.save()
|
||||
self.get_signal_output(lambda: None) # eliminate signal output
|
||||
a1 = self.Author.objects(name='Bill Shakespeare')[0]
|
||||
_ = self.Author.objects(name="Bill Shakespeare")[0]
|
||||
|
||||
self.assertEqual(self.get_signal_output(create_author), [
|
||||
assert self.get_signal_output(create_author) == [
|
||||
"pre_init signal, Author",
|
||||
{'name': 'Bill Shakespeare'},
|
||||
{"name": "Bill Shakespeare"},
|
||||
"post_init signal, Bill Shakespeare, document._created = True",
|
||||
])
|
||||
]
|
||||
|
||||
a1 = self.Author(name='Bill Shakespeare')
|
||||
self.assertEqual(self.get_signal_output(a1.save), [
|
||||
a1 = self.Author(name="Bill Shakespeare")
|
||||
assert self.get_signal_output(a1.save) == [
|
||||
"pre_save signal, Bill Shakespeare",
|
||||
{},
|
||||
"pre_save_post_validation signal, Bill Shakespeare",
|
||||
@@ -268,12 +285,12 @@ class SignalTests(unittest.TestCase):
|
||||
"post_save signal, Bill Shakespeare",
|
||||
"post_save dirty keys, ['name']",
|
||||
"Is created",
|
||||
{}
|
||||
])
|
||||
{},
|
||||
]
|
||||
|
||||
a1.reload()
|
||||
a1.name = 'William Shakespeare'
|
||||
        self.assertEqual(self.get_signal_output(a1.save), [
        a1.name = "William Shakespeare"
        assert self.get_signal_output(a1.save) == [
            "pre_save signal, William Shakespeare",
            {},
            "pre_save_post_validation signal, William Shakespeare",
@@ -282,157 +299,157 @@ class SignalTests(unittest.TestCase):
            "post_save signal, William Shakespeare",
            "post_save dirty keys, ['name']",
            "Is updated",
            {}
        ])

        self.assertEqual(self.get_signal_output(a1.delete), [
            'pre_delete signal, William Shakespeare',
            {},
            'post_delete signal, William Shakespeare',
            {}
        ])
        ]

        self.assertEqual(self.get_signal_output(load_existing_author), [
            "pre_init signal, Author",
            {'id': 2, 'name': 'Bill Shakespeare'},
            "post_init signal, Bill Shakespeare, document._created = False"
        ])

        self.assertEqual(self.get_signal_output(bulk_create_author_with_load), [
            'pre_init signal, Author',
            {'name': 'Bill Shakespeare'},
            'post_init signal, Bill Shakespeare, document._created = True',
            'pre_bulk_insert signal, [<Author: Bill Shakespeare>]',
        assert self.get_signal_output(a1.delete) == [
            "pre_delete signal, William Shakespeare",
            {},
            'pre_init signal, Author',
            {'id': 3, 'name': 'Bill Shakespeare'},
            'post_init signal, Bill Shakespeare, document._created = False',
            'post_bulk_insert signal, [<Author: Bill Shakespeare>]',
            'Is loaded',
            {}
        ])
            "post_delete signal, William Shakespeare",
            {},
        ]

        self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
        assert self.get_signal_output(load_existing_author) == [
            "pre_init signal, Author",
            {'name': 'Bill Shakespeare'},
            {"id": 2, "name": "Bill Shakespeare"},
            "post_init signal, Bill Shakespeare, document._created = False",
        ]

        assert self.get_signal_output(bulk_create_author_with_load) == [
            "pre_init signal, Author",
            {"name": "Bill Shakespeare"},
            "post_init signal, Bill Shakespeare, document._created = True",
            "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
            {},
            "pre_init signal, Author",
            {"id": 3, "name": "Bill Shakespeare"},
            "post_init signal, Bill Shakespeare, document._created = False",
            "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "Is loaded",
            {},
        ]

        assert self.get_signal_output(bulk_create_author_without_load) == [
            "pre_init signal, Author",
            {"name": "Bill Shakespeare"},
            "post_init signal, Bill Shakespeare, document._created = True",
            "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
            {},
            "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "Not loaded",
            {}
        ])
            {},
        ]

    def test_signal_kwargs(self):
        """ Make sure signal_kwargs is passed to signals calls. """

        def live_and_let_die():
            a = self.Author(name='Bill Shakespeare')
            a.save(signal_kwargs={'live': True, 'die': False})
            a.delete(signal_kwargs={'live': False, 'die': True})
            a = self.Author(name="Bill Shakespeare")
            a.save(signal_kwargs={"live": True, "die": False})
            a.delete(signal_kwargs={"live": False, "die": True})

        self.assertEqual(self.get_signal_output(live_and_let_die), [
        assert self.get_signal_output(live_and_let_die) == [
            "pre_init signal, Author",
            {'name': 'Bill Shakespeare'},
            {"name": "Bill Shakespeare"},
            "post_init signal, Bill Shakespeare, document._created = True",
            "pre_save signal, Bill Shakespeare",
            {'die': False, 'live': True},
            {"die": False, "live": True},
            "pre_save_post_validation signal, Bill Shakespeare",
            "Is created",
            {'die': False, 'live': True},
            {"die": False, "live": True},
            "post_save signal, Bill Shakespeare",
            "post_save dirty keys, ['name']",
            "Is created",
            {'die': False, 'live': True},
            'pre_delete signal, Bill Shakespeare',
            {'die': True, 'live': False},
            'post_delete signal, Bill Shakespeare',
            {'die': True, 'live': False}
        ])
            {"die": False, "live": True},
            "pre_delete signal, Bill Shakespeare",
            {"die": True, "live": False},
            "post_delete signal, Bill Shakespeare",
            {"die": True, "live": False},
        ]

        def bulk_create_author():
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], signal_kwargs={'key': True})
            a1 = self.Author(name="Bill Shakespeare")
            self.Author.objects.insert([a1], signal_kwargs={"key": True})

        self.assertEqual(self.get_signal_output(bulk_create_author), [
            'pre_init signal, Author',
            {'name': 'Bill Shakespeare'},
            'post_init signal, Bill Shakespeare, document._created = True',
            'pre_bulk_insert signal, [<Author: Bill Shakespeare>]',
            {'key': True},
            'pre_init signal, Author',
            {'id': 2, 'name': 'Bill Shakespeare'},
            'post_init signal, Bill Shakespeare, document._created = False',
            'post_bulk_insert signal, [<Author: Bill Shakespeare>]',
            'Is loaded',
            {'key': True}
        ])
        assert self.get_signal_output(bulk_create_author) == [
            "pre_init signal, Author",
            {"name": "Bill Shakespeare"},
            "post_init signal, Bill Shakespeare, document._created = True",
            "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
            {"key": True},
            "pre_init signal, Author",
            {"id": 2, "name": "Bill Shakespeare"},
            "post_init signal, Bill Shakespeare, document._created = False",
            "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "Is loaded",
            {"key": True},
        ]

    def test_queryset_delete_signals(self):
        """ Queryset delete should throw some signals. """

        self.Another(name='Bill Shakespeare').save()
        self.assertEqual(self.get_signal_output(self.Another.objects.delete), [
            'pre_delete signal, Bill Shakespeare',
        self.Another(name="Bill Shakespeare").save()
        assert self.get_signal_output(self.Another.objects.delete) == [
            "pre_delete signal, Bill Shakespeare",
            {},
            'post_delete signal, Bill Shakespeare',
            {}
        ])
            "post_delete signal, Bill Shakespeare",
            {},
        ]

    def test_signals_with_explicit_doc_ids(self):
        """ Model saves must have a created flag the first time."""
        ei = self.ExplicitId(id=123)
        # post save must received the created flag, even if there's already
        # an object id present
        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
        assert self.get_signal_output(ei.save) == ["Is created"]
        # second time, it must be an update
        self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
        assert self.get_signal_output(ei.save) == ["Is updated"]

    def test_signals_with_switch_collection(self):
        ei = self.ExplicitId(id=123)
        ei.switch_collection("explicit__1")
        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
        assert self.get_signal_output(ei.save) == ["Is created"]
        ei.switch_collection("explicit__1")
        self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
        assert self.get_signal_output(ei.save) == ["Is updated"]

        ei.switch_collection("explicit__1", keep_created=False)
        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
        assert self.get_signal_output(ei.save) == ["Is created"]
        ei.switch_collection("explicit__1", keep_created=False)
        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
        assert self.get_signal_output(ei.save) == ["Is created"]

    def test_signals_with_switch_db(self):
        connect('mongoenginetest')
        register_connection('testdb-1', 'mongoenginetest2')
        connect("mongoenginetest")
        register_connection("testdb-1", "mongoenginetest2")

        ei = self.ExplicitId(id=123)
        ei.switch_db("testdb-1")
        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
        assert self.get_signal_output(ei.save) == ["Is created"]
        ei.switch_db("testdb-1")
        self.assertEqual(self.get_signal_output(ei.save), ['Is updated'])
        assert self.get_signal_output(ei.save) == ["Is updated"]

        ei.switch_db("testdb-1", keep_created=False)
        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
        assert self.get_signal_output(ei.save) == ["Is created"]
        ei.switch_db("testdb-1", keep_created=False)
        self.assertEqual(self.get_signal_output(ei.save), ['Is created'])
        assert self.get_signal_output(ei.save) == ["Is created"]

    def test_signals_bulk_insert(self):
        def bulk_set_active_post():
            posts = [
                self.Post(title='Post 1'),
                self.Post(title='Post 2'),
                self.Post(title='Post 3')
                self.Post(title="Post 1"),
                self.Post(title="Post 2"),
                self.Post(title="Post 3"),
            ]
            self.Post.objects.insert(posts)

        results = self.get_signal_output(bulk_set_active_post)
        self.assertEqual(results, [
        assert results == [
            "pre_bulk_insert signal, [(<Post: Post 1>, {'active': False}), (<Post: Post 2>, {'active': False}), (<Post: Post 3>, {'active': False})]",
            {},
            "post_bulk_insert signal, [(<Post: Post 1>, {'active': True}), (<Post: Post 2>, {'active': True}), (<Post: Post 3>, {'active': True})]",
            'Is loaded',
            {}
        ])
            "Is loaded",
            {},
        ]


if __name__ == '__main__':
if __name__ == "__main__":
    unittest.main()

@@ -1,38 +1,39 @@
import unittest
import re
import unittest

import pytest

from mongoengine.base.utils import LazyRegexCompiler

signal_output = []


class LazyRegexCompilerTest(unittest.TestCase):

class TestLazyRegexCompiler:
    def test_lazy_regex_compiler_verify_laziness_of_descriptor(self):
        class UserEmail(object):
            EMAIL_REGEX = LazyRegexCompiler('@', flags=32)
            EMAIL_REGEX = LazyRegexCompiler("@", flags=32)

        descriptor = UserEmail.__dict__['EMAIL_REGEX']
        self.assertIsNone(descriptor._compiled_regex)
        descriptor = UserEmail.__dict__["EMAIL_REGEX"]
        assert descriptor._compiled_regex is None

        regex = UserEmail.EMAIL_REGEX
        self.assertEqual(regex, re.compile('@', flags=32))
        self.assertEqual(regex.search('user@domain.com').group(), '@')
        assert regex == re.compile("@", flags=32)
        assert regex.search("user@domain.com").group() == "@"

        user_email = UserEmail()
        self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX)
        assert user_email.EMAIL_REGEX is UserEmail.EMAIL_REGEX

    def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self):
        class UserEmail(object):
            EMAIL_REGEX = LazyRegexCompiler('@')
            EMAIL_REGEX = LazyRegexCompiler("@")

        user_email = UserEmail()
        with self.assertRaises(AttributeError):
            user_email.EMAIL_REGEX = re.compile('@')
        with pytest.raises(AttributeError):
            user_email.EMAIL_REGEX = re.compile("@")

    def test_lazy_regex_compiler_verify_can_override_class_attr(self):
        class UserEmail(object):
            EMAIL_REGEX = LazyRegexCompiler('@')
            EMAIL_REGEX = LazyRegexCompiler("@")

        UserEmail.EMAIL_REGEX = re.compile('cookies')
        self.assertEqual(UserEmail.EMAIL_REGEX.search('Cake & cookies').group(), 'cookies')
        UserEmail.EMAIL_REGEX = re.compile("cookies")
        assert UserEmail.EMAIL_REGEX.search("Cake & cookies").group() == "cookies"

@@ -1,14 +1,13 @@
import operator
import unittest

from nose.plugins.skip import SkipTest
import pytest

from mongoengine import connect
from mongoengine.connection import disconnect_all, get_db
from mongoengine.mongodb_support import get_mongodb_version


MONGO_TEST_DB = 'mongoenginetest' # standard name for the test database
MONGO_TEST_DB = "mongoenginetest"  # standard name for the test database


class MongoDBTestCase(unittest.TestCase):
@@ -37,7 +36,7 @@ def get_as_pymongo(doc):
def _decorated_with_ver_requirement(func, mongo_version_req, oper):
    """Return a MongoDB version requirement decorator.

    The resulting decorator will raise a SkipTest exception if the current
    The resulting decorator will skip the test if the current
    MongoDB version doesn't match the provided version/operator.

    For example, if you define a decorator like so:
@@ -53,12 +52,14 @@ def _decorated_with_ver_requirement(func, mongo_version_req, oper):
    :param mongo_version_req: The mongodb version requirement (tuple(int, int))
    :param oper: The operator to apply (e.g. operator.ge)
    """

    def _inner(*args, **kwargs):
        mongodb_v = get_mongodb_version()
        if oper(mongodb_v, mongo_version_req):
            return func(*args, **kwargs)

        raise SkipTest('Needs MongoDB v{}+'.format('.'.join(str(n) for n in mongo_version_req)))
        pretty_version = ".".join(str(n) for n in mongo_version_req)
        pytest.skip("Needs MongoDB v{}+".format(pretty_version))

    _inner.__name__ = func.__name__
    _inner.__doc__ = func.__doc__
