Merge branch 'master' into feature/allow-setting-read-concern-queryset
@@ -5,7 +5,6 @@ from datetime import datetime
from pymongo.collation import Collation
from pymongo.errors import OperationFailure
import pytest
from six import iteritems

from mongoengine import *
from mongoengine.connection import get_db
@@ -59,7 +58,7 @@ class TestIndexes(unittest.TestCase):
info = BlogPost.objects._collection.index_information()
# _id, '-date', 'tags', ('cat', 'date')
assert len(info) == 4
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
for expected in expected_specs:
assert expected["fields"] in info

@@ -87,7 +86,7 @@ class TestIndexes(unittest.TestCase):
# the indices on -date and tags will both contain
# _cls as first element in the key
assert len(info) == 4
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
for expected in expected_specs:
assert expected["fields"] in info

@@ -102,7 +101,7 @@ class TestIndexes(unittest.TestCase):

ExtendedBlogPost.ensure_indexes()
info = ExtendedBlogPost.objects._collection.index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
for expected in expected_specs:
assert expected["fields"] in info

@@ -192,7 +191,7 @@ class TestIndexes(unittest.TestCase):
# Indexes are lazy so use list() to perform query
list(Person.objects)
info = Person.objects._collection.index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
assert [("rank.title", 1)] in info

def test_explicit_geo2d_index(self):
@@ -207,7 +206,7 @@ class TestIndexes(unittest.TestCase):

Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
assert [("location.point", "2d")] in info

def test_explicit_geo2d_index_embedded(self):
@@ -227,7 +226,7 @@ class TestIndexes(unittest.TestCase):

Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
assert [("current.location.point", "2d")] in info

def test_explicit_geosphere_index(self):
@@ -244,7 +243,7 @@ class TestIndexes(unittest.TestCase):

Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
assert [("location.point", "2dsphere")] in info

def test_explicit_geohaystack_index(self):
@@ -266,7 +265,7 @@ class TestIndexes(unittest.TestCase):

Place.ensure_indexes()
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
assert [("location.point", "geoHaystack")] in info

def test_create_geohaystack_index(self):
@@ -279,7 +278,7 @@ class TestIndexes(unittest.TestCase):

Place.create_index({"fields": (")location.point", "name")}, bucketSize=10)
info = Place._get_collection().index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
assert [("location.point", "geoHaystack"), ("name", 1)] in info

def test_dictionary_indexes(self):
@@ -308,7 +307,7 @@ class TestIndexes(unittest.TestCase):
info = BlogPost.objects._collection.index_information()
info = [
(value["key"], value.get("unique", False), value.get("sparse", False))
for key, value in iteritems(info)
for key, value in info.items()
]
assert ([("addDate", -1)], True, True) in info

@@ -806,18 +805,6 @@ class TestIndexes(unittest.TestCase):
info = Log.objects._collection.index_information()
assert 3600 == info["created_1"]["expireAfterSeconds"]

def test_index_drop_dups_silently_ignored(self):
class Customer(Document):
cust_id = IntField(unique=True, required=True)
meta = {
"indexes": ["cust_id"],
"index_drop_dups": True,
"allow_inheritance": False,
}

Customer.drop_collection()
Customer.objects.first()

def test_unique_and_indexes(self):
"""Ensure that 'unique' constraints aren't overridden by
meta.indexes.
@@ -901,7 +888,7 @@ class TestIndexes(unittest.TestCase):
self.fail("Unbound local error at index + pk definition")

info = BlogPost.objects._collection.index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
index_item = [("_id", 1), ("comments.comment_id", 1)]
assert index_item in info

@@ -942,7 +929,7 @@ class TestIndexes(unittest.TestCase):
meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]}

info = MyDoc.objects._collection.index_information()
info = [value["key"] for key, value in iteritems(info)]
info = [value["key"] for key, value in info.items()]
assert [("provider_ids.foo", 1)] in info
assert [("provider_ids.bar", 1)] in info

@@ -1058,10 +1045,6 @@ class TestIndexes(unittest.TestCase):
del index_info[key][
"ns"
] # drop the index namespace - we don't care about that here, MongoDB 3+
if "dropDups" in index_info[key]:
del index_info[key][
"dropDups"
] # drop the index dropDups - it is deprecated in MongoDB 3+

assert index_info == {
"txt_1": {"key": [("txt", 1)], "background": False},

@@ -3,7 +3,6 @@ import unittest
import warnings

import pytest
from six import iteritems

from mongoengine import (
BooleanField,
@@ -523,7 +522,6 @@ class TestInheritance(MongoDBTestCase):

defaults = {
"index_background": True,
"index_drop_dups": True,
"index_opts": {"hello": "world"},
"allow_inheritance": True,
"queryset_class": "QuerySet",
@@ -550,7 +548,7 @@ class TestInheritance(MongoDBTestCase):
class Human(Mammal):
pass

for k, v in iteritems(defaults):
for k, v in defaults.items():
for cls in [Animal, Fish, Guppy]:
assert cls._meta[k] == v

@@ -10,7 +10,6 @@ import bson
from bson import DBRef, ObjectId
from pymongo.errors import DuplicateKeyError
import pytest
from six import iteritems

from mongoengine import *
from mongoengine import signals
@@ -1415,7 +1414,7 @@ class TestDocumentInstance(MongoDBTestCase):
assert raw_doc["first_name"] == "John"

def test_inserts_if_you_set_the_pk(self):
p1 = self.Person(name="p1", id=bson.ObjectId()).save()
_ = self.Person(name="p1", id=bson.ObjectId()).save()
p2 = self.Person(name="p2")
p2.id = bson.ObjectId()
p2.save()
@@ -2196,7 +2195,7 @@ class TestDocumentInstance(MongoDBTestCase):

user = User(name="Mike").save()
reviewer = User(name="John").save()
book = Book(author=user, reviewer=reviewer).save()
_ = Book(author=user, reviewer=reviewer).save()

reviewer.delete()
assert Book.objects.count() == 1
@@ -2222,7 +2221,7 @@ class TestDocumentInstance(MongoDBTestCase):

user_1 = User(id=1).save()
user_2 = User(id=2).save()
book_1 = Book(id=1, author=user_2).save()
_ = Book(id=1, author=user_2).save()
book_2 = Book(id=2, author=user_1).save()

user_2.delete()
@@ -2231,7 +2230,7 @@ class TestDocumentInstance(MongoDBTestCase):
assert Book.objects.get() == book_2

user_3 = User(id=3).save()
book_3 = Book(id=3, author=user_3).save()
_ = Book(id=3, author=user_3).save()

user_3.delete()
# Deleting user_3 should also delete book_3
@@ -3205,7 +3204,7 @@ class TestDocumentInstance(MongoDBTestCase):
def test_positional_creation(self):
"""Document cannot be instantiated using positional arguments."""
with pytest.raises(TypeError) as exc_info:
person = self.Person("Test User", 42)
self.Person("Test User", 42)

expected_msg = (
"Instantiating a document with positional arguments is not "
@@ -3274,7 +3273,7 @@ class TestDocumentInstance(MongoDBTestCase):

def expand(self):
self.flattened_parameter = {}
for parameter_name, parameter in iteritems(self.parameters):
for parameter_name, parameter in self.parameters.items():
parameter.expand()

class NodesSystem(Document):
@@ -3282,7 +3281,7 @@ class TestDocumentInstance(MongoDBTestCase):
nodes = MapField(ReferenceField(Node, dbref=False))

def save(self, *args, **kwargs):
for node_name, node in iteritems(self.nodes):
for node_name, node in self.nodes.items():
node.expand()
node.save(*args, **kwargs)
super(NodesSystem, self).save(*args, **kwargs)
@@ -3607,13 +3606,13 @@ class TestDocumentInstance(MongoDBTestCase):
v = StringField()

class A(Document):
l = ListField(EmbeddedDocumentField(B))
array = ListField(EmbeddedDocumentField(B))

A.objects.delete()
A(l=[B(v="1"), B(v="2"), B(v="3")]).save()
A(array=[B(v="1"), B(v="2"), B(v="3")]).save()
a = A.objects.get()
assert a.l._instance == a
for idx, b in enumerate(a.l):
assert a.array._instance == a
for idx, b in enumerate(a.array):
assert b._instance == a
assert idx == 2

@@ -3,13 +3,12 @@ import uuid

from bson import Binary
import pytest
import six

from mongoengine import *
from tests.utils import MongoDBTestCase

BIN_VALUE = six.b(
"\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5"
BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
"latin-1"
)

@@ -22,7 +21,7 @@ class TestBinaryField(MongoDBTestCase):
content_type = StringField()
blob = BinaryField()

BLOB = six.b("\xe6\x00\xc4\xff\x07")
BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1")
MIME_TYPE = "application/octet-stream"

Attachment.drop_collection()
@@ -32,7 +31,7 @@ class TestBinaryField(MongoDBTestCase):

attachment_1 = Attachment.objects().first()
assert MIME_TYPE == attachment_1.content_type
assert BLOB == six.binary_type(attachment_1.blob)
assert BLOB == bytes(attachment_1.blob)

def test_validation_succeeds(self):
"""Ensure that valid values can be assigned to binary fields.
@@ -47,11 +46,11 @@ class TestBinaryField(MongoDBTestCase):
attachment_required = AttachmentRequired()
with pytest.raises(ValidationError):
attachment_required.validate()
attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07"))
attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1"))
attachment_required.validate()

_5_BYTES = six.b("\xe6\x00\xc4\xff\x07")
_4_BYTES = six.b("\xe6\x00\xc4\xff")
_5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1")
_4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1")
with pytest.raises(ValidationError):
AttachmentSizeLimit(blob=_5_BYTES).validate()
AttachmentSizeLimit(blob=_4_BYTES).validate()
@@ -123,10 +122,7 @@ class TestBinaryField(MongoDBTestCase):
upsert=True, new=True, set__bin_field=BIN_VALUE
)
assert doc.some_field == "test"
if six.PY3:
assert doc.bin_field == BIN_VALUE
else:
assert doc.bin_field == Binary(BIN_VALUE)
assert doc.bin_field == BIN_VALUE

def test_update_one(self):
"""Ensures no regression of bug #1127"""
@@ -136,7 +132,7 @@ class TestBinaryField(MongoDBTestCase):

MyDocument.drop_collection()

bin_data = six.b("\xe6\x00\xc4\xff\x07")
bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1")
doc = MyDocument(bin_field=bin_data).save()

n_updated = MyDocument.objects(bin_field=bin_data).update_one(
@@ -144,7 +140,4 @@ class TestBinaryField(MongoDBTestCase):
)
assert n_updated == 1
fetched = MyDocument.objects.with_id(doc.id)
if six.PY3:
assert fetched.bin_field == BIN_VALUE
else:
assert fetched.bin_field == Binary(BIN_VALUE)
assert fetched.bin_field == BIN_VALUE

@@ -4,6 +4,8 @@ import itertools
import math
import re

import pytest

from mongoengine import *

from tests.utils import MongoDBTestCase
@@ -191,3 +193,18 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):

fetched_log = Log.objects.with_id(log.id)
assert fetched_log.timestamp >= NOW

def test_setting_bad_value_does_not_raise_unless_validate_is_called(self):
# test regression of #2253

class Log(Document):
timestamp = ComplexDateTimeField()

Log.drop_collection()

log = Log(timestamp="garbage")
with pytest.raises(ValidationError):
log.validate()

with pytest.raises(ValidationError):
log.save()

@@ -2,7 +2,6 @@
import datetime

import pytest
import six

try:
import dateutil
@@ -89,17 +88,6 @@ class TestDateField(MongoDBTestCase):
assert log.date == d1.date()
assert log.date == d2.date()

if not six.PY3:
# Pre UTC dates microseconds below 1000 are dropped
# This does not seem to be true in PY3
d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
log.date = d1
log.save()
log.reload()
assert log.date == d1.date()
assert log.date == d2.date()

def test_regular_usage(self):
"""Tests for regular datetime fields"""

@@ -2,7 +2,6 @@
import datetime as dt

import pytest
import six

try:
import dateutil
@@ -98,17 +97,6 @@ class TestDateTimeField(MongoDBTestCase):
assert log.date != d1
assert log.date == d2

if not six.PY3:
# Pre UTC dates microseconds below 1000 are dropped
# This does not seem to be true in PY3
d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
log.date = d1
log.save()
log.reload()
assert log.date != d1
assert log.date == d2

def test_regular_usage(self):
"""Tests for regular datetime fields"""

@@ -213,7 +201,7 @@ class TestDateTimeField(MongoDBTestCase):
# make sure that passing a parsable datetime works
dtd = DTDoc()
dtd.date = date_str
assert isinstance(dtd.date, six.string_types)
assert isinstance(dtd.date, str)
dtd.save()
dtd.reload()

@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
from bson import InvalidDocument
import pytest

from mongoengine import *
@@ -19,22 +20,24 @@ class TestDictField(MongoDBTestCase):
post = BlogPost(info=info).save()
assert get_as_pymongo(post) == {"_id": post.id, "info": info}

def test_general_things(self):
"""Ensure that dict types work as expected."""
def test_validate_invalid_type(self):
class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

invalid_infos = ["my post", ["test", "test"], {1: "test"}]
for invalid_info in invalid_infos:
with pytest.raises(ValidationError):
BlogPost(info=invalid_info).validate()

def test_keys_with_dots_or_dollars(self):
class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

post = BlogPost()
post.info = "my post"
with pytest.raises(ValidationError):
post.validate()

post.info = ["test", "test"]
with pytest.raises(ValidationError):
post.validate()

post.info = {"$title": "test"}
with pytest.raises(ValidationError):
@@ -48,25 +51,34 @@ class TestDictField(MongoDBTestCase):
with pytest.raises(ValidationError):
post.validate()

post.info = {1: "test"}
with pytest.raises(ValidationError):
post.validate()

post.info = {"nested": {"the.title": "test"}}
if get_mongodb_version() < MONGODB_36:
with pytest.raises(ValidationError):
post.validate()
# MongoDB < 3.6 rejects dots
# To avoid checking the mongodb version from the DictField class
# we rely on MongoDB to reject the data during the save
post.validate()
with pytest.raises(InvalidDocument):
post.save()
else:
post.validate()

post.info = {"dollar_and_dot": {"te$st.test": "test"}}
if get_mongodb_version() < MONGODB_36:
with pytest.raises(ValidationError):
post.validate()
post.validate()
with pytest.raises(InvalidDocument):
post.save()
else:
post.validate()

post.info = {"title": "test"}
def test_general_things(self):
"""Ensure that dict types work as expected."""

class BlogPost(Document):
info = DictField()

BlogPost.drop_collection()

post = BlogPost(info={"title": "test"})
post.save()

post = BlogPost()

@@ -75,7 +75,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
# Test non exiting attribute
with pytest.raises(InvalidQueryError) as exc_info:
Person.objects(settings__notexist="bar").first()
assert unicode(exc_info.value) == u'Cannot resolve field "notexist"'
assert str(exc_info.value) == u'Cannot resolve field "notexist"'

with pytest.raises(LookUpError):
Person.objects.only("settings.notexist")
@@ -111,7 +111,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase):
# Test non exiting attribute
with pytest.raises(InvalidQueryError) as exc_info:
assert Person.objects(settings__notexist="bar").first().id == p.id
assert unicode(exc_info.value) == u'Cannot resolve field "notexist"'
assert str(exc_info.value) == u'Cannot resolve field "notexist"'

# Test existing attribute
assert Person.objects(settings__base_foo="basefoo").first().id == p.id
@@ -319,7 +319,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
# Test non exiting attribute
with pytest.raises(InvalidQueryError) as exc_info:
Person.objects(settings__notexist="bar").first()
assert unicode(exc_info.value) == u'Cannot resolve field "notexist"'
assert str(exc_info.value) == u'Cannot resolve field "notexist"'

with pytest.raises(LookUpError):
Person.objects.only("settings.notexist")
@@ -347,7 +347,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase):
# Test non exiting attribute
with pytest.raises(InvalidQueryError) as exc_info:
assert Person.objects(settings__notexist="bar").first().id == p.id
assert unicode(exc_info.value) == u'Cannot resolve field "notexist"'
assert str(exc_info.value) == u'Cannot resolve field "notexist"'

# Test existing attribute
assert Person.objects(settings__base_foo="basefoo").first().id == p.id

@@ -3,14 +3,13 @@ import copy
import os
import tempfile
import unittest
from io import BytesIO

import gridfs
import pytest
import six

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.python_support import StringIO

try:
from PIL import Image
@@ -30,7 +29,7 @@ TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png")
def get_file(path):
"""Use a BytesIO instead of a file to allow
to have a one-liner and avoid that the file remains opened"""
bytes_io = StringIO()
bytes_io = BytesIO()
with open(path, "rb") as f:
bytes_io.write(f.read())
bytes_io.seek(0)
@@ -58,7 +57,7 @@ class TestFileField(MongoDBTestCase):

PutFile.drop_collection()

text = six.b("Hello, World!")
text = "Hello, World!".encode("latin-1")
content_type = "text/plain"

putfile = PutFile()
@@ -80,7 +79,7 @@ class TestFileField(MongoDBTestCase):
PutFile.drop_collection()

putfile = PutFile()
putstring = StringIO()
putstring = BytesIO()
putstring.write(text)
putstring.seek(0)
putfile.the_file.put(putstring, content_type=content_type)
@@ -101,8 +100,8 @@ class TestFileField(MongoDBTestCase):

StreamFile.drop_collection()

text = six.b("Hello, World!")
more_text = six.b("Foo Bar")
text = "Hello, World!".encode("latin-1")
more_text = "Foo Bar".encode("latin-1")
content_type = "text/plain"

streamfile = StreamFile()
@@ -137,8 +136,8 @@ class TestFileField(MongoDBTestCase):

StreamFile.drop_collection()

text = six.b("Hello, World!")
more_text = six.b("Foo Bar")
text = "Hello, World!".encode("latin-1")
more_text = "Foo Bar".encode("latin-1")

streamfile = StreamFile()
streamfile.save()
@@ -167,8 +166,8 @@ class TestFileField(MongoDBTestCase):
class SetFile(Document):
the_file = FileField()

text = six.b("Hello, World!")
more_text = six.b("Foo Bar")
text = "Hello, World!".encode("latin-1")
more_text = "Foo Bar".encode("latin-1")

SetFile.drop_collection()

@@ -196,7 +195,7 @@ class TestFileField(MongoDBTestCase):
GridDocument.drop_collection()

with tempfile.TemporaryFile() as f:
f.write(six.b("Hello World!"))
f.write("Hello World!".encode("latin-1"))
f.flush()

# Test without default
@@ -213,7 +212,7 @@ class TestFileField(MongoDBTestCase):
assert doc_b.the_file.grid_id == doc_c.the_file.grid_id

# Test with default
doc_d = GridDocument(the_file=six.b(""))
doc_d = GridDocument(the_file="".encode("latin-1"))
doc_d.save()

doc_e = GridDocument.objects.with_id(doc_d.id)
@@ -240,7 +239,7 @@ class TestFileField(MongoDBTestCase):
# First instance
test_file = TestFile()
test_file.name = "Hello, World!"
test_file.the_file.put(six.b("Hello, World!"))
test_file.the_file.put("Hello, World!".encode("latin-1"))
test_file.save()

# Second instance
@@ -297,7 +296,9 @@ class TestFileField(MongoDBTestCase):

test_file = TestFile()
assert not bool(test_file.the_file)
test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain")
test_file.the_file.put(
"Hello, World!".encode("latin-1"), content_type="text/plain"
)
test_file.save()
assert bool(test_file.the_file)

@@ -319,7 +320,7 @@ class TestFileField(MongoDBTestCase):
class TestFile(Document):
the_file = FileField()

text = six.b("Hello, World!")
text = "Hello, World!".encode("latin-1")
content_type = "text/plain"

testfile = TestFile()
@@ -363,7 +364,7 @@ class TestFileField(MongoDBTestCase):
testfile.the_file.put(text, content_type=content_type, filename="hello")
testfile.save()

text = six.b("Bonjour, World!")
text = "Bonjour, World!".encode("latin-1")
testfile.the_file.replace(text, content_type=content_type, filename="hello")
testfile.save()

@@ -387,7 +388,7 @@ class TestFileField(MongoDBTestCase):
TestImage.drop_collection()

with tempfile.TemporaryFile() as f:
f.write(six.b("Hello World!"))
f.write("Hello World!".encode("latin-1"))
f.flush()

t = TestImage()
@@ -503,21 +504,21 @@ class TestFileField(MongoDBTestCase):
# First instance
test_file = TestFile()
test_file.name = "Hello, World!"
test_file.the_file.put(six.b("Hello, World!"), name="hello.txt")
test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt")
test_file.save()

data = get_db("test_files").macumba.files.find_one()
assert data.get("name") == "hello.txt"

test_file = TestFile.objects.first()
assert test_file.the_file.read() == six.b("Hello, World!")
assert test_file.the_file.read() == "Hello, World!".encode("latin-1")

test_file = TestFile.objects.first()
test_file.the_file = six.b("HELLO, WORLD!")
test_file.the_file = "Hello, World!".encode("latin-1")
test_file.save()

test_file = TestFile.objects.first()
assert test_file.the_file.read() == six.b("HELLO, WORLD!")
assert test_file.the_file.read() == "Hello, World!".encode("latin-1")

def test_copyable(self):
class PutFile(Document):
@@ -525,7 +526,7 @@ class TestFileField(MongoDBTestCase):

PutFile.drop_collection()

text = six.b("Hello, World!")
text = "Hello, World!".encode("latin-1")
content_type = "text/plain"

putfile = PutFile()

@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
import pytest
import six

from mongoengine import *

@@ -52,9 +51,8 @@ class TestFloatField(MongoDBTestCase):

big_person = BigPerson()

for value, value_type in enumerate(six.integer_types):
big_person.height = value_type(value)
big_person.validate()
big_person.height = int(0)
big_person.validate()

big_person.height = 2 ** 500
big_person.validate()

@@ -152,7 +152,7 @@ class TestLazyReferenceField(MongoDBTestCase):
LazyReference(BadDoc, animal.pk),
):
with pytest.raises(ValidationError):
p = Ocurrence(person="test", animal=bad).save()
Ocurrence(person="test", animal=bad).save()

def test_lazy_reference_query_conversion(self):
"""Ensure that LazyReferenceFields can be queried using objects and values
@@ -386,7 +386,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
mineral = Mineral(name="Granite").save()

occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
_ = Ocurrence(living_thing=vegetal, thing=vegetal).save()
with pytest.raises(ValidationError):
Ocurrence(living_thing=mineral).save()

@@ -458,7 +458,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase):
baddoc = BadDoc().save()
for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)):
with pytest.raises(ValidationError):
p = Ocurrence(person="test", animal=bad).save()
Ocurrence(person="test", animal=bad).save()

def test_generic_lazy_reference_query_conversion(self):
class Member(Document):

@@ -1,11 +1,5 @@
# -*- coding: utf-8 -*-
from bson.int64 import Int64
import pytest
import six

try:
from bson.int64 import Int64
except ImportError:
Int64 = long

from mongoengine import *
from mongoengine.connection import get_db
@@ -28,7 +22,7 @@ class TestLongField(MongoDBTestCase):
assert isinstance(
db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
)
assert isinstance(doc.some_long, six.integer_types)
assert isinstance(doc.some_long, int)

def test_long_validation(self):
"""Ensure that invalid values cannot be assigned to long fields.

@@ -21,7 +21,7 @@ class TestSequenceField(MongoDBTestCase):
assert c["next"] == 10

ids = [i.id for i in Person.objects]
assert ids == range(1, 11)
assert ids == list(range(1, 11))

c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10
@@ -76,7 +76,7 @@ class TestSequenceField(MongoDBTestCase):
assert c["next"] == 10

ids = [i.id for i in Person.objects]
assert ids == range(1, 11)
assert ids == list(range(1, 11))

c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
assert c["next"] == 10
@@ -101,10 +101,10 @@ class TestSequenceField(MongoDBTestCase):
assert c["next"] == 10

ids = [i.id for i in Person.objects]
assert ids == range(1, 11)
assert ids == list(range(1, 11))

counters = [i.counter for i in Person.objects]
assert counters == range(1, 11)
assert counters == list(range(1, 11))

c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10
@@ -166,10 +166,10 @@ class TestSequenceField(MongoDBTestCase):
assert c["next"] == 10

ids = [i.id for i in Person.objects]
assert ids == range(1, 11)
assert ids == list(range(1, 11))

id = [i.id for i in Animal.objects]
assert id == range(1, 11)
assert id == list(range(1, 11))

c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10
@@ -193,7 +193,7 @@ class TestSequenceField(MongoDBTestCase):
assert c["next"] == 10

ids = [i.id for i in Person.objects]
assert ids == map(str, range(1, 11))
assert ids == [str(i) for i in range(1, 11)]

c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
assert c["next"] == 10
@@ -267,12 +267,12 @@ class TestSequenceField(MongoDBTestCase):
foo = Foo(name="Foo")
foo.save()

assert not (
"base.counter" in self.db["mongoengine.counters"].find().distinct("_id")
assert "base.counter" not in self.db["mongoengine.counters"].find().distinct(
"_id"
)
assert ("foo.counter" and "bar.counter") in self.db[
"mongoengine.counters"
].find().distinct("_id")
existing_counters = self.db["mongoengine.counters"].find().distinct("_id")
assert "foo.counter" in existing_counters
assert "bar.counter" in existing_counters
assert foo.counter == bar.counter
assert foo._fields["counter"].owner_document == Foo
assert bar._fields["counter"].owner_document == Bar

@@ -2,7 +2,6 @@
import pytest

from mongoengine import *

from tests.utils import MongoDBTestCase

@@ -35,7 +34,7 @@ class TestURLField(MongoDBTestCase):
with pytest.raises(ValidationError) as exc_info:
link.validate()
assert (
unicode(exc_info.value)
str(exc_info.value)
== u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
)

@@ -11,8 +11,6 @@ from pymongo.read_concern import ReadConcern
from pymongo.read_preferences import ReadPreference
from pymongo.results import UpdateResult
import pytest
import six
from six import iteritems

from mongoengine import *
from mongoengine.connection import get_db
@@ -111,7 +109,7 @@ class TestQueryset(unittest.TestCase):
# Filter people by age
people = self.Person.objects(age=20)
assert people.count() == 1
person = people.next()
person = next(people)
assert person == user_a
assert person.name == "User A"
assert person.age == 20
@@ -119,7 +117,7 @@ class TestQueryset(unittest.TestCase):
def test_limit(self):
"""Ensure that QuerySet.limit works as expected."""
user_a = self.Person.objects.create(name="User A", age=20)
user_b = self.Person.objects.create(name="User B", age=30)
_ = self.Person.objects.create(name="User B", age=30)

# Test limit on a new queryset
people = list(self.Person.objects.limit(1))
@@ -151,6 +149,11 @@ class TestQueryset(unittest.TestCase):
user_b = self.Person.objects.create(name="User B", age=30)

# Test skip on a new queryset
people = list(self.Person.objects.skip(0))
assert len(people) == 2
assert people[0] == user_a
assert people[1] == user_b

people = list(self.Person.objects.skip(1))
assert len(people) == 1
assert people[0] == user_b
@@ -275,32 +278,47 @@ class TestQueryset(unittest.TestCase):
with pytest.raises(InvalidQueryError):
self.Person.objects(name="User A").with_id(person1.id)

def test_find_only_one(self):
"""Ensure that a query using ``get`` returns at most one result.
"""
def test_get_no_document_exists_raises_doesnotexist(self):
assert self.Person.objects.count() == 0
# Try retrieving when no objects exists
with pytest.raises(DoesNotExist):
self.Person.objects.get()
with pytest.raises(self.Person.DoesNotExist):
self.Person.objects.get()

def test_get_multiple_match_raises_multipleobjectsreturned(self):
"""Ensure that a query using ``get`` returns at most one result.
"""
assert self.Person.objects().count() == 0

person1 = self.Person(name="User A", age=20)
person1.save()
person2 = self.Person(name="User B", age=30)

p = self.Person.objects.get()
assert p == person1

person2 = self.Person(name="User B", age=20)
person2.save()

# Retrieve the first person from the database
person3 = self.Person(name="User C", age=30)
person3.save()

# .get called without argument
with pytest.raises(MultipleObjectsReturned):
self.Person.objects.get()
with pytest.raises(self.Person.MultipleObjectsReturned):
self.Person.objects.get()

# check filtering
with pytest.raises(MultipleObjectsReturned):
self.Person.objects.get(age__lt=30)
with pytest.raises(MultipleObjectsReturned) as exc_info:
self.Person.objects(age__lt=30).get()
assert "2 or more items returned, instead of 1" == str(exc_info.value)

# Use a query to filter the people found to just person2
person = self.Person.objects.get(age=30)
assert person.name == "User B"

person = self.Person.objects.get(age__lt=30)
assert person.name == "User A"
assert person == person3

def test_find_array_position(self):
"""Ensure that query by array position works.
@@ -2574,13 +2592,8 @@ class TestQueryset(unittest.TestCase):
age = IntField()

with db_ops_tracker() as q:
adult1 = (
User.objects.filter(age__gte=18).comment("looking for an adult").first()
)

adult2 = (
User.objects.comment("looking for an adult").filter(age__gte=18).first()
)
User.objects.filter(age__gte=18).comment("looking for an adult").first()
User.objects.comment("looking for an adult").filter(age__gte=18).first()

ops = q.get_ops()
assert len(ops) == 2
@@ -2769,7 +2782,7 @@ class TestQueryset(unittest.TestCase):
)

# start a map/reduce
cursor.next()
next(cursor)

results = Person.objects.map_reduce(
map_f=map_person,
@@ -4094,7 +4107,7 @@ class TestQueryset(unittest.TestCase):
info = Comment.objects._collection.index_information()
info = [
(value["key"], value.get("unique", False), value.get("sparse", False))
for key, value in iteritems(info)
for key, value in info.items()
]
assert ([("_cls", 1), ("message", 1)], False, False) in info

@@ -4396,7 +4409,7 @@ class TestQueryset(unittest.TestCase):
# Use a query to filter the people found to just person1
people = self.Person.objects(age=20).scalar("name")
assert people.count() == 1
person = people.next()
person = next(people)
assert person == "User A"

# Test limit
@@ -4446,24 +4459,14 @@ class TestQueryset(unittest.TestCase):
"A0" == "%s" % self.Person.objects.order_by("name").scalar("name").first()
)
assert "A0" == "%s" % self.Person.objects.scalar("name").order_by("name")[0]
if six.PY3:
assert (
"['A1', 'A2']"
== "%s" % self.Person.objects.order_by("age").scalar("name")[1:3]
)
assert (
"['A51', 'A52']"
== "%s" % self.Person.objects.order_by("age").scalar("name")[51:53]
)
else:
assert (
"[u'A1', u'A2']"
== "%s" % self.Person.objects.order_by("age").scalar("name")[1:3]
)
assert (
"[u'A51', u'A52']"
== "%s" % self.Person.objects.order_by("age").scalar("name")[51:53]
)
assert (
"['A1', 'A2']"
== "%s" % self.Person.objects.order_by("age").scalar("name")[1:3]
)
assert (
"['A51', 'A52']"
== "%s" % self.Person.objects.order_by("age").scalar("name")[51:53]
)

# with_id and in_bulk
person = self.Person.objects.order_by("name").first()
@@ -4471,10 +4474,7 @@ class TestQueryset(unittest.TestCase):

pks = self.Person.objects.order_by("age").scalar("pk")[1:3]
names = self.Person.objects.scalar("name").in_bulk(list(pks)).values()
if six.PY3:
expected = "['A1', 'A2']"
else:
expected = "[u'A1', u'A2']"
expected = "['A1', 'A2']"
assert expected == "%s" % sorted(names)

def test_fields(self):
@@ -4519,7 +4519,7 @@ class TestQueryset(unittest.TestCase):

foos_without_y = list(Foo.objects.order_by("y").fields(y=0))

assert all(o.y is None for o in foos_with_x)
assert all(o.y is None for o in foos_without_y)

foos_with_sliced_items = list(Foo.objects.order_by("y").fields(slice__items=1))

@@ -5403,7 +5403,7 @@ class TestQueryset(unittest.TestCase):
if not test:
raise AssertionError("Cursor has data and returned False")

queryset.next()
next(queryset)
if not queryset:
raise AssertionError(
"Cursor has data and it must returns True, even in the last item."
@@ -5636,7 +5636,7 @@ class TestQueryset(unittest.TestCase):
self.Person.objects.create(name="Baz")
assert self.Person.objects.count(with_limit_and_skip=True) == 3

newPerson = self.Person.objects.create(name="Foo_1")
self.Person.objects.create(name="Foo_1")
assert self.Person.objects.count(with_limit_and_skip=True) == 4

def test_no_cursor_timeout(self):

@@ -348,7 +348,7 @@ class ConnectionTest(unittest.TestCase):

def test_disconnect_cleans_cached_collection_attribute_in_document(self):
"""Ensure that the disconnect() method works properly"""
conn1 = connect("mongoenginetest")
connect("mongoenginetest")

class History(Document):
pass
@@ -518,7 +518,7 @@ class ConnectionTest(unittest.TestCase):
"""Ensure connect() uses the username & password params if the URI
doesn't explicitly specify them.
"""
c = connect(
connect(
host="mongodb://localhost/mongoenginetest", username="user", password="pass"
)

@@ -632,7 +632,7 @@ class ConnectionTest(unittest.TestCase):
"""Ensure connect() works when specifying a replicaSet via the
MongoDB URI.
"""
c = connect(host="mongodb://localhost/test?replicaSet=local-rs")
connect(host="mongodb://localhost/test?replicaSet=local-rs")
db = get_db()
assert isinstance(db, pymongo.database.Database)
assert db.name == "test"

@@ -264,7 +264,7 @@ class TestContextManagers:

def test_query_counter_does_not_swallow_exception(self):
with pytest.raises(TypeError):
with query_counter() as q:
with query_counter():
raise TypeError()

def test_query_counter_temporarily_modifies_profiling_level(self):
@@ -274,12 +274,12 @@ class TestContextManagers:
initial_profiling_level = db.profiling_level()

try:
NEW_LEVEL = 1
db.set_profiling_level(NEW_LEVEL)
assert db.profiling_level() == NEW_LEVEL
with query_counter() as q:
new_level = 1
db.set_profiling_level(new_level)
assert db.profiling_level() == new_level
with query_counter():
assert db.profiling_level() == 2
assert db.profiling_level() == NEW_LEVEL
assert db.profiling_level() == new_level
except Exception:
db.set_profiling_level(
initial_profiling_level

@@ -1,7 +1,6 @@
import unittest

import pytest
from six import iterkeys

from mongoengine import Document
from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict
@@ -287,7 +286,7 @@ class TestBaseList:
base_list[:] = [
0,
1,
] # Will use __setslice__ under py2 and __setitem__ under py3
]
assert base_list._instance._changed_fields == ["my_name"]
assert base_list == [0, 1]

@@ -296,13 +295,13 @@ class TestBaseList:
base_list[0:2] = [
1,
0,
] # Will use __setslice__ under py2 and __setitem__ under py3
]
assert base_list._instance._changed_fields == ["my_name"]
assert base_list == [1, 0, 2]

def test___setitem___calls_with_step_slice_mark_as_changed(self):
base_list = self._get_baselist([0, 1, 2])
base_list[0:3:2] = [-1, -2] # uses __setitem__ in both py2 & 3
base_list[0:3:2] = [-1, -2] # uses __setitem__
assert base_list._instance._changed_fields == ["my_name"]
assert base_list == [-1, 1, -2]

@@ -372,7 +371,7 @@ class TestStrictDict(unittest.TestCase):

def test_iterkeys(self):
d = self.dtype(a=1)
assert list(iterkeys(d)) == ["a"]
assert list(d.keys()) == ["a"]

def test_len(self):
d = self.dtype(a=1)

@@ -2,10 +2,8 @@
import unittest

from bson import DBRef, ObjectId
from six import iteritems

from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.context_managers import query_counter

@@ -739,7 +737,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 2

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert isinstance(m, User)

# Document select_related
@@ -752,7 +750,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 2

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert isinstance(m, User)

# Queryset select_related
@@ -766,7 +764,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 2

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert isinstance(m, User)

User.drop_collection()
@@ -820,7 +818,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 4

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert "User" in m.__class__.__name__

# Document select_related
@@ -836,7 +834,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 4

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert "User" in m.__class__.__name__

# Queryset select_related
@@ -853,7 +851,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 4

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert "User" in m.__class__.__name__

Group.objects.delete()
@@ -910,7 +908,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 2

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert isinstance(m, UserA)

# Document select_related
@@ -926,7 +924,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 2

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert isinstance(m, UserA)

# Queryset select_related
@@ -943,7 +941,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 2

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert isinstance(m, UserA)

UserA.drop_collection()
@@ -997,7 +995,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 4

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert "User" in m.__class__.__name__

# Document select_related
@@ -1013,7 +1011,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 4

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert "User" in m.__class__.__name__

# Queryset select_related
@@ -1030,7 +1028,7 @@ class FieldTest(unittest.TestCase):
[m for m in group_obj.members]
assert q == 4

for k, m in iteritems(group_obj.members):
for k, m in group_obj.members.items():
assert "User" in m.__class__.__name__

Group.objects.delete()

@@ -58,7 +58,9 @@ class TestSignal(unittest.TestCase):

@classmethod
def post_save(cls, sender, document, **kwargs):
dirty_keys = document._delta()[0].keys() + document._delta()[1].keys()
dirty_keys = list(document._delta()[0].keys()) + list(
document._delta()[1].keys()
)
signal_output.append("post_save signal, %s" % document)
signal_output.append("post_save dirty keys, %s" % dirty_keys)
if kwargs.pop("created", False):
@@ -265,7 +267,7 @@ class TestSignal(unittest.TestCase):
a = self.Author(name="Bill Shakespeare")
a.save()
self.get_signal_output(lambda: None) # eliminate signal output
a1 = self.Author.objects(name="Bill Shakespeare")[0]
_ = self.Author.objects(name="Bill Shakespeare")[0]

assert self.get_signal_output(create_author) == [
"pre_init signal, Author",