Merge branch 'master' of github.com:MongoEngine/mongoengine into bump_development_status_classifier

commit 1bfa40e926

AUTHORS
@@ -255,3 +255,4 @@ that much better:
* Filip Kucharczyk (https://github.com/Pacu2)
* Eric Timmons (https://github.com/daewok)
* Matthew Simpson (https://github.com/mcsimps2)
* Leonardo Domingues (https://github.com/leodmgs)

@@ -8,6 +8,15 @@ Development
- (Fill this out as you fix issues and develop your features).
- Add Mongo 4.0 to Travis
- Improve Queryset.get to avoid a confusing MultipleObjectsReturned message when multiple matches are found #630
- Bump development Status classifier to Production/Stable #2232
- Fixed a bug causing inaccurate query results when combining ``__raw__`` and regular filters for the same field #2264
- Add support for the `elemMatch` projection operator in .fields() (e.g. BlogPost.objects.fields(elemMatch__comments="test")) #2267 (usage sketch below)
- Fix DictField validation failing without a default connection (bug introduced in 0.19.0) #2239
- Remove methods deprecated years ago:
    - name parameter in Field constructor, e.g. `StringField(name="...")`; it was replaced by db_field
    - Queryset.slave_okay(), deprecated since PyMongo 3
    - dropDups, which was dropped with MongoDB 3
    - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``; the right method to use is ``Document.ensure_indexes``
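
A rough sketch of the two queryset-facing changes above (``Comment``/``BlogPost`` and their fields are hypothetical; the dict form of elemMatch mirrors the new test added in this commit, and an existing connection via ``connect()`` is assumed)::

    from mongoengine import Document, EmbeddedDocument, EmbeddedDocumentListField, StringField

    class Comment(EmbeddedDocument):
        by = StringField()
        text = StringField()

    class BlogPost(Document):
        title = StringField()
        comments = EmbeddedDocumentListField(Comment)

    # elemMatch projection: keep only the first comment matching the criteria
    posts = BlogPost.objects.fields(elemMatch__comments={"by": "test"})

    # get() now raises MultipleObjectsReturned with a fixed
    # "2 or more items returned, instead of 1" message rather than
    # counting every match just to build the error text
    try:
        post = BlogPost.objects.get(title="duplicate")
    except BlogPost.MultipleObjectsReturned:
        pass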

Changes in 0.19.1
=================

@@ -555,7 +555,6 @@ There are a few top level defaults for all indexes that can be set::
        'index_background': True,
        'index_cls': False,
        'auto_create_index': True,
        'index_drop_dups': True,
    }

@@ -574,11 +573,6 @@ There are a few top level defaults for all indexes that can be set::
    in systems where indexes are managed separately. Disabling this will improve
    performance.

:attr:`index_drop_dups` (Optional)
    Set the default value for whether an index should drop duplicates
    Since MongoDB 3.0, drop_dups is not supported anymore. Raises a Warning
    and has no effect.
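
As an illustration of these top-level index defaults (a sketch only; the ``Log`` class and its field are hypothetical)::

    from mongoengine import Document, StringField

    class Log(Document):
        message = StringField()

        meta = {
            "indexes": ["message"],
            # top-level index defaults described above
            "index_background": True,   # build indexes in the background
            "index_cls": False,         # don't include the _cls field in indexes
            "auto_create_index": True,  # ensure declared indexes at runtime
            # 'index_drop_dups' is ignored on MongoDB 3.0+ and is being removed
        }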

Compound Indexes and Indexing sub documents
-------------------------------------------

@@ -36,7 +36,6 @@ class BaseField(object):
    def __init__(
        self,
        db_field=None,
        name=None,
        required=False,
        default=None,
        unique=False,

@@ -51,7 +50,6 @@ class BaseField(object):
        """
        :param db_field: The database field to store this field in
            (defaults to the name of the field)
        :param name: Deprecated - use db_field
        :param required: If the field is required. Whether it has to have a
            value or not. Defaults to False.
        :param default: (optional) The default value for this field if no value

@@ -75,11 +73,8 @@ class BaseField(object):
            existing attributes. Common metadata includes `verbose_name` and
            `help_text`.
        """
        self.db_field = (db_field or name) if not primary_key else "_id"
        self.db_field = db_field if not primary_key else "_id"

        if name:
            msg = 'Field\'s "name" attribute deprecated in favour of "db_field"'
            warnings.warn(msg, DeprecationWarning)
        self.required = required or primary_key
        self.default = default
        self.unique = bool(unique or unique_with)
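
Since the deprecated ``name`` keyword is removed from the field constructor, fields that need a custom database key should pass ``db_field``. A minimal sketch (the ``Page`` class is hypothetical)::

    from mongoengine import Document, StringField

    class Page(Document):
        # formerly StringField(name="page_title"); "name" was replaced by db_field
        title = StringField(db_field="page_title")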

@@ -284,7 +284,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
            "indexes": [],  # indexes to be ensured at runtime
            "id_field": None,
            "index_background": False,
            "index_drop_dups": False,
            "index_opts": None,
            "delete_rules": None,
            # allow_inheritance can be True, False, and None. True means

@@ -56,7 +56,7 @@ class InvalidCollectionError(Exception):


class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
    """A :class:`~mongoengine.Document` that isn't stored in its own
    r"""A :class:`~mongoengine.Document` that isn't stored in its own
    collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
    fields on :class:`~mongoengine.Document`\ s through the
    :class:`~mongoengine.EmbeddedDocumentField` field type.

@@ -332,7 +332,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
    ):
        """Save the :class:`~mongoengine.Document` to the database. If the
        document already exists, it will be updated, otherwise it will be
        created.
        created. Returns the saved object instance.

        :param force_insert: only try to create a new document, don't allow
            updates of existing documents.
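
A brief sketch of both docstrings' points, using hypothetical ``Comment``/``Page`` classes and assuming an open connection::

    from mongoengine import Document, EmbeddedDocument, EmbeddedDocumentField, StringField

    class Comment(EmbeddedDocument):
        # not stored in its own collection; embedded inside Page
        content = StringField()

    class Page(Document):
        comment = EmbeddedDocumentField(Comment)

    # save() returns the saved instance, so it can be chained on creation
    page = Page(comment=Comment(content="hi")).save()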

@@ -851,17 +851,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        index_spec = cls._build_index_spec(keys)
        index_spec = index_spec.copy()
        fields = index_spec.pop("fields")
        drop_dups = kwargs.get("drop_dups", False)
        if drop_dups:
            msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
            warnings.warn(msg, DeprecationWarning)
        index_spec["background"] = background
        index_spec.update(kwargs)

        return cls._get_collection().create_index(fields, **index_spec)

    @classmethod
    def ensure_index(cls, key_or_list, drop_dups=False, background=False, **kwargs):
    def ensure_index(cls, key_or_list, background=False, **kwargs):
        """Ensure that the given indexes are in place. Deprecated in favour
        of create_index.

@@ -869,12 +865,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
            construct a multi-field index); keys may be prefixed with a **+**
            or a **-** to determine the index ordering
        :param background: Allows index creation in the background
        :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value
            will be removed if PyMongo3+ is used
        """
        if drop_dups:
            msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
            warnings.warn(msg, DeprecationWarning)
        return cls.create_index(key_or_list, background=background, **kwargs)

    @classmethod

@@ -887,12 +878,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
        `auto_create_index` to False in the document's meta data
        """
        background = cls._meta.get("index_background", False)
        drop_dups = cls._meta.get("index_drop_dups", False)
        index_opts = cls._meta.get("index_opts") or {}
        index_cls = cls._meta.get("index_cls", True)
        if drop_dups:
            msg = "drop_dups is deprecated and is removed when using PyMongo 3+."
            warnings.warn(msg, DeprecationWarning)

        collection = cls._get_collection()
        # 746: when connection is via mongos, the read preference is not necessarily an indication that
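
With ``drop_dups`` gone and the queryset-level helpers removed, index management goes through the ``Document`` class methods. A hedged sketch (the ``Sale`` class is hypothetical; a connection is assumed)::

    from mongoengine import Document, IntField

    class Sale(Document):
        amount = IntField()

    # create a single index explicitly; there is no drop_dups keyword anymore
    Sale.create_index(["+amount"])

    # or ensure everything declared in meta['indexes'] exists
    Sale.ensure_indexes()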

@@ -1088,14 +1088,12 @@ class DictField(ComplexBaseField):
            msg = "Invalid dictionary key - documents must have only string keys"
            self.error(msg)

        curr_mongo_ver = get_mongodb_version()

        if curr_mongo_ver < MONGODB_36 and key_has_dot_or_dollar(value):
            self.error(
                'Invalid dictionary key name - keys may not contain "."'
                ' or startswith "$" characters'
            )
        elif curr_mongo_ver >= MONGODB_36 and key_starts_with_dollar(value):
        # Following condition applies to MongoDB >= 3.6
        # older Mongo has stricter constraints but
        # it will be rejected upon insertion anyway
        # Having a validation that depends on the MongoDB version
        # is not straightforward as the field isn't aware of the connected Mongo
        if key_starts_with_dollar(value):
            self.error(
                'Invalid dictionary key name - keys may not startswith "$" characters'
            )
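
The net effect on ``DictField`` validation, sketched with a hypothetical ``BlogPost``::

    from mongoengine import DictField, Document, ValidationError

    class BlogPost(Document):
        info = DictField()

    # keys starting with "$" are still rejected client-side
    try:
        BlogPost(info={"$title": "test"}).validate()
    except ValidationError:
        pass

    # dotted keys now pass client-side validation; MongoDB versions that
    # forbid them reject the document at save time instead
    BlogPost(info={"nested": {"the.title": "test"}}).validate()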

@@ -11,7 +11,7 @@ MONGODB_36 = (3, 6)


def get_mongodb_version():
    """Return the version of the connected mongoDB (first 2 digits)
    """Return the version of the default connected mongoDB (first 2 digits)

    :return: tuple(int, int)
    """
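
For context, the helper is used roughly like this (a sketch; the ``mongoengine.mongodb_support`` import path is an assumption based on where these constants appear to live)::

    from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version

    if get_mongodb_version() >= MONGODB_36:
        # e.g. rely on the server to reject dotted keys at insertion time
        pass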

@@ -60,7 +60,6 @@ class BaseQuerySet(object):
        self._ordering = None
        self._snapshot = False
        self._timeout = True
        self._slave_okay = False
        self._read_preference = None
        self._iter = False
        self._scalar = []

@@ -260,16 +259,18 @@ class BaseQuerySet(object):
        except StopIteration:
            msg = "%s matching query does not exist." % queryset._document._class_name
            raise queryset._document.DoesNotExist(msg)

        try:
            # Check if there is another match
            six.next(queryset)
        except StopIteration:
            return result

        # If we were able to retrieve the 2nd doc, rewind the cursor and
        # raise the MultipleObjectsReturned exception.
        queryset.rewind()
        message = u"%d items returned, instead of 1" % queryset.count()
        raise queryset._document.MultipleObjectsReturned(message)
        raise queryset._document.MultipleObjectsReturned(
            u"2 or more items returned, instead of 1"
        )

    def create(self, **kwargs):
        """Create new object. Returns the saved object instance.
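
In user code the change is only visible in the exception message; the control flow stays the same. A sketch with a hypothetical ``Person`` document (a connection is assumed)::

    from mongoengine import Document, StringField

    class Person(Document):
        name = StringField()

    try:
        person = Person.objects.get(name="User A")
    except Person.DoesNotExist:
        person = None
    except Person.MultipleObjectsReturned:
        # message is now always "2 or more items returned, instead of 1";
        # the queryset no longer rewinds and counts every match just to
        # build the error text
        raise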

@@ -694,8 +695,8 @@ class BaseQuerySet(object):
    def in_bulk(self, object_ids):
        """Retrieve a set of documents by their ids.

        :param object_ids: a list or tuple of ``ObjectId``\ s
        :rtype: dict of ObjectIds as keys and collection-specific
        :param object_ids: a list or tuple of ObjectId's
        :rtype: dict of ObjectId's as keys and collection-specific
            Document subclasses as values.

        .. versionadded:: 0.3

@@ -775,7 +776,6 @@ class BaseQuerySet(object):
            "_ordering",
            "_snapshot",
            "_timeout",
            "_slave_okay",
            "_read_preference",
            "_iter",
            "_scalar",

@@ -1026,9 +1026,11 @@ class BaseQuerySet(object):

            posts = BlogPost.objects(...).fields(comments=0)

        To retrieve a subrange of array elements:
        To retrieve a subrange or sublist of array elements,
        support exists for both the `slice` and `elemMatch` projection operators:

            posts = BlogPost.objects(...).fields(slice__comments=5)
            posts = BlogPost.objects(...).fields(elemMatch__comments="test")

        :param kwargs: A set of keyword arguments identifying what to
            include, exclude, or slice.

@@ -1037,7 +1039,7 @@ class BaseQuerySet(object):
        """

        # Check for an operator and transform to mongo-style if there is
        operators = ["slice"]
        operators = ["slice", "elemMatch"]
        cleaned_fields = []
        for key, value in kwargs.items():
            parts = key.split("__")
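
The ``elemMatch`` key also accepts a dict, which is handy for lists of embedded documents. A one-line sketch reusing the ``Foo``/``Bar`` classes defined in the new ``test_fields`` test further down::

    # keep only the first embedded item whose z == "b" in each result
    foos = Foo.objects.order_by("y").fields(elemMatch__items={"z": "b"})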

@@ -1140,7 +1142,7 @@ class BaseQuerySet(object):

    def explain(self):
        """Return an explain plan record for the
        :class:`~mongoengine.queryset.QuerySet`\ 's cursor.
        :class:`~mongoengine.queryset.QuerySet` cursor.
        """
        return self._cursor.explain()

@@ -1170,20 +1172,6 @@ class BaseQuerySet(object):
        queryset._timeout = enabled
        return queryset

    # DEPRECATED. Has no more impact on PyMongo 3+
    def slave_okay(self, enabled):
        """Enable or disable the slave_okay when querying.

        :param enabled: whether or not the slave_okay is enabled

        .. deprecated:: Ignored with PyMongo 3+
        """
        msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+."
        warnings.warn(msg, DeprecationWarning)
        queryset = self.clone()
        queryset._slave_okay = enabled
        return queryset

    def read_preference(self, read_preference):
        """Change the read_preference when querying.
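
Code that still calls the removed ``slave_okay()`` should switch to ``read_preference()``, which is what PyMongo 3+ actually honours. A sketch (``BlogPost`` is hypothetical; a connection is assumed)::

    from pymongo import ReadPreference
    from mongoengine import Document, StringField

    class BlogPost(Document):
        title = StringField()

    # before (removed): BlogPost.objects.slave_okay(True)
    posts = BlogPost.objects.read_preference(ReadPreference.SECONDARY_PREFERRED)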

@@ -1958,23 +1946,3 @@ class BaseQuerySet(object):
        setattr(queryset, "_" + method_name, val)

        return queryset

    # Deprecated
    def ensure_index(self, **kwargs):
        """Deprecated use :func:`Document.ensure_index`"""
        msg = (
            "Doc.objects()._ensure_index() is deprecated. "
            "Use Doc.ensure_index() instead."
        )
        warnings.warn(msg, DeprecationWarning)
        self._document.__class__.ensure_index(**kwargs)
        return self

    def _ensure_indexes(self):
        """Deprecated use :func:`~Document.ensure_indexes`"""
        msg = (
            "Doc.objects()._ensure_indexes() is deprecated. "
            "Use Doc.ensure_indexes() instead."
        )
        warnings.warn(msg, DeprecationWarning)
        self._document.__class__.ensure_indexes()

@@ -169,9 +169,9 @@ def query(_doc_cls=None, **kwargs):

        key = ".".join(parts)

        if op is None or key not in mongo_query:
        if key not in mongo_query:
            mongo_query[key] = value
        elif key in mongo_query:
        else:
            if isinstance(mongo_query[key], dict) and isinstance(value, dict):
                mongo_query[key].update(value)
                # $max/minDistance needs to come last - convert to SON
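
This is the change behind changelog entry #2264: a regular filter and a ``__raw__`` filter on the same field are now combined with ``$and`` instead of one silently overwriting the other, as the updated transform test at the end of this diff asserts::

    from mongoengine.queryset import transform

    assert transform.query(name__in=["Tom"], __raw__={"name": "Mark"}) == {
        "$and": [{"name": {"$in": ["Tom"]}}, {"name": "Mark"}]
    }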

setup.py
@@ -108,6 +108,10 @@ CLASSIFIERS = [
    "Topic :: Software Development :: Libraries :: Python Modules",
]

PYTHON_VERSION = sys.version_info[0]
PY3 = PYTHON_VERSION == 3
PY2 = PYTHON_VERSION == 2

extra_opts = {
    "packages": find_packages(exclude=["tests", "tests.*"]),
    "tests_require": [

@@ -116,9 +120,12 @@ extra_opts = {
        "coverage<5.0",  # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls
        "blinker",
        "Pillow>=2.0.0, <7.0.0",  # 7.0.0 dropped Python2 support
        "zipp<2.0.0",  # (dependency of pytest) dropped python2 support
        "pyparsing<3",  # sub-dependency that dropped py2 support
        "configparser<5",  # sub-dependency that dropped py2 support
    ],
}
if sys.version_info[0] == 3:
if PY3:
    extra_opts["use_2to3"] = True
    if "test" in sys.argv:
        extra_opts["packages"] = find_packages()

@@ -806,18 +806,6 @@ class TestIndexes(unittest.TestCase):
        info = Log.objects._collection.index_information()
        assert 3600 == info["created_1"]["expireAfterSeconds"]

    def test_index_drop_dups_silently_ignored(self):
        class Customer(Document):
            cust_id = IntField(unique=True, required=True)
            meta = {
                "indexes": ["cust_id"],
                "index_drop_dups": True,
                "allow_inheritance": False,
            }

        Customer.drop_collection()
        Customer.objects.first()

    def test_unique_and_indexes(self):
        """Ensure that 'unique' constraints aren't overridden by
        meta.indexes.

@@ -1058,10 +1046,6 @@ class TestIndexes(unittest.TestCase):
            del index_info[key][
                "ns"
            ]  # drop the index namespace - we don't care about that here, MongoDB 3+
            if "dropDups" in index_info[key]:
                del index_info[key][
                    "dropDups"
                ]  # drop the index dropDups - it is deprecated in MongoDB 3+

        assert index_info == {
            "txt_1": {"key": [("txt", 1)], "background": False},

@@ -523,7 +523,6 @@ class TestInheritance(MongoDBTestCase):

        defaults = {
            "index_background": True,
            "index_drop_dups": True,
            "index_opts": {"hello": "world"},
            "allow_inheritance": True,
            "queryset_class": "QuerySet",

@@ -65,7 +65,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
            assert (
                re.match("^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
                re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
                is not None
            )

@@ -74,7 +74,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase):
            "date_with_dots"
        ]
        assert (
            re.match("^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
            re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None
        )

    def test_complexdatetime_usage(self):

@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import pytest
from bson import InvalidDocument

from mongoengine import *
from mongoengine.base import BaseDict

@@ -19,22 +20,24 @@ class TestDictField(MongoDBTestCase):
        post = BlogPost(info=info).save()
        assert get_as_pymongo(post) == {"_id": post.id, "info": info}

    def test_general_things(self):
        """Ensure that dict types work as expected."""
    def test_validate_invalid_type(self):
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        invalid_infos = ["my post", ["test", "test"], {1: "test"}]
        for invalid_info in invalid_infos:
            with pytest.raises(ValidationError):
                BlogPost(info=invalid_info).validate()

    def test_keys_with_dots_or_dollars(self):
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost()
        post.info = "my post"
        with pytest.raises(ValidationError):
            post.validate()

        post.info = ["test", "test"]
        with pytest.raises(ValidationError):
            post.validate()

        post.info = {"$title": "test"}
        with pytest.raises(ValidationError):

@@ -48,25 +51,34 @@ class TestDictField(MongoDBTestCase):
        with pytest.raises(ValidationError):
            post.validate()

        post.info = {1: "test"}
        with pytest.raises(ValidationError):
            post.validate()

        post.info = {"nested": {"the.title": "test"}}
        if get_mongodb_version() < MONGODB_36:
            with pytest.raises(ValidationError):
                post.validate()
        # MongoDB < 3.6 rejects dots
        # To avoid checking the mongodb version from the DictField class
        # we rely on MongoDB to reject the data during the save
        post.validate()
        with pytest.raises(InvalidDocument):
            post.save()
        else:
            post.validate()

        post.info = {"dollar_and_dot": {"te$st.test": "test"}}
        if get_mongodb_version() < MONGODB_36:
            with pytest.raises(ValidationError):
                post.validate()
        post.validate()
        with pytest.raises(InvalidDocument):
            post.save()
        else:
            post.validate()

        post.info = {"title": "test"}
    def test_general_things(self):
        """Ensure that dict types work as expected."""

        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost(info={"title": "test"})
        post.save()

        post = BlogPost()

@@ -274,32 +274,47 @@ class TestQueryset(unittest.TestCase):
        with pytest.raises(InvalidQueryError):
            self.Person.objects(name="User A").with_id(person1.id)

    def test_find_only_one(self):
        """Ensure that a query using ``get`` returns at most one result.
        """
    def test_get_no_document_exists_raises_doesnotexist(self):
        assert self.Person.objects.count() == 0
        # Try retrieving when no objects exist
        with pytest.raises(DoesNotExist):
            self.Person.objects.get()
        with pytest.raises(self.Person.DoesNotExist):
            self.Person.objects.get()

    def test_get_multiple_match_raises_multipleobjectsreturned(self):
        """Ensure that a query using ``get`` returns at most one result.
        """
        assert self.Person.objects().count() == 0

        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)

        p = self.Person.objects.get()
        assert p == person1

        person2 = self.Person(name="User B", age=20)
        person2.save()

        # Retrieve the first person from the database
        person3 = self.Person(name="User C", age=30)
        person3.save()

        # .get called without argument
        with pytest.raises(MultipleObjectsReturned):
            self.Person.objects.get()
        with pytest.raises(self.Person.MultipleObjectsReturned):
            self.Person.objects.get()

        # check filtering
        with pytest.raises(MultipleObjectsReturned):
            self.Person.objects.get(age__lt=30)
        with pytest.raises(MultipleObjectsReturned) as exc_info:
            self.Person.objects(age__lt=30).get()
        assert "2 or more items returned, instead of 1" == str(exc_info.value)

        # Use a query to filter the people found to just person2
        person = self.Person.objects.get(age=30)
        assert person.name == "User B"

        person = self.Person.objects.get(age__lt=30)
        assert person.name == "User A"
        assert person == person3

    def test_find_array_position(self):
        """Ensure that query by array position works.

@@ -4476,6 +4491,74 @@ class TestQueryset(unittest.TestCase):
        expected = "[u'A1', u'A2']"
        assert expected == "%s" % sorted(names)

    def test_fields(self):
        class Bar(EmbeddedDocument):
            v = StringField()
            z = StringField()

        class Foo(Document):
            x = StringField()
            y = IntField()
            items = EmbeddedDocumentListField(Bar)

        Foo.drop_collection()

        Foo(x="foo1", y=1).save()
        Foo(x="foo2", y=2, items=[]).save()
        Foo(x="foo3", y=3, items=[Bar(z="a", v="V")]).save()
        Foo(
            x="foo4",
            y=4,
            items=[
                Bar(z="a", v="V"),
                Bar(z="b", v="W"),
                Bar(z="b", v="X"),
                Bar(z="c", v="V"),
            ],
        ).save()
        Foo(
            x="foo5",
            y=5,
            items=[
                Bar(z="b", v="X"),
                Bar(z="c", v="V"),
                Bar(z="d", v="V"),
                Bar(z="e", v="V"),
            ],
        ).save()

        foos_with_x = list(Foo.objects.order_by("y").fields(x=1))

        assert all(o.x is not None for o in foos_with_x)

        foos_without_y = list(Foo.objects.order_by("y").fields(y=0))

        assert all(o.y is None for o in foos_with_x)

        foos_with_sliced_items = list(Foo.objects.order_by("y").fields(slice__items=1))

        assert foos_with_sliced_items[0].items == []
        assert foos_with_sliced_items[1].items == []
        assert len(foos_with_sliced_items[2].items) == 1
        assert foos_with_sliced_items[2].items[0].z == "a"
        assert len(foos_with_sliced_items[3].items) == 1
        assert foos_with_sliced_items[3].items[0].z == "a"
        assert len(foos_with_sliced_items[4].items) == 1
        assert foos_with_sliced_items[4].items[0].z == "b"

        foos_with_elem_match_items = list(
            Foo.objects.order_by("y").fields(elemMatch__items={"z": "b"})
        )

        assert foos_with_elem_match_items[0].items == []
        assert foos_with_elem_match_items[1].items == []
        assert foos_with_elem_match_items[2].items == []
        assert len(foos_with_elem_match_items[3].items) == 1
        assert foos_with_elem_match_items[3].items[0].z == "b"
        assert foos_with_elem_match_items[3].items[0].v == "W"
        assert len(foos_with_elem_match_items[4].items) == 1
        assert foos_with_elem_match_items[4].items[0].z == "b"

    def test_elem_match(self):
        class Foo(EmbeddedDocument):
            shape = StringField()

@@ -24,6 +24,12 @@ class TestTransform(unittest.TestCase):
        }
        assert transform.query(friend__age__gte=30) == {"friend.age": {"$gte": 30}}
        assert transform.query(name__exists=True) == {"name": {"$exists": True}}
        assert transform.query(name=["Mark"], __raw__={"name": {"$in": "Tom"}}) == {
            "$and": [{"name": ["Mark"]}, {"name": {"$in": "Tom"}}]
        }
        assert transform.query(name__in=["Tom"], __raw__={"name": "Mark"}) == {
            "$and": [{"name": {"$in": ["Tom"]}}, {"name": "Mark"}]
        }

    def test_transform_update(self):
        class LisDoc(Document):