Merge branch 'master' of github.com:MongoEngine/mongoengine into add_migration_documentation
commit 0c274908ec
@@ -258,3 +258,4 @@ that much better:
 * Leonardo Domingues (https://github.com/leodmgs)
 * Agustin Barto (https://github.com/abarto)
 * Stankiewicz Mateusz (https://github.com/mas15)
+* Felix Schultheiß (https://github.com/felix-smashdocs)
@@ -6,6 +6,7 @@ Changelog
 Development
 ===========
 - (Fill this out as you fix issues and develop your features).
+- Bug fix in DynamicDocument which isn't parsing known fields in constructor like Document do #2412
 - When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
   and Cursor.count that got deprecated in pymongo >= 3.7.
   This should have a negative impact on performance of count see Issue #2219
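Editor's note: a minimal sketch (not part of the commit) of the pymongo API change this changelog entry refers to; the database, collection, and filter below are illustrative.

    from pymongo import MongoClient

    collection = MongoClient()["demo_db"]["animals"]

    # Deprecated since pymongo 3.7:
    #   n = collection.count({"is_mammal": True})
    #   n = collection.find({"is_mammal": True}).count()

    # Roughly what MongoEngine now calls when pymongo >= 3.7 is installed:
    n = collection.count_documents({"is_mammal": True})
    print(n)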
@@ -14,6 +15,7 @@ Development
 - Bug fix in ListField when updating the first item, it was saving the whole list, instead of
   just replacing the first item (as it's usually done) #2392
 - Add EnumField: ``mongoengine.fields.EnumField``
+- Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields

 Changes in 0.20.0
 =================
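Editor's note: a hedged sketch of the observable effect of the ListField fix (#2392) listed above; the document class and the update documents shown in the comments are illustrative, not taken from the commit.

    from mongoengine import Document, ListField, StringField, connect

    class Playlist(Document):
        songs = ListField(StringField())

    connect("listfield_demo")  # assumes a local mongod

    pl = Playlist(songs=["a", "b", "c"]).save()
    pl.songs[0] = "z"
    pl.save()
    # Before the fix, saving wrote the whole list back, roughly:
    #   {"$set": {"songs": ["z", "b", "c"]}}
    # With the fix, only the changed element is replaced, roughly:
    #   {"$set": {"songs.0": "z"}}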
@@ -31,6 +31,8 @@ the :attr:`host` to

     connect('project1', host='mongodb://localhost/database_name')

+.. note:: URI containing SRV records (e.g mongodb+srv://server.example.com/) can be used as well as the :attr:`host`
+
 .. note:: Database, username and password from URI string overrides
    corresponding parameters in :func:`~mongoengine.connect`: ::

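Editor's note: a hedged example of the two notes above; the SRV hostname, user, and password are placeholders, not from the diff.

    from mongoengine import connect

    # An SRV-record URI works the same way as a plain mongodb:// URI.
    connect(host="mongodb+srv://user:secret@cluster0.example.com/project1")

    # Database, username and password embedded in the URI override the
    # corresponding connect() parameters: the call below would connect to
    # 'project1', not 'some_other_name'.
    #   connect("some_other_name", host="mongodb://user:secret@localhost/project1")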
@@ -64,8 +64,6 @@ class BaseDocument:
         It may contain additional reserved keywords, e.g. "__auto_convert".
         :param __auto_convert: If True, supplied values will be converted
             to Python-type values via each field's `to_python` method.
-        :param __only_fields: A set of fields that have been loaded for
-            this document. Empty if all fields have been loaded.
         :param _created: Indicates whether this is a brand new document
             or whether it's already been persisted before. Defaults to true.
         """
@@ -80,8 +78,6 @@ class BaseDocument:

         __auto_convert = values.pop("__auto_convert", True)

-        __only_fields = set(values.pop("__only_fields", values))
-
         _created = values.pop("_created", True)

         signals.pre_init.send(self.__class__, document=self, values=values)
@@ -106,10 +102,8 @@ class BaseDocument:
         self._dynamic_fields = SON()

         # Assign default values to the instance.
-        # We set default values only for fields loaded from DB. See
-        # https://github.com/mongoengine/mongoengine/issues/399 for more info.
         for key, field in self._fields.items():
-            if self._db_field_map.get(key, key) in __only_fields:
+            if self._db_field_map.get(key, key) in values:
                 continue
             value = getattr(self, key, None)
             setattr(self, key, value)
@@ -117,25 +111,22 @@ class BaseDocument:
         if "_cls" not in values:
             self._cls = self._class_name

-        # Set passed values after initialisation
-        if self._dynamic:
-            dynamic_data = {}
-            for key, value in values.items():
-                if key in self._fields or key == "_id":
-                    setattr(self, key, value)
-                else:
-                    dynamic_data[key] = value
-        else:
-            FileField = _import_class("FileField")
-            for key, value in values.items():
-                key = self._reverse_db_field_map.get(key, key)
-                if key in self._fields or key in ("id", "pk", "_cls"):
-                    if __auto_convert and value is not None:
-                        field = self._fields.get(key)
-                        if field and not isinstance(field, FileField):
-                            value = field.to_python(value)
-                    setattr(self, key, value)
-                else:
-                    self._data[key] = value
+        # Set actual values
+        dynamic_data = {}
+        FileField = _import_class("FileField")
+        for key, value in values.items():
+            key = self._reverse_db_field_map.get(key, key)
+            field = self._fields.get(key)
+            if field or key in ("id", "pk", "_cls"):
+                if __auto_convert and value is not None:
+                    if field and not isinstance(field, FileField):
+                        value = field.to_python(value)
+                setattr(self, key, value)
+            else:
+                if self._dynamic:
+                    dynamic_data[key] = value
+                else:
+                    # For strict Document
+                    self._data[key] = value

         # Set any get_<field>_display methods
@@ -758,11 +749,8 @@ class BaseDocument:
         return cls._meta.get("collection", None)

     @classmethod
-    def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
+    def _from_son(cls, son, _auto_dereference=True, created=False):
         """Create an instance of a Document (subclass) from a PyMongo SON."""
-        if not only_fields:
-            only_fields = []
-
         if son and not isinstance(son, dict):
             raise ValueError(
                 "The source SON object needs to be of type 'dict' but a '%s' was found"
@@ -817,9 +805,7 @@ class BaseDocument:
         if cls.STRICT:
             data = {k: v for k, v in data.items() if k in cls._fields}

-        obj = cls(
-            __auto_convert=False, _created=created, __only_fields=only_fields, **data
-        )
+        obj = cls(__auto_convert=False, _created=created, **data)
         obj._changed_fields = []
         if not _auto_dereference:
             obj._fields = fields
@@ -464,9 +464,9 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
             # insert_one will provoke UniqueError alongside save does not
             # therefore, it need to catch and call replace_one.
             if "_id" in doc:
-                raw_object = wc_collection.find_one_and_replace(
-                    {"_id": doc["_id"]}, doc
-                )
+                select_dict = {"_id": doc["_id"]}
+                select_dict = self._integrate_shard_key(doc, select_dict)
+                raw_object = wc_collection.find_one_and_replace(select_dict, doc)
                 if raw_object:
                     return doc["_id"]

@@ -489,6 +489,23 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):

         return update_doc

+    def _integrate_shard_key(self, doc, select_dict):
+        """Integrates the collection's shard key to the `select_dict`, which will be used for the query.
+        The value from the shard key is taken from the `doc` and finally the select_dict is returned.
+        """
+
+        # Need to add shard key to query, or you get an error
+        shard_key = self._meta.get("shard_key", tuple())
+        for k in shard_key:
+            path = self._lookup_field(k.split("."))
+            actual_key = [p.db_field for p in path]
+            val = doc
+            for ak in actual_key:
+                val = val[ak]
+            select_dict[".".join(actual_key)] = val
+
+        return select_dict
+
     def _save_update(self, doc, save_condition, write_concern):
         """Update an existing document.

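Editor's note: a hedged sketch (not part of the diff) of what the new `_integrate_shard_key` helper contributes to the save query. The Animal class mirrors the test added later in this commit; the database name is an assumption.

    from mongoengine import BooleanField, Document, StringField, connect

    class Animal(Document):
        is_mammal = BooleanField()
        name = StringField()
        meta = {"shard_key": ("is_mammal",)}

    connect("shard_demo")  # assumes a local mongod
    doc = Animal(is_mammal=True, name="Dog").save()

    # The helper extends the _id filter with the shard key value taken from the
    # document's SON, so find_one_and_replace targets the right shard:
    select_dict = doc._integrate_shard_key(doc.to_mongo(), {"_id": doc.pk})
    print(select_dict)  # e.g. {'_id': ObjectId('...'), 'is_mammal': True}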
@@ -504,15 +521,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):

         select_dict["_id"] = object_id

-        # Need to add shard key to query, or you get an error
-        shard_key = self._meta.get("shard_key", tuple())
-        for k in shard_key:
-            path = self._lookup_field(k.split("."))
-            actual_key = [p.db_field for p in path]
-            val = doc
-            for ak in actual_key:
-                val = val[ak]
-            select_dict[".".join(actual_key)] = val
+        select_dict = self._integrate_shard_key(doc, select_dict)

         update_doc = self._get_update_doc()
         if update_doc:
@@ -1623,7 +1623,9 @@ class BinaryField(BaseField):


 class EnumField(BaseField):
-    """Enumeration Field. Values are stored underneath as strings.
+    """Enumeration Field. Values are stored underneath as is,
+    so it will only work with simple types (str, int, etc) that
+    are bson encodable
     Example usage:
     .. code-block:: python

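Editor's note: a hedged usage example of the EnumField described in the docstring above; the class names and database are illustrative.

    from enum import Enum
    from mongoengine import Document, EnumField, connect

    class Status(Enum):
        NEW = "new"
        DONE = "done"

    class Task(Document):
        status = EnumField(Status, default=Status.NEW)

    connect("enum_demo")  # assumes a local mongod
    Task(status=Status.DONE).save()
    # The raw enum value ("done") is what gets stored, which is why the values
    # must be BSON-encodable; compare the FunkyColor test added further below.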
@@ -88,7 +88,6 @@ class BaseQuerySet:
         self._hint = -1  # Using -1 as None is a valid value for hint
         self._collation = None
         self._batch_size = None
-        self.only_fields = []
         self._max_time_ms = None
         self._comment = None

@@ -190,9 +189,7 @@ class BaseQuerySet:
             if queryset._scalar:
                 return queryset._get_scalar(
                     queryset._document._from_son(
-                        queryset._cursor[key],
-                        _auto_dereference=self._auto_dereference,
-                        only_fields=self.only_fields,
+                        queryset._cursor[key], _auto_dereference=self._auto_dereference,
                     )
                 )

@@ -200,9 +197,7 @@ class BaseQuerySet:
                 return queryset._cursor[key]

             return queryset._document._from_son(
-                queryset._cursor[key],
-                _auto_dereference=self._auto_dereference,
-                only_fields=self.only_fields,
+                queryset._cursor[key], _auto_dereference=self._auto_dereference,
             )

         raise TypeError("Provide a slice or an integer index")
@@ -719,12 +714,10 @@ class BaseQuerySet:

         if full_response:
             if result["value"] is not None:
-                result["value"] = self._document._from_son(
-                    result["value"], only_fields=self.only_fields
-                )
+                result["value"] = self._document._from_son(result["value"])
         else:
             if result is not None:
-                result = self._document._from_son(result, only_fields=self.only_fields)
+                result = self._document._from_son(result)

         return result

@@ -757,18 +750,14 @@ class BaseQuerySet:
         docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args)
         if self._scalar:
             for doc in docs:
-                doc_map[doc["_id"]] = self._get_scalar(
-                    self._document._from_son(doc, only_fields=self.only_fields)
-                )
+                doc_map[doc["_id"]] = self._get_scalar(self._document._from_son(doc))
         elif self._as_pymongo:
             for doc in docs:
                 doc_map[doc["_id"]] = doc
         else:
             for doc in docs:
                 doc_map[doc["_id"]] = self._document._from_son(
-                    doc,
-                    only_fields=self.only_fields,
-                    _auto_dereference=self._auto_dereference,
+                    doc, _auto_dereference=self._auto_dereference,
                 )

         return doc_map
@@ -841,7 +830,6 @@ class BaseQuerySet:
             "_collation",
             "_auto_dereference",
             "_search_text",
-            "only_fields",
             "_max_time_ms",
             "_comment",
             "_batch_size",
@@ -1045,7 +1033,6 @@ class BaseQuerySet:
         .. versionchanged:: 0.5 - Added subfield support
         """
         fields = {f: QueryFieldList.ONLY for f in fields}
-        self.only_fields = list(fields.keys())
         return self.fields(True, **fields)

     def exclude(self, *fields):
@@ -1310,10 +1297,7 @@ class BaseQuerySet:
     def from_json(self, json_data):
         """Converts json data to unsaved objects"""
         son_data = json_util.loads(json_data)
-        return [
-            self._document._from_son(data, only_fields=self.only_fields)
-            for data in son_data
-        ]
+        return [self._document._from_son(data) for data in son_data]

     def aggregate(self, pipeline, *suppl_pipeline, **kwargs):
         """Perform a aggregate function based in your queryset params
@@ -1638,9 +1622,7 @@ class BaseQuerySet:
             return raw_doc

         doc = self._document._from_son(
-            raw_doc,
-            _auto_dereference=self._auto_dereference,
-            only_fields=self.only_fields,
+            raw_doc, _auto_dereference=self._auto_dereference,
         )

         if self._scalar:
@@ -37,6 +37,19 @@ class TestDynamicDocument(MongoDBTestCase):
         # Confirm no changes to self.Person
         assert not hasattr(self.Person, "age")

+    def test_dynamic_document_parse_values_in_constructor_like_document_do(self):
+        class ProductDynamicDocument(DynamicDocument):
+            title = StringField()
+            price = FloatField()
+
+        class ProductDocument(Document):
+            title = StringField()
+            price = FloatField()
+
+        product = ProductDocument(title="Blabla", price="12.5")
+        dyn_product = ProductDynamicDocument(title="Blabla", price="12.5")
+        assert product.price == dyn_product.price == 12.5
+
     def test_change_scope_of_variable(self):
         """Test changing the scope of a dynamic field has no adverse effects"""
         p = self.Person()
@@ -500,7 +500,7 @@ class TestDocumentInstance(MongoDBTestCase):
         doc.reload()
         Animal.drop_collection()

-    def test_update_shard_key_routing(self):
+    def test_save_update_shard_key_routing(self):
         """Ensures updating a doc with a specified shard_key includes it in
         the query.
         """
@@ -528,6 +528,29 @@ class TestDocumentInstance(MongoDBTestCase):

         Animal.drop_collection()

+    def test_save_create_shard_key_routing(self):
+        """Ensures inserting a doc with a specified shard_key includes it in
+        the query.
+        """
+
+        class Animal(Document):
+            _id = UUIDField(binary=False, primary_key=True, default=uuid.uuid4)
+            is_mammal = BooleanField()
+            name = StringField()
+            meta = {"shard_key": ("is_mammal",)}
+
+        Animal.drop_collection()
+        doc = Animal(is_mammal=True, name="Dog")
+
+        with query_counter() as q:
+            doc.save()
+            query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0]
+            assert query_op["op"] == "command"
+            assert query_op["command"]["findAndModify"] == "animal"
+            assert set(query_op["command"]["query"].keys()) == set(["_id", "is_mammal"])
+
+        Animal.drop_collection()
+
     def test_reload_with_changed_fields(self):
         """Ensures reloading will not affect changed fields"""

@@ -3411,7 +3434,7 @@ class TestDocumentInstance(MongoDBTestCase):
         assert obj3 != dbref2
         assert dbref2 != obj3

-    def test_default_values(self):
+    def test_default_values_dont_get_override_upon_save_when_only_is_used(self):
         class Person(Document):
             created_on = DateTimeField(default=lambda: datetime.utcnow())
             name = StringField()
@@ -1,5 +1,6 @@
 from enum import Enum

+from bson import InvalidDocument
 import pytest

 from mongoengine import *
@@ -105,3 +106,17 @@ class TestIntEnumField(MongoDBTestCase):

         with pytest.raises(ValidationError, match="Value must be one of"):
             ModelWithColor(color="wrong_type").validate()
+
+
+class TestFunkyEnumField(MongoDBTestCase):
+    def test_enum_incompatible_bson_type_fails_during_save(self):
+        class FunkyColor(Enum):
+            YELLOW = object()
+
+        class ModelWithFunkyColor(Document):
+            color = EnumField(FunkyColor)
+
+        m = ModelWithFunkyColor(color=FunkyColor.YELLOW)
+
+        with pytest.raises(InvalidDocument, match="[cC]annot encode object"):
+            m.save()