Merge branch 'master' of github.com:MongoEngine/mongoengine into add_migration_documentation

Bastien Gérard 2020-11-11 21:18:52 +01:00
commit 0c274908ec
10 changed files with 109 additions and 74 deletions

View File

@@ -258,3 +258,4 @@ that much better:
* Leonardo Domingues (https://github.com/leodmgs)
* Agustin Barto (https://github.com/abarto)
* Stankiewicz Mateusz (https://github.com/mas15)
* Felix Schultheiß (https://github.com/felix-smashdocs)

View File

@@ -6,6 +6,7 @@ Changelog
Development
===========
- (Fill this out as you fix issues and develop your features).
- Bug fix in DynamicDocument which was not parsing known fields in the constructor the way Document does #2412
- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
and Cursor.count, which were deprecated in pymongo >= 3.7.
This is expected to have a negative impact on the performance of count; see Issue #2219
@@ -14,6 +15,7 @@ Development
- Bug fix in ListField: when updating the first item, it was saving the whole list instead of
just replacing the first item (as is usually done) #2392
- Add EnumField: ``mongoengine.fields.EnumField``
- Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields
Changes in 0.20.0
=================

View File

@@ -31,6 +31,8 @@ the :attr:`host` to
connect('project1', host='mongodb://localhost/database_name')
.. note:: A URI containing SRV records (e.g. mongodb+srv://server.example.com/) can also be used as the :attr:`host`
.. note:: The database, username and password from the URI string override the
corresponding parameters in :func:`~mongoengine.connect`: ::
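
    # A minimal illustrative sketch; the credentials and database names below are
    # placeholders. The database name, username and password from the URI take
    # precedence over the corresponding arguments passed to connect().
    connect(
        db='not_used',
        username='not_used',
        password='not_used',
        host='mongodb://admin:secret@localhost/production'
    )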

View File

@@ -64,8 +64,6 @@ class BaseDocument:
It may contain additional reserved keywords, e.g. "__auto_convert".
:param __auto_convert: If True, supplied values will be converted
to Python-type values via each field's `to_python` method.
:param __only_fields: A set of fields that have been loaded for
this document. Empty if all fields have been loaded.
:param _created: Indicates whether this is a brand new document
or whether it's already been persisted before. Defaults to true.
"""
@@ -80,8 +78,6 @@ class BaseDocument:
__auto_convert = values.pop("__auto_convert", True)
__only_fields = set(values.pop("__only_fields", values))
_created = values.pop("_created", True)
signals.pre_init.send(self.__class__, document=self, values=values)
@@ -106,10 +102,8 @@ class BaseDocument:
self._dynamic_fields = SON()
# Assign default values to the instance.
# We set default values only for fields loaded from DB. See
# https://github.com/mongoengine/mongoengine/issues/399 for more info.
for key, field in self._fields.items():
if self._db_field_map.get(key, key) in __only_fields:
if self._db_field_map.get(key, key) in values:
continue
value = getattr(self, key, None)
setattr(self, key, value)
@@ -117,25 +111,22 @@ class BaseDocument:
if "_cls" not in values:
self._cls = self._class_name
# Set passed values after initialisation
if self._dynamic:
dynamic_data = {}
for key, value in values.items():
if key in self._fields or key == "_id":
setattr(self, key, value)
else:
# Set actual values
dynamic_data = {}
FileField = _import_class("FileField")
for key, value in values.items():
key = self._reverse_db_field_map.get(key, key)
field = self._fields.get(key)
if field or key in ("id", "pk", "_cls"):
if __auto_convert and value is not None:
if field and not isinstance(field, FileField):
value = field.to_python(value)
setattr(self, key, value)
else:
if self._dynamic:
dynamic_data[key] = value
else:
FileField = _import_class("FileField")
for key, value in values.items():
key = self._reverse_db_field_map.get(key, key)
if key in self._fields or key in ("id", "pk", "_cls"):
if __auto_convert and value is not None:
field = self._fields.get(key)
if field and not isinstance(field, FileField):
value = field.to_python(value)
setattr(self, key, value)
else:
# For strict Document
self._data[key] = value
# Set any get_<field>_display methods
@@ -758,11 +749,8 @@ class BaseDocument:
return cls._meta.get("collection", None)
@classmethod
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
def _from_son(cls, son, _auto_dereference=True, created=False):
"""Create an instance of a Document (subclass) from a PyMongo SON."""
if not only_fields:
only_fields = []
if son and not isinstance(son, dict):
raise ValueError(
"The source SON object needs to be of type 'dict' but a '%s' was found"
@@ -817,9 +805,7 @@ class BaseDocument:
if cls.STRICT:
data = {k: v for k, v in data.items() if k in cls._fields}
obj = cls(
__auto_convert=False, _created=created, __only_fields=only_fields, **data
)
obj = cls(__auto_convert=False, _created=created, **data)
obj._changed_fields = []
if not _auto_dereference:
obj._fields = fields

View File

@@ -464,9 +464,9 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
# insert_one will raise a UniqueError where save would not;
# therefore we need to catch it and call replace_one instead.
if "_id" in doc:
raw_object = wc_collection.find_one_and_replace(
{"_id": doc["_id"]}, doc
)
select_dict = {"_id": doc["_id"]}
select_dict = self._integrate_shard_key(doc, select_dict)
raw_object = wc_collection.find_one_and_replace(select_dict, doc)
if raw_object:
return doc["_id"]
@@ -489,6 +489,23 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
return update_doc
def _integrate_shard_key(self, doc, select_dict):
"""Integrates the collection's shard key to the `select_dict`, which will be used for the query.
The value from the shard key is taken from the `doc` and finally the select_dict is returned.
"""
# Need to add shard key to query, or you get an error
shard_key = self._meta.get("shard_key", tuple())
for k in shard_key:
path = self._lookup_field(k.split("."))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict[".".join(actual_key)] = val
return select_dict
def _save_update(self, doc, save_condition, write_concern):
"""Update an existing document.
@@ -504,15 +521,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
select_dict["_id"] = object_id
# Need to add shard key to query, or you get an error
shard_key = self._meta.get("shard_key", tuple())
for k in shard_key:
path = self._lookup_field(k.split("."))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict[".".join(actual_key)] = val
select_dict = self._integrate_shard_key(doc, select_dict)
update_doc = self._get_update_doc()
if update_doc:
@@ -919,7 +928,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
@classmethod
def list_indexes(cls):
""" Lists all of the indexes that should be created for given
"""Lists all of the indexes that should be created for given
collection. It includes all the indexes from super- and sub-classes.
"""
if cls._meta.get("abstract"):
@@ -984,7 +993,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
@classmethod
def compare_indexes(cls):
""" Compares the indexes defined in MongoEngine with the ones
"""Compares the indexes defined in MongoEngine with the ones
existing in the database. Returns any missing/extra indexes.
"""

View File

@@ -1623,7 +1623,9 @@ class BinaryField(BaseField):
class EnumField(BaseField):
"""Enumeration Field. Values are stored underneath as strings.
"""Enumeration Field. Values are stored underneath as is,
so it will only work with simple types (str, int, etc) that
are bson encodable
Example usage:
.. code-block:: python
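
    # A hedged sketch of typical usage; the Status and ModelWithStatus names are
    # illustrative. Both the raw value and the Enum member are accepted.
    from enum import Enum

    from mongoengine import Document, EnumField

    class Status(Enum):
        NEW = 'new'
        DONE = 'done'

    class ModelWithStatus(Document):
        status = EnumField(Status, default=Status.NEW)

    ModelWithStatus(status='done')
    ModelWithStatus(status=Status.DONE)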

View File

@@ -88,7 +88,6 @@ class BaseQuerySet:
self._hint = -1 # Using -1 as None is a valid value for hint
self._collation = None
self._batch_size = None
self.only_fields = []
self._max_time_ms = None
self._comment = None
@@ -190,9 +189,7 @@ class BaseQuerySet:
if queryset._scalar:
return queryset._get_scalar(
queryset._document._from_son(
queryset._cursor[key],
_auto_dereference=self._auto_dereference,
only_fields=self.only_fields,
queryset._cursor[key], _auto_dereference=self._auto_dereference,
)
)
@@ -200,9 +197,7 @@ class BaseQuerySet:
return queryset._cursor[key]
return queryset._document._from_son(
queryset._cursor[key],
_auto_dereference=self._auto_dereference,
only_fields=self.only_fields,
queryset._cursor[key], _auto_dereference=self._auto_dereference,
)
raise TypeError("Provide a slice or an integer index")
@@ -719,12 +714,10 @@ class BaseQuerySet:
if full_response:
if result["value"] is not None:
result["value"] = self._document._from_son(
result["value"], only_fields=self.only_fields
)
result["value"] = self._document._from_son(result["value"])
else:
if result is not None:
result = self._document._from_son(result, only_fields=self.only_fields)
result = self._document._from_son(result)
return result
@@ -757,18 +750,14 @@ class BaseQuerySet:
docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args)
if self._scalar:
for doc in docs:
doc_map[doc["_id"]] = self._get_scalar(
self._document._from_son(doc, only_fields=self.only_fields)
)
doc_map[doc["_id"]] = self._get_scalar(self._document._from_son(doc))
elif self._as_pymongo:
for doc in docs:
doc_map[doc["_id"]] = doc
else:
for doc in docs:
doc_map[doc["_id"]] = self._document._from_son(
doc,
only_fields=self.only_fields,
_auto_dereference=self._auto_dereference,
doc, _auto_dereference=self._auto_dereference,
)
return doc_map
@@ -841,7 +830,6 @@ class BaseQuerySet:
"_collation",
"_auto_dereference",
"_search_text",
"only_fields",
"_max_time_ms",
"_comment",
"_batch_size",
@@ -1045,7 +1033,6 @@ class BaseQuerySet:
.. versionchanged:: 0.5 - Added subfield support
"""
fields = {f: QueryFieldList.ONLY for f in fields}
self.only_fields = list(fields.keys())
return self.fields(True, **fields)
def exclude(self, *fields):
@@ -1310,10 +1297,7 @@ class BaseQuerySet:
def from_json(self, json_data):
"""Converts json data to unsaved objects"""
son_data = json_util.loads(json_data)
return [
self._document._from_son(data, only_fields=self.only_fields)
for data in son_data
]
return [self._document._from_son(data) for data in son_data]
def aggregate(self, pipeline, *suppl_pipeline, **kwargs):
"""Perform a aggregate function based in your queryset params
@@ -1638,9 +1622,7 @@ class BaseQuerySet:
return raw_doc
doc = self._document._from_son(
raw_doc,
_auto_dereference=self._auto_dereference,
only_fields=self.only_fields,
raw_doc, _auto_dereference=self._auto_dereference,
)
if self._scalar:

View File

@@ -37,6 +37,19 @@ class TestDynamicDocument(MongoDBTestCase):
# Confirm no changes to self.Person
assert not hasattr(self.Person, "age")
def test_dynamic_document_parse_values_in_constructor_like_document_do(self):
class ProductDynamicDocument(DynamicDocument):
title = StringField()
price = FloatField()
class ProductDocument(Document):
title = StringField()
price = FloatField()
product = ProductDocument(title="Blabla", price="12.5")
dyn_product = ProductDynamicDocument(title="Blabla", price="12.5")
assert product.price == dyn_product.price == 12.5
def test_change_scope_of_variable(self):
"""Test changing the scope of a dynamic field has no adverse effects"""
p = self.Person()

View File

@@ -500,7 +500,7 @@ class TestDocumentInstance(MongoDBTestCase):
doc.reload()
Animal.drop_collection()
def test_update_shard_key_routing(self):
def test_save_update_shard_key_routing(self):
"""Ensures updating a doc with a specified shard_key includes it in
the query.
"""
@@ -528,6 +528,29 @@ class TestDocumentInstance(MongoDBTestCase):
Animal.drop_collection()
def test_save_create_shard_key_routing(self):
"""Ensures inserting a doc with a specified shard_key includes it in
the query.
"""
class Animal(Document):
_id = UUIDField(binary=False, primary_key=True, default=uuid.uuid4)
is_mammal = BooleanField()
name = StringField()
meta = {"shard_key": ("is_mammal",)}
Animal.drop_collection()
doc = Animal(is_mammal=True, name="Dog")
with query_counter() as q:
doc.save()
query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0]
assert query_op["op"] == "command"
assert query_op["command"]["findAndModify"] == "animal"
assert set(query_op["command"]["query"].keys()) == set(["_id", "is_mammal"])
Animal.drop_collection()
def test_reload_with_changed_fields(self):
"""Ensures reloading will not affect changed fields"""
@@ -3411,7 +3434,7 @@ class TestDocumentInstance(MongoDBTestCase):
assert obj3 != dbref2
assert dbref2 != obj3
def test_default_values(self):
def test_default_values_dont_get_override_upon_save_when_only_is_used(self):
class Person(Document):
created_on = DateTimeField(default=lambda: datetime.utcnow())
name = StringField()

View File

@@ -1,5 +1,6 @@
from enum import Enum
from bson import InvalidDocument
import pytest
from mongoengine import *
@@ -105,3 +106,17 @@ class TestIntEnumField(MongoDBTestCase):
with pytest.raises(ValidationError, match="Value must be one of"):
ModelWithColor(color="wrong_type").validate()
class TestFunkyEnumField(MongoDBTestCase):
def test_enum_incompatible_bson_type_fails_during_save(self):
class FunkyColor(Enum):
YELLOW = object()
class ModelWithFunkyColor(Document):
color = EnumField(FunkyColor)
m = ModelWithFunkyColor(color=FunkyColor.YELLOW)
with pytest.raises(InvalidDocument, match="[cC]annot encode object"):
m.save()