Merge branch 'master' of github.com:MongoEngine/mongoengine into fix_complex_datetime_field_invalid_string_set
		| @@ -1,8 +1,6 @@ | ||||
| import weakref | ||||
|  | ||||
| from bson import DBRef | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||
| @@ -53,7 +51,7 @@ class BaseDict(dict): | ||||
|         if isinstance(instance, BaseDocument): | ||||
|             self._instance = weakref.proxy(instance) | ||||
|         self._name = name | ||||
|         super(BaseDict, self).__init__(dict_items) | ||||
|         super().__init__(dict_items) | ||||
|  | ||||
|     def get(self, key, default=None): | ||||
|         # get does not use __getitem__ by default so we must override it as well | ||||
| @@ -63,18 +61,18 @@ class BaseDict(dict): | ||||
|             return default | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
|         value = super(BaseDict, self).__getitem__(key) | ||||
|         value = super().__getitem__(key) | ||||
|  | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||
|             value = BaseDict(value, None, "%s.%s" % (self._name, key)) | ||||
|             super(BaseDict, self).__setitem__(key, value) | ||||
|             value = BaseDict(value, None, "{}.{}".format(self._name, key)) | ||||
|             super().__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||
|             value = BaseList(value, None, "%s.%s" % (self._name, key)) | ||||
|             super(BaseDict, self).__setitem__(key, value) | ||||
|             value = BaseList(value, None, "{}.{}".format(self._name, key)) | ||||
|             super().__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         return value | ||||
|  | ||||
| @@ -99,7 +97,7 @@ class BaseDict(dict): | ||||
|     def _mark_as_changed(self, key=None): | ||||
|         if hasattr(self._instance, "_mark_as_changed"): | ||||
|             if key: | ||||
|                 self._instance._mark_as_changed("%s.%s" % (self._name, key)) | ||||
|                 self._instance._mark_as_changed("{}.{}".format(self._name, key)) | ||||
|             else: | ||||
|                 self._instance._mark_as_changed(self._name) | ||||
|  | ||||
| @@ -117,13 +115,13 @@ class BaseList(list): | ||||
|         if isinstance(instance, BaseDocument): | ||||
|             self._instance = weakref.proxy(instance) | ||||
|         self._name = name | ||||
|         super(BaseList, self).__init__(list_items) | ||||
|         super().__init__(list_items) | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
|         # change index to positive value because MongoDB does not support negative one | ||||
|         if isinstance(key, int) and key < 0: | ||||
|             key = len(self) + key | ||||
|         value = super(BaseList, self).__getitem__(key) | ||||
|         value = super().__getitem__(key) | ||||
|  | ||||
|         if isinstance(key, slice): | ||||
|             # When receiving a slice operator, we don't convert the structure and bind | ||||
| @@ -135,19 +133,18 @@ class BaseList(list): | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||
|             # Replace dict by BaseDict | ||||
|             value = BaseDict(value, None, "%s.%s" % (self._name, key)) | ||||
|             super(BaseList, self).__setitem__(key, value) | ||||
|             value = BaseDict(value, None, "{}.{}".format(self._name, key)) | ||||
|             super().__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||
|             # Replace list by BaseList | ||||
|             value = BaseList(value, None, "%s.%s" % (self._name, key)) | ||||
|             super(BaseList, self).__setitem__(key, value) | ||||
|             value = BaseList(value, None, "{}.{}".format(self._name, key)) | ||||
|             super().__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         return value | ||||
|  | ||||
|     def __iter__(self): | ||||
|         for v in super(BaseList, self).__iter__(): | ||||
|             yield v | ||||
|         yield from super().__iter__() | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         self.instance = None | ||||
| @@ -165,7 +162,7 @@ class BaseList(list): | ||||
|             # instead, we simply marks the whole list as changed | ||||
|             changed_key = None | ||||
|  | ||||
|         result = super(BaseList, self).__setitem__(key, value) | ||||
|         result = super().__setitem__(key, value) | ||||
|         self._mark_as_changed(changed_key) | ||||
|         return result | ||||
|  | ||||
| @@ -180,30 +177,19 @@ class BaseList(list): | ||||
|     __iadd__ = mark_as_changed_wrapper(list.__iadd__) | ||||
|     __imul__ = mark_as_changed_wrapper(list.__imul__) | ||||
|  | ||||
|     if six.PY2: | ||||
|         # Under py3 __setslice__, __delslice__ and __getslice__ | ||||
|         # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter | ||||
|         # so we mimic this under python 2 | ||||
|         def __setslice__(self, i, j, sequence): | ||||
|             return self.__setitem__(slice(i, j), sequence) | ||||
|  | ||||
|         def __delslice__(self, i, j): | ||||
|             return self.__delitem__(slice(i, j)) | ||||
|  | ||||
|         def __getslice__(self, i, j): | ||||
|             return self.__getitem__(slice(i, j)) | ||||
|  | ||||
|     def _mark_as_changed(self, key=None): | ||||
|         if hasattr(self._instance, "_mark_as_changed"): | ||||
|             if key: | ||||
|                 self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self))) | ||||
|                 self._instance._mark_as_changed( | ||||
|                     "{}.{}".format(self._name, key % len(self)) | ||||
|                 ) | ||||
|             else: | ||||
|                 self._instance._mark_as_changed(self._name) | ||||
|  | ||||
|  | ||||
| class EmbeddedDocumentList(BaseList): | ||||
|     def __init__(self, list_items, instance, name): | ||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) | ||||
|         super().__init__(list_items, instance, name) | ||||
|         self._instance = instance | ||||
|  | ||||
|     @classmethod | ||||
| @@ -213,7 +199,7 @@ class EmbeddedDocumentList(BaseList): | ||||
|         """ | ||||
|         for key, expected_value in kwargs.items(): | ||||
|             doc_val = getattr(embedded_doc, key) | ||||
|             if doc_val != expected_value and six.text_type(doc_val) != expected_value: | ||||
|             if doc_val != expected_value and str(doc_val) != expected_value: | ||||
|                 return False | ||||
|         return True | ||||
|  | ||||
| @@ -368,13 +354,13 @@ class EmbeddedDocumentList(BaseList): | ||||
|         return len(values) | ||||
|  | ||||
|  | ||||
| class StrictDict(object): | ||||
| class StrictDict: | ||||
|     __slots__ = () | ||||
|     _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} | ||||
|     _classes = {} | ||||
|  | ||||
|     def __init__(self, **kwargs): | ||||
|         for k, v in iteritems(kwargs): | ||||
|         for k, v in kwargs.items(): | ||||
|             setattr(self, k, v) | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
| @@ -422,13 +408,13 @@ class StrictDict(object): | ||||
|         return (key for key in self.__slots__ if hasattr(self, key)) | ||||
|  | ||||
|     def __len__(self): | ||||
|         return len(list(iteritems(self))) | ||||
|         return len(list(self.items())) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         return self.items() == other.items() | ||||
|         return list(self.items()) == list(other.items()) | ||||
|  | ||||
|     def __ne__(self, other): | ||||
|         return self.items() != other.items() | ||||
|         return not (self == other) | ||||
|  | ||||
|     @classmethod | ||||
|     def create(cls, allowed_keys): | ||||
| @@ -443,7 +429,7 @@ class StrictDict(object): | ||||
|  | ||||
|                 def __repr__(self): | ||||
|                     return "{%s}" % ", ".join( | ||||
|                         '"{0!s}": {1!r}'.format(k, v) for k, v in self.items() | ||||
|                         '"{!s}": {!r}'.format(k, v) for k, v in self.items() | ||||
|                     ) | ||||
|  | ||||
|             cls._classes[allowed_keys] = SpecificStrictDict | ||||
| @@ -468,9 +454,7 @@ class LazyReference(DBRef): | ||||
|         self.document_type = document_type | ||||
|         self._cached_doc = cached_doc | ||||
|         self.passthrough = passthrough | ||||
|         super(LazyReference, self).__init__( | ||||
|             self.document_type._get_collection_name(), pk | ||||
|         ) | ||||
|         super().__init__(self.document_type._get_collection_name(), pk) | ||||
|  | ||||
|     def __getitem__(self, name): | ||||
|         if not self.passthrough: | ||||
| @@ -488,4 +472,4 @@ class LazyReference(DBRef): | ||||
|             raise AttributeError() | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return "<LazyReference(%s, %r)>" % (self.document_type, self.pk) | ||||
|         return "<LazyReference({}, {!r})>".format(self.document_type, self.pk) | ||||
|   | ||||
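The datastructures hunks above all apply the same Python 3 modernizations: zero-argument super() instead of super(BaseDict, self), str.format() instead of %-interpolation, "yield from" instead of a manual re-yield loop, and deletion of the __setslice__/__delslice__/__getslice__ shims that only Python 2 needed. A minimal sketch of that pattern, using a hypothetical TrackedList and Owner that are not part of MongoEngine, might look like this:

    import weakref

    class TrackedList(list):
        """Toy stand-in for BaseList: remembers its owner and the dotted field name."""

        def __init__(self, items, instance, name):
            # weakref.proxy avoids a reference cycle between the list and its owner
            self._instance = weakref.proxy(instance) if instance is not None else None
            self._name = name
            super().__init__(items)              # Python 3: no super(TrackedList, self)

        def __setitem__(self, key, value):
            result = super().__setitem__(key, value)
            self._mark_as_changed(key)
            return result

        def __iter__(self):
            # "yield from" replaces the explicit "for v in ...: yield v" loop
            yield from super().__iter__()

        def _mark_as_changed(self, key=None):
            owner = self._instance
            if owner is not None and hasattr(owner, "_mark_as_changed"):
                if key is not None:
                    owner._mark_as_changed("{}.{}".format(self._name, key))
                else:
                    owner._mark_as_changed(self._name)

    class Owner:
        def _mark_as_changed(self, key):
            print("changed:", key)

    doc = Owner()
    tags = TrackedList(["a", "b"], doc, "tags")
    tags[0] = "c"                                # prints: changed: tags.0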
| @@ -1,11 +1,10 @@ | ||||
| import copy | ||||
|  | ||||
| import numbers | ||||
| from functools import partial | ||||
|  | ||||
| from bson import DBRef, ObjectId, SON, json_util | ||||
| import pymongo | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.base.common import get_document | ||||
| @@ -25,14 +24,13 @@ from mongoengine.errors import ( | ||||
|     OperationError, | ||||
|     ValidationError, | ||||
| ) | ||||
| from mongoengine.python_support import Hashable | ||||
|  | ||||
| __all__ = ("BaseDocument", "NON_FIELD_ERRORS") | ||||
|  | ||||
| NON_FIELD_ERRORS = "__all__" | ||||
|  | ||||
|  | ||||
| class BaseDocument(object): | ||||
| class BaseDocument: | ||||
|     # TODO simplify how `_changed_fields` is used. | ||||
|     # Currently, handling of `_changed_fields` seems unnecessarily convoluted: | ||||
|     # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's | ||||
| @@ -92,10 +90,10 @@ class BaseDocument(object): | ||||
|         # if so raise an Exception. | ||||
|         if not self._dynamic and (self._meta.get("strict", True) or _created): | ||||
|             _undefined_fields = set(values.keys()) - set( | ||||
|                 self._fields.keys() + ["id", "pk", "_cls", "_text_score"] | ||||
|                 list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"] | ||||
|             ) | ||||
|             if _undefined_fields: | ||||
|                 msg = ('The fields "{0}" do not exist on the document "{1}"').format( | ||||
|                 msg = ('The fields "{}" do not exist on the document "{}"').format( | ||||
|                     _undefined_fields, self._class_name | ||||
|                 ) | ||||
|                 raise FieldDoesNotExist(msg) | ||||
| @@ -110,7 +108,7 @@ class BaseDocument(object): | ||||
|         # Assign default values to the instance. | ||||
|         # We set default values only for fields loaded from DB. See | ||||
|         # https://github.com/mongoengine/mongoengine/issues/399 for more info. | ||||
|         for key, field in iteritems(self._fields): | ||||
|         for key, field in self._fields.items(): | ||||
|             if self._db_field_map.get(key, key) in __only_fields: | ||||
|                 continue | ||||
|             value = getattr(self, key, None) | ||||
| @@ -122,14 +120,14 @@ class BaseDocument(object): | ||||
|         # Set passed values after initialisation | ||||
|         if self._dynamic: | ||||
|             dynamic_data = {} | ||||
|             for key, value in iteritems(values): | ||||
|             for key, value in values.items(): | ||||
|                 if key in self._fields or key == "_id": | ||||
|                     setattr(self, key, value) | ||||
|                 else: | ||||
|                     dynamic_data[key] = value | ||||
|         else: | ||||
|             FileField = _import_class("FileField") | ||||
|             for key, value in iteritems(values): | ||||
|             for key, value in values.items(): | ||||
|                 key = self._reverse_db_field_map.get(key, key) | ||||
|                 if key in self._fields or key in ("id", "pk", "_cls"): | ||||
|                     if __auto_convert and value is not None: | ||||
| @@ -145,7 +143,7 @@ class BaseDocument(object): | ||||
|  | ||||
|         if self._dynamic: | ||||
|             self._dynamic_lock = False | ||||
|             for key, value in iteritems(dynamic_data): | ||||
|             for key, value in dynamic_data.items(): | ||||
|                 setattr(self, key, value) | ||||
|  | ||||
|         # Flag initialised | ||||
| @@ -163,7 +161,7 @@ class BaseDocument(object): | ||||
|                 default = default() | ||||
|             setattr(self, field_name, default) | ||||
|         else: | ||||
|             super(BaseDocument, self).__delattr__(*args, **kwargs) | ||||
|             super().__delattr__(*args, **kwargs) | ||||
|  | ||||
|     def __setattr__(self, name, value): | ||||
|         # Handle dynamic data only if an initialised dynamic document | ||||
| @@ -210,9 +208,9 @@ class BaseDocument(object): | ||||
|             and self__created | ||||
|             and name == self._meta.get("id_field") | ||||
|         ): | ||||
|             super(BaseDocument, self).__setattr__("_created", False) | ||||
|             super().__setattr__("_created", False) | ||||
|  | ||||
|         super(BaseDocument, self).__setattr__(name, value) | ||||
|         super().__setattr__(name, value) | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         data = {} | ||||
| @@ -288,16 +286,13 @@ class BaseDocument(object): | ||||
|         except (UnicodeEncodeError, UnicodeDecodeError): | ||||
|             u = "[Bad Unicode data]" | ||||
|         repr_type = str if u is None else type(u) | ||||
|         return repr_type("<%s: %s>" % (self.__class__.__name__, u)) | ||||
|         return repr_type("<{}: {}>".format(self.__class__.__name__, u)) | ||||
|  | ||||
|     def __str__(self): | ||||
|         # TODO this could be simpler? | ||||
|         if hasattr(self, "__unicode__"): | ||||
|             if six.PY3: | ||||
|                 return self.__unicode__() | ||||
|             else: | ||||
|                 return six.text_type(self).encode("utf-8") | ||||
|         return six.text_type("%s object" % self.__class__.__name__) | ||||
|             return self.__unicode__() | ||||
|         return "%s object" % self.__class__.__name__ | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         if ( | ||||
| @@ -446,7 +441,7 @@ class BaseDocument(object): | ||||
|                 pk = self.pk | ||||
|             elif self._instance and hasattr(self._instance, "pk"): | ||||
|                 pk = self._instance.pk | ||||
|             message = "ValidationError (%s:%s) " % (self._class_name, pk) | ||||
|             message = "ValidationError ({}:{}) ".format(self._class_name, pk) | ||||
|             raise ValidationError(message, errors=errors) | ||||
|  | ||||
|     def to_json(self, *args, **kwargs): | ||||
| @@ -519,7 +514,7 @@ class BaseDocument(object): | ||||
|         if "." in key: | ||||
|             key, rest = key.split(".", 1) | ||||
|             key = self._db_field_map.get(key, key) | ||||
|             key = "%s.%s" % (key, rest) | ||||
|             key = "{}.{}".format(key, rest) | ||||
|         else: | ||||
|             key = self._db_field_map.get(key, key) | ||||
|  | ||||
| @@ -578,10 +573,10 @@ class BaseDocument(object): | ||||
|         if not hasattr(data, "items"): | ||||
|             iterator = enumerate(data) | ||||
|         else: | ||||
|             iterator = iteritems(data) | ||||
|             iterator = data.items() | ||||
|  | ||||
|         for index_or_key, value in iterator: | ||||
|             item_key = "%s%s." % (base_key, index_or_key) | ||||
|             item_key = "{}{}.".format(base_key, index_or_key) | ||||
|             # don't check anything lower if this key is already marked | ||||
|             # as changed. | ||||
|             if item_key[:-1] in changed_fields: | ||||
| @@ -589,7 +584,7 @@ class BaseDocument(object): | ||||
|  | ||||
|             if hasattr(value, "_get_changed_fields"): | ||||
|                 changed = value._get_changed_fields() | ||||
|                 changed_fields += ["%s%s" % (item_key, k) for k in changed if k] | ||||
|                 changed_fields += ["{}{}".format(item_key, k) for k in changed if k] | ||||
|             elif isinstance(value, (list, tuple, dict)): | ||||
|                 self._nestable_types_changed_fields(changed_fields, item_key, value) | ||||
|  | ||||
| @@ -620,7 +615,7 @@ class BaseDocument(object): | ||||
|             if isinstance(data, EmbeddedDocument): | ||||
|                 # Find all embedded fields that have been changed | ||||
|                 changed = data._get_changed_fields() | ||||
|                 changed_fields += ["%s%s" % (key, k) for k in changed if k] | ||||
|                 changed_fields += ["{}{}".format(key, k) for k in changed if k] | ||||
|             elif isinstance(data, (list, tuple, dict)): | ||||
|                 if hasattr(field, "field") and isinstance( | ||||
|                     field.field, (ReferenceField, GenericReferenceField) | ||||
| @@ -670,7 +665,7 @@ class BaseDocument(object): | ||||
|                 del set_data["_id"] | ||||
|  | ||||
|         # Determine if any changed items were actually unset. | ||||
|         for path, value in set_data.items(): | ||||
|         for path, value in list(set_data.items()): | ||||
|             if value or isinstance( | ||||
|                 value, (numbers.Number, bool) | ||||
|             ):  # Account for 0 and True that are truthy | ||||
| @@ -744,7 +739,7 @@ class BaseDocument(object): | ||||
|         # Convert SON to a data dict, making sure each key is a string and | ||||
|         # corresponds to the right db field. | ||||
|         data = {} | ||||
|         for key, value in iteritems(son): | ||||
|         for key, value in son.items(): | ||||
|             key = str(key) | ||||
|             key = cls._db_field_map.get(key, key) | ||||
|             data[key] = value | ||||
| @@ -759,7 +754,7 @@ class BaseDocument(object): | ||||
|         if not _auto_dereference: | ||||
|             fields = copy.deepcopy(fields) | ||||
|  | ||||
|         for field_name, field in iteritems(fields): | ||||
|         for field_name, field in fields.items(): | ||||
|             field._auto_dereference = _auto_dereference | ||||
|             if field.db_field in data: | ||||
|                 value = data[field.db_field] | ||||
| @@ -774,17 +769,16 @@ class BaseDocument(object): | ||||
|  | ||||
|         if errors_dict: | ||||
|             errors = "\n".join( | ||||
|                 ["Field '%s' - %s" % (k, v) for k, v in errors_dict.items()] | ||||
|                 ["Field '{}' - {}".format(k, v) for k, v in errors_dict.items()] | ||||
|             ) | ||||
|             msg = "Invalid data to create a `%s` instance.\n%s" % ( | ||||
|                 cls._class_name, | ||||
|                 errors, | ||||
|             msg = "Invalid data to create a `{}` instance.\n{}".format( | ||||
|                 cls._class_name, errors, | ||||
|             ) | ||||
|             raise InvalidDocumentError(msg) | ||||
|  | ||||
|         # In STRICT documents, remove any keys that aren't in cls._fields | ||||
|         if cls.STRICT: | ||||
|             data = {k: v for k, v in iteritems(data) if k in cls._fields} | ||||
|             data = {k: v for k, v in data.items() if k in cls._fields} | ||||
|  | ||||
|         obj = cls( | ||||
|             __auto_convert=False, _created=created, __only_fields=only_fields, **data | ||||
| @@ -831,7 +825,7 @@ class BaseDocument(object): | ||||
|     @classmethod | ||||
|     def _build_index_spec(cls, spec): | ||||
|         """Build a PyMongo index spec from a MongoEngine index spec.""" | ||||
|         if isinstance(spec, six.string_types): | ||||
|         if isinstance(spec, str): | ||||
|             spec = {"fields": [spec]} | ||||
|         elif isinstance(spec, (list, tuple)): | ||||
|             spec = {"fields": list(spec)} | ||||
| @@ -928,7 +922,7 @@ class BaseDocument(object): | ||||
|  | ||||
|                 # Add any unique_with fields to the back of the index spec | ||||
|                 if field.unique_with: | ||||
|                     if isinstance(field.unique_with, six.string_types): | ||||
|                     if isinstance(field.unique_with, str): | ||||
|                         field.unique_with = [field.unique_with] | ||||
|  | ||||
|                     # Convert unique_with field names to real field names | ||||
| @@ -949,7 +943,8 @@ class BaseDocument(object): | ||||
|  | ||||
|                 # Add the new index to the list | ||||
|                 fields = [ | ||||
|                     ("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields | ||||
|                     ("{}{}".format(namespace, f), pymongo.ASCENDING) | ||||
|                     for f in unique_fields | ||||
|                 ] | ||||
|                 index = {"fields": fields, "unique": True, "sparse": sparse} | ||||
|                 unique_indexes.append(index) | ||||
| @@ -1006,7 +1001,7 @@ class BaseDocument(object): | ||||
|             elif field._geo_index: | ||||
|                 field_name = field.db_field | ||||
|                 if parent_field: | ||||
|                     field_name = "%s.%s" % (parent_field, field_name) | ||||
|                     field_name = "{}.{}".format(parent_field, field_name) | ||||
|                 geo_indices.append({"fields": [(field_name, field._geo_index)]}) | ||||
|  | ||||
|         return geo_indices | ||||
| @@ -1175,9 +1170,6 @@ class BaseDocument(object): | ||||
|                 else [value] | ||||
|             ) | ||||
|             return sep.join( | ||||
|                 [ | ||||
|                     six.text_type(dict(field.choices).get(val, val)) | ||||
|                     for val in values or [] | ||||
|                 ] | ||||
|                 [str(dict(field.choices).get(val, val)) for val in values or []] | ||||
|             ) | ||||
|         return value | ||||
|   | ||||
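In base/document.py the recurring changes are iteritems(d) becoming d.items(), six.text_type becoming str, and one behavioural detail: the loop over set_data now iterates over list(set_data.items()) because it deletes keys while iterating, which raises a RuntimeError on Python 3 dict views. A small illustration of that last point, with made-up data rather than MongoEngine internals:

    set_data = {"title": "A", "views": 0, "draft": None}
    unset_data = {}

    # Iterating directly over set_data.items() while deleting keys raises
    # "RuntimeError: dictionary changed size during iteration" on Python 3,
    # so the loop materialises the items into a list first.
    for path, value in list(set_data.items()):
        if value or isinstance(value, (int, bool)):
            # numeric 0 and False are falsy but must still be kept as "set"
            continue
        del set_data[path]
        unset_data[path] = 1

    print(set_data)    # {'title': 'A', 'views': 0}
    print(unset_data)  # {'draft': 1}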
| @@ -4,8 +4,6 @@ import weakref | ||||
|  | ||||
| from bson import DBRef, ObjectId, SON | ||||
| import pymongo | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base.common import UPDATE_OPERATORS | ||||
| from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList | ||||
| @@ -15,7 +13,7 @@ from mongoengine.errors import DeprecatedError, ValidationError | ||||
| __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") | ||||
|  | ||||
|  | ||||
| class BaseField(object): | ||||
| class BaseField: | ||||
|     """A base class for fields in a MongoDB document. Instances of this class | ||||
|     may be added to subclasses of `Document` to define a document's schema. | ||||
|  | ||||
| @@ -87,13 +85,11 @@ class BaseField(object): | ||||
|         self._owner_document = None | ||||
|  | ||||
|         # Make sure db_field is a string (if it's explicitly defined). | ||||
|         if self.db_field is not None and not isinstance( | ||||
|             self.db_field, six.string_types | ||||
|         ): | ||||
|         if self.db_field is not None and not isinstance(self.db_field, str): | ||||
|             raise TypeError("db_field should be a string.") | ||||
|  | ||||
|         # Make sure db_field doesn't contain any forbidden characters. | ||||
|         if isinstance(self.db_field, six.string_types) and ( | ||||
|         if isinstance(self.db_field, str) and ( | ||||
|             "." in self.db_field | ||||
|             or "\0" in self.db_field | ||||
|             or self.db_field.startswith("$") | ||||
| @@ -216,14 +212,12 @@ class BaseField(object): | ||||
|         # Choices which are other types of Documents | ||||
|         if isinstance(value, (Document, EmbeddedDocument)): | ||||
|             if not any(isinstance(value, c) for c in choice_list): | ||||
|                 self.error( | ||||
|                     "Value must be an instance of %s" % (six.text_type(choice_list)) | ||||
|                 ) | ||||
|                 self.error("Value must be an instance of %s" % (choice_list)) | ||||
|         # Choices which are types other than Documents | ||||
|         else: | ||||
|             values = value if isinstance(value, (list, tuple)) else [value] | ||||
|             if len(set(values) - set(choice_list)): | ||||
|                 self.error("Value must be one of %s" % six.text_type(choice_list)) | ||||
|                 self.error("Value must be one of %s" % str(choice_list)) | ||||
|  | ||||
|     def _validate(self, value, **kwargs): | ||||
|         # Check the Choices Constraint | ||||
| @@ -311,7 +305,7 @@ class ComplexBaseField(BaseField): | ||||
|             if hasattr(instance._data[self.name], "_dereferenced"): | ||||
|                 instance._data[self.name]._dereferenced = True | ||||
|  | ||||
|         value = super(ComplexBaseField, self).__get__(instance, owner) | ||||
|         value = super().__get__(instance, owner) | ||||
|  | ||||
|         # Convert lists / values so we can watch for any changes on them | ||||
|         if isinstance(value, (list, tuple)): | ||||
| @@ -340,7 +334,7 @@ class ComplexBaseField(BaseField): | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         """Convert a MongoDB-compatible type to a Python type.""" | ||||
|         if isinstance(value, six.string_types): | ||||
|         if isinstance(value, str): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, "to_python"): | ||||
| @@ -394,7 +388,7 @@ class ComplexBaseField(BaseField): | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         GenericReferenceField = _import_class("GenericReferenceField") | ||||
|  | ||||
|         if isinstance(value, six.string_types): | ||||
|         if isinstance(value, str): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, "to_mongo"): | ||||
| @@ -418,11 +412,11 @@ class ComplexBaseField(BaseField): | ||||
|         if self.field: | ||||
|             value_dict = { | ||||
|                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||
|                 for key, item in iteritems(value) | ||||
|                 for key, item in value.items() | ||||
|             } | ||||
|         else: | ||||
|             value_dict = {} | ||||
|             for k, v in iteritems(value): | ||||
|             for k, v in value.items(): | ||||
|                 if isinstance(v, Document): | ||||
|                     # We need the id from the saved object to create the DBRef | ||||
|                     if v.pk is None: | ||||
| @@ -461,8 +455,8 @@ class ComplexBaseField(BaseField): | ||||
|         """If field is provided ensure the value is valid.""" | ||||
|         errors = {} | ||||
|         if self.field: | ||||
|             if hasattr(value, "iteritems") or hasattr(value, "items"): | ||||
|                 sequence = iteritems(value) | ||||
|             if hasattr(value, "items"): | ||||
|                 sequence = value.items() | ||||
|             else: | ||||
|                 sequence = enumerate(value) | ||||
|             for k, v in sequence: | ||||
| @@ -475,7 +469,9 @@ class ComplexBaseField(BaseField): | ||||
|  | ||||
|             if errors: | ||||
|                 field_class = self.field.__class__.__name__ | ||||
|                 self.error("Invalid %s item (%s)" % (field_class, value), errors=errors) | ||||
|                 self.error( | ||||
|                     "Invalid {} item ({})".format(field_class, value), errors=errors | ||||
|                 ) | ||||
|         # Don't allow empty values if required | ||||
|         if self.required and not value: | ||||
|             self.error("Field is required and cannot be empty") | ||||
| @@ -508,10 +504,9 @@ class ObjectIdField(BaseField): | ||||
|     def to_mongo(self, value): | ||||
|         if not isinstance(value, ObjectId): | ||||
|             try: | ||||
|                 return ObjectId(six.text_type(value)) | ||||
|                 return ObjectId(str(value)) | ||||
|             except Exception as e: | ||||
|                 # e.message attribute has been deprecated since Python 2.6 | ||||
|                 self.error(six.text_type(e)) | ||||
|                 self.error(str(e)) | ||||
|         return value | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
| @@ -519,9 +514,9 @@ class ObjectIdField(BaseField): | ||||
|  | ||||
|     def validate(self, value): | ||||
|         try: | ||||
|             ObjectId(six.text_type(value)) | ||||
|             ObjectId(str(value)) | ||||
|         except Exception: | ||||
|             self.error("Invalid Object ID") | ||||
|             self.error("Invalid ObjectID") | ||||
|  | ||||
|  | ||||
| class GeoJsonBaseField(BaseField): | ||||
| @@ -541,14 +536,14 @@ class GeoJsonBaseField(BaseField): | ||||
|         self._name = "%sField" % self._type | ||||
|         if not auto_index: | ||||
|             self._geo_index = False | ||||
|         super(GeoJsonBaseField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Validate the GeoJson object based on its type.""" | ||||
|         if isinstance(value, dict): | ||||
|             if set(value.keys()) == {"type", "coordinates"}: | ||||
|                 if value["type"] != self._type: | ||||
|                     self.error('%s type must be "%s"' % (self._name, self._type)) | ||||
|                     self.error('{} type must be "{}"'.format(self._name, self._type)) | ||||
|                 return self.validate(value["coordinates"]) | ||||
|             else: | ||||
|                 self.error( | ||||
|   | ||||
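In base/fields.py every six.string_types check becomes isinstance(value, str), which is enough on Python 3 because the bytes/unicode split no longer needs a compatibility tuple. A rough, self-contained sketch of the db_field validation shown in the hunk above (the function name and message wording here are illustrative, not MongoEngine's API):

    def validate_db_field(db_field):
        """Reject field names MongoDB cannot store, mirroring the checks above."""
        if db_field is not None and not isinstance(db_field, str):
            raise TypeError("db_field should be a string.")
        if isinstance(db_field, str) and (
            "." in db_field
            or "\0" in db_field
            or db_field.startswith("$")
        ):
            raise ValueError("db_field cannot contain '.', '\\0' or start with '$'")

    validate_db_field("title")      # fine
    validate_db_field(None)         # fine: db_field may be left implicit
    # validate_db_field("$bad")     # would raise ValueError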
| @@ -1,9 +1,6 @@ | ||||
| import itertools | ||||
| import warnings | ||||
|  | ||||
| import six | ||||
| from six import iteritems, itervalues | ||||
|  | ||||
| from mongoengine.base.common import _document_registry | ||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||
| from mongoengine.common import _import_class | ||||
| @@ -25,7 +22,7 @@ class DocumentMetaclass(type): | ||||
|     # TODO lower complexity of this method | ||||
|     def __new__(mcs, name, bases, attrs): | ||||
|         flattened_bases = mcs._get_bases(bases) | ||||
|         super_new = super(DocumentMetaclass, mcs).__new__ | ||||
|         super_new = super().__new__ | ||||
|  | ||||
|         # If a base class just call super | ||||
|         metaclass = attrs.get("my_metaclass") | ||||
| @@ -69,7 +66,7 @@ class DocumentMetaclass(type): | ||||
|             # Standard object mixin - merge in any Fields | ||||
|             if not hasattr(base, "_meta"): | ||||
|                 base_fields = {} | ||||
|                 for attr_name, attr_value in iteritems(base.__dict__): | ||||
|                 for attr_name, attr_value in base.__dict__.items(): | ||||
|                     if not isinstance(attr_value, BaseField): | ||||
|                         continue | ||||
|                     attr_value.name = attr_name | ||||
| @@ -81,7 +78,7 @@ class DocumentMetaclass(type): | ||||
|  | ||||
|         # Discover any document fields | ||||
|         field_names = {} | ||||
|         for attr_name, attr_value in iteritems(attrs): | ||||
|         for attr_name, attr_value in attrs.items(): | ||||
|             if not isinstance(attr_value, BaseField): | ||||
|                 continue | ||||
|             attr_value.name = attr_name | ||||
| @@ -111,9 +108,7 @@ class DocumentMetaclass(type): | ||||
|  | ||||
|         attrs["_fields_ordered"] = tuple( | ||||
|             i[1] | ||||
|             for i in sorted( | ||||
|                 (v.creation_counter, v.name) for v in itervalues(doc_fields) | ||||
|             ) | ||||
|             for i in sorted((v.creation_counter, v.name) for v in doc_fields.values()) | ||||
|         ) | ||||
|  | ||||
|         # | ||||
| @@ -173,24 +168,8 @@ class DocumentMetaclass(type): | ||||
|         # Add class to the _document_registry | ||||
|         _document_registry[new_class._class_name] = new_class | ||||
|  | ||||
|         # In Python 2, User-defined methods objects have special read-only | ||||
|         # attributes 'im_func' and 'im_self' which contain the function obj | ||||
|         # and class instance object respectively.  With Python 3 these special | ||||
|         # attributes have been replaced by __func__ and __self__.  The Blinker | ||||
|         # module continues to use im_func and im_self, so the code below | ||||
|         # copies __func__ into im_func and __self__ into im_self for | ||||
|         # classmethod objects in Document derived classes. | ||||
|         if six.PY3: | ||||
|             for val in new_class.__dict__.values(): | ||||
|                 if isinstance(val, classmethod): | ||||
|                     f = val.__get__(new_class) | ||||
|                     if hasattr(f, "__func__") and not hasattr(f, "im_func"): | ||||
|                         f.__dict__.update({"im_func": getattr(f, "__func__")}) | ||||
|                     if hasattr(f, "__self__") and not hasattr(f, "im_self"): | ||||
|                         f.__dict__.update({"im_self": getattr(f, "__self__")}) | ||||
|  | ||||
|         # Handle delete rules | ||||
|         for field in itervalues(new_class._fields): | ||||
|         for field in new_class._fields.values(): | ||||
|             f = field | ||||
|             if f.owner_document is None: | ||||
|                 f.owner_document = new_class | ||||
| @@ -252,8 +231,7 @@ class DocumentMetaclass(type): | ||||
|             if base is object: | ||||
|                 continue | ||||
|             yield base | ||||
|             for child_base in mcs.__get_bases(base.__bases__): | ||||
|                 yield child_base | ||||
|             yield from mcs.__get_bases(base.__bases__) | ||||
|  | ||||
|     @classmethod | ||||
|     def _import_classes(mcs): | ||||
| @@ -271,7 +249,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|  | ||||
|     def __new__(mcs, name, bases, attrs): | ||||
|         flattened_bases = mcs._get_bases(bases) | ||||
|         super_new = super(TopLevelDocumentMetaclass, mcs).__new__ | ||||
|         super_new = super().__new__ | ||||
|  | ||||
|         # Set default _meta data if base class, otherwise get user defined meta | ||||
|         if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: | ||||
| @@ -398,7 +376,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|             new_class.objects = QuerySetManager() | ||||
|  | ||||
|         # Validate the fields and set primary key if needed | ||||
|         for field_name, field in iteritems(new_class._fields): | ||||
|         for field_name, field in new_class._fields.items(): | ||||
|             if field.primary_key: | ||||
|                 # Ensure only one primary key is set | ||||
|                 current_pk = new_class._meta.get("id_field") | ||||
| @@ -461,8 +439,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|  | ||||
|         id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) | ||||
|         for i in itertools.count(): | ||||
|             id_name = "{0}_{1}".format(id_basename, i) | ||||
|             id_db_name = "{0}_{1}".format(id_db_basename, i) | ||||
|             id_name = "{}_{}".format(id_basename, i) | ||||
|             id_db_name = "{}_{}".format(id_db_basename, i) | ||||
|             if id_name not in existing_fields and id_db_name not in existing_db_fields: | ||||
|                 return id_name, id_db_name | ||||
|  | ||||
| @@ -475,7 +453,7 @@ class MetaDict(dict): | ||||
|     _merge_options = ("indexes",) | ||||
|  | ||||
|     def merge(self, new_options): | ||||
|         for k, v in iteritems(new_options): | ||||
|         for k, v in new_options.items(): | ||||
|             if k in self._merge_options: | ||||
|                 self[k] = self.get(k, []) + v | ||||
|             else: | ||||
|   | ||||
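The metaclasses diff follows the same recipe: super().__new__ inside __new__, base.__dict__.items() for field discovery, "yield from" in the recursive walk over bases, and removal of the im_func/im_self shim that only existed for Blinker on Python 2. A toy metaclass showing the shape of that pattern (not the real DocumentMetaclass):

    class FieldDiscoveryMeta(type):
        """Collect class attributes that look like fields into a _fields dict."""

        def __new__(mcs, name, bases, attrs):
            fields = {}
            for attr_name, attr_value in attrs.items():     # dict.items(), not iteritems()
                if getattr(attr_value, "is_field", False):
                    fields[attr_name] = attr_value
            attrs["_fields"] = fields
            return super().__new__(mcs, name, bases, attrs)  # zero-argument super()

        @classmethod
        def _get_bases(mcs, bases):
            # "yield from" flattens the recursive walk over base classes
            for base in bases:
                if base is object:
                    continue
                yield base
                yield from mcs._get_bases(base.__bases__)

    class Field:
        is_field = True

    class Person(metaclass=FieldDiscoveryMeta):
        name = Field()
        age = Field()

    print(sorted(Person._fields))                   # ['age', 'name']
    print(list(FieldDiscoveryMeta._get_bases((Person,))))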
| @@ -1,7 +1,7 @@ | ||||
| import re | ||||
|  | ||||
|  | ||||
| class LazyRegexCompiler(object): | ||||
| class LazyRegexCompiler: | ||||
|     """Descriptor to allow lazy compilation of regex""" | ||||
|  | ||||
|     def __init__(self, pattern, flags=0): | ||||
|   | ||||
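LazyRegexCompiler only loses its explicit (object) base here, since every Python 3 class is new-style. The descriptor its docstring describes can be sketched roughly as below; this is a guess at the behaviour implied by the docstring, not the actual implementation:

    import re

    class LazyRegexCompiler:
        """Descriptor that defers re.compile() until the pattern is first used."""

        def __init__(self, pattern, flags=0):
            self._pattern = pattern
            self._flags = flags
            self._compiled = None

        def __get__(self, instance, owner):
            if self._compiled is None:
                self._compiled = re.compile(self._pattern, self._flags)
            return self._compiled

    class EmailValidator:
        # The regex is only compiled the first time EMAIL_PATTERN is accessed.
        EMAIL_PATTERN = LazyRegexCompiler(r"[^@]+@[^@]+\.[^@]+")

    print(bool(EmailValidator.EMAIL_PATTERN.match("user@example.com")))   # True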
| @@ -1,6 +1,5 @@ | ||||
| from pymongo import MongoClient, ReadPreference, uri_parser | ||||
| from pymongo.database import _check_name | ||||
| import six | ||||
|  | ||||
| __all__ = [ | ||||
|     "DEFAULT_CONNECTION_NAME", | ||||
| @@ -39,8 +38,8 @@ def _check_db_name(name): | ||||
|     """Check if a database name is valid. | ||||
|     This functionality is copied from pymongo Database class constructor. | ||||
|     """ | ||||
|     if not isinstance(name, six.string_types): | ||||
|         raise TypeError("name must be an instance of %s" % six.string_types) | ||||
|     if not isinstance(name, str): | ||||
|         raise TypeError("name must be an instance of %s" % str) | ||||
|     elif name != "$external": | ||||
|         _check_name(name) | ||||
|  | ||||
| @@ -93,7 +92,7 @@ def _get_connection_settings( | ||||
|     conn_host = conn_settings["host"] | ||||
|  | ||||
|     # Host can be a list or a string, so if string, force to a list. | ||||
|     if isinstance(conn_host, six.string_types): | ||||
|     if isinstance(conn_host, str): | ||||
|         conn_host = [conn_host] | ||||
|  | ||||
|     resolved_hosts = [] | ||||
| @@ -148,7 +147,7 @@ def _get_connection_settings( | ||||
|                 # TODO simplify the code below once we drop support for | ||||
|                 # PyMongo v3.4. | ||||
|                 read_pf_mode = uri_options["readpreference"] | ||||
|                 if isinstance(read_pf_mode, six.string_types): | ||||
|                 if isinstance(read_pf_mode, str): | ||||
|                     read_pf_mode = read_pf_mode.lower() | ||||
|                 for preference in read_preferences: | ||||
|                     if ( | ||||
| @@ -318,7 +317,7 @@ def _create_connection(alias, connection_class, **connection_settings): | ||||
|     try: | ||||
|         return connection_class(**connection_settings) | ||||
|     except Exception as e: | ||||
|         raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e)) | ||||
|         raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e)) | ||||
|  | ||||
|  | ||||
| def _find_existing_connection(connection_settings): | ||||
| @@ -396,8 +395,8 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | ||||
|  | ||||
|         if new_conn_settings != prev_conn_setting: | ||||
|             err_msg = ( | ||||
|                 u"A different connection with alias `{}` was already " | ||||
|                 u"registered. Use disconnect() first" | ||||
|                 "A different connection with alias `{}` was already " | ||||
|                 "registered. Use disconnect() first" | ||||
|             ).format(alias) | ||||
|             raise ConnectionFailure(err_msg) | ||||
|     else: | ||||
|   | ||||
| @@ -1,7 +1,6 @@ | ||||
| from contextlib import contextmanager | ||||
|  | ||||
| from pymongo.write_concern import WriteConcern | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| @@ -17,7 +16,7 @@ __all__ = ( | ||||
| ) | ||||
|  | ||||
|  | ||||
| class switch_db(object): | ||||
| class switch_db: | ||||
|     """switch_db alias context manager. | ||||
|  | ||||
|     Example :: | ||||
| @@ -58,7 +57,7 @@ class switch_db(object): | ||||
|         self.cls._collection = self.collection | ||||
|  | ||||
|  | ||||
| class switch_collection(object): | ||||
| class switch_collection: | ||||
|     """switch_collection alias context manager. | ||||
|  | ||||
|     Example :: | ||||
| @@ -100,7 +99,7 @@ class switch_collection(object): | ||||
|         self.cls._get_collection_name = self.ori_get_collection_name | ||||
|  | ||||
|  | ||||
| class no_dereference(object): | ||||
| class no_dereference: | ||||
|     """no_dereference context manager. | ||||
|  | ||||
|     Turns off all dereferencing in Documents for the duration of the context | ||||
| @@ -123,7 +122,7 @@ class no_dereference(object): | ||||
|  | ||||
|         self.deref_fields = [ | ||||
|             k | ||||
|             for k, v in iteritems(self.cls._fields) | ||||
|             for k, v in self.cls._fields.items() | ||||
|             if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) | ||||
|         ] | ||||
|  | ||||
| @@ -140,7 +139,7 @@ class no_dereference(object): | ||||
|         return self.cls | ||||
|  | ||||
|  | ||||
| class no_sub_classes(object): | ||||
| class no_sub_classes: | ||||
|     """no_sub_classes context manager. | ||||
|  | ||||
|     Only returns instances of this class and no sub (inherited) classes:: | ||||
| @@ -168,7 +167,7 @@ class no_sub_classes(object): | ||||
|         self.cls._subclasses = self.cls_initial_subclasses | ||||
|  | ||||
|  | ||||
| class query_counter(object): | ||||
| class query_counter: | ||||
|     """Query_counter context manager to get the number of queries. | ||||
|     This works by updating the `profiling_level` of the database so that all queries get logged, | ||||
|     resetting the db.system.profile collection at the beginning of the context and counting the new entries. | ||||
| @@ -235,7 +234,7 @@ class query_counter(object): | ||||
|  | ||||
|     def __repr__(self): | ||||
|         """repr query_counter as the number of queries.""" | ||||
|         return u"%s" % self._get_count() | ||||
|         return "%s" % self._get_count() | ||||
|  | ||||
|     def _get_count(self): | ||||
|         """Get the number of queries by counting the current number of entries in db.system.profile | ||||
|   | ||||
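The context_managers changes are purely cosmetic for Python 3 (dropping the explicit object base and iteritems), but all of these classes share the same swap-on-enter, restore-on-exit shape. A toy analogue of that protocol, unrelated to MongoEngine's actual switch_db:

    class switch_value:
        """Minimal context manager in the same style: swap an attribute, restore it on exit."""

        def __init__(self, obj, attr, new_value):
            self.obj = obj
            self.attr = attr
            self.new_value = new_value

        def __enter__(self):
            self.old_value = getattr(self.obj, self.attr)
            setattr(self.obj, self.attr, self.new_value)
            return self.obj

        def __exit__(self, exc_type, exc_value, traceback):
            setattr(self.obj, self.attr, self.old_value)

    class Config:
        database = "default"

    cfg = Config()
    with switch_value(cfg, "database", "testdb"):
        print(cfg.database)   # testdb
    print(cfg.database)       # default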
| @@ -1,6 +1,4 @@ | ||||
| from bson import DBRef, SON | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base import ( | ||||
|     BaseDict, | ||||
| @@ -16,7 +14,7 @@ from mongoengine.fields import DictField, ListField, MapField, ReferenceField | ||||
| from mongoengine.queryset import QuerySet | ||||
|  | ||||
|  | ||||
| class DeReference(object): | ||||
| class DeReference: | ||||
|     def __call__(self, items, max_depth=1, instance=None, name=None): | ||||
|         """ | ||||
|         Cheaply dereferences the items to a set depth. | ||||
| @@ -30,7 +28,7 @@ class DeReference(object): | ||||
|             :class:`~mongoengine.base.ComplexBaseField` | ||||
|         :param get: A boolean determining if being called by __get__ | ||||
|         """ | ||||
|         if items is None or isinstance(items, six.string_types): | ||||
|         if items is None or isinstance(items, str): | ||||
|             return items | ||||
|  | ||||
|         # cheapest way to convert a queryset to a list | ||||
| @@ -79,7 +77,7 @@ class DeReference(object): | ||||
|  | ||||
|                     def _get_items_from_dict(items): | ||||
|                         new_items = {} | ||||
|                         for k, v in iteritems(items): | ||||
|                         for k, v in items.items(): | ||||
|                             value = v | ||||
|                             if isinstance(v, list): | ||||
|                                 value = _get_items_from_list(v) | ||||
| @@ -120,7 +118,7 @@ class DeReference(object): | ||||
|         depth += 1 | ||||
|         for item in iterator: | ||||
|             if isinstance(item, (Document, EmbeddedDocument)): | ||||
|                 for field_name, field in iteritems(item._fields): | ||||
|                 for field_name, field in item._fields.items(): | ||||
|                     v = item._data.get(field_name, None) | ||||
|                     if isinstance(v, LazyReference): | ||||
|                         # LazyReference inherits DBRef but should not be dereferenced here ! | ||||
| @@ -136,7 +134,7 @@ class DeReference(object): | ||||
|                             getattr(field, "field", None), "document_type", None | ||||
|                         ) | ||||
|                         references = self._find_references(v, depth) | ||||
|                         for key, refs in iteritems(references): | ||||
|                         for key, refs in references.items(): | ||||
|                             if isinstance( | ||||
|                                 field_cls, (Document, TopLevelDocumentMetaclass) | ||||
|                             ): | ||||
| @@ -153,7 +151,7 @@ class DeReference(object): | ||||
|                 ) | ||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||
|                 references = self._find_references(item, depth - 1) | ||||
|                 for key, refs in iteritems(references): | ||||
|                 for key, refs in references.items(): | ||||
|                     reference_map.setdefault(key, set()).update(refs) | ||||
|  | ||||
|         return reference_map | ||||
| @@ -162,7 +160,7 @@ class DeReference(object): | ||||
|         """Fetch all references and convert to their document objects | ||||
|         """ | ||||
|         object_map = {} | ||||
|         for collection, dbrefs in iteritems(self.reference_map): | ||||
|         for collection, dbrefs in self.reference_map.items(): | ||||
|  | ||||
|             # we use getattr instead of hasattr because hasattr swallows any exception under python2 | ||||
|             # so it could hide nasty things without raising exceptions (cfr bug #1688)) | ||||
| @@ -174,7 +172,7 @@ class DeReference(object): | ||||
|                     dbref for dbref in dbrefs if (col_name, dbref) not in object_map | ||||
|                 ] | ||||
|                 references = collection.objects.in_bulk(refs) | ||||
|                 for key, doc in iteritems(references): | ||||
|                 for key, doc in references.items(): | ||||
|                     object_map[(col_name, key)] = doc | ||||
|             else:  # Generic reference: use the refs data to convert to document | ||||
|                 if isinstance(doc_type, (ListField, DictField, MapField)): | ||||
| @@ -250,7 +248,7 @@ class DeReference(object): | ||||
|             data = [] | ||||
|         else: | ||||
|             is_list = False | ||||
|             iterator = iteritems(items) | ||||
|             iterator = items.items() | ||||
|             data = {} | ||||
|  | ||||
|         depth += 1 | ||||
| @@ -274,14 +272,12 @@ class DeReference(object): | ||||
|                             (v["_ref"].collection, v["_ref"].id), v | ||||
|                         ) | ||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                         item_name = six.text_type("{0}.{1}.{2}").format( | ||||
|                             name, k, field_name | ||||
|                         ) | ||||
|                         item_name = "{}.{}.{}".format(name, k, field_name) | ||||
|                         data[k]._data[field_name] = self._attach_objects( | ||||
|                             v, depth, instance=instance, name=item_name | ||||
|                         ) | ||||
|             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                 item_name = "%s.%s" % (name, k) if name else name | ||||
|                 item_name = "{}.{}".format(name, k) if name else name | ||||
|                 data[k] = self._attach_objects( | ||||
|                     v, depth - 1, instance=instance, name=item_name | ||||
|                 ) | ||||
|   | ||||
| @@ -4,8 +4,6 @@ import warnings | ||||
| from bson.dbref import DBRef | ||||
| import pymongo | ||||
| from pymongo.read_preferences import ReadPreference | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.base import ( | ||||
| @@ -44,7 +42,7 @@ def includes_cls(fields): | ||||
|     """Helper function used for ensuring and comparing indexes.""" | ||||
|     first_field = None | ||||
|     if len(fields): | ||||
|         if isinstance(fields[0], six.string_types): | ||||
|         if isinstance(fields[0], str): | ||||
|             first_field = fields[0] | ||||
|         elif isinstance(fields[0], (list, tuple)) and len(fields[0]): | ||||
|             first_field = fields[0][0] | ||||
| @@ -55,7 +53,7 @@ class InvalidCollectionError(Exception): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | ||||
| class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): | ||||
|     r"""A :class:`~mongoengine.Document` that isn't stored in its own | ||||
|     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as | ||||
|     fields on :class:`~mongoengine.Document`\ s through the | ||||
| @@ -71,7 +69,6 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | ||||
|  | ||||
|     __slots__ = ("_instance",) | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||
|     my_metaclass = DocumentMetaclass | ||||
|  | ||||
| @@ -82,7 +79,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | ||||
|     __hash__ = None | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super(EmbeddedDocument, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self._instance = None | ||||
|         self._changed_fields = [] | ||||
|  | ||||
| @@ -95,7 +92,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | ||||
|         return not self.__eq__(other) | ||||
|  | ||||
|     def to_mongo(self, *args, **kwargs): | ||||
|         data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) | ||||
|         data = super().to_mongo(*args, **kwargs) | ||||
|  | ||||
|         # remove _id from the SON if it's in it and it's None | ||||
|         if "_id" in data and data["_id"] is None: | ||||
| @@ -104,7 +101,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | ||||
|         return data | ||||
|  | ||||
|  | ||||
| class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
| class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | ||||
|     """The base class used for defining the structure and properties of | ||||
|     collections of documents stored in MongoDB. Inherit from this class, and | ||||
|     add fields as class attributes to define a document's structure. | ||||
| @@ -156,7 +153,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|     in the :attr:`meta` dictionary. | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||
|     my_metaclass = TopLevelDocumentMetaclass | ||||
|  | ||||
| @@ -260,7 +256,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|         return db.create_collection(collection_name, **opts) | ||||
|  | ||||
|     def to_mongo(self, *args, **kwargs): | ||||
|         data = super(Document, self).to_mongo(*args, **kwargs) | ||||
|         data = super().to_mongo(*args, **kwargs) | ||||
|  | ||||
|         # If '_id' is None, try and set it from self._data. If that | ||||
|         # doesn't exist either, remove '_id' from the SON completely. | ||||
| @@ -431,16 +427,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|                 self.cascade_save(**kwargs) | ||||
|  | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             message = u"Tried to save duplicate unique keys (%s)" | ||||
|             raise NotUniqueError(message % six.text_type(err)) | ||||
|             message = "Tried to save duplicate unique keys (%s)" | ||||
|             raise NotUniqueError(message % err) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = "Could not save document (%s)" | ||||
|             if re.match("^E1100[01] duplicate key", six.text_type(err)): | ||||
|             if re.match("^E1100[01] duplicate key", str(err)): | ||||
|                 # E11000 - duplicate key error index | ||||
|                 # E11001 - duplicate key on update | ||||
|                 message = u"Tried to save duplicate unique keys (%s)" | ||||
|                 raise NotUniqueError(message % six.text_type(err)) | ||||
|             raise OperationError(message % six.text_type(err)) | ||||
|                 message = "Tried to save duplicate unique keys (%s)" | ||||
|                 raise NotUniqueError(message % err) | ||||
|             raise OperationError(message % err) | ||||
|  | ||||
|         # Make sure we store the PK on this document now that it's saved | ||||
|         id_field = self._meta["id_field"] | ||||
| @@ -559,7 +555,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|             if not getattr(ref, "_changed_fields", True): | ||||
|                 continue | ||||
|  | ||||
|             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) | ||||
|             ref_id = "{},{}".format(ref.__class__.__name__, str(ref._data)) | ||||
|             if ref and ref_id not in _refs: | ||||
|                 _refs.append(ref_id) | ||||
|                 kwargs["_refs"] = _refs | ||||
| @@ -634,7 +630,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|  | ||||
|         # Delete FileFields separately | ||||
|         FileField = _import_class("FileField") | ||||
|         for name, field in iteritems(self._fields): | ||||
|         for name, field in self._fields.items(): | ||||
|             if isinstance(field, FileField): | ||||
|                 getattr(self, name).delete() | ||||
|  | ||||
| @@ -643,7 +639,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|                 write_concern=write_concern, _from_doc_delete=True | ||||
|             ) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = u"Could not delete document (%s)" % err.message | ||||
|             message = "Could not delete document (%s)" % err.message | ||||
|             raise OperationError(message) | ||||
|         signals.post_delete.send(self.__class__, document=self, **signal_kwargs) | ||||
|  | ||||
| @@ -979,10 +975,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|                     indexes.append(index) | ||||
|  | ||||
|         # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed | ||||
|         if [(u"_id", 1)] not in indexes: | ||||
|             indexes.append([(u"_id", 1)]) | ||||
|         if [("_id", 1)] not in indexes: | ||||
|             indexes.append([("_id", 1)]) | ||||
|         if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"): | ||||
|             indexes.append([(u"_cls", 1)]) | ||||
|             indexes.append([("_cls", 1)]) | ||||
|  | ||||
|         return indexes | ||||
|  | ||||
| @@ -1006,19 +1002,19 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|         extra = [index for index in existing if index not in required] | ||||
|  | ||||
|         # if { _cls: 1 } is missing, make sure it's *really* necessary | ||||
|         if [(u"_cls", 1)] in missing: | ||||
|         if [("_cls", 1)] in missing: | ||||
|             cls_obsolete = False | ||||
|             for index in existing: | ||||
|                 if includes_cls(index) and index not in extra: | ||||
|                     cls_obsolete = True | ||||
|                     break | ||||
|             if cls_obsolete: | ||||
|                 missing.remove([(u"_cls", 1)]) | ||||
|                 missing.remove([("_cls", 1)]) | ||||
|  | ||||
|         return {"missing": missing, "extra": extra} | ||||
|  | ||||
|  | ||||
| class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | ||||
| class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): | ||||
|     """A Dynamic Document class allowing flexible, expandable and uncontrolled | ||||
|     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same | ||||
|     way as an ordinary document but has expanded style properties.  Any data | ||||
| @@ -1032,7 +1028,6 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | ||||
|         There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||
|     my_metaclass = TopLevelDocumentMetaclass | ||||
|  | ||||
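With the metaclass now declared on the class statement itself, a dynamic schema looks like the short sketch below (class, field and database names are hypothetical):

    from mongoengine import DynamicDocument, StringField, connect

    connect("example_db")

    class Page(DynamicDocument):
        title = StringField()

    page = Page(title="Hello")
    page.tags = ["python", "mongodb"]   # undeclared field; accepted because the schema is dynamic
    page.save()                         # fields starting with "_" would still be rejected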
| @@ -1047,16 +1042,15 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | ||||
|             setattr(self, field_name, None) | ||||
|             self._dynamic_fields[field_name].null = False | ||||
|         else: | ||||
|             super(DynamicDocument, self).__delattr__(*args, **kwargs) | ||||
|             super().__delattr__(*args, **kwargs) | ||||
|  | ||||
|  | ||||
| class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): | ||||
| class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): | ||||
|     """A Dynamic Embedded Document class allowing flexible, expandable and | ||||
|     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more | ||||
|     information about dynamic documents. | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||
|     my_metaclass = DocumentMetaclass | ||||
|  | ||||
| @@ -1076,7 +1070,7 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu | ||||
|             setattr(self, field_name, None) | ||||
|  | ||||
|  | ||||
| class MapReduceDocument(object): | ||||
| class MapReduceDocument: | ||||
|     """A document returned from a map/reduce query. | ||||
|  | ||||
|     :param collection: An instance of :class:`~pymongo.Collection` | ||||
|   | ||||
| @@ -1,7 +1,5 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| __all__ = ( | ||||
|     "NotRegistered", | ||||
| @@ -87,24 +85,24 @@ class ValidationError(AssertionError): | ||||
|     _message = None | ||||
|  | ||||
|     def __init__(self, message="", **kwargs): | ||||
|         super(ValidationError, self).__init__(message) | ||||
|         super().__init__(message) | ||||
|         self.errors = kwargs.get("errors", {}) | ||||
|         self.field_name = kwargs.get("field_name") | ||||
|         self.message = message | ||||
|  | ||||
|     def __str__(self): | ||||
|         return six.text_type(self.message) | ||||
|         return str(self.message) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return "%s(%s,)" % (self.__class__.__name__, self.message) | ||||
|         return "{}({},)".format(self.__class__.__name__, self.message) | ||||
|  | ||||
|     def __getattribute__(self, name): | ||||
|         message = super(ValidationError, self).__getattribute__(name) | ||||
|         message = super().__getattribute__(name) | ||||
|         if name == "message": | ||||
|             if self.field_name: | ||||
|                 message = "%s" % message | ||||
|             if self.errors: | ||||
|                 message = "%s(%s)" % (message, self._format_errors()) | ||||
|                 message = "{}({})".format(message, self._format_errors()) | ||||
|         return message | ||||
|  | ||||
|     def _get_message(self): | ||||
| @@ -126,12 +124,12 @@ class ValidationError(AssertionError): | ||||
|         def build_dict(source): | ||||
|             errors_dict = {} | ||||
|             if isinstance(source, dict): | ||||
|                 for field_name, error in iteritems(source): | ||||
|                 for field_name, error in source.items(): | ||||
|                     errors_dict[field_name] = build_dict(error) | ||||
|             elif isinstance(source, ValidationError) and source.errors: | ||||
|                 return build_dict(source.errors) | ||||
|             else: | ||||
|                 return six.text_type(source) | ||||
|                 return str(source) | ||||
|  | ||||
|             return errors_dict | ||||
|  | ||||
| @@ -147,15 +145,15 @@ class ValidationError(AssertionError): | ||||
|             if isinstance(value, list): | ||||
|                 value = " ".join([generate_key(k) for k in value]) | ||||
|             elif isinstance(value, dict): | ||||
|                 value = " ".join([generate_key(v, k) for k, v in iteritems(value)]) | ||||
|                 value = " ".join([generate_key(v, k) for k, v in value.items()]) | ||||
|  | ||||
|             results = "%s.%s" % (prefix, value) if prefix else value | ||||
|             results = "{}.{}".format(prefix, value) if prefix else value | ||||
|             return results | ||||
|  | ||||
|         error_dict = defaultdict(list) | ||||
|         for k, v in iteritems(self.to_dict()): | ||||
|         for k, v in self.to_dict().items(): | ||||
|             error_dict[generate_key(v)].append(k) | ||||
|         return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)]) | ||||
|         return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()]) | ||||
|  | ||||
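For reference, a sketch of what the rewritten to_dict() / _format_errors() produce when errors are nested (field names and messages are invented):

    from mongoengine.errors import ValidationError

    inner = ValidationError("Invalid email address", field_name="email")
    outer = ValidationError("ValidationError", errors={"email": inner}, field_name="User")

    print(outer.to_dict())   # {'email': 'Invalid email address'}
    print(str(outer))        # e.g. "ValidationError(Invalid email address: ['email'])"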
|  | ||||
| class DeprecatedError(Exception): | ||||
|   | ||||
| @@ -5,14 +5,14 @@ import re | ||||
| import socket | ||||
| import time | ||||
| import uuid | ||||
| from io import BytesIO | ||||
| from operator import itemgetter | ||||
|  | ||||
| from bson import Binary, DBRef, ObjectId, SON | ||||
| from bson.int64 import Int64 | ||||
| import gridfs | ||||
| import pymongo | ||||
| from pymongo import ReturnDocument | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| try: | ||||
|     import dateutil | ||||
| @@ -21,11 +21,6 @@ except ImportError: | ||||
| else: | ||||
|     import dateutil.parser | ||||
|  | ||||
| try: | ||||
|     from bson.int64 import Int64 | ||||
| except ImportError: | ||||
|     Int64 = long | ||||
|  | ||||
|  | ||||
| from mongoengine.base import ( | ||||
|     BaseDocument, | ||||
| @@ -42,7 +37,6 @@ from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| from mongoengine.document import Document, EmbeddedDocument | ||||
| from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError | ||||
| from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version | ||||
| from mongoengine.python_support import StringIO | ||||
| from mongoengine.queryset import DO_NOTHING | ||||
| from mongoengine.queryset.base import BaseQuerySet | ||||
| from mongoengine.queryset.transform import STRING_OPERATORS | ||||
| @@ -53,11 +47,6 @@ except ImportError: | ||||
|     Image = None | ||||
|     ImageOps = None | ||||
|  | ||||
| if six.PY3: | ||||
|     # Useless as long as 2to3 gets executed | ||||
|     # as it turns `long` into `int` blindly | ||||
|     long = int | ||||
|  | ||||
|  | ||||
| __all__ = ( | ||||
|     "StringField", | ||||
| @@ -114,10 +103,10 @@ class StringField(BaseField): | ||||
|         self.regex = re.compile(regex) if regex else None | ||||
|         self.max_length = max_length | ||||
|         self.min_length = min_length | ||||
|         super(StringField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if isinstance(value, six.text_type): | ||||
|         if isinstance(value, str): | ||||
|             return value | ||||
|         try: | ||||
|             value = value.decode("utf-8") | ||||
| @@ -126,7 +115,7 @@ class StringField(BaseField): | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, six.string_types): | ||||
|         if not isinstance(value, str): | ||||
|             self.error("StringField only accepts string values") | ||||
|  | ||||
|         if self.max_length is not None and len(value) > self.max_length: | ||||
| @@ -142,7 +131,7 @@ class StringField(BaseField): | ||||
|         return None | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if not isinstance(op, six.string_types): | ||||
|         if not isinstance(op, str): | ||||
|             return value | ||||
|  | ||||
|         if op in STRING_OPERATORS: | ||||
| @@ -162,7 +151,7 @@ class StringField(BaseField): | ||||
|             # escape unsafe characters which could lead to a re.error | ||||
|             value = re.escape(value) | ||||
|             value = re.compile(regex % value, flags) | ||||
|         return super(StringField, self).prepare_query_value(op, value) | ||||
|         return super().prepare_query_value(op, value) | ||||
|  | ||||
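The re.escape() call above is what keeps user-supplied values for contains/startswith-style operators from being interpreted as regex syntax; a tiny stdlib-only illustration:

    import re

    raw = "foo.bar (beta)"
    pattern = re.compile(re.escape(raw), re.IGNORECASE)
    print(bool(pattern.search("prefix FOO.BAR (BETA) suffix")))  # True: case-insensitive literal match
    print(bool(pattern.search("fooXbar (beta)")))                # False: the escaped dot stays literal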
|  | ||||
| class URLField(StringField): | ||||
| @@ -186,17 +175,17 @@ class URLField(StringField): | ||||
|     def __init__(self, url_regex=None, schemes=None, **kwargs): | ||||
|         self.url_regex = url_regex or self._URL_REGEX | ||||
|         self.schemes = schemes or self._URL_SCHEMES | ||||
|         super(URLField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         # Check first if the scheme is valid | ||||
|         scheme = value.split("://")[0].lower() | ||||
|         if scheme not in self.schemes: | ||||
|             self.error(u"Invalid scheme {} in URL: {}".format(scheme, value)) | ||||
|             self.error("Invalid scheme {} in URL: {}".format(scheme, value)) | ||||
|  | ||||
|         # Then check full URL | ||||
|         if not self.url_regex.match(value): | ||||
|             self.error(u"Invalid URL: {}".format(value)) | ||||
|             self.error("Invalid URL: {}".format(value)) | ||||
|  | ||||
|  | ||||
| class EmailField(StringField): | ||||
| @@ -214,7 +203,7 @@ class EmailField(StringField): | ||||
|     ) | ||||
|  | ||||
|     UTF8_USER_REGEX = LazyRegexCompiler( | ||||
|         six.u( | ||||
|         ( | ||||
|             # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to | ||||
|             # include `UTF8-non-ascii`. | ||||
|             r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+)*\Z" | ||||
| @@ -229,7 +218,7 @@ class EmailField(StringField): | ||||
|         re.IGNORECASE, | ||||
|     ) | ||||
|  | ||||
|     error_msg = u"Invalid email address: %s" | ||||
|     error_msg = "Invalid email address: %s" | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
| @@ -253,7 +242,7 @@ class EmailField(StringField): | ||||
|         self.domain_whitelist = domain_whitelist or [] | ||||
|         self.allow_utf8_user = allow_utf8_user | ||||
|         self.allow_ip_domain = allow_ip_domain | ||||
|         super(EmailField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate_user_part(self, user_part): | ||||
|         """Validate the user part of the email address. Return True if | ||||
| @@ -280,13 +269,13 @@ class EmailField(StringField): | ||||
|                 try: | ||||
|                     socket.inet_pton(addr_family, domain_part[1:-1]) | ||||
|                     return True | ||||
|                 except (socket.error, UnicodeEncodeError): | ||||
|                 except (OSError, UnicodeEncodeError): | ||||
|                     pass | ||||
|  | ||||
|         return False | ||||
|  | ||||
|     def validate(self, value): | ||||
|         super(EmailField, self).validate(value) | ||||
|         super().validate(value) | ||||
|  | ||||
|         if "@" not in value: | ||||
|             self.error(self.error_msg % value) | ||||
| @@ -303,12 +292,16 @@ class EmailField(StringField): | ||||
|                 domain_part = domain_part.encode("idna").decode("ascii") | ||||
|             except UnicodeError: | ||||
|                 self.error( | ||||
|                     "%s %s" % (self.error_msg % value, "(domain failed IDN encoding)") | ||||
|                     "{} {}".format( | ||||
|                         self.error_msg % value, "(domain failed IDN encoding)" | ||||
|                     ) | ||||
|                 ) | ||||
|             else: | ||||
|                 if not self.validate_domain_part(domain_part): | ||||
|                     self.error( | ||||
|                         "%s %s" % (self.error_msg % value, "(domain validation failed)") | ||||
|                         "{} {}".format( | ||||
|                             self.error_msg % value, "(domain validation failed)" | ||||
|                         ) | ||||
|                     ) | ||||
|  | ||||
|  | ||||
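The switch from socket.error to OSError is safe because socket.error has been an alias of OSError since Python 3.3, and inet_pton raises OSError for invalid literals, as this small sketch shows:

    import socket

    for candidate in ("127.0.0.1", "not-an-ip"):
        try:
            socket.inet_pton(socket.AF_INET, candidate)
            print(candidate, "-> valid IPv4 literal")
        except OSError:
            print(candidate, "-> rejected")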
| @@ -317,7 +310,7 @@ class IntField(BaseField): | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
|         super(IntField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
| @@ -342,19 +335,19 @@ class IntField(BaseField): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return super(IntField, self).prepare_query_value(op, int(value)) | ||||
|         return super().prepare_query_value(op, int(value)) | ||||
|  | ||||
|  | ||||
| class LongField(BaseField): | ||||
|     """64-bit integer field.""" | ||||
|     """64-bit integer field. (Equivalent to IntField since Python 2 support was dropped.)""" | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
|         super(LongField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
|             value = long(value) | ||||
|             value = int(value) | ||||
|         except (TypeError, ValueError): | ||||
|             pass | ||||
|         return value | ||||
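With Python 2 gone there is a single arbitrary-precision int type, so the old long() coercion reduces to int(); for example:

    value = "9223372036854775807"   # 2**63 - 1, the largest value a BSON int64 can hold
    print(int(value))               # 9223372036854775807
    print(int(value) + 1)           # Python ints never overflow; the int64 limit only matters when saving to MongoDB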
| @@ -364,7 +357,7 @@ class LongField(BaseField): | ||||
|  | ||||
|     def validate(self, value): | ||||
|         try: | ||||
|             value = long(value) | ||||
|             value = int(value) | ||||
|         except (TypeError, ValueError): | ||||
|             self.error("%s could not be converted to long" % value) | ||||
|  | ||||
| @@ -378,7 +371,7 @@ class LongField(BaseField): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return super(LongField, self).prepare_query_value(op, long(value)) | ||||
|         return super().prepare_query_value(op, int(value)) | ||||
|  | ||||
|  | ||||
| class FloatField(BaseField): | ||||
| @@ -386,7 +379,7 @@ class FloatField(BaseField): | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
|         super(FloatField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
| @@ -396,7 +389,7 @@ class FloatField(BaseField): | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if isinstance(value, six.integer_types): | ||||
|         if isinstance(value, int): | ||||
|             try: | ||||
|                 value = float(value) | ||||
|             except OverflowError: | ||||
| @@ -415,7 +408,7 @@ class FloatField(BaseField): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return super(FloatField, self).prepare_query_value(op, float(value)) | ||||
|         return super().prepare_query_value(op, float(value)) | ||||
|  | ||||
|  | ||||
| class DecimalField(BaseField): | ||||
| @@ -462,7 +455,7 @@ class DecimalField(BaseField): | ||||
|         self.precision = precision | ||||
|         self.rounding = rounding | ||||
|  | ||||
|         super(DecimalField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if value is None: | ||||
| @@ -481,13 +474,13 @@ class DecimalField(BaseField): | ||||
|         if value is None: | ||||
|             return value | ||||
|         if self.force_string: | ||||
|             return six.text_type(self.to_python(value)) | ||||
|             return str(self.to_python(value)) | ||||
|         return float(self.to_python(value)) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, decimal.Decimal): | ||||
|             if not isinstance(value, six.string_types): | ||||
|                 value = six.text_type(value) | ||||
|             if not isinstance(value, str): | ||||
|                 value = str(value) | ||||
|             try: | ||||
|                 value = decimal.Decimal(value) | ||||
|             except (TypeError, ValueError, decimal.InvalidOperation) as exc: | ||||
| @@ -500,7 +493,7 @@ class DecimalField(BaseField): | ||||
|             self.error("Decimal value is too large") | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return super(DecimalField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|  | ||||
| class BooleanField(BaseField): | ||||
| @@ -540,7 +533,7 @@ class DateTimeField(BaseField): | ||||
|     def validate(self, value): | ||||
|         new_value = self.to_mongo(value) | ||||
|         if not isinstance(new_value, (datetime.datetime, datetime.date)): | ||||
|             self.error(u'cannot parse date "%s"' % value) | ||||
|             self.error('cannot parse date "%s"' % value) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if value is None: | ||||
| @@ -552,7 +545,7 @@ class DateTimeField(BaseField): | ||||
|         if callable(value): | ||||
|             return value() | ||||
|  | ||||
|         if not isinstance(value, six.string_types): | ||||
|         if not isinstance(value, str): | ||||
|             return None | ||||
|  | ||||
|         return self._parse_datetime(value) | ||||
| @@ -597,19 +590,19 @@ class DateTimeField(BaseField): | ||||
|                     return None | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|  | ||||
| class DateField(DateTimeField): | ||||
|     def to_mongo(self, value): | ||||
|         value = super(DateField, self).to_mongo(value) | ||||
|         value = super().to_mongo(value) | ||||
|         # drop hours, minutes, seconds | ||||
|         if isinstance(value, datetime.datetime): | ||||
|             value = datetime.datetime(value.year, value.month, value.day) | ||||
|         return value | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         value = super(DateField, self).to_python(value) | ||||
|         value = super().to_python(value) | ||||
|         # convert datetime to date | ||||
|         if isinstance(value, datetime.datetime): | ||||
|             value = datetime.date(value.year, value.month, value.day) | ||||
| @@ -643,7 +636,7 @@ class ComplexDateTimeField(StringField): | ||||
|         """ | ||||
|         self.separator = separator | ||||
|         self.format = separator.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"]) | ||||
|         super(ComplexDateTimeField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def _convert_from_datetime(self, val): | ||||
|         """ | ||||
| @@ -674,14 +667,14 @@ class ComplexDateTimeField(StringField): | ||||
|         if instance is None: | ||||
|             return self | ||||
|  | ||||
|         data = super(ComplexDateTimeField, self).__get__(instance, owner) | ||||
|         data = super().__get__(instance, owner) | ||||
|  | ||||
|         if isinstance(data, datetime.datetime) or data is None: | ||||
|             return data | ||||
|         return self._convert_from_string(data) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         super(ComplexDateTimeField, self).__set__(instance, value) | ||||
|         super().__set__(instance, value) | ||||
|         value = instance._data[self.name] | ||||
|         if value is not None: | ||||
|             if isinstance(value, datetime.datetime): | ||||
| @@ -706,9 +699,7 @@ class ComplexDateTimeField(StringField): | ||||
|         return self._convert_from_datetime(value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return super(ComplexDateTimeField, self).prepare_query_value( | ||||
|             op, self._convert_from_datetime(value) | ||||
|         ) | ||||
|         return super().prepare_query_value(op, self._convert_from_datetime(value)) | ||||
|  | ||||
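For context, the separator-joined format built in __init__ produces strings like the ones below (default "," separator assumed; the real conversion helpers also handle zero-padding):

    import datetime

    fmt = ",".join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"])
    dt = datetime.datetime(2020, 1, 2, 3, 4, 5, 678901)
    as_string = dt.strftime(fmt)
    print(as_string)                                    # 2020,01,02,03,04,05,678901
    print(datetime.datetime.strptime(as_string, fmt))   # 2020-01-02 03:04:05.678901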
|  | ||||
| class EmbeddedDocumentField(BaseField): | ||||
| @@ -719,7 +710,7 @@ class EmbeddedDocumentField(BaseField): | ||||
|     def __init__(self, document_type, **kwargs): | ||||
|         # XXX ValidationError raised outside of the "validate" method. | ||||
|         if not ( | ||||
|             isinstance(document_type, six.string_types) | ||||
|             isinstance(document_type, str) | ||||
|             or issubclass(document_type, EmbeddedDocument) | ||||
|         ): | ||||
|             self.error( | ||||
| @@ -728,11 +719,11 @@ class EmbeddedDocumentField(BaseField): | ||||
|             ) | ||||
|  | ||||
|         self.document_type_obj = document_type | ||||
|         super(EmbeddedDocumentField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
|         if isinstance(self.document_type_obj, six.string_types): | ||||
|         if isinstance(self.document_type_obj, str): | ||||
|             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: | ||||
|                 resolved_document_type = self.owner_document | ||||
|             else: | ||||
| @@ -789,7 +780,7 @@ class EmbeddedDocumentField(BaseField): | ||||
|                     "Querying the embedded document '%s' failed, due to an invalid query value" | ||||
|                     % (self.document_type._class_name,) | ||||
|                 ) | ||||
|         super(EmbeddedDocumentField, self).prepare_query_value(op, value) | ||||
|         super().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|  | ||||
| @@ -805,9 +796,7 @@ class GenericEmbeddedDocumentField(BaseField): | ||||
|     """ | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return super(GenericEmbeddedDocumentField, self).prepare_query_value( | ||||
|             op, self.to_mongo(value) | ||||
|         ) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if isinstance(value, dict): | ||||
| @@ -858,7 +847,7 @@ class DynamicField(BaseField): | ||||
|         """Convert a Python type to a MongoDB compatible type. | ||||
|         """ | ||||
|  | ||||
|         if isinstance(value, six.string_types): | ||||
|         if isinstance(value, str): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, "to_mongo"): | ||||
| @@ -880,12 +869,12 @@ class DynamicField(BaseField): | ||||
|             value = {k: v for k, v in enumerate(value)} | ||||
|  | ||||
|         data = {} | ||||
|         for k, v in iteritems(value): | ||||
|         for k, v in value.items(): | ||||
|             data[k] = self.to_mongo(v, use_db_field, fields) | ||||
|  | ||||
|         value = data | ||||
|         if is_list:  # Convert back to a list | ||||
|             value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))] | ||||
|             value = [v for k, v in sorted(data.items(), key=itemgetter(0))] | ||||
|         return value | ||||
|  | ||||
|     def to_python(self, value): | ||||
| @@ -895,15 +884,15 @@ class DynamicField(BaseField): | ||||
|                 value = doc_cls._get_db().dereference(value["_ref"]) | ||||
|             return doc_cls._from_son(value) | ||||
|  | ||||
|         return super(DynamicField, self).to_python(value) | ||||
|         return super().to_python(value) | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
|         return member_name | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if isinstance(value, six.string_types): | ||||
|         if isinstance(value, str): | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|         return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|     def validate(self, value, clean=True): | ||||
|         if hasattr(value, "validate"): | ||||
| @@ -924,7 +913,7 @@ class ListField(ComplexBaseField): | ||||
|         self.field = field | ||||
|         self.max_length = max_length | ||||
|         kwargs.setdefault("default", lambda: []) | ||||
|         super(ListField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         if instance is None: | ||||
| @@ -938,7 +927,7 @@ class ListField(ComplexBaseField): | ||||
|             and value | ||||
|         ): | ||||
|             instance._data[self.name] = [self.field.build_lazyref(x) for x in value] | ||||
|         return super(ListField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a list of valid fields is being used.""" | ||||
| @@ -952,7 +941,7 @@ class ListField(ComplexBaseField): | ||||
|         if self.max_length is not None and len(value) > self.max_length: | ||||
|             self.error("List is too long") | ||||
|  | ||||
|         super(ListField, self).validate(value) | ||||
|         super().validate(value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         # Validate that the `set` operator doesn't contain more items than `max_length`. | ||||
| @@ -966,14 +955,14 @@ class ListField(ComplexBaseField): | ||||
|             if ( | ||||
|                 op in ("set", "unset", None) | ||||
|                 and hasattr(value, "__iter__") | ||||
|                 and not isinstance(value, six.string_types) | ||||
|                 and not isinstance(value, str) | ||||
|                 and not isinstance(value, BaseDocument) | ||||
|             ): | ||||
|                 return [self.field.prepare_query_value(op, v) for v in value] | ||||
|  | ||||
|             return self.field.prepare_query_value(op, value) | ||||
|  | ||||
|         return super(ListField, self).prepare_query_value(op, value) | ||||
|         return super().prepare_query_value(op, value) | ||||
|  | ||||
|  | ||||
| class EmbeddedDocumentListField(ListField): | ||||
| @@ -994,9 +983,7 @@ class EmbeddedDocumentListField(ListField): | ||||
|         :param kwargs: Keyword arguments passed directly into the parent | ||||
|          :class:`~mongoengine.ListField`. | ||||
|         """ | ||||
|         super(EmbeddedDocumentListField, self).__init__( | ||||
|             field=EmbeddedDocumentField(document_type), **kwargs | ||||
|         ) | ||||
|         super().__init__(field=EmbeddedDocumentField(document_type), **kwargs) | ||||
|  | ||||
|  | ||||
| class SortedListField(ListField): | ||||
| @@ -1022,10 +1009,10 @@ class SortedListField(ListField): | ||||
|             self._ordering = kwargs.pop("ordering") | ||||
|         if "reverse" in kwargs.keys(): | ||||
|             self._order_reverse = kwargs.pop("reverse") | ||||
|         super(SortedListField, self).__init__(field, **kwargs) | ||||
|         super().__init__(field, **kwargs) | ||||
|  | ||||
|     def to_mongo(self, value, use_db_field=True, fields=None): | ||||
|         value = super(SortedListField, self).to_mongo(value, use_db_field, fields) | ||||
|         value = super().to_mongo(value, use_db_field, fields) | ||||
|         if self._ordering is not None: | ||||
|             return sorted( | ||||
|                 value, key=itemgetter(self._ordering), reverse=self._order_reverse | ||||
| @@ -1038,9 +1025,7 @@ def key_not_string(d): | ||||
|     dictionary is not a string. | ||||
|     """ | ||||
|     for k, v in d.items(): | ||||
|         if not isinstance(k, six.string_types) or ( | ||||
|             isinstance(v, dict) and key_not_string(v) | ||||
|         ): | ||||
|         if not isinstance(k, str) or (isinstance(v, dict) and key_not_string(v)): | ||||
|             return True | ||||
|  | ||||
|  | ||||
| @@ -1080,7 +1065,7 @@ class DictField(ComplexBaseField): | ||||
|         self._auto_dereference = False | ||||
|  | ||||
|         kwargs.setdefault("default", lambda: {}) | ||||
|         super(DictField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a list of valid fields is being used.""" | ||||
| @@ -1100,7 +1085,7 @@ class DictField(ComplexBaseField): | ||||
|             self.error( | ||||
|                 'Invalid dictionary key name - keys may not startswith "$" characters' | ||||
|             ) | ||||
|         super(DictField, self).validate(value) | ||||
|         super().validate(value) | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
|         return DictField(db_field=member_name) | ||||
| @@ -1117,7 +1102,7 @@ class DictField(ComplexBaseField): | ||||
|             "iexact", | ||||
|         ] | ||||
|  | ||||
|         if op in match_operators and isinstance(value, six.string_types): | ||||
|         if op in match_operators and isinstance(value, str): | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|  | ||||
|         if hasattr( | ||||
| @@ -1129,7 +1114,7 @@ class DictField(ComplexBaseField): | ||||
|                 } | ||||
|             return self.field.prepare_query_value(op, value) | ||||
|  | ||||
|         return super(DictField, self).prepare_query_value(op, value) | ||||
|         return super().prepare_query_value(op, value) | ||||
|  | ||||
|  | ||||
| class MapField(DictField): | ||||
| @@ -1144,7 +1129,7 @@ class MapField(DictField): | ||||
|         # XXX ValidationError raised outside of the "validate" method. | ||||
|         if not isinstance(field, BaseField): | ||||
|             self.error("Argument to MapField constructor must be a valid field") | ||||
|         super(MapField, self).__init__(field=field, *args, **kwargs) | ||||
|         super().__init__(field=field, *args, **kwargs) | ||||
|  | ||||
|  | ||||
| class ReferenceField(BaseField): | ||||
| @@ -1204,7 +1189,7 @@ class ReferenceField(BaseField): | ||||
|             :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. | ||||
|         """ | ||||
|         # XXX ValidationError raised outside of the "validate" method. | ||||
|         if not isinstance(document_type, six.string_types) and not issubclass( | ||||
|         if not isinstance(document_type, str) and not issubclass( | ||||
|             document_type, Document | ||||
|         ): | ||||
|             self.error( | ||||
| @@ -1215,11 +1200,11 @@ class ReferenceField(BaseField): | ||||
|         self.dbref = dbref | ||||
|         self.document_type_obj = document_type | ||||
|         self.reverse_delete_rule = reverse_delete_rule | ||||
|         super(ReferenceField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
|         if isinstance(self.document_type_obj, six.string_types): | ||||
|         if isinstance(self.document_type_obj, str): | ||||
|             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: | ||||
|                 self.document_type_obj = self.owner_document | ||||
|             else: | ||||
| @@ -1248,7 +1233,7 @@ class ReferenceField(BaseField): | ||||
|             else: | ||||
|                 instance._data[self.name] = cls._from_son(dereferenced) | ||||
|  | ||||
|         return super(ReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def to_mongo(self, document): | ||||
|         if isinstance(document, DBRef): | ||||
| @@ -1299,7 +1284,7 @@ class ReferenceField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return None | ||||
|         super(ReferenceField, self).prepare_query_value(op, value) | ||||
|         super().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
| @@ -1335,7 +1320,7 @@ class CachedReferenceField(BaseField): | ||||
|             fields = [] | ||||
|  | ||||
|         # XXX ValidationError raised outside of the "validate" method. | ||||
|         if not isinstance(document_type, six.string_types) and not issubclass( | ||||
|         if not isinstance(document_type, str) and not issubclass( | ||||
|             document_type, Document | ||||
|         ): | ||||
|             self.error( | ||||
| @@ -1346,7 +1331,7 @@ class CachedReferenceField(BaseField): | ||||
|         self.auto_sync = auto_sync | ||||
|         self.document_type_obj = document_type | ||||
|         self.fields = fields | ||||
|         super(CachedReferenceField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def start_listener(self): | ||||
|         from mongoengine import signals | ||||
| @@ -1358,7 +1343,7 @@ class CachedReferenceField(BaseField): | ||||
|             return None | ||||
|  | ||||
|         update_kwargs = { | ||||
|             "set__%s__%s" % (self.name, key): val | ||||
|             "set__{}__{}".format(self.name, key): val | ||||
|             for key, val in document._delta()[0].items() | ||||
|             if key in self.fields | ||||
|         } | ||||
| @@ -1380,7 +1365,7 @@ class CachedReferenceField(BaseField): | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
|         if isinstance(self.document_type_obj, six.string_types): | ||||
|         if isinstance(self.document_type_obj, str): | ||||
|             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: | ||||
|                 self.document_type_obj = self.owner_document | ||||
|             else: | ||||
| @@ -1404,7 +1389,7 @@ class CachedReferenceField(BaseField): | ||||
|             else: | ||||
|                 instance._data[self.name] = self.document_type._from_son(dereferenced) | ||||
|  | ||||
|         return super(CachedReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def to_mongo(self, document, use_db_field=True, fields=None): | ||||
|         id_field_name = self.document_type._meta["id_field"] | ||||
| @@ -1503,12 +1488,12 @@ class GenericReferenceField(BaseField): | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         choices = kwargs.pop("choices", None) | ||||
|         super(GenericReferenceField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self.choices = [] | ||||
|         # Keep the choices as a list of allowed Document class names | ||||
|         if choices: | ||||
|             for choice in choices: | ||||
|                 if isinstance(choice, six.string_types): | ||||
|                 if isinstance(choice, str): | ||||
|                     self.choices.append(choice) | ||||
|                 elif isinstance(choice, type) and issubclass(choice, Document): | ||||
|                     self.choices.append(choice._class_name) | ||||
| @@ -1517,7 +1502,7 @@ class GenericReferenceField(BaseField): | ||||
|                     # method. | ||||
|                     self.error( | ||||
|                         "Invalid choices provided: must be a list of" | ||||
|                         "Document subclasses and/or six.string_typess" | ||||
|                         " Document subclasses and/or str" | ||||
|                     ) | ||||
|  | ||||
|     def _validate_choices(self, value): | ||||
| @@ -1527,7 +1512,7 @@ class GenericReferenceField(BaseField): | ||||
|             value = value.get("_cls") | ||||
|         elif isinstance(value, Document): | ||||
|             value = value._class_name | ||||
|         super(GenericReferenceField, self)._validate_choices(value) | ||||
|         super()._validate_choices(value) | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         if instance is None: | ||||
| @@ -1543,7 +1528,7 @@ class GenericReferenceField(BaseField): | ||||
|             else: | ||||
|                 instance._data[self.name] = dereferenced | ||||
|  | ||||
|         return super(GenericReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, (Document, DBRef, dict, SON)): | ||||
| @@ -1607,22 +1592,22 @@ class BinaryField(BaseField): | ||||
|  | ||||
|     def __init__(self, max_bytes=None, **kwargs): | ||||
|         self.max_bytes = max_bytes | ||||
|         super(BinaryField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         """Convert bytearray values to bytes before storing.""" | ||||
|         if six.PY3 and isinstance(value, bytearray): | ||||
|             value = six.binary_type(value) | ||||
|         return super(BinaryField, self).__set__(instance, value) | ||||
|         if isinstance(value, bytearray): | ||||
|             value = bytes(value) | ||||
|         return super().__set__(instance, value) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return Binary(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, (six.binary_type, Binary)): | ||||
|         if not isinstance(value, (bytes, Binary)): | ||||
|             self.error( | ||||
|                 "BinaryField only accepts instances of " | ||||
|                 "(%s, %s, Binary)" % (six.binary_type.__name__, Binary.__name__) | ||||
|                 "(%s, %s, Binary)" % (bytes.__name__, Binary.__name__) | ||||
|             ) | ||||
|  | ||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||
| @@ -1631,14 +1616,14 @@ class BinaryField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|         return super(BinaryField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|  | ||||
| class GridFSError(Exception): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class GridFSProxy(object): | ||||
| class GridFSProxy: | ||||
|     """Proxy object to handle writing and reading of files to and from GridFS | ||||
|  | ||||
|     .. versionadded:: 0.4 | ||||
| @@ -1688,8 +1673,6 @@ class GridFSProxy(object): | ||||
|     def __bool__(self): | ||||
|         return bool(self.grid_id) | ||||
|  | ||||
|     __nonzero__ = __bool__  # For Py2 support | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         self_dict = self.__dict__ | ||||
|         self_dict["_fs"] = None | ||||
| @@ -1704,12 +1687,12 @@ class GridFSProxy(object): | ||||
|         return self.__copy__() | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return "<%s: %s>" % (self.__class__.__name__, self.grid_id) | ||||
|         return "<{}: {}>".format(self.__class__.__name__, self.grid_id) | ||||
|  | ||||
|     def __str__(self): | ||||
|         gridout = self.get() | ||||
|         filename = getattr(gridout, "filename") if gridout else "<no file>" | ||||
|         return "<%s: %s (%s)>" % (self.__class__.__name__, filename, self.grid_id) | ||||
|         return "<{}: {} ({})>".format(self.__class__.__name__, filename, self.grid_id) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         if isinstance(other, GridFSProxy): | ||||
| @@ -1820,7 +1803,7 @@ class FileField(BaseField): | ||||
|     def __init__( | ||||
|         self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs | ||||
|     ): | ||||
|         super(FileField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|         self.collection_name = collection_name | ||||
|         self.db_alias = db_alias | ||||
|  | ||||
| @@ -1843,7 +1826,7 @@ class FileField(BaseField): | ||||
|         key = self.name | ||||
|         if ( | ||||
|             hasattr(value, "read") and not isinstance(value, GridFSProxy) | ||||
|         ) or isinstance(value, (six.binary_type, six.string_types)): | ||||
|         ) or isinstance(value, (bytes, str)): | ||||
|             # using "FileField() = file/string" notation | ||||
|             grid_file = instance._data.get(self.name) | ||||
|             # If a file already exists, delete it | ||||
| @@ -1961,11 +1944,11 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|  | ||||
|         w, h = img.size | ||||
|  | ||||
|         io = StringIO() | ||||
|         io = BytesIO() | ||||
|         img.save(io, img_format, progressive=progressive) | ||||
|         io.seek(0) | ||||
|  | ||||
|         return super(ImageGridFsProxy, self).put( | ||||
|         return super().put( | ||||
|             io, width=w, height=h, format=img_format, thumbnail_id=thumb_id, **kwargs | ||||
|         ) | ||||
|  | ||||
| @@ -1975,12 +1958,12 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|         if out and out.thumbnail_id: | ||||
|             self.fs.delete(out.thumbnail_id) | ||||
|  | ||||
|         return super(ImageGridFsProxy, self).delete() | ||||
|         return super().delete() | ||||
|  | ||||
|     def _put_thumbnail(self, thumbnail, format, progressive, **kwargs): | ||||
|         w, h = thumbnail.size | ||||
|  | ||||
|         io = StringIO() | ||||
|         io = BytesIO() | ||||
|         thumbnail.save(io, format, progressive=progressive) | ||||
|         io.seek(0) | ||||
|  | ||||
| @@ -2050,16 +2033,11 @@ class ImageField(FileField): | ||||
|         for att_name, att in extra_args.items(): | ||||
|             value = None | ||||
|             if isinstance(att, (tuple, list)): | ||||
|                 if six.PY3: | ||||
|                     value = dict( | ||||
|                         itertools.zip_longest(params_size, att, fillvalue=None) | ||||
|                     ) | ||||
|                 else: | ||||
|                     value = dict(map(None, params_size, att)) | ||||
|                 value = dict(itertools.zip_longest(params_size, att, fillvalue=None)) | ||||
|  | ||||
|             setattr(self, att_name, value) | ||||
|  | ||||
|         super(ImageField, self).__init__(collection_name=collection_name, **kwargs) | ||||
|         super().__init__(collection_name=collection_name, **kwargs) | ||||
|  | ||||
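The Python 2 branch (dict(map(None, ...))) is gone; itertools.zip_longest builds the same padded mapping. A quick sketch with assumed names (params_size and the size tuple below are illustrative, not taken verbatim from the diff):

    import itertools

    params_size = ("width", "height", "force")
    att = (200, 150)   # e.g. size=(200, 150) with the third element omitted
    print(dict(itertools.zip_longest(params_size, att, fillvalue=None)))
    # {'width': 200, 'height': 150, 'force': None}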
|  | ||||
| class SequenceField(BaseField): | ||||
| @@ -2111,14 +2089,14 @@ class SequenceField(BaseField): | ||||
|         self.value_decorator = ( | ||||
|             value_decorator if callable(value_decorator) else self.VALUE_DECORATOR | ||||
|         ) | ||||
|         super(SequenceField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def generate(self): | ||||
|         """ | ||||
|         Generate and Increment the counter | ||||
|         """ | ||||
|         sequence_name = self.get_sequence_name() | ||||
|         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||
|         sequence_id = "{}.{}".format(sequence_name, self.name) | ||||
|         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||
|  | ||||
|         counter = collection.find_one_and_update( | ||||
| @@ -2132,7 +2110,7 @@ class SequenceField(BaseField): | ||||
|     def set_next_value(self, value): | ||||
|         """Helper method to set the next sequence value""" | ||||
|         sequence_name = self.get_sequence_name() | ||||
|         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||
|         sequence_id = "{}.{}".format(sequence_name, self.name) | ||||
|         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||
|         counter = collection.find_one_and_update( | ||||
|             filter={"_id": sequence_id}, | ||||
| @@ -2149,7 +2127,7 @@ class SequenceField(BaseField): | ||||
|         as it is only fixed on set. | ||||
|         """ | ||||
|         sequence_name = self.get_sequence_name() | ||||
|         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||
|         sequence_id = "{}.{}".format(sequence_name, self.name) | ||||
|         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||
|         data = collection.find_one({"_id": sequence_id}) | ||||
|  | ||||
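The sequence helpers above all build the same "<sequence name>.<field name>" counter id and hit a counters collection; a rough pymongo-level sketch of the upsert generate() performs (database, collection and id values are illustrative, and the kwargs are assumed to mirror the truncated call above):

    from pymongo import MongoClient, ReturnDocument

    counters = MongoClient()["example_db"]["mongoengine.counters"]
    counter = counters.find_one_and_update(
        filter={"_id": "person.id"},          # "{}.{}".format(sequence_name, field_name)
        update={"$inc": {"next": 1}},
        upsert=True,
        return_document=ReturnDocument.AFTER,
    )
    print(counter["next"])   # 1 on the first call, then 2, 3, ...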
| @@ -2172,7 +2150,7 @@ class SequenceField(BaseField): | ||||
|             ) | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         value = super(SequenceField, self).__get__(instance, owner) | ||||
|         value = super().__get__(instance, owner) | ||||
|         if value is None and instance._initialised: | ||||
|             value = self.generate() | ||||
|             instance._data[self.name] = value | ||||
| @@ -2185,7 +2163,7 @@ class SequenceField(BaseField): | ||||
|         if value is None and instance._initialised: | ||||
|             value = self.generate() | ||||
|  | ||||
|         return super(SequenceField, self).__set__(instance, value) | ||||
|         return super().__set__(instance, value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         """ | ||||
| @@ -2219,14 +2197,14 @@ class UUIDField(BaseField): | ||||
|         .. versionchanged:: 0.6.19 | ||||
|         """ | ||||
|         self._binary = binary | ||||
|         super(UUIDField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if not self._binary: | ||||
|             original_value = value | ||||
|             try: | ||||
|                 if not isinstance(value, six.string_types): | ||||
|                     value = six.text_type(value) | ||||
|                 if not isinstance(value, str): | ||||
|                     value = str(value) | ||||
|                 return uuid.UUID(value) | ||||
|             except (ValueError, TypeError, AttributeError): | ||||
|                 return original_value | ||||
| @@ -2234,8 +2212,8 @@ class UUIDField(BaseField): | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if not self._binary: | ||||
|             return six.text_type(value) | ||||
|         elif isinstance(value, six.string_types): | ||||
|             return str(value) | ||||
|         elif isinstance(value, str): | ||||
|             return uuid.UUID(value) | ||||
|         return value | ||||
|  | ||||
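A small sketch of the two storage modes handled above: with binary=False the value round-trips through its string form (the UUID below is generated on the fly):

    import uuid

    value = uuid.uuid4()
    as_string = str(value)                  # what to_mongo() returns when binary=False
    print(uuid.UUID(as_string) == value)    # True: to_python() can rebuild the original UUID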
| @@ -2246,7 +2224,7 @@ class UUIDField(BaseField): | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, uuid.UUID): | ||||
|             if not isinstance(value, six.string_types): | ||||
|             if not isinstance(value, str): | ||||
|                 value = str(value) | ||||
|             try: | ||||
|                 uuid.UUID(value) | ||||
| @@ -2445,7 +2423,7 @@ class LazyReferenceField(BaseField): | ||||
|           document. Note this only works when getting the field (not when setting or deleting it). | ||||
|         """ | ||||
|         # XXX ValidationError raised outside of the "validate" method. | ||||
|         if not isinstance(document_type, six.string_types) and not issubclass( | ||||
|         if not isinstance(document_type, str) and not issubclass( | ||||
|             document_type, Document | ||||
|         ): | ||||
|             self.error( | ||||
| @@ -2457,11 +2435,11 @@ class LazyReferenceField(BaseField): | ||||
|         self.passthrough = passthrough | ||||
|         self.document_type_obj = document_type | ||||
|         self.reverse_delete_rule = reverse_delete_rule | ||||
|         super(LazyReferenceField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
|         if isinstance(self.document_type_obj, six.string_types): | ||||
|         if isinstance(self.document_type_obj, str): | ||||
|             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: | ||||
|                 self.document_type_obj = self.owner_document | ||||
|             else: | ||||
| @@ -2500,7 +2478,7 @@ class LazyReferenceField(BaseField): | ||||
|         if value: | ||||
|             instance._data[self.name] = value | ||||
|  | ||||
|         return super(LazyReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if isinstance(value, LazyReference): | ||||
| @@ -2564,7 +2542,7 @@ class LazyReferenceField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return None | ||||
|         super(LazyReferenceField, self).prepare_query_value(op, value) | ||||
|         super().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
| @@ -2591,12 +2569,12 @@ class GenericLazyReferenceField(GenericReferenceField): | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         self.passthrough = kwargs.pop("passthrough", False) | ||||
|         super(GenericLazyReferenceField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def _validate_choices(self, value): | ||||
|         if isinstance(value, LazyReference): | ||||
|             value = value.document_type._class_name | ||||
|         super(GenericLazyReferenceField, self)._validate_choices(value) | ||||
|         super()._validate_choices(value) | ||||
|  | ||||
|     def build_lazyref(self, value): | ||||
|         if isinstance(value, LazyReference): | ||||
| @@ -2625,7 +2603,7 @@ class GenericLazyReferenceField(GenericReferenceField): | ||||
|         if value: | ||||
|             instance._data[self.name] = value | ||||
|  | ||||
|         return super(GenericLazyReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if isinstance(value, LazyReference) and value.pk is None: | ||||
| @@ -2633,7 +2611,7 @@ class GenericLazyReferenceField(GenericReferenceField): | ||||
|                 "You can only reference documents once they have been" | ||||
|                 " saved to the database" | ||||
|             ) | ||||
|         return super(GenericLazyReferenceField, self).validate(value) | ||||
|         return super().validate(value) | ||||
|  | ||||
|     def to_mongo(self, document): | ||||
|         if document is None: | ||||
| @@ -2652,4 +2630,4 @@ class GenericLazyReferenceField(GenericReferenceField): | ||||
|                 ) | ||||
|             ) | ||||
|         else: | ||||
|             return super(GenericLazyReferenceField, self).to_mongo(document) | ||||
|             return super().to_mongo(document) | ||||
|   | ||||
| @@ -1,23 +0,0 @@ | ||||
| """ | ||||
| Helper functions, constants, and types to aid with Python v2.7 - v3.x support | ||||
| """ | ||||
| import six | ||||
|  | ||||
| # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. | ||||
| StringIO = six.BytesIO | ||||
|  | ||||
| # Additionally for Py2, try to use the faster cStringIO, if available | ||||
| if not six.PY3: | ||||
|     try: | ||||
|         import cStringIO | ||||
|     except ImportError: | ||||
|         pass | ||||
|     else: | ||||
|         StringIO = cStringIO.StringIO | ||||
|  | ||||
|  | ||||
| if six.PY3: | ||||
|     from collections.abc import Hashable | ||||
| else: | ||||
|     # raises DeprecationWarnings in Python >=3.7 | ||||
|     from collections import Hashable | ||||
| @@ -1,5 +1,3 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| import copy | ||||
| import itertools | ||||
| import re | ||||
| @@ -11,8 +9,6 @@ import pymongo | ||||
| import pymongo.errors | ||||
| from pymongo.collection import ReturnDocument | ||||
| from pymongo.common import validate_read_preference | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.base import get_document | ||||
| @@ -41,7 +37,7 @@ DENY = 3 | ||||
| PULL = 4 | ||||
|  | ||||
|  | ||||
| class BaseQuerySet(object): | ||||
| class BaseQuerySet: | ||||
|     """A set of results returned from a query. Wraps a MongoDB cursor, | ||||
|     providing :class:`~mongoengine.Document` objects as the results. | ||||
|     """ | ||||
| @@ -203,8 +199,6 @@ class BaseQuerySet(object): | ||||
|         """Avoid opening all records when the queryset is used in an if statement.""" | ||||
|         return self._has_data() | ||||
|  | ||||
|     __nonzero__ = __bool__  # For Py2 support | ||||
|  | ||||
|     # Core functions | ||||
|  | ||||
|     def all(self): | ||||
| @@ -255,21 +249,20 @@ class BaseQuerySet(object): | ||||
|         queryset = queryset.filter(*q_objs, **query) | ||||
|  | ||||
|         try: | ||||
|             result = six.next(queryset) | ||||
|             result = next(queryset) | ||||
|         except StopIteration: | ||||
|             msg = "%s matching query does not exist." % queryset._document._class_name | ||||
|             raise queryset._document.DoesNotExist(msg) | ||||
|  | ||||
|         try: | ||||
|             # Check if there is another match | ||||
|             six.next(queryset) | ||||
|             next(queryset) | ||||
|         except StopIteration: | ||||
|             return result | ||||
|  | ||||
|         # If we were able to retrieve the 2nd doc, rewind the cursor and | ||||
|         # raise the MultipleObjectsReturned exception. | ||||
|         # If we were able to retrieve the 2nd doc, raise the MultipleObjectsReturned exception. | ||||
|         raise queryset._document.MultipleObjectsReturned( | ||||
|             u"2 or more items returned, instead of 1" | ||||
|             "2 or more items returned, instead of 1" | ||||
|         ) | ||||
|  | ||||
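A minimal sketch of the three outcomes of get() after the six.next -> next() change (model, database and titles are hypothetical; requires a running MongoDB):

    from mongoengine import Document, StringField, connect
    from mongoengine.errors import DoesNotExist, MultipleObjectsReturned

    connect("example_db")

    class Book(Document):
        title = StringField()

    Book.objects.delete()
    Book(title="Dune").save()
    print(Book.objects.get(title="Dune").title)   # exactly one match -> the document

    try:
        Book.objects.get(title="Foundation")
    except DoesNotExist:
        print("no match")

    Book(title="Dune").save()
    try:
        Book.objects.get(title="Dune")
    except MultipleObjectsReturned:
        print("2 or more items returned, instead of 1")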
|     def create(self, **kwargs): | ||||
| @@ -354,20 +347,20 @@ class BaseQuerySet(object): | ||||
|             ) | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             message = "Could not save document (%s)" | ||||
|             raise NotUniqueError(message % six.text_type(err)) | ||||
|             raise NotUniqueError(message % err) | ||||
|         except pymongo.errors.BulkWriteError as err: | ||||
|             # inserting documents that already have an _id field will | ||||
|             # give huge performance debt or raise | ||||
|             message = u"Bulk write error: (%s)" | ||||
|             raise BulkWriteError(message % six.text_type(err.details)) | ||||
|             message = "Bulk write error: (%s)" | ||||
|             raise BulkWriteError(message % err.details) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = "Could not save document (%s)" | ||||
|             if re.match("^E1100[01] duplicate key", six.text_type(err)): | ||||
|             if re.match("^E1100[01] duplicate key", str(err)): | ||||
|                 # E11000 - duplicate key error index | ||||
|                 # E11001 - duplicate key on update | ||||
|                 message = u"Tried to save duplicate unique keys (%s)" | ||||
|                 raise NotUniqueError(message % six.text_type(err)) | ||||
|             raise OperationError(message % six.text_type(err)) | ||||
|                 message = "Tried to save duplicate unique keys (%s)" | ||||
|                 raise NotUniqueError(message % err) | ||||
|             raise OperationError(message % err) | ||||
|  | ||||
|         # Apply inserted_ids to documents | ||||
|         for doc, doc_id in zip(docs, ids): | ||||
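Dropping six.text_type(err) is safe here because %-formatting with %s already calls str() on its argument; a quick check using a stand-in exception:

    err = Exception("E11000 duplicate key error")
    assert "Could not save document (%s)" % err == "Could not save document (%s)" % str(err)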
| @@ -539,12 +532,12 @@ class BaseQuerySet(object): | ||||
|             elif result.raw_result: | ||||
|                 return result.raw_result["n"] | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             raise NotUniqueError(u"Update failed (%s)" % six.text_type(err)) | ||||
|             raise NotUniqueError("Update failed (%s)" % err) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             if six.text_type(err) == u"multi not coded yet": | ||||
|                 message = u"update() method requires MongoDB 1.1.3+" | ||||
|             if str(err) == "multi not coded yet": | ||||
|                 message = "update() method requires MongoDB 1.1.3+" | ||||
|                 raise OperationError(message) | ||||
|             raise OperationError(u"Update failed (%s)" % six.text_type(err)) | ||||
|             raise OperationError("Update failed (%s)" % err) | ||||
|  | ||||
|     def upsert_one(self, write_concern=None, **update): | ||||
|         """Overwrite or add the first document matched by the query. | ||||
| @@ -662,9 +655,9 @@ class BaseQuerySet(object): | ||||
|                     **self._cursor_args | ||||
|                 ) | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             raise NotUniqueError(u"Update failed (%s)" % err) | ||||
|             raise NotUniqueError("Update failed (%s)" % err) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             raise OperationError(u"Update failed (%s)" % err) | ||||
|             raise OperationError("Update failed (%s)" % err) | ||||
|  | ||||
|         if full_response: | ||||
|             if result["value"] is not None: | ||||
| @@ -693,7 +686,7 @@ class BaseQuerySet(object): | ||||
|         return queryset.filter(pk=object_id).first() | ||||
|  | ||||
|     def in_bulk(self, object_ids): | ||||
|         """Retrieve a set of documents by their ids. | ||||
|         """Retrieve a set of documents by their ids. | ||||
|  | ||||
|         :param object_ids: a list or tuple of ObjectId's | ||||
|         :rtype: dict of ObjectId's as keys and collection-specific | ||||
| @@ -989,7 +982,7 @@ class BaseQuerySet(object): | ||||
|         .. versionchanged:: 0.5 - Added subfield support | ||||
|         """ | ||||
|         fields = {f: QueryFieldList.ONLY for f in fields} | ||||
|         self.only_fields = fields.keys() | ||||
|         self.only_fields = list(fields.keys()) | ||||
|         return self.fields(True, **fields) | ||||
|  | ||||
|     def exclude(self, *fields): | ||||
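The explicit list() around fields.keys() matters on Python 3, where keys() returns a live view rather than a list; a stored view would keep tracking later changes to the dict:

    fields = {"name": 1, "email": 1}
    view = fields.keys()            # live view
    snapshot = list(fields.keys())  # detached copy, as used above
    fields["age"] = 1
    assert "age" in view
    assert "age" not in snapshot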
| @@ -1340,13 +1333,13 @@ class BaseQuerySet(object): | ||||
|         map_f_scope = {} | ||||
|         if isinstance(map_f, Code): | ||||
|             map_f_scope = map_f.scope | ||||
|             map_f = six.text_type(map_f) | ||||
|             map_f = str(map_f) | ||||
|         map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) | ||||
|  | ||||
|         reduce_f_scope = {} | ||||
|         if isinstance(reduce_f, Code): | ||||
|             reduce_f_scope = reduce_f.scope | ||||
|             reduce_f = six.text_type(reduce_f) | ||||
|             reduce_f = str(reduce_f) | ||||
|         reduce_f_code = queryset._sub_js_fields(reduce_f) | ||||
|         reduce_f = Code(reduce_f_code, reduce_f_scope) | ||||
|  | ||||
| @@ -1356,7 +1349,7 @@ class BaseQuerySet(object): | ||||
|             finalize_f_scope = {} | ||||
|             if isinstance(finalize_f, Code): | ||||
|                 finalize_f_scope = finalize_f.scope | ||||
|                 finalize_f = six.text_type(finalize_f) | ||||
|                 finalize_f = str(finalize_f) | ||||
|             finalize_f_code = queryset._sub_js_fields(finalize_f) | ||||
|             finalize_f = Code(finalize_f_code, finalize_f_scope) | ||||
|             mr_args["finalize"] = finalize_f | ||||
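str() is enough for the map/reduce/finalize functions because bson.code.Code is a str subclass whose value is the JavaScript source (this sketch assumes pymongo, which provides bson, is installed):

    from bson.code import Code

    map_f = Code("function () { emit(this.tag, 1); }", {"limit": 10})
    assert isinstance(map_f, str)
    assert str(map_f) == "function () { emit(this.tag, 1); }"
    assert map_f.scope == {"limit": 10}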
| @@ -1372,7 +1365,7 @@ class BaseQuerySet(object): | ||||
|         else: | ||||
|             map_reduce_function = "map_reduce" | ||||
|  | ||||
|             if isinstance(output, six.string_types): | ||||
|             if isinstance(output, str): | ||||
|                 mr_args["out"] = output | ||||
|  | ||||
|             elif isinstance(output, dict): | ||||
| @@ -1559,7 +1552,7 @@ class BaseQuerySet(object): | ||||
|         if self._limit == 0 or self._none: | ||||
|             raise StopIteration | ||||
|  | ||||
|         raw_doc = six.next(self._cursor) | ||||
|         raw_doc = next(self._cursor) | ||||
|  | ||||
|         if self._as_pymongo: | ||||
|             return raw_doc | ||||
| @@ -1804,13 +1797,13 @@ class BaseQuerySet(object): | ||||
|             } | ||||
|         """ | ||||
|         total, data, types = self.exec_js(freq_func, field) | ||||
|         values = {types.get(k): int(v) for k, v in iteritems(data)} | ||||
|         values = {types.get(k): int(v) for k, v in data.items()} | ||||
|  | ||||
|         if normalize: | ||||
|             values = {k: float(v) / total for k, v in values.items()} | ||||
|  | ||||
|         frequencies = {} | ||||
|         for k, v in iteritems(values): | ||||
|         for k, v in values.items(): | ||||
|             if isinstance(k, float): | ||||
|                 if int(k) == k: | ||||
|                     k = int(k) | ||||
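The same loop with .items(), the Python 3 spelling of six's iteritems(); whole-number float keys collapse to ints as above (illustrative data only):

    data = {"python": 2, "js": 1, 2.0: 3}
    total = sum(data.values())                        # 6
    values = {k: v / total for k, v in data.items()}
    frequencies = {}
    for k, v in values.items():
        if isinstance(k, float) and int(k) == k:
            k = int(k)                                # 2.0 -> 2
        frequencies[k] = v
    assert frequencies[2] == 0.5 and abs(frequencies["python"] - 1 / 3) < 1e-9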
| @@ -1830,7 +1823,7 @@ class BaseQuerySet(object): | ||||
|             field_parts = field.split(".") | ||||
|             try: | ||||
|                 field = ".".join( | ||||
|                     f if isinstance(f, six.string_types) else f.db_field | ||||
|                     f if isinstance(f, str) else f.db_field | ||||
|                     for f in self._document._lookup_field(field_parts) | ||||
|                 ) | ||||
|                 db_field_paths.append(field) | ||||
| @@ -1842,7 +1835,7 @@ class BaseQuerySet(object): | ||||
|                 for subdoc in subclasses: | ||||
|                     try: | ||||
|                         subfield = ".".join( | ||||
|                             f if isinstance(f, six.string_types) else f.db_field | ||||
|                             f if isinstance(f, str) else f.db_field | ||||
|                             for f in subdoc._lookup_field(field_parts) | ||||
|                         ) | ||||
|                         db_field_paths.append(subfield) | ||||
| @@ -1916,7 +1909,7 @@ class BaseQuerySet(object): | ||||
|             field_name = match.group(1).split(".") | ||||
|             fields = self._document._lookup_field(field_name) | ||||
|             # Substitute the correct name for the field into the javascript | ||||
|             return u'["%s"]' % fields[-1].db_field | ||||
|             return '["%s"]' % fields[-1].db_field | ||||
|  | ||||
|         def field_path_sub(match): | ||||
|             # Extract just the field name, and look up the field objects | ||||
|   | ||||
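The substitution callback above is re.sub with a replacement function; a self-contained sketch with a hypothetical field mapping and a simplified pattern (MongoEngine's actual regex is more permissive):

    import re

    DB_FIELDS = {"first_name": "fn"}    # hypothetical Python name -> db_field

    def field_name_sub(match):
        # Substitute the correct db_field name into the JavaScript snippet.
        return '["%s"]' % DB_FIELDS[match.group(1)]

    js = "this[~first_name] == 'Ross'"
    assert re.sub(r"\[\s*~(\w+)\s*\]", field_name_sub, js) == 'this["fn"] == \'Ross\''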
| @@ -1,7 +1,7 @@ | ||||
| __all__ = ("QueryFieldList",) | ||||
|  | ||||
|  | ||||
| class QueryFieldList(object): | ||||
| class QueryFieldList: | ||||
|     """Object that handles combinations of .only() and .exclude() calls""" | ||||
|  | ||||
|     ONLY = 1 | ||||
| @@ -69,8 +69,6 @@ class QueryFieldList(object): | ||||
|     def __bool__(self): | ||||
|         return bool(self.fields) | ||||
|  | ||||
|     __nonzero__ = __bool__  # For Py2 support | ||||
|  | ||||
|     def as_dict(self): | ||||
|         field_list = {field: self.value for field in self.fields} | ||||
|         if self.slice: | ||||
| @@ -80,7 +78,7 @@ class QueryFieldList(object): | ||||
|         return field_list | ||||
|  | ||||
|     def reset(self): | ||||
|         self.fields = set([]) | ||||
|         self.fields = set() | ||||
|         self.slice = {} | ||||
|         self.value = self.ONLY | ||||
|  | ||||
|   | ||||
| @@ -4,7 +4,7 @@ from mongoengine.queryset.queryset import QuerySet | ||||
| __all__ = ("queryset_manager", "QuerySetManager") | ||||
|  | ||||
|  | ||||
| class QuerySetManager(object): | ||||
| class QuerySetManager: | ||||
|     """ | ||||
|     The default QuerySet Manager. | ||||
|  | ||||
|   | ||||
| @@ -1,5 +1,3 @@ | ||||
| import six | ||||
|  | ||||
| from mongoengine.errors import OperationError | ||||
| from mongoengine.queryset.base import ( | ||||
|     BaseQuerySet, | ||||
| @@ -127,8 +125,8 @@ class QuerySet(BaseQuerySet): | ||||
|         # Pull in ITER_CHUNK_SIZE docs from the database and store them in | ||||
|         # the result cache. | ||||
|         try: | ||||
|             for _ in six.moves.range(ITER_CHUNK_SIZE): | ||||
|                 self._result_cache.append(six.next(self)) | ||||
|             for _ in range(ITER_CHUNK_SIZE): | ||||
|                 self._result_cache.append(next(self)) | ||||
|         except StopIteration: | ||||
|             # Getting this exception means there are no more docs in the | ||||
|             # db cursor. Set _has_more to False so that we can use that | ||||
| @@ -143,10 +141,10 @@ class QuerySet(BaseQuerySet): | ||||
|             getting the count | ||||
|         """ | ||||
|         if with_limit_and_skip is False: | ||||
|             return super(QuerySet, self).count(with_limit_and_skip) | ||||
|             return super().count(with_limit_and_skip) | ||||
|  | ||||
|         if self._len is None: | ||||
|             self._len = super(QuerySet, self).count(with_limit_and_skip) | ||||
|             self._len = super().count(with_limit_and_skip) | ||||
|  | ||||
|         return self._len | ||||
|  | ||||
| @@ -180,9 +178,9 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|             return ".. queryset mid-iteration .." | ||||
|  | ||||
|         data = [] | ||||
|         for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): | ||||
|         for _ in range(REPR_OUTPUT_SIZE + 1): | ||||
|             try: | ||||
|                 data.append(six.next(self)) | ||||
|                 data.append(next(self)) | ||||
|             except StopIteration: | ||||
|                 break | ||||
|  | ||||
|   | ||||
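The __repr__ loop above with plain range()/next(): pull at most REPR_OUTPUT_SIZE + 1 docs so the preview can show that results were truncated. A generic sketch, not tied to a cursor:

    REPR_OUTPUT_SIZE = 3

    def preview(iterator, limit=REPR_OUTPUT_SIZE):
        data = []
        for _ in range(limit + 1):
            try:
                data.append(next(iterator))
            except StopIteration:
                break
        if len(data) > limit:
            data = data[:limit] + ["...more..."]
        return data

    assert preview(iter(range(10))) == [0, 1, 2, "...more..."]
    assert preview(iter(range(2))) == [0, 1]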
| @@ -3,14 +3,12 @@ from collections import defaultdict | ||||
| from bson import ObjectId, SON | ||||
| from bson.dbref import DBRef | ||||
| import pymongo | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base import UPDATE_OPERATORS | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import InvalidQueryError | ||||
|  | ||||
| __all__ = ("query", "update") | ||||
| __all__ = ("query", "update", "STRING_OPERATORS") | ||||
|  | ||||
| COMPARISON_OPERATORS = ( | ||||
|     "ne", | ||||
| @@ -101,7 +99,7 @@ def query(_doc_cls=None, **kwargs): | ||||
|             cleaned_fields = [] | ||||
|             for field in fields: | ||||
|                 append_field = True | ||||
|                 if isinstance(field, six.string_types): | ||||
|                 if isinstance(field, str): | ||||
|                     parts.append(field) | ||||
|                     append_field = False | ||||
|                 # is last and CachedReferenceField | ||||
| @@ -180,7 +178,7 @@ def query(_doc_cls=None, **kwargs): | ||||
|                     "$near" in value_dict or "$nearSphere" in value_dict | ||||
|                 ): | ||||
|                     value_son = SON() | ||||
|                     for k, v in iteritems(value_dict): | ||||
|                     for k, v in value_dict.items(): | ||||
|                         if k == "$maxDistance" or k == "$minDistance": | ||||
|                             continue | ||||
|                         value_son[k] = v | ||||
| @@ -281,7 +279,7 @@ def update(_doc_cls=None, **update): | ||||
|             appended_sub_field = False | ||||
|             for field in fields: | ||||
|                 append_field = True | ||||
|                 if isinstance(field, six.string_types): | ||||
|                 if isinstance(field, str): | ||||
|                     # Convert the S operator to $ | ||||
|                     if field == "S": | ||||
|                         field = "$" | ||||
| @@ -435,7 +433,9 @@ def _geo_operator(field, op, value): | ||||
|             value = {"$near": _infer_geometry(value)} | ||||
|         else: | ||||
|             raise NotImplementedError( | ||||
|                 'Geo method "%s" has not been implemented for a %s ' % (op, field._name) | ||||
|                 'Geo method "{}" has not been implemented for a {} '.format( | ||||
|                     op, field._name | ||||
|                 ) | ||||
|             ) | ||||
|     return value | ||||
|  | ||||
|   | ||||
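The rewritten error message is behaviour-neutral: %-interpolation and str.format produce the same text here, the commit just standardizes on format():

    op, field_name = "geo_within", "PointField"
    old = 'Geo method "%s" has not been implemented for a %s ' % (op, field_name)
    new = 'Geo method "{}" has not been implemented for a {} '.format(op, field_name)
    assert old == new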
| @@ -7,7 +7,7 @@ from mongoengine.queryset import transform | ||||
| __all__ = ("Q", "QNode") | ||||
|  | ||||
|  | ||||
| class QNodeVisitor(object): | ||||
| class QNodeVisitor: | ||||
|     """Base visitor class for visiting Q-object nodes in a query tree. | ||||
|     """ | ||||
|  | ||||
| @@ -79,7 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor): | ||||
|         return transform.query(self.document, **query.query) | ||||
|  | ||||
|  | ||||
| class QNode(object): | ||||
| class QNode: | ||||
|     """Base class for nodes in query trees.""" | ||||
|  | ||||
|     AND = 0 | ||||
| @@ -143,8 +143,6 @@ class QCombination(QNode): | ||||
|     def __bool__(self): | ||||
|         return bool(self.children) | ||||
|  | ||||
|     __nonzero__ = __bool__  # For Py2 support | ||||
|  | ||||
|     def accept(self, visitor): | ||||
|         for i in range(len(self.children)): | ||||
|             if isinstance(self.children[i], QNode): | ||||
| @@ -180,8 +178,6 @@ class Q(QNode): | ||||
|     def __bool__(self): | ||||
|         return bool(self.query) | ||||
|  | ||||
|     __nonzero__ = __bool__  # For Py2 support | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         return self.__class__ == other.__class__ and self.query == other.query | ||||
|  | ||||
|   | ||||
| @@ -15,11 +15,11 @@ try: | ||||
|     signals_available = True | ||||
| except ImportError: | ||||
|  | ||||
|     class Namespace(object): | ||||
|     class Namespace: | ||||
|         def signal(self, name, doc=None): | ||||
|             return _FakeSignal(name, doc) | ||||
|  | ||||
|     class _FakeSignal(object): | ||||
|     class _FakeSignal: | ||||
|         """If blinker is unavailable, create a fake class with the same | ||||
|         interface that allows sending of signals but will fail with an | ||||
|         error on anything else.  Instead of doing anything on send, it | ||||
|   | ||||