run pyupgrade --py3-plus
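pyupgrade's --py3-plus mode rewrites Python-2-era compatibility idioms into their Python-3-only forms, which is what every hunk below amounts to. A minimal before/after sketch of the transformations involved (the class and attribute names here are made up for illustration, not taken from mongoengine):

# Before: Python 2/3-compatible spellings
class Widget(object):                   # explicit `object` base is redundant in Python 3
    def __init__(self, items):
        super(Widget, self).__init__()  # two-argument form of super()
        self.items = list(items)
        self.label = u"widget"          # u"" prefix is a no-op in Python 3

    def __iter__(self):
        for item in self.items:         # manual re-yield loop
            yield item

# After: what `pyupgrade --py3-plus` produces
class Widget:
    def __init__(self, items):
        super().__init__()              # zero-argument super()
        self.items = list(items)
        self.label = "widget"

    def __iter__(self):
        yield from self.items           # PEP 380 delegation

It also replaces aliased exception names such as socket.error with OSError (the two have been the same class since Python 3.3), which accounts for the except-clause change in the EmailField hunk below.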
| @@ -51,7 +51,7 @@ class BaseDict(dict): | ||||
|         if isinstance(instance, BaseDocument): | ||||
|             self._instance = weakref.proxy(instance) | ||||
|         self._name = name | ||||
|         super(BaseDict, self).__init__(dict_items) | ||||
|         super().__init__(dict_items) | ||||
|  | ||||
|     def get(self, key, default=None): | ||||
|         # get does not use __getitem__ by default so we must override it as well | ||||
| @@ -61,18 +61,18 @@ class BaseDict(dict): | ||||
|             return default | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
|         value = super(BaseDict, self).__getitem__(key) | ||||
|         value = super().__getitem__(key) | ||||
|  | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||
|             value = BaseDict(value, None, "{}.{}".format(self._name, key)) | ||||
|             super(BaseDict, self).__setitem__(key, value) | ||||
|             super().__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||
|             value = BaseList(value, None, "{}.{}".format(self._name, key)) | ||||
|             super(BaseDict, self).__setitem__(key, value) | ||||
|             super().__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         return value | ||||
|  | ||||
| @@ -115,13 +115,13 @@ class BaseList(list): | ||||
|         if isinstance(instance, BaseDocument): | ||||
|             self._instance = weakref.proxy(instance) | ||||
|         self._name = name | ||||
|         super(BaseList, self).__init__(list_items) | ||||
|         super().__init__(list_items) | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
|         # change index to positive value because MongoDB does not support negative one | ||||
|         if isinstance(key, int) and key < 0: | ||||
|             key = len(self) + key | ||||
|         value = super(BaseList, self).__getitem__(key) | ||||
|         value = super().__getitem__(key) | ||||
|  | ||||
|         if isinstance(key, slice): | ||||
|             # When receiving a slice operator, we don't convert the structure and bind | ||||
| @@ -134,18 +134,17 @@ class BaseList(list): | ||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||
|             # Replace dict by BaseDict | ||||
|             value = BaseDict(value, None, "{}.{}".format(self._name, key)) | ||||
|             super(BaseList, self).__setitem__(key, value) | ||||
|             super().__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||
|             # Replace list by BaseList | ||||
|             value = BaseList(value, None, "{}.{}".format(self._name, key)) | ||||
|             super(BaseList, self).__setitem__(key, value) | ||||
|             super().__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         return value | ||||
|  | ||||
|     def __iter__(self): | ||||
|         for v in super(BaseList, self).__iter__(): | ||||
|             yield v | ||||
|         yield from super().__iter__() | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         self.instance = None | ||||
| @@ -163,7 +162,7 @@ class BaseList(list): | ||||
|             # instead, we simply marks the whole list as changed | ||||
|             changed_key = None | ||||
|  | ||||
|         result = super(BaseList, self).__setitem__(key, value) | ||||
|         result = super().__setitem__(key, value) | ||||
|         self._mark_as_changed(changed_key) | ||||
|         return result | ||||
|  | ||||
| @@ -190,7 +189,7 @@ class BaseList(list): | ||||
|  | ||||
| class EmbeddedDocumentList(BaseList): | ||||
|     def __init__(self, list_items, instance, name): | ||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) | ||||
|         super().__init__(list_items, instance, name) | ||||
|         self._instance = instance | ||||
|  | ||||
|     @classmethod | ||||
| @@ -355,7 +354,7 @@ class EmbeddedDocumentList(BaseList): | ||||
|         return len(values) | ||||
|  | ||||
|  | ||||
| class StrictDict(object): | ||||
| class StrictDict: | ||||
|     __slots__ = () | ||||
|     _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} | ||||
|     _classes = {} | ||||
| @@ -455,9 +454,7 @@ class LazyReference(DBRef): | ||||
|         self.document_type = document_type | ||||
|         self._cached_doc = cached_doc | ||||
|         self.passthrough = passthrough | ||||
|         super(LazyReference, self).__init__( | ||||
|             self.document_type._get_collection_name(), pk | ||||
|         ) | ||||
|         super().__init__(self.document_type._get_collection_name(), pk) | ||||
|  | ||||
|     def __getitem__(self, name): | ||||
|         if not self.passthrough: | ||||
|   | ||||
| @@ -30,7 +30,7 @@ __all__ = ("BaseDocument", "NON_FIELD_ERRORS") | ||||
| NON_FIELD_ERRORS = "__all__" | ||||
|  | ||||
|  | ||||
| class BaseDocument(object): | ||||
| class BaseDocument: | ||||
|     # TODO simplify how `_changed_fields` is used. | ||||
|     # Currently, handling of `_changed_fields` seems unnecessarily convoluted: | ||||
|     # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's | ||||
| @@ -161,7 +161,7 @@ class BaseDocument(object): | ||||
|                 default = default() | ||||
|             setattr(self, field_name, default) | ||||
|         else: | ||||
|             super(BaseDocument, self).__delattr__(*args, **kwargs) | ||||
|             super().__delattr__(*args, **kwargs) | ||||
|  | ||||
|     def __setattr__(self, name, value): | ||||
|         # Handle dynamic data only if an initialised dynamic document | ||||
| @@ -208,9 +208,9 @@ class BaseDocument(object): | ||||
|             and self__created | ||||
|             and name == self._meta.get("id_field") | ||||
|         ): | ||||
|             super(BaseDocument, self).__setattr__("_created", False) | ||||
|             super().__setattr__("_created", False) | ||||
|  | ||||
|         super(BaseDocument, self).__setattr__(name, value) | ||||
|         super().__setattr__(name, value) | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         data = {} | ||||
|   | ||||
| @@ -13,7 +13,7 @@ from mongoengine.errors import DeprecatedError, ValidationError | ||||
| __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") | ||||
|  | ||||
|  | ||||
| class BaseField(object): | ||||
| class BaseField: | ||||
|     """A base class for fields in a MongoDB document. Instances of this class | ||||
|     may be added to subclasses of `Document` to define a document's schema. | ||||
|  | ||||
| @@ -310,7 +310,7 @@ class ComplexBaseField(BaseField): | ||||
|             if hasattr(instance._data[self.name], "_dereferenced"): | ||||
|                 instance._data[self.name]._dereferenced = True | ||||
|  | ||||
|         value = super(ComplexBaseField, self).__get__(instance, owner) | ||||
|         value = super().__get__(instance, owner) | ||||
|  | ||||
|         # Convert lists / values so we can watch for any changes on them | ||||
|         if isinstance(value, (list, tuple)): | ||||
| @@ -541,7 +541,7 @@ class GeoJsonBaseField(BaseField): | ||||
|         self._name = "%sField" % self._type | ||||
|         if not auto_index: | ||||
|             self._geo_index = False | ||||
|         super(GeoJsonBaseField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Validate the GeoJson object based on its type.""" | ||||
|   | ||||
| @@ -22,7 +22,7 @@ class DocumentMetaclass(type): | ||||
|     # TODO lower complexity of this method | ||||
|     def __new__(mcs, name, bases, attrs): | ||||
|         flattened_bases = mcs._get_bases(bases) | ||||
|         super_new = super(DocumentMetaclass, mcs).__new__ | ||||
|         super_new = super().__new__ | ||||
|  | ||||
|         # If a base class just call super | ||||
|         metaclass = attrs.get("my_metaclass") | ||||
| @@ -231,8 +231,7 @@ class DocumentMetaclass(type): | ||||
|             if base is object: | ||||
|                 continue | ||||
|             yield base | ||||
|             for child_base in mcs.__get_bases(base.__bases__): | ||||
|                 yield child_base | ||||
|             yield from mcs.__get_bases(base.__bases__) | ||||
|  | ||||
|     @classmethod | ||||
|     def _import_classes(mcs): | ||||
| @@ -250,7 +249,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|  | ||||
|     def __new__(mcs, name, bases, attrs): | ||||
|         flattened_bases = mcs._get_bases(bases) | ||||
|         super_new = super(TopLevelDocumentMetaclass, mcs).__new__ | ||||
|         super_new = super().__new__ | ||||
|  | ||||
|         # Set default _meta data if base class, otherwise get user defined meta | ||||
|         if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| import re | ||||
|  | ||||
|  | ||||
| class LazyRegexCompiler(object): | ||||
| class LazyRegexCompiler: | ||||
|     """Descriptor to allow lazy compilation of regex""" | ||||
|  | ||||
|     def __init__(self, pattern, flags=0): | ||||
|   | ||||
| @@ -395,8 +395,8 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | ||||
|  | ||||
|         if new_conn_settings != prev_conn_setting: | ||||
|             err_msg = ( | ||||
|                 u"A different connection with alias `{}` was already " | ||||
|                 u"registered. Use disconnect() first" | ||||
|                 "A different connection with alias `{}` was already " | ||||
|                 "registered. Use disconnect() first" | ||||
|             ).format(alias) | ||||
|             raise ConnectionFailure(err_msg) | ||||
|     else: | ||||
|   | ||||
| @@ -16,7 +16,7 @@ __all__ = ( | ||||
| ) | ||||
|  | ||||
|  | ||||
| class switch_db(object): | ||||
| class switch_db: | ||||
|     """switch_db alias context manager. | ||||
|  | ||||
|     Example :: | ||||
| @@ -57,7 +57,7 @@ class switch_db(object): | ||||
|         self.cls._collection = self.collection | ||||
|  | ||||
|  | ||||
| class switch_collection(object): | ||||
| class switch_collection: | ||||
|     """switch_collection alias context manager. | ||||
|  | ||||
|     Example :: | ||||
| @@ -99,7 +99,7 @@ class switch_collection(object): | ||||
|         self.cls._get_collection_name = self.ori_get_collection_name | ||||
|  | ||||
|  | ||||
| class no_dereference(object): | ||||
| class no_dereference: | ||||
|     """no_dereference context manager. | ||||
|  | ||||
|     Turns off all dereferencing in Documents for the duration of the context | ||||
| @@ -139,7 +139,7 @@ class no_dereference(object): | ||||
|         return self.cls | ||||
|  | ||||
|  | ||||
| class no_sub_classes(object): | ||||
| class no_sub_classes: | ||||
|     """no_sub_classes context manager. | ||||
|  | ||||
|     Only returns instances of this class and no sub (inherited) classes:: | ||||
| @@ -167,7 +167,7 @@ class no_sub_classes(object): | ||||
|         self.cls._subclasses = self.cls_initial_subclasses | ||||
|  | ||||
|  | ||||
| class query_counter(object): | ||||
| class query_counter: | ||||
|     """Query_counter context manager to get the number of queries. | ||||
|     This works by updating the `profiling_level` of the database so that all queries get logged, | ||||
|     resetting the db.system.profile collection at the beginning of the context and counting the new entries. | ||||
| @@ -234,7 +234,7 @@ class query_counter(object): | ||||
|  | ||||
|     def __repr__(self): | ||||
|         """repr query_counter as the number of queries.""" | ||||
|         return u"%s" % self._get_count() | ||||
|         return "%s" % self._get_count() | ||||
|  | ||||
|     def _get_count(self): | ||||
|         """Get the number of queries by counting the current number of entries in db.system.profile | ||||
|   | ||||
| @@ -14,7 +14,7 @@ from mongoengine.fields import DictField, ListField, MapField, ReferenceField | ||||
| from mongoengine.queryset import QuerySet | ||||
|  | ||||
|  | ||||
| class DeReference(object): | ||||
| class DeReference: | ||||
|     def __call__(self, items, max_depth=1, instance=None, name=None): | ||||
|         """ | ||||
|         Cheaply dereferences the items to a set depth. | ||||
|   | ||||
| @@ -79,7 +79,7 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): | ||||
|     __hash__ = None | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         super(EmbeddedDocument, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self._instance = None | ||||
|         self._changed_fields = [] | ||||
|  | ||||
| @@ -92,7 +92,7 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): | ||||
|         return not self.__eq__(other) | ||||
|  | ||||
|     def to_mongo(self, *args, **kwargs): | ||||
|         data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) | ||||
|         data = super().to_mongo(*args, **kwargs) | ||||
|  | ||||
|         # remove _id from the SON if it's in it and it's None | ||||
|         if "_id" in data and data["_id"] is None: | ||||
| @@ -256,7 +256,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | ||||
|         return db.create_collection(collection_name, **opts) | ||||
|  | ||||
|     def to_mongo(self, *args, **kwargs): | ||||
|         data = super(Document, self).to_mongo(*args, **kwargs) | ||||
|         data = super().to_mongo(*args, **kwargs) | ||||
|  | ||||
|         # If '_id' is None, try and set it from self._data. If that | ||||
|         # doesn't exist either, remove '_id' from the SON completely. | ||||
| @@ -427,14 +427,14 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | ||||
|                 self.cascade_save(**kwargs) | ||||
|  | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             message = u"Tried to save duplicate unique keys (%s)" | ||||
|             message = "Tried to save duplicate unique keys (%s)" | ||||
|             raise NotUniqueError(message % err) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = "Could not save document (%s)" | ||||
|             if re.match("^E1100[01] duplicate key", str(err)): | ||||
|                 # E11000 - duplicate key error index | ||||
|                 # E11001 - duplicate key on update | ||||
|                 message = u"Tried to save duplicate unique keys (%s)" | ||||
|                 message = "Tried to save duplicate unique keys (%s)" | ||||
|                 raise NotUniqueError(message % err) | ||||
|             raise OperationError(message % err) | ||||
|  | ||||
| @@ -639,7 +639,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | ||||
|                 write_concern=write_concern, _from_doc_delete=True | ||||
|             ) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = u"Could not delete document (%s)" % err.message | ||||
|             message = "Could not delete document (%s)" % err.message | ||||
|             raise OperationError(message) | ||||
|         signals.post_delete.send(self.__class__, document=self, **signal_kwargs) | ||||
|  | ||||
| @@ -988,10 +988,10 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | ||||
|                     indexes.append(index) | ||||
|  | ||||
|         # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed | ||||
|         if [(u"_id", 1)] not in indexes: | ||||
|             indexes.append([(u"_id", 1)]) | ||||
|         if [("_id", 1)] not in indexes: | ||||
|             indexes.append([("_id", 1)]) | ||||
|         if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"): | ||||
|             indexes.append([(u"_cls", 1)]) | ||||
|             indexes.append([("_cls", 1)]) | ||||
|  | ||||
|         return indexes | ||||
|  | ||||
| @@ -1015,14 +1015,14 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | ||||
|         extra = [index for index in existing if index not in required] | ||||
|  | ||||
|         # if { _cls: 1 } is missing, make sure it's *really* necessary | ||||
|         if [(u"_cls", 1)] in missing: | ||||
|         if [("_cls", 1)] in missing: | ||||
|             cls_obsolete = False | ||||
|             for index in existing: | ||||
|                 if includes_cls(index) and index not in extra: | ||||
|                     cls_obsolete = True | ||||
|                     break | ||||
|             if cls_obsolete: | ||||
|                 missing.remove([(u"_cls", 1)]) | ||||
|                 missing.remove([("_cls", 1)]) | ||||
|  | ||||
|         return {"missing": missing, "extra": extra} | ||||
|  | ||||
| @@ -1055,7 +1055,7 @@ class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): | ||||
|             setattr(self, field_name, None) | ||||
|             self._dynamic_fields[field_name].null = False | ||||
|         else: | ||||
|             super(DynamicDocument, self).__delattr__(*args, **kwargs) | ||||
|             super().__delattr__(*args, **kwargs) | ||||
|  | ||||
|  | ||||
| class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): | ||||
| @@ -1083,7 +1083,7 @@ class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): | ||||
|             setattr(self, field_name, None) | ||||
|  | ||||
|  | ||||
| class MapReduceDocument(object): | ||||
| class MapReduceDocument: | ||||
|     """A document returned from a map/reduce query. | ||||
|  | ||||
|     :param collection: An instance of :class:`~pymongo.Collection` | ||||
|   | ||||
| @@ -85,7 +85,7 @@ class ValidationError(AssertionError): | ||||
|     _message = None | ||||
|  | ||||
|     def __init__(self, message="", **kwargs): | ||||
|         super(ValidationError, self).__init__(message) | ||||
|         super().__init__(message) | ||||
|         self.errors = kwargs.get("errors", {}) | ||||
|         self.field_name = kwargs.get("field_name") | ||||
|         self.message = message | ||||
| @@ -97,7 +97,7 @@ class ValidationError(AssertionError): | ||||
|         return "{}({},)".format(self.__class__.__name__, self.message) | ||||
|  | ||||
|     def __getattribute__(self, name): | ||||
|         message = super(ValidationError, self).__getattribute__(name) | ||||
|         message = super().__getattribute__(name) | ||||
|         if name == "message": | ||||
|             if self.field_name: | ||||
|                 message = "%s" % message | ||||
|   | ||||
| @@ -103,7 +103,7 @@ class StringField(BaseField): | ||||
|         self.regex = re.compile(regex) if regex else None | ||||
|         self.max_length = max_length | ||||
|         self.min_length = min_length | ||||
|         super(StringField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if isinstance(value, str): | ||||
| @@ -151,7 +151,7 @@ class StringField(BaseField): | ||||
|             # escape unsafe characters which could lead to a re.error | ||||
|             value = re.escape(value) | ||||
|             value = re.compile(regex % value, flags) | ||||
|         return super(StringField, self).prepare_query_value(op, value) | ||||
|         return super().prepare_query_value(op, value) | ||||
|  | ||||
|  | ||||
| class URLField(StringField): | ||||
| @@ -175,17 +175,17 @@ class URLField(StringField): | ||||
|     def __init__(self, url_regex=None, schemes=None, **kwargs): | ||||
|         self.url_regex = url_regex or self._URL_REGEX | ||||
|         self.schemes = schemes or self._URL_SCHEMES | ||||
|         super(URLField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         # Check first if the scheme is valid | ||||
|         scheme = value.split("://")[0].lower() | ||||
|         if scheme not in self.schemes: | ||||
|             self.error(u"Invalid scheme {} in URL: {}".format(scheme, value)) | ||||
|             self.error("Invalid scheme {} in URL: {}".format(scheme, value)) | ||||
|  | ||||
|         # Then check full URL | ||||
|         if not self.url_regex.match(value): | ||||
|             self.error(u"Invalid URL: {}".format(value)) | ||||
|             self.error("Invalid URL: {}".format(value)) | ||||
|  | ||||
|  | ||||
| class EmailField(StringField): | ||||
| @@ -218,7 +218,7 @@ class EmailField(StringField): | ||||
|         re.IGNORECASE, | ||||
|     ) | ||||
|  | ||||
|     error_msg = u"Invalid email address: %s" | ||||
|     error_msg = "Invalid email address: %s" | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
| @@ -242,7 +242,7 @@ class EmailField(StringField): | ||||
|         self.domain_whitelist = domain_whitelist or [] | ||||
|         self.allow_utf8_user = allow_utf8_user | ||||
|         self.allow_ip_domain = allow_ip_domain | ||||
|         super(EmailField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate_user_part(self, user_part): | ||||
|         """Validate the user part of the email address. Return True if | ||||
| @@ -269,13 +269,13 @@ class EmailField(StringField): | ||||
|                 try: | ||||
|                     socket.inet_pton(addr_family, domain_part[1:-1]) | ||||
|                     return True | ||||
|                 except (socket.error, UnicodeEncodeError): | ||||
|                 except (OSError, UnicodeEncodeError): | ||||
|                     pass | ||||
|  | ||||
|         return False | ||||
|  | ||||
|     def validate(self, value): | ||||
|         super(EmailField, self).validate(value) | ||||
|         super().validate(value) | ||||
|  | ||||
|         if "@" not in value: | ||||
|             self.error(self.error_msg % value) | ||||
| @@ -310,7 +310,7 @@ class IntField(BaseField): | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
|         super(IntField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
| @@ -335,7 +335,7 @@ class IntField(BaseField): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return super(IntField, self).prepare_query_value(op, int(value)) | ||||
|         return super().prepare_query_value(op, int(value)) | ||||
|  | ||||
|  | ||||
| class LongField(BaseField): | ||||
| @@ -343,7 +343,7 @@ class LongField(BaseField): | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
|         super(LongField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
| @@ -371,7 +371,7 @@ class LongField(BaseField): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return super(LongField, self).prepare_query_value(op, int(value)) | ||||
|         return super().prepare_query_value(op, int(value)) | ||||
|  | ||||
|  | ||||
| class FloatField(BaseField): | ||||
| @@ -379,7 +379,7 @@ class FloatField(BaseField): | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
|         super(FloatField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
| @@ -408,7 +408,7 @@ class FloatField(BaseField): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return super(FloatField, self).prepare_query_value(op, float(value)) | ||||
|         return super().prepare_query_value(op, float(value)) | ||||
|  | ||||
|  | ||||
| class DecimalField(BaseField): | ||||
| @@ -455,7 +455,7 @@ class DecimalField(BaseField): | ||||
|         self.precision = precision | ||||
|         self.rounding = rounding | ||||
|  | ||||
|         super(DecimalField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if value is None: | ||||
| @@ -493,7 +493,7 @@ class DecimalField(BaseField): | ||||
|             self.error("Decimal value is too large") | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return super(DecimalField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|  | ||||
| class BooleanField(BaseField): | ||||
| @@ -533,7 +533,7 @@ class DateTimeField(BaseField): | ||||
|     def validate(self, value): | ||||
|         new_value = self.to_mongo(value) | ||||
|         if not isinstance(new_value, (datetime.datetime, datetime.date)): | ||||
|             self.error(u'cannot parse date "%s"' % value) | ||||
|             self.error('cannot parse date "%s"' % value) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if value is None: | ||||
| @@ -590,19 +590,19 @@ class DateTimeField(BaseField): | ||||
|                     return None | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|  | ||||
| class DateField(DateTimeField): | ||||
|     def to_mongo(self, value): | ||||
|         value = super(DateField, self).to_mongo(value) | ||||
|         value = super().to_mongo(value) | ||||
|         # drop hours, minutes, seconds | ||||
|         if isinstance(value, datetime.datetime): | ||||
|             value = datetime.datetime(value.year, value.month, value.day) | ||||
|         return value | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         value = super(DateField, self).to_python(value) | ||||
|         value = super().to_python(value) | ||||
|         # convert datetime to date | ||||
|         if isinstance(value, datetime.datetime): | ||||
|             value = datetime.date(value.year, value.month, value.day) | ||||
| @@ -636,7 +636,7 @@ class ComplexDateTimeField(StringField): | ||||
|         """ | ||||
|         self.separator = separator | ||||
|         self.format = separator.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"]) | ||||
|         super(ComplexDateTimeField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def _convert_from_datetime(self, val): | ||||
|         """ | ||||
| @@ -667,14 +667,14 @@ class ComplexDateTimeField(StringField): | ||||
|         if instance is None: | ||||
|             return self | ||||
|  | ||||
|         data = super(ComplexDateTimeField, self).__get__(instance, owner) | ||||
|         data = super().__get__(instance, owner) | ||||
|  | ||||
|         if isinstance(data, datetime.datetime) or data is None: | ||||
|             return data | ||||
|         return self._convert_from_string(data) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         super(ComplexDateTimeField, self).__set__(instance, value) | ||||
|         super().__set__(instance, value) | ||||
|         value = instance._data[self.name] | ||||
|         if value is not None: | ||||
|             instance._data[self.name] = self._convert_from_datetime(value) | ||||
| @@ -696,9 +696,7 @@ class ComplexDateTimeField(StringField): | ||||
|         return self._convert_from_datetime(value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return super(ComplexDateTimeField, self).prepare_query_value( | ||||
|             op, self._convert_from_datetime(value) | ||||
|         ) | ||||
|         return super().prepare_query_value(op, self._convert_from_datetime(value)) | ||||
|  | ||||
|  | ||||
| class EmbeddedDocumentField(BaseField): | ||||
| @@ -718,7 +716,7 @@ class EmbeddedDocumentField(BaseField): | ||||
|             ) | ||||
|  | ||||
|         self.document_type_obj = document_type | ||||
|         super(EmbeddedDocumentField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
| @@ -779,7 +777,7 @@ class EmbeddedDocumentField(BaseField): | ||||
|                     "Querying the embedded document '%s' failed, due to an invalid query value" | ||||
|                     % (self.document_type._class_name,) | ||||
|                 ) | ||||
|         super(EmbeddedDocumentField, self).prepare_query_value(op, value) | ||||
|         super().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|  | ||||
| @@ -795,9 +793,7 @@ class GenericEmbeddedDocumentField(BaseField): | ||||
|     """ | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return super(GenericEmbeddedDocumentField, self).prepare_query_value( | ||||
|             op, self.to_mongo(value) | ||||
|         ) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if isinstance(value, dict): | ||||
| @@ -885,7 +881,7 @@ class DynamicField(BaseField): | ||||
|                 value = doc_cls._get_db().dereference(value["_ref"]) | ||||
|             return doc_cls._from_son(value) | ||||
|  | ||||
|         return super(DynamicField, self).to_python(value) | ||||
|         return super().to_python(value) | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
|         return member_name | ||||
| @@ -893,7 +889,7 @@ class DynamicField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if isinstance(value, str): | ||||
|             return StringField().prepare_query_value(op, value) | ||||
|         return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|     def validate(self, value, clean=True): | ||||
|         if hasattr(value, "validate"): | ||||
| @@ -914,7 +910,7 @@ class ListField(ComplexBaseField): | ||||
|         self.field = field | ||||
|         self.max_length = max_length | ||||
|         kwargs.setdefault("default", lambda: []) | ||||
|         super(ListField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         if instance is None: | ||||
| @@ -928,7 +924,7 @@ class ListField(ComplexBaseField): | ||||
|             and value | ||||
|         ): | ||||
|             instance._data[self.name] = [self.field.build_lazyref(x) for x in value] | ||||
|         return super(ListField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a list of valid fields is being used.""" | ||||
| @@ -942,7 +938,7 @@ class ListField(ComplexBaseField): | ||||
|         if self.max_length is not None and len(value) > self.max_length: | ||||
|             self.error("List is too long") | ||||
|  | ||||
|         super(ListField, self).validate(value) | ||||
|         super().validate(value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         # Validate that the `set` operator doesn't contain more items than `max_length`. | ||||
| @@ -963,7 +959,7 @@ class ListField(ComplexBaseField): | ||||
|  | ||||
|             return self.field.prepare_query_value(op, value) | ||||
|  | ||||
|         return super(ListField, self).prepare_query_value(op, value) | ||||
|         return super().prepare_query_value(op, value) | ||||
|  | ||||
|  | ||||
| class EmbeddedDocumentListField(ListField): | ||||
| @@ -984,9 +980,7 @@ class EmbeddedDocumentListField(ListField): | ||||
|         :param kwargs: Keyword arguments passed directly into the parent | ||||
|          :class:`~mongoengine.ListField`. | ||||
|         """ | ||||
|         super(EmbeddedDocumentListField, self).__init__( | ||||
|             field=EmbeddedDocumentField(document_type), **kwargs | ||||
|         ) | ||||
|         super().__init__(field=EmbeddedDocumentField(document_type), **kwargs) | ||||
|  | ||||
|  | ||||
| class SortedListField(ListField): | ||||
| @@ -1012,10 +1006,10 @@ class SortedListField(ListField): | ||||
|             self._ordering = kwargs.pop("ordering") | ||||
|         if "reverse" in kwargs.keys(): | ||||
|             self._order_reverse = kwargs.pop("reverse") | ||||
|         super(SortedListField, self).__init__(field, **kwargs) | ||||
|         super().__init__(field, **kwargs) | ||||
|  | ||||
|     def to_mongo(self, value, use_db_field=True, fields=None): | ||||
|         value = super(SortedListField, self).to_mongo(value, use_db_field, fields) | ||||
|         value = super().to_mongo(value, use_db_field, fields) | ||||
|         if self._ordering is not None: | ||||
|             return sorted( | ||||
|                 value, key=itemgetter(self._ordering), reverse=self._order_reverse | ||||
| @@ -1068,7 +1062,7 @@ class DictField(ComplexBaseField): | ||||
|         self._auto_dereference = False | ||||
|  | ||||
|         kwargs.setdefault("default", lambda: {}) | ||||
|         super(DictField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """Make sure that a list of valid fields is being used.""" | ||||
| @@ -1090,7 +1084,7 @@ class DictField(ComplexBaseField): | ||||
|             self.error( | ||||
|                 'Invalid dictionary key name - keys may not startswith "$" characters' | ||||
|             ) | ||||
|         super(DictField, self).validate(value) | ||||
|         super().validate(value) | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
|         return DictField(db_field=member_name) | ||||
| @@ -1119,7 +1113,7 @@ class DictField(ComplexBaseField): | ||||
|                 } | ||||
|             return self.field.prepare_query_value(op, value) | ||||
|  | ||||
|         return super(DictField, self).prepare_query_value(op, value) | ||||
|         return super().prepare_query_value(op, value) | ||||
|  | ||||
|  | ||||
| class MapField(DictField): | ||||
| @@ -1134,7 +1128,7 @@ class MapField(DictField): | ||||
|         # XXX ValidationError raised outside of the "validate" method. | ||||
|         if not isinstance(field, BaseField): | ||||
|             self.error("Argument to MapField constructor must be a valid field") | ||||
|         super(MapField, self).__init__(field=field, *args, **kwargs) | ||||
|         super().__init__(field=field, *args, **kwargs) | ||||
|  | ||||
|  | ||||
| class ReferenceField(BaseField): | ||||
| @@ -1205,7 +1199,7 @@ class ReferenceField(BaseField): | ||||
|         self.dbref = dbref | ||||
|         self.document_type_obj = document_type | ||||
|         self.reverse_delete_rule = reverse_delete_rule | ||||
|         super(ReferenceField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
| @@ -1238,7 +1232,7 @@ class ReferenceField(BaseField): | ||||
|             else: | ||||
|                 instance._data[self.name] = cls._from_son(dereferenced) | ||||
|  | ||||
|         return super(ReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def to_mongo(self, document): | ||||
|         if isinstance(document, DBRef): | ||||
| @@ -1289,7 +1283,7 @@ class ReferenceField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return None | ||||
|         super(ReferenceField, self).prepare_query_value(op, value) | ||||
|         super().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
| @@ -1336,7 +1330,7 @@ class CachedReferenceField(BaseField): | ||||
|         self.auto_sync = auto_sync | ||||
|         self.document_type_obj = document_type | ||||
|         self.fields = fields | ||||
|         super(CachedReferenceField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def start_listener(self): | ||||
|         from mongoengine import signals | ||||
| @@ -1394,7 +1388,7 @@ class CachedReferenceField(BaseField): | ||||
|             else: | ||||
|                 instance._data[self.name] = self.document_type._from_son(dereferenced) | ||||
|  | ||||
|         return super(CachedReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def to_mongo(self, document, use_db_field=True, fields=None): | ||||
|         id_field_name = self.document_type._meta["id_field"] | ||||
| @@ -1493,7 +1487,7 @@ class GenericReferenceField(BaseField): | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         choices = kwargs.pop("choices", None) | ||||
|         super(GenericReferenceField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|         self.choices = [] | ||||
|         # Keep the choices as a list of allowed Document class names | ||||
|         if choices: | ||||
| @@ -1517,7 +1511,7 @@ class GenericReferenceField(BaseField): | ||||
|             value = value.get("_cls") | ||||
|         elif isinstance(value, Document): | ||||
|             value = value._class_name | ||||
|         super(GenericReferenceField, self)._validate_choices(value) | ||||
|         super()._validate_choices(value) | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         if instance is None: | ||||
| @@ -1533,7 +1527,7 @@ class GenericReferenceField(BaseField): | ||||
|             else: | ||||
|                 instance._data[self.name] = dereferenced | ||||
|  | ||||
|         return super(GenericReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, (Document, DBRef, dict, SON)): | ||||
| @@ -1597,13 +1591,13 @@ class BinaryField(BaseField): | ||||
|  | ||||
|     def __init__(self, max_bytes=None, **kwargs): | ||||
|         self.max_bytes = max_bytes | ||||
|         super(BinaryField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         """Handle bytearrays in python 3.1""" | ||||
|         if isinstance(value, bytearray): | ||||
|             value = bytes(value) | ||||
|         return super(BinaryField, self).__set__(instance, value) | ||||
|         return super().__set__(instance, value) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return Binary(value) | ||||
| @@ -1621,14 +1615,14 @@ class BinaryField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|         return super(BinaryField, self).prepare_query_value(op, self.to_mongo(value)) | ||||
|         return super().prepare_query_value(op, self.to_mongo(value)) | ||||
|  | ||||
|  | ||||
| class GridFSError(Exception): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class GridFSProxy(object): | ||||
| class GridFSProxy: | ||||
|     """Proxy object to handle writing and reading of files to and from GridFS | ||||
|  | ||||
|     .. versionadded:: 0.4 | ||||
| @@ -1808,7 +1802,7 @@ class FileField(BaseField): | ||||
|     def __init__( | ||||
|         self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs | ||||
|     ): | ||||
|         super(FileField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|         self.collection_name = collection_name | ||||
|         self.db_alias = db_alias | ||||
|  | ||||
| @@ -1953,7 +1947,7 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|         img.save(io, img_format, progressive=progressive) | ||||
|         io.seek(0) | ||||
|  | ||||
|         return super(ImageGridFsProxy, self).put( | ||||
|         return super().put( | ||||
|             io, width=w, height=h, format=img_format, thumbnail_id=thumb_id, **kwargs | ||||
|         ) | ||||
|  | ||||
| @@ -1963,7 +1957,7 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|         if out and out.thumbnail_id: | ||||
|             self.fs.delete(out.thumbnail_id) | ||||
|  | ||||
|         return super(ImageGridFsProxy, self).delete() | ||||
|         return super().delete() | ||||
|  | ||||
|     def _put_thumbnail(self, thumbnail, format, progressive, **kwargs): | ||||
|         w, h = thumbnail.size | ||||
| @@ -2042,7 +2036,7 @@ class ImageField(FileField): | ||||
|  | ||||
|             setattr(self, att_name, value) | ||||
|  | ||||
|         super(ImageField, self).__init__(collection_name=collection_name, **kwargs) | ||||
|         super().__init__(collection_name=collection_name, **kwargs) | ||||
|  | ||||
|  | ||||
| class SequenceField(BaseField): | ||||
| @@ -2094,7 +2088,7 @@ class SequenceField(BaseField): | ||||
|         self.value_decorator = ( | ||||
|             value_decorator if callable(value_decorator) else self.VALUE_DECORATOR | ||||
|         ) | ||||
|         super(SequenceField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def generate(self): | ||||
|         """ | ||||
| @@ -2155,7 +2149,7 @@ class SequenceField(BaseField): | ||||
|             ) | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         value = super(SequenceField, self).__get__(instance, owner) | ||||
|         value = super().__get__(instance, owner) | ||||
|         if value is None and instance._initialised: | ||||
|             value = self.generate() | ||||
|             instance._data[self.name] = value | ||||
| @@ -2168,7 +2162,7 @@ class SequenceField(BaseField): | ||||
|         if value is None and instance._initialised: | ||||
|             value = self.generate() | ||||
|  | ||||
|         return super(SequenceField, self).__set__(instance, value) | ||||
|         return super().__set__(instance, value) | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         """ | ||||
| @@ -2202,7 +2196,7 @@ class UUIDField(BaseField): | ||||
|         .. versionchanged:: 0.6.19 | ||||
|         """ | ||||
|         self._binary = binary | ||||
|         super(UUIDField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if not self._binary: | ||||
| @@ -2440,7 +2434,7 @@ class LazyReferenceField(BaseField): | ||||
|         self.passthrough = passthrough | ||||
|         self.document_type_obj = document_type | ||||
|         self.reverse_delete_rule = reverse_delete_rule | ||||
|         super(LazyReferenceField, self).__init__(**kwargs) | ||||
|         super().__init__(**kwargs) | ||||
|  | ||||
|     @property | ||||
|     def document_type(self): | ||||
| @@ -2483,7 +2477,7 @@ class LazyReferenceField(BaseField): | ||||
|         if value: | ||||
|             instance._data[self.name] = value | ||||
|  | ||||
|         return super(LazyReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if isinstance(value, LazyReference): | ||||
| @@ -2547,7 +2541,7 @@ class LazyReferenceField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return None | ||||
|         super(LazyReferenceField, self).prepare_query_value(op, value) | ||||
|         super().prepare_query_value(op, value) | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|     def lookup_member(self, member_name): | ||||
| @@ -2574,12 +2568,12 @@ class GenericLazyReferenceField(GenericReferenceField): | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         self.passthrough = kwargs.pop("passthrough", False) | ||||
|         super(GenericLazyReferenceField, self).__init__(*args, **kwargs) | ||||
|         super().__init__(*args, **kwargs) | ||||
|  | ||||
|     def _validate_choices(self, value): | ||||
|         if isinstance(value, LazyReference): | ||||
|             value = value.document_type._class_name | ||||
|         super(GenericLazyReferenceField, self)._validate_choices(value) | ||||
|         super()._validate_choices(value) | ||||
|  | ||||
|     def build_lazyref(self, value): | ||||
|         if isinstance(value, LazyReference): | ||||
| @@ -2608,7 +2602,7 @@ class GenericLazyReferenceField(GenericReferenceField): | ||||
|         if value: | ||||
|             instance._data[self.name] = value | ||||
|  | ||||
|         return super(GenericLazyReferenceField, self).__get__(instance, owner) | ||||
|         return super().__get__(instance, owner) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if isinstance(value, LazyReference) and value.pk is None: | ||||
| @@ -2616,7 +2610,7 @@ class GenericLazyReferenceField(GenericReferenceField): | ||||
|                 "You can only reference documents once they have been" | ||||
|                 " saved to the database" | ||||
|             ) | ||||
|         return super(GenericLazyReferenceField, self).validate(value) | ||||
|         return super().validate(value) | ||||
|  | ||||
|     def to_mongo(self, document): | ||||
|         if document is None: | ||||
| @@ -2635,4 +2629,4 @@ class GenericLazyReferenceField(GenericReferenceField): | ||||
|                 ) | ||||
|             ) | ||||
|         else: | ||||
|             return super(GenericLazyReferenceField, self).to_mongo(document) | ||||
|             return super().to_mongo(document) | ||||
|   | ||||
| @@ -37,7 +37,7 @@ DENY = 3 | ||||
| PULL = 4 | ||||
|  | ||||
|  | ||||
| class BaseQuerySet(object): | ||||
| class BaseQuerySet: | ||||
|     """A set of results returned from a query. Wraps a MongoDB cursor, | ||||
|     providing :class:`~mongoengine.Document` objects as the results. | ||||
|     """ | ||||
| @@ -262,7 +262,7 @@ class BaseQuerySet(object): | ||||
|         # If we were able to retrieve the 2nd doc, rewind the cursor and | ||||
|         # raise the MultipleObjectsReturned exception. | ||||
|         queryset.rewind() | ||||
|         message = u"%d items returned, instead of 1" % queryset.count() | ||||
|         message = "%d items returned, instead of 1" % queryset.count() | ||||
|         raise queryset._document.MultipleObjectsReturned(message) | ||||
|  | ||||
|     def create(self, **kwargs): | ||||
| @@ -351,14 +351,14 @@ class BaseQuerySet(object): | ||||
|         except pymongo.errors.BulkWriteError as err: | ||||
|             # inserting documents that already have an _id field will | ||||
|             # give huge performance debt or raise | ||||
|             message = u"Bulk write error: (%s)" | ||||
|             message = "Bulk write error: (%s)" | ||||
|             raise BulkWriteError(message % err.details) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             message = "Could not save document (%s)" | ||||
|             if re.match("^E1100[01] duplicate key", str(err)): | ||||
|                 # E11000 - duplicate key error index | ||||
|                 # E11001 - duplicate key on update | ||||
|                 message = u"Tried to save duplicate unique keys (%s)" | ||||
|                 message = "Tried to save duplicate unique keys (%s)" | ||||
|                 raise NotUniqueError(message % err) | ||||
|             raise OperationError(message % err) | ||||
|  | ||||
| @@ -655,9 +655,9 @@ class BaseQuerySet(object): | ||||
|                     **self._cursor_args | ||||
|                 ) | ||||
|         except pymongo.errors.DuplicateKeyError as err: | ||||
|             raise NotUniqueError(u"Update failed (%s)" % err) | ||||
|             raise NotUniqueError("Update failed (%s)" % err) | ||||
|         except pymongo.errors.OperationFailure as err: | ||||
|             raise OperationError(u"Update failed (%s)" % err) | ||||
|             raise OperationError("Update failed (%s)" % err) | ||||
|  | ||||
|         if full_response: | ||||
|             if result["value"] is not None: | ||||
| @@ -686,7 +686,7 @@ class BaseQuerySet(object): | ||||
|         return queryset.filter(pk=object_id).first() | ||||
|  | ||||
|     def in_bulk(self, object_ids): | ||||
|         u"""Retrieve a set of documents by their ids. | ||||
|         """Retrieve a set of documents by their ids. | ||||
|  | ||||
|         :param object_ids: a list or tuple of ``ObjectId``\ s | ||||
|         :rtype: dict of ObjectIds as keys and collection-specific | ||||
| @@ -1922,7 +1922,7 @@ class BaseQuerySet(object): | ||||
|             field_name = match.group(1).split(".") | ||||
|             fields = self._document._lookup_field(field_name) | ||||
|             # Substitute the correct name for the field into the javascript | ||||
|             return u'["%s"]' % fields[-1].db_field | ||||
|             return '["%s"]' % fields[-1].db_field | ||||
|  | ||||
|         def field_path_sub(match): | ||||
|             # Extract just the field name, and look up the field objects | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| __all__ = ("QueryFieldList",) | ||||
|  | ||||
|  | ||||
| class QueryFieldList(object): | ||||
| class QueryFieldList: | ||||
|     """Object that handles combinations of .only() and .exclude() calls""" | ||||
|  | ||||
|     ONLY = 1 | ||||
|   | ||||
| @@ -4,7 +4,7 @@ from mongoengine.queryset.queryset import QuerySet | ||||
| __all__ = ("queryset_manager", "QuerySetManager") | ||||
|  | ||||
|  | ||||
| class QuerySetManager(object): | ||||
| class QuerySetManager: | ||||
|     """ | ||||
|     The default QuerySet Manager. | ||||
|  | ||||
|   | ||||
| @@ -141,10 +141,10 @@ class QuerySet(BaseQuerySet): | ||||
|             getting the count | ||||
|         """ | ||||
|         if with_limit_and_skip is False: | ||||
|             return super(QuerySet, self).count(with_limit_and_skip) | ||||
|             return super().count(with_limit_and_skip) | ||||
|  | ||||
|         if self._len is None: | ||||
|             self._len = super(QuerySet, self).count(with_limit_and_skip) | ||||
|             self._len = super().count(with_limit_and_skip) | ||||
|  | ||||
|         return self._len | ||||
|  | ||||
|   | ||||
| @@ -7,7 +7,7 @@ from mongoengine.queryset import transform | ||||
| __all__ = ("Q", "QNode") | ||||
|  | ||||
|  | ||||
| class QNodeVisitor(object): | ||||
| class QNodeVisitor: | ||||
|     """Base visitor class for visiting Q-object nodes in a query tree. | ||||
|     """ | ||||
|  | ||||
| @@ -79,7 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor): | ||||
|         return transform.query(self.document, **query.query) | ||||
|  | ||||
|  | ||||
| class QNode(object): | ||||
| class QNode: | ||||
|     """Base class for nodes in query trees.""" | ||||
|  | ||||
|     AND = 0 | ||||
|   | ||||
| @@ -15,11 +15,11 @@ try: | ||||
|     signals_available = True | ||||
| except ImportError: | ||||
|  | ||||
|     class Namespace(object): | ||||
|     class Namespace: | ||||
|         def signal(self, name, doc=None): | ||||
|             return _FakeSignal(name, doc) | ||||
|  | ||||
|     class _FakeSignal(object): | ||||
|     class _FakeSignal: | ||||
|         """If blinker is unavailable, create a fake class with the same | ||||
|         interface that allows sending of signals but will fail with an | ||||
|         error on anything else.  Instead of doing anything on send, it | ||||
|   | ||||
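For reproducibility, a sweep like this is typically either run once from the shell or kept enforced through pre-commit; the hook revision below is a placeholder, not necessarily what was used for this commit:

# one-off run over the tracked Python files
pip install pyupgrade
git ls-files '*.py' | xargs pyupgrade --py3-plus

# or as a pre-commit hook (.pre-commit-config.yaml); rev is illustrative
repos:
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.7.4          # placeholder version
    hooks:
      - id: pyupgrade
        args: [--py3-plus]

Note that pyupgrade exits non-zero when it rewrites files, so a one-off xargs run may report a failure status even though the changes were applied.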