diff --git a/.landscape.yml b/.landscape.yml index a27bbb03..4f13a5eb 100644 --- a/.landscape.yml +++ b/.landscape.yml @@ -5,17 +5,12 @@ pylint: options: additional-builtins: - # add xrange and long as valid built-ins. In Python 3, xrange is - # translated into range and long is translated into int via 2to3 (see - # "use_2to3" in setup.py). This should be removed when we drop Python - # 2 support (which probably won't happen any time soon). - - xrange + # add long as a valid built-in. - long pyflakes: disable: - # undefined variables are already covered by pylint (and exclude - # xrange & long) + # undefined variables are already covered by pylint (and exclude long) - F821 ignore-paths:
diff --git a/.travis.yml b/.travis.yml index 62bbacb1..5d04571a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,13 +1,10 @@ # For full coverage, we'd have to test all supported Python, MongoDB, and # PyMongo combinations. However, that would result in an overly long build # with a very large number of jobs, hence we only test a subset of all the -# combinations: -# * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, -# tested against Python v2.7, v3.5, v3.6, v3.7, v3.8, PyPy and PyPy3. -# * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo -# combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. -# * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. -# +# combinations. +# * Python v3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup. +# Other combinations are also tested. See below for the details or check the Travis jobs. + # We should periodically check MongoDB Server versions supported by MongoDB # Inc., add newly released versions to the test matrix, and remove versions # which have reached their End of Life. See: @@ -16,19 +13,15 @@ # # Reminder: Update README.rst if you change MongoDB versions we test. - language: python +dist: xenial python: -- 2.7 - 3.5 - 3.6 - 3.7 - 3.8 -- pypy - pypy3 -dist: xenial - env: global: - MONGODB_3_4=3.4.17 @@ -39,6 +32,8 @@ env: - PYMONGO_3_6=3.6 - PYMONGO_3_9=3.9 - PYMONGO_3_10=3.10 + + - MAIN_PYTHON_VERSION="3.7" matrix: - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} @@ -47,8 +42,6 @@ matrix: fast_finish: true include: - - python: 2.7 - env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4} - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} - python: 3.7 @@ -67,28 +60,24 @@ install: - pip install --upgrade pip - pip install coveralls - pip install flake8 flake8-import-order - - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0 - - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) + - pip install tox # tox dry run to set up the tox venv (we run a mock test). - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" # Install black for Python v3.7 only.
- - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pip install black; fi before_script: - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork - - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for Python 2.7 only - - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi # Run black for Python 3.7 only + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then flake8 .; else echo "flake8 only runs on py37"; fi + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then black --check .; else echo "black only runs on py37"; fi - mongo --eval 'db.version();' # Make sure mongo is awake script: - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" -# For now only submit coveralls for Python v2.7. Python v3.x currently shows -# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible -# code in a separate dir and runs tests on that. after_success: -- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; else echo "coveralls only sent for py27"; fi +- if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi notifications: irc: irc.freenode.org#mongoengine @@ -110,11 +99,11 @@ deploy: distributions: "sdist bdist_wheel" # Only deploy on tagged commits (aka GitHub releases) and only for the parent - # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4. + # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4. # We run Travis against many different Python, PyMongo, and MongoDB versions # and we don't want the deploy to occur multiple times). on: tags: true repo: MongoEngine/mongoengine condition: ($PYMONGO = ${PYMONGO_3_10}) && ($MONGODB = ${MONGODB_3_4}) - python: 2.7 + python: 3.7
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 56bae31f..4afcd69e 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -20,19 +20,23 @@ post to the `user group ` Supported Interpreters ---------------------- -MongoEngine supports CPython 2.7 and newer. Language -features not supported by all interpreters can not be used. -The codebase is written in python 2 so you must be using python 2 -when developing new features. Compatibility of the library with Python 3 -relies on the 2to3 package that gets executed as part of the installation -build. You should ensure that your code is properly converted by -`2to3 `_. +MongoEngine supports CPython 3.5 and newer as well as PyPy3. +Language features not supported by all interpreters cannot be used. + +Python3 codebase +---------------------- + +Since 0.20, the codebase is exclusively Python 3. + +Earlier versions were exclusively Python 2, and relied on 2to3 to support Python 3 installs. +Travis runs the tests against the main Python 3.x versions. + Style Guide ----------- -MongoEngine uses `black `_ for code -formatting. +MongoEngine uses `black `_ for code formatting. +Black runs as part of the CI, so the build will fail if the code is not formatted properly.
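As a rough, illustrative sketch (not MongoEngine code) of the mechanical rewrite this migration applies throughout the codebase, ``iteritems()`` becomes ``dict.items()``, ``%``-formatting becomes ``str.format()``, and two-argument ``super(Class, self)`` becomes zero-argument ``super()``::

    class Legacy(dict):
        def describe(self):
            # Python 2/3-compatible style removed by the patch:
            #     for k, v in six.iteritems(self): ...
            #     "%s=%r" % (k, v)
            #     super(Legacy, self).keys()
            # Python 3-only style now used instead:
            parts = ["{}={!r}".format(k, v) for k, v in self.items()]
            keys = super().keys()  # zero-argument super()
            return "{} -> {}".format(sorted(keys), ", ".join(parts))

    print(Legacy(b=2, a=1).describe())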
Testing ------- diff --git a/README.rst b/README.rst index b5c95888..619970af 100644 --- a/README.rst +++ b/README.rst @@ -42,13 +42,14 @@ to both create the virtual environment and install the package. Otherwise, you c download the source from `GitHub `_ and run ``python setup.py install``. +The support for Python2 was dropped with MongoEngine 0.20.0 + Dependencies ============ All of the dependencies can easily be installed via `pip `_. At the very least, you'll need these two packages to use MongoEngine: - pymongo>=3.4 -- six>=1.10.0 If you utilize a ``DateTimeField``, you might also use a more flexible date parser: @@ -58,6 +59,10 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``: - Pillow>=2.0.0 +If you need to use signals: + +- blinker>=1.3 + Examples ======== Some simple examples of what MongoEngine code looks like: diff --git a/benchmarks/test_inserts.py b/benchmarks/test_inserts.py index fd017bae..4ecd48de 100644 --- a/benchmarks/test_inserts.py +++ b/benchmarks/test_inserts.py @@ -4,12 +4,14 @@ import timeit def main(): setup = """ from pymongo import MongoClient + connection = MongoClient() connection.drop_database('mongoengine_benchmark_test') """ stmt = """ from pymongo import MongoClient + connection = MongoClient() db = connection.mongoengine_benchmark_test @@ -56,6 +58,7 @@ myNoddys = noddy.find() setup = """ from pymongo import MongoClient + connection = MongoClient() connection.drop_database('mongoengine_benchmark_test') connection.close() diff --git a/docs/changelog.rst b/docs/changelog.rst index fb73a036..75e1b81a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,6 +6,7 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- ATTENTION: Drop support for Python2 - Add Mongo 4.0 to Travis - Bump development Status classifier to Production/Stable #2232 - Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630 diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 250347bf..f25bab8f 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -153,7 +153,7 @@ inherited classes like so: :: # 4. 
Remove indexes info = collection.index_information() - indexes_to_drop = [key for key, value in info.iteritems() + indexes_to_drop = [key for key, value in info.items() if '_types' in dict(value['key'])] for index in indexes_to_drop: collection.drop_index(index) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index dcc1f092..d3bff2b3 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,8 +1,6 @@ import weakref from bson import DBRef -import six -from six import iteritems from mongoengine.common import _import_class from mongoengine.errors import DoesNotExist, MultipleObjectsReturned @@ -53,7 +51,7 @@ class BaseDict(dict): if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) self._name = name - super(BaseDict, self).__init__(dict_items) + super().__init__(dict_items) def get(self, key, default=None): # get does not use __getitem__ by default so we must override it as well @@ -63,18 +61,18 @@ class BaseDict(dict): return default def __getitem__(self, key): - value = super(BaseDict, self).__getitem__(key) + value = super().__getitem__(key) EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument) and value._instance is None: value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): - value = BaseDict(value, None, "%s.%s" % (self._name, key)) - super(BaseDict, self).__setitem__(key, value) + value = BaseDict(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): - value = BaseList(value, None, "%s.%s" % (self._name, key)) - super(BaseDict, self).__setitem__(key, value) + value = BaseList(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance return value @@ -99,7 +97,7 @@ class BaseDict(dict): def _mark_as_changed(self, key=None): if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed("%s.%s" % (self._name, key)) + self._instance._mark_as_changed("{}.{}".format(self._name, key)) else: self._instance._mark_as_changed(self._name) @@ -117,13 +115,13 @@ class BaseList(list): if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) self._name = name - super(BaseList, self).__init__(list_items) + super().__init__(list_items) def __getitem__(self, key): # change index to positive value because MongoDB does not support negative one if isinstance(key, int) and key < 0: key = len(self) + key - value = super(BaseList, self).__getitem__(key) + value = super().__getitem__(key) if isinstance(key, slice): # When receiving a slice operator, we don't convert the structure and bind @@ -135,19 +133,18 @@ class BaseList(list): value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): # Replace dict by BaseDict - value = BaseDict(value, None, "%s.%s" % (self._name, key)) - super(BaseList, self).__setitem__(key, value) + value = BaseDict(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): # Replace list by BaseList - value = BaseList(value, None, "%s.%s" % (self._name, key)) - super(BaseList, self).__setitem__(key, value) + value = BaseList(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = 
self._instance return value def __iter__(self): - for v in super(BaseList, self).__iter__(): - yield v + yield from super().__iter__() def __getstate__(self): self.instance = None @@ -165,7 +162,7 @@ class BaseList(list): # instead, we simply marks the whole list as changed changed_key = None - result = super(BaseList, self).__setitem__(key, value) + result = super().__setitem__(key, value) self._mark_as_changed(changed_key) return result @@ -180,30 +177,19 @@ class BaseList(list): __iadd__ = mark_as_changed_wrapper(list.__iadd__) __imul__ = mark_as_changed_wrapper(list.__imul__) - if six.PY2: - # Under py3 __setslice__, __delslice__ and __getslice__ - # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter - # so we mimic this under python 2 - def __setslice__(self, i, j, sequence): - return self.__setitem__(slice(i, j), sequence) - - def __delslice__(self, i, j): - return self.__delitem__(slice(i, j)) - - def __getslice__(self, i, j): - return self.__getitem__(slice(i, j)) - def _mark_as_changed(self, key=None): if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self))) + self._instance._mark_as_changed( + "{}.{}".format(self._name, key % len(self)) + ) else: self._instance._mark_as_changed(self._name) class EmbeddedDocumentList(BaseList): def __init__(self, list_items, instance, name): - super(EmbeddedDocumentList, self).__init__(list_items, instance, name) + super().__init__(list_items, instance, name) self._instance = instance @classmethod @@ -213,7 +199,7 @@ class EmbeddedDocumentList(BaseList): """ for key, expected_value in kwargs.items(): doc_val = getattr(embedded_doc, key) - if doc_val != expected_value and six.text_type(doc_val) != expected_value: + if doc_val != expected_value and str(doc_val) != expected_value: return False return True @@ -368,13 +354,13 @@ class EmbeddedDocumentList(BaseList): return len(values) -class StrictDict(object): +class StrictDict: __slots__ = () _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} _classes = {} def __init__(self, **kwargs): - for k, v in iteritems(kwargs): + for k, v in kwargs.items(): setattr(self, k, v) def __getitem__(self, key): @@ -422,13 +408,13 @@ class StrictDict(object): return (key for key in self.__slots__ if hasattr(self, key)) def __len__(self): - return len(list(iteritems(self))) + return len(list(self.items())) def __eq__(self, other): - return self.items() == other.items() + return list(self.items()) == list(other.items()) def __ne__(self, other): - return self.items() != other.items() + return not (self == other) @classmethod def create(cls, allowed_keys): @@ -443,7 +429,7 @@ class StrictDict(object): def __repr__(self): return "{%s}" % ", ".join( - '"{0!s}": {1!r}'.format(k, v) for k, v in self.items() + '"{!s}": {!r}'.format(k, v) for k, v in self.items() ) cls._classes[allowed_keys] = SpecificStrictDict @@ -468,9 +454,7 @@ class LazyReference(DBRef): self.document_type = document_type self._cached_doc = cached_doc self.passthrough = passthrough - super(LazyReference, self).__init__( - self.document_type._get_collection_name(), pk - ) + super().__init__(self.document_type._get_collection_name(), pk) def __getitem__(self, name): if not self.passthrough: @@ -488,4 +472,4 @@ class LazyReference(DBRef): raise AttributeError() def __repr__(self): - return "" % (self.document_type, self.pk) + return "".format(self.document_type, self.pk) diff --git a/mongoengine/base/document.py 
b/mongoengine/base/document.py index ad691362..e697fe40 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -1,11 +1,10 @@ import copy + import numbers from functools import partial from bson import DBRef, ObjectId, SON, json_util import pymongo -import six -from six import iteritems from mongoengine import signals from mongoengine.base.common import get_document @@ -25,14 +24,13 @@ from mongoengine.errors import ( OperationError, ValidationError, ) -from mongoengine.python_support import Hashable __all__ = ("BaseDocument", "NON_FIELD_ERRORS") NON_FIELD_ERRORS = "__all__" -class BaseDocument(object): +class BaseDocument: # TODO simplify how `_changed_fields` is used. # Currently, handling of `_changed_fields` seems unnecessarily convoluted: # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's @@ -92,10 +90,10 @@ class BaseDocument(object): # if so raise an Exception. if not self._dynamic and (self._meta.get("strict", True) or _created): _undefined_fields = set(values.keys()) - set( - self._fields.keys() + ["id", "pk", "_cls", "_text_score"] + list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"] ) if _undefined_fields: - msg = ('The fields "{0}" do not exist on the document "{1}"').format( + msg = ('The fields "{}" do not exist on the document "{}"').format( _undefined_fields, self._class_name ) raise FieldDoesNotExist(msg) @@ -110,7 +108,7 @@ class BaseDocument(object): # Assign default values to the instance. # We set default values only for fields loaded from DB. See # https://github.com/mongoengine/mongoengine/issues/399 for more info. - for key, field in iteritems(self._fields): + for key, field in self._fields.items(): if self._db_field_map.get(key, key) in __only_fields: continue value = getattr(self, key, None) @@ -122,14 +120,14 @@ class BaseDocument(object): # Set passed values after initialisation if self._dynamic: dynamic_data = {} - for key, value in iteritems(values): + for key, value in values.items(): if key in self._fields or key == "_id": setattr(self, key, value) else: dynamic_data[key] = value else: FileField = _import_class("FileField") - for key, value in iteritems(values): + for key, value in values.items(): key = self._reverse_db_field_map.get(key, key) if key in self._fields or key in ("id", "pk", "_cls"): if __auto_convert and value is not None: @@ -145,7 +143,7 @@ class BaseDocument(object): if self._dynamic: self._dynamic_lock = False - for key, value in iteritems(dynamic_data): + for key, value in dynamic_data.items(): setattr(self, key, value) # Flag initialised @@ -163,7 +161,7 @@ class BaseDocument(object): default = default() setattr(self, field_name, default) else: - super(BaseDocument, self).__delattr__(*args, **kwargs) + super().__delattr__(*args, **kwargs) def __setattr__(self, name, value): # Handle dynamic data only if an initialised dynamic document @@ -210,9 +208,9 @@ class BaseDocument(object): and self__created and name == self._meta.get("id_field") ): - super(BaseDocument, self).__setattr__("_created", False) + super().__setattr__("_created", False) - super(BaseDocument, self).__setattr__(name, value) + super().__setattr__(name, value) def __getstate__(self): data = {} @@ -288,16 +286,13 @@ class BaseDocument(object): except (UnicodeEncodeError, UnicodeDecodeError): u = "[Bad Unicode data]" repr_type = str if u is None else type(u) - return repr_type("<%s: %s>" % (self.__class__.__name__, u)) + return repr_type("<{}: {}>".format(self.__class__.__name__, u)) def __str__(self): # TODO this 
could be simpler? if hasattr(self, "__unicode__"): - if six.PY3: - return self.__unicode__() - else: - return six.text_type(self).encode("utf-8") - return six.text_type("%s object" % self.__class__.__name__) + return self.__unicode__() + return "%s object" % self.__class__.__name__ def __eq__(self, other): if ( @@ -446,7 +441,7 @@ class BaseDocument(object): pk = self.pk elif self._instance and hasattr(self._instance, "pk"): pk = self._instance.pk - message = "ValidationError (%s:%s) " % (self._class_name, pk) + message = "ValidationError ({}:{}) ".format(self._class_name, pk) raise ValidationError(message, errors=errors) def to_json(self, *args, **kwargs): @@ -519,7 +514,7 @@ class BaseDocument(object): if "." in key: key, rest = key.split(".", 1) key = self._db_field_map.get(key, key) - key = "%s.%s" % (key, rest) + key = "{}.{}".format(key, rest) else: key = self._db_field_map.get(key, key) @@ -578,10 +573,10 @@ class BaseDocument(object): if not hasattr(data, "items"): iterator = enumerate(data) else: - iterator = iteritems(data) + iterator = data.items() for index_or_key, value in iterator: - item_key = "%s%s." % (base_key, index_or_key) + item_key = "{}{}.".format(base_key, index_or_key) # don't check anything lower if this key is already marked # as changed. if item_key[:-1] in changed_fields: @@ -589,7 +584,7 @@ class BaseDocument(object): if hasattr(value, "_get_changed_fields"): changed = value._get_changed_fields() - changed_fields += ["%s%s" % (item_key, k) for k in changed if k] + changed_fields += ["{}{}".format(item_key, k) for k in changed if k] elif isinstance(value, (list, tuple, dict)): self._nestable_types_changed_fields(changed_fields, item_key, value) @@ -620,7 +615,7 @@ class BaseDocument(object): if isinstance(data, EmbeddedDocument): # Find all embedded fields that have been changed changed = data._get_changed_fields() - changed_fields += ["%s%s" % (key, k) for k in changed if k] + changed_fields += ["{}{}".format(key, k) for k in changed if k] elif isinstance(data, (list, tuple, dict)): if hasattr(field, "field") and isinstance( field.field, (ReferenceField, GenericReferenceField) @@ -670,7 +665,7 @@ class BaseDocument(object): del set_data["_id"] # Determine if any changed items were actually unset. - for path, value in set_data.items(): + for path, value in list(set_data.items()): if value or isinstance( value, (numbers.Number, bool) ): # Account for 0 and True that are truthy @@ -744,7 +739,7 @@ class BaseDocument(object): # Convert SON to a data dict, making sure each key is a string and # corresponds to the right db field. 
data = {} - for key, value in iteritems(son): + for key, value in son.items(): key = str(key) key = cls._db_field_map.get(key, key) data[key] = value @@ -759,7 +754,7 @@ class BaseDocument(object): if not _auto_dereference: fields = copy.deepcopy(fields) - for field_name, field in iteritems(fields): + for field_name, field in fields.items(): field._auto_dereference = _auto_dereference if field.db_field in data: value = data[field.db_field] @@ -774,17 +769,16 @@ class BaseDocument(object): if errors_dict: errors = "\n".join( - ["Field '%s' - %s" % (k, v) for k, v in errors_dict.items()] + ["Field '{}' - {}".format(k, v) for k, v in errors_dict.items()] ) - msg = "Invalid data to create a `%s` instance.\n%s" % ( - cls._class_name, - errors, + msg = "Invalid data to create a `{}` instance.\n{}".format( + cls._class_name, errors, ) raise InvalidDocumentError(msg) # In STRICT documents, remove any keys that aren't in cls._fields if cls.STRICT: - data = {k: v for k, v in iteritems(data) if k in cls._fields} + data = {k: v for k, v in data.items() if k in cls._fields} obj = cls( __auto_convert=False, _created=created, __only_fields=only_fields, **data @@ -831,7 +825,7 @@ class BaseDocument(object): @classmethod def _build_index_spec(cls, spec): """Build a PyMongo index spec from a MongoEngine index spec.""" - if isinstance(spec, six.string_types): + if isinstance(spec, str): spec = {"fields": [spec]} elif isinstance(spec, (list, tuple)): spec = {"fields": list(spec)} @@ -928,7 +922,7 @@ class BaseDocument(object): # Add any unique_with fields to the back of the index spec if field.unique_with: - if isinstance(field.unique_with, six.string_types): + if isinstance(field.unique_with, str): field.unique_with = [field.unique_with] # Convert unique_with field names to real field names @@ -949,7 +943,8 @@ class BaseDocument(object): # Add the new index to the list fields = [ - ("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields + ("{}{}".format(namespace, f), pymongo.ASCENDING) + for f in unique_fields ] index = {"fields": fields, "unique": True, "sparse": sparse} unique_indexes.append(index) @@ -1006,7 +1001,7 @@ class BaseDocument(object): elif field._geo_index: field_name = field.db_field if parent_field: - field_name = "%s.%s" % (parent_field, field_name) + field_name = "{}.{}".format(parent_field, field_name) geo_indices.append({"fields": [(field_name, field._geo_index)]}) return geo_indices @@ -1175,9 +1170,6 @@ class BaseDocument(object): else [value] ) return sep.join( - [ - six.text_type(dict(field.choices).get(val, val)) - for val in values or [] - ] + [str(dict(field.choices).get(val, val)) for val in values or []] ) return value diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index 379098e5..7bab813c 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -4,8 +4,6 @@ import weakref from bson import DBRef, ObjectId, SON import pymongo -import six -from six import iteritems from mongoengine.base.common import UPDATE_OPERATORS from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList @@ -15,7 +13,7 @@ from mongoengine.errors import DeprecatedError, ValidationError __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") -class BaseField(object): +class BaseField: """A base class for fields in a MongoDB document. Instances of this class may be added to subclasses of `Document` to define a document's schema. 
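The index-building code above assembles PyMongo index specs as lists of ``(field, direction)`` tuples; in particular, a ``unique_with`` declaration ends up as a compound unique index. A hypothetical document (names invented for illustration) that would exercise that path::

    from mongoengine import Document, StringField

    class User(Document):
        # Invented example: `unique_with` yields a compound unique index over
        # username and org, built internally as a spec along the lines of
        # {"fields": [("username", 1), ("org", 1)], "unique": True, "sparse": False}.
        org = StringField()
        username = StringField(unique_with="org")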
@@ -87,13 +85,11 @@ class BaseField(object): self._owner_document = None # Make sure db_field is a string (if it's explicitly defined). - if self.db_field is not None and not isinstance( - self.db_field, six.string_types - ): + if self.db_field is not None and not isinstance(self.db_field, str): raise TypeError("db_field should be a string.") # Make sure db_field doesn't contain any forbidden characters. - if isinstance(self.db_field, six.string_types) and ( + if isinstance(self.db_field, str) and ( "." in self.db_field or "\0" in self.db_field or self.db_field.startswith("$") @@ -216,14 +212,12 @@ class BaseField(object): # Choices which are other types of Documents if isinstance(value, (Document, EmbeddedDocument)): if not any(isinstance(value, c) for c in choice_list): - self.error( - "Value must be an instance of %s" % (six.text_type(choice_list)) - ) + self.error("Value must be an instance of %s" % (choice_list)) # Choices which are types other than Documents else: values = value if isinstance(value, (list, tuple)) else [value] if len(set(values) - set(choice_list)): - self.error("Value must be one of %s" % six.text_type(choice_list)) + self.error("Value must be one of %s" % str(choice_list)) def _validate(self, value, **kwargs): # Check the Choices Constraint @@ -311,7 +305,7 @@ class ComplexBaseField(BaseField): if hasattr(instance._data[self.name], "_dereferenced"): instance._data[self.name]._dereferenced = True - value = super(ComplexBaseField, self).__get__(instance, owner) + value = super().__get__(instance, owner) # Convert lists / values so we can watch for any changes on them if isinstance(value, (list, tuple)): @@ -340,7 +334,7 @@ class ComplexBaseField(BaseField): def to_python(self, value): """Convert a MongoDB-compatible type to a Python type.""" - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_python"): @@ -394,7 +388,7 @@ class ComplexBaseField(BaseField): EmbeddedDocument = _import_class("EmbeddedDocument") GenericReferenceField = _import_class("GenericReferenceField") - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_mongo"): @@ -418,11 +412,11 @@ class ComplexBaseField(BaseField): if self.field: value_dict = { key: self.field._to_mongo_safe_call(item, use_db_field, fields) - for key, item in iteritems(value) + for key, item in value.items() } else: value_dict = {} - for k, v in iteritems(value): + for k, v in value.items(): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: @@ -461,8 +455,8 @@ class ComplexBaseField(BaseField): """If field is provided ensure the value is valid.""" errors = {} if self.field: - if hasattr(value, "iteritems") or hasattr(value, "items"): - sequence = iteritems(value) + if hasattr(value, "items"): + sequence = value.items() else: sequence = enumerate(value) for k, v in sequence: @@ -475,7 +469,9 @@ class ComplexBaseField(BaseField): if errors: field_class = self.field.__class__.__name__ - self.error("Invalid %s item (%s)" % (field_class, value), errors=errors) + self.error( + "Invalid {} item ({})".format(field_class, value), errors=errors + ) # Don't allow empty values if required if self.required and not value: self.error("Field is required and cannot be empty") @@ -508,10 +504,9 @@ class ObjectIdField(BaseField): def to_mongo(self, value): if not isinstance(value, ObjectId): try: - return ObjectId(six.text_type(value)) + return ObjectId(str(value)) except Exception as e: - 
# e.message attribute has been deprecated since Python 2.6 - self.error(six.text_type(e)) + self.error(str(e)) return value def prepare_query_value(self, op, value): @@ -519,9 +514,9 @@ class ObjectIdField(BaseField): def validate(self, value): try: - ObjectId(six.text_type(value)) + ObjectId(str(value)) except Exception: - self.error("Invalid Object ID") + self.error("Invalid ObjectID") class GeoJsonBaseField(BaseField): @@ -541,14 +536,14 @@ class GeoJsonBaseField(BaseField): self._name = "%sField" % self._type if not auto_index: self._geo_index = False - super(GeoJsonBaseField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate(self, value): """Validate the GeoJson object based on its type.""" if isinstance(value, dict): if set(value.keys()) == {"type", "coordinates"}: if value["type"] != self._type: - self.error('%s type must be "%s"' % (self._name, self._type)) + self.error('{} type must be "{}"'.format(self._name, self._type)) return self.validate(value["coordinates"]) else: self.error( diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 3bba796b..b4479b97 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -1,9 +1,6 @@ import itertools import warnings -import six -from six import iteritems, itervalues - from mongoengine.base.common import _document_registry from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField from mongoengine.common import _import_class @@ -25,7 +22,7 @@ class DocumentMetaclass(type): # TODO lower complexity of this method def __new__(mcs, name, bases, attrs): flattened_bases = mcs._get_bases(bases) - super_new = super(DocumentMetaclass, mcs).__new__ + super_new = super().__new__ # If a base class just call super metaclass = attrs.get("my_metaclass") @@ -69,7 +66,7 @@ class DocumentMetaclass(type): # Standard object mixin - merge in any Fields if not hasattr(base, "_meta"): base_fields = {} - for attr_name, attr_value in iteritems(base.__dict__): + for attr_name, attr_value in base.__dict__.items(): if not isinstance(attr_value, BaseField): continue attr_value.name = attr_name @@ -81,7 +78,7 @@ class DocumentMetaclass(type): # Discover any document fields field_names = {} - for attr_name, attr_value in iteritems(attrs): + for attr_name, attr_value in attrs.items(): if not isinstance(attr_value, BaseField): continue attr_value.name = attr_name @@ -111,9 +108,7 @@ class DocumentMetaclass(type): attrs["_fields_ordered"] = tuple( i[1] - for i in sorted( - (v.creation_counter, v.name) for v in itervalues(doc_fields) - ) + for i in sorted((v.creation_counter, v.name) for v in doc_fields.values()) ) # @@ -173,24 +168,8 @@ class DocumentMetaclass(type): # Add class to the _document_registry _document_registry[new_class._class_name] = new_class - # In Python 2, User-defined methods objects have special read-only - # attributes 'im_func' and 'im_self' which contain the function obj - # and class instance object respectively. With Python 3 these special - # attributes have been replaced by __func__ and __self__. The Blinker - # module continues to use im_func and im_self, so the code below - # copies __func__ into im_func and __self__ into im_self for - # classmethod objects in Document derived classes. 
- if six.PY3: - for val in new_class.__dict__.values(): - if isinstance(val, classmethod): - f = val.__get__(new_class) - if hasattr(f, "__func__") and not hasattr(f, "im_func"): - f.__dict__.update({"im_func": getattr(f, "__func__")}) - if hasattr(f, "__self__") and not hasattr(f, "im_self"): - f.__dict__.update({"im_self": getattr(f, "__self__")}) - # Handle delete rules - for field in itervalues(new_class._fields): + for field in new_class._fields.values(): f = field if f.owner_document is None: f.owner_document = new_class @@ -252,8 +231,7 @@ class DocumentMetaclass(type): if base is object: continue yield base - for child_base in mcs.__get_bases(base.__bases__): - yield child_base + yield from mcs.__get_bases(base.__bases__) @classmethod def _import_classes(mcs): @@ -271,7 +249,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): def __new__(mcs, name, bases, attrs): flattened_bases = mcs._get_bases(bases) - super_new = super(TopLevelDocumentMetaclass, mcs).__new__ + super_new = super().__new__ # Set default _meta data if base class, otherwise get user defined meta if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: @@ -398,7 +376,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class.objects = QuerySetManager() # Validate the fields and set primary key if needed - for field_name, field in iteritems(new_class._fields): + for field_name, field in new_class._fields.items(): if field.primary_key: # Ensure only one primary key is set current_pk = new_class._meta.get("id_field") @@ -461,8 +439,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) for i in itertools.count(): - id_name = "{0}_{1}".format(id_basename, i) - id_db_name = "{0}_{1}".format(id_db_basename, i) + id_name = "{}_{}".format(id_basename, i) + id_db_name = "{}_{}".format(id_db_basename, i) if id_name not in existing_fields and id_db_name not in existing_db_fields: return id_name, id_db_name @@ -475,7 +453,7 @@ class MetaDict(dict): _merge_options = ("indexes",) def merge(self, new_options): - for k, v in iteritems(new_options): + for k, v in new_options.items(): if k in self._merge_options: self[k] = self.get(k, []) + v else: diff --git a/mongoengine/base/utils.py b/mongoengine/base/utils.py index 8f27ee14..7753ad50 100644 --- a/mongoengine/base/utils.py +++ b/mongoengine/base/utils.py @@ -1,7 +1,7 @@ import re -class LazyRegexCompiler(object): +class LazyRegexCompiler: """Descriptor to allow lazy compilation of regex""" def __init__(self, pattern, flags=0): diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 4e0c60b0..13d170ec 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -1,6 +1,5 @@ from pymongo import MongoClient, ReadPreference, uri_parser from pymongo.database import _check_name -import six __all__ = [ "DEFAULT_CONNECTION_NAME", @@ -39,8 +38,8 @@ def _check_db_name(name): """Check if a database name is valid. This functionality is copied from pymongo Database class constructor. """ - if not isinstance(name, six.string_types): - raise TypeError("name must be an instance of %s" % six.string_types) + if not isinstance(name, str): + raise TypeError("name must be an instance of %s" % str) elif name != "$external": _check_name(name) @@ -93,7 +92,7 @@ def _get_connection_settings( conn_host = conn_settings["host"] # Host can be a list or a string, so if string, force to a list. 
- if isinstance(conn_host, six.string_types): + if isinstance(conn_host, str): conn_host = [conn_host] resolved_hosts = [] @@ -148,7 +147,7 @@ def _get_connection_settings( # TODO simplify the code below once we drop support for # PyMongo v3.4. read_pf_mode = uri_options["readpreference"] - if isinstance(read_pf_mode, six.string_types): + if isinstance(read_pf_mode, str): read_pf_mode = read_pf_mode.lower() for preference in read_preferences: if ( @@ -318,7 +317,7 @@ def _create_connection(alias, connection_class, **connection_settings): try: return connection_class(**connection_settings) except Exception as e: - raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e)) + raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e)) def _find_existing_connection(connection_settings): @@ -396,8 +395,8 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): if new_conn_settings != prev_conn_setting: err_msg = ( - u"A different connection with alias `{}` was already " - u"registered. Use disconnect() first" + "A different connection with alias `{}` was already " + "registered. Use disconnect() first" ).format(alias) raise ConnectionFailure(err_msg) else: diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 1592ceef..8bfb902b 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,7 +1,6 @@ from contextlib import contextmanager from pymongo.write_concern import WriteConcern -from six import iteritems from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db @@ -17,7 +16,7 @@ __all__ = ( ) -class switch_db(object): +class switch_db: """switch_db alias context manager. Example :: @@ -58,7 +57,7 @@ class switch_db(object): self.cls._collection = self.collection -class switch_collection(object): +class switch_collection: """switch_collection alias context manager. Example :: @@ -100,7 +99,7 @@ class switch_collection(object): self.cls._get_collection_name = self.ori_get_collection_name -class no_dereference(object): +class no_dereference: """no_dereference context manager. Turns off all dereferencing in Documents for the duration of the context @@ -123,7 +122,7 @@ class no_dereference(object): self.deref_fields = [ k - for k, v in iteritems(self.cls._fields) + for k, v in self.cls._fields.items() if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) ] @@ -140,7 +139,7 @@ class no_dereference(object): return self.cls -class no_sub_classes(object): +class no_sub_classes: """no_sub_classes context manager. Only returns instances of this class and no sub (inherited) classes:: @@ -168,7 +167,7 @@ class no_sub_classes(object): self.cls._subclasses = self.cls_initial_subclasses -class query_counter(object): +class query_counter: """Query_counter context manager to get the number of queries. This works by updating the `profiling_level` of the database so that all queries get logged, resetting the db.system.profile collection at the beginning of the context and counting the new entries. 
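As a usage sketch of the ``query_counter`` context manager described above (assuming a locally running ``mongod`` and an invented ``Book`` document)::

    from mongoengine import Document, StringField, connect
    from mongoengine.context_managers import query_counter

    connect("query_counter_demo")  # assumed local database name

    class Book(Document):  # invented document, only for the demo
        title = StringField()

    Book(title="1984").save()  # outside the counter so index creation isn't counted

    with query_counter() as q:
        assert q == 0            # db.system.profile was just reset
        Book.objects.first()     # issues a single query
        assert q == 1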
@@ -235,7 +234,7 @@ class query_counter(object): def __repr__(self): """repr query_counter as the number of queries.""" - return u"%s" % self._get_count() + return "%s" % self._get_count() def _get_count(self): """Get the number of queries by counting the current number of entries in db.system.profile diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 9e75f353..ff608a3b 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -1,6 +1,4 @@ from bson import DBRef, SON -import six -from six import iteritems from mongoengine.base import ( BaseDict, @@ -16,7 +14,7 @@ from mongoengine.fields import DictField, ListField, MapField, ReferenceField from mongoengine.queryset import QuerySet -class DeReference(object): +class DeReference: def __call__(self, items, max_depth=1, instance=None, name=None): """ Cheaply dereferences the items to a set depth. @@ -30,7 +28,7 @@ class DeReference(object): :class:`~mongoengine.base.ComplexBaseField` :param get: A boolean determining if being called by __get__ """ - if items is None or isinstance(items, six.string_types): + if items is None or isinstance(items, str): return items # cheapest way to convert a queryset to a list @@ -79,7 +77,7 @@ class DeReference(object): def _get_items_from_dict(items): new_items = {} - for k, v in iteritems(items): + for k, v in items.items(): value = v if isinstance(v, list): value = _get_items_from_list(v) @@ -120,7 +118,7 @@ class DeReference(object): depth += 1 for item in iterator: if isinstance(item, (Document, EmbeddedDocument)): - for field_name, field in iteritems(item._fields): + for field_name, field in item._fields.items(): v = item._data.get(field_name, None) if isinstance(v, LazyReference): # LazyReference inherits DBRef but should not be dereferenced here ! 
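This reference-collection machinery is what backs ``QuerySet.select_related()``: DBRefs are grouped per collection and resolved with a single ``in_bulk()`` call each, rather than one query per reference. A sketch with invented documents::

    from mongoengine import Document, ReferenceField, StringField, connect

    connect("dereference_demo")  # assumed local database name

    class Author(Document):
        name = StringField()

    class Post(Document):
        title = StringField()
        author = ReferenceField(Author)

    # Resolves the Author references of all fetched posts in bulk; max_depth
    # controls how deep nested references are followed.
    posts = Post.objects.select_related(max_depth=1)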
@@ -136,7 +134,7 @@ class DeReference(object): getattr(field, "field", None), "document_type", None ) references = self._find_references(v, depth) - for key, refs in iteritems(references): + for key, refs in references.items(): if isinstance( field_cls, (Document, TopLevelDocumentMetaclass) ): @@ -153,7 +151,7 @@ class DeReference(object): ) elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: references = self._find_references(item, depth - 1) - for key, refs in iteritems(references): + for key, refs in references.items(): reference_map.setdefault(key, set()).update(refs) return reference_map @@ -162,7 +160,7 @@ class DeReference(object): """Fetch all references and convert to their document objects """ object_map = {} - for collection, dbrefs in iteritems(self.reference_map): + for collection, dbrefs in self.reference_map.items(): # we use getattr instead of hasattr because hasattr swallows any exception under python2 # so it could hide nasty things without raising exceptions (cfr bug #1688)) @@ -174,7 +172,7 @@ class DeReference(object): dbref for dbref in dbrefs if (col_name, dbref) not in object_map ] references = collection.objects.in_bulk(refs) - for key, doc in iteritems(references): + for key, doc in references.items(): object_map[(col_name, key)] = doc else: # Generic reference: use the refs data to convert to document if isinstance(doc_type, (ListField, DictField, MapField)): @@ -250,7 +248,7 @@ class DeReference(object): data = [] else: is_list = False - iterator = iteritems(items) + iterator = items.items() data = {} depth += 1 @@ -274,14 +272,12 @@ class DeReference(object): (v["_ref"].collection, v["_ref"].id), v ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = six.text_type("{0}.{1}.{2}").format( - name, k, field_name - ) + item_name = "{}.{}.{}".format(name, k, field_name) data[k]._data[field_name] = self._attach_objects( v, depth, instance=instance, name=item_name ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = "%s.%s" % (name, k) if name else name + item_name = "{}.{}".format(name, k) if name else name data[k] = self._attach_objects( v, depth - 1, instance=instance, name=item_name ) diff --git a/mongoengine/document.py b/mongoengine/document.py index c8710fb5..db64054a 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -4,8 +4,6 @@ import warnings from bson.dbref import DBRef import pymongo from pymongo.read_preferences import ReadPreference -import six -from six import iteritems from mongoengine import signals from mongoengine.base import ( @@ -44,7 +42,7 @@ def includes_cls(fields): """Helper function used for ensuring and comparing indexes.""" first_field = None if len(fields): - if isinstance(fields[0], six.string_types): + if isinstance(fields[0], str): first_field = fields[0] elif isinstance(fields[0], (list, tuple)) and len(fields[0]): first_field = fields[0][0] @@ -55,7 +53,7 @@ class InvalidCollectionError(Exception): pass -class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): +class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): r"""A :class:`~mongoengine.Document` that isn't stored in its own collection. 
:class:`~mongoengine.EmbeddedDocument`\ s should be used as fields on :class:`~mongoengine.Document`\ s through the @@ -71,7 +69,6 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): __slots__ = ("_instance",) - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass @@ -82,7 +79,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): __hash__ = None def __init__(self, *args, **kwargs): - super(EmbeddedDocument, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._instance = None self._changed_fields = [] @@ -95,7 +92,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): return not self.__eq__(other) def to_mongo(self, *args, **kwargs): - data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) + data = super().to_mongo(*args, **kwargs) # remove _id from the SON if it's in it and it's None if "_id" in data and data["_id"] is None: @@ -104,7 +101,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): return data -class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): +class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): """The base class used for defining the structure and properties of collections of documents stored in MongoDB. Inherit from this class, and add fields as class attributes to define a document's structure. @@ -156,7 +153,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): in the :attr:`meta` dictionary. """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass @@ -260,7 +256,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): return db.create_collection(collection_name, **opts) def to_mongo(self, *args, **kwargs): - data = super(Document, self).to_mongo(*args, **kwargs) + data = super().to_mongo(*args, **kwargs) # If '_id' is None, try and set it from self._data. If that # doesn't exist either, remove '_id' from the SON completely. 
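The hunks above replace ``six.with_metaclass(...)`` with the native Python 3 ``metaclass=`` keyword; a self-contained sketch of the syntax change (``Meta`` and ``Base`` are stand-ins, not MongoEngine's real classes)::

    class Meta(type):
        """Stand-in metaclass, used only to illustrate the syntax change."""

    # Python 2/3-compatible spelling removed by the patch:
    #     class Base(six.with_metaclass(Meta, object)):
    #         ...
    # Native Python 3 spelling now used for Document and EmbeddedDocument:
    class Base(metaclass=Meta):
        pass

    assert type(Base) is Meta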
@@ -431,16 +427,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): self.cascade_save(**kwargs) except pymongo.errors.DuplicateKeyError as err: - message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" - if re.match("^E1100[01] duplicate key", six.text_type(err)): + if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) - raise OperationError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) + raise OperationError(message % err) # Make sure we store the PK on this document now that it's saved id_field = self._meta["id_field"] @@ -559,7 +555,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): if not getattr(ref, "_changed_fields", True): continue - ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) + ref_id = "{},{}".format(ref.__class__.__name__, str(ref._data)) if ref and ref_id not in _refs: _refs.append(ref_id) kwargs["_refs"] = _refs @@ -634,7 +630,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # Delete FileFields separately FileField = _import_class("FileField") - for name, field in iteritems(self._fields): + for name, field in self._fields.items(): if isinstance(field, FileField): getattr(self, name).delete() @@ -643,7 +639,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): write_concern=write_concern, _from_doc_delete=True ) except pymongo.errors.OperationFailure as err: - message = u"Could not delete document (%s)" % err.message + message = "Could not delete document (%s)" % err.message raise OperationError(message) signals.post_delete.send(self.__class__, document=self, **signal_kwargs) @@ -979,10 +975,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): indexes.append(index) # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed - if [(u"_id", 1)] not in indexes: - indexes.append([(u"_id", 1)]) + if [("_id", 1)] not in indexes: + indexes.append([("_id", 1)]) if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"): - indexes.append([(u"_cls", 1)]) + indexes.append([("_cls", 1)]) return indexes @@ -1006,19 +1002,19 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): extra = [index for index in existing if index not in required] # if { _cls: 1 } is missing, make sure it's *really* necessary - if [(u"_cls", 1)] in missing: + if [("_cls", 1)] in missing: cls_obsolete = False for index in existing: if includes_cls(index) and index not in extra: cls_obsolete = True break if cls_obsolete: - missing.remove([(u"_cls", 1)]) + missing.remove([("_cls", 1)]) return {"missing": missing, "extra": extra} -class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): +class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): """A Dynamic Document class allowing flexible, expandable and uncontrolled schemas. As a :class:`~mongoengine.Document` subclass, acts in the same way as an ordinary document but has expanded style properties. 
Any data @@ -1032,7 +1028,6 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass @@ -1047,16 +1042,15 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): setattr(self, field_name, None) self._dynamic_fields[field_name].null = False else: - super(DynamicDocument, self).__delattr__(*args, **kwargs) + super().__delattr__(*args, **kwargs) -class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): +class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): """A Dynamic Embedded Document class allowing flexible, expandable and uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more information about dynamic documents. """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass @@ -1076,7 +1070,7 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu setattr(self, field_name, None) -class MapReduceDocument(object): +class MapReduceDocument: """A document returned from a map/reduce query. :param collection: An instance of :class:`~pymongo.Collection` diff --git a/mongoengine/errors.py b/mongoengine/errors.py index b76243d3..95564ff9 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -1,7 +1,5 @@ from collections import defaultdict -import six -from six import iteritems __all__ = ( "NotRegistered", @@ -87,24 +85,24 @@ class ValidationError(AssertionError): _message = None def __init__(self, message="", **kwargs): - super(ValidationError, self).__init__(message) + super().__init__(message) self.errors = kwargs.get("errors", {}) self.field_name = kwargs.get("field_name") self.message = message def __str__(self): - return six.text_type(self.message) + return str(self.message) def __repr__(self): - return "%s(%s,)" % (self.__class__.__name__, self.message) + return "{}({},)".format(self.__class__.__name__, self.message) def __getattribute__(self, name): - message = super(ValidationError, self).__getattribute__(name) + message = super().__getattribute__(name) if name == "message": if self.field_name: message = "%s" % message if self.errors: - message = "%s(%s)" % (message, self._format_errors()) + message = "{}({})".format(message, self._format_errors()) return message def _get_message(self): @@ -126,12 +124,12 @@ class ValidationError(AssertionError): def build_dict(source): errors_dict = {} if isinstance(source, dict): - for field_name, error in iteritems(source): + for field_name, error in source.items(): errors_dict[field_name] = build_dict(error) elif isinstance(source, ValidationError) and source.errors: return build_dict(source.errors) else: - return six.text_type(source) + return str(source) return errors_dict @@ -147,15 +145,15 @@ class ValidationError(AssertionError): if isinstance(value, list): value = " ".join([generate_key(k) for k in value]) elif isinstance(value, dict): - value = " ".join([generate_key(v, k) for k, v in iteritems(value)]) + value = " ".join([generate_key(v, k) for k, v in value.items()]) - results = "%s.%s" % (prefix, value) if prefix else value + results = "{}.{}".format(prefix, value) if prefix else 
value return results error_dict = defaultdict(list) - for k, v in iteritems(self.to_dict()): + for k, v in self.to_dict().items(): error_dict[generate_key(v)].append(k) - return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)]) + return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()]) class DeprecatedError(Exception): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index d502dba3..17aa0b57 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -5,14 +5,14 @@ import re import socket import time import uuid +from io import BytesIO from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON +from bson.int64 import Int64 import gridfs import pymongo from pymongo import ReturnDocument -import six -from six import iteritems try: import dateutil @@ -21,11 +21,6 @@ except ImportError: else: import dateutil.parser -try: - from bson.int64 import Int64 -except ImportError: - Int64 = long - from mongoengine.base import ( BaseDocument, @@ -42,7 +37,6 @@ from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.document import Document, EmbeddedDocument from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version -from mongoengine.python_support import StringIO from mongoengine.queryset import DO_NOTHING from mongoengine.queryset.base import BaseQuerySet from mongoengine.queryset.transform import STRING_OPERATORS @@ -53,11 +47,6 @@ except ImportError: Image = None ImageOps = None -if six.PY3: - # Useless as long as 2to3 gets executed - # as it turns `long` into `int` blindly - long = int - __all__ = ( "StringField", @@ -114,10 +103,10 @@ class StringField(BaseField): self.regex = re.compile(regex) if regex else None self.max_length = max_length self.min_length = min_length - super(StringField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): - if isinstance(value, six.text_type): + if isinstance(value, str): return value try: value = value.decode("utf-8") @@ -126,7 +115,7 @@ class StringField(BaseField): return value def validate(self, value): - if not isinstance(value, six.string_types): + if not isinstance(value, str): self.error("StringField only accepts string values") if self.max_length is not None and len(value) > self.max_length: @@ -142,7 +131,7 @@ class StringField(BaseField): return None def prepare_query_value(self, op, value): - if not isinstance(op, six.string_types): + if not isinstance(op, str): return value if op in STRING_OPERATORS: @@ -162,7 +151,7 @@ class StringField(BaseField): # escape unsafe characters which could lead to a re.error value = re.escape(value) value = re.compile(regex % value, flags) - return super(StringField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class URLField(StringField): @@ -186,17 +175,17 @@ class URLField(StringField): def __init__(self, url_regex=None, schemes=None, **kwargs): self.url_regex = url_regex or self._URL_REGEX self.schemes = schemes or self._URL_SCHEMES - super(URLField, self).__init__(**kwargs) + super().__init__(**kwargs) def validate(self, value): # Check first if the scheme is valid scheme = value.split("://")[0].lower() if scheme not in self.schemes: - self.error(u"Invalid scheme {} in URL: {}".format(scheme, value)) + self.error("Invalid scheme {} in URL: {}".format(scheme, value)) # Then check full URL if not self.url_regex.match(value): - self.error(u"Invalid URL: 
{}".format(value)) + self.error("Invalid URL: {}".format(value)) class EmailField(StringField): @@ -214,7 +203,7 @@ class EmailField(StringField): ) UTF8_USER_REGEX = LazyRegexCompiler( - six.u( + ( # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to # include `UTF8-non-ascii`. r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+)*\Z" @@ -229,7 +218,7 @@ class EmailField(StringField): re.IGNORECASE, ) - error_msg = u"Invalid email address: %s" + error_msg = "Invalid email address: %s" def __init__( self, @@ -253,7 +242,7 @@ class EmailField(StringField): self.domain_whitelist = domain_whitelist or [] self.allow_utf8_user = allow_utf8_user self.allow_ip_domain = allow_ip_domain - super(EmailField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate_user_part(self, user_part): """Validate the user part of the email address. Return True if @@ -280,13 +269,13 @@ class EmailField(StringField): try: socket.inet_pton(addr_family, domain_part[1:-1]) return True - except (socket.error, UnicodeEncodeError): + except (OSError, UnicodeEncodeError): pass return False def validate(self, value): - super(EmailField, self).validate(value) + super().validate(value) if "@" not in value: self.error(self.error_msg % value) @@ -303,12 +292,16 @@ class EmailField(StringField): domain_part = domain_part.encode("idna").decode("ascii") except UnicodeError: self.error( - "%s %s" % (self.error_msg % value, "(domain failed IDN encoding)") + "{} {}".format( + self.error_msg % value, "(domain failed IDN encoding)" + ) ) else: if not self.validate_domain_part(domain_part): self.error( - "%s %s" % (self.error_msg % value, "(domain validation failed)") + "{} {}".format( + self.error_msg % value, "(domain validation failed)" + ) ) @@ -317,7 +310,7 @@ class IntField(BaseField): def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(IntField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: @@ -342,19 +335,19 @@ class IntField(BaseField): if value is None: return value - return super(IntField, self).prepare_query_value(op, int(value)) + return super().prepare_query_value(op, int(value)) class LongField(BaseField): - """64-bit integer field.""" + """64-bit integer field. 
(Equivalent to IntField since Python 2 support was dropped.)""" def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(LongField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: - value = long(value) + value = int(value) except (TypeError, ValueError): pass return value @@ -364,7 +357,7 @@ def validate(self, value): try: - value = long(value) + value = int(value) except (TypeError, ValueError): self.error("%s could not be converted to long" % value) @@ -378,7 +371,7 @@ if value is None: return value - return super(LongField, self).prepare_query_value(op, long(value)) + return super().prepare_query_value(op, int(value)) class FloatField(BaseField): @@ -386,7 +379,7 @@ def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(FloatField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: @@ -396,7 +389,7 @@ return value def validate(self, value): - if isinstance(value, six.integer_types): + if isinstance(value, int): try: value = float(value) except OverflowError: @@ -415,7 +408,7 @@ if value is None: return value - return super(FloatField, self).prepare_query_value(op, float(value)) + return super().prepare_query_value(op, float(value)) class DecimalField(BaseField): @@ -462,7 +455,7 @@ self.precision = precision self.rounding = rounding - super(DecimalField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if value is None: @@ -481,13 +474,13 @@ if value is None: return value if self.force_string: - return six.text_type(self.to_python(value)) + return str(self.to_python(value)) return float(self.to_python(value)) def validate(self, value): if not isinstance(value, decimal.Decimal): - if not isinstance(value, six.string_types): - value = six.text_type(value) + if not isinstance(value, str): + value = str(value) try: value = decimal.Decimal(value) except (TypeError, ValueError, decimal.InvalidOperation) as exc: @@ -500,7 +493,7 @@ self.error("Decimal value is too large") def prepare_query_value(self, op, value): - return super(DecimalField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class BooleanField(BaseField): @@ -540,7 +533,7 @@ class DateTimeField(BaseField): def validate(self, value): new_value = self.to_mongo(value) if not isinstance(new_value, (datetime.datetime, datetime.date)): - self.error(u'cannot parse date "%s"' % value) + self.error('cannot parse date "%s"' % value) def to_mongo(self, value): if value is None: @@ -552,7 +545,7 @@ if callable(value): return value() - if not isinstance(value, six.string_types): + if not isinstance(value, str): return None return self._parse_datetime(value) @@ -597,19 +590,19 @@ return None def prepare_query_value(self, op, value): - return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class DateField(DateTimeField): def to_mongo(self, value): - value = super(DateField, self).to_mongo(value) + value = super().to_mongo(value) # drop hours, minutes,
seconds if isinstance(value, datetime.datetime): value = datetime.datetime(value.year, value.month, value.day) return value def to_python(self, value): - value = super(DateField, self).to_python(value) + value = super().to_python(value) # convert datetime to date if isinstance(value, datetime.datetime): value = datetime.date(value.year, value.month, value.day) @@ -643,7 +636,7 @@ class ComplexDateTimeField(StringField): """ self.separator = separator self.format = separator.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"]) - super(ComplexDateTimeField, self).__init__(**kwargs) + super().__init__(**kwargs) def _convert_from_datetime(self, val): """ @@ -674,14 +667,14 @@ class ComplexDateTimeField(StringField): if instance is None: return self - data = super(ComplexDateTimeField, self).__get__(instance, owner) + data = super().__get__(instance, owner) if isinstance(data, datetime.datetime) or data is None: return data return self._convert_from_string(data) def __set__(self, instance, value): - super(ComplexDateTimeField, self).__set__(instance, value) + super().__set__(instance, value) value = instance._data[self.name] if value is not None: instance._data[self.name] = self._convert_from_datetime(value) @@ -703,9 +696,7 @@ class ComplexDateTimeField(StringField): return self._convert_from_datetime(value) def prepare_query_value(self, op, value): - return super(ComplexDateTimeField, self).prepare_query_value( - op, self._convert_from_datetime(value) - ) + return super().prepare_query_value(op, self._convert_from_datetime(value)) class EmbeddedDocumentField(BaseField): @@ -716,7 +707,7 @@ class EmbeddedDocumentField(BaseField): def __init__(self, document_type, **kwargs): # XXX ValidationError raised outside of the "validate" method. if not ( - isinstance(document_type, six.string_types) + isinstance(document_type, str) or issubclass(document_type, EmbeddedDocument) ): self.error( @@ -725,11 +716,11 @@ class EmbeddedDocumentField(BaseField): ) self.document_type_obj = document_type - super(EmbeddedDocumentField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: resolved_document_type = self.owner_document else: @@ -786,7 +777,7 @@ class EmbeddedDocumentField(BaseField): "Querying the embedded document '%s' failed, due to an invalid query value" % (self.document_type._class_name,) ) - super(EmbeddedDocumentField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) @@ -802,9 +793,7 @@ class GenericEmbeddedDocumentField(BaseField): """ def prepare_query_value(self, op, value): - return super(GenericEmbeddedDocumentField, self).prepare_query_value( - op, self.to_mongo(value) - ) + return super().prepare_query_value(op, self.to_mongo(value)) def to_python(self, value): if isinstance(value, dict): @@ -855,7 +844,7 @@ class DynamicField(BaseField): """Convert a Python type to a MongoDB compatible type. 
""" - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_mongo"): @@ -877,12 +866,12 @@ class DynamicField(BaseField): value = {k: v for k, v in enumerate(value)} data = {} - for k, v in iteritems(value): + for k, v in value.items(): data[k] = self.to_mongo(v, use_db_field, fields) value = data if is_list: # Convert back to a list - value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))] + value = [v for k, v in sorted(data.items(), key=itemgetter(0))] return value def to_python(self, value): @@ -892,15 +881,15 @@ class DynamicField(BaseField): value = doc_cls._get_db().dereference(value["_ref"]) return doc_cls._from_son(value) - return super(DynamicField, self).to_python(value) + return super().to_python(value) def lookup_member(self, member_name): return member_name def prepare_query_value(self, op, value): - if isinstance(value, six.string_types): + if isinstance(value, str): return StringField().prepare_query_value(op, value) - return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) def validate(self, value, clean=True): if hasattr(value, "validate"): @@ -921,7 +910,7 @@ class ListField(ComplexBaseField): self.field = field self.max_length = max_length kwargs.setdefault("default", lambda: []) - super(ListField, self).__init__(**kwargs) + super().__init__(**kwargs) def __get__(self, instance, owner): if instance is None: @@ -935,7 +924,7 @@ class ListField(ComplexBaseField): and value ): instance._data[self.name] = [self.field.build_lazyref(x) for x in value] - return super(ListField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): """Make sure that a list of valid fields is being used.""" @@ -949,7 +938,7 @@ class ListField(ComplexBaseField): if self.max_length is not None and len(value) > self.max_length: self.error("List is too long") - super(ListField, self).validate(value) + super().validate(value) def prepare_query_value(self, op, value): # Validate that the `set` operator doesn't contain more items than `max_length`. @@ -963,14 +952,14 @@ class ListField(ComplexBaseField): if ( op in ("set", "unset", None) and hasattr(value, "__iter__") - and not isinstance(value, six.string_types) + and not isinstance(value, str) and not isinstance(value, BaseDocument) ): return [self.field.prepare_query_value(op, v) for v in value] return self.field.prepare_query_value(op, value) - return super(ListField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class EmbeddedDocumentListField(ListField): @@ -991,9 +980,7 @@ class EmbeddedDocumentListField(ListField): :param kwargs: Keyword arguments passed directly into the parent :class:`~mongoengine.ListField`. 
""" - super(EmbeddedDocumentListField, self).__init__( - field=EmbeddedDocumentField(document_type), **kwargs - ) + super().__init__(field=EmbeddedDocumentField(document_type), **kwargs) class SortedListField(ListField): @@ -1019,10 +1006,10 @@ class SortedListField(ListField): self._ordering = kwargs.pop("ordering") if "reverse" in kwargs.keys(): self._order_reverse = kwargs.pop("reverse") - super(SortedListField, self).__init__(field, **kwargs) + super().__init__(field, **kwargs) def to_mongo(self, value, use_db_field=True, fields=None): - value = super(SortedListField, self).to_mongo(value, use_db_field, fields) + value = super().to_mongo(value, use_db_field, fields) if self._ordering is not None: return sorted( value, key=itemgetter(self._ordering), reverse=self._order_reverse @@ -1035,9 +1022,7 @@ def key_not_string(d): dictionary is not a string. """ for k, v in d.items(): - if not isinstance(k, six.string_types) or ( - isinstance(v, dict) and key_not_string(v) - ): + if not isinstance(k, str) or (isinstance(v, dict) and key_not_string(v)): return True @@ -1077,7 +1062,7 @@ class DictField(ComplexBaseField): self._auto_dereference = False kwargs.setdefault("default", lambda: {}) - super(DictField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate(self, value): """Make sure that a list of valid fields is being used.""" @@ -1097,7 +1082,7 @@ class DictField(ComplexBaseField): self.error( 'Invalid dictionary key name - keys may not startswith "$" characters' ) - super(DictField, self).validate(value) + super().validate(value) def lookup_member(self, member_name): return DictField(db_field=member_name) @@ -1114,7 +1099,7 @@ class DictField(ComplexBaseField): "iexact", ] - if op in match_operators and isinstance(value, six.string_types): + if op in match_operators and isinstance(value, str): return StringField().prepare_query_value(op, value) if hasattr( @@ -1126,7 +1111,7 @@ class DictField(ComplexBaseField): } return self.field.prepare_query_value(op, value) - return super(DictField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class MapField(DictField): @@ -1141,7 +1126,7 @@ class MapField(DictField): # XXX ValidationError raised outside of the "validate" method. if not isinstance(field, BaseField): self.error("Argument to MapField constructor must be a valid field") - super(MapField, self).__init__(field=field, *args, **kwargs) + super().__init__(field=field, *args, **kwargs) class ReferenceField(BaseField): @@ -1201,7 +1186,7 @@ class ReferenceField(BaseField): :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. """ # XXX ValidationError raised outside of the "validate" method. 
- if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -1212,11 +1197,11 @@ class ReferenceField(BaseField): self.dbref = dbref self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule - super(ReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1245,7 +1230,7 @@ class ReferenceField(BaseField): else: instance._data[self.name] = cls._from_son(dereferenced) - return super(ReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, document): if isinstance(document, DBRef): @@ -1296,7 +1281,7 @@ class ReferenceField(BaseField): def prepare_query_value(self, op, value): if value is None: return None - super(ReferenceField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) def validate(self, value): @@ -1332,7 +1317,7 @@ class CachedReferenceField(BaseField): fields = [] # XXX ValidationError raised outside of the "validate" method. - if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -1343,7 +1328,7 @@ class CachedReferenceField(BaseField): self.auto_sync = auto_sync self.document_type_obj = document_type self.fields = fields - super(CachedReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) def start_listener(self): from mongoengine import signals @@ -1355,7 +1340,7 @@ class CachedReferenceField(BaseField): return None update_kwargs = { - "set__%s__%s" % (self.name, key): val + "set__{}__{}".format(self.name, key): val for key, val in document._delta()[0].items() if key in self.fields } @@ -1377,7 +1362,7 @@ class CachedReferenceField(BaseField): @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1401,7 +1386,7 @@ class CachedReferenceField(BaseField): else: instance._data[self.name] = self.document_type._from_son(dereferenced) - return super(CachedReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, document, use_db_field=True, fields=None): id_field_name = self.document_type._meta["id_field"] @@ -1500,12 +1485,12 @@ class GenericReferenceField(BaseField): def __init__(self, *args, **kwargs): choices = kwargs.pop("choices", None) - super(GenericReferenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.choices = [] # Keep the choices as a list of allowed Document class names if choices: for choice in choices: - if isinstance(choice, six.string_types): + if isinstance(choice, str): self.choices.append(choice) elif isinstance(choice, type) and issubclass(choice, Document): self.choices.append(choice._class_name) @@ -1514,7 +1499,7 @@ class GenericReferenceField(BaseField): # method. 
self.error( "Invalid choices provided: must be a list of" - "Document subclasses and/or six.string_typess" + "Document subclasses and/or str" ) def _validate_choices(self, value): @@ -1524,7 +1509,7 @@ class GenericReferenceField(BaseField): value = value.get("_cls") elif isinstance(value, Document): value = value._class_name - super(GenericReferenceField, self)._validate_choices(value) + super()._validate_choices(value) def __get__(self, instance, owner): if instance is None: @@ -1540,7 +1525,7 @@ class GenericReferenceField(BaseField): else: instance._data[self.name] = dereferenced - return super(GenericReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): if not isinstance(value, (Document, DBRef, dict, SON)): @@ -1604,22 +1589,22 @@ class BinaryField(BaseField): def __init__(self, max_bytes=None, **kwargs): self.max_bytes = max_bytes - super(BinaryField, self).__init__(**kwargs) + super().__init__(**kwargs) def __set__(self, instance, value): """Handle bytearrays in python 3.1""" - if six.PY3 and isinstance(value, bytearray): - value = six.binary_type(value) - return super(BinaryField, self).__set__(instance, value) + if isinstance(value, bytearray): + value = bytes(value) + return super().__set__(instance, value) def to_mongo(self, value): return Binary(value) def validate(self, value): - if not isinstance(value, (six.binary_type, Binary)): + if not isinstance(value, (bytes, Binary)): self.error( "BinaryField only accepts instances of " - "(%s, %s, Binary)" % (six.binary_type.__name__, Binary.__name__) + "(%s, %s, Binary)" % (bytes.__name__, Binary.__name__) ) if self.max_bytes is not None and len(value) > self.max_bytes: @@ -1628,14 +1613,14 @@ class BinaryField(BaseField): def prepare_query_value(self, op, value): if value is None: return value - return super(BinaryField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class GridFSError(Exception): pass -class GridFSProxy(object): +class GridFSProxy: """Proxy object to handle writing and reading of files to and from GridFS .. 
versionadded:: 0.4 @@ -1685,8 +1670,6 @@ class GridFSProxy(object): def __bool__(self): return bool(self.grid_id) - __nonzero__ = __bool__ # For Py2 support - def __getstate__(self): self_dict = self.__dict__ self_dict["_fs"] = None @@ -1701,12 +1684,12 @@ class GridFSProxy(object): return self.__copy__() def __repr__(self): - return "<%s: %s>" % (self.__class__.__name__, self.grid_id) + return "<{}: {}>".format(self.__class__.__name__, self.grid_id) def __str__(self): gridout = self.get() filename = getattr(gridout, "filename") if gridout else "" - return "<%s: %s (%s)>" % (self.__class__.__name__, filename, self.grid_id) + return "<{}: {} ({})>".format(self.__class__.__name__, filename, self.grid_id) def __eq__(self, other): if isinstance(other, GridFSProxy): @@ -1817,7 +1800,7 @@ class FileField(BaseField): def __init__( self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs ): - super(FileField, self).__init__(**kwargs) + super().__init__(**kwargs) self.collection_name = collection_name self.db_alias = db_alias @@ -1840,7 +1823,7 @@ class FileField(BaseField): key = self.name if ( hasattr(value, "read") and not isinstance(value, GridFSProxy) - ) or isinstance(value, (six.binary_type, six.string_types)): + ) or isinstance(value, (bytes, str)): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) # If a file already exists, delete it @@ -1958,11 +1941,11 @@ class ImageGridFsProxy(GridFSProxy): w, h = img.size - io = StringIO() + io = BytesIO() img.save(io, img_format, progressive=progressive) io.seek(0) - return super(ImageGridFsProxy, self).put( + return super().put( io, width=w, height=h, format=img_format, thumbnail_id=thumb_id, **kwargs ) @@ -1972,12 +1955,12 @@ class ImageGridFsProxy(GridFSProxy): if out and out.thumbnail_id: self.fs.delete(out.thumbnail_id) - return super(ImageGridFsProxy, self).delete() + return super().delete() def _put_thumbnail(self, thumbnail, format, progressive, **kwargs): w, h = thumbnail.size - io = StringIO() + io = BytesIO() thumbnail.save(io, format, progressive=progressive) io.seek(0) @@ -2047,16 +2030,11 @@ class ImageField(FileField): for att_name, att in extra_args.items(): value = None if isinstance(att, (tuple, list)): - if six.PY3: - value = dict( - itertools.zip_longest(params_size, att, fillvalue=None) - ) - else: - value = dict(map(None, params_size, att)) + value = dict(itertools.zip_longest(params_size, att, fillvalue=None)) setattr(self, att_name, value) - super(ImageField, self).__init__(collection_name=collection_name, **kwargs) + super().__init__(collection_name=collection_name, **kwargs) class SequenceField(BaseField): @@ -2108,14 +2086,14 @@ class SequenceField(BaseField): self.value_decorator = ( value_decorator if callable(value_decorator) else self.VALUE_DECORATOR ) - super(SequenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def generate(self): """ Generate and Increment the counter """ sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_one_and_update( @@ -2129,7 +2107,7 @@ class SequenceField(BaseField): def set_next_value(self, value): """Helper method to set the next sequence value""" sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = 
get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_one_and_update( filter={"_id": sequence_id}, @@ -2146,7 +2124,7 @@ class SequenceField(BaseField): as it is only fixed on set. """ sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] data = collection.find_one({"_id": sequence_id}) @@ -2169,7 +2147,7 @@ class SequenceField(BaseField): ) def __get__(self, instance, owner): - value = super(SequenceField, self).__get__(instance, owner) + value = super().__get__(instance, owner) if value is None and instance._initialised: value = self.generate() instance._data[self.name] = value @@ -2182,7 +2160,7 @@ class SequenceField(BaseField): if value is None and instance._initialised: value = self.generate() - return super(SequenceField, self).__set__(instance, value) + return super().__set__(instance, value) def prepare_query_value(self, op, value): """ @@ -2216,14 +2194,14 @@ class UUIDField(BaseField): .. versionchanged:: 0.6.19 """ self._binary = binary - super(UUIDField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if not self._binary: original_value = value try: - if not isinstance(value, six.string_types): - value = six.text_type(value) + if not isinstance(value, str): + value = str(value) return uuid.UUID(value) except (ValueError, TypeError, AttributeError): return original_value @@ -2231,8 +2209,8 @@ class UUIDField(BaseField): def to_mongo(self, value): if not self._binary: - return six.text_type(value) - elif isinstance(value, six.string_types): + return str(value) + elif isinstance(value, str): return uuid.UUID(value) return value @@ -2243,7 +2221,7 @@ class UUIDField(BaseField): def validate(self, value): if not isinstance(value, uuid.UUID): - if not isinstance(value, six.string_types): + if not isinstance(value, str): value = str(value) try: uuid.UUID(value) @@ -2442,7 +2420,7 @@ class LazyReferenceField(BaseField): document. Note this only work getting field (not setting or deleting). """ # XXX ValidationError raised outside of the "validate" method. 
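# Illustrative usage sketch, not part of the changeset above: LazyReferenceField
# stores a LazyReference and only hits the database when fetch() is called. The
# Author/Book models and "example_db" are assumptions made up for this example;
# the `passthrough` flag controls whether attribute access is proxied to the
# referenced document.
from mongoengine import Document, LazyReferenceField, StringField, connect

class Author(Document):
    name = StringField()

class Book(Document):
    title = StringField()
    author = LazyReferenceField(Author)

connect("example_db")  # assumes a local mongod on the default port
author = Author(name="Ada").save()
Book(title="Notes", author=author).save()
lazy = Book.objects.first().author  # a LazyReference, not a full Author document
assert lazy.pk == author.pk
assert lazy.fetch().name == "Ada"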
- if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -2454,11 +2432,11 @@ class LazyReferenceField(BaseField): self.passthrough = passthrough self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule - super(LazyReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -2497,7 +2475,7 @@ class LazyReferenceField(BaseField): if value: instance._data[self.name] = value - return super(LazyReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, value): if isinstance(value, LazyReference): @@ -2561,7 +2539,7 @@ class LazyReferenceField(BaseField): def prepare_query_value(self, op, value): if value is None: return None - super(LazyReferenceField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) def lookup_member(self, member_name): @@ -2588,12 +2566,12 @@ class GenericLazyReferenceField(GenericReferenceField): def __init__(self, *args, **kwargs): self.passthrough = kwargs.pop("passthrough", False) - super(GenericLazyReferenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def _validate_choices(self, value): if isinstance(value, LazyReference): value = value.document_type._class_name - super(GenericLazyReferenceField, self)._validate_choices(value) + super()._validate_choices(value) def build_lazyref(self, value): if isinstance(value, LazyReference): @@ -2622,7 +2600,7 @@ class GenericLazyReferenceField(GenericReferenceField): if value: instance._data[self.name] = value - return super(GenericLazyReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): if isinstance(value, LazyReference) and value.pk is None: @@ -2630,7 +2608,7 @@ class GenericLazyReferenceField(GenericReferenceField): "You can only reference documents once they have been" " saved to the database" ) - return super(GenericLazyReferenceField, self).validate(value) + return super().validate(value) def to_mongo(self, document): if document is None: @@ -2649,4 +2627,4 @@ class GenericLazyReferenceField(GenericReferenceField): ) ) else: - return super(GenericLazyReferenceField, self).to_mongo(document) + return super().to_mongo(document) diff --git a/mongoengine/python_support.py b/mongoengine/python_support.py deleted file mode 100644 index 57e467db..00000000 --- a/mongoengine/python_support.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Helper functions, constants, and types to aid with Python v2.7 - v3.x support -""" -import six - -# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. 
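# Illustrative sketch, not part of the changeset above: with python_support.py
# removed, callers use io.BytesIO directly wherever the old StringIO alias was
# used (as the GridFS/ImageField hunks earlier in this patch now do). Variable
# names below are made up for the example.
from io import BytesIO

buf = BytesIO()
buf.write(b"binary payload")
buf.seek(0)
assert buf.read() == b"binary payload"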
-StringIO = six.BytesIO - -# Additionally for Py2, try to use the faster cStringIO, if available -if not six.PY3: - try: - import cStringIO - except ImportError: - pass - else: - StringIO = cStringIO.StringIO - - -if six.PY3: - from collections.abc import Hashable -else: - # raises DeprecationWarnings in Python >=3.7 - from collections import Hashable diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 15c58481..23cb79c5 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import copy import itertools import re @@ -11,8 +9,6 @@ import pymongo import pymongo.errors from pymongo.collection import ReturnDocument from pymongo.common import validate_read_preference -import six -from six import iteritems from mongoengine import signals from mongoengine.base import get_document @@ -41,7 +37,7 @@ DENY = 3 PULL = 4 -class BaseQuerySet(object): +class BaseQuerySet: """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. """ @@ -203,8 +199,6 @@ class BaseQuerySet(object): """Avoid to open all records in an if stmt in Py3.""" return self._has_data() - __nonzero__ = __bool__ # For Py2 support - # Core functions def all(self): @@ -255,21 +249,20 @@ class BaseQuerySet(object): queryset = queryset.filter(*q_objs, **query) try: - result = six.next(queryset) + result = next(queryset) except StopIteration: msg = "%s matching query does not exist." % queryset._document._class_name raise queryset._document.DoesNotExist(msg) try: # Check if there is another match - six.next(queryset) + next(queryset) except StopIteration: return result - # If we were able to retrieve the 2nd doc, rewind the cursor and - # raise the MultipleObjectsReturned exception. + # If we were able to retrieve the 2nd doc, raise the MultipleObjectsReturned exception. 
raise queryset._document.MultipleObjectsReturned( - u"2 or more items returned, instead of 1" + "2 or more items returned, instead of 1" ) def create(self, **kwargs): @@ -354,20 +347,20 @@ ) except pymongo.errors.DuplicateKeyError as err: message = "Could not save document (%s)" - raise NotUniqueError(message % six.text_type(err)) + raise NotUniqueError(message % err) except pymongo.errors.BulkWriteError as err: # inserting documents that already have an _id field will # give huge performance debt or raise - message = u"Bulk write error: (%s)" - raise BulkWriteError(message % six.text_type(err.details)) + message = "Bulk write error: (%s)" + raise BulkWriteError(message % err.details) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" - if re.match("^E1100[01] duplicate key", six.text_type(err)): + if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) - raise OperationError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) + raise OperationError(message % err) # Apply inserted_ids to documents for doc, doc_id in zip(docs, ids): @@ -539,12 +532,12 @@ elif result.raw_result: return result.raw_result["n"] except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u"Update failed (%s)" % six.text_type(err)) + raise NotUniqueError("Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - if six.text_type(err) == u"multi not coded yet": - message = u"update() method requires MongoDB 1.1.3+" + if str(err) == "multi not coded yet": + message = "update() method requires MongoDB 1.1.3+" raise OperationError(message) - raise OperationError(u"Update failed (%s)" % six.text_type(err)) + raise OperationError("Update failed (%s)" % err) def upsert_one(self, write_concern=None, **update): """Overwrite or add the first document matched by the query. @@ -662,9 +655,9 @@ **self._cursor_args ) except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u"Update failed (%s)" % err) + raise NotUniqueError("Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - raise OperationError(u"Update failed (%s)" % err) + raise OperationError("Update failed (%s)" % err) if full_response: if result["value"] is not None: @@ -693,7 +686,7 @@ return queryset.filter(pk=object_id).first() def in_bulk(self, object_ids): - """Retrieve a set of documents by their ids. + """Retrieve a set of documents by their ids. :param object_ids: a list or tuple of ObjectId's :rtype: dict of ObjectId's as keys and collection-specific @@ -989,7 +982,7 @@ ..
versionchanged:: 0.5 - Added subfield support """ fields = {f: QueryFieldList.ONLY for f in fields} - self.only_fields = fields.keys() + self.only_fields = list(fields.keys()) return self.fields(True, **fields) def exclude(self, *fields): @@ -1340,13 +1333,13 @@ class BaseQuerySet(object): map_f_scope = {} if isinstance(map_f, Code): map_f_scope = map_f.scope - map_f = six.text_type(map_f) + map_f = str(map_f) map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) reduce_f_scope = {} if isinstance(reduce_f, Code): reduce_f_scope = reduce_f.scope - reduce_f = six.text_type(reduce_f) + reduce_f = str(reduce_f) reduce_f_code = queryset._sub_js_fields(reduce_f) reduce_f = Code(reduce_f_code, reduce_f_scope) @@ -1356,7 +1349,7 @@ class BaseQuerySet(object): finalize_f_scope = {} if isinstance(finalize_f, Code): finalize_f_scope = finalize_f.scope - finalize_f = six.text_type(finalize_f) + finalize_f = str(finalize_f) finalize_f_code = queryset._sub_js_fields(finalize_f) finalize_f = Code(finalize_f_code, finalize_f_scope) mr_args["finalize"] = finalize_f @@ -1372,7 +1365,7 @@ class BaseQuerySet(object): else: map_reduce_function = "map_reduce" - if isinstance(output, six.string_types): + if isinstance(output, str): mr_args["out"] = output elif isinstance(output, dict): @@ -1559,7 +1552,7 @@ class BaseQuerySet(object): if self._limit == 0 or self._none: raise StopIteration - raw_doc = six.next(self._cursor) + raw_doc = next(self._cursor) if self._as_pymongo: return raw_doc @@ -1804,13 +1797,13 @@ class BaseQuerySet(object): } """ total, data, types = self.exec_js(freq_func, field) - values = {types.get(k): int(v) for k, v in iteritems(data)} + values = {types.get(k): int(v) for k, v in data.items()} if normalize: values = {k: float(v) / total for k, v in values.items()} frequencies = {} - for k, v in iteritems(values): + for k, v in values.items(): if isinstance(k, float): if int(k) == k: k = int(k) @@ -1830,7 +1823,7 @@ class BaseQuerySet(object): field_parts = field.split(".") try: field = ".".join( - f if isinstance(f, six.string_types) else f.db_field + f if isinstance(f, str) else f.db_field for f in self._document._lookup_field(field_parts) ) db_field_paths.append(field) @@ -1842,7 +1835,7 @@ class BaseQuerySet(object): for subdoc in subclasses: try: subfield = ".".join( - f if isinstance(f, six.string_types) else f.db_field + f if isinstance(f, str) else f.db_field for f in subdoc._lookup_field(field_parts) ) db_field_paths.append(subfield) @@ -1916,7 +1909,7 @@ class BaseQuerySet(object): field_name = match.group(1).split(".") fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript - return u'["%s"]' % fields[-1].db_field + return '["%s"]' % fields[-1].db_field def field_path_sub(match): # Extract just the field name, and look up the field objects diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index 5c3ff222..443c895c 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -1,7 +1,7 @@ __all__ = ("QueryFieldList",) -class QueryFieldList(object): +class QueryFieldList: """Object that handles combinations of .only() and .exclude() calls""" ONLY = 1 @@ -69,8 +69,6 @@ class QueryFieldList(object): def __bool__(self): return bool(self.fields) - __nonzero__ = __bool__ # For Py2 support - def as_dict(self): field_list = {field: self.value for field in self.fields} if self.slice: @@ -80,7 +78,7 @@ class QueryFieldList(object): return field_list def reset(self): - 
self.fields = set([]) + self.fields = set() self.slice = {} self.value = self.ONLY diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index 5067ffbf..699526fd 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -4,7 +4,7 @@ from mongoengine.queryset.queryset import QuerySet __all__ = ("queryset_manager", "QuerySetManager") -class QuerySetManager(object): +class QuerySetManager: """ The default QuerySet Manager. diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 4ba62d46..8b5872f8 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,5 +1,3 @@ -import six - from mongoengine.errors import OperationError from mongoengine.queryset.base import ( BaseQuerySet, @@ -127,8 +125,8 @@ class QuerySet(BaseQuerySet): # Pull in ITER_CHUNK_SIZE docs from the database and store them in # the result cache. try: - for _ in six.moves.range(ITER_CHUNK_SIZE): - self._result_cache.append(six.next(self)) + for _ in range(ITER_CHUNK_SIZE): + self._result_cache.append(next(self)) except StopIteration: # Getting this exception means there are no more docs in the # db cursor. Set _has_more to False so that we can use that @@ -143,10 +141,10 @@ class QuerySet(BaseQuerySet): getting the count """ if with_limit_and_skip is False: - return super(QuerySet, self).count(with_limit_and_skip) + return super().count(with_limit_and_skip) if self._len is None: - self._len = super(QuerySet, self).count(with_limit_and_skip) + self._len = super().count(with_limit_and_skip) return self._len @@ -180,9 +178,9 @@ class QuerySetNoCache(BaseQuerySet): return ".. queryset mid-iteration .." data = [] - for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): + for _ in range(REPR_OUTPUT_SIZE + 1): try: - data.append(six.next(self)) + data.append(next(self)) except StopIteration: break diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 659a97e2..3f1db8fa 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -3,14 +3,12 @@ from collections import defaultdict from bson import ObjectId, SON from bson.dbref import DBRef import pymongo -import six -from six import iteritems from mongoengine.base import UPDATE_OPERATORS from mongoengine.common import _import_class from mongoengine.errors import InvalidQueryError -__all__ = ("query", "update") +__all__ = ("query", "update", "STRING_OPERATORS") COMPARISON_OPERATORS = ( "ne", @@ -101,7 +99,7 @@ def query(_doc_cls=None, **kwargs): cleaned_fields = [] for field in fields: append_field = True - if isinstance(field, six.string_types): + if isinstance(field, str): parts.append(field) append_field = False # is last and CachedReferenceField @@ -180,7 +178,7 @@ def query(_doc_cls=None, **kwargs): "$near" in value_dict or "$nearSphere" in value_dict ): value_son = SON() - for k, v in iteritems(value_dict): + for k, v in value_dict.items(): if k == "$maxDistance" or k == "$minDistance": continue value_son[k] = v @@ -281,7 +279,7 @@ def update(_doc_cls=None, **update): appended_sub_field = False for field in fields: append_field = True - if isinstance(field, six.string_types): + if isinstance(field, str): # Convert the S operator to $ if field == "S": field = "$" @@ -435,7 +433,9 @@ def _geo_operator(field, op, value): value = {"$near": _infer_geometry(value)} else: raise NotImplementedError( - 'Geo method "%s" has not been implemented for a %s ' % (op, field._name) + 'Geo method "{}" has not been implemented for 
a {} '.format( + op, field._name + ) ) return value diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 470839c1..0eacc2ef 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -7,7 +7,7 @@ from mongoengine.queryset import transform __all__ = ("Q", "QNode") -class QNodeVisitor(object): +class QNodeVisitor: """Base visitor class for visiting Q-object nodes in a query tree. """ @@ -79,7 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor): return transform.query(self.document, **query.query) -class QNode(object): +class QNode: """Base class for nodes in query trees.""" AND = 0 @@ -143,8 +143,6 @@ class QCombination(QNode): def __bool__(self): return bool(self.children) - __nonzero__ = __bool__ # For Py2 support - def accept(self, visitor): for i in range(len(self.children)): if isinstance(self.children[i], QNode): @@ -180,8 +178,6 @@ class Q(QNode): def __bool__(self): return bool(self.query) - __nonzero__ = __bool__ # For Py2 support - def __eq__(self, other): return self.__class__ == other.__class__ and self.query == other.query diff --git a/mongoengine/signals.py b/mongoengine/signals.py index 0db63604..582b533d 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -15,11 +15,11 @@ try: signals_available = True except ImportError: - class Namespace(object): + class Namespace: def signal(self, name, doc=None): return _FakeSignal(name, doc) - class _FakeSignal(object): + class _FakeSignal: """If blinker is unavailable, create a fake class with the same interface that allows sending of signals but will fail with an error on anything else. Instead of doing anything on send, it diff --git a/requirements.txt b/requirements.txt index 43e5261b..0ce39f74 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ pymongo>=3.4 -six==1.10.0 Sphinx==1.5.5 sphinx-rtd-theme==0.2.4 diff --git a/setup.py b/setup.py index 16896fbb..393de9c7 100644 --- a/setup.py +++ b/setup.py @@ -97,21 +97,17 @@ CLASSIFIERS = [ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database", "Topic :: Software Development :: Libraries :: Python Modules", ] -PYTHON_VERSION = sys.version_info[0] -PY3 = PYTHON_VERSION == 3 -PY2 = PYTHON_VERSION == 2 - extra_opts = { "packages": find_packages(exclude=["tests", "tests.*"]), "tests_require": [ @@ -120,20 +116,14 @@ extra_opts = { "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls "blinker", "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support - "zipp<2.0.0", # (dependency of pytest) dropped python2 support - "pyparsing<3", # sub-dependency that dropped py2 support - "configparser<5", # sub-dependency that dropped py2 support ], } -if PY3: - extra_opts["use_2to3"] = True - if "test" in sys.argv: - extra_opts["packages"] = find_packages() - extra_opts["package_data"] = { - "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] - } -else: - extra_opts["tests_require"] += ["python-dateutil"] + +if "test" in sys.argv: + 
extra_opts["packages"] = find_packages() + extra_opts["package_data"] = { + "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] + } setup( name="mongoengine", @@ -150,7 +140,8 @@ setup( long_description=LONG_DESCRIPTION, platforms=["any"], classifiers=CLASSIFIERS, - install_requires=["pymongo>=3.4, <4.0", "six>=1.10.0"], + python_requires=">=3.5", + install_requires=["pymongo>=3.4, <4.0"], cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index b08306a0..4f728c2d 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -5,7 +5,6 @@ from datetime import datetime from pymongo.collation import Collation from pymongo.errors import OperationFailure import pytest -from six import iteritems from mongoengine import * from mongoengine.connection import get_db @@ -59,7 +58,7 @@ class TestIndexes(unittest.TestCase): info = BlogPost.objects._collection.index_information() # _id, '-date', 'tags', ('cat', 'date') assert len(info) == 4 - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -87,7 +86,7 @@ class TestIndexes(unittest.TestCase): # the indices on -date and tags will both contain # _cls as first element in the key assert len(info) == 4 - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -102,7 +101,7 @@ class TestIndexes(unittest.TestCase): ExtendedBlogPost.ensure_indexes() info = ExtendedBlogPost.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -192,7 +191,7 @@ class TestIndexes(unittest.TestCase): # Indexes are lazy so use list() to perform query list(Person.objects) info = Person.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("rank.title", 1)] in info def test_explicit_geo2d_index(self): @@ -207,7 +206,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "2d")] in info def test_explicit_geo2d_index_embedded(self): @@ -227,7 +226,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("current.location.point", "2d")] in info def test_explicit_geosphere_index(self): @@ -244,7 +243,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "2dsphere")] in info def test_explicit_geohaystack_index(self): @@ -266,7 +265,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", 
"geoHaystack")] in info def test_create_geohaystack_index(self): @@ -279,7 +278,7 @@ class TestIndexes(unittest.TestCase): Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "geoHaystack"), ("name", 1)] in info def test_dictionary_indexes(self): @@ -308,7 +307,7 @@ class TestIndexes(unittest.TestCase): info = BlogPost.objects._collection.index_information() info = [ (value["key"], value.get("unique", False), value.get("sparse", False)) - for key, value in iteritems(info) + for key, value in info.items() ] assert ([("addDate", -1)], True, True) in info @@ -889,7 +888,7 @@ class TestIndexes(unittest.TestCase): self.fail("Unbound local error at index + pk definition") info = BlogPost.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] index_item = [("_id", 1), ("comments.comment_id", 1)] assert index_item in info @@ -930,7 +929,7 @@ class TestIndexes(unittest.TestCase): meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]} info = MyDoc.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("provider_ids.foo", 1)] in info assert [("provider_ids.bar", 1)] in info diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py index d7bd0632..53a1489b 100644 --- a/tests/document/test_inheritance.py +++ b/tests/document/test_inheritance.py @@ -3,7 +3,6 @@ import unittest import warnings import pytest -from six import iteritems from mongoengine import ( BooleanField, @@ -549,7 +548,7 @@ class TestInheritance(MongoDBTestCase): class Human(Mammal): pass - for k, v in iteritems(defaults): + for k, v in defaults.items(): for cls in [Animal, Fish, Guppy]: assert cls._meta[k] == v diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index a5c21323..920bf392 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -10,7 +10,6 @@ import bson from bson import DBRef, ObjectId from pymongo.errors import DuplicateKeyError import pytest -from six import iteritems from mongoengine import * from mongoengine import signals @@ -3274,7 +3273,7 @@ class TestDocumentInstance(MongoDBTestCase): def expand(self): self.flattened_parameter = {} - for parameter_name, parameter in iteritems(self.parameters): + for parameter_name, parameter in self.parameters.items(): parameter.expand() class NodesSystem(Document): @@ -3282,7 +3281,7 @@ class TestDocumentInstance(MongoDBTestCase): nodes = MapField(ReferenceField(Node, dbref=False)) def save(self, *args, **kwargs): - for node_name, node in iteritems(self.nodes): + for node_name, node in self.nodes.items(): node.expand() node.save(*args, **kwargs) super(NodesSystem, self).save(*args, **kwargs) diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index e2a1b8d6..a9c0c7e5 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -3,13 +3,12 @@ import uuid from bson import Binary import pytest -import six from mongoengine import * from tests.utils import MongoDBTestCase -BIN_VALUE = six.b( - "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5" +BIN_VALUE = 
"\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode( + "latin-1" ) @@ -22,7 +21,7 @@ class TestBinaryField(MongoDBTestCase): content_type = StringField() blob = BinaryField() - BLOB = six.b("\xe6\x00\xc4\xff\x07") + BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1") MIME_TYPE = "application/octet-stream" Attachment.drop_collection() @@ -32,7 +31,7 @@ class TestBinaryField(MongoDBTestCase): attachment_1 = Attachment.objects().first() assert MIME_TYPE == attachment_1.content_type - assert BLOB == six.binary_type(attachment_1.blob) + assert BLOB == bytes(attachment_1.blob) def test_validation_succeeds(self): """Ensure that valid values can be assigned to binary fields. @@ -47,11 +46,11 @@ class TestBinaryField(MongoDBTestCase): attachment_required = AttachmentRequired() with pytest.raises(ValidationError): attachment_required.validate() - attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07")) + attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1")) attachment_required.validate() - _5_BYTES = six.b("\xe6\x00\xc4\xff\x07") - _4_BYTES = six.b("\xe6\x00\xc4\xff") + _5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1") + _4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1") with pytest.raises(ValidationError): AttachmentSizeLimit(blob=_5_BYTES).validate() AttachmentSizeLimit(blob=_4_BYTES).validate() @@ -123,10 +122,7 @@ class TestBinaryField(MongoDBTestCase): upsert=True, new=True, set__bin_field=BIN_VALUE ) assert doc.some_field == "test" - if six.PY3: - assert doc.bin_field == BIN_VALUE - else: - assert doc.bin_field == Binary(BIN_VALUE) + assert doc.bin_field == BIN_VALUE def test_update_one(self): """Ensures no regression of bug #1127""" @@ -136,7 +132,7 @@ class TestBinaryField(MongoDBTestCase): MyDocument.drop_collection() - bin_data = six.b("\xe6\x00\xc4\xff\x07") + bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1") doc = MyDocument(bin_field=bin_data).save() n_updated = MyDocument.objects(bin_field=bin_data).update_one( @@ -144,7 +140,4 @@ class TestBinaryField(MongoDBTestCase): ) assert n_updated == 1 fetched = MyDocument.objects.with_id(doc.id) - if six.PY3: - assert fetched.bin_field == BIN_VALUE - else: - assert fetched.bin_field == Binary(BIN_VALUE) + assert fetched.bin_field == BIN_VALUE diff --git a/tests/fields/test_date_field.py b/tests/fields/test_date_field.py index e94ed0ce..42a4b7f1 100644 --- a/tests/fields/test_date_field.py +++ b/tests/fields/test_date_field.py @@ -2,7 +2,6 @@ import datetime import pytest -import six try: import dateutil @@ -89,17 +88,6 @@ class TestDateField(MongoDBTestCase): assert log.date == d1.date() assert log.date == d2.date() - if not six.PY3: - # Pre UTC dates microseconds below 1000 are dropped - # This does not seem to be true in PY3 - d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) - d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) - log.date = d1 - log.save() - log.reload() - assert log.date == d1.date() - assert log.date == d2.date() - def test_regular_usage(self): """Tests for regular datetime fields""" diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index 70debac5..48936af7 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -2,7 +2,6 @@ import datetime as dt import pytest -import six try: import dateutil @@ -98,17 +97,6 @@ class TestDateTimeField(MongoDBTestCase): assert log.date != d1 assert log.date == d2 - if not six.PY3: - # Pre UTC dates microseconds below 1000 are 
dropped - # This does not seem to be true in PY3 - d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999) - d2 = dt.datetime(1969, 12, 31, 23, 59, 59) - log.date = d1 - log.save() - log.reload() - assert log.date != d1 - assert log.date == d2 - def test_regular_usage(self): """Tests for regular datetime fields""" @@ -213,7 +201,7 @@ class TestDateTimeField(MongoDBTestCase): # make sure that passing a parsable datetime works dtd = DTDoc() dtd.date = date_str - assert isinstance(dtd.date, six.string_types) + assert isinstance(dtd.date, str) dtd.save() dtd.reload() diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index eeddac1e..13ca9c0b 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -75,7 +75,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: Person.objects(settings__notexist="bar").first() - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' with pytest.raises(LookUpError): Person.objects.only("settings.notexist") @@ -111,7 +111,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: assert Person.objects(settings__notexist="bar").first().id == p.id - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute assert Person.objects(settings__base_foo="basefoo").first().id == p.id @@ -319,7 +319,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: Person.objects(settings__notexist="bar").first() - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' with pytest.raises(LookUpError): Person.objects.only("settings.notexist") @@ -347,7 +347,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: assert Person.objects(settings__notexist="bar").first().id == p.id - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute assert Person.objects(settings__base_foo="basefoo").first().id == p.id diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index b8ece1a9..cbac9b69 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -3,14 +3,13 @@ import copy import os import tempfile import unittest +from io import BytesIO import gridfs import pytest -import six from mongoengine import * from mongoengine.connection import get_db -from mongoengine.python_support import StringIO try: from PIL import Image @@ -30,7 +29,7 @@ TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png") def get_file(path): """Use a BytesIO instead of a file to allow to have a one-liner and avoid that the file remains opened""" - bytes_io = StringIO() + bytes_io = BytesIO() with open(path, "rb") as f: bytes_io.write(f.read()) bytes_io.seek(0) @@ -58,7 +57,7 @@ class TestFileField(MongoDBTestCase): PutFile.drop_collection() - text = six.b("Hello, World!") + text = "Hello, World!".encode("latin-1") content_type = "text/plain" putfile = PutFile() @@ 
-80,7 +79,7 @@ class TestFileField(MongoDBTestCase): PutFile.drop_collection() putfile = PutFile() - putstring = StringIO() + putstring = BytesIO() putstring.write(text) putstring.seek(0) putfile.the_file.put(putstring, content_type=content_type) @@ -101,8 +100,8 @@ class TestFileField(MongoDBTestCase): StreamFile.drop_collection() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") content_type = "text/plain" streamfile = StreamFile() @@ -137,8 +136,8 @@ class TestFileField(MongoDBTestCase): StreamFile.drop_collection() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") streamfile = StreamFile() streamfile.save() @@ -167,8 +166,8 @@ class TestFileField(MongoDBTestCase): class SetFile(Document): the_file = FileField() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") SetFile.drop_collection() @@ -196,7 +195,7 @@ class TestFileField(MongoDBTestCase): GridDocument.drop_collection() with tempfile.TemporaryFile() as f: - f.write(six.b("Hello World!")) + f.write("Hello World!".encode("latin-1")) f.flush() # Test without default @@ -213,7 +212,7 @@ class TestFileField(MongoDBTestCase): assert doc_b.the_file.grid_id == doc_c.the_file.grid_id # Test with default - doc_d = GridDocument(the_file=six.b("")) + doc_d = GridDocument(the_file="".encode("latin-1")) doc_d.save() doc_e = GridDocument.objects.with_id(doc_d.id) @@ -240,7 +239,7 @@ class TestFileField(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" - test_file.the_file.put(six.b("Hello, World!")) + test_file.the_file.put("Hello, World!".encode("latin-1")) test_file.save() # Second instance @@ -297,7 +296,9 @@ class TestFileField(MongoDBTestCase): test_file = TestFile() assert not bool(test_file.the_file) - test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain") + test_file.the_file.put( + "Hello, World!".encode("latin-1"), content_type="text/plain" + ) test_file.save() assert bool(test_file.the_file) @@ -319,7 +320,7 @@ class TestFileField(MongoDBTestCase): class TestFile(Document): the_file = FileField() - text = six.b("Hello, World!") + text = "Hello, World!".encode("latin-1") content_type = "text/plain" testfile = TestFile() @@ -363,7 +364,7 @@ class TestFileField(MongoDBTestCase): testfile.the_file.put(text, content_type=content_type, filename="hello") testfile.save() - text = six.b("Bonjour, World!") + text = "Bonjour, World!".encode("latin-1") testfile.the_file.replace(text, content_type=content_type, filename="hello") testfile.save() @@ -387,7 +388,7 @@ class TestFileField(MongoDBTestCase): TestImage.drop_collection() with tempfile.TemporaryFile() as f: - f.write(six.b("Hello World!")) + f.write("Hello World!".encode("latin-1")) f.flush() t = TestImage() @@ -503,21 +504,21 @@ class TestFileField(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" 
-        test_file.the_file.put(six.b("Hello, World!"), name="hello.txt")
+        test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt")
         test_file.save()
 
         data = get_db("test_files").macumba.files.find_one()
         assert data.get("name") == "hello.txt"
 
         test_file = TestFile.objects.first()
-        assert test_file.the_file.read() == six.b("Hello, World!")
+        assert test_file.the_file.read() == "Hello, World!".encode("latin-1")
 
         test_file = TestFile.objects.first()
-        test_file.the_file = six.b("HELLO, WORLD!")
+        test_file.the_file = "HELLO, WORLD!".encode("latin-1")
         test_file.save()
 
         test_file = TestFile.objects.first()
-        assert test_file.the_file.read() == six.b("HELLO, WORLD!")
+        assert test_file.the_file.read() == "HELLO, WORLD!".encode("latin-1")
 
     def test_copyable(self):
         class PutFile(Document):
@@ -525,7 +526,7 @@
 
         PutFile.drop_collection()
 
-        text = six.b("Hello, World!")
+        text = "Hello, World!".encode("latin-1")
         content_type = "text/plain"
 
         putfile = PutFile()
diff --git a/tests/fields/test_float_field.py b/tests/fields/test_float_field.py
index a1cd7a0a..839494a9 100644
--- a/tests/fields/test_float_field.py
+++ b/tests/fields/test_float_field.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 import pytest
-import six
 
 from mongoengine import *
 
@@ -52,9 +51,8 @@ class TestFloatField(MongoDBTestCase):
 
         big_person = BigPerson()
 
-        for value, value_type in enumerate(six.integer_types):
-            big_person.height = value_type(value)
-            big_person.validate()
+        big_person.height = int(0)
+        big_person.validate()
 
         big_person.height = 2 ** 500
         big_person.validate()
diff --git a/tests/fields/test_long_field.py b/tests/fields/test_long_field.py
index da4f04c8..330051c3 100644
--- a/tests/fields/test_long_field.py
+++ b/tests/fields/test_long_field.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
+from bson.int64 import Int64
 import pytest
-import six
-
-try:
-    from bson.int64 import Int64
-except ImportError:
-    Int64 = long
 
 from mongoengine import *
 from mongoengine.connection import get_db
@@ -28,7 +22,7 @@ class TestLongField(MongoDBTestCase):
         assert isinstance(
             db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
         )
-        assert isinstance(doc.some_long, six.integer_types)
+        assert isinstance(doc.some_long, int)
 
     def test_long_validation(self):
         """Ensure that invalid values cannot be assigned to long fields.
diff --git a/tests/fields/test_sequence_field.py b/tests/fields/test_sequence_field.py
index aa83f710..81d648fd 100644
--- a/tests/fields/test_sequence_field.py
+++ b/tests/fields/test_sequence_field.py
@@ -21,7 +21,7 @@ class TestSequenceField(MongoDBTestCase):
         assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        assert ids == range(1, 11)
+        assert ids == list(range(1, 11))
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
         assert c["next"] == 10
@@ -76,7 +76,7 @@ class TestSequenceField(MongoDBTestCase):
         assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        assert ids == range(1, 11)
+        assert ids == list(range(1, 11))
 
         c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
         assert c["next"] == 10
@@ -101,10 +101,10 @@ class TestSequenceField(MongoDBTestCase):
         assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        assert ids == range(1, 11)
+        assert ids == list(range(1, 11))
 
         counters = [i.counter for i in Person.objects]
-        assert counters == range(1, 11)
+        assert counters == list(range(1, 11))
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
         assert c["next"] == 10
@@ -166,10 +166,10 @@ class TestSequenceField(MongoDBTestCase):
         assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        assert ids == range(1, 11)
+        assert ids == list(range(1, 11))
 
         id = [i.id for i in Animal.objects]
-        assert id == range(1, 11)
+        assert id == list(range(1, 11))
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
         assert c["next"] == 10
@@ -193,7 +193,7 @@ class TestSequenceField(MongoDBTestCase):
         assert c["next"] == 10
 
         ids = [i.id for i in Person.objects]
-        assert ids == map(str, range(1, 11))
+        assert ids == [str(i) for i in range(1, 11)]
 
         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
         assert c["next"] == 10
@@ -267,12 +267,12 @@ class TestSequenceField(MongoDBTestCase):
         foo = Foo(name="Foo")
         foo.save()
 
-        assert not (
-            "base.counter" in self.db["mongoengine.counters"].find().distinct("_id")
+        assert "base.counter" not in self.db["mongoengine.counters"].find().distinct(
+            "_id"
         )
-        assert ("foo.counter" and "bar.counter") in self.db[
-            "mongoengine.counters"
-        ].find().distinct("_id")
+        existing_counters = self.db["mongoengine.counters"].find().distinct("_id")
+        assert "foo.counter" in existing_counters
+        assert "bar.counter" in existing_counters
         assert foo.counter == bar.counter
         assert foo._fields["counter"].owner_document == Foo
         assert bar._fields["counter"].owner_document == Bar
diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py
index 948a4788..c449e467 100644
--- a/tests/fields/test_url_field.py
+++ b/tests/fields/test_url_field.py
@@ -2,7 +2,6 @@
 import pytest
 
 from mongoengine import *
-
 from tests.utils import MongoDBTestCase
 
 
@@ -35,7 +34,7 @@ class TestURLField(MongoDBTestCase):
         with pytest.raises(ValidationError) as exc_info:
             link.validate()
         assert (
-            unicode(exc_info.value)
+            str(exc_info.value)
             == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
         )
 
diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py
index f15b9748..984e2bd1 100644
--- a/tests/queryset/test_queryset.py
+++ b/tests/queryset/test_queryset.py
@@ -10,8 +10,6 @@ import pymongo
 from pymongo.read_preferences import ReadPreference
 from pymongo.results import UpdateResult
 import pytest
-import six
-from six import iteritems
 
 from mongoengine import *
 from mongoengine.connection import get_db
@@ -110,7 +108,7 @@ class TestQueryset(unittest.TestCase):
         # Filter people by age
         people = self.Person.objects(age=20)
         assert people.count() == 1
-        person = people.next()
+        person = next(people)
         assert person == user_a
         assert person.name == "User A"
         assert person.age == 20
@@ -2783,7 +2781,7 @@ class TestQueryset(unittest.TestCase):
         )
 
         # start a map/reduce
-        cursor.next()
+        next(cursor)
 
         results = Person.objects.map_reduce(
             map_f=map_person,
@@ -4108,7 +4106,7 @@ class TestQueryset(unittest.TestCase):
         info = Comment.objects._collection.index_information()
         info = [
             (value["key"], value.get("unique", False), value.get("sparse", False))
-            for key, value in iteritems(info)
+            for key, value in info.items()
         ]
 
         assert ([("_cls", 1), ("message", 1)], False, False) in info
@@ -4410,7 +4408,7 @@ class TestQueryset(unittest.TestCase):
         # Use a query to filter the people found to just person1
         people = self.Person.objects(age=20).scalar("name")
         assert people.count() == 1
-        person = people.next()
+        person = next(people)
         assert person == "User A"
 
         # Test limit
@@ -4460,24 +4458,14 @@ class TestQueryset(unittest.TestCase):
             "A0" == "%s" % self.Person.objects.order_by("name").scalar("name").first()
         )
         assert "A0" == "%s" % self.Person.objects.scalar("name").order_by("name")[0]
-        if six.PY3:
-            assert (
-                "['A1', 'A2']"
-                == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3]
-            )
-            assert (
-                "['A51', 'A52']"
-                == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53]
-            )
-        else:
-            assert (
-                "[u'A1', u'A2']"
-                == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3]
-            )
-            assert (
-                "[u'A51', u'A52']"
-                == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53]
-            )
+        assert (
+            "['A1', 'A2']"
+            == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3]
+        )
+        assert (
+            "['A51', 'A52']"
+            == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53]
+        )
 
         # with_id and in_bulk
         person = self.Person.objects.order_by("name").first()
@@ -4485,10 +4473,7 @@ class TestQueryset(unittest.TestCase):
 
         pks = self.Person.objects.order_by("age").scalar("pk")[1:3]
         names = self.Person.objects.scalar("name").in_bulk(list(pks)).values()
-        if six.PY3:
-            expected = "['A1', 'A2']"
-        else:
-            expected = "[u'A1', u'A2']"
+        expected = "['A1', 'A2']"
         assert expected == "%s" % sorted(names)
 
     def test_fields(self):
@@ -5377,7 +5362,7 @@ class TestQueryset(unittest.TestCase):
         if not test:
             raise AssertionError("Cursor has data and returned False")
 
-        queryset.next()
+        next(queryset)
         if not queryset:
             raise AssertionError(
                 "Cursor has data and it must returns True, even in the last item."
diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py
index 7b5d7d11..6d432e32 100644
--- a/tests/test_datastructures.py
+++ b/tests/test_datastructures.py
@@ -1,7 +1,6 @@
 import unittest
 
 import pytest
-from six import iterkeys
 
 from mongoengine import Document
 from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict
@@ -287,7 +286,7 @@ class TestBaseList:
         base_list[:] = [
             0,
             1,
-        ]  # Will use __setslice__ under py2 and __setitem__ under py3
+        ]
 
         assert base_list._instance._changed_fields == ["my_name"]
         assert base_list == [0, 1]
@@ -296,13 +295,13 @@ class TestBaseList:
         base_list[0:2] = [
             1,
             0,
-        ]  # Will use __setslice__ under py2 and __setitem__ under py3
+        ]
 
         assert base_list._instance._changed_fields == ["my_name"]
         assert base_list == [1, 0, 2]
 
     def test___setitem___calls_with_step_slice_mark_as_changed(self):
         base_list = self._get_baselist([0, 1, 2])
-        base_list[0:3:2] = [-1, -2]  # uses __setitem__ in both py2 & 3
+        base_list[0:3:2] = [-1, -2]  # uses __setitem__
         assert base_list._instance._changed_fields == ["my_name"]
         assert base_list == [-1, 1, -2]
@@ -372,7 +371,7 @@ class TestStrictDict(unittest.TestCase):
 
     def test_iterkeys(self):
         d = self.dtype(a=1)
-        assert list(iterkeys(d)) == ["a"]
+        assert list(d.keys()) == ["a"]
 
     def test_len(self):
         d = self.dtype(a=1)
diff --git a/tests/test_dereference.py b/tests/test_dereference.py
index b9d92883..0f9f412c 100644
--- a/tests/test_dereference.py
+++ b/tests/test_dereference.py
@@ -2,10 +2,8 @@
 import unittest
 
 from bson import DBRef, ObjectId
-from six import iteritems
 
 from mongoengine import *
-from mongoengine.connection import get_db
 from mongoengine.context_managers import query_counter
 
 
@@ -739,7 +737,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 2
 
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, User)
 
         # Document select_related
@@ -752,7 +750,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 2
 
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, User)
 
         # Queryset select_related
@@ -766,7 +764,7 @@ class FieldTest(unittest.TestCase):
                 [m for m in group_obj.members]
                 assert q == 2
 
-                for k, m in iteritems(group_obj.members):
+                for k, m in group_obj.members.items():
                     assert isinstance(m, User)
 
         User.drop_collection()
@@ -820,7 +818,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
 
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
 
         # Document select_related
@@ -836,7 +834,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
 
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
 
         # Queryset select_related
@@ -853,7 +851,7 @@ class FieldTest(unittest.TestCase):
                 [m for m in group_obj.members]
                 assert q == 4
 
-                for k, m in iteritems(group_obj.members):
+                for k, m in group_obj.members.items():
                     assert "User" in m.__class__.__name__
 
         Group.objects.delete()
@@ -910,7 +908,7 @@ class FieldTest(unittest.TestCase):
            [m for m in group_obj.members]
             assert q == 2
 
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, UserA)
 
         # Document select_related
@@ -926,7 +924,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 2
 
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, UserA)
 
         # Queryset select_related
@@ -943,7 +941,7 @@ class FieldTest(unittest.TestCase):
                 [m for m in group_obj.members]
                 assert q == 2
 
-                for k, m in iteritems(group_obj.members):
+                for k, m in group_obj.members.items():
                     assert isinstance(m, UserA)
 
         UserA.drop_collection()
@@ -997,7 +995,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
 
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
 
         # Document select_related
@@ -1013,7 +1011,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
 
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
 
         # Queryset select_related
@@ -1030,7 +1028,7 @@ class FieldTest(unittest.TestCase):
                 [m for m in group_obj.members]
                 assert q == 4
 
-                for k, m in iteritems(group_obj.members):
+                for k, m in group_obj.members.items():
                     assert "User" in m.__class__.__name__
 
         Group.objects.delete()
diff --git a/tests/test_signals.py b/tests/test_signals.py
index d79eaf75..451e01ff 100644
--- a/tests/test_signals.py
+++ b/tests/test_signals.py
@@ -58,7 +58,9 @@ class TestSignal(unittest.TestCase):
 
             @classmethod
             def post_save(cls, sender, document, **kwargs):
-                dirty_keys = document._delta()[0].keys() + document._delta()[1].keys()
+                dirty_keys = list(document._delta()[0].keys()) + list(
+                    document._delta()[1].keys()
+                )
                 signal_output.append("post_save signal, %s" % document)
                 signal_output.append("post_save dirty keys, %s" % dirty_keys)
                 if kwargs.pop("created", False):
diff --git a/tox.ini b/tox.ini
index 396817ca..675b6d9a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = {py27,py35,pypy,pypy3}-{mg34,mg36,mg39,mg310}
+envlist = {py35,pypy3}-{mg34,mg36,mg39,mg310}
 
 [testenv]
 commands =