diff --git a/.gitignore b/.gitignore index 048a2d19..16633bae 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,15 @@ -.* !.gitignore *~ *.py[co] .*.sw[po] +.cache/ +.coverage +.coveragerc +.env +.idea/ +.pytest_cache/ +.tox/ +.eggs/ *.egg docs/.build docs/_build @@ -13,8 +20,6 @@ env/ .settings .project .pydevproject -tests/test_bugfix.py htmlcov/ venv venv3 -scratchpad diff --git a/.landscape.yml b/.landscape.yml index a27bbb03..4f13a5eb 100644 --- a/.landscape.yml +++ b/.landscape.yml @@ -5,17 +5,12 @@ pylint: options: additional-builtins: - # add xrange and long as valid built-ins. In Python 3, xrange is - # translated into range and long is translated into int via 2to3 (see - # "use_2to3" in setup.py). This should be removed when we drop Python - # 2 support (which probably won't happen any time soon). - - xrange + # add long as valid built-ins. - long pyflakes: disable: - # undefined variables are already covered by pylint (and exclude - # xrange & long) + # undefined variables are already covered by pylint (and exclude long) - F821 ignore-paths: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..e11640b8 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,12 @@ +fail_fast: false +repos: + - repo: https://github.com/ambv/black + rev: 19.10b0 + hooks: + - id: black + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.0a2 + hooks: + - id: flake8 + additional_dependencies: + - flake8-import-order diff --git a/.travis.yml b/.travis.yml index 62bbacb1..2316124a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,13 +1,10 @@ # For full coverage, we'd have to test all supported Python, MongoDB, and # PyMongo combinations. However, that would result in an overly long build # with a very large number of jobs, hence we only test a subset of all the -# combinations: -# * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, -# tested against Python v2.7, v3.5, v3.6, v3.7, v3.8, PyPy and PyPy3. -# * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo -# combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. -# * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. -# +# combinations. +# * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, +# Other combinations are tested. See below for the details or check the travis jobs + # We should periodically check MongoDB Server versions supported by MongoDB # Inc., add newly released versions to the test matrix, and remove versions # which have reached their End of Life. See: @@ -16,19 +13,15 @@ # # Reminder: Update README.rst if you change MongoDB versions we test. - language: python +dist: xenial python: -- 2.7 - 3.5 - 3.6 - 3.7 - 3.8 -- pypy - pypy3 -dist: xenial - env: global: - MONGODB_3_4=3.4.17 @@ -39,6 +32,8 @@ env: - PYMONGO_3_6=3.6 - PYMONGO_3_9=3.9 - PYMONGO_3_10=3.10 + + - MAIN_PYTHON_VERSION=3.7 matrix: - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} @@ -47,8 +42,6 @@ matrix: fast_finish: true include: - - python: 2.7 - env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4} - python: 3.7 env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} - python: 3.7 @@ -66,29 +59,23 @@ install: # Install Python dependencies. 
- pip install --upgrade pip - pip install coveralls - - pip install flake8 flake8-import-order - - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0 - - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) + - pip install pre-commit + - pip install tox # tox dryrun to setup the tox venv (we run a mock test). - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" - # Install black for Python v3.7 only. - - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi before_script: - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork - - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for Python 2.7 only - - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi # Run black for Python 3.7 only + # Run pre-commit hooks (black, flake8, etc.) on the entire codebase + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only run on py37"; fi - mongo --eval 'db.version();' # Make sure mongo is awake script: - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" -# For now only submit coveralls for Python v2.7. Python v3.x currently shows -# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible -# code in a separate dir and runs tests on that. after_success: -- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; else echo "coveralls only sent for py27"; fi +- if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi notifications: irc: irc.freenode.org#mongoengine @@ -110,11 +97,11 @@ deploy: distributions: "sdist bdist_wheel" # Only deploy on tagged commits (aka GitHub releases) and only for the parent - # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4. + # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4. # We run Travis against many different Python, PyMongo, and MongoDB versions # and we don't want the deploy to occur multiple times). on: tags: true repo: MongoEngine/mongoengine condition: ($PYMONGO = ${PYMONGO_3_10}) && ($MONGODB = ${MONGODB_3_4}) - python: 2.7 + python: 3.7 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 56bae31f..035ae07a 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -20,19 +20,43 @@ post to the `user group ` Supported Interpreters ---------------------- -MongoEngine supports CPython 2.7 and newer. Language -features not supported by all interpreters can not be used. -The codebase is written in python 2 so you must be using python 2 -when developing new features. Compatibility of the library with Python 3 -relies on the 2to3 package that gets executed as part of the installation -build. You should ensure that your code is properly converted by -`2to3 `_. +MongoEngine supports CPython 3.5 and newer as well as PyPy3. +Language features not supported by all interpreters cannot be used. + +Python 3 codebase +---------------------- + +Since 0.20, the codebase is exclusively Python 3.
+ +Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs. +Travis runs the tests against the main Python 3.x versions. + Style Guide ----------- -MongoEngine uses `black `_ for code -formatting. +MongoEngine's codebase is formatted with `black `_, other tools like +flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly. + +To install all development tools, simply run the following commands: + +.. code-block:: console + + $ python -m pip install -r requirements-dev.txt + + +You can install `pre-commit `_ into your git hooks, +to automatically check and fix any formatting issue before creating a +git commit. + +To enable ``pre-commit`` simply run: + +.. code-block:: console + + $ pre-commit install + +See the ``.pre-commit-config.yaml`` configuration file for more information +on how it works. Testing ------- diff --git a/README.rst b/README.rst index b5c95888..aca8edc0 100644 --- a/README.rst +++ b/README.rst @@ -34,7 +34,7 @@ with MongoDB version > 4.0. Installation ============ We recommend the use of `virtualenv `_ and of -`pip `_. You can then use ``pip install -U mongoengine``. +`pip `_. You can then use ``python -m pip install -U mongoengine``. You may also have `setuptools `_ and thus you can use ``easy_install -U mongoengine``. Another option is `pipenv `_. You can then use ``pipenv install mongoengine`` @@ -42,13 +42,14 @@ to both create the virtual environment and install the package. Otherwise, you c download the source from `GitHub `_ and run ``python setup.py install``. +The support for Python2 was dropped with MongoEngine 0.20.0 + Dependencies ============ -All of the dependencies can easily be installed via `pip `_. +All of the dependencies can easily be installed via `python -m pip `_. At the very least, you'll need these two packages to use MongoEngine: - pymongo>=3.4 -- six>=1.10.0 If you utilize a ``DateTimeField``, you might also use a more flexible date parser: @@ -58,6 +59,10 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``: - Pillow>=2.0.0 +If you need to use signals: + +- blinker>=1.3 + Examples ======== Some simple examples of what MongoEngine code looks like: @@ -125,7 +130,7 @@ installed in your environment and then: .. code-block:: shell # Install tox - $ pip install tox + $ python -m pip install tox # Run the test suites $ tox diff --git a/benchmarks/test_inserts.py b/benchmarks/test_inserts.py index fd017bae..4ecd48de 100644 --- a/benchmarks/test_inserts.py +++ b/benchmarks/test_inserts.py @@ -4,12 +4,14 @@ import timeit def main(): setup = """ from pymongo import MongoClient + connection = MongoClient() connection.drop_database('mongoengine_benchmark_test') """ stmt = """ from pymongo import MongoClient + connection = MongoClient() db = connection.mongoengine_benchmark_test @@ -56,6 +58,7 @@ myNoddys = noddy.find() setup = """ from pymongo import MongoClient + connection = MongoClient() connection.drop_database('mongoengine_benchmark_test') connection.close() diff --git a/docs/changelog.rst b/docs/changelog.rst index 41ff8c85..76545559 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,9 +6,21 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). 
+- ATTENTION: Drop support for Python 2 - Add Mongo 4.0 to Travis +- Fix error when setting a string as a ComplexDateTimeField #2253 +- Bump development Status classifier to Production/Stable #2232 +- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple matches are found #630 - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 -- Add support for the `elemMatch` projection operator in .fields (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 +- Add support for the `elemMatch` projection operator in .fields() (e.g. BlogPost.objects.fields(elemMatch__comments="test")) #2267 +- DictField validation failed without a default connection (bug introduced in 0.19.0) #2239 +- Remove methods deprecated years ago: + - name parameter in Field constructor, e.g. `StringField(name="...")`; it was replaced by db_field + - Queryset.slave_okay() was deprecated since PyMongo 3 + - dropDups was dropped with MongoDB 3 + - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``; the right method to use is ``Document.ensure_indexes`` +- Added pre-commit #2212 +- Renamed requirements-lint.txt to requirements-dev.txt #2212 Changes in 0.19.1 ================= diff --git a/docs/django.rst b/docs/django.rst index b8a52165..d43a205e 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -13,7 +13,7 @@ Help Wanted! The MongoEngine team is looking for help contributing and maintaining a new Django extension for MongoEngine! If you have Django experience and would like -to help contribute to the project, please get in touch on the -`mailing list `_ or by +to help contribute to the project, please get in touch on the +`mailing list `_ or by simply contributing on `GitHub `_. diff --git a/docs/faq.rst b/docs/faq.rst index 27cd6937..49c73023 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -10,4 +10,3 @@ If this is a requirement for your project, check the alternative: `uMongo`_ and .. _uMongo: https://umongo.readthedocs.io/ .. _MotorEngine: https://motorengine.readthedocs.io/ - diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index aac13902..ac2146a6 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -86,7 +86,7 @@ using 3 different databases to store data:: connect(alias='user-db-alias', db='user-db') connect(alias='book-db-alias', db='book-db') connect(alias='users-books-db-alias', db='users-books-db') - + class User(Document): name = StringField() diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index bd2b43e2..6dc35c30 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -555,7 +555,6 @@ There are a few top level defaults for all indexes that can be set:: 'index_background': True, 'index_cls': False, 'auto_create_index': True, - 'index_drop_dups': True, } @@ -574,11 +573,6 @@ There are a few top level defaults for all indexes that can be set:: in systems where indexes are managed separately. Disabling this will improve performance. -:attr:`index_drop_dups` (Optional) - Set the default value for if an index should drop duplicates - Since MongoDB 3.0 drop_dups is not supported anymore.
Raises a Warning - and has no effect - Compound Indexes and Indexing sub documents ------------------------------------------- diff --git a/docs/guide/installing.rst b/docs/guide/installing.rst index b89d48f0..2c962ad9 100644 --- a/docs/guide/installing.rst +++ b/docs/guide/installing.rst @@ -12,7 +12,7 @@ MongoEngine is available on PyPI, so you can use :program:`pip`: .. code-block:: console - $ pip install mongoengine + $ python -m pip install mongoengine Alternatively, if you don't have setuptools installed, `download it from PyPi `_ and run diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 06bccb3b..e5214610 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -44,8 +44,8 @@ Available signals include: `post_save` Called within :meth:`~mongoengine.Document.save` after most actions - (validation, insert/update, and cascades, but not clearing dirty flags) have - completed successfully. Passed the additional boolean keyword argument + (validation, insert/update, and cascades, but not clearing dirty flags) have + completed successfully. Passed the additional boolean keyword argument `created` to indicate if the save was an insert or an update. `pre_delete` diff --git a/docs/guide/text-indexes.rst b/docs/guide/text-indexes.rst index 92a4471a..a5eaf7d8 100644 --- a/docs/guide/text-indexes.rst +++ b/docs/guide/text-indexes.rst @@ -8,7 +8,7 @@ After MongoDB 2.4 version, supports search documents by text indexes. Defining a Document with text index =================================== Use the *$* prefix to set a text index, Look the declaration:: - + class News(Document): title = StringField() content = StringField() @@ -35,10 +35,10 @@ Saving a document:: content="Various improvements").save() Next, start a text search using :attr:`QuerySet.search_text` method:: - + document = News.objects.search_text('testing').first() document.title # may be: "Using mongodb text search" - + document = News.objects.search_text('released').first() document.title # may be: "MongoEngine 0.9 released" diff --git a/docs/index.rst b/docs/index.rst index 662968d4..a42ff857 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,7 +7,7 @@ MongoDB. To install it, simply run .. code-block:: console - $ pip install -U mongoengine + $ python -m pip install -U mongoengine :doc:`tutorial` A quick tutorial building a tumblelog to get you up and running with @@ -91,4 +91,3 @@ Indices and tables * :ref:`genindex` * :ref:`modindex` * :ref:`search` - diff --git a/docs/tutorial.rst b/docs/tutorial.rst index bcd0d17f..b7885c34 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -18,7 +18,7 @@ location --- running it locally will be easier, but if that is not an option then it may be run on a remote server. If you haven't installed MongoEngine, simply use pip to install it like so:: - $ pip install mongoengine + $ python -m pip install mongoengine Before we can start using MongoEngine, we need to tell it how to connect to our instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 250347bf..4e798dd4 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -85,10 +85,10 @@ by default from now on. The 0.8.7 package on pypi was corrupted. 
If upgrading from 0.8.7 to 0.9.0 please follow: :: - pip uninstall pymongo - pip uninstall mongoengine - pip install pymongo==2.8 - pip install mongoengine + python -m pip uninstall pymongo + python -m pip uninstall mongoengine + python -m pip install pymongo==2.8 + python -m pip install mongoengine 0.8.7 ***** @@ -153,7 +153,7 @@ inherited classes like so: :: # 4. Remove indexes info = collection.index_information() - indexes_to_drop = [key for key, value in info.iteritems() + indexes_to_drop = [key for key, value in info.items() if '_types' in dict(value['key'])] for index in indexes_to_drop: collection.drop_index(index) diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index dcc1f092..d08d4930 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,8 +1,6 @@ import weakref from bson import DBRef -import six -from six import iteritems from mongoengine.common import _import_class from mongoengine.errors import DoesNotExist, MultipleObjectsReturned @@ -53,7 +51,7 @@ class BaseDict(dict): if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) self._name = name - super(BaseDict, self).__init__(dict_items) + super().__init__(dict_items) def get(self, key, default=None): # get does not use __getitem__ by default so we must override it as well @@ -63,18 +61,18 @@ class BaseDict(dict): return default def __getitem__(self, key): - value = super(BaseDict, self).__getitem__(key) + value = super().__getitem__(key) EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument) and value._instance is None: value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): - value = BaseDict(value, None, "%s.%s" % (self._name, key)) - super(BaseDict, self).__setitem__(key, value) + value = BaseDict(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): - value = BaseList(value, None, "%s.%s" % (self._name, key)) - super(BaseDict, self).__setitem__(key, value) + value = BaseList(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance return value @@ -99,7 +97,7 @@ class BaseDict(dict): def _mark_as_changed(self, key=None): if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed("%s.%s" % (self._name, key)) + self._instance._mark_as_changed("{}.{}".format(self._name, key)) else: self._instance._mark_as_changed(self._name) @@ -117,13 +115,13 @@ class BaseList(list): if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) self._name = name - super(BaseList, self).__init__(list_items) + super().__init__(list_items) def __getitem__(self, key): # change index to positive value because MongoDB does not support negative one if isinstance(key, int) and key < 0: key = len(self) + key - value = super(BaseList, self).__getitem__(key) + value = super().__getitem__(key) if isinstance(key, slice): # When receiving a slice operator, we don't convert the structure and bind @@ -135,19 +133,18 @@ class BaseList(list): value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): # Replace dict by BaseDict - value = BaseDict(value, None, "%s.%s" % (self._name, key)) - super(BaseList, self).__setitem__(key, value) + value = BaseDict(value, None, "{}.{}".format(self._name, key)) + 
super().__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): # Replace list by BaseList - value = BaseList(value, None, "%s.%s" % (self._name, key)) - super(BaseList, self).__setitem__(key, value) + value = BaseList(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance return value def __iter__(self): - for v in super(BaseList, self).__iter__(): - yield v + yield from super().__iter__() def __getstate__(self): self.instance = None @@ -165,7 +162,7 @@ class BaseList(list): # instead, we simply marks the whole list as changed changed_key = None - result = super(BaseList, self).__setitem__(key, value) + result = super().__setitem__(key, value) self._mark_as_changed(changed_key) return result @@ -180,30 +177,19 @@ class BaseList(list): __iadd__ = mark_as_changed_wrapper(list.__iadd__) __imul__ = mark_as_changed_wrapper(list.__imul__) - if six.PY2: - # Under py3 __setslice__, __delslice__ and __getslice__ - # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter - # so we mimic this under python 2 - def __setslice__(self, i, j, sequence): - return self.__setitem__(slice(i, j), sequence) - - def __delslice__(self, i, j): - return self.__delitem__(slice(i, j)) - - def __getslice__(self, i, j): - return self.__getitem__(slice(i, j)) - def _mark_as_changed(self, key=None): if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self))) + self._instance._mark_as_changed( + "{}.{}".format(self._name, key % len(self)) + ) else: self._instance._mark_as_changed(self._name) class EmbeddedDocumentList(BaseList): def __init__(self, list_items, instance, name): - super(EmbeddedDocumentList, self).__init__(list_items, instance, name) + super().__init__(list_items, instance, name) self._instance = instance @classmethod @@ -213,7 +199,7 @@ class EmbeddedDocumentList(BaseList): """ for key, expected_value in kwargs.items(): doc_val = getattr(embedded_doc, key) - if doc_val != expected_value and six.text_type(doc_val) != expected_value: + if doc_val != expected_value and str(doc_val) != expected_value: return False return True @@ -303,11 +289,11 @@ class EmbeddedDocumentList(BaseList): def create(self, **values): """ - Creates a new embedded document and saves it to the database. + Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList. .. note:: - The embedded document changes are not automatically saved - to the database after calling this method. + the instance of the EmbeddedDocument is not automatically saved to the database. + You still need to call .save() on the parent Document. :param values: A dictionary of values for the embedded document. :return: The new embedded document instance. 
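# The reworded create() docstring above is the behavioral point to keep in mind:
# create() only builds the embedded document and appends it to the list in memory;
# the parent Document still has to be saved for the change to reach MongoDB.
# A minimal sketch of that workflow (the Post/Comment models, database name, and a
# locally running mongod are assumptions for illustration, not part of this patch):

from mongoengine import (
    Document,
    EmbeddedDocument,
    EmbeddedDocumentListField,
    StringField,
    connect,
)


class Comment(EmbeddedDocument):
    author = StringField()
    text = StringField()


class Post(Document):
    title = StringField()
    comments = EmbeddedDocumentListField(Comment)


connect("embedded_list_demo")  # assumes a local mongod is reachable

post = Post(title="hello").save()
comment = post.comments.create(author="alice", text="hi")  # appended in memory only
post.save()  # the new Comment is persisted only now, together with its parent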
@@ -368,13 +354,13 @@ class EmbeddedDocumentList(BaseList): return len(values) -class StrictDict(object): +class StrictDict: __slots__ = () _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} _classes = {} def __init__(self, **kwargs): - for k, v in iteritems(kwargs): + for k, v in kwargs.items(): setattr(self, k, v) def __getitem__(self, key): @@ -422,13 +408,13 @@ class StrictDict(object): return (key for key in self.__slots__ if hasattr(self, key)) def __len__(self): - return len(list(iteritems(self))) + return len(list(self.items())) def __eq__(self, other): - return self.items() == other.items() + return list(self.items()) == list(other.items()) def __ne__(self, other): - return self.items() != other.items() + return not (self == other) @classmethod def create(cls, allowed_keys): @@ -443,7 +429,7 @@ class StrictDict(object): def __repr__(self): return "{%s}" % ", ".join( - '"{0!s}": {1!r}'.format(k, v) for k, v in self.items() + '"{!s}": {!r}'.format(k, v) for k, v in self.items() ) cls._classes[allowed_keys] = SpecificStrictDict @@ -468,9 +454,7 @@ class LazyReference(DBRef): self.document_type = document_type self._cached_doc = cached_doc self.passthrough = passthrough - super(LazyReference, self).__init__( - self.document_type._get_collection_name(), pk - ) + super().__init__(self.document_type._get_collection_name(), pk) def __getitem__(self, name): if not self.passthrough: @@ -488,4 +472,4 @@ class LazyReference(DBRef): raise AttributeError() def __repr__(self): - return "" % (self.document_type, self.pk) + return "".format(self.document_type, self.pk) diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index ad691362..e697fe40 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -1,11 +1,10 @@ import copy + import numbers from functools import partial from bson import DBRef, ObjectId, SON, json_util import pymongo -import six -from six import iteritems from mongoengine import signals from mongoengine.base.common import get_document @@ -25,14 +24,13 @@ from mongoengine.errors import ( OperationError, ValidationError, ) -from mongoengine.python_support import Hashable __all__ = ("BaseDocument", "NON_FIELD_ERRORS") NON_FIELD_ERRORS = "__all__" -class BaseDocument(object): +class BaseDocument: # TODO simplify how `_changed_fields` is used. # Currently, handling of `_changed_fields` seems unnecessarily convoluted: # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's @@ -92,10 +90,10 @@ class BaseDocument(object): # if so raise an Exception. if not self._dynamic and (self._meta.get("strict", True) or _created): _undefined_fields = set(values.keys()) - set( - self._fields.keys() + ["id", "pk", "_cls", "_text_score"] + list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"] ) if _undefined_fields: - msg = ('The fields "{0}" do not exist on the document "{1}"').format( + msg = ('The fields "{}" do not exist on the document "{}"').format( _undefined_fields, self._class_name ) raise FieldDoesNotExist(msg) @@ -110,7 +108,7 @@ class BaseDocument(object): # Assign default values to the instance. # We set default values only for fields loaded from DB. See # https://github.com/mongoengine/mongoengine/issues/399 for more info. 
- for key, field in iteritems(self._fields): + for key, field in self._fields.items(): if self._db_field_map.get(key, key) in __only_fields: continue value = getattr(self, key, None) @@ -122,14 +120,14 @@ class BaseDocument(object): # Set passed values after initialisation if self._dynamic: dynamic_data = {} - for key, value in iteritems(values): + for key, value in values.items(): if key in self._fields or key == "_id": setattr(self, key, value) else: dynamic_data[key] = value else: FileField = _import_class("FileField") - for key, value in iteritems(values): + for key, value in values.items(): key = self._reverse_db_field_map.get(key, key) if key in self._fields or key in ("id", "pk", "_cls"): if __auto_convert and value is not None: @@ -145,7 +143,7 @@ class BaseDocument(object): if self._dynamic: self._dynamic_lock = False - for key, value in iteritems(dynamic_data): + for key, value in dynamic_data.items(): setattr(self, key, value) # Flag initialised @@ -163,7 +161,7 @@ class BaseDocument(object): default = default() setattr(self, field_name, default) else: - super(BaseDocument, self).__delattr__(*args, **kwargs) + super().__delattr__(*args, **kwargs) def __setattr__(self, name, value): # Handle dynamic data only if an initialised dynamic document @@ -210,9 +208,9 @@ class BaseDocument(object): and self__created and name == self._meta.get("id_field") ): - super(BaseDocument, self).__setattr__("_created", False) + super().__setattr__("_created", False) - super(BaseDocument, self).__setattr__(name, value) + super().__setattr__(name, value) def __getstate__(self): data = {} @@ -288,16 +286,13 @@ class BaseDocument(object): except (UnicodeEncodeError, UnicodeDecodeError): u = "[Bad Unicode data]" repr_type = str if u is None else type(u) - return repr_type("<%s: %s>" % (self.__class__.__name__, u)) + return repr_type("<{}: {}>".format(self.__class__.__name__, u)) def __str__(self): # TODO this could be simpler? if hasattr(self, "__unicode__"): - if six.PY3: - return self.__unicode__() - else: - return six.text_type(self).encode("utf-8") - return six.text_type("%s object" % self.__class__.__name__) + return self.__unicode__() + return "%s object" % self.__class__.__name__ def __eq__(self, other): if ( @@ -446,7 +441,7 @@ class BaseDocument(object): pk = self.pk elif self._instance and hasattr(self._instance, "pk"): pk = self._instance.pk - message = "ValidationError (%s:%s) " % (self._class_name, pk) + message = "ValidationError ({}:{}) ".format(self._class_name, pk) raise ValidationError(message, errors=errors) def to_json(self, *args, **kwargs): @@ -519,7 +514,7 @@ class BaseDocument(object): if "." in key: key, rest = key.split(".", 1) key = self._db_field_map.get(key, key) - key = "%s.%s" % (key, rest) + key = "{}.{}".format(key, rest) else: key = self._db_field_map.get(key, key) @@ -578,10 +573,10 @@ class BaseDocument(object): if not hasattr(data, "items"): iterator = enumerate(data) else: - iterator = iteritems(data) + iterator = data.items() for index_or_key, value in iterator: - item_key = "%s%s." % (base_key, index_or_key) + item_key = "{}{}.".format(base_key, index_or_key) # don't check anything lower if this key is already marked # as changed. 
if item_key[:-1] in changed_fields: @@ -589,7 +584,7 @@ class BaseDocument(object): if hasattr(value, "_get_changed_fields"): changed = value._get_changed_fields() - changed_fields += ["%s%s" % (item_key, k) for k in changed if k] + changed_fields += ["{}{}".format(item_key, k) for k in changed if k] elif isinstance(value, (list, tuple, dict)): self._nestable_types_changed_fields(changed_fields, item_key, value) @@ -620,7 +615,7 @@ class BaseDocument(object): if isinstance(data, EmbeddedDocument): # Find all embedded fields that have been changed changed = data._get_changed_fields() - changed_fields += ["%s%s" % (key, k) for k in changed if k] + changed_fields += ["{}{}".format(key, k) for k in changed if k] elif isinstance(data, (list, tuple, dict)): if hasattr(field, "field") and isinstance( field.field, (ReferenceField, GenericReferenceField) @@ -670,7 +665,7 @@ class BaseDocument(object): del set_data["_id"] # Determine if any changed items were actually unset. - for path, value in set_data.items(): + for path, value in list(set_data.items()): if value or isinstance( value, (numbers.Number, bool) ): # Account for 0 and True that are truthy @@ -744,7 +739,7 @@ class BaseDocument(object): # Convert SON to a data dict, making sure each key is a string and # corresponds to the right db field. data = {} - for key, value in iteritems(son): + for key, value in son.items(): key = str(key) key = cls._db_field_map.get(key, key) data[key] = value @@ -759,7 +754,7 @@ class BaseDocument(object): if not _auto_dereference: fields = copy.deepcopy(fields) - for field_name, field in iteritems(fields): + for field_name, field in fields.items(): field._auto_dereference = _auto_dereference if field.db_field in data: value = data[field.db_field] @@ -774,17 +769,16 @@ class BaseDocument(object): if errors_dict: errors = "\n".join( - ["Field '%s' - %s" % (k, v) for k, v in errors_dict.items()] + ["Field '{}' - {}".format(k, v) for k, v in errors_dict.items()] ) - msg = "Invalid data to create a `%s` instance.\n%s" % ( - cls._class_name, - errors, + msg = "Invalid data to create a `{}` instance.\n{}".format( + cls._class_name, errors, ) raise InvalidDocumentError(msg) # In STRICT documents, remove any keys that aren't in cls._fields if cls.STRICT: - data = {k: v for k, v in iteritems(data) if k in cls._fields} + data = {k: v for k, v in data.items() if k in cls._fields} obj = cls( __auto_convert=False, _created=created, __only_fields=only_fields, **data @@ -831,7 +825,7 @@ class BaseDocument(object): @classmethod def _build_index_spec(cls, spec): """Build a PyMongo index spec from a MongoEngine index spec.""" - if isinstance(spec, six.string_types): + if isinstance(spec, str): spec = {"fields": [spec]} elif isinstance(spec, (list, tuple)): spec = {"fields": list(spec)} @@ -928,7 +922,7 @@ class BaseDocument(object): # Add any unique_with fields to the back of the index spec if field.unique_with: - if isinstance(field.unique_with, six.string_types): + if isinstance(field.unique_with, str): field.unique_with = [field.unique_with] # Convert unique_with field names to real field names @@ -949,7 +943,8 @@ class BaseDocument(object): # Add the new index to the list fields = [ - ("%s%s" % (namespace, f), pymongo.ASCENDING) for f in unique_fields + ("{}{}".format(namespace, f), pymongo.ASCENDING) + for f in unique_fields ] index = {"fields": fields, "unique": True, "sparse": sparse} unique_indexes.append(index) @@ -1006,7 +1001,7 @@ class BaseDocument(object): elif field._geo_index: field_name = field.db_field if 
parent_field: - field_name = "%s.%s" % (parent_field, field_name) + field_name = "{}.{}".format(parent_field, field_name) geo_indices.append({"fields": [(field_name, field._geo_index)]}) return geo_indices @@ -1175,9 +1170,6 @@ class BaseDocument(object): else [value] ) return sep.join( - [ - six.text_type(dict(field.choices).get(val, val)) - for val in values or [] - ] + [str(dict(field.choices).get(val, val)) for val in values or []] ) return value diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index cd1039cb..7bab813c 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -4,8 +4,6 @@ import weakref from bson import DBRef, ObjectId, SON import pymongo -import six -from six import iteritems from mongoengine.base.common import UPDATE_OPERATORS from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList @@ -15,7 +13,7 @@ from mongoengine.errors import DeprecatedError, ValidationError __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") -class BaseField(object): +class BaseField: """A base class for fields in a MongoDB document. Instances of this class may be added to subclasses of `Document` to define a document's schema. @@ -36,7 +34,6 @@ class BaseField(object): def __init__( self, db_field=None, - name=None, required=False, default=None, unique=False, @@ -51,7 +48,6 @@ class BaseField(object): """ :param db_field: The database field to store this field in (defaults to the name of the field) - :param name: Deprecated - use db_field :param required: If the field is required. Whether it has to have a value or not. Defaults to False. :param default: (optional) The default value for this field if no value @@ -75,11 +71,8 @@ class BaseField(object): existing attributes. Common metadata includes `verbose_name` and `help_text`. """ - self.db_field = (db_field or name) if not primary_key else "_id" + self.db_field = db_field if not primary_key else "_id" - if name: - msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' - warnings.warn(msg, DeprecationWarning) self.required = required or primary_key self.default = default self.unique = bool(unique or unique_with) @@ -92,13 +85,11 @@ class BaseField(object): self._owner_document = None # Make sure db_field is a string (if it's explicitly defined). - if self.db_field is not None and not isinstance( - self.db_field, six.string_types - ): + if self.db_field is not None and not isinstance(self.db_field, str): raise TypeError("db_field should be a string.") # Make sure db_field doesn't contain any forbidden characters. - if isinstance(self.db_field, six.string_types) and ( + if isinstance(self.db_field, str) and ( "." 
in self.db_field or "\0" in self.db_field or self.db_field.startswith("$") @@ -221,14 +212,12 @@ class BaseField(object): # Choices which are other types of Documents if isinstance(value, (Document, EmbeddedDocument)): if not any(isinstance(value, c) for c in choice_list): - self.error( - "Value must be an instance of %s" % (six.text_type(choice_list)) - ) + self.error("Value must be an instance of %s" % (choice_list)) # Choices which are types other than Documents else: values = value if isinstance(value, (list, tuple)) else [value] if len(set(values) - set(choice_list)): - self.error("Value must be one of %s" % six.text_type(choice_list)) + self.error("Value must be one of %s" % str(choice_list)) def _validate(self, value, **kwargs): # Check the Choices Constraint @@ -316,7 +305,7 @@ class ComplexBaseField(BaseField): if hasattr(instance._data[self.name], "_dereferenced"): instance._data[self.name]._dereferenced = True - value = super(ComplexBaseField, self).__get__(instance, owner) + value = super().__get__(instance, owner) # Convert lists / values so we can watch for any changes on them if isinstance(value, (list, tuple)): @@ -345,7 +334,7 @@ class ComplexBaseField(BaseField): def to_python(self, value): """Convert a MongoDB-compatible type to a Python type.""" - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_python"): @@ -399,7 +388,7 @@ class ComplexBaseField(BaseField): EmbeddedDocument = _import_class("EmbeddedDocument") GenericReferenceField = _import_class("GenericReferenceField") - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_mongo"): @@ -423,11 +412,11 @@ class ComplexBaseField(BaseField): if self.field: value_dict = { key: self.field._to_mongo_safe_call(item, use_db_field, fields) - for key, item in iteritems(value) + for key, item in value.items() } else: value_dict = {} - for k, v in iteritems(value): + for k, v in value.items(): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: @@ -466,8 +455,8 @@ class ComplexBaseField(BaseField): """If field is provided ensure the value is valid.""" errors = {} if self.field: - if hasattr(value, "iteritems") or hasattr(value, "items"): - sequence = iteritems(value) + if hasattr(value, "items"): + sequence = value.items() else: sequence = enumerate(value) for k, v in sequence: @@ -480,7 +469,9 @@ class ComplexBaseField(BaseField): if errors: field_class = self.field.__class__.__name__ - self.error("Invalid %s item (%s)" % (field_class, value), errors=errors) + self.error( + "Invalid {} item ({})".format(field_class, value), errors=errors + ) # Don't allow empty values if required if self.required and not value: self.error("Field is required and cannot be empty") @@ -513,10 +504,9 @@ class ObjectIdField(BaseField): def to_mongo(self, value): if not isinstance(value, ObjectId): try: - return ObjectId(six.text_type(value)) + return ObjectId(str(value)) except Exception as e: - # e.message attribute has been deprecated since Python 2.6 - self.error(six.text_type(e)) + self.error(str(e)) return value def prepare_query_value(self, op, value): @@ -524,9 +514,9 @@ class ObjectIdField(BaseField): def validate(self, value): try: - ObjectId(six.text_type(value)) + ObjectId(str(value)) except Exception: - self.error("Invalid Object ID") + self.error("Invalid ObjectID") class GeoJsonBaseField(BaseField): @@ -546,14 +536,14 @@ class GeoJsonBaseField(BaseField): self._name = "%sField" % 
self._type if not auto_index: self._geo_index = False - super(GeoJsonBaseField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate(self, value): """Validate the GeoJson object based on its type.""" if isinstance(value, dict): if set(value.keys()) == {"type", "coordinates"}: if value["type"] != self._type: - self.error('%s type must be "%s"' % (self._name, self._type)) + self.error('{} type must be "{}"'.format(self._name, self._type)) return self.validate(value["coordinates"]) else: self.error( diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index e4d26811..b4479b97 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -1,9 +1,6 @@ import itertools import warnings -import six -from six import iteritems, itervalues - from mongoengine.base.common import _document_registry from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField from mongoengine.common import _import_class @@ -25,7 +22,7 @@ class DocumentMetaclass(type): # TODO lower complexity of this method def __new__(mcs, name, bases, attrs): flattened_bases = mcs._get_bases(bases) - super_new = super(DocumentMetaclass, mcs).__new__ + super_new = super().__new__ # If a base class just call super metaclass = attrs.get("my_metaclass") @@ -69,7 +66,7 @@ class DocumentMetaclass(type): # Standard object mixin - merge in any Fields if not hasattr(base, "_meta"): base_fields = {} - for attr_name, attr_value in iteritems(base.__dict__): + for attr_name, attr_value in base.__dict__.items(): if not isinstance(attr_value, BaseField): continue attr_value.name = attr_name @@ -81,7 +78,7 @@ class DocumentMetaclass(type): # Discover any document fields field_names = {} - for attr_name, attr_value in iteritems(attrs): + for attr_name, attr_value in attrs.items(): if not isinstance(attr_value, BaseField): continue attr_value.name = attr_name @@ -111,9 +108,7 @@ class DocumentMetaclass(type): attrs["_fields_ordered"] = tuple( i[1] - for i in sorted( - (v.creation_counter, v.name) for v in itervalues(doc_fields) - ) + for i in sorted((v.creation_counter, v.name) for v in doc_fields.values()) ) # @@ -173,24 +168,8 @@ class DocumentMetaclass(type): # Add class to the _document_registry _document_registry[new_class._class_name] = new_class - # In Python 2, User-defined methods objects have special read-only - # attributes 'im_func' and 'im_self' which contain the function obj - # and class instance object respectively. With Python 3 these special - # attributes have been replaced by __func__ and __self__. The Blinker - # module continues to use im_func and im_self, so the code below - # copies __func__ into im_func and __self__ into im_self for - # classmethod objects in Document derived classes. 
- if six.PY3: - for val in new_class.__dict__.values(): - if isinstance(val, classmethod): - f = val.__get__(new_class) - if hasattr(f, "__func__") and not hasattr(f, "im_func"): - f.__dict__.update({"im_func": getattr(f, "__func__")}) - if hasattr(f, "__self__") and not hasattr(f, "im_self"): - f.__dict__.update({"im_self": getattr(f, "__self__")}) - # Handle delete rules - for field in itervalues(new_class._fields): + for field in new_class._fields.values(): f = field if f.owner_document is None: f.owner_document = new_class @@ -252,8 +231,7 @@ class DocumentMetaclass(type): if base is object: continue yield base - for child_base in mcs.__get_bases(base.__bases__): - yield child_base + yield from mcs.__get_bases(base.__bases__) @classmethod def _import_classes(mcs): @@ -271,7 +249,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): def __new__(mcs, name, bases, attrs): flattened_bases = mcs._get_bases(bases) - super_new = super(TopLevelDocumentMetaclass, mcs).__new__ + super_new = super().__new__ # Set default _meta data if base class, otherwise get user defined meta if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: @@ -284,7 +262,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): "indexes": [], # indexes to be ensured at runtime "id_field": None, "index_background": False, - "index_drop_dups": False, "index_opts": None, "delete_rules": None, # allow_inheritance can be True, False, and None. True means @@ -399,7 +376,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class.objects = QuerySetManager() # Validate the fields and set primary key if needed - for field_name, field in iteritems(new_class._fields): + for field_name, field in new_class._fields.items(): if field.primary_key: # Ensure only one primary key is set current_pk = new_class._meta.get("id_field") @@ -462,8 +439,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) for i in itertools.count(): - id_name = "{0}_{1}".format(id_basename, i) - id_db_name = "{0}_{1}".format(id_db_basename, i) + id_name = "{}_{}".format(id_basename, i) + id_db_name = "{}_{}".format(id_db_basename, i) if id_name not in existing_fields and id_db_name not in existing_db_fields: return id_name, id_db_name @@ -476,7 +453,7 @@ class MetaDict(dict): _merge_options = ("indexes",) def merge(self, new_options): - for k, v in iteritems(new_options): + for k, v in new_options.items(): if k in self._merge_options: self[k] = self.get(k, []) + v else: diff --git a/mongoengine/base/utils.py b/mongoengine/base/utils.py index 8f27ee14..7753ad50 100644 --- a/mongoengine/base/utils.py +++ b/mongoengine/base/utils.py @@ -1,7 +1,7 @@ import re -class LazyRegexCompiler(object): +class LazyRegexCompiler: """Descriptor to allow lazy compilation of regex""" def __init__(self, pattern, flags=0): diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 4e0c60b0..13d170ec 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -1,6 +1,5 @@ from pymongo import MongoClient, ReadPreference, uri_parser from pymongo.database import _check_name -import six __all__ = [ "DEFAULT_CONNECTION_NAME", @@ -39,8 +38,8 @@ def _check_db_name(name): """Check if a database name is valid. This functionality is copied from pymongo Database class constructor. 
""" - if not isinstance(name, six.string_types): - raise TypeError("name must be an instance of %s" % six.string_types) + if not isinstance(name, str): + raise TypeError("name must be an instance of %s" % str) elif name != "$external": _check_name(name) @@ -93,7 +92,7 @@ def _get_connection_settings( conn_host = conn_settings["host"] # Host can be a list or a string, so if string, force to a list. - if isinstance(conn_host, six.string_types): + if isinstance(conn_host, str): conn_host = [conn_host] resolved_hosts = [] @@ -148,7 +147,7 @@ def _get_connection_settings( # TODO simplify the code below once we drop support for # PyMongo v3.4. read_pf_mode = uri_options["readpreference"] - if isinstance(read_pf_mode, six.string_types): + if isinstance(read_pf_mode, str): read_pf_mode = read_pf_mode.lower() for preference in read_preferences: if ( @@ -318,7 +317,7 @@ def _create_connection(alias, connection_class, **connection_settings): try: return connection_class(**connection_settings) except Exception as e: - raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e)) + raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e)) def _find_existing_connection(connection_settings): @@ -396,8 +395,8 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): if new_conn_settings != prev_conn_setting: err_msg = ( - u"A different connection with alias `{}` was already " - u"registered. Use disconnect() first" + "A different connection with alias `{}` was already " + "registered. Use disconnect() first" ).format(alias) raise ConnectionFailure(err_msg) else: diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 9319abae..77e6b55c 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -2,7 +2,6 @@ from contextlib import contextmanager from pymongo.write_concern import WriteConcern from pymongo.read_concern import ReadConcern -from six import iteritems from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db @@ -19,7 +18,7 @@ __all__ = ( ) -class switch_db(object): +class switch_db: """switch_db alias context manager. Example :: @@ -60,7 +59,7 @@ class switch_db(object): self.cls._collection = self.collection -class switch_collection(object): +class switch_collection: """switch_collection alias context manager. Example :: @@ -102,7 +101,7 @@ class switch_collection(object): self.cls._get_collection_name = self.ori_get_collection_name -class no_dereference(object): +class no_dereference: """no_dereference context manager. Turns off all dereferencing in Documents for the duration of the context @@ -125,7 +124,7 @@ class no_dereference(object): self.deref_fields = [ k - for k, v in iteritems(self.cls._fields) + for k, v in self.cls._fields.items() if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) ] @@ -142,7 +141,7 @@ class no_dereference(object): return self.cls -class no_sub_classes(object): +class no_sub_classes: """no_sub_classes context manager. Only returns instances of this class and no sub (inherited) classes:: @@ -170,7 +169,7 @@ class no_sub_classes(object): self.cls._subclasses = self.cls_initial_subclasses -class query_counter(object): +class query_counter: """Query_counter context manager to get the number of queries. 
This works by updating the `profiling_level` of the database so that all queries get logged, resetting the db.system.profile collection at the beginning of the context and counting the new entries. @@ -237,7 +236,7 @@ class query_counter(object): def __repr__(self): """repr query_counter as the number of queries.""" - return u"%s" % self._get_count() + return "%s" % self._get_count() def _get_count(self): """Get the number of queries by counting the current number of entries in db.system.profile diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index 9e75f353..ff608a3b 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -1,6 +1,4 @@ from bson import DBRef, SON -import six -from six import iteritems from mongoengine.base import ( BaseDict, @@ -16,7 +14,7 @@ from mongoengine.fields import DictField, ListField, MapField, ReferenceField from mongoengine.queryset import QuerySet -class DeReference(object): +class DeReference: def __call__(self, items, max_depth=1, instance=None, name=None): """ Cheaply dereferences the items to a set depth. @@ -30,7 +28,7 @@ class DeReference(object): :class:`~mongoengine.base.ComplexBaseField` :param get: A boolean determining if being called by __get__ """ - if items is None or isinstance(items, six.string_types): + if items is None or isinstance(items, str): return items # cheapest way to convert a queryset to a list @@ -79,7 +77,7 @@ class DeReference(object): def _get_items_from_dict(items): new_items = {} - for k, v in iteritems(items): + for k, v in items.items(): value = v if isinstance(v, list): value = _get_items_from_list(v) @@ -120,7 +118,7 @@ class DeReference(object): depth += 1 for item in iterator: if isinstance(item, (Document, EmbeddedDocument)): - for field_name, field in iteritems(item._fields): + for field_name, field in item._fields.items(): v = item._data.get(field_name, None) if isinstance(v, LazyReference): # LazyReference inherits DBRef but should not be dereferenced here ! 
@@ -136,7 +134,7 @@ class DeReference(object): getattr(field, "field", None), "document_type", None ) references = self._find_references(v, depth) - for key, refs in iteritems(references): + for key, refs in references.items(): if isinstance( field_cls, (Document, TopLevelDocumentMetaclass) ): @@ -153,7 +151,7 @@ class DeReference(object): ) elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: references = self._find_references(item, depth - 1) - for key, refs in iteritems(references): + for key, refs in references.items(): reference_map.setdefault(key, set()).update(refs) return reference_map @@ -162,7 +160,7 @@ class DeReference(object): """Fetch all references and convert to their document objects """ object_map = {} - for collection, dbrefs in iteritems(self.reference_map): + for collection, dbrefs in self.reference_map.items(): # we use getattr instead of hasattr because hasattr swallows any exception under python2 # so it could hide nasty things without raising exceptions (cfr bug #1688)) @@ -174,7 +172,7 @@ class DeReference(object): dbref for dbref in dbrefs if (col_name, dbref) not in object_map ] references = collection.objects.in_bulk(refs) - for key, doc in iteritems(references): + for key, doc in references.items(): object_map[(col_name, key)] = doc else: # Generic reference: use the refs data to convert to document if isinstance(doc_type, (ListField, DictField, MapField)): @@ -250,7 +248,7 @@ class DeReference(object): data = [] else: is_list = False - iterator = iteritems(items) + iterator = items.items() data = {} depth += 1 @@ -274,14 +272,12 @@ class DeReference(object): (v["_ref"].collection, v["_ref"].id), v ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = six.text_type("{0}.{1}.{2}").format( - name, k, field_name - ) + item_name = "{}.{}.{}".format(name, k, field_name) data[k]._data[field_name] = self._attach_objects( v, depth, instance=instance, name=item_name ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = "%s.%s" % (name, k) if name else name + item_name = "{}.{}".format(name, k) if name else name data[k] = self._attach_objects( v, depth - 1, instance=instance, name=item_name ) diff --git a/mongoengine/document.py b/mongoengine/document.py index 3cc0046e..db64054a 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -4,8 +4,6 @@ import warnings from bson.dbref import DBRef import pymongo from pymongo.read_preferences import ReadPreference -import six -from six import iteritems from mongoengine import signals from mongoengine.base import ( @@ -44,7 +42,7 @@ def includes_cls(fields): """Helper function used for ensuring and comparing indexes.""" first_field = None if len(fields): - if isinstance(fields[0], six.string_types): + if isinstance(fields[0], str): first_field = fields[0] elif isinstance(fields[0], (list, tuple)) and len(fields[0]): first_field = fields[0][0] @@ -55,7 +53,7 @@ class InvalidCollectionError(Exception): pass -class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): +class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): r"""A :class:`~mongoengine.Document` that isn't stored in its own collection. 
:class:`~mongoengine.EmbeddedDocument`\ s should be used as fields on :class:`~mongoengine.Document`\ s through the @@ -71,7 +69,6 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): __slots__ = ("_instance",) - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass @@ -82,7 +79,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): __hash__ = None def __init__(self, *args, **kwargs): - super(EmbeddedDocument, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._instance = None self._changed_fields = [] @@ -95,7 +92,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): return not self.__eq__(other) def to_mongo(self, *args, **kwargs): - data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) + data = super().to_mongo(*args, **kwargs) # remove _id from the SON if it's in it and it's None if "_id" in data and data["_id"] is None: @@ -104,7 +101,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): return data -class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): +class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): """The base class used for defining the structure and properties of collections of documents stored in MongoDB. Inherit from this class, and add fields as class attributes to define a document's structure. @@ -156,7 +153,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): in the :attr:`meta` dictionary. """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass @@ -260,7 +256,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): return db.create_collection(collection_name, **opts) def to_mongo(self, *args, **kwargs): - data = super(Document, self).to_mongo(*args, **kwargs) + data = super().to_mongo(*args, **kwargs) # If '_id' is None, try and set it from self._data. If that # doesn't exist either, remove '_id' from the SON completely. 
@@ -431,16 +427,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): self.cascade_save(**kwargs) except pymongo.errors.DuplicateKeyError as err: - message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" - if re.match("^E1100[01] duplicate key", six.text_type(err)): + if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) - raise OperationError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) + raise OperationError(message % err) # Make sure we store the PK on this document now that it's saved id_field = self._meta["id_field"] @@ -559,7 +555,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): if not getattr(ref, "_changed_fields", True): continue - ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) + ref_id = "{},{}".format(ref.__class__.__name__, str(ref._data)) if ref and ref_id not in _refs: _refs.append(ref_id) kwargs["_refs"] = _refs @@ -634,7 +630,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # Delete FileFields separately FileField = _import_class("FileField") - for name, field in iteritems(self._fields): + for name, field in self._fields.items(): if isinstance(field, FileField): getattr(self, name).delete() @@ -643,7 +639,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): write_concern=write_concern, _from_doc_delete=True ) except pymongo.errors.OperationFailure as err: - message = u"Could not delete document (%s)" % err.message + message = "Could not delete document (%s)" % err.message raise OperationError(message) signals.post_delete.send(self.__class__, document=self, **signal_kwargs) @@ -851,17 +847,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): index_spec = cls._build_index_spec(keys) index_spec = index_spec.copy() fields = index_spec.pop("fields") - drop_dups = kwargs.get("drop_dups", False) - if drop_dups: - msg = "drop_dups is deprecated and is removed when using PyMongo 3+." - warnings.warn(msg, DeprecationWarning) index_spec["background"] = background index_spec.update(kwargs) return cls._get_collection().create_index(fields, **index_spec) @classmethod - def ensure_index(cls, key_or_list, drop_dups=False, background=False, **kwargs): + def ensure_index(cls, key_or_list, background=False, **kwargs): """Ensure that the given indexes are in place. Deprecated in favour of create_index. @@ -869,12 +861,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering :param background: Allows index creation in the background - :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value - will be removed if PyMongo3+ is used """ - if drop_dups: - msg = "drop_dups is deprecated and is removed when using PyMongo 3+." 
- warnings.warn(msg, DeprecationWarning) return cls.create_index(key_or_list, background=background, **kwargs) @classmethod @@ -887,12 +874,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): `auto_create_index` to False in the documents meta data """ background = cls._meta.get("index_background", False) - drop_dups = cls._meta.get("index_drop_dups", False) index_opts = cls._meta.get("index_opts") or {} index_cls = cls._meta.get("index_cls", True) - if drop_dups: - msg = "drop_dups is deprecated and is removed when using PyMongo 3+." - warnings.warn(msg, DeprecationWarning) collection = cls._get_collection() # 746: when connection is via mongos, the read preference is not necessarily an indication that @@ -992,10 +975,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): indexes.append(index) # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed - if [(u"_id", 1)] not in indexes: - indexes.append([(u"_id", 1)]) + if [("_id", 1)] not in indexes: + indexes.append([("_id", 1)]) if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"): - indexes.append([(u"_cls", 1)]) + indexes.append([("_cls", 1)]) return indexes @@ -1019,19 +1002,19 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): extra = [index for index in existing if index not in required] # if { _cls: 1 } is missing, make sure it's *really* necessary - if [(u"_cls", 1)] in missing: + if [("_cls", 1)] in missing: cls_obsolete = False for index in existing: if includes_cls(index) and index not in extra: cls_obsolete = True break if cls_obsolete: - missing.remove([(u"_cls", 1)]) + missing.remove([("_cls", 1)]) return {"missing": missing, "extra": extra} -class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): +class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): """A Dynamic Document class allowing flexible, expandable and uncontrolled schemas. As a :class:`~mongoengine.Document` subclass, acts in the same way as an ordinary document but has expanded style properties. Any data @@ -1045,7 +1028,6 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass @@ -1060,16 +1042,15 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): setattr(self, field_name, None) self._dynamic_fields[field_name].null = False else: - super(DynamicDocument, self).__delattr__(*args, **kwargs) + super().__delattr__(*args, **kwargs) -class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): +class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): """A Dynamic Embedded Document class allowing flexible, expandable and uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more information about dynamic documents. 
""" - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass @@ -1089,7 +1070,7 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu setattr(self, field_name, None) -class MapReduceDocument(object): +class MapReduceDocument: """A document returned from a map/reduce query. :param collection: An instance of :class:`~pymongo.Collection` diff --git a/mongoengine/errors.py b/mongoengine/errors.py index b76243d3..95564ff9 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -1,7 +1,5 @@ from collections import defaultdict -import six -from six import iteritems __all__ = ( "NotRegistered", @@ -87,24 +85,24 @@ class ValidationError(AssertionError): _message = None def __init__(self, message="", **kwargs): - super(ValidationError, self).__init__(message) + super().__init__(message) self.errors = kwargs.get("errors", {}) self.field_name = kwargs.get("field_name") self.message = message def __str__(self): - return six.text_type(self.message) + return str(self.message) def __repr__(self): - return "%s(%s,)" % (self.__class__.__name__, self.message) + return "{}({},)".format(self.__class__.__name__, self.message) def __getattribute__(self, name): - message = super(ValidationError, self).__getattribute__(name) + message = super().__getattribute__(name) if name == "message": if self.field_name: message = "%s" % message if self.errors: - message = "%s(%s)" % (message, self._format_errors()) + message = "{}({})".format(message, self._format_errors()) return message def _get_message(self): @@ -126,12 +124,12 @@ class ValidationError(AssertionError): def build_dict(source): errors_dict = {} if isinstance(source, dict): - for field_name, error in iteritems(source): + for field_name, error in source.items(): errors_dict[field_name] = build_dict(error) elif isinstance(source, ValidationError) and source.errors: return build_dict(source.errors) else: - return six.text_type(source) + return str(source) return errors_dict @@ -147,15 +145,15 @@ class ValidationError(AssertionError): if isinstance(value, list): value = " ".join([generate_key(k) for k in value]) elif isinstance(value, dict): - value = " ".join([generate_key(v, k) for k, v in iteritems(value)]) + value = " ".join([generate_key(v, k) for k, v in value.items()]) - results = "%s.%s" % (prefix, value) if prefix else value + results = "{}.{}".format(prefix, value) if prefix else value return results error_dict = defaultdict(list) - for k, v in iteritems(self.to_dict()): + for k, v in self.to_dict().items(): error_dict[generate_key(v)].append(k) - return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)]) + return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()]) class DeprecatedError(Exception): diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 7ec8c0f3..b05e726a 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -5,14 +5,14 @@ import re import socket import time import uuid +from io import BytesIO from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON +from bson.int64 import Int64 import gridfs import pymongo from pymongo import ReturnDocument -import six -from six import iteritems try: import dateutil @@ -21,11 +21,6 @@ except ImportError: else: import dateutil.parser -try: - from bson.int64 import Int64 -except ImportError: - Int64 = long - from mongoengine.base import ( BaseDocument, @@ -42,7 +37,6 @@ 
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.document import Document, EmbeddedDocument from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version -from mongoengine.python_support import StringIO from mongoengine.queryset import DO_NOTHING from mongoengine.queryset.base import BaseQuerySet from mongoengine.queryset.transform import STRING_OPERATORS @@ -53,11 +47,6 @@ except ImportError: Image = None ImageOps = None -if six.PY3: - # Useless as long as 2to3 gets executed - # as it turns `long` into `int` blindly - long = int - __all__ = ( "StringField", @@ -114,10 +103,10 @@ class StringField(BaseField): self.regex = re.compile(regex) if regex else None self.max_length = max_length self.min_length = min_length - super(StringField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): - if isinstance(value, six.text_type): + if isinstance(value, str): return value try: value = value.decode("utf-8") @@ -126,7 +115,7 @@ class StringField(BaseField): return value def validate(self, value): - if not isinstance(value, six.string_types): + if not isinstance(value, str): self.error("StringField only accepts string values") if self.max_length is not None and len(value) > self.max_length: @@ -142,7 +131,7 @@ class StringField(BaseField): return None def prepare_query_value(self, op, value): - if not isinstance(op, six.string_types): + if not isinstance(op, str): return value if op in STRING_OPERATORS: @@ -162,7 +151,7 @@ class StringField(BaseField): # escape unsafe characters which could lead to a re.error value = re.escape(value) value = re.compile(regex % value, flags) - return super(StringField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class URLField(StringField): @@ -186,17 +175,17 @@ class URLField(StringField): def __init__(self, url_regex=None, schemes=None, **kwargs): self.url_regex = url_regex or self._URL_REGEX self.schemes = schemes or self._URL_SCHEMES - super(URLField, self).__init__(**kwargs) + super().__init__(**kwargs) def validate(self, value): # Check first if the scheme is valid scheme = value.split("://")[0].lower() if scheme not in self.schemes: - self.error(u"Invalid scheme {} in URL: {}".format(scheme, value)) + self.error("Invalid scheme {} in URL: {}".format(scheme, value)) # Then check full URL if not self.url_regex.match(value): - self.error(u"Invalid URL: {}".format(value)) + self.error("Invalid URL: {}".format(value)) class EmailField(StringField): @@ -214,7 +203,7 @@ class EmailField(StringField): ) UTF8_USER_REGEX = LazyRegexCompiler( - six.u( + ( # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to # include `UTF8-non-ascii`. r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+)*\Z" @@ -229,7 +218,7 @@ class EmailField(StringField): re.IGNORECASE, ) - error_msg = u"Invalid email address: %s" + error_msg = "Invalid email address: %s" def __init__( self, @@ -253,7 +242,7 @@ class EmailField(StringField): self.domain_whitelist = domain_whitelist or [] self.allow_utf8_user = allow_utf8_user self.allow_ip_domain = allow_ip_domain - super(EmailField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate_user_part(self, user_part): """Validate the user part of the email address. 
Return True if @@ -280,13 +269,13 @@ class EmailField(StringField): try: socket.inet_pton(addr_family, domain_part[1:-1]) return True - except (socket.error, UnicodeEncodeError): + except (OSError, UnicodeEncodeError): pass return False def validate(self, value): - super(EmailField, self).validate(value) + super().validate(value) if "@" not in value: self.error(self.error_msg % value) @@ -303,12 +292,16 @@ class EmailField(StringField): domain_part = domain_part.encode("idna").decode("ascii") except UnicodeError: self.error( - "%s %s" % (self.error_msg % value, "(domain failed IDN encoding)") + "{} {}".format( + self.error_msg % value, "(domain failed IDN encoding)" + ) ) else: if not self.validate_domain_part(domain_part): self.error( - "%s %s" % (self.error_msg % value, "(domain validation failed)") + "{} {}".format( + self.error_msg % value, "(domain validation failed)" + ) ) @@ -317,7 +310,7 @@ class IntField(BaseField): def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(IntField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: @@ -342,19 +335,19 @@ class IntField(BaseField): if value is None: return value - return super(IntField, self).prepare_query_value(op, int(value)) + return super().prepare_query_value(op, int(value)) class LongField(BaseField): - """64-bit integer field.""" + """64-bit integer field. (Equivalent to IntField since the support to Python2 was dropped)""" def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(LongField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: - value = long(value) + value = int(value) except (TypeError, ValueError): pass return value @@ -364,7 +357,7 @@ class LongField(BaseField): def validate(self, value): try: - value = long(value) + value = int(value) except (TypeError, ValueError): self.error("%s could not be converted to long" % value) @@ -378,7 +371,7 @@ class LongField(BaseField): if value is None: return value - return super(LongField, self).prepare_query_value(op, long(value)) + return super().prepare_query_value(op, int(value)) class FloatField(BaseField): @@ -386,7 +379,7 @@ class FloatField(BaseField): def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(FloatField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: @@ -396,7 +389,7 @@ class FloatField(BaseField): return value def validate(self, value): - if isinstance(value, six.integer_types): + if isinstance(value, int): try: value = float(value) except OverflowError: @@ -415,7 +408,7 @@ class FloatField(BaseField): if value is None: return value - return super(FloatField, self).prepare_query_value(op, float(value)) + return super().prepare_query_value(op, float(value)) class DecimalField(BaseField): @@ -462,7 +455,7 @@ class DecimalField(BaseField): self.precision = precision self.rounding = rounding - super(DecimalField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if value is None: @@ -481,13 +474,13 @@ class DecimalField(BaseField): if value is None: return value if self.force_string: - return six.text_type(self.to_python(value)) + return str(self.to_python(value)) return float(self.to_python(value)) def validate(self, value): if not isinstance(value, decimal.Decimal): - if not isinstance(value, six.string_types): - 
value = six.text_type(value) + if not isinstance(value, str): + value = str(value) try: value = decimal.Decimal(value) except (TypeError, ValueError, decimal.InvalidOperation) as exc: @@ -500,7 +493,7 @@ class DecimalField(BaseField): self.error("Decimal value is too large") def prepare_query_value(self, op, value): - return super(DecimalField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class BooleanField(BaseField): @@ -540,7 +533,7 @@ class DateTimeField(BaseField): def validate(self, value): new_value = self.to_mongo(value) if not isinstance(new_value, (datetime.datetime, datetime.date)): - self.error(u'cannot parse date "%s"' % value) + self.error('cannot parse date "%s"' % value) def to_mongo(self, value): if value is None: @@ -552,7 +545,7 @@ class DateTimeField(BaseField): if callable(value): return value() - if not isinstance(value, six.string_types): + if not isinstance(value, str): return None return self._parse_datetime(value) @@ -597,19 +590,19 @@ class DateTimeField(BaseField): return None def prepare_query_value(self, op, value): - return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class DateField(DateTimeField): def to_mongo(self, value): - value = super(DateField, self).to_mongo(value) + value = super().to_mongo(value) # drop hours, minutes, seconds if isinstance(value, datetime.datetime): value = datetime.datetime(value.year, value.month, value.day) return value def to_python(self, value): - value = super(DateField, self).to_python(value) + value = super().to_python(value) # convert datetime to date if isinstance(value, datetime.datetime): value = datetime.date(value.year, value.month, value.day) @@ -643,7 +636,7 @@ class ComplexDateTimeField(StringField): """ self.separator = separator self.format = separator.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"]) - super(ComplexDateTimeField, self).__init__(**kwargs) + super().__init__(**kwargs) def _convert_from_datetime(self, val): """ @@ -674,17 +667,20 @@ class ComplexDateTimeField(StringField): if instance is None: return self - data = super(ComplexDateTimeField, self).__get__(instance, owner) + data = super().__get__(instance, owner) if isinstance(data, datetime.datetime) or data is None: return data return self._convert_from_string(data) def __set__(self, instance, value): - super(ComplexDateTimeField, self).__set__(instance, value) + super().__set__(instance, value) value = instance._data[self.name] if value is not None: - instance._data[self.name] = self._convert_from_datetime(value) + if isinstance(value, datetime.datetime): + instance._data[self.name] = self._convert_from_datetime(value) + else: + instance._data[self.name] = value def validate(self, value): value = self.to_python(value) @@ -703,9 +699,7 @@ class ComplexDateTimeField(StringField): return self._convert_from_datetime(value) def prepare_query_value(self, op, value): - return super(ComplexDateTimeField, self).prepare_query_value( - op, self._convert_from_datetime(value) - ) + return super().prepare_query_value(op, self._convert_from_datetime(value)) class EmbeddedDocumentField(BaseField): @@ -716,7 +710,7 @@ class EmbeddedDocumentField(BaseField): def __init__(self, document_type, **kwargs): # XXX ValidationError raised outside of the "validate" method. 
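The `ComplexDateTimeField.__set__` change above is behavioural, not just cosmetic: only real `datetime` instances are converted to the separator-joined string, while other values are stored untouched. A rough sketch of that guard, reusing the format built in `__init__`:

    import datetime

    SEPARATOR = ","
    FORMAT = SEPARATOR.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"])

    def store(value):
        # mirrors the new __set__: convert datetimes, pass anything else through
        if isinstance(value, datetime.datetime):
            return value.strftime(FORMAT)
        return value

    assert store(datetime.datetime(2020, 1, 2, 3, 4, 5)) == "2020,01,02,03,04,05,000000"
    assert store("2020,01,02,03,04,05,000000") == "2020,01,02,03,04,05,000000"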
if not ( - isinstance(document_type, six.string_types) + isinstance(document_type, str) or issubclass(document_type, EmbeddedDocument) ): self.error( @@ -725,11 +719,11 @@ class EmbeddedDocumentField(BaseField): ) self.document_type_obj = document_type - super(EmbeddedDocumentField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: resolved_document_type = self.owner_document else: @@ -786,7 +780,7 @@ class EmbeddedDocumentField(BaseField): "Querying the embedded document '%s' failed, due to an invalid query value" % (self.document_type._class_name,) ) - super(EmbeddedDocumentField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) @@ -802,9 +796,7 @@ class GenericEmbeddedDocumentField(BaseField): """ def prepare_query_value(self, op, value): - return super(GenericEmbeddedDocumentField, self).prepare_query_value( - op, self.to_mongo(value) - ) + return super().prepare_query_value(op, self.to_mongo(value)) def to_python(self, value): if isinstance(value, dict): @@ -855,7 +847,7 @@ class DynamicField(BaseField): """Convert a Python type to a MongoDB compatible type. """ - if isinstance(value, six.string_types): + if isinstance(value, str): return value if hasattr(value, "to_mongo"): @@ -877,12 +869,12 @@ class DynamicField(BaseField): value = {k: v for k, v in enumerate(value)} data = {} - for k, v in iteritems(value): + for k, v in value.items(): data[k] = self.to_mongo(v, use_db_field, fields) value = data if is_list: # Convert back to a list - value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))] + value = [v for k, v in sorted(data.items(), key=itemgetter(0))] return value def to_python(self, value): @@ -892,15 +884,15 @@ class DynamicField(BaseField): value = doc_cls._get_db().dereference(value["_ref"]) return doc_cls._from_son(value) - return super(DynamicField, self).to_python(value) + return super().to_python(value) def lookup_member(self, member_name): return member_name def prepare_query_value(self, op, value): - if isinstance(value, six.string_types): + if isinstance(value, str): return StringField().prepare_query_value(op, value) - return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) def validate(self, value, clean=True): if hasattr(value, "validate"): @@ -921,7 +913,7 @@ class ListField(ComplexBaseField): self.field = field self.max_length = max_length kwargs.setdefault("default", lambda: []) - super(ListField, self).__init__(**kwargs) + super().__init__(**kwargs) def __get__(self, instance, owner): if instance is None: @@ -935,7 +927,7 @@ class ListField(ComplexBaseField): and value ): instance._data[self.name] = [self.field.build_lazyref(x) for x in value] - return super(ListField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): """Make sure that a list of valid fields is being used.""" @@ -949,7 +941,7 @@ class ListField(ComplexBaseField): if self.max_length is not None and len(value) > self.max_length: self.error("List is too long") - super(ListField, self).validate(value) + super().validate(value) def prepare_query_value(self, op, value): # Validate that the `set` operator doesn't contain more items than `max_length`. 
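`DynamicField.to_mongo` keeps its existing trick of converting a list by temporarily keying it by index; only the `iteritems`/`items` spelling changes. A self-contained sketch of that round trip, with `convert` standing in for the recursive `self.to_mongo` call:

    from operator import itemgetter

    def convert(v):
        return str(v)                   # stand-in for the recursive to_mongo()

    value = [3, 1, 2]
    data = {k: convert(v) for k, v in enumerate(value)}
    rebuilt = [v for _, v in sorted(data.items(), key=itemgetter(0))]
    assert rebuilt == ["3", "1", "2"]   # order is preserved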
@@ -963,14 +955,14 @@ class ListField(ComplexBaseField): if ( op in ("set", "unset", None) and hasattr(value, "__iter__") - and not isinstance(value, six.string_types) + and not isinstance(value, str) and not isinstance(value, BaseDocument) ): return [self.field.prepare_query_value(op, v) for v in value] return self.field.prepare_query_value(op, value) - return super(ListField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class EmbeddedDocumentListField(ListField): @@ -991,9 +983,7 @@ class EmbeddedDocumentListField(ListField): :param kwargs: Keyword arguments passed directly into the parent :class:`~mongoengine.ListField`. """ - super(EmbeddedDocumentListField, self).__init__( - field=EmbeddedDocumentField(document_type), **kwargs - ) + super().__init__(field=EmbeddedDocumentField(document_type), **kwargs) class SortedListField(ListField): @@ -1019,10 +1009,10 @@ class SortedListField(ListField): self._ordering = kwargs.pop("ordering") if "reverse" in kwargs.keys(): self._order_reverse = kwargs.pop("reverse") - super(SortedListField, self).__init__(field, **kwargs) + super().__init__(field, **kwargs) def to_mongo(self, value, use_db_field=True, fields=None): - value = super(SortedListField, self).to_mongo(value, use_db_field, fields) + value = super().to_mongo(value, use_db_field, fields) if self._ordering is not None: return sorted( value, key=itemgetter(self._ordering), reverse=self._order_reverse @@ -1035,9 +1025,7 @@ def key_not_string(d): dictionary is not a string. """ for k, v in d.items(): - if not isinstance(k, six.string_types) or ( - isinstance(v, dict) and key_not_string(v) - ): + if not isinstance(k, str) or (isinstance(v, dict) and key_not_string(v)): return True @@ -1077,7 +1065,7 @@ class DictField(ComplexBaseField): self._auto_dereference = False kwargs.setdefault("default", lambda: {}) - super(DictField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate(self, value): """Make sure that a list of valid fields is being used.""" @@ -1088,18 +1076,16 @@ class DictField(ComplexBaseField): msg = "Invalid dictionary key - documents must have only string keys" self.error(msg) - curr_mongo_ver = get_mongodb_version() - - if curr_mongo_ver < MONGODB_36 and key_has_dot_or_dollar(value): - self.error( - 'Invalid dictionary key name - keys may not contain "."' - ' or startswith "$" characters' - ) - elif curr_mongo_ver >= MONGODB_36 and key_starts_with_dollar(value): + # Following condition applies to MongoDB >= 3.6 + # older Mongo has stricter constraints but + # it will be rejected upon insertion anyway + # Having a validation that depends on the MongoDB version + # is not straightforward as the field isn't aware of the connected Mongo + if key_starts_with_dollar(value): self.error( 'Invalid dictionary key name - keys may not startswith "$" characters' ) - super(DictField, self).validate(value) + super().validate(value) def lookup_member(self, member_name): return DictField(db_field=member_name) @@ -1116,7 +1102,7 @@ class DictField(ComplexBaseField): "iexact", ] - if op in match_operators and isinstance(value, six.string_types): + if op in match_operators and isinstance(value, str): return StringField().prepare_query_value(op, value) if hasattr( @@ -1128,7 +1114,7 @@ class DictField(ComplexBaseField): } return self.field.prepare_query_value(op, value) - return super(DictField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class MapField(DictField): @@ -1143,7 +1129,7 @@ 
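The `DictField.validate` hunk above is a behaviour change: the MongoDB-version check is gone and only keys starting with `$` are rejected client-side (MongoDB >= 3.6 semantics), while dotted keys are left for older servers to reject on insert. A hedged re-implementation of the two key checks, since their definitions sit outside this diff:

    def key_not_string(d):
        # any key (at any depth) that is not a str
        return any(
            not isinstance(k, str) or (isinstance(v, dict) and key_not_string(v))
            for k, v in d.items()
        )

    def key_starts_with_dollar(d):
        # any key (at any depth) that starts with "$"
        return any(
            k.startswith("$") or (isinstance(v, dict) and key_starts_with_dollar(v))
            for k, v in d.items()
        )

    assert key_not_string({1: "x"})
    assert not key_starts_with_dollar({"a.b": 1})   # dotted keys pass client-side now
    assert key_starts_with_dollar({"a": {"$set": 1}})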
class MapField(DictField): # XXX ValidationError raised outside of the "validate" method. if not isinstance(field, BaseField): self.error("Argument to MapField constructor must be a valid field") - super(MapField, self).__init__(field=field, *args, **kwargs) + super().__init__(field=field, *args, **kwargs) class ReferenceField(BaseField): @@ -1203,7 +1189,7 @@ class ReferenceField(BaseField): :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. """ # XXX ValidationError raised outside of the "validate" method. - if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -1214,11 +1200,11 @@ class ReferenceField(BaseField): self.dbref = dbref self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule - super(ReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1247,7 +1233,7 @@ class ReferenceField(BaseField): else: instance._data[self.name] = cls._from_son(dereferenced) - return super(ReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, document): if isinstance(document, DBRef): @@ -1298,7 +1284,7 @@ class ReferenceField(BaseField): def prepare_query_value(self, op, value): if value is None: return None - super(ReferenceField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) def validate(self, value): @@ -1334,7 +1320,7 @@ class CachedReferenceField(BaseField): fields = [] # XXX ValidationError raised outside of the "validate" method. 
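`ReferenceField.__init__` (like `CachedReferenceField` and `LazyReferenceField` below) accepts either a `Document` subclass or its name as a plain `str` to be resolved later, so `six.string_types` collapses to `str`. A toy version of that check with stand-in classes:

    class Document:                     # stand-in for mongoengine.Document
        pass

    class Author(Document):
        pass

    def is_valid_reference_target(document_type):
        return isinstance(document_type, str) or (
            isinstance(document_type, type) and issubclass(document_type, Document)
        )

    assert is_valid_reference_target("Author")      # resolved lazily by name
    assert is_valid_reference_target(Author)
    assert not is_valid_reference_target(42)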
- if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -1345,7 +1331,7 @@ class CachedReferenceField(BaseField): self.auto_sync = auto_sync self.document_type_obj = document_type self.fields = fields - super(CachedReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) def start_listener(self): from mongoengine import signals @@ -1357,7 +1343,7 @@ class CachedReferenceField(BaseField): return None update_kwargs = { - "set__%s__%s" % (self.name, key): val + "set__{}__{}".format(self.name, key): val for key, val in document._delta()[0].items() if key in self.fields } @@ -1379,7 +1365,7 @@ class CachedReferenceField(BaseField): @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1403,7 +1389,7 @@ class CachedReferenceField(BaseField): else: instance._data[self.name] = self.document_type._from_son(dereferenced) - return super(CachedReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, document, use_db_field=True, fields=None): id_field_name = self.document_type._meta["id_field"] @@ -1502,12 +1488,12 @@ class GenericReferenceField(BaseField): def __init__(self, *args, **kwargs): choices = kwargs.pop("choices", None) - super(GenericReferenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.choices = [] # Keep the choices as a list of allowed Document class names if choices: for choice in choices: - if isinstance(choice, six.string_types): + if isinstance(choice, str): self.choices.append(choice) elif isinstance(choice, type) and issubclass(choice, Document): self.choices.append(choice._class_name) @@ -1516,7 +1502,7 @@ class GenericReferenceField(BaseField): # method. 
self.error( "Invalid choices provided: must be a list of" - "Document subclasses and/or six.string_typess" + "Document subclasses and/or str" ) def _validate_choices(self, value): @@ -1526,7 +1512,7 @@ class GenericReferenceField(BaseField): value = value.get("_cls") elif isinstance(value, Document): value = value._class_name - super(GenericReferenceField, self)._validate_choices(value) + super()._validate_choices(value) def __get__(self, instance, owner): if instance is None: @@ -1542,7 +1528,7 @@ class GenericReferenceField(BaseField): else: instance._data[self.name] = dereferenced - return super(GenericReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): if not isinstance(value, (Document, DBRef, dict, SON)): @@ -1606,22 +1592,22 @@ class BinaryField(BaseField): def __init__(self, max_bytes=None, **kwargs): self.max_bytes = max_bytes - super(BinaryField, self).__init__(**kwargs) + super().__init__(**kwargs) def __set__(self, instance, value): """Handle bytearrays in python 3.1""" - if six.PY3 and isinstance(value, bytearray): - value = six.binary_type(value) - return super(BinaryField, self).__set__(instance, value) + if isinstance(value, bytearray): + value = bytes(value) + return super().__set__(instance, value) def to_mongo(self, value): return Binary(value) def validate(self, value): - if not isinstance(value, (six.binary_type, Binary)): + if not isinstance(value, (bytes, Binary)): self.error( "BinaryField only accepts instances of " - "(%s, %s, Binary)" % (six.binary_type.__name__, Binary.__name__) + "(%s, %s, Binary)" % (bytes.__name__, Binary.__name__) ) if self.max_bytes is not None and len(value) > self.max_bytes: @@ -1630,14 +1616,14 @@ class BinaryField(BaseField): def prepare_query_value(self, op, value): if value is None: return value - return super(BinaryField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class GridFSError(Exception): pass -class GridFSProxy(object): +class GridFSProxy: """Proxy object to handle writing and reading of files to and from GridFS .. 
versionadded:: 0.4 @@ -1687,8 +1673,6 @@ class GridFSProxy(object): def __bool__(self): return bool(self.grid_id) - __nonzero__ = __bool__ # For Py2 support - def __getstate__(self): self_dict = self.__dict__ self_dict["_fs"] = None @@ -1703,12 +1687,12 @@ class GridFSProxy(object): return self.__copy__() def __repr__(self): - return "<%s: %s>" % (self.__class__.__name__, self.grid_id) + return "<{}: {}>".format(self.__class__.__name__, self.grid_id) def __str__(self): gridout = self.get() filename = getattr(gridout, "filename") if gridout else "" - return "<%s: %s (%s)>" % (self.__class__.__name__, filename, self.grid_id) + return "<{}: {} ({})>".format(self.__class__.__name__, filename, self.grid_id) def __eq__(self, other): if isinstance(other, GridFSProxy): @@ -1819,7 +1803,7 @@ class FileField(BaseField): def __init__( self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs ): - super(FileField, self).__init__(**kwargs) + super().__init__(**kwargs) self.collection_name = collection_name self.db_alias = db_alias @@ -1842,7 +1826,7 @@ class FileField(BaseField): key = self.name if ( hasattr(value, "read") and not isinstance(value, GridFSProxy) - ) or isinstance(value, (six.binary_type, six.string_types)): + ) or isinstance(value, (bytes, str)): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) # If a file already exists, delete it @@ -1960,11 +1944,11 @@ class ImageGridFsProxy(GridFSProxy): w, h = img.size - io = StringIO() + io = BytesIO() img.save(io, img_format, progressive=progressive) io.seek(0) - return super(ImageGridFsProxy, self).put( + return super().put( io, width=w, height=h, format=img_format, thumbnail_id=thumb_id, **kwargs ) @@ -1974,12 +1958,12 @@ class ImageGridFsProxy(GridFSProxy): if out and out.thumbnail_id: self.fs.delete(out.thumbnail_id) - return super(ImageGridFsProxy, self).delete() + return super().delete() def _put_thumbnail(self, thumbnail, format, progressive, **kwargs): w, h = thumbnail.size - io = StringIO() + io = BytesIO() thumbnail.save(io, format, progressive=progressive) io.seek(0) @@ -2049,16 +2033,11 @@ class ImageField(FileField): for att_name, att in extra_args.items(): value = None if isinstance(att, (tuple, list)): - if six.PY3: - value = dict( - itertools.zip_longest(params_size, att, fillvalue=None) - ) - else: - value = dict(map(None, params_size, att)) + value = dict(itertools.zip_longest(params_size, att, fillvalue=None)) setattr(self, att_name, value) - super(ImageField, self).__init__(collection_name=collection_name, **kwargs) + super().__init__(collection_name=collection_name, **kwargs) class SequenceField(BaseField): @@ -2110,14 +2089,14 @@ class SequenceField(BaseField): self.value_decorator = ( value_decorator if callable(value_decorator) else self.VALUE_DECORATOR ) - super(SequenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def generate(self): """ Generate and Increment the counter """ sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_one_and_update( @@ -2131,7 +2110,7 @@ class SequenceField(BaseField): def set_next_value(self, value): """Helper method to set the next sequence value""" sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = 
get_db(alias=self.db_alias)[self.collection_name] counter = collection.find_one_and_update( filter={"_id": sequence_id}, @@ -2148,7 +2127,7 @@ class SequenceField(BaseField): as it is only fixed on set. """ sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] data = collection.find_one({"_id": sequence_id}) @@ -2171,7 +2150,7 @@ class SequenceField(BaseField): ) def __get__(self, instance, owner): - value = super(SequenceField, self).__get__(instance, owner) + value = super().__get__(instance, owner) if value is None and instance._initialised: value = self.generate() instance._data[self.name] = value @@ -2184,7 +2163,7 @@ class SequenceField(BaseField): if value is None and instance._initialised: value = self.generate() - return super(SequenceField, self).__set__(instance, value) + return super().__set__(instance, value) def prepare_query_value(self, op, value): """ @@ -2218,14 +2197,14 @@ class UUIDField(BaseField): .. versionchanged:: 0.6.19 """ self._binary = binary - super(UUIDField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if not self._binary: original_value = value try: - if not isinstance(value, six.string_types): - value = six.text_type(value) + if not isinstance(value, str): + value = str(value) return uuid.UUID(value) except (ValueError, TypeError, AttributeError): return original_value @@ -2233,8 +2212,8 @@ class UUIDField(BaseField): def to_mongo(self, value): if not self._binary: - return six.text_type(value) - elif isinstance(value, six.string_types): + return str(value) + elif isinstance(value, str): return uuid.UUID(value) return value @@ -2245,7 +2224,7 @@ class UUIDField(BaseField): def validate(self, value): if not isinstance(value, uuid.UUID): - if not isinstance(value, six.string_types): + if not isinstance(value, str): value = str(value) try: uuid.UUID(value) @@ -2444,7 +2423,7 @@ class LazyReferenceField(BaseField): document. Note this only work getting field (not setting or deleting). """ # XXX ValidationError raised outside of the "validate" method. 
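In non-binary mode, `UUIDField.to_python` accepts both `uuid.UUID` instances and their string form, and hands back unparsable input unchanged. A standalone sketch of that behaviour:

    import uuid

    def to_python(value):
        original_value = value
        try:
            if not isinstance(value, str):
                value = str(value)
            return uuid.UUID(value)
        except (ValueError, TypeError, AttributeError):
            return original_value

    u = uuid.uuid4()
    assert to_python(u) == u            # UUID round-trips via str()
    assert to_python(str(u)) == u       # string form parses back
    assert to_python("not-a-uuid") == "not-a-uuid"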
- if not isinstance(document_type, six.string_types) and not issubclass( + if not isinstance(document_type, str) and not issubclass( document_type, Document ): self.error( @@ -2456,11 +2435,11 @@ class LazyReferenceField(BaseField): self.passthrough = passthrough self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule - super(LazyReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -2499,7 +2478,7 @@ class LazyReferenceField(BaseField): if value: instance._data[self.name] = value - return super(LazyReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, value): if isinstance(value, LazyReference): @@ -2563,7 +2542,7 @@ class LazyReferenceField(BaseField): def prepare_query_value(self, op, value): if value is None: return None - super(LazyReferenceField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) def lookup_member(self, member_name): @@ -2590,12 +2569,12 @@ class GenericLazyReferenceField(GenericReferenceField): def __init__(self, *args, **kwargs): self.passthrough = kwargs.pop("passthrough", False) - super(GenericLazyReferenceField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def _validate_choices(self, value): if isinstance(value, LazyReference): value = value.document_type._class_name - super(GenericLazyReferenceField, self)._validate_choices(value) + super()._validate_choices(value) def build_lazyref(self, value): if isinstance(value, LazyReference): @@ -2624,7 +2603,7 @@ class GenericLazyReferenceField(GenericReferenceField): if value: instance._data[self.name] = value - return super(GenericLazyReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): if isinstance(value, LazyReference) and value.pk is None: @@ -2632,7 +2611,7 @@ class GenericLazyReferenceField(GenericReferenceField): "You can only reference documents once they have been" " saved to the database" ) - return super(GenericLazyReferenceField, self).validate(value) + return super().validate(value) def to_mongo(self, document): if document is None: @@ -2651,4 +2630,4 @@ class GenericLazyReferenceField(GenericReferenceField): ) ) else: - return super(GenericLazyReferenceField, self).to_mongo(document) + return super().to_mongo(document) diff --git a/mongoengine/mongodb_support.py b/mongoengine/mongodb_support.py index 5d437fef..522f064e 100644 --- a/mongoengine/mongodb_support.py +++ b/mongoengine/mongodb_support.py @@ -11,7 +11,7 @@ MONGODB_36 = (3, 6) def get_mongodb_version(): - """Return the version of the connected mongoDB (first 2 digits) + """Return the version of the default connected mongoDB (first 2 digits) :return: tuple(int, int) """ diff --git a/mongoengine/python_support.py b/mongoengine/python_support.py deleted file mode 100644 index 57e467db..00000000 --- a/mongoengine/python_support.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Helper functions, constants, and types to aid with Python v2.7 - v3.x support -""" -import six - -# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. 
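With `python_support.py` deleted, the two shims it provided map straight onto the standard library, which is what the new imports in fields.py rely on:

    from collections.abc import Hashable   # no DeprecationWarning on Python 3.7+
    from io import BytesIO                 # replaces the StringIO alias used for GridFS data

    buf = BytesIO(b"image-bytes")
    assert buf.read() == b"image-bytes"
    assert isinstance("some-key", Hashable)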
-StringIO = six.BytesIO - -# Additionally for Py2, try to use the faster cStringIO, if available -if not six.PY3: - try: - import cStringIO - except ImportError: - pass - else: - StringIO = cStringIO.StringIO - - -if six.PY3: - from collections.abc import Hashable -else: - # raises DeprecationWarnings in Python >=3.7 - from collections import Hashable diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 76427c89..317ec698 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import copy import itertools import re @@ -14,8 +12,6 @@ import pymongo.errors from pymongo.collection import ReturnDocument from pymongo.common import validate_read_preference from pymongo.read_concern import ReadConcern -import six -from six import iteritems from mongoengine import signals from mongoengine.base import get_document @@ -48,7 +44,7 @@ DENY = 3 PULL = 4 -class BaseQuerySet(object): +class BaseQuerySet: """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. """ @@ -67,7 +63,6 @@ class BaseQuerySet(object): self._ordering = None self._snapshot = False self._timeout = True - self._slave_okay = False self._read_preference = None self._read_concern = None self._iter = False @@ -212,8 +207,6 @@ class BaseQuerySet(object): """Avoid to open all records in an if stmt in Py3.""" return self._has_data() - __nonzero__ = __bool__ # For Py2 support - # Core functions def all(self): @@ -264,20 +257,21 @@ class BaseQuerySet(object): queryset = queryset.filter(*q_objs, **query) try: - result = six.next(queryset) + result = next(queryset) except StopIteration: msg = "%s matching query does not exist." % queryset._document._class_name raise queryset._document.DoesNotExist(msg) + try: - six.next(queryset) + # Check if there is another match + next(queryset) except StopIteration: return result - # If we were able to retrieve the 2nd doc, rewind the cursor and - # raise the MultipleObjectsReturned exception. - queryset.rewind() - message = u"%d items returned, instead of 1" % queryset.count() - raise queryset._document.MultipleObjectsReturned(message) + # If we were able to retrieve the 2nd doc, raise the MultipleObjectsReturned exception. + raise queryset._document.MultipleObjectsReturned( + "2 or more items returned, instead of 1" + ) def create(self, **kwargs): """Create new object. Returns the saved object instance. 
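The `QuerySet.get()` rewrite above no longer rewinds the cursor and runs a `count()` to build the `MultipleObjectsReturned` message; it simply tries to pull a second result. A schematic version of that control flow, with builtin exceptions standing in for `DoesNotExist` and `MultipleObjectsReturned`:

    def get_single(results):
        try:
            result = next(results)
        except StopIteration:
            raise LookupError("matching query does not exist.")
        try:
            next(results)               # probe for a second match
        except StopIteration:
            return result               # exactly one match
        raise ValueError("2 or more items returned, instead of 1")

    assert get_single(iter([42])) == 42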
@@ -361,20 +355,20 @@ class BaseQuerySet(object): ) except pymongo.errors.DuplicateKeyError as err: message = "Could not save document (%s)" - raise NotUniqueError(message % six.text_type(err)) + raise NotUniqueError(message % err) except pymongo.errors.BulkWriteError as err: # inserting documents that already have an _id field will # give huge performance debt or raise - message = u"Bulk write error: (%s)" - raise BulkWriteError(message % six.text_type(err.details)) + message = "Bulk write error: (%s)" + raise BulkWriteError(message % err.details) except pymongo.errors.OperationFailure as err: message = "Could not save document (%s)" - if re.match("^E1100[01] duplicate key", six.text_type(err)): + if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u"Tried to save duplicate unique keys (%s)" - raise NotUniqueError(message % six.text_type(err)) - raise OperationError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) + raise OperationError(message % err) # Apply inserted_ids to documents for doc, doc_id in zip(docs, ids): @@ -555,12 +549,12 @@ class BaseQuerySet(object): elif result.raw_result: return result.raw_result["n"] except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u"Update failed (%s)" % six.text_type(err)) + raise NotUniqueError("Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - if six.text_type(err) == u"multi not coded yet": - message = u"update() method requires MongoDB 1.1.3+" + if str(err) == "multi not coded yet": + message = "update() method requires MongoDB 1.1.3+" raise OperationError(message) - raise OperationError(u"Update failed (%s)" % six.text_type(err)) + raise OperationError("Update failed (%s)" % err) def upsert_one(self, write_concern=None, read_concern=None, **update): """Overwrite or add the first document matched by the query. @@ -680,9 +674,9 @@ class BaseQuerySet(object): **self._cursor_args ) except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u"Update failed (%s)" % err) + raise NotUniqueError("Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - raise OperationError(u"Update failed (%s)" % err) + raise OperationError("Update failed (%s)" % err) if full_response: if result["value"] is not None: @@ -711,7 +705,7 @@ class BaseQuerySet(object): return queryset.filter(pk=object_id).first() def in_bulk(self, object_ids): - """Retrieve a set of documents by their ids. + """"Retrieve a set of documents by their ids. :param object_ids: a list or tuple of ObjectId's :rtype: dict of ObjectId's as keys and collection-specific @@ -794,7 +788,6 @@ class BaseQuerySet(object): "_ordering", "_snapshot", "_timeout", - "_slave_okay", "_read_preference", "_iter", "_scalar", @@ -1008,7 +1001,7 @@ class BaseQuerySet(object): .. versionchanged:: 0.5 - Added subfield support """ fields = {f: QueryFieldList.ONLY for f in fields} - self.only_fields = fields.keys() + self.only_fields = list(fields.keys()) return self.fields(True, **fields) def exclude(self, *fields): @@ -1191,20 +1184,6 @@ class BaseQuerySet(object): queryset._timeout = enabled return queryset - # DEPRECATED. Has no more impact on PyMongo 3+ - def slave_okay(self, enabled): - """Enable or disable the slave_okay when querying. - - :param enabled: whether or not the slave_okay is enabled - - .. 
deprecated:: Ignored with PyMongo 3+ - """ - msg = "slave_okay is deprecated as it has no impact when using PyMongo 3+." - warnings.warn(msg, DeprecationWarning) - queryset = self.clone() - queryset._slave_okay = enabled - return queryset - def read_preference(self, read_preference): """Change the read_preference when querying. @@ -1387,13 +1366,13 @@ class BaseQuerySet(object): map_f_scope = {} if isinstance(map_f, Code): map_f_scope = map_f.scope - map_f = six.text_type(map_f) + map_f = str(map_f) map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) reduce_f_scope = {} if isinstance(reduce_f, Code): reduce_f_scope = reduce_f.scope - reduce_f = six.text_type(reduce_f) + reduce_f = str(reduce_f) reduce_f_code = queryset._sub_js_fields(reduce_f) reduce_f = Code(reduce_f_code, reduce_f_scope) @@ -1403,7 +1382,7 @@ class BaseQuerySet(object): finalize_f_scope = {} if isinstance(finalize_f, Code): finalize_f_scope = finalize_f.scope - finalize_f = six.text_type(finalize_f) + finalize_f = str(finalize_f) finalize_f_code = queryset._sub_js_fields(finalize_f) finalize_f = Code(finalize_f_code, finalize_f_scope) mr_args["finalize"] = finalize_f @@ -1419,7 +1398,7 @@ class BaseQuerySet(object): else: map_reduce_function = "map_reduce" - if isinstance(output, six.string_types): + if isinstance(output, str): mr_args["out"] = output elif isinstance(output, dict): @@ -1606,7 +1585,7 @@ class BaseQuerySet(object): if self._limit == 0 or self._none: raise StopIteration - raw_doc = six.next(self._cursor) + raw_doc = next(self._cursor) if self._as_pymongo: return raw_doc @@ -1851,13 +1830,13 @@ class BaseQuerySet(object): } """ total, data, types = self.exec_js(freq_func, field) - values = {types.get(k): int(v) for k, v in iteritems(data)} + values = {types.get(k): int(v) for k, v in data.items()} if normalize: values = {k: float(v) / total for k, v in values.items()} frequencies = {} - for k, v in iteritems(values): + for k, v in values.items(): if isinstance(k, float): if int(k) == k: k = int(k) @@ -1877,7 +1856,7 @@ class BaseQuerySet(object): field_parts = field.split(".") try: field = ".".join( - f if isinstance(f, six.string_types) else f.db_field + f if isinstance(f, str) else f.db_field for f in self._document._lookup_field(field_parts) ) db_field_paths.append(field) @@ -1889,7 +1868,7 @@ class BaseQuerySet(object): for subdoc in subclasses: try: subfield = ".".join( - f if isinstance(f, six.string_types) else f.db_field + f if isinstance(f, str) else f.db_field for f in subdoc._lookup_field(field_parts) ) db_field_paths.append(subfield) @@ -1963,7 +1942,7 @@ class BaseQuerySet(object): field_name = match.group(1).split(".") fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript - return u'["%s"]' % fields[-1].db_field + return '["%s"]' % fields[-1].db_field def field_path_sub(match): # Extract just the field name, and look up the field objects @@ -1993,23 +1972,3 @@ class BaseQuerySet(object): setattr(queryset, "_" + method_name, val) return queryset - - # Deprecated - def ensure_index(self, **kwargs): - """Deprecated use :func:`Document.ensure_index`""" - msg = ( - "Doc.objects()._ensure_index() is deprecated. " - "Use Doc.ensure_index() instead." - ) - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_index(**kwargs) - return self - - def _ensure_indexes(self): - """Deprecated use :func:`~Document.ensure_indexes`""" - msg = ( - "Doc.objects()._ensure_indexes() is deprecated. 
" - "Use Doc.ensure_indexes() instead." - ) - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_indexes() diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index 5c3ff222..443c895c 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -1,7 +1,7 @@ __all__ = ("QueryFieldList",) -class QueryFieldList(object): +class QueryFieldList: """Object that handles combinations of .only() and .exclude() calls""" ONLY = 1 @@ -69,8 +69,6 @@ class QueryFieldList(object): def __bool__(self): return bool(self.fields) - __nonzero__ = __bool__ # For Py2 support - def as_dict(self): field_list = {field: self.value for field in self.fields} if self.slice: @@ -80,7 +78,7 @@ class QueryFieldList(object): return field_list def reset(self): - self.fields = set([]) + self.fields = set() self.slice = {} self.value = self.ONLY diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index 5067ffbf..699526fd 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -4,7 +4,7 @@ from mongoengine.queryset.queryset import QuerySet __all__ = ("queryset_manager", "QuerySetManager") -class QuerySetManager(object): +class QuerySetManager: """ The default QuerySet Manager. diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index 4ba62d46..8b5872f8 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,5 +1,3 @@ -import six - from mongoengine.errors import OperationError from mongoengine.queryset.base import ( BaseQuerySet, @@ -127,8 +125,8 @@ class QuerySet(BaseQuerySet): # Pull in ITER_CHUNK_SIZE docs from the database and store them in # the result cache. try: - for _ in six.moves.range(ITER_CHUNK_SIZE): - self._result_cache.append(six.next(self)) + for _ in range(ITER_CHUNK_SIZE): + self._result_cache.append(next(self)) except StopIteration: # Getting this exception means there are no more docs in the # db cursor. Set _has_more to False so that we can use that @@ -143,10 +141,10 @@ class QuerySet(BaseQuerySet): getting the count """ if with_limit_and_skip is False: - return super(QuerySet, self).count(with_limit_and_skip) + return super().count(with_limit_and_skip) if self._len is None: - self._len = super(QuerySet, self).count(with_limit_and_skip) + self._len = super().count(with_limit_and_skip) return self._len @@ -180,9 +178,9 @@ class QuerySetNoCache(BaseQuerySet): return ".. queryset mid-iteration .." 
data = [] - for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): + for _ in range(REPR_OUTPUT_SIZE + 1): try: - data.append(six.next(self)) + data.append(next(self)) except StopIteration: break diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 659a97e2..3f1db8fa 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -3,14 +3,12 @@ from collections import defaultdict from bson import ObjectId, SON from bson.dbref import DBRef import pymongo -import six -from six import iteritems from mongoengine.base import UPDATE_OPERATORS from mongoengine.common import _import_class from mongoengine.errors import InvalidQueryError -__all__ = ("query", "update") +__all__ = ("query", "update", "STRING_OPERATORS") COMPARISON_OPERATORS = ( "ne", @@ -101,7 +99,7 @@ def query(_doc_cls=None, **kwargs): cleaned_fields = [] for field in fields: append_field = True - if isinstance(field, six.string_types): + if isinstance(field, str): parts.append(field) append_field = False # is last and CachedReferenceField @@ -180,7 +178,7 @@ def query(_doc_cls=None, **kwargs): "$near" in value_dict or "$nearSphere" in value_dict ): value_son = SON() - for k, v in iteritems(value_dict): + for k, v in value_dict.items(): if k == "$maxDistance" or k == "$minDistance": continue value_son[k] = v @@ -281,7 +279,7 @@ def update(_doc_cls=None, **update): appended_sub_field = False for field in fields: append_field = True - if isinstance(field, six.string_types): + if isinstance(field, str): # Convert the S operator to $ if field == "S": field = "$" @@ -435,7 +433,9 @@ def _geo_operator(field, op, value): value = {"$near": _infer_geometry(value)} else: raise NotImplementedError( - 'Geo method "%s" has not been implemented for a %s ' % (op, field._name) + 'Geo method "{}" has not been implemented for a {} '.format( + op, field._name + ) ) return value diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 470839c1..0eacc2ef 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -7,7 +7,7 @@ from mongoengine.queryset import transform __all__ = ("Q", "QNode") -class QNodeVisitor(object): +class QNodeVisitor: """Base visitor class for visiting Q-object nodes in a query tree. """ @@ -79,7 +79,7 @@ class QueryCompilerVisitor(QNodeVisitor): return transform.query(self.document, **query.query) -class QNode(object): +class QNode: """Base class for nodes in query trees.""" AND = 0 @@ -143,8 +143,6 @@ class QCombination(QNode): def __bool__(self): return bool(self.children) - __nonzero__ = __bool__ # For Py2 support - def accept(self, visitor): for i in range(len(self.children)): if isinstance(self.children[i], QNode): @@ -180,8 +178,6 @@ class Q(QNode): def __bool__(self): return bool(self.query) - __nonzero__ = __bool__ # For Py2 support - def __eq__(self, other): return self.__class__ == other.__class__ and self.query == other.query diff --git a/mongoengine/signals.py b/mongoengine/signals.py index 0db63604..582b533d 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -15,11 +15,11 @@ try: signals_available = True except ImportError: - class Namespace(object): + class Namespace: def signal(self, name, doc=None): return _FakeSignal(name, doc) - class _FakeSignal(object): + class _FakeSignal: """If blinker is unavailable, create a fake class with the same interface that allows sending of signals but will fail with an error on anything else. 
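`__nonzero__` was the Python 2 truth-value hook; Python 3 only consults `__bool__`, which is why the `__nonzero__ = __bool__` aliases can be dropped from `BaseQuerySet`, `QueryFieldList`, `GridFSProxy`, `QCombination` and `Q` without changing behaviour. A reduced example:

    class Q:                            # reduced stand-in for mongoengine's Q
        def __init__(self, **query):
            self.query = query

        def __bool__(self):
            return bool(self.query)

    assert not Q()                      # empty query is falsy
    assert Q(name="John")               # non-empty query is truthy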
Instead of doing anything on send, it diff --git a/python-mongoengine.spec b/python-mongoengine.spec index eddb488d..635c779f 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT # %{python_sitearch}/* %changelog -* See: http://docs.mongoengine.org/en/latest/changelog.html \ No newline at end of file +* See: http://docs.mongoengine.org/en/latest/changelog.html diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..ee788e7a --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,8 @@ +black +flake8 +flake8-import-order +pre-commit +pytest +ipdb +ipython +tox diff --git a/requirements-lint.txt b/requirements-lint.txt deleted file mode 100644 index 9dc6123b..00000000 --- a/requirements-lint.txt +++ /dev/null @@ -1,3 +0,0 @@ -black -flake8 -flake8-import-order diff --git a/requirements.txt b/requirements.txt index 43e5261b..0ce39f74 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ pymongo>=3.4 -six==1.10.0 Sphinx==1.5.5 sphinx-rtd-theme==0.2.4 diff --git a/setup.py b/setup.py index 5cba5d9e..393de9c7 100644 --- a/setup.py +++ b/setup.py @@ -92,26 +92,22 @@ version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0] VERSION = get_version(eval(version_line.split("=")[-1])) CLASSIFIERS = [ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database", "Topic :: Software Development :: Libraries :: Python Modules", ] -PYTHON_VERSION = sys.version_info[0] -PY3 = PYTHON_VERSION == 3 -PY2 = PYTHON_VERSION == 2 - extra_opts = { "packages": find_packages(exclude=["tests", "tests.*"]), "tests_require": [ @@ -120,18 +116,14 @@ extra_opts = { "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls "blinker", "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support - "zipp<2.0.0", # (dependency of pytest) dropped python2 support ], } -if PY3: - extra_opts["use_2to3"] = True - if "test" in sys.argv: - extra_opts["packages"] = find_packages() - extra_opts["package_data"] = { - "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] - } -else: - extra_opts["tests_require"] += ["python-dateutil"] + +if "test" in sys.argv: + extra_opts["packages"] = find_packages() + extra_opts["package_data"] = { + "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] + } setup( name="mongoengine", @@ -148,7 +140,8 @@ setup( long_description=LONG_DESCRIPTION, platforms=["any"], classifiers=CLASSIFIERS, - install_requires=["pymongo>=3.4, <4.0", "six>=1.10.0"], + python_requires=">=3.5", + install_requires=["pymongo>=3.4, <4.0"], cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index be857b59..4f728c2d 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -5,7 +5,6 @@ from datetime import datetime from pymongo.collation import 
Collation from pymongo.errors import OperationFailure import pytest -from six import iteritems from mongoengine import * from mongoengine.connection import get_db @@ -59,7 +58,7 @@ class TestIndexes(unittest.TestCase): info = BlogPost.objects._collection.index_information() # _id, '-date', 'tags', ('cat', 'date') assert len(info) == 4 - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -87,7 +86,7 @@ class TestIndexes(unittest.TestCase): # the indices on -date and tags will both contain # _cls as first element in the key assert len(info) == 4 - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -102,7 +101,7 @@ class TestIndexes(unittest.TestCase): ExtendedBlogPost.ensure_indexes() info = ExtendedBlogPost.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: assert expected["fields"] in info @@ -192,7 +191,7 @@ class TestIndexes(unittest.TestCase): # Indexes are lazy so use list() to perform query list(Person.objects) info = Person.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("rank.title", 1)] in info def test_explicit_geo2d_index(self): @@ -207,7 +206,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "2d")] in info def test_explicit_geo2d_index_embedded(self): @@ -227,7 +226,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("current.location.point", "2d")] in info def test_explicit_geosphere_index(self): @@ -244,7 +243,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "2dsphere")] in info def test_explicit_geohaystack_index(self): @@ -266,7 +265,7 @@ class TestIndexes(unittest.TestCase): Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "geoHaystack")] in info def test_create_geohaystack_index(self): @@ -279,7 +278,7 @@ class TestIndexes(unittest.TestCase): Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) info = Place._get_collection().index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("location.point", "geoHaystack"), ("name", 1)] in info def test_dictionary_indexes(self): @@ -308,7 +307,7 @@ class TestIndexes(unittest.TestCase): info = BlogPost.objects._collection.index_information() info = [ (value["key"], value.get("unique", False), value.get("sparse", False)) - for key, value in iteritems(info) + for key, value in 
info.items() ] assert ([("addDate", -1)], True, True) in info @@ -806,18 +805,6 @@ class TestIndexes(unittest.TestCase): info = Log.objects._collection.index_information() assert 3600 == info["created_1"]["expireAfterSeconds"] - def test_index_drop_dups_silently_ignored(self): - class Customer(Document): - cust_id = IntField(unique=True, required=True) - meta = { - "indexes": ["cust_id"], - "index_drop_dups": True, - "allow_inheritance": False, - } - - Customer.drop_collection() - Customer.objects.first() - def test_unique_and_indexes(self): """Ensure that 'unique' constraints aren't overridden by meta.indexes. @@ -901,7 +888,7 @@ class TestIndexes(unittest.TestCase): self.fail("Unbound local error at index + pk definition") info = BlogPost.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] index_item = [("_id", 1), ("comments.comment_id", 1)] assert index_item in info @@ -942,7 +929,7 @@ class TestIndexes(unittest.TestCase): meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]} info = MyDoc.objects._collection.index_information() - info = [value["key"] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] assert [("provider_ids.foo", 1)] in info assert [("provider_ids.bar", 1)] in info @@ -1058,10 +1045,6 @@ class TestIndexes(unittest.TestCase): del index_info[key][ "ns" ] # drop the index namespace - we don't care about that here, MongoDB 3+ - if "dropDups" in index_info[key]: - del index_info[key][ - "dropDups" - ] # drop the index dropDups - it is deprecated in MongoDB 3+ assert index_info == { "txt_1": {"key": [("txt", 1)], "background": False}, diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py index 5072f841..53a1489b 100644 --- a/tests/document/test_inheritance.py +++ b/tests/document/test_inheritance.py @@ -3,7 +3,6 @@ import unittest import warnings import pytest -from six import iteritems from mongoengine import ( BooleanField, @@ -523,7 +522,6 @@ class TestInheritance(MongoDBTestCase): defaults = { "index_background": True, - "index_drop_dups": True, "index_opts": {"hello": "world"}, "allow_inheritance": True, "queryset_class": "QuerySet", @@ -550,7 +548,7 @@ class TestInheritance(MongoDBTestCase): class Human(Mammal): pass - for k, v in iteritems(defaults): + for k, v in defaults.items(): for cls in [Animal, Fish, Guppy]: assert cls._meta[k] == v diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index a5c21323..993cc161 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -10,7 +10,6 @@ import bson from bson import DBRef, ObjectId from pymongo.errors import DuplicateKeyError import pytest -from six import iteritems from mongoengine import * from mongoengine import signals @@ -1415,7 +1414,7 @@ class TestDocumentInstance(MongoDBTestCase): assert raw_doc["first_name"] == "John" def test_inserts_if_you_set_the_pk(self): - p1 = self.Person(name="p1", id=bson.ObjectId()).save() + _ = self.Person(name="p1", id=bson.ObjectId()).save() p2 = self.Person(name="p2") p2.id = bson.ObjectId() p2.save() @@ -2196,7 +2195,7 @@ class TestDocumentInstance(MongoDBTestCase): user = User(name="Mike").save() reviewer = User(name="John").save() - book = Book(author=user, reviewer=reviewer).save() + _ = Book(author=user, reviewer=reviewer).save() reviewer.delete() assert Book.objects.count() == 1 @@ -2222,7 +2221,7 @@ class TestDocumentInstance(MongoDBTestCase): 
user_1 = User(id=1).save() user_2 = User(id=2).save() - book_1 = Book(id=1, author=user_2).save() + _ = Book(id=1, author=user_2).save() book_2 = Book(id=2, author=user_1).save() user_2.delete() @@ -2231,7 +2230,7 @@ class TestDocumentInstance(MongoDBTestCase): assert Book.objects.get() == book_2 user_3 = User(id=3).save() - book_3 = Book(id=3, author=user_3).save() + _ = Book(id=3, author=user_3).save() user_3.delete() # Deleting user_3 should also delete book_3 @@ -3205,7 +3204,7 @@ class TestDocumentInstance(MongoDBTestCase): def test_positional_creation(self): """Document cannot be instantiated using positional arguments.""" with pytest.raises(TypeError) as exc_info: - person = self.Person("Test User", 42) + self.Person("Test User", 42) expected_msg = ( "Instantiating a document with positional arguments is not " @@ -3274,7 +3273,7 @@ class TestDocumentInstance(MongoDBTestCase): def expand(self): self.flattened_parameter = {} - for parameter_name, parameter in iteritems(self.parameters): + for parameter_name, parameter in self.parameters.items(): parameter.expand() class NodesSystem(Document): @@ -3282,7 +3281,7 @@ class TestDocumentInstance(MongoDBTestCase): nodes = MapField(ReferenceField(Node, dbref=False)) def save(self, *args, **kwargs): - for node_name, node in iteritems(self.nodes): + for node_name, node in self.nodes.items(): node.expand() node.save(*args, **kwargs) super(NodesSystem, self).save(*args, **kwargs) @@ -3607,13 +3606,13 @@ class TestDocumentInstance(MongoDBTestCase): v = StringField() class A(Document): - l = ListField(EmbeddedDocumentField(B)) + array = ListField(EmbeddedDocumentField(B)) A.objects.delete() - A(l=[B(v="1"), B(v="2"), B(v="3")]).save() + A(array=[B(v="1"), B(v="2"), B(v="3")]).save() a = A.objects.get() - assert a.l._instance == a - for idx, b in enumerate(a.l): + assert a.array._instance == a + for idx, b in enumerate(a.array): assert b._instance == a assert idx == 2 diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index e2a1b8d6..a9c0c7e5 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -3,13 +3,12 @@ import uuid from bson import Binary import pytest -import six from mongoengine import * from tests.utils import MongoDBTestCase -BIN_VALUE = six.b( - "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5" +BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode( + "latin-1" ) @@ -22,7 +21,7 @@ class TestBinaryField(MongoDBTestCase): content_type = StringField() blob = BinaryField() - BLOB = six.b("\xe6\x00\xc4\xff\x07") + BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1") MIME_TYPE = "application/octet-stream" Attachment.drop_collection() @@ -32,7 +31,7 @@ class TestBinaryField(MongoDBTestCase): attachment_1 = Attachment.objects().first() assert MIME_TYPE == attachment_1.content_type - assert BLOB == six.binary_type(attachment_1.blob) + assert BLOB == bytes(attachment_1.blob) def test_validation_succeeds(self): """Ensure that valid values can be assigned to binary fields. 
@@ -47,11 +46,11 @@ class TestBinaryField(MongoDBTestCase): attachment_required = AttachmentRequired() with pytest.raises(ValidationError): attachment_required.validate() - attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07")) + attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1")) attachment_required.validate() - _5_BYTES = six.b("\xe6\x00\xc4\xff\x07") - _4_BYTES = six.b("\xe6\x00\xc4\xff") + _5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1") + _4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1") with pytest.raises(ValidationError): AttachmentSizeLimit(blob=_5_BYTES).validate() AttachmentSizeLimit(blob=_4_BYTES).validate() @@ -123,10 +122,7 @@ class TestBinaryField(MongoDBTestCase): upsert=True, new=True, set__bin_field=BIN_VALUE ) assert doc.some_field == "test" - if six.PY3: - assert doc.bin_field == BIN_VALUE - else: - assert doc.bin_field == Binary(BIN_VALUE) + assert doc.bin_field == BIN_VALUE def test_update_one(self): """Ensures no regression of bug #1127""" @@ -136,7 +132,7 @@ class TestBinaryField(MongoDBTestCase): MyDocument.drop_collection() - bin_data = six.b("\xe6\x00\xc4\xff\x07") + bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1") doc = MyDocument(bin_field=bin_data).save() n_updated = MyDocument.objects(bin_field=bin_data).update_one( @@ -144,7 +140,4 @@ class TestBinaryField(MongoDBTestCase): ) assert n_updated == 1 fetched = MyDocument.objects.with_id(doc.id) - if six.PY3: - assert fetched.bin_field == BIN_VALUE - else: - assert fetched.bin_field == Binary(BIN_VALUE) + assert fetched.bin_field == BIN_VALUE diff --git a/tests/fields/test_complex_datetime_field.py b/tests/fields/test_complex_datetime_field.py index 5bd6c56b..d118ad23 100644 --- a/tests/fields/test_complex_datetime_field.py +++ b/tests/fields/test_complex_datetime_field.py @@ -4,6 +4,8 @@ import itertools import math import re +import pytest + from mongoengine import * from tests.utils import MongoDBTestCase @@ -191,3 +193,18 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): fetched_log = Log.objects.with_id(log.id) assert fetched_log.timestamp >= NOW + + def test_setting_bad_value_does_not_raise_unless_validate_is_called(self): + # test regression of #2253 + + class Log(Document): + timestamp = ComplexDateTimeField() + + Log.drop_collection() + + log = Log(timestamp="garbage") + with pytest.raises(ValidationError): + log.validate() + + with pytest.raises(ValidationError): + log.save() diff --git a/tests/fields/test_date_field.py b/tests/fields/test_date_field.py index e94ed0ce..42a4b7f1 100644 --- a/tests/fields/test_date_field.py +++ b/tests/fields/test_date_field.py @@ -2,7 +2,6 @@ import datetime import pytest -import six try: import dateutil @@ -89,17 +88,6 @@ class TestDateField(MongoDBTestCase): assert log.date == d1.date() assert log.date == d2.date() - if not six.PY3: - # Pre UTC dates microseconds below 1000 are dropped - # This does not seem to be true in PY3 - d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) - d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) - log.date = d1 - log.save() - log.reload() - assert log.date == d1.date() - assert log.date == d2.date() - def test_regular_usage(self): """Tests for regular datetime fields""" diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index 70debac5..48936af7 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -2,7 +2,6 @@ import datetime as dt import pytest -import six try: import dateutil @@ -98,17 +97,6 @@ class 
TestDateTimeField(MongoDBTestCase): assert log.date != d1 assert log.date == d2 - if not six.PY3: - # Pre UTC dates microseconds below 1000 are dropped - # This does not seem to be true in PY3 - d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999) - d2 = dt.datetime(1969, 12, 31, 23, 59, 59) - log.date = d1 - log.save() - log.reload() - assert log.date != d1 - assert log.date == d2 - def test_regular_usage(self): """Tests for regular datetime fields""" @@ -213,7 +201,7 @@ class TestDateTimeField(MongoDBTestCase): # make sure that passing a parsable datetime works dtd = DTDoc() dtd.date = date_str - assert isinstance(dtd.date, six.string_types) + assert isinstance(dtd.date, str) dtd.save() dtd.reload() diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index 44e628f6..f423bf8b 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from bson import InvalidDocument import pytest from mongoengine import * @@ -19,22 +20,24 @@ class TestDictField(MongoDBTestCase): post = BlogPost(info=info).save() assert get_as_pymongo(post) == {"_id": post.id, "info": info} - def test_general_things(self): - """Ensure that dict types work as expected.""" + def test_validate_invalid_type(self): + class BlogPost(Document): + info = DictField() + BlogPost.drop_collection() + + invalid_infos = ["my post", ["test", "test"], {1: "test"}] + for invalid_info in invalid_infos: + with pytest.raises(ValidationError): + BlogPost(info=invalid_info).validate() + + def test_keys_with_dots_or_dollars(self): class BlogPost(Document): info = DictField() BlogPost.drop_collection() post = BlogPost() - post.info = "my post" - with pytest.raises(ValidationError): - post.validate() - - post.info = ["test", "test"] - with pytest.raises(ValidationError): - post.validate() post.info = {"$title": "test"} with pytest.raises(ValidationError): @@ -48,25 +51,34 @@ class TestDictField(MongoDBTestCase): with pytest.raises(ValidationError): post.validate() - post.info = {1: "test"} - with pytest.raises(ValidationError): - post.validate() - post.info = {"nested": {"the.title": "test"}} if get_mongodb_version() < MONGODB_36: - with pytest.raises(ValidationError): - post.validate() + # MongoDB < 3.6 rejects dots + # To avoid checking the mongodb version from the DictField class + # we rely on MongoDB to reject the data during the save + post.validate() + with pytest.raises(InvalidDocument): + post.save() else: post.validate() post.info = {"dollar_and_dot": {"te$st.test": "test"}} if get_mongodb_version() < MONGODB_36: - with pytest.raises(ValidationError): - post.validate() + post.validate() + with pytest.raises(InvalidDocument): + post.save() else: post.validate() - post.info = {"title": "test"} + def test_general_things(self): + """Ensure that dict types work as expected.""" + + class BlogPost(Document): + info = DictField() + + BlogPost.drop_collection() + + post = BlogPost(info={"title": "test"}) post.save() post = BlogPost() diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index eeddac1e..13ca9c0b 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -75,7 +75,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: Person.objects(settings__notexist="bar").first() - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) 
== u'Cannot resolve field "notexist"' with pytest.raises(LookUpError): Person.objects.only("settings.notexist") @@ -111,7 +111,7 @@ class TestEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: assert Person.objects(settings__notexist="bar").first().id == p.id - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute assert Person.objects(settings__base_foo="basefoo").first().id == p.id @@ -319,7 +319,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: Person.objects(settings__notexist="bar").first() - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' with pytest.raises(LookUpError): Person.objects.only("settings.notexist") @@ -347,7 +347,7 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): # Test non exiting attribute with pytest.raises(InvalidQueryError) as exc_info: assert Person.objects(settings__notexist="bar").first().id == p.id - assert unicode(exc_info.value) == u'Cannot resolve field "notexist"' + assert str(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute assert Person.objects(settings__base_foo="basefoo").first().id == p.id diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index b8ece1a9..cbac9b69 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -3,14 +3,13 @@ import copy import os import tempfile import unittest +from io import BytesIO import gridfs import pytest -import six from mongoengine import * from mongoengine.connection import get_db -from mongoengine.python_support import StringIO try: from PIL import Image @@ -30,7 +29,7 @@ TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png") def get_file(path): """Use a BytesIO instead of a file to allow to have a one-liner and avoid that the file remains opened""" - bytes_io = StringIO() + bytes_io = BytesIO() with open(path, "rb") as f: bytes_io.write(f.read()) bytes_io.seek(0) @@ -58,7 +57,7 @@ class TestFileField(MongoDBTestCase): PutFile.drop_collection() - text = six.b("Hello, World!") + text = "Hello, World!".encode("latin-1") content_type = "text/plain" putfile = PutFile() @@ -80,7 +79,7 @@ class TestFileField(MongoDBTestCase): PutFile.drop_collection() putfile = PutFile() - putstring = StringIO() + putstring = BytesIO() putstring.write(text) putstring.seek(0) putfile.the_file.put(putstring, content_type=content_type) @@ -101,8 +100,8 @@ class TestFileField(MongoDBTestCase): StreamFile.drop_collection() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") content_type = "text/plain" streamfile = StreamFile() @@ -137,8 +136,8 @@ class TestFileField(MongoDBTestCase): StreamFile.drop_collection() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") streamfile = StreamFile() streamfile.save() @@ -167,8 +166,8 @@ class TestFileField(MongoDBTestCase): class SetFile(Document): the_file = FileField() - text = six.b("Hello, World!") - more_text = six.b("Foo Bar") + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") SetFile.drop_collection() @@ 
-196,7 +195,7 @@ class TestFileField(MongoDBTestCase): GridDocument.drop_collection() with tempfile.TemporaryFile() as f: - f.write(six.b("Hello World!")) + f.write("Hello World!".encode("latin-1")) f.flush() # Test without default @@ -213,7 +212,7 @@ class TestFileField(MongoDBTestCase): assert doc_b.the_file.grid_id == doc_c.the_file.grid_id # Test with default - doc_d = GridDocument(the_file=six.b("")) + doc_d = GridDocument(the_file="".encode("latin-1")) doc_d.save() doc_e = GridDocument.objects.with_id(doc_d.id) @@ -240,7 +239,7 @@ class TestFileField(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" - test_file.the_file.put(six.b("Hello, World!")) + test_file.the_file.put("Hello, World!".encode("latin-1")) test_file.save() # Second instance @@ -297,7 +296,9 @@ class TestFileField(MongoDBTestCase): test_file = TestFile() assert not bool(test_file.the_file) - test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain") + test_file.the_file.put( + "Hello, World!".encode("latin-1"), content_type="text/plain" + ) test_file.save() assert bool(test_file.the_file) @@ -319,7 +320,7 @@ class TestFileField(MongoDBTestCase): class TestFile(Document): the_file = FileField() - text = six.b("Hello, World!") + text = "Hello, World!".encode("latin-1") content_type = "text/plain" testfile = TestFile() @@ -363,7 +364,7 @@ class TestFileField(MongoDBTestCase): testfile.the_file.put(text, content_type=content_type, filename="hello") testfile.save() - text = six.b("Bonjour, World!") + text = "Bonjour, World!".encode("latin-1") testfile.the_file.replace(text, content_type=content_type, filename="hello") testfile.save() @@ -387,7 +388,7 @@ class TestFileField(MongoDBTestCase): TestImage.drop_collection() with tempfile.TemporaryFile() as f: - f.write(six.b("Hello World!")) + f.write("Hello World!".encode("latin-1")) f.flush() t = TestImage() @@ -503,21 +504,21 @@ class TestFileField(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" 
- test_file.the_file.put(six.b("Hello, World!"), name="hello.txt") + test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt") test_file.save() data = get_db("test_files").macumba.files.find_one() assert data.get("name") == "hello.txt" test_file = TestFile.objects.first() - assert test_file.the_file.read() == six.b("Hello, World!") + assert test_file.the_file.read() == "Hello, World!".encode("latin-1") test_file = TestFile.objects.first() - test_file.the_file = six.b("HELLO, WORLD!") + test_file.the_file = "HELLO, WORLD!".encode("latin-1") test_file.save() test_file = TestFile.objects.first() - assert test_file.the_file.read() == six.b("HELLO, WORLD!") + assert test_file.the_file.read() == "HELLO, WORLD!".encode("latin-1") def test_copyable(self): class PutFile(Document): @@ -525,7 +526,7 @@ class TestFileField(MongoDBTestCase): PutFile.drop_collection() - text = six.b("Hello, World!") + text = "Hello, World!".encode("latin-1") content_type = "text/plain" putfile = PutFile() diff --git a/tests/fields/test_float_field.py b/tests/fields/test_float_field.py index a1cd7a0a..839494a9 100644 --- a/tests/fields/test_float_field.py +++ b/tests/fields/test_float_field.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import pytest -import six from mongoengine import * @@ -52,9 +51,8 @@ class TestFloatField(MongoDBTestCase): big_person = BigPerson() - for value, value_type in enumerate(six.integer_types): - big_person.height = value_type(value) - big_person.validate() + big_person.height = int(0) + big_person.validate() big_person.height = 2 ** 500 big_person.validate() diff --git a/tests/fields/test_lazy_reference_field.py b/tests/fields/test_lazy_reference_field.py index b5b8690e..50e60262 100644 --- a/tests/fields/test_lazy_reference_field.py +++ b/tests/fields/test_lazy_reference_field.py @@ -152,7 +152,7 @@ class TestLazyReferenceField(MongoDBTestCase): LazyReference(BadDoc, animal.pk), ): with pytest.raises(ValidationError): - p = Ocurrence(person="test", animal=bad).save() + Ocurrence(person="test", animal=bad).save() def test_lazy_reference_query_conversion(self): """Ensure that LazyReferenceFields can be queried using objects and values @@ -386,7 +386,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): mineral = Mineral(name="Granite").save() occ_animal = Ocurrence(living_thing=animal, thing=animal).save() - occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save() + _ = Ocurrence(living_thing=vegetal, thing=vegetal).save() with pytest.raises(ValidationError): Ocurrence(living_thing=mineral).save() @@ -458,7 +458,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): baddoc = BadDoc().save() for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)): with pytest.raises(ValidationError): - p = Ocurrence(person="test", animal=bad).save() + Ocurrence(person="test", animal=bad).save() def test_generic_lazy_reference_query_conversion(self): class Member(Document): diff --git a/tests/fields/test_long_field.py b/tests/fields/test_long_field.py index da4f04c8..330051c3 100644 --- a/tests/fields/test_long_field.py +++ b/tests/fields/test_long_field.py @@ -1,11 +1,5 @@ -# -*- coding: utf-8 -*- +from bson.int64 import Int64 import pytest -import six - -try: - from bson.int64 import Int64 -except ImportError: - Int64 = long from mongoengine import * from mongoengine.connection import get_db @@ -28,7 +22,7 @@ class TestLongField(MongoDBTestCase): assert isinstance( db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64 ) - assert
isinstance(doc.some_long, six.integer_types) + assert isinstance(doc.some_long, int) def test_long_validation(self): """Ensure that invalid values cannot be assigned to long fields. diff --git a/tests/fields/test_sequence_field.py b/tests/fields/test_sequence_field.py index aa83f710..81d648fd 100644 --- a/tests/fields/test_sequence_field.py +++ b/tests/fields/test_sequence_field.py @@ -21,7 +21,7 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == range(1, 11) + assert ids == list(range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) assert c["next"] == 10 @@ -76,7 +76,7 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == range(1, 11) + assert ids == list(range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) assert c["next"] == 10 @@ -101,10 +101,10 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == range(1, 11) + assert ids == list(range(1, 11)) counters = [i.counter for i in Person.objects] - assert counters == range(1, 11) + assert counters == list(range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) assert c["next"] == 10 @@ -166,10 +166,10 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == range(1, 11) + assert ids == list(range(1, 11)) id = [i.id for i in Animal.objects] - assert id == range(1, 11) + assert id == list(range(1, 11)) c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) assert c["next"] == 10 @@ -193,7 +193,7 @@ class TestSequenceField(MongoDBTestCase): assert c["next"] == 10 ids = [i.id for i in Person.objects] - assert ids == map(str, range(1, 11)) + assert ids == [str(i) for i in range(1, 11)] c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) assert c["next"] == 10 @@ -267,12 +267,12 @@ class TestSequenceField(MongoDBTestCase): foo = Foo(name="Foo") foo.save() - assert not ( - "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") + assert "base.counter" not in self.db["mongoengine.counters"].find().distinct( + "_id" ) - assert ("foo.counter" and "bar.counter") in self.db[ - "mongoengine.counters" - ].find().distinct("_id") + existing_counters = self.db["mongoengine.counters"].find().distinct("_id") + assert "foo.counter" in existing_counters + assert "bar.counter" in existing_counters assert foo.counter == bar.counter assert foo._fields["counter"].owner_document == Foo assert bar._fields["counter"].owner_document == Bar diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index 948a4788..c449e467 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -2,7 +2,6 @@ import pytest from mongoengine import * - from tests.utils import MongoDBTestCase @@ -35,7 +34,7 @@ class TestURLField(MongoDBTestCase): with pytest.raises(ValidationError) as exc_info: link.validate() assert ( - unicode(exc_info.value) + str(exc_info.value) == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" ) diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index a8954526..6b6000c9 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -11,8 +11,6 @@ from pymongo.read_concern import ReadConcern from pymongo.read_preferences import 
ReadPreference from pymongo.results import UpdateResult import pytest -import six -from six import iteritems from mongoengine import * from mongoengine.connection import get_db @@ -111,7 +109,7 @@ class TestQueryset(unittest.TestCase): # Filter people by age people = self.Person.objects(age=20) assert people.count() == 1 - person = people.next() + person = next(people) assert person == user_a assert person.name == "User A" assert person.age == 20 @@ -119,7 +117,7 @@ class TestQueryset(unittest.TestCase): def test_limit(self): """Ensure that QuerySet.limit works as expected.""" user_a = self.Person.objects.create(name="User A", age=20) - user_b = self.Person.objects.create(name="User B", age=30) + _ = self.Person.objects.create(name="User B", age=30) # Test limit on a new queryset people = list(self.Person.objects.limit(1)) @@ -151,6 +149,11 @@ class TestQueryset(unittest.TestCase): user_b = self.Person.objects.create(name="User B", age=30) # Test skip on a new queryset + people = list(self.Person.objects.skip(0)) + assert len(people) == 2 + assert people[0] == user_a + assert people[1] == user_b + people = list(self.Person.objects.skip(1)) assert len(people) == 1 assert people[0] == user_b @@ -275,32 +278,47 @@ class TestQueryset(unittest.TestCase): with pytest.raises(InvalidQueryError): self.Person.objects(name="User A").with_id(person1.id) - def test_find_only_one(self): - """Ensure that a query using ``get`` returns at most one result. - """ + def test_get_no_document_exists_raises_doesnotexist(self): + assert self.Person.objects.count() == 0 # Try retrieving when no objects exists with pytest.raises(DoesNotExist): self.Person.objects.get() with pytest.raises(self.Person.DoesNotExist): self.Person.objects.get() + def test_get_multiple_match_raises_multipleobjectsreturned(self): + """Ensure that a query using ``get`` returns at most one result. + """ + assert self.Person.objects().count() == 0 + person1 = self.Person(name="User A", age=20) person1.save() - person2 = self.Person(name="User B", age=30) + + p = self.Person.objects.get() + assert p == person1 + + person2 = self.Person(name="User B", age=20) person2.save() - # Retrieve the first person from the database + person3 = self.Person(name="User C", age=30) + person3.save() + + # .get called without argument with pytest.raises(MultipleObjectsReturned): self.Person.objects.get() with pytest.raises(self.Person.MultipleObjectsReturned): self.Person.objects.get() + # check filtering + with pytest.raises(MultipleObjectsReturned): + self.Person.objects.get(age__lt=30) + with pytest.raises(MultipleObjectsReturned) as exc_info: + self.Person.objects(age__lt=30).get() + assert "2 or more items returned, instead of 1" == str(exc_info.value) + # Use a query to filter the people found to just person2 person = self.Person.objects.get(age=30) - assert person.name == "User B" - - person = self.Person.objects.get(age__lt=30) - assert person.name == "User A" + assert person == person3 def test_find_array_position(self): """Ensure that query by array position works. 
@@ -2574,13 +2592,8 @@ class TestQueryset(unittest.TestCase): age = IntField() with db_ops_tracker() as q: - adult1 = ( - User.objects.filter(age__gte=18).comment("looking for an adult").first() - ) - - adult2 = ( - User.objects.comment("looking for an adult").filter(age__gte=18).first() - ) + User.objects.filter(age__gte=18).comment("looking for an adult").first() + User.objects.comment("looking for an adult").filter(age__gte=18).first() ops = q.get_ops() assert len(ops) == 2 @@ -2769,7 +2782,7 @@ class TestQueryset(unittest.TestCase): ) # start a map/reduce - cursor.next() + next(cursor) results = Person.objects.map_reduce( map_f=map_person, @@ -4094,7 +4107,7 @@ class TestQueryset(unittest.TestCase): info = Comment.objects._collection.index_information() info = [ (value["key"], value.get("unique", False), value.get("sparse", False)) - for key, value in iteritems(info) + for key, value in info.items() ] assert ([("_cls", 1), ("message", 1)], False, False) in info @@ -4396,7 +4409,7 @@ class TestQueryset(unittest.TestCase): # Use a query to filter the people found to just person1 people = self.Person.objects(age=20).scalar("name") assert people.count() == 1 - person = people.next() + person = next(people) assert person == "User A" # Test limit @@ -4446,24 +4459,14 @@ class TestQueryset(unittest.TestCase): "A0" == "%s" % self.Person.objects.order_by("name").scalar("name").first() ) assert "A0" == "%s" % self.Person.objects.scalar("name").order_by("name")[0] - if six.PY3: - assert ( - "['A1', 'A2']" - == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] - ) - assert ( - "['A51', 'A52']" - == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] - ) - else: - assert ( - "[u'A1', u'A2']" - == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] - ) - assert ( - "[u'A51', u'A52']" - == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] - ) + assert ( + "['A1', 'A2']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] + ) + assert ( + "['A51', 'A52']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] + ) # with_id and in_bulk person = self.Person.objects.order_by("name").first() @@ -4471,10 +4474,7 @@ class TestQueryset(unittest.TestCase): pks = self.Person.objects.order_by("age").scalar("pk")[1:3] names = self.Person.objects.scalar("name").in_bulk(list(pks)).values() - if six.PY3: - expected = "['A1', 'A2']" - else: - expected = "[u'A1', u'A2']" + expected = "['A1', 'A2']" assert expected == "%s" % sorted(names) def test_fields(self): @@ -4519,7 +4519,7 @@ class TestQueryset(unittest.TestCase): foos_without_y = list(Foo.objects.order_by("y").fields(y=0)) - assert all(o.y is None for o in foos_with_x) + assert all(o.y is None for o in foos_without_y) foos_with_sliced_items = list(Foo.objects.order_by("y").fields(slice__items=1)) @@ -5403,7 +5403,7 @@ class TestQueryset(unittest.TestCase): if not test: raise AssertionError("Cursor has data and returned False") - queryset.next() + next(queryset) if not queryset: raise AssertionError( "Cursor has data and it must returns True, even in the last item." 
@@ -5636,7 +5636,7 @@ class TestQueryset(unittest.TestCase): self.Person.objects.create(name="Baz") assert self.Person.objects.count(with_limit_and_skip=True) == 3 - newPerson = self.Person.objects.create(name="Foo_1") + self.Person.objects.create(name="Foo_1") assert self.Person.objects.count(with_limit_and_skip=True) == 4 def test_no_cursor_timeout(self): diff --git a/tests/test_connection.py b/tests/test_connection.py index e40a6994..56bc22cd 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -348,7 +348,7 @@ class ConnectionTest(unittest.TestCase): def test_disconnect_cleans_cached_collection_attribute_in_document(self): """Ensure that the disconnect() method works properly""" - conn1 = connect("mongoenginetest") + connect("mongoenginetest") class History(Document): pass @@ -518,7 +518,7 @@ class ConnectionTest(unittest.TestCase): """Ensure connect() uses the username & password params if the URI doesn't explicitly specify them. """ - c = connect( + connect( host="mongodb://localhost/mongoenginetest", username="user", password="pass" ) @@ -632,7 +632,7 @@ class ConnectionTest(unittest.TestCase): """Ensure connect() works when specifying a replicaSet via the MongoDB URI. """ - c = connect(host="mongodb://localhost/test?replicaSet=local-rs") + connect(host="mongodb://localhost/test?replicaSet=local-rs") db = get_db() assert isinstance(db, pymongo.database.Database) assert db.name == "test" diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index f445cf57..8f3dd555 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -264,7 +264,7 @@ class TestContextManagers: def test_query_counter_does_not_swallow_exception(self): with pytest.raises(TypeError): - with query_counter() as q: + with query_counter(): raise TypeError() def test_query_counter_temporarily_modifies_profiling_level(self): @@ -274,12 +274,12 @@ class TestContextManagers: initial_profiling_level = db.profiling_level() try: - NEW_LEVEL = 1 - db.set_profiling_level(NEW_LEVEL) - assert db.profiling_level() == NEW_LEVEL - with query_counter() as q: + new_level = 1 + db.set_profiling_level(new_level) + assert db.profiling_level() == new_level + with query_counter(): assert db.profiling_level() == 2 - assert db.profiling_level() == NEW_LEVEL + assert db.profiling_level() == new_level except Exception: db.set_profiling_level( initial_profiling_level diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index 7b5d7d11..6d432e32 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,7 +1,6 @@ import unittest import pytest -from six import iterkeys from mongoengine import Document from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict @@ -287,7 +286,7 @@ class TestBaseList: base_list[:] = [ 0, 1, - ] # Will use __setslice__ under py2 and __setitem__ under py3 + ] assert base_list._instance._changed_fields == ["my_name"] assert base_list == [0, 1] @@ -296,13 +295,13 @@ class TestBaseList: base_list[0:2] = [ 1, 0, - ] # Will use __setslice__ under py2 and __setitem__ under py3 + ] assert base_list._instance._changed_fields == ["my_name"] assert base_list == [1, 0, 2] def test___setitem___calls_with_step_slice_mark_as_changed(self): base_list = self._get_baselist([0, 1, 2]) - base_list[0:3:2] = [-1, -2] # uses __setitem__ in both py2 & 3 + base_list[0:3:2] = [-1, -2] # uses __setitem__ assert base_list._instance._changed_fields == ["my_name"] assert base_list == [-1, 1, -2] @@ -372,7 +371,7 @@ class 
TestStrictDict(unittest.TestCase): def test_iterkeys(self): d = self.dtype(a=1) - assert list(iterkeys(d)) == ["a"] + assert list(d.keys()) == ["a"] def test_len(self): d = self.dtype(a=1) diff --git a/tests/test_dereference.py b/tests/test_dereference.py index b9d92883..0f9f412c 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -2,10 +2,8 @@ import unittest from bson import DBRef, ObjectId -from six import iteritems from mongoengine import * -from mongoengine.connection import get_db from mongoengine.context_managers import query_counter @@ -739,7 +737,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, User) # Document select_related @@ -752,7 +750,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, User) # Queryset select_related @@ -766,7 +764,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, User) User.drop_collection() @@ -820,7 +818,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ # Document select_related @@ -836,7 +834,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ # Queryset select_related @@ -853,7 +851,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ Group.objects.delete() @@ -910,7 +908,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, UserA) # Document select_related @@ -926,7 +924,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, UserA) # Queryset select_related @@ -943,7 +941,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 2 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert isinstance(m, UserA) UserA.drop_collection() @@ -997,7 +995,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ # Document select_related @@ -1013,7 +1011,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ # Queryset select_related @@ -1030,7 +1028,7 @@ class FieldTest(unittest.TestCase): [m for m in group_obj.members] assert q == 4 - for k, m in iteritems(group_obj.members): + for k, m in group_obj.members.items(): assert "User" in m.__class__.__name__ Group.objects.delete() diff --git a/tests/test_signals.py 
b/tests/test_signals.py index d79eaf75..64976e25 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -58,7 +58,9 @@ class TestSignal(unittest.TestCase): @classmethod def post_save(cls, sender, document, **kwargs): - dirty_keys = document._delta()[0].keys() + document._delta()[1].keys() + dirty_keys = list(document._delta()[0].keys()) + list( + document._delta()[1].keys() + ) signal_output.append("post_save signal, %s" % document) signal_output.append("post_save dirty keys, %s" % dirty_keys) if kwargs.pop("created", False): @@ -265,7 +267,7 @@ class TestSignal(unittest.TestCase): a = self.Author(name="Bill Shakespeare") a.save() self.get_signal_output(lambda: None) # eliminate signal output - a1 = self.Author.objects(name="Bill Shakespeare")[0] + _ = self.Author.objects(name="Bill Shakespeare")[0] assert self.get_signal_output(create_author) == [ "pre_init signal, Author", diff --git a/tox.ini b/tox.ini index 396817ca..675b6d9a 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py35,pypy,pypy3}-{mg34,mg36,mg39,mg310} +envlist = {py35,pypy3}-{mg34,mg36,mg39,mg310} [testenv] commands =