diff --git a/.gitignore b/.gitignore index 048a2d19..16633bae 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,15 @@ -.* !.gitignore *~ *.py[co] .*.sw[po] +.cache/ +.coverage +.coveragerc +.env +.idea/ +.pytest_cache/ +.tox/ +.eggs/ *.egg docs/.build docs/_build @@ -13,8 +20,6 @@ env/ .settings .project .pydevproject -tests/test_bugfix.py htmlcov/ venv venv3 -scratchpad diff --git a/.landscape.yml b/.landscape.yml index a27bbb03..4f13a5eb 100644 --- a/.landscape.yml +++ b/.landscape.yml @@ -5,17 +5,12 @@ pylint: options: additional-builtins: - # add xrange and long as valid built-ins. In Python 3, xrange is - # translated into range and long is translated into int via 2to3 (see - # "use_2to3" in setup.py). This should be removed when we drop Python - # 2 support (which probably won't happen any time soon). - - xrange + # add long as valid built-ins. - long pyflakes: disable: - # undefined variables are already covered by pylint (and exclude - # xrange & long) + # undefined variables are already covered by pylint (and exclude long) - F821 ignore-paths: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..e11640b8 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,12 @@ +fail_fast: false +repos: + - repo: https://github.com/ambv/black + rev: 19.10b0 + hooks: + - id: black + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.0a2 + hooks: + - id: flake8 + additional_dependencies: + - flake8-import-order diff --git a/.travis.yml b/.travis.yml index 7bbeef8b..c5b37b6f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,13 +1,10 @@ # For full coverage, we'd have to test all supported Python, MongoDB, and # PyMongo combinations. However, that would result in an overly long build # with a very large number of jobs, hence we only test a subset of all the -# combinations: -# * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, -# tested against Python v2.7, v3.5, v3.6, and PyPy. 
-# * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo -# combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. -# * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. -# +# combinations. +# * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, +# Other combinations are tested. See below for the details or check the travis jobs + # We should periodically check MongoDB Server versions supported by MongoDB # Inc., add newly released versions to the test matrix, and remove versions # which have reached their End of Life. See: @@ -16,65 +13,69 @@ # # Reminder: Update README.rst if you change MongoDB versions we test. - language: python +dist: xenial python: -- 2.7 - 3.5 - 3.6 -- pypy - -dist: xenial +- 3.7 +- 3.8 +- pypy3 env: global: - - MONGODB_3_4=3.4.17 - - MONGODB_3_6=3.6.12 + - MONGODB_3_4=3.4.19 + - MONGODB_3_6=3.6.13 + - MONGODB_4_0=4.0.13 + + - PYMONGO_3_4=3.4 + - PYMONGO_3_6=3.6 + - PYMONGO_3_9=3.9 + - PYMONGO_3_11=3.11 + + - MAIN_PYTHON_VERSION=3.7 matrix: - - MONGODB=${MONGODB_3_4} PYMONGO=3.x + - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11} matrix: - # Finish the build as soon as one job fails fast_finish: true include: - - python: 2.7 - env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x - - python: 3.6 - env: MONGODB=${MONGODB_3_6} PYMONGO=3.x - python: 3.7 - env: MONGODB=${MONGODB_3_6} PYMONGO=3.x - + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} + - python: 3.7 + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} + - python: 3.7 + env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11} + - python: 3.8 + env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11} install: # Install Mongo - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version - # Install python dependencies + # Install Python dependencies. 
- pip install --upgrade pip - pip install coveralls - - pip install flake8 flake8-import-order - - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0 - - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) - # Install the tox venv - - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test + - pip install pre-commit + - pip install tox + # tox dryrun to setup the tox venv (we run a mock test). + - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" before_script: - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork - - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for py27 + # Run pre-commit hooks (black, flake8, etc) on entire codebase + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only runs on py37"; fi - mongo --eval 'db.version();' # Make sure mongo is awake script: - - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage + - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" -# For now only submit coveralls for Python v2.7. Python v3.x currently shows -# 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible -# code in a separate dir and runs tests on that. 
after_success: -- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi + - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi notifications: irc: irc.freenode.org#mongoengine @@ -96,11 +97,11 @@ deploy: distributions: "sdist bdist_wheel" # Only deploy on tagged commits (aka GitHub releases) and only for the parent - # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4. + # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4. # We run Travis against many different Python, PyMongo, and MongoDB versions # and we don't want the deploy to occur multiple times). on: tags: true repo: MongoEngine/mongoengine - condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4) - python: 2.7 + condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4}) + python: 3.7 diff --git a/AUTHORS b/AUTHORS index 45a754cc..02e43955 100644 --- a/AUTHORS +++ b/AUTHORS @@ -252,3 +252,8 @@ that much better: * Paulo Amaral (https://github.com/pauloAmaral) * Gaurav Dadhania (https://github.com/GVRV) * Yurii Andrieiev (https://github.com/yandrieiev) + * Filip Kucharczyk (https://github.com/Pacu2) + * Eric Timmons (https://github.com/daewok) + * Matthew Simpson (https://github.com/mcsimps2) + * Leonardo Domingues (https://github.com/leodmgs) + * Agustin Barto (https://github.com/abarto) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f7b15c85..035ae07a 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -20,23 +20,43 @@ post to the `user group ` Supported Interpreters ---------------------- -MongoEngine supports CPython 2.7 and newer. Language -features not supported by all interpreters can not be used. -The codebase is written in python 2 so you must be using python 2 -when developing new features. Compatibility of the library with Python 3 -relies on the 2to3 package that gets executed as part of the installation -build. 
You should ensure that your code is properly converted by -`2to3 `_. +MongoEngine supports CPython 3.5 and newer as well as Pypy3. +Language features not supported by all interpreters can not be used. + +Python3 codebase +---------------------- + +Since 0.20, the codebase is exclusively Python 3. + +Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs. +Travis runs the tests against the main Python 3.x versions. + Style Guide ----------- -MongoEngine aims to follow `PEP8 `_ -including 4 space indents. When possible we try to stick to 79 character line -limits. However, screens got bigger and an ORM has a strong focus on -readability and if it can help, we accept 119 as maximum line length, in a -similar way as `django does -`_ +MongoEngine's codebase is formatted with `black `_, other tools like +flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly. + +To install all development tools, simply run the following commands: + +.. code-block:: console + + $ python -m pip install -r requirements-dev.txt + + +You can install `pre-commit `_ into your git hooks, +to automatically check and fix any formatting issue before creating a +git commit. + +To enable ``pre-commit`` simply run: + +.. code-block:: console + + $ pre-commit install + +See the ``.pre-commit-config.yaml`` configuration file for more information +on how it works. Testing ------- @@ -58,7 +78,7 @@ General Guidelines should adapt to the breaking change in docs/upgrade.rst. - Write inline documentation for new classes and methods. - Write tests and make sure they pass (make sure you have a mongod - running on the default port, then execute ``python setup.py nosetests`` + running on the default port, then execute ``python setup.py test`` from the cmd line to run the test suite). - Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. 
You can test various Python and PyMongo versions locally by executing diff --git a/README.rst b/README.rst index 679980f8..aca8edc0 100644 --- a/README.rst +++ b/README.rst @@ -26,15 +26,15 @@ an `API reference `_. Supported MongoDB Versions ========================== -MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions +MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions should be supported as well, but aren't actively tested at the moment. Make sure to open an issue or submit a pull request if you experience any problems -with MongoDB version > 3.6. +with MongoDB version > 4.0. Installation ============ We recommend the use of `virtualenv `_ and of -`pip `_. You can then use ``pip install -U mongoengine``. +`pip `_. You can then use ``python -m pip install -U mongoengine``. You may also have `setuptools `_ and thus you can use ``easy_install -U mongoengine``. Another option is `pipenv `_. You can then use ``pipenv install mongoengine`` @@ -42,13 +42,14 @@ to both create the virtual environment and install the package. Otherwise, you c download the source from `GitHub `_ and run ``python setup.py install``. +The support for Python2 was dropped with MongoEngine 0.20.0 + Dependencies ============ -All of the dependencies can easily be installed via `pip `_. +All of the dependencies can easily be installed via `python -m pip `_. At the very least, you'll need these two packages to use MongoEngine: - pymongo>=3.4 -- six>=1.10.0 If you utilize a ``DateTimeField``, you might also use a more flexible date parser: @@ -58,6 +59,10 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``: - Pillow>=2.0.0 +If you need to use signals: + +- blinker>=1.3 + Examples ======== Some simple examples of what MongoEngine code looks like: @@ -91,12 +96,11 @@ Some simple examples of what MongoEngine code looks like: # Iterate over all posts using the BlogPost superclass >>> for post in BlogPost.objects: - ... 
print '===', post.title, '===' + ... print('===', post.title, '===') ... if isinstance(post, TextPost): - ... print post.content + ... print(post.content) ... elif isinstance(post, LinkPost): - ... print 'Link:', post.url - ... print + ... print('Link:', post.url) ... # Count all blog posts and its subtypes @@ -116,7 +120,8 @@ Some simple examples of what MongoEngine code looks like: Tests ===== To run the test suite, ensure you are running a local instance of MongoDB on -the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``. +the standard port and have ``pytest`` installed. Then, run ``python setup.py test`` +or simply ``pytest``. To run the test suite on every supported Python and PyMongo version, you can use ``tox``. You'll need to make sure you have each supported Python version @@ -125,20 +130,18 @@ installed in your environment and then: .. code-block:: shell # Install tox - $ pip install tox + $ python -m pip install tox # Run the test suites $ tox -If you wish to run a subset of tests, use the nosetests convention: +If you wish to run a subset of tests, use the pytest convention: .. 
code-block:: shell # Run all the tests in a particular test file - $ python setup.py nosetests --tests tests/fields/fields.py + $ pytest tests/fields/test_fields.py # Run only particular test class in that file - $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest - # Use the -s option if you want to print some debug statements or use pdb - $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s + $ pytest tests/fields/test_fields.py::TestField Community ========= diff --git a/benchmarks/test_basic_doc_ops.py b/benchmarks/test_basic_doc_ops.py index 06f0538b..e840f97a 100644 --- a/benchmarks/test_basic_doc_ops.py +++ b/benchmarks/test_basic_doc_ops.py @@ -1,11 +1,18 @@ from timeit import repeat import mongoengine -from mongoengine import (BooleanField, Document, EmailField, EmbeddedDocument, - EmbeddedDocumentField, IntField, ListField, - StringField) +from mongoengine import ( + BooleanField, + Document, + EmailField, + EmbeddedDocument, + EmbeddedDocumentField, + IntField, + ListField, + StringField, +) -mongoengine.connect(db='mongoengine_benchmark_test') +mongoengine.connect(db="mongoengine_benchmark_test") def timeit(f, n=10000): @@ -24,46 +31,41 @@ def test_basic(): def init_book(): return Book( - name='Always be closing', + name="Always be closing", pages=100, - tags=['self-help', 'sales'], + tags=["self-help", "sales"], is_published=True, - author_email='alec@example.com', + author_email="alec@example.com", ) - print('Doc initialization: %.3fus' % (timeit(init_book, 1000) * 10**6)) + print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6)) b = init_book() - print('Doc getattr: %.3fus' % (timeit(lambda: b.name, 10000) * 10**6)) + print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6)) print( - 'Doc setattr: %.3fus' % ( - timeit(lambda: setattr(b, 'name', 'New name'), 10000) * 10**6 - ) + "Doc setattr: %.3fus" + % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6) ) - print('Doc 
to mongo: %.3fus' % (timeit(b.to_mongo, 1000) * 10**6)) + print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6)) - print('Doc validation: %.3fus' % (timeit(b.validate, 1000) * 10**6)) + print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6)) def save_book(): - b._mark_as_changed('name') - b._mark_as_changed('tags') + b._mark_as_changed("name") + b._mark_as_changed("tags") b.save() - print('Save to database: %.3fus' % (timeit(save_book, 100) * 10**6)) + print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6)) son = b.to_mongo() print( - 'Load from SON: %.3fus' % ( - timeit(lambda: Book._from_son(son), 1000) * 10**6 - ) + "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6) ) print( - 'Load from database: %.3fus' % ( - timeit(lambda: Book.objects[0], 100) * 10**6 - ) + "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6) ) def create_and_delete_book(): @@ -72,9 +74,8 @@ def test_basic(): b.delete() print( - 'Init + save to database + delete: %.3fms' % ( - timeit(create_and_delete_book, 10) * 10**3 - ) + "Init + save to database + delete: %.3fms" + % (timeit(create_and_delete_book, 10) * 10 ** 3) ) @@ -92,42 +93,36 @@ def test_big_doc(): def init_company(): return Company( - name='MongoDB, Inc.', + name="MongoDB, Inc.", contacts=[ - Contact( - name='Contact %d' % x, - title='CEO', - address='Address %d' % x, - ) + Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x) for x in range(1000) - ] + ], ) company = init_company() - print('Big doc to mongo: %.3fms' % (timeit(company.to_mongo, 100) * 10**3)) + print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3)) - print('Big doc validation: %.3fms' % (timeit(company.validate, 1000) * 10**3)) + print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3)) company.save() def save_company(): - company._mark_as_changed('name') - company._mark_as_changed('contacts') + 
company._mark_as_changed("name") + company._mark_as_changed("contacts") company.save() - print('Save to database: %.3fms' % (timeit(save_company, 100) * 10**3)) + print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3)) son = company.to_mongo() print( - 'Load from SON: %.3fms' % ( - timeit(lambda: Company._from_son(son), 100) * 10**3 - ) + "Load from SON: %.3fms" + % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3) ) print( - 'Load from database: %.3fms' % ( - timeit(lambda: Company.objects[0], 100) * 10**3 - ) + "Load from database: %.3fms" + % (timeit(lambda: Company.objects[0], 100) * 10 ** 3) ) def create_and_delete_company(): @@ -136,13 +131,12 @@ def test_big_doc(): c.delete() print( - 'Init + save to database + delete: %.3fms' % ( - timeit(create_and_delete_company, 10) * 10**3 - ) + "Init + save to database + delete: %.3fms" + % (timeit(create_and_delete_company, 10) * 10 ** 3) ) -if __name__ == '__main__': +if __name__ == "__main__": test_basic() - print('-' * 100) + print("-" * 100) test_big_doc() diff --git a/benchmarks/test_inserts.py b/benchmarks/test_inserts.py index 8113d988..4ecd48de 100644 --- a/benchmarks/test_inserts.py +++ b/benchmarks/test_inserts.py @@ -4,12 +4,14 @@ import timeit def main(): setup = """ from pymongo import MongoClient + connection = MongoClient() connection.drop_database('mongoengine_benchmark_test') """ stmt = """ from pymongo import MongoClient + connection = MongoClient() db = connection.mongoengine_benchmark_test @@ -26,10 +28,10 @@ myNoddys = noddy.find() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('PyMongo: Creating 10000 dictionaries.') + print("-" * 100) + print("PyMongo: Creating 10000 dictionaries.") t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ from pymongo import MongoClient, WriteConcern @@ -49,13 +51,14 @@ myNoddys = noddy.find() [n for n in myNoddys] # iterate """ - print('-' * 100) + print("-" 
* 100) print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).') t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) setup = """ from pymongo import MongoClient + connection = MongoClient() connection.drop_database('mongoengine_benchmark_test') connection.close() @@ -78,10 +81,10 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('MongoEngine: Creating 10000 dictionaries.') + print("-" * 100) + print("MongoEngine: Creating 10000 dictionaries.") t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ for i in range(10000): @@ -96,10 +99,10 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('MongoEngine: Creating 10000 dictionaries (using a single field assignment).') + print("-" * 100) + print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).") t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ for i in range(10000): @@ -112,10 +115,10 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) + print("-" * 100) print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).') t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ for i in range(10000): @@ -128,10 +131,12 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).') + print("-" * 100) + print( + 'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).' 
+ ) t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) stmt = """ for i in range(10000): @@ -144,10 +149,12 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print('-' * 100) - print('MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).') + print("-" * 100) + print( + 'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).' + ) t = timeit.Timer(stmt=stmt, setup=setup) - print('{}s'.format(t.timeit(1))) + print("{}s".format(t.timeit(1))) if __name__ == "__main__": diff --git a/docs/changelog.rst b/docs/changelog.rst index e82cc124..f616f4a6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -6,208 +6,264 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). +- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count + and Cursor.count that got deprecated in pymongo >= 3.7. + This should have a negative impact on performance of count see Issue #2219 +- Fix a bug that made the queryset drop the read_preference after clone(). 
+- Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311 +- Bug fix in ListField when updating the first item, it was saving the whole list, instead of + just replacing the first item (as it's usually done) #2392 + +Changes in 0.20.0 +================= +- ATTENTION: Drop support for Python2 +- Add Mongo 4.0 to Travis +- Fix error when setting a string as a ComplexDateTimeField #2253 +- Bump development Status classifier to Production/Stable #2232 +- Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630 +- Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 +- Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 +- DictField validate failed without default connection (bug introduced in 0.19.0) #2239 +- Remove methods that were deprecated years ago: + - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field + - Queryset.slave_okay() was deprecated since pymongo3 + - dropDups was dropped with MongoDB3 + - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes`` +- Added pre-commit for development/CI #2212 +- Renamed requirements-lint.txt to requirements-dev.txt #2212 +- Support for setting ReadConcern #2255 + +Changes in 0.19.1 +================= +- Tests require Pillow < 7.0.0 as it dropped Python2 support +- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of + pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 + +Changes in 0.19.0 +================= +- BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. 
#2112 + - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. + - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. + - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``. +- BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113 +- BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111 + - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. +- BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 + - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. +- BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182 +- DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210 + - Added ability to check if Q or QNode are empty by parsing them to bool. + - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. +- Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 +- Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148 +- ``ListField`` now accepts an optional ``max_length`` parameter. 
#2110 +- Improve error message related to InvalidDocumentError #2180 +- Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152 +- Added ability to compare Q and Q operations #2204 +- Added ability to use a db alias on query_counter #2194 +- Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024 +- Fix updates of a list field by negative index #2094 +- Switch from nosetest to pytest as test runner #2114 +- The codebase is now formatted using ``black``. #2109 +- Documentation improvements: + - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. + +Changes in 0.18.2 +================= +- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097 +- Various code clarity and documentation improvements. Changes in 0.18.1 ================= -- Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields - instead of updating only the modified fields. This bug only occurs when using custom pk #2082 -- Add Python 3.7 in travis #2058 +- Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 +- Add Python 3.7 to Travis CI. #2058 Changes in 0.18.0 ================= - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. -- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6 (#2017 #2066). -- Improve performance by avoiding a call to `to_mongo` in `Document.save()` #2049 +- MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066 +- Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. 
#2049 - Connection/disconnection improvements: - - Expose `mongoengine.connection.disconnect` and `mongoengine.connection.disconnect_all` - - Fix disconnecting #566 #1599 #605 #607 #1213 #565 - - Improve documentation of `connect`/`disconnect` - - Fix issue when using multiple connections to the same mongo with different credentials #2047 - - `connect` fails immediately when db name contains invalid characters #2031 #1718 -- Fix the default write concern of `Document.save` that was overwriting the connection write concern #568 -- Fix querying on `List(EmbeddedDocument)` subclasses fields #1961 #1492 -- Fix querying on `(Generic)EmbeddedDocument` subclasses fields #475 -- Fix `QuerySet.aggregate` so that it takes limit and skip value into account #2029 -- Generate unique indices for `SortedListField` and `EmbeddedDocumentListFields` #2020 -- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e `validation` parameter of a `Field`). It is now expected to raise a `ValidationError` instead of returning True/False #2050 -- BREAKING CHANGES (associated with connect/disconnect fixes): - - Calling `connect` 2 times with the same alias and different parameter will raise an error (should call `disconnect` first). - - `disconnect` now clears `mongoengine.connection._connection_settings`. - - `disconnect` now clears the cached attribute `Document._collection`. -- BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` is no longier exist #1552 + - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``. + - Fix disconnecting. #566 #1599 #605 #607 #1213 #565 + - Improve documentation of ``connect``/``disconnect``. + - Fix issue when using multiple connections to the same mongo with different credentials. #2047 + - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718 +- Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. 
#568 +- Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492 +- Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475 +- Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029 +- Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020 +- BREAKING CHANGE: Changed the behavior of a custom field validator (i.e ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050 +- BREAKING CHANGES (associated with connection/disconnection fixes): + - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first). + - ``disconnect`` now clears ``mongoengine.connection._connection_settings``. + - ``disconnect`` now clears the cached attribute ``Document._collection``. +- BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552 Changes in 0.17.0 ================= -- Fix .only() working improperly after using .count() of the same instance of QuerySet -- Fix batch_size that was not copied when cloning a queryset object #2011 -- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (_cls, _id) when using `QuerySet.as_pymongo` #1976 -- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time (#1995) -- Fix InvalidStringData error when using modify on a BinaryField #1127 -- DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of mongoengine #1552 -- Fix test suite and CI to support MongoDB 3.4 #1445 -- Fix reference fields querying the database on each access if value contains orphan DBRefs +- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. 
#1976 +- Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995 +- DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 +- Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``. +- Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011 +- Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127 +- Fix test suite and CI to support MongoDB v3.4. #1445 +- Fix reference fields querying the database on each access if value contains orphan DBRefs. -================= Changes in 0.16.3 ================= -- Fix $push with $position operator not working with lists in embedded document #1965 +- Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965 -================= Changes in 0.16.2 ================= -- Fix .save() that fails when called with write_concern=None (regression of 0.16.1) #1958 +- Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958 -================= Changes in 0.16.1 ================= -- Fix `_cls` that is not set properly in Document constructor (regression) #1950 -- Fix bug in _delta method - Update of a ListField depends on an unrelated dynamic field update #1733 -- Remove deprecated `save()` method and used `insert_one()` #1899 +- Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950 +- Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733 +- Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. 
#1899 -================= Changes in 0.16.0 ================= -- Various improvements to the doc -- Improvement to code quality - POTENTIAL BREAKING CHANGES: - - EmbeddedDocumentField will no longer accept references to Document classes in its constructor #1661 - - Get rid of the `basecls` parameter from the DictField constructor (dead code) #1876 - - default value of ComplexDateTime is now None (and no longer the current datetime) #1368 -- Fix unhashable TypeError when referencing a Document with a compound key in an EmbeddedDocument #1685 -- Fix bug where an EmbeddedDocument with the same id as its parent would not be tracked for changes #1768 -- Fix the fact that bulk `insert()` was not setting primary keys of inserted documents instances #1919 -- Fix bug when referencing the abstract class in a ReferenceField #1920 -- Allow modification to the document made in pre_save_post_validation to be taken into account #1202 -- Replaced MongoDB 2.4 tests in CI by MongoDB 3.2 #1903 -- Fix side effects of using queryset.`no_dereference` on other documents #1677 -- Fix TypeError when using lazy django translation objects as translated choices #1879 -- Improve 2-3 codebase compatibility #1889 -- Fix the support for changing the default value of ComplexDateTime #1368 -- Improves error message in case an EmbeddedDocumentListField receives an EmbeddedDocument instance - instead of a list #1877 -- Fix the Decimal operator inc/dec #1517 #1320 -- Ignore killcursors queries in `query_counter` context manager #1869 -- Fix the fact that `query_counter` was modifying the initial profiling_level in case it was != 0 #1870 -- Repaired the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions #1865 -- Fix index creation error that was swallowed by hasattr under python2 #1688 -- QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611 -- bulk insert updates the ids of the input documents instances #1919 -- Fix an 
harmless bug related to GenericReferenceField where modifications in the generic-referenced document - were tracked in the parent #1934 -- Improve validator of BinaryField #273 -- Implemented lazy regex compiling in Field classes to improve 'import mongoengine' performance #1806 -- Updated GridFSProxy.__str__ so that it would always print both the filename and grid_id #710 -- Add __repr__ to Q and QCombination #1843 -- fix bug in BaseList.__iter__ operator (was occuring when modifying a BaseList while iterating over it) #1676 -- Added field `DateField`#513 + - ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661 + - Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876 + - Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368 +- Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685 +- Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768 +- Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919 +- Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920 +- Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202 +- Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 +- Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677 +- Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879 +- Improve Python 2-3 codebase compatibility. #1889 +- Fix support for changing the default value of the ``ComplexDateTime`` field. #1368 +- Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877 +- Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. 
#1517 #1320 +- Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869 +- Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870 +- Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865 +- Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688 +- ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611 +- Bulk insert updates the IDs of the input document instances. #1919 +- Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934 +- Improve validation of the ``BinaryField``. #273 +- Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806 +- Update ``GridFSProxy.__str__`` so that it would always print both the filename and grid_id. #710 +- Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843 +- Fix bug in the ``BaseList.__iter__`` operator (was occurring when modifying a BaseList while iterating over it). #1676 +- Add a ``DateField``. #513 +- Various improvements to the documentation. +- Various code quality improvements. 
Changes in 0.15.3 ================= -- BREAKING CHANGES: `Queryset.update/update_one` methods now returns an UpdateResult when `full_result=True` is provided and no longer a dict (relates to #1491) -- Subfield resolve error in generic_emdedded_document query #1651 #1652 -- use each modifier only with $position #1673 #1675 -- Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704 -- Fix validation error instance in GenericEmbeddedDocumentField #1067 -- Update cached fields when fields argument is given #1712 -- Add a db parameter to register_connection for compatibility with connect -- Use insert_one, insert_many in Document.insert #1491 -- Use new update_one, update_many on document/queryset update #1491 -- Use insert_one, insert_many in Document.insert #1491 -- Fix reload(fields) affect changed fields #1371 -- Fix Read-only access to database fails when trying to create indexes #1338 +- ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491 +- Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704 +- Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652 +- Use each modifier only with ``$position``. #1673 #1675 +- Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067 +- Update cached fields when a ``fields`` argument is given. #1712 +- Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``. +- Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491 +- Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491 +- Fix how ``reload(fields)`` affects changed fields. #1371 +- Fix a bug where the read-only access to the database fails when trying to create indexes. 
#1338 Changes in 0.15.0 ================= -- Add LazyReferenceField and GenericLazyReferenceField to address #1230 +- Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230 Changes in 0.14.1 ================= -- Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630 -- Added support for the `$position` param in the `$push` operator #1566 -- Fixed `DateTimeField` interpreting an empty string as today #1533 -- Added a missing `__ne__` method to the `GridFSProxy` class #1632 -- Fixed `BaseQuerySet._fields_to_db_fields` #1553 +- Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630 +- Add support for the ``$position`` param in the ``$push`` operator. #1566 +- Fix ``DateTimeField`` interpreting an empty string as today. #1533 +- Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632 +- Fix ``BaseQuerySet._fields_to_db_fields``. #1553 Changes in 0.14.0 ================= -- BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549 -- POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528 -- Improved code quality #1531, #1540, #1541, #1547 +- BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549 +- POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528 +- Improve code quality. #1531, #1540, #1541, #1547 Changes in 0.13.0 ================= -- POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see - docs/upgrade.rst for details. +- POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details. 
Changes in 0.12.0 ================= -- POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 -- POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 -- Fixed the way `Document.objects.create` works with duplicate IDs #1485 -- Fixed connecting to a replica set with PyMongo 2.x #1436 -- Fixed using sets in field choices #1481 -- Fixed deleting items from a `ListField` #1318 -- Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 -- Fixed behavior of a `dec` update operator #1450 -- Added a `rename` update operator #1454 -- Added validation for the `db_field` parameter #1448 -- Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440 -- Fixed the error message displayed when validating unicode URLs #1486 -- Raise an error when trying to save an abstract document #1449 +- POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476 +- POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476 +- Fix the way ``Document.objects.create`` works with duplicate IDs. #1485 +- Fix connecting to a replica set with PyMongo 2.x. #1436 +- Fix using sets in field choices. #1481 +- Fix deleting items from a ``ListField``. #1318 +- Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237 +- Fix behavior of a ``dec`` update operator. #1450 +- Add a ``rename`` update operator. #1454 +- Add validation for the ``db_field`` parameter. #1448 +- Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440 +- Fix the error message displayed when validating Unicode URLs. #1486 +- Raise an error when trying to save an abstract document. 
#1449 Changes in 0.11.0 ================= -- BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 -- BREAKING CHANGE: Dropped Python 2.6 support. #1428 -- BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 -- BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 -- Fixed absent rounding for DecimalField when `force_string` is set. #1103 +- BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428 +- BREAKING CHANGE: Drop Python v2.6 support. #1428 +- BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428 +- BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334 +- Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103 Changes in 0.10.8 ================= -- Added support for QuerySet.batch_size (#1426) -- Fixed query set iteration within iteration #1427 -- Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421 -- Added ability to filter the generic reference field by ObjectId and DBRef #1425 -- Fixed delete cascade for models with a custom primary key field #1247 -- Added ability to specify an authentication mechanism (e.g. X.509) #1333 -- Added support for falsey primary keys (e.g. 
doc.pk = 0) #1354 -- Fixed QuerySet#sum/average for fields w/ explicit db_field #1417 -- Fixed filtering by embedded_doc=None #1422 -- Added support for cursor.comment #1420 -- Fixed doc.get__display #1419 -- Fixed __repr__ method of the StrictDict #1424 -- Added a deprecation warning for Python 2.6 +- Add support for ``QuerySet.batch_size``. (#1426) +- Fix a query set iteration within an iteration. #1427 +- Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421 +- Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425 +- Fix cascading deletes for models with a custom primary key field. #1247 +- Add ability to specify an authentication mechanism (e.g. X.509). #1333 +- Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354 +- Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417 +- Fix filtering by ``embedded_doc=None``. #1422 +- Add support for ``Cursor.comment``. #1420 +- Fix ``doc.get__display`` methods. #1419 +- Fix the ``__repr__`` method of the ``StrictDict`` #1424 +- Add a deprecation warning for Python v2.6. Changes in 0.10.7 ================= -- Dropped Python 3.2 support #1390 -- Fixed the bug where dynamic doc has index inside a dict field #1278 -- Fixed: ListField minus index assignment does not work #1128 -- Fixed cascade delete mixing among collections #1224 -- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206 -- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set. -- count on ListField of EmbeddedDocumentField fails. #1187 -- Fixed long fields stored as int32 in Python 3. #1253 -- MapField now handles unicodes keys correctly. #1267 -- ListField now handles negative indicies correctly. #1270 -- Fixed AttributeError when initializing EmbeddedDocument with positional args. 
#681 -- Fixed no_cursor_timeout error with pymongo 3.0+ #1304 -- Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336 -- Fixed support for `__` to escape field names that match operators names in `update` #1351 -- Fixed BaseDocument#_mark_as_changed #1369 -- Added support for pickling QuerySet instances. #1397 -- Fixed connecting to a list of hosts #1389 -- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334 -- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218 -- Improvements to the dictionary fields docs #1383 +- Drop Python 3.2 support #1390 +- Fix a bug where a dynamic doc has an index inside a dict field. #1278 +- Fix: ``ListField`` minus index assignment does not work. #1128 +- Fix cascade delete mixing among collections. #1224 +- Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206 +- Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set. +- Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187 +- Fix ``LongField`` values stored as int32 in Python 3. #1253 +- ``MapField`` now handles unicode keys correctly. #1267 +- ``ListField`` now handles negative indices correctly. #1270 +- Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681 +- Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304 +- Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336 +- Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351 +- Fix ``BaseDocument._mark_as_changed``. #1369 +- Add support for pickling ``QuerySet`` instances. #1397 +- Fix connecting to a list of hosts. #1389 +- Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. 
#1334 +- Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218 +- Improvements to the dictionary field's docs. #1383 Changes in 0.10.6 ================= - Add support for mocking MongoEngine based on mongomock. #1151 -- Fixed not being able to run tests on Windows. #1153 +- Fix not being able to run tests on Windows. #1153 - Allow creation of sparse compound indexes. #1114 -- count on ListField of EmbeddedDocumentField fails. #1187 Changes in 0.10.5 ================= @@ -215,12 +271,12 @@ Changes in 0.10.5 Changes in 0.10.4 ================= -- SaveConditionError is now importable from the top level package. #1165 -- upsert_one method added. #1157 +- ``SaveConditionError`` is now importable from the top level package. #1165 +- Add a ``QuerySet.upsert_one`` method. #1157 Changes in 0.10.3 ================= -- Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042 +- Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042 Changes in 0.10.2 ================= @@ -230,16 +286,16 @@ Changes in 0.10.2 Changes in 0.10.1 ================= -- Fix infinite recursion with CASCADE delete rules under specific conditions. #1046 -- Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047 -- Fix ignored chained options #842 -- Document save's save_condition error raises `SaveConditionError` exception #1070 -- Fix Document.reload for DynamicDocument. #1050 -- StrictDict & SemiStrictDict are shadowed at init time. #1105 -- Fix ListField minus index assignment does not work. #1119 -- Remove code that marks field as changed when the field has default but not existed in database #1126 -- Remove test dependencies (nose and rednose) from install dependencies list. #1079 -- Recursively build query when using elemMatch operator. 
#1130 +- Fix infinite recursion with cascade delete rules under specific conditions. #1046 +- Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047 +- Fix ignored chained options. #842 +- ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070 +- Fix ``Document.reload`` for the ``DynamicDocument``. #1050 +- ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105 +- Fix ``ListField`` negative index assignment not working. #1119 +- Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126 +- Remove test dependencies (nose and rednose) from install dependencies. #1079 +- Recursively build a query when using the ``elemMatch`` operator. #1130 - Fix instance back references for lists of embedded documents. #1131 Changes in 0.10.0 @@ -250,7 +306,7 @@ Changes in 0.10.0 - Removed get_or_create() deprecated since 0.8.0. #300 - Improve Document._created status when switch collection and db #1020 - Queryset update doesn't go through field validation #453 -- Added support for specifying authentication source as option `authSource` in URI. #967 +- Added support for specifying authentication source as option ``authSource`` in URI. #967 - Fixed mark_as_changed to handle higher/lower level fields changed. #927 - ListField of embedded docs doesn't set the _instance attribute when iterating over it #914 - Support += and *= for ListField #595 @@ -266,7 +322,7 @@ Changes in 0.10.0 - Fixes some internal _id handling issue. #961 - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 - Capped collection multiple of 256. #1011 -- Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods. +- Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods. - Fix for delete with write_concern {'w': 0}. 
#1008 - Allow dynamic lookup for more than two parts. #882 - Added support for min_distance on geo queries. #831 @@ -275,10 +331,10 @@ Changes in 0.10.0 Changes in 0.9.0 ================ - Update FileField when creating a new file #714 -- Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826 +- Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826 - ComplexDateTimeField should fall back to None when null=True #864 - Request Support for $min, $max Field update operators #863 -- `BaseDict` does not follow `setdefault` #866 +- ``BaseDict`` does not follow ``setdefault`` #866 - Add support for $type operator # 766 - Fix tests for pymongo 2.8+ #877 - No module named 'django.utils.importlib' (Django dev) #872 @@ -299,13 +355,13 @@ Changes in 0.9.0 - Stop ensure_indexes running on a secondaries unless connection is through mongos #746 - Not overriding default values when loading a subset of fields #399 - Saving document doesn't create new fields in existing collection #620 -- Added `Queryset.aggregate` wrapper to aggregation framework #703 +- Added ``Queryset.aggregate`` wrapper to aggregation framework #703 - Added support to show original model fields on to_json calls instead of db_field #697 - Added Queryset.search_text to Text indexes searchs #700 - Fixed tests for Django 1.7 #696 - Follow ReferenceFields in EmbeddedDocuments with select_related #690 - Added preliminary support for text indexes #680 -- Added `elemMatch` operator as well - `match` is too obscure #653 +- Added ``elemMatch`` operator as well - ``match`` is too obscure #653 - Added support for progressive JPEG #486 #548 - Allow strings to be used in index creation #675 - Fixed EmbeddedDoc weakref proxy issue #592 @@ -341,11 +397,11 @@ Changes in 0.9.0 - Increase email field length to accommodate new TLDs #726 - index_cls is ignored when deciding to set _cls as index prefix #733 - Make 'db' argument to connection optional #737 -- Allow atomic update for the entire 
`DictField` #742 +- Allow atomic update for the entire ``DictField`` #742 - Added MultiPointField, MultiLineField, MultiPolygonField - Fix multiple connections aliases being rewritten #748 - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 -- Make `in_bulk()` respect `no_dereference()` #775 +- Make ``in_bulk()`` respect ``no_dereference()`` #775 - Handle None from model __str__; Fixes #753 #754 - _get_changed_fields fix for embedded documents with id field. #925 @@ -399,18 +455,15 @@ Changes in 0.8.4 Changes in 0.8.3 ================ -- Fixed EmbeddedDocuments with `id` also storing `_id` (#402) +- Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402) - Added get_proxy_object helper to filefields (#391) - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) - Fixed sum and average mapreduce dot notation support (#375, #376, #393) - Fixed as_pymongo to return the id (#386) -- Document.select_related() now respects `db_alias` (#377) +- Document.select_related() now respects ``db_alias`` (#377) - Reload uses shard_key if applicable (#384) - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) - - **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 - -- Fixed pickling dynamic documents `_dynamic_fields` (#387) +- Fixed pickling dynamic documents ``_dynamic_fields`` (#387) - Fixed ListField setslice and delslice dirty tracking (#390) - Added Django 1.5 PY3 support (#392) - Added match ($elemMatch) support for EmbeddedDocuments (#379) @@ -451,7 +504,7 @@ Changes in 0.8.0 ================ - Fixed querying ReferenceField custom_id (#317) - Fixed pickle issues with collections (#316) -- Added `get_next_value` preview for SequenceFields (#319) +- Added ``get_next_value`` preview for SequenceFields (#319) - Added no_sub_classes context manager and queryset helper (#312) - Querysets now utilises a local cache - Changed __len__ 
behaviour in the queryset (#247, #311) @@ -480,7 +533,7 @@ Changes in 0.8.0 - Updated connection to use MongoClient (#262, #274) - Fixed db_alias and inherited Documents (#143) - Documentation update for document errors (#124) -- Deprecated `get_or_create` (#35) +- Deprecated ``get_or_create`` (#35) - Updated inheritable objects created by upsert now contain _cls (#118) - Added support for creating documents with embedded documents in a single operation (#6) - Added to_json and from_json to Document (#1) @@ -601,7 +654,7 @@ Changes in 0.7.0 - Fixed UnboundLocalError in composite index with pk field (#88) - Updated ReferenceField's to optionally store ObjectId strings this will become the default in 0.8 (#89) -- Added FutureWarning - save will default to `cascade=False` in 0.8 +- Added FutureWarning - save will default to ``cascade=False`` in 0.8 - Added example of indexing embedded document fields (#75) - Fixed ImageField resizing when forcing size (#80) - Add flexibility for fields handling bad data (#78) @@ -697,7 +750,7 @@ Changes in 0.6.8 ================ - Fixed FileField losing reference when no default set - Removed possible race condition from FileField (grid_file) -- Added assignment to save, can now do: `b = MyDoc(**kwargs).save()` +- Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()`` - Added support for pull operations on nested EmbeddedDocuments - Added support for choices with GenericReferenceFields - Added support for choices with GenericEmbeddedDocumentFields @@ -712,7 +765,7 @@ Changes in 0.6.7 - Fixed indexing on '_id' or 'pk' or 'id' - Invalid data from the DB now raises a InvalidDocumentError - Cleaned up the Validation Error - docs and code -- Added meta `auto_create_index` so you can disable index creation +- Added meta ``auto_create_index`` so you can disable index creation - Added write concern options to inserts - Fixed typo in meta for index options - Bug fix Read preference now passed correctly @@ -753,7 +806,6 @@ Changes 
in 0.6.1 Changes in 0.6 ============== - - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 - Added support for covered indexes when inheritance is off - No longer always upsert on save for items with a '_id' @@ -978,7 +1030,6 @@ Changes in v0.1.3 querying takes place - A few minor bugfixes - Changes in v0.1.2 ================= - Query values may be processed before before being used in queries @@ -987,7 +1038,6 @@ Changes in v0.1.2 - Added ``BooleanField`` - Added ``Document.reload()`` method - Changes in v0.1.1 ================= - Documents may now use capped collections diff --git a/docs/code/tumblelog.py b/docs/code/tumblelog.py index 796336e6..3ca2384c 100644 --- a/docs/code/tumblelog.py +++ b/docs/code/tumblelog.py @@ -1,16 +1,19 @@ from mongoengine import * -connect('tumblelog') +connect("tumblelog") + class Comment(EmbeddedDocument): content = StringField() name = StringField(max_length=120) + class User(Document): email = StringField(required=True) first_name = StringField(max_length=50) last_name = StringField(max_length=50) + class Post(Document): title = StringField(max_length=120, required=True) author = ReferenceField(User) @@ -18,54 +21,57 @@ class Post(Document): comments = ListField(EmbeddedDocumentField(Comment)) # bugfix - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class TextPost(Post): content = StringField() + class ImagePost(Post): image_path = StringField() + class LinkPost(Post): link_url = StringField() + Post.drop_collection() -john = User(email='jdoe@example.com', first_name='John', last_name='Doe') +john = User(email="jdoe@example.com", first_name="John", last_name="Doe") john.save() -post1 = TextPost(title='Fun with MongoEngine', author=john) -post1.content = 'Took a look at MongoEngine today, looks pretty cool.' 
-post1.tags = ['mongodb', 'mongoengine'] +post1 = TextPost(title="Fun with MongoEngine", author=john) +post1.content = "Took a look at MongoEngine today, looks pretty cool." +post1.tags = ["mongodb", "mongoengine"] post1.save() -post2 = LinkPost(title='MongoEngine Documentation', author=john) -post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' -post2.tags = ['mongoengine'] +post2 = LinkPost(title="MongoEngine Documentation", author=john) +post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs" +post2.tags = ["mongoengine"] post2.save() -print('ALL POSTS') +print("ALL POSTS") print() for post in Post.objects: print(post.title) - #print '=' * post.title.count() + # print '=' * post.title.count() print("=" * 20) if isinstance(post, TextPost): print(post.content) if isinstance(post, LinkPost): - print('Link:', post.link_url) + print("Link:", post.link_url) print() print() -print('POSTS TAGGED \'MONGODB\'') +print("POSTS TAGGED 'MONGODB'") print() -for post in Post.objects(tags='mongodb'): +for post in Post.objects(tags="mongodb"): print(post.title) print() -num_posts = Post.objects(tags='mongodb').count() +num_posts = Post.objects(tags="mongodb").count() print('Found %d posts with tag "mongodb"' % num_posts) diff --git a/docs/conf.py b/docs/conf.py index 468e71e0..48c8e859 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -11,7 +11,8 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys, os +import os +import sys import sphinx_rtd_theme @@ -20,29 +21,29 @@ import mongoengine # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8' +# source_encoding = 'utf-8' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'MongoEngine' -copyright = u'2009, MongoEngine Authors' +project = u"MongoEngine" +copyright = u"2009, MongoEngine Authors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -55,68 +56,66 @@ release = mongoengine.get_version() # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. -#unused_docs = [] +# unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. -exclude_trees = ['_build'] +exclude_trees = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. 
-#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -html_theme_options = { - 'canonical_url': 'http://docs.mongoengine.org/en/latest/' -} +html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 @@ -126,11 +125,11 @@ html_favicon = "favicon.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] +# html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. @@ -138,69 +137,68 @@ html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { - 'index': ['globaltoc.html', 'searchbox.html'], - '**': ['localtoc.html', 'relations.html', 'searchbox.html'] + "index": ["globaltoc.html", "searchbox.html"], + "**": ["localtoc.html", "relations.html", "searchbox.html"], } # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_use_modindex = True +# html_use_modindex = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). 
-#html_file_suffix = '' +# html_file_suffix = '' # Output file base name for HTML help builder. -htmlhelp_basename = 'MongoEnginedoc' +htmlhelp_basename = "MongoEnginedoc" # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). -latex_paper_size = 'a4' +latex_paper_size = "a4" # The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +# latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'MongoEngine.tex', 'MongoEngine Documentation', - 'Ross Lawley', 'manual'), + ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual") ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # Additional stuff for the LaTeX preamble. -#latex_preamble = '' +# latex_preamble = '' # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_use_modindex = True +# latex_use_modindex = True -autoclass_content = 'both' +autoclass_content = "both" diff --git a/docs/django.rst b/docs/django.rst index b8a52165..d43a205e 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -13,7 +13,7 @@ Help Wanted! The MongoEngine team is looking for help contributing and maintaining a new Django extension for MongoEngine! If you have Django experience and would like -to help contribute to the project, please get in touch on the -`mailing list `_ or by +to help contribute to the project, please get in touch on the +`mailing list `_ or by simply contributing on `GitHub `_. 
diff --git a/docs/faq.rst b/docs/faq.rst new file mode 100644 index 00000000..49c73023 --- /dev/null +++ b/docs/faq.rst @@ -0,0 +1,12 @@ +========================== +Frequently Asked Questions +========================== + +Does MongoEngine support asynchronous drivers (Motor, TxMongo)? +--------------------------------------------------------------- + +No, MongoEngine is exclusively based on PyMongo and isn't designed to support other driver. +If this is a requirement for your project, check the alternative: `uMongo`_ and `MotorEngine`_. + +.. _uMongo: https://umongo.readthedocs.io/ +.. _MotorEngine: https://motorengine.readthedocs.io/ diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index aac13902..ac2146a6 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -86,7 +86,7 @@ using 3 different databases to store data:: connect(alias='user-db-alias', db='user-db') connect(alias='book-db-alias', db='book-db') connect(alias='users-books-db-alias', db='users-books-db') - + class User(Document): name = StringField() diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index ae9d3b36..6dc35c30 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -352,7 +352,7 @@ Its value can take any of the following constants: Deletion is denied if there still exist references to the object being deleted. :const:`mongoengine.NULLIFY` - Any object's fields still referring to the object being deleted are removed + Any object's fields still referring to the object being deleted are set to None (using MongoDB's "unset" operation), effectively nullifying the relationship. 
:const:`mongoengine.CASCADE` Any object containing fields that are referring to the object being deleted @@ -555,7 +555,6 @@ There are a few top level defaults for all indexes that can be set:: 'index_background': True, 'index_cls': False, 'auto_create_index': True, - 'index_drop_dups': True, } @@ -574,11 +573,6 @@ There are a few top level defaults for all indexes that can be set:: in systems where indexes are managed separately. Disabling this will improve performance. -:attr:`index_drop_dups` (Optional) - Set the default value for if an index should drop duplicates - Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning - and has no effect - Compound Indexes and Indexing sub documents ------------------------------------------- @@ -714,11 +708,16 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: Shard keys ========== -If your collection is sharded, then you need to specify the shard key as a tuple, -using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. -This ensures that the shard key is sent with the query when calling the -:meth:`~mongoengine.document.Document.save` or -:meth:`~mongoengine.document.Document.update` method on an existing +If your collection is sharded by multiple keys, then you can improve shard +routing (and thus the performance of your application) by specifying the shard +key, using the :attr:`shard_key` attribute of +:attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple. 
+ +This ensures that the full shard key is sent with the query when calling +methods such as :meth:`~mongoengine.document.Document.save`, +:meth:`~mongoengine.document.Document.update`, +:meth:`~mongoengine.document.Document.modify`, or +:meth:`~mongoengine.document.Document.delete` on an existing :class:`~mongoengine.Document` instance:: class LogEntry(Document): @@ -728,7 +727,8 @@ This ensures that the shard key is sent with the query when calling the data = StringField() meta = { - 'shard_key': ('machine', 'timestamp',) + 'shard_key': ('machine', 'timestamp'), + 'indexes': ('machine', 'timestamp'), } .. _document-inheritance: @@ -738,7 +738,7 @@ Document inheritance To create a specialised type of a :class:`~mongoengine.Document` you have defined, you may subclass it and add any extra fields or methods you may need. -As this is new class is not a direct subclass of +As this new class is not a direct subclass of :class:`~mongoengine.Document`, it will not be stored in its own collection; it will use the same collection as its superclass uses. This allows for more convenient and efficient retrieval of related documents -- all you need do is @@ -761,6 +761,27 @@ document.:: Setting :attr:`allow_inheritance` to True should also be used in :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it +When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query +both `Page` and `DatedPage` whereas querying `DatedPage` will only query the `DatedPage` documents. +Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains +the class name in every documents. 
When a document is loaded, MongoEngine checks +it's :attr:`_cls` attribute and use that class to construct the instance.:: + + Page(title='a funky title').save() + DatedPage(title='another title', date=datetime.utcnow()).save() + + print(Page.objects().count()) # 2 + print(DatedPage.objects().count()) # 1 + + # print documents in their native form + # we remove 'id' to avoid polluting the output with unnecessary detail + qs = Page.objects.exclude('id').as_pymongo() + print(list(qs)) + # [ + # {'_cls': u 'Page', 'title': 'a funky title'}, + # {'_cls': u 'Page.DatedPage', 'title': u 'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)} + # ] + Working with existing data -------------------------- As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index f7380e89..0baf88e0 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -10,8 +10,9 @@ Writing GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field object. This field acts as a file-like object and provides a couple of different ways of inserting and retrieving data. Arbitrary metadata such as -content type can also be stored alongside the files. In the following example, -a document is created to store details about animals, including a photo:: +content type can also be stored alongside the files. 
The object returned when accessing a +FileField is a proxy to `Pymongo's GridFS `_ +In the following example, a document is created to store details about animals, including a photo:: class Animal(Document): genus = StringField() @@ -20,8 +21,8 @@ a document is created to store details about animals, including a photo:: marmot = Animal(genus='Marmota', family='Sciuridae') - marmot_photo = open('marmot.jpg', 'rb') - marmot.photo.put(marmot_photo, content_type = 'image/jpeg') + with open('marmot.jpg', 'rb') as fd: + marmot.photo.put(fd, content_type = 'image/jpeg') marmot.save() Retrieval @@ -34,6 +35,20 @@ field. The file can also be retrieved just as easily:: photo = marmot.photo.read() content_type = marmot.photo.content_type +.. note:: If you need to read() the content of a file multiple times, you'll need to "rewind" + the file-like object using `seek`:: + + marmot = Animal.objects(genus='Marmota').first() + content1 = marmot.photo.read() + assert content1 != "" + + content2 = marmot.photo.read() # will be empty + assert content2 == "" + + marmot.photo.seek(0) # rewind the file by setting the current position of the cursor in the file to 0 + content3 = marmot.photo.read() + assert content3 == content1 + Streaming --------- diff --git a/docs/guide/index.rst b/docs/guide/index.rst index c94a4eab..95a9b92b 100644 --- a/docs/guide/index.rst +++ b/docs/guide/index.rst @@ -14,4 +14,5 @@ User Guide gridfs signals text-indexes + logging-monitoring mongomock diff --git a/docs/guide/installing.rst b/docs/guide/installing.rst index b89d48f0..2c962ad9 100644 --- a/docs/guide/installing.rst +++ b/docs/guide/installing.rst @@ -12,7 +12,7 @@ MongoEngine is available on PyPI, so you can use :program:`pip`: .. 
code-block:: console - $ pip install mongoengine + $ python -m pip install mongoengine Alternatively, if you don't have setuptools installed, `download it from PyPi `_ and run diff --git a/docs/guide/logging-monitoring.rst b/docs/guide/logging-monitoring.rst new file mode 100644 index 00000000..9f523b79 --- /dev/null +++ b/docs/guide/logging-monitoring.rst @@ -0,0 +1,80 @@ +================== +Logging/Monitoring +================== + +It is possible to use `pymongo.monitoring `_ to monitor +the driver events (e.g: queries, connections, etc). This can be handy if you want to monitor the queries issued by +MongoEngine to the driver. + +To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners +**before** establishing the database connection (i.e calling `connect`): + +The following snippet provides a basic logging of all command events: + +.. code-block:: python + + import logging + from pymongo import monitoring + from mongoengine import * + + log = logging.getLogger() + log.setLevel(logging.DEBUG) + logging.basicConfig(level=logging.DEBUG) + + + class CommandLogger(monitoring.CommandListener): + + def started(self, event): + log.debug("Command {0.command_name} with request id " + "{0.request_id} started on server " + "{0.connection_id}".format(event)) + + def succeeded(self, event): + log.debug("Command {0.command_name} with request id " + "{0.request_id} on server {0.connection_id} " + "succeeded in {0.duration_micros} " + "microseconds".format(event)) + + def failed(self, event): + log.debug("Command {0.command_name} with request id " + "{0.request_id} on server {0.connection_id} " + "failed in {0.duration_micros} " + "microseconds".format(event)) + + monitoring.register(CommandLogger()) + + + class Jedi(Document): + name = StringField() + + + connect() + + + log.info('GO!') + + log.info('Saving an item through MongoEngine...') + Jedi(name='Obi-Wan Kenobii').save() + + log.info('Querying through MongoEngine...') + 
obiwan = Jedi.objects.first() + + log.info('Updating through MongoEngine...') + obiwan.name = 'Obi-Wan Kenobi' + obiwan.save() + + +Executing this prints the following output:: + + INFO:root:GO! + INFO:root:Saving an item through MongoEngine... + DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017) + DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds + INFO:root:Querying through MongoEngine... + DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017) + DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds + INFO:root:Updating through MongoEngine... + DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017) + DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds + +More details can of course be obtained by checking the `event` argument from the `CommandListener`. diff --git a/docs/guide/mongomock.rst b/docs/guide/mongomock.rst index d70ee6a6..141d7b69 100644 --- a/docs/guide/mongomock.rst +++ b/docs/guide/mongomock.rst @@ -2,10 +2,10 @@ Use mongomock for testing ============================== -`mongomock `_ is a package to do just +`mongomock `_ is a package to do just what the name implies, mocking a mongo database. -To use with mongoengine, simply specify mongomock when connecting with +To use with mongoengine, simply specify mongomock when connecting with mongoengine: .. code-block:: python @@ -21,7 +21,7 @@ or with an alias: conn = get_connection('testdb') Example of test file: --------- +--------------------- .. 
code-block:: python import unittest @@ -45,4 +45,4 @@ Example of test file: pers.save() fresh_pers = Person.objects().first() - self.assertEqual(fresh_pers.name, 'John') + assert fresh_pers.name == 'John' diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 6937cf68..7307b003 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -222,6 +222,18 @@ keyword argument:: .. versionadded:: 0.4 +Sorting/Ordering results +======================== +It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`. +The order may be specified by prepending each of the keys by "+" or "-". Ascending order is assumed if there's no prefix.:: + + # Order by ascending date + blogs = BlogPost.objects().order_by('date') # equivalent to .order_by('+date') + + # Order by ascending date first, then descending title + blogs = BlogPost.objects().order_by('+date', '-title') + + Limiting and skipping results ============================= Just as with traditional ORMs, you may limit the number of results returned or @@ -349,9 +361,9 @@ Just as with limiting and skipping results, there is a method on a You could technically use ``len(User.objects)`` to get the same result, but it would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. When you execute a server-side count query, you let MongoDB do the heavy -lifting and you receive a single integer over the wire. Meanwhile, len() +lifting and you receive a single integer over the wire. Meanwhile, ``len()`` retrieves all the results, places them in a local cache, and finally counts -them. If we compare the performance of the two operations, len() is much slower +them. If we compare the performance of the two operations, ``len()`` is much slower than :meth:`~mongoengine.queryset.QuerySet.count`. 
Further aggregation @@ -386,6 +398,25 @@ would be generating "tag-clouds":: top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] +MongoDB aggregation API +----------------------- +If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework `_ +through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline. +An example of its use would be:: + + class Person(Document): + name = StringField() + + Person(name='John').save() + Person(name='Bob').save() + + pipeline = [ + {"$sort" : {"name" : -1}}, + {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}} + ] + data = Person.objects().aggregate(pipeline) + assert data == [{'name': 'BOB'}, {'name': 'JOHN'}] + Query efficiency and performance ================================ @@ -578,7 +609,7 @@ to push values with index:: .. note:: Currently only top level lists are handled, future versions of mongodb / pymongo plan to support nested positional operators. See `The $ positional - operator `_. + operator `_. Server-side javascript execution ================================ diff --git a/docs/guide/signals.rst b/docs/guide/signals.rst index 06bccb3b..e5214610 100644 --- a/docs/guide/signals.rst +++ b/docs/guide/signals.rst @@ -44,8 +44,8 @@ Available signals include: `post_save` Called within :meth:`~mongoengine.Document.save` after most actions - (validation, insert/update, and cascades, but not clearing dirty flags) have - completed successfully. Passed the additional boolean keyword argument + (validation, insert/update, and cascades, but not clearing dirty flags) have + completed successfully. Passed the additional boolean keyword argument `created` to indicate if the save was an insert or an update. 
`pre_delete` diff --git a/docs/guide/text-indexes.rst b/docs/guide/text-indexes.rst index 92a4471a..a5eaf7d8 100644 --- a/docs/guide/text-indexes.rst +++ b/docs/guide/text-indexes.rst @@ -8,7 +8,7 @@ After MongoDB 2.4 version, supports search documents by text indexes. Defining a Document with text index =================================== Use the *$* prefix to set a text index, Look the declaration:: - + class News(Document): title = StringField() content = StringField() @@ -35,10 +35,10 @@ Saving a document:: content="Various improvements").save() Next, start a text search using :attr:`QuerySet.search_text` method:: - + document = News.objects.search_text('testing').first() document.title # may be: "Using mongodb text search" - + document = News.objects.search_text('released').first() document.title # may be: "MongoEngine 0.9 released" diff --git a/docs/index.rst b/docs/index.rst index 2102df02..a42ff857 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,7 +7,7 @@ MongoDB. To install it, simply run .. code-block:: console - $ pip install -U mongoengine + $ python -m pip install -U mongoengine :doc:`tutorial` A quick tutorial building a tumblelog to get you up and running with @@ -23,9 +23,18 @@ MongoDB. To install it, simply run :doc:`upgrade` How to upgrade MongoEngine. +:doc:`faq` + Frequently Asked Questions + :doc:`django` Using MongoEngine and Django +MongoDB and driver support +-------------------------- + +MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB. +For further details, please refer to the `readme `_. + Community --------- @@ -73,6 +82,7 @@ formats for offline reading. 
apireference changelog upgrade + faq django Indices and tables @@ -81,4 +91,3 @@ Indices and tables * :ref:`genindex` * :ref:`modindex` * :ref:`search` - diff --git a/docs/tutorial.rst b/docs/tutorial.rst index bcd0d17f..b7885c34 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -18,7 +18,7 @@ location --- running it locally will be easier, but if that is not an option then it may be run on a remote server. If you haven't installed MongoEngine, simply use pip to install it like so:: - $ pip install mongoengine + $ python -m pip install mongoengine Before we can start using MongoEngine, we need to tell it how to connect to our instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` diff --git a/docs/upgrade.rst b/docs/upgrade.rst index 082dbadc..4e798dd4 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -52,7 +52,7 @@ rename its occurrences. This release includes a major rehaul of MongoEngine's code quality and introduces a few breaking changes. It also touches many different parts of the package and although all the changes have been tested and scrutinized, -you're encouraged to thorougly test the upgrade. +you're encouraged to thoroughly test the upgrade. First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. If you import or catch this exception, you'll need to rename it in your code. @@ -85,10 +85,10 @@ by default from now on. The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: :: - pip uninstall pymongo - pip uninstall mongoengine - pip install pymongo==2.8 - pip install mongoengine + python -m pip uninstall pymongo + python -m pip uninstall mongoengine + python -m pip install pymongo==2.8 + python -m pip install mongoengine 0.8.7 ***** @@ -153,7 +153,7 @@ inherited classes like so: :: # 4. 
Remove indexes info = collection.index_information() - indexes_to_drop = [key for key, value in info.iteritems() + indexes_to_drop = [key for key, value in info.items() if '_types' in dict(value['key'])] for index in indexes_to_drop: collection.drop_index(index) diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index d6a50766..dbd88a68 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -18,12 +18,17 @@ from mongoengine.queryset import * from mongoengine.signals import * -__all__ = (list(document.__all__) + list(fields.__all__) + - list(connection.__all__) + list(queryset.__all__) + - list(signals.__all__) + list(errors.__all__)) +__all__ = ( + list(document.__all__) + + list(fields.__all__) + + list(connection.__all__) + + list(queryset.__all__) + + list(signals.__all__) + + list(errors.__all__) +) -VERSION = (0, 18, 1) +VERSION = (0, 20, 0) def get_version(): @@ -31,7 +36,7 @@ def get_version(): For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. """ - return '.'.join(map(str, VERSION)) + return ".".join(map(str, VERSION)) __version__ = get_version() diff --git a/mongoengine/base/__init__.py b/mongoengine/base/__init__.py index e069a147..dca0c4bb 100644 --- a/mongoengine/base/__init__.py +++ b/mongoengine/base/__init__.py @@ -12,17 +12,22 @@ from mongoengine.base.metaclasses import * __all__ = ( # common - 'UPDATE_OPERATORS', '_document_registry', 'get_document', - + "UPDATE_OPERATORS", + "_document_registry", + "get_document", # datastructures - 'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference', - + "BaseDict", + "BaseList", + "EmbeddedDocumentList", + "LazyReference", # document - 'BaseDocument', - + "BaseDocument", # fields - 'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField', - + "BaseField", + "ComplexBaseField", + "ObjectIdField", + "GeoJsonBaseField", # metaclasses - 'DocumentMetaclass', 'TopLevelDocumentMetaclass' + "DocumentMetaclass", + "TopLevelDocumentMetaclass", ) diff --git 
a/mongoengine/base/common.py b/mongoengine/base/common.py index 999fd23a..85897324 100644 --- a/mongoengine/base/common.py +++ b/mongoengine/base/common.py @@ -1,12 +1,25 @@ from mongoengine.errors import NotRegistered -__all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') +__all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry") -UPDATE_OPERATORS = {'set', 'unset', 'inc', 'dec', 'mul', - 'pop', 'push', 'push_all', 'pull', - 'pull_all', 'add_to_set', 'set_on_insert', - 'min', 'max', 'rename'} +UPDATE_OPERATORS = { + "set", + "unset", + "inc", + "dec", + "mul", + "pop", + "push", + "push_all", + "pull", + "pull_all", + "add_to_set", + "set_on_insert", + "min", + "max", + "rename", +} _document_registry = {} @@ -17,25 +30,33 @@ def get_document(name): doc = _document_registry.get(name, None) if not doc: # Possible old style name - single_end = name.split('.')[-1] - compound_end = '.%s' % single_end - possible_match = [k for k in _document_registry - if k.endswith(compound_end) or k == single_end] + single_end = name.split(".")[-1] + compound_end = ".%s" % single_end + possible_match = [ + k for k in _document_registry if k.endswith(compound_end) or k == single_end + ] if len(possible_match) == 1: doc = _document_registry.get(possible_match.pop(), None) if not doc: - raise NotRegistered(""" + raise NotRegistered( + """ `%s` has not been registered in the document registry. Importing the document class automatically registers it, has it been imported? 
- """.strip() % name) + """.strip() + % name + ) return doc def _get_documents_by_db(connection_alias, default_connection_alias): """Get all registered Documents class attached to a given database""" - def get_doc_alias(doc_cls): - return doc_cls._meta.get('db_alias', default_connection_alias) - return [doc_cls for doc_cls in _document_registry.values() - if get_doc_alias(doc_cls) == connection_alias] + def get_doc_alias(doc_cls): + return doc_cls._meta.get("db_alias", default_connection_alias) + + return [ + doc_cls + for doc_cls in _document_registry.values() + if get_doc_alias(doc_cls) == connection_alias + ] diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index b693b914..2a8fde6d 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,30 +1,40 @@ import weakref from bson import DBRef -import six -from six import iteritems from mongoengine.common import _import_class from mongoengine.errors import DoesNotExist, MultipleObjectsReturned -__all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference') +__all__ = ( + "BaseDict", + "StrictDict", + "BaseList", + "EmbeddedDocumentList", + "LazyReference", +) def mark_as_changed_wrapper(parent_method): - """Decorators that ensures _mark_as_changed method gets called""" + """Decorator that ensures _mark_as_changed method gets called.""" + def wrapper(self, *args, **kwargs): - result = parent_method(self, *args, **kwargs) # Can't use super() in the decorator + # Can't use super() in the decorator. 
+ result = parent_method(self, *args, **kwargs) self._mark_as_changed() return result + return wrapper def mark_key_as_changed_wrapper(parent_method): - """Decorators that ensures _mark_as_changed method gets called with the key argument""" + """Decorator that ensures _mark_as_changed method gets called with the key argument""" + def wrapper(self, key, *args, **kwargs): - result = parent_method(self, key, *args, **kwargs) # Can't use super() in the decorator + # Can't use super() in the decorator. + result = parent_method(self, key, *args, **kwargs) self._mark_as_changed(key) return result + return wrapper @@ -36,12 +46,12 @@ class BaseDict(dict): _name = None def __init__(self, dict_items, instance, name): - BaseDocument = _import_class('BaseDocument') + BaseDocument = _import_class("BaseDocument") if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) self._name = name - super(BaseDict, self).__init__(dict_items) + super().__init__(dict_items) def get(self, key, default=None): # get does not use __getitem__ by default so we must override it as well @@ -51,18 +61,18 @@ class BaseDict(dict): return default def __getitem__(self, key): - value = super(BaseDict, self).__getitem__(key) + value = super().__getitem__(key) - EmbeddedDocument = _import_class('EmbeddedDocument') + EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument) and value._instance is None: value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): - value = BaseDict(value, None, '%s.%s' % (self._name, key)) - super(BaseDict, self).__setitem__(key, value) + value = BaseDict(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): - value = BaseList(value, None, '%s.%s' % (self._name, key)) - super(BaseDict, self).__setitem__(key, value) + value = BaseList(value, None, 
"{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance return value @@ -85,9 +95,9 @@ class BaseDict(dict): setdefault = mark_as_changed_wrapper(dict.setdefault) def _mark_as_changed(self, key=None): - if hasattr(self._instance, '_mark_as_changed'): + if hasattr(self._instance, "_mark_as_changed"): if key: - self._instance._mark_as_changed('%s.%s' % (self._name, key)) + self._instance._mark_as_changed("{}.{}".format(self._name, key)) else: self._instance._mark_as_changed(self._name) @@ -100,39 +110,41 @@ class BaseList(list): _name = None def __init__(self, list_items, instance, name): - BaseDocument = _import_class('BaseDocument') + BaseDocument = _import_class("BaseDocument") if isinstance(instance, BaseDocument): self._instance = weakref.proxy(instance) self._name = name - super(BaseList, self).__init__(list_items) + super().__init__(list_items) def __getitem__(self, key): - value = super(BaseList, self).__getitem__(key) + # change index to positive value because MongoDB does not support negative one + if isinstance(key, int) and key < 0: + key = len(self) + key + value = super().__getitem__(key) if isinstance(key, slice): # When receiving a slice operator, we don't convert the structure and bind # to parent's instance. 
This is buggy for now but would require more work to be handled properly return value - EmbeddedDocument = _import_class('EmbeddedDocument') + EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument) and value._instance is None: value._instance = self._instance elif isinstance(value, dict) and not isinstance(value, BaseDict): # Replace dict by BaseDict - value = BaseDict(value, None, '%s.%s' % (self._name, key)) - super(BaseList, self).__setitem__(key, value) + value = BaseDict(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance elif isinstance(value, list) and not isinstance(value, BaseList): # Replace list by BaseList - value = BaseList(value, None, '%s.%s' % (self._name, key)) - super(BaseList, self).__setitem__(key, value) + value = BaseList(value, None, "{}.{}".format(self._name, key)) + super().__setitem__(key, value) value._instance = self._instance return value def __iter__(self): - for v in super(BaseList, self).__iter__(): - yield v + yield from super().__iter__() def __getstate__(self): self.instance = None @@ -150,7 +162,7 @@ class BaseList(list): # instead, we simply marks the whole list as changed changed_key = None - result = super(BaseList, self).__setitem__(key, value) + result = super().__setitem__(key, value) self._mark_as_changed(changed_key) return result @@ -165,33 +177,19 @@ class BaseList(list): __iadd__ = mark_as_changed_wrapper(list.__iadd__) __imul__ = mark_as_changed_wrapper(list.__imul__) - if six.PY2: - # Under py3 __setslice__, __delslice__ and __getslice__ - # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter - # so we mimic this under python 2 - def __setslice__(self, i, j, sequence): - return self.__setitem__(slice(i, j), sequence) - - def __delslice__(self, i, j): - return self.__delitem__(slice(i, j)) - - def __getslice__(self, i, j): - return self.__getitem__(slice(i, j)) - def _mark_as_changed(self, 
key=None): - if hasattr(self._instance, '_mark_as_changed'): - if key: + if hasattr(self._instance, "_mark_as_changed"): + if key is not None: self._instance._mark_as_changed( - '%s.%s' % (self._name, key % len(self)) + "{}.{}".format(self._name, key % len(self)) ) else: self._instance._mark_as_changed(self._name) class EmbeddedDocumentList(BaseList): - def __init__(self, list_items, instance, name): - super(EmbeddedDocumentList, self).__init__(list_items, instance, name) + super().__init__(list_items, instance, name) self._instance = instance @classmethod @@ -201,7 +199,7 @@ class EmbeddedDocumentList(BaseList): """ for key, expected_value in kwargs.items(): doc_val = getattr(embedded_doc, key) - if doc_val != expected_value and six.text_type(doc_val) != expected_value: + if doc_val != expected_value and str(doc_val) != expected_value: return False return True @@ -274,12 +272,10 @@ class EmbeddedDocumentList(BaseList): """ values = self.__only_matches(self, kwargs) if len(values) == 0: - raise DoesNotExist( - '%s matching query does not exist.' % self._name - ) + raise DoesNotExist("%s matching query does not exist." % self._name) elif len(values) > 1: raise MultipleObjectsReturned( - '%d items returned, instead of 1' % len(values) + "%d items returned, instead of 1" % len(values) ) return values[0] @@ -293,11 +289,11 @@ class EmbeddedDocumentList(BaseList): def create(self, **values): """ - Creates a new embedded document and saves it to the database. + Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList. .. note:: - The embedded document changes are not automatically saved - to the database after calling this method. + the instance of the EmbeddedDocument is not automatically saved to the database. + You still need to call .save() on the parent Document. :param values: A dictionary of values for the embedded document. :return: The new embedded document instance. 
@@ -358,24 +354,24 @@ class EmbeddedDocumentList(BaseList): return len(values) -class StrictDict(object): +class StrictDict: __slots__ = () - _special_fields = {'get', 'pop', 'iteritems', 'items', 'keys', 'create'} + _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} _classes = {} def __init__(self, **kwargs): - for k, v in iteritems(kwargs): + for k, v in kwargs.items(): setattr(self, k, v) def __getitem__(self, key): - key = '_reserved_' + key if key in self._special_fields else key + key = "_reserved_" + key if key in self._special_fields else key try: return getattr(self, key) except AttributeError: raise KeyError(key) def __setitem__(self, key, value): - key = '_reserved_' + key if key in self._special_fields else key + key = "_reserved_" + key if key in self._special_fields else key return setattr(self, key, value) def __contains__(self, key): @@ -412,37 +408,42 @@ class StrictDict(object): return (key for key in self.__slots__ if hasattr(self, key)) def __len__(self): - return len(list(iteritems(self))) + return len(list(self.items())) def __eq__(self, other): - return self.items() == other.items() + return list(self.items()) == list(other.items()) def __ne__(self, other): - return self.items() != other.items() + return not (self == other) @classmethod def create(cls, allowed_keys): - allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys) + allowed_keys_tuple = tuple( + ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys + ) allowed_keys = frozenset(allowed_keys_tuple) if allowed_keys not in cls._classes: + class SpecificStrictDict(cls): __slots__ = allowed_keys_tuple def __repr__(self): - return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items()) + return "{%s}" % ", ".join( + '"{!s}": {!r}'.format(k, v) for k, v in self.items() + ) cls._classes[allowed_keys] = SpecificStrictDict return cls._classes[allowed_keys] class LazyReference(DBRef): 
-    __slots__ = ('_cached_doc', 'passthrough', 'document_type')
+    __slots__ = ("_cached_doc", "passthrough", "document_type")
 
     def fetch(self, force=False):
         if not self._cached_doc or force:
             self._cached_doc = self.document_type.objects.get(pk=self.pk)
             if not self._cached_doc:
-                raise DoesNotExist('Trying to dereference unknown document %s' % (self))
+                raise DoesNotExist("Trying to dereference unknown document %s" % (self))
         return self._cached_doc
 
     @property
@@ -453,7 +454,7 @@ class LazyReference(DBRef):
         self.document_type = document_type
         self._cached_doc = cached_doc
         self.passthrough = passthrough
-        super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk)
+        super().__init__(self.document_type._get_collection_name(), pk)
 
     def __getitem__(self, name):
         if not self.passthrough:
@@ -462,7 +463,7 @@ class LazyReference(DBRef):
         return document[name]
 
     def __getattr__(self, name):
-        if not object.__getattribute__(self, 'passthrough'):
+        if not object.__getattribute__(self, "passthrough"):
             raise AttributeError()
         document = self.fetch()
         try:
@@ -471,4 +472,4 @@ class LazyReference(DBRef):
             raise AttributeError()
 
     def __repr__(self):
-        return "<LazyReference doc_cls=%s document=%r>" % (self.document_type, self.pk)
+        return "<LazyReference doc_cls={} document={!r}>".format(self.document_type, self.pk)
diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py
index 057258f5..e697fe40 100644
--- a/mongoengine/base/document.py
+++ b/mongoengine/base/document.py
@@ -1,30 +1,36 @@
 import copy
+
 import numbers
 from functools import partial
 
 from bson import DBRef, ObjectId, SON, json_util
 import pymongo
-import six
-from six import iteritems
 
 from mongoengine import signals
 from mongoengine.base.common import get_document
-from mongoengine.base.datastructures import (BaseDict, BaseList,
-                                             EmbeddedDocumentList,
-                                             LazyReference,
-                                             StrictDict)
+from mongoengine.base.datastructures import (
+    BaseDict,
+    BaseList,
+    EmbeddedDocumentList,
+    LazyReference,
+    StrictDict,
+)
 from mongoengine.base.fields import ComplexBaseField
 from 
mongoengine.common import _import_class -from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, - LookUpError, OperationError, ValidationError) -from mongoengine.python_support import Hashable +from mongoengine.errors import ( + FieldDoesNotExist, + InvalidDocumentError, + LookUpError, + OperationError, + ValidationError, +) -__all__ = ('BaseDocument', 'NON_FIELD_ERRORS') +__all__ = ("BaseDocument", "NON_FIELD_ERRORS") -NON_FIELD_ERRORS = '__all__' +NON_FIELD_ERRORS = "__all__" -class BaseDocument(object): +class BaseDocument: # TODO simplify how `_changed_fields` is used. # Currently, handling of `_changed_fields` seems unnecessarily convoluted: # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's @@ -35,9 +41,16 @@ class BaseDocument(object): # field is primarily set via `_from_son` or `_clear_changed_fields`, # though there are also other methods that manipulate it. # 4. The codebase is littered with `hasattr` calls for `_changed_fields`. - __slots__ = ('_changed_fields', '_initialised', '_created', '_data', - '_dynamic_fields', '_auto_id_field', '_db_field_map', - '__weakref__') + __slots__ = ( + "_changed_fields", + "_initialised", + "_created", + "_data", + "_dynamic_fields", + "_auto_id_field", + "_db_field_map", + "__weakref__", + ) _dynamic = False _dynamic_lock = True @@ -47,49 +60,42 @@ class BaseDocument(object): """ Initialise a document or an embedded document. - :param dict values: A dictionary of keys and values for the document. + :param values: A dictionary of keys and values for the document. It may contain additional reserved keywords, e.g. "__auto_convert". - :param bool __auto_convert: If True, supplied values will be converted + :param __auto_convert: If True, supplied values will be converted to Python-type values via each field's `to_python` method. - :param set __only_fields: A set of fields that have been loaded for + :param __only_fields: A set of fields that have been loaded for this document. 
Empty if all fields have been loaded. - :param bool _created: Indicates whether this is a brand new document + :param _created: Indicates whether this is a brand new document or whether it's already been persisted before. Defaults to true. """ self._initialised = False self._created = True if args: - # Combine positional arguments with named arguments. - # We only want named arguments. - field = iter(self._fields_ordered) - # If its an automatic id field then skip to the first defined field - if getattr(self, '_auto_id_field', False): - next(field) - for value in args: - name = next(field) - if name in values: - raise TypeError( - 'Multiple values for keyword argument "%s"' % name) - values[name] = value + raise TypeError( + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." + ) - __auto_convert = values.pop('__auto_convert', True) + __auto_convert = values.pop("__auto_convert", True) - __only_fields = set(values.pop('__only_fields', values)) + __only_fields = set(values.pop("__only_fields", values)) - _created = values.pop('_created', True) + _created = values.pop("_created", True) signals.pre_init.send(self.__class__, document=self, values=values) # Check if there are undefined fields supplied to the constructor, # if so raise an Exception. 
- if not self._dynamic and (self._meta.get('strict', True) or _created): + if not self._dynamic and (self._meta.get("strict", True) or _created): _undefined_fields = set(values.keys()) - set( - self._fields.keys() + ['id', 'pk', '_cls', '_text_score']) + list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"] + ) if _undefined_fields: - msg = ( - 'The fields "{0}" do not exist on the document "{1}"' - ).format(_undefined_fields, self._class_name) + msg = ('The fields "{}" do not exist on the document "{}"').format( + _undefined_fields, self._class_name + ) raise FieldDoesNotExist(msg) if self.STRICT and not self._dynamic: @@ -102,28 +108,28 @@ class BaseDocument(object): # Assign default values to the instance. # We set default values only for fields loaded from DB. See # https://github.com/mongoengine/mongoengine/issues/399 for more info. - for key, field in iteritems(self._fields): + for key, field in self._fields.items(): if self._db_field_map.get(key, key) in __only_fields: continue value = getattr(self, key, None) setattr(self, key, value) - if '_cls' not in values: + if "_cls" not in values: self._cls = self._class_name # Set passed values after initialisation if self._dynamic: dynamic_data = {} - for key, value in iteritems(values): - if key in self._fields or key == '_id': + for key, value in values.items(): + if key in self._fields or key == "_id": setattr(self, key, value) else: dynamic_data[key] = value else: - FileField = _import_class('FileField') - for key, value in iteritems(values): + FileField = _import_class("FileField") + for key, value in values.items(): key = self._reverse_db_field_map.get(key, key) - if key in self._fields or key in ('id', 'pk', '_cls'): + if key in self._fields or key in ("id", "pk", "_cls"): if __auto_convert and value is not None: field = self._fields.get(key) if field and not isinstance(field, FileField): @@ -137,7 +143,7 @@ class BaseDocument(object): if self._dynamic: self._dynamic_lock = False - for key, value 
in iteritems(dynamic_data): + for key, value in dynamic_data.items(): setattr(self, key, value) # Flag initialised @@ -155,26 +161,26 @@ class BaseDocument(object): default = default() setattr(self, field_name, default) else: - super(BaseDocument, self).__delattr__(*args, **kwargs) + super().__delattr__(*args, **kwargs) def __setattr__(self, name, value): # Handle dynamic data only if an initialised dynamic document if self._dynamic and not self._dynamic_lock: - if not hasattr(self, name) and not name.startswith('_'): - DynamicField = _import_class('DynamicField') + if not hasattr(self, name) and not name.startswith("_"): + DynamicField = _import_class("DynamicField") field = DynamicField(db_field=name, null=True) field.name = name self._dynamic_fields[name] = field self._fields_ordered += (name,) - if not name.startswith('_'): + if not name.startswith("_"): value = self.__expand_dynamic_values(name, value) # Handle marking data as changed if name in self._dynamic_fields: self._data[name] = value - if hasattr(self, '_changed_fields'): + if hasattr(self, "_changed_fields"): self._mark_as_changed(name) try: self__created = self._created @@ -182,51 +188,66 @@ class BaseDocument(object): self__created = True if ( - self._is_document and - not self__created and - name in self._meta.get('shard_key', tuple()) and - self._data.get(name) != value + self._is_document + and not self__created + and name in self._meta.get("shard_key", tuple()) + and self._data.get(name) != value ): - msg = 'Shard Keys are immutable. Tried to update %s' % name + msg = "Shard Keys are immutable. 
Tried to update %s" % name raise OperationError(msg) try: self__initialised = self._initialised except AttributeError: self__initialised = False - # Check if the user has created a new instance of a class - if (self._is_document and self__initialised and - self__created and name == self._meta.get('id_field')): - super(BaseDocument, self).__setattr__('_created', False) - super(BaseDocument, self).__setattr__(name, value) + # Check if the user has created a new instance of a class + if ( + self._is_document + and self__initialised + and self__created + and name == self._meta.get("id_field") + ): + super().__setattr__("_created", False) + + super().__setattr__(name, value) def __getstate__(self): data = {} - for k in ('_changed_fields', '_initialised', '_created', - '_dynamic_fields', '_fields_ordered'): + for k in ( + "_changed_fields", + "_initialised", + "_created", + "_dynamic_fields", + "_fields_ordered", + ): if hasattr(self, k): data[k] = getattr(self, k) - data['_data'] = self.to_mongo() + data["_data"] = self.to_mongo() return data def __setstate__(self, data): - if isinstance(data['_data'], SON): - data['_data'] = self.__class__._from_son(data['_data'])._data - for k in ('_changed_fields', '_initialised', '_created', '_data', - '_dynamic_fields'): + if isinstance(data["_data"], SON): + data["_data"] = self.__class__._from_son(data["_data"])._data + for k in ( + "_changed_fields", + "_initialised", + "_created", + "_data", + "_dynamic_fields", + ): if k in data: setattr(self, k, data[k]) - if '_fields_ordered' in data: + if "_fields_ordered" in data: if self._dynamic: - setattr(self, '_fields_ordered', data['_fields_ordered']) + setattr(self, "_fields_ordered", data["_fields_ordered"]) else: _super_fields_ordered = type(self)._fields_ordered - setattr(self, '_fields_ordered', _super_fields_ordered) + setattr(self, "_fields_ordered", _super_fields_ordered) - dynamic_fields = data.get('_dynamic_fields') or SON() + dynamic_fields = data.get("_dynamic_fields") or 
SON() for k in dynamic_fields.keys(): - setattr(self, k, data['_data'].get(k)) + setattr(self, k, data["_data"].get(k)) def __iter__(self): return iter(self._fields_ordered) @@ -263,24 +284,27 @@ class BaseDocument(object): try: u = self.__str__() except (UnicodeEncodeError, UnicodeDecodeError): - u = '[Bad Unicode data]' + u = "[Bad Unicode data]" repr_type = str if u is None else type(u) - return repr_type('<%s: %s>' % (self.__class__.__name__, u)) + return repr_type("<{}: {}>".format(self.__class__.__name__, u)) def __str__(self): # TODO this could be simpler? - if hasattr(self, '__unicode__'): - if six.PY3: - return self.__unicode__() - else: - return six.text_type(self).encode('utf-8') - return six.text_type('%s object' % self.__class__.__name__) + if hasattr(self, "__unicode__"): + return self.__unicode__() + return "%s object" % self.__class__.__name__ def __eq__(self, other): - if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None: + if ( + isinstance(other, self.__class__) + and hasattr(other, "id") + and other.id is not None + ): return self.id == other.id if isinstance(other, DBRef): - return self._get_collection_name() == other.collection and self.id == other.id + return ( + self._get_collection_name() == other.collection and self.id == other.id + ) if self.id is None: return self is other return False @@ -303,10 +327,12 @@ class BaseDocument(object): Get text score from text query """ - if '_text_score' not in self._data: - raise InvalidDocumentError('This document is not originally built from a text query') + if "_text_score" not in self._data: + raise InvalidDocumentError( + "This document is not originally built from a text query" + ) - return self._data['_text_score'] + return self._data["_text_score"] def to_mongo(self, use_db_field=True, fields=None): """ @@ -315,11 +341,11 @@ class BaseDocument(object): fields = fields or [] data = SON() - data['_id'] = None - data['_cls'] = self._class_name + data["_id"] = None 
+ data["_cls"] = self._class_name # only root fields ['test1.a', 'test2'] => ['test1', 'test2'] - root_fields = {f.split('.')[0] for f in fields} + root_fields = {f.split(".")[0] for f in fields} for field_name in self: if root_fields and field_name not in root_fields: @@ -334,16 +360,16 @@ class BaseDocument(object): if value is not None: f_inputs = field.to_mongo.__code__.co_varnames ex_vars = {} - if fields and 'fields' in f_inputs: - key = '%s.' % field_name + if fields and "fields" in f_inputs: + key = "%s." % field_name embedded_fields = [ - i.replace(key, '') for i in fields - if i.startswith(key)] + i.replace(key, "") for i in fields if i.startswith(key) + ] - ex_vars['fields'] = embedded_fields + ex_vars["fields"] = embedded_fields - if 'use_db_field' in f_inputs: - ex_vars['use_db_field'] = use_db_field + if "use_db_field" in f_inputs: + ex_vars["use_db_field"] = use_db_field value = field.to_mongo(value, **ex_vars) @@ -359,8 +385,8 @@ class BaseDocument(object): data[field.name] = value # Only add _cls if allow_inheritance is True - if not self._meta.get('allow_inheritance'): - data.pop('_cls') + if not self._meta.get("allow_inheritance"): + data.pop("_cls") return data @@ -380,18 +406,23 @@ class BaseDocument(object): errors[NON_FIELD_ERRORS] = error # Get a list of tuples of field names and their current values - fields = [(self._fields.get(name, self._dynamic_fields.get(name)), - self._data.get(name)) for name in self._fields_ordered] + fields = [ + ( + self._fields.get(name, self._dynamic_fields.get(name)), + self._data.get(name), + ) + for name in self._fields_ordered + ] - EmbeddedDocumentField = _import_class('EmbeddedDocumentField') - GenericEmbeddedDocumentField = _import_class( - 'GenericEmbeddedDocumentField') + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField") for field, value in fields: if value is not None: try: - if isinstance(field, 
(EmbeddedDocumentField, - GenericEmbeddedDocumentField)): + if isinstance( + field, (EmbeddedDocumentField, GenericEmbeddedDocumentField) + ): field._validate(value, clean=clean) else: field._validate(value) @@ -399,17 +430,18 @@ class BaseDocument(object): errors[field.name] = error.errors or error except (ValueError, AttributeError, AssertionError) as error: errors[field.name] = error - elif field.required and not getattr(field, '_auto_gen', False): - errors[field.name] = ValidationError('Field is required', - field_name=field.name) + elif field.required and not getattr(field, "_auto_gen", False): + errors[field.name] = ValidationError( + "Field is required", field_name=field.name + ) if errors: - pk = 'None' - if hasattr(self, 'pk'): + pk = "None" + if hasattr(self, "pk"): pk = self.pk - elif self._instance and hasattr(self._instance, 'pk'): + elif self._instance and hasattr(self._instance, "pk"): pk = self._instance.pk - message = 'ValidationError (%s:%s) ' % (self._class_name, pk) + message = "ValidationError ({}:{}) ".format(self._class_name, pk) raise ValidationError(message, errors=errors) def to_json(self, *args, **kwargs): @@ -419,20 +451,28 @@ class BaseDocument(object): MongoDB (as opposed to attribute names on this document). Defaults to True. 
""" - use_db_field = kwargs.pop('use_db_field', True) + use_db_field = kwargs.pop("use_db_field", True) return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs) @classmethod def from_json(cls, json_data, created=False): """Converts json data to a Document instance - :param json_data: The json data to load into the Document - :param created: If True, the document will be considered as a brand new document - If False and an id is provided, it will consider that the data being - loaded corresponds to what's already in the database (This has an impact of subsequent call to .save()) - If False and no id is provided, it will consider the data as a new document - (default ``False``) + :param str json_data: The json data to load into the Document + :param bool created: Boolean defining whether to consider the newly + instantiated document as brand new or as persisted already: + * If True, consider the document as brand new, no matter what data + it's loaded with (i.e. even if an ID is loaded). + * If False and an ID is NOT provided, consider the document as + brand new. + * If False and an ID is provided, assume that the object has + already been persisted (this has an impact on the subsequent + call to .save()). + * Defaults to ``False``. """ + # TODO should `created` default to False? If the object already exists + # in the DB, you would likely retrieve it from MongoDB itself through + # a query, not load it from JSON data. 
return cls._from_son(json_util.loads(json_data), created=created) def __expand_dynamic_values(self, name, value): @@ -442,22 +482,18 @@ class BaseDocument(object): # If the value is a dict with '_cls' in it, turn it into a document is_dict = isinstance(value, dict) - if is_dict and '_cls' in value: - cls = get_document(value['_cls']) + if is_dict and "_cls" in value: + cls = get_document(value["_cls"]) return cls(**value) if is_dict: - value = { - k: self.__expand_dynamic_values(k, v) - for k, v in value.items() - } + value = {k: self.__expand_dynamic_values(k, v) for k, v in value.items()} else: value = [self.__expand_dynamic_values(name, v) for v in value] # Convert lists / values so we can watch for any changes on them - EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') - if (isinstance(value, (list, tuple)) and - not isinstance(value, BaseList)): + EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") + if isinstance(value, (list, tuple)) and not isinstance(value, BaseList): if issubclass(type(self), EmbeddedDocumentListField): value = EmbeddedDocumentList(value, self, name) else: @@ -472,26 +508,26 @@ class BaseDocument(object): if not key: return - if not hasattr(self, '_changed_fields'): + if not hasattr(self, "_changed_fields"): return - if '.' in key: - key, rest = key.split('.', 1) + if "." in key: + key, rest = key.split(".", 1) key = self._db_field_map.get(key, key) - key = '%s.%s' % (key, rest) + key = "{}.{}".format(key, rest) else: key = self._db_field_map.get(key, key) if key not in self._changed_fields: - levels, idx = key.split('.'), 1 + levels, idx = key.split("."), 1 while idx <= len(levels): - if '.'.join(levels[:idx]) in self._changed_fields: + if ".".join(levels[:idx]) in self._changed_fields: break idx += 1 else: self._changed_fields.append(key) # remove lower level changed fields - level = '.'.join(levels[:idx]) + '.' + level = ".".join(levels[:idx]) + "." 
remove = self._changed_fields.remove for field in self._changed_fields[:]: if field.startswith(level): @@ -502,7 +538,7 @@ class BaseDocument(object): are marked as changed. """ for changed in self._get_changed_fields(): - parts = changed.split('.') + parts = changed.split(".") data = self for part in parts: if isinstance(data, list): @@ -515,8 +551,10 @@ class BaseDocument(object): else: data = getattr(data, part, None) - if not isinstance(data, LazyReference) and hasattr(data, '_changed_fields'): - if getattr(data, '_is_document', False): + if not isinstance(data, LazyReference) and hasattr( + data, "_changed_fields" + ): + if getattr(data, "_is_document", False): continue data._changed_fields = [] @@ -532,39 +570,38 @@ class BaseDocument(object): """ # Loop list / dict fields as they contain documents # Determine the iterator to use - if not hasattr(data, 'items'): + if not hasattr(data, "items"): iterator = enumerate(data) else: - iterator = iteritems(data) + iterator = data.items() for index_or_key, value in iterator: - item_key = '%s%s.' % (base_key, index_or_key) + item_key = "{}{}.".format(base_key, index_or_key) # don't check anything lower if this key is already marked # as changed. if item_key[:-1] in changed_fields: continue - if hasattr(value, '_get_changed_fields'): + if hasattr(value, "_get_changed_fields"): changed = value._get_changed_fields() - changed_fields += ['%s%s' % (item_key, k) for k in changed if k] + changed_fields += ["{}{}".format(item_key, k) for k in changed if k] elif isinstance(value, (list, tuple, dict)): - self._nestable_types_changed_fields( - changed_fields, item_key, value) + self._nestable_types_changed_fields(changed_fields, item_key, value) def _get_changed_fields(self): """Return a list of all fields that have explicitly been changed. 
""" - EmbeddedDocument = _import_class('EmbeddedDocument') - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') - SortedListField = _import_class('SortedListField') + EmbeddedDocument = _import_class("EmbeddedDocument") + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") + SortedListField = _import_class("SortedListField") changed_fields = [] - changed_fields += getattr(self, '_changed_fields', []) + changed_fields += getattr(self, "_changed_fields", []) for field_name in self._fields_ordered: db_field_name = self._db_field_map.get(field_name, field_name) - key = '%s.' % db_field_name + key = "%s." % db_field_name data = self._data.get(field_name, None) field = self._fields.get(field_name) @@ -572,16 +609,17 @@ class BaseDocument(object): # Whole field already marked as changed, no need to go further continue - if isinstance(field, ReferenceField): # Don't follow referenced documents + if isinstance(field, ReferenceField): # Don't follow referenced documents continue if isinstance(data, EmbeddedDocument): # Find all embedded fields that have been changed changed = data._get_changed_fields() - changed_fields += ['%s%s' % (key, k) for k in changed if k] + changed_fields += ["{}{}".format(key, k) for k in changed if k] elif isinstance(data, (list, tuple, dict)): - if (hasattr(field, 'field') and - isinstance(field.field, (ReferenceField, GenericReferenceField))): + if hasattr(field, "field") and isinstance( + field.field, (ReferenceField, GenericReferenceField) + ): continue elif isinstance(field, SortedListField) and field._ordering: # if ordering is affected whole list is changed @@ -589,8 +627,7 @@ class BaseDocument(object): changed_fields.append(db_field_name) continue - self._nestable_types_changed_fields( - changed_fields, key, data) + self._nestable_types_changed_fields(changed_fields, key, data) return changed_fields def _delta(self): 
@@ -602,11 +639,11 @@ class BaseDocument(object): set_fields = self._get_changed_fields() unset_data = {} - if hasattr(self, '_changed_fields'): + if hasattr(self, "_changed_fields"): set_data = {} # Fetch each set item from its path for path in set_fields: - parts = path.split('.') + parts = path.split(".") d = doc new_path = [] for p in parts: @@ -616,26 +653,27 @@ class BaseDocument(object): elif isinstance(d, list) and p.isdigit(): # An item of a list (identified by its index) is updated d = d[int(p)] - elif hasattr(d, 'get'): + elif hasattr(d, "get"): # dict-like (dict, embedded document) d = d.get(p) new_path.append(p) - path = '.'.join(new_path) + path = ".".join(new_path) set_data[path] = d else: set_data = doc - if '_id' in set_data: - del set_data['_id'] + if "_id" in set_data: + del set_data["_id"] # Determine if any changed items were actually unset. - for path, value in set_data.items(): - if value or isinstance(value, (numbers.Number, bool)): # Account for 0 and True that are truthy + for path, value in list(set_data.items()): + if value or isinstance( + value, (numbers.Number, bool) + ): # Account for 0 and True that are truthy continue - parts = path.split('.') + parts = path.split(".") - if (self._dynamic and len(parts) and parts[0] in - self._dynamic_fields): + if self._dynamic and len(parts) and parts[0] in self._dynamic_fields: del set_data[path] unset_data[path] = 1 continue @@ -650,16 +688,16 @@ class BaseDocument(object): for p in parts: if isinstance(d, list) and p.isdigit(): d = d[int(p)] - elif (hasattr(d, '__getattribute__') and - not isinstance(d, dict)): + elif hasattr(d, "__getattribute__") and not isinstance(d, dict): real_path = d._reverse_db_field_map.get(p, p) d = getattr(d, real_path) else: d = d.get(p) - if hasattr(d, '_fields'): - field_name = d._reverse_db_field_map.get(db_field_name, - db_field_name) + if hasattr(d, "_fields"): + field_name = d._reverse_db_field_map.get( + db_field_name, db_field_name + ) if field_name in 
d._fields: default = d._fields.get(field_name).default else: @@ -680,7 +718,7 @@ class BaseDocument(object): """Return the collection name for this class. None for abstract class. """ - return cls._meta.get('collection', None) + return cls._meta.get("collection", None) @classmethod def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): @@ -689,16 +727,19 @@ class BaseDocument(object): only_fields = [] if son and not isinstance(son, dict): - raise ValueError("The source SON object needs to be of type 'dict'") + raise ValueError( + "The source SON object needs to be of type 'dict' but a '%s' was found" + % type(son) + ) # Get the class name from the document, falling back to the given # class if unavailable - class_name = son.get('_cls', cls._class_name) + class_name = son.get("_cls", cls._class_name) # Convert SON to a data dict, making sure each key is a string and # corresponds to the right db field. data = {} - for key, value in iteritems(son): + for key, value in son.items(): key = str(key) key = cls._db_field_map.get(key, key) data[key] = value @@ -713,34 +754,34 @@ class BaseDocument(object): if not _auto_dereference: fields = copy.deepcopy(fields) - for field_name, field in iteritems(fields): + for field_name, field in fields.items(): field._auto_dereference = _auto_dereference if field.db_field in data: value = data[field.db_field] try: - data[field_name] = (value if value is None - else field.to_python(value)) + data[field_name] = ( + value if value is None else field.to_python(value) + ) if field_name != field.db_field: del data[field.db_field] except (AttributeError, ValueError) as e: errors_dict[field_name] = e if errors_dict: - errors = '\n'.join(['%s - %s' % (k, v) - for k, v in errors_dict.items()]) - msg = ('Invalid data to create a `%s` instance.\n%s' - % (cls._class_name, errors)) + errors = "\n".join( + ["Field '{}' - {}".format(k, v) for k, v in errors_dict.items()] + ) + msg = "Invalid data to create a `{}` 
instance.\n{}".format( + cls._class_name, errors, + ) raise InvalidDocumentError(msg) # In STRICT documents, remove any keys that aren't in cls._fields if cls.STRICT: - data = {k: v for k, v in iteritems(data) if k in cls._fields} + data = {k: v for k, v in data.items() if k in cls._fields} obj = cls( - __auto_convert=False, - _created=created, - __only_fields=only_fields, - **data + __auto_convert=False, _created=created, __only_fields=only_fields, **data ) obj._changed_fields = [] if not _auto_dereference: @@ -762,15 +803,13 @@ class BaseDocument(object): # Create a map of index fields to index spec. We're converting # the fields from a list to a tuple so that it's hashable. - spec_fields = { - tuple(index['fields']): index for index in index_specs - } + spec_fields = {tuple(index["fields"]): index for index in index_specs} # For each new index, if there's an existing index with the same # fields list, update the existing spec with all data from the # new spec. for new_index in indices: - candidate = spec_fields.get(tuple(new_index['fields'])) + candidate = spec_fields.get(tuple(new_index["fields"])) if candidate is None: index_specs.append(new_index) else: @@ -786,10 +825,10 @@ class BaseDocument(object): @classmethod def _build_index_spec(cls, spec): """Build a PyMongo index spec from a MongoEngine index spec.""" - if isinstance(spec, six.string_types): - spec = {'fields': [spec]} + if isinstance(spec, str): + spec = {"fields": [spec]} elif isinstance(spec, (list, tuple)): - spec = {'fields': list(spec)} + spec = {"fields": list(spec)} elif isinstance(spec, dict): spec = dict(spec) @@ -797,19 +836,21 @@ class BaseDocument(object): direction = None # Check to see if we need to include _cls - allow_inheritance = cls._meta.get('allow_inheritance') + allow_inheritance = cls._meta.get("allow_inheritance") include_cls = ( - allow_inheritance and - not spec.get('sparse', False) and - spec.get('cls', True) and - '_cls' not in spec['fields'] + allow_inheritance + and 
not spec.get("sparse", False) + and spec.get("cls", True) + and "_cls" not in spec["fields"] ) # 733: don't include cls if index_cls is False unless there is an explicit cls with the index - include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True)) - if 'cls' in spec: - spec.pop('cls') - for key in spec['fields']: + include_cls = include_cls and ( + spec.get("cls", False) or cls._meta.get("index_cls", True) + ) + if "cls" in spec: + spec.pop("cls") + for key in spec["fields"]: # If inherited spec continue if isinstance(key, (list, tuple)): continue @@ -822,51 +863,54 @@ class BaseDocument(object): # GEOHAYSTACK from ) # GEO2D from * direction = pymongo.ASCENDING - if key.startswith('-'): + if key.startswith("-"): direction = pymongo.DESCENDING - elif key.startswith('$'): + elif key.startswith("$"): direction = pymongo.TEXT - elif key.startswith('#'): + elif key.startswith("#"): direction = pymongo.HASHED - elif key.startswith('('): + elif key.startswith("("): direction = pymongo.GEOSPHERE - elif key.startswith(')'): + elif key.startswith(")"): direction = pymongo.GEOHAYSTACK - elif key.startswith('*'): + elif key.startswith("*"): direction = pymongo.GEO2D - if key.startswith(('+', '-', '*', '$', '#', '(', ')')): + if key.startswith(("+", "-", "*", "$", "#", "(", ")")): key = key[1:] # Use real field name, do it manually because we need field # objects for the next part (list field checking) - parts = key.split('.') - if parts in (['pk'], ['id'], ['_id']): - key = '_id' + parts = key.split(".") + if parts in (["pk"], ["id"], ["_id"]): + key = "_id" else: fields = cls._lookup_field(parts) parts = [] for field in fields: try: - if field != '_id': + if field != "_id": field = field.db_field except AttributeError: pass parts.append(field) - key = '.'.join(parts) + key = ".".join(parts) index_list.append((key, direction)) # Don't add cls to a geo index if include_cls and direction not in ( - pymongo.GEO2D, pymongo.GEOHAYSTACK, 
pymongo.GEOSPHERE): - index_list.insert(0, ('_cls', 1)) + pymongo.GEO2D, + pymongo.GEOHAYSTACK, + pymongo.GEOSPHERE, + ): + index_list.insert(0, ("_cls", 1)) if index_list: - spec['fields'] = index_list + spec["fields"] = index_list return spec @classmethod - def _unique_with_indexes(cls, namespace=''): + def _unique_with_indexes(cls, namespace=""): """Find unique indexes in the document schema and return them.""" unique_indexes = [] for field_name, field in cls._fields.items(): @@ -878,42 +922,46 @@ class BaseDocument(object): # Add any unique_with fields to the back of the index spec if field.unique_with: - if isinstance(field.unique_with, six.string_types): + if isinstance(field.unique_with, str): field.unique_with = [field.unique_with] # Convert unique_with field names to real field names unique_with = [] for other_name in field.unique_with: - parts = other_name.split('.') + parts = other_name.split(".") # Lookup real name parts = cls._lookup_field(parts) name_parts = [part.db_field for part in parts] - unique_with.append('.'.join(name_parts)) + unique_with.append(".".join(name_parts)) # Unique field should be required parts[-1].required = True - sparse = (not sparse and - parts[-1].name not in cls.__dict__) + sparse = not sparse and parts[-1].name not in cls.__dict__ unique_fields += unique_with # Add the new index to the list fields = [ - ('%s%s' % (namespace, f), pymongo.ASCENDING) + ("{}{}".format(namespace, f), pymongo.ASCENDING) for f in unique_fields ] - index = {'fields': fields, 'unique': True, 'sparse': sparse} + index = {"fields": fields, "unique": True, "sparse": sparse} unique_indexes.append(index) - if field.__class__.__name__ in {'EmbeddedDocumentListField', - 'ListField', 'SortedListField'}: + if field.__class__.__name__ in { + "EmbeddedDocumentListField", + "ListField", + "SortedListField", + }: field = field.field # Grab any embedded document field unique indexes - if (field.__class__.__name__ == 'EmbeddedDocumentField' and - 
field.document_type != cls): - field_namespace = '%s.' % field_name + if ( + field.__class__.__name__ == "EmbeddedDocumentField" + and field.document_type != cls + ): + field_namespace = "%s." % field_name doc_cls = field.document_type unique_indexes += doc_cls._unique_with_indexes(field_namespace) @@ -925,32 +973,36 @@ class BaseDocument(object): geo_indices = [] inspected.append(cls) - geo_field_type_names = ('EmbeddedDocumentField', 'GeoPointField', - 'PointField', 'LineStringField', - 'PolygonField') + geo_field_type_names = ( + "EmbeddedDocumentField", + "GeoPointField", + "PointField", + "LineStringField", + "PolygonField", + ) - geo_field_types = tuple([_import_class(field) - for field in geo_field_type_names]) + geo_field_types = tuple( + [_import_class(field) for field in geo_field_type_names] + ) for field in cls._fields.values(): if not isinstance(field, geo_field_types): continue - if hasattr(field, 'document_type'): + if hasattr(field, "document_type"): field_cls = field.document_type if field_cls in inspected: continue - if hasattr(field_cls, '_geo_indices'): + if hasattr(field_cls, "_geo_indices"): geo_indices += field_cls._geo_indices( - inspected, parent_field=field.db_field) + inspected, parent_field=field.db_field + ) elif field._geo_index: field_name = field.db_field if parent_field: - field_name = '%s.%s' % (parent_field, field_name) - geo_indices.append({ - 'fields': [(field_name, field._geo_index)] - }) + field_name = "{}.{}".format(parent_field, field_name) + geo_indices.append({"fields": [(field_name, field._geo_index)]}) return geo_indices @@ -991,8 +1043,8 @@ class BaseDocument(object): # TODO this method is WAY too complicated. Simplify it. 
# TODO don't think returning a string for embedded non-existent fields is desired - ListField = _import_class('ListField') - DynamicField = _import_class('DynamicField') + ListField = _import_class("ListField") + DynamicField = _import_class("DynamicField") if not isinstance(parts, (list, tuple)): parts = [parts] @@ -1008,15 +1060,17 @@ class BaseDocument(object): # Look up first field from the document if field is None: - if field_name == 'pk': + if field_name == "pk": # Deal with "primary key" alias - field_name = cls._meta['id_field'] + field_name = cls._meta["id_field"] if field_name in cls._fields: field = cls._fields[field_name] elif cls._dynamic: field = DynamicField(db_field=field_name) - elif cls._meta.get('allow_inheritance') or cls._meta.get('abstract', False): + elif cls._meta.get("allow_inheritance") or cls._meta.get( + "abstract", False + ): # 744: in case the field is defined in a subclass for subcls in cls.__subclasses__(): try: @@ -1031,38 +1085,41 @@ class BaseDocument(object): else: raise LookUpError('Cannot resolve field "%s"' % field_name) else: - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") # If previous field was a reference, throw an error (we # cannot look up fields that are on references). if isinstance(field, (ReferenceField, GenericReferenceField)): - raise LookUpError('Cannot perform join in mongoDB: %s' % - '__'.join(parts)) + raise LookUpError( + "Cannot perform join in mongoDB: %s" % "__".join(parts) + ) # If the parent field has a "field" attribute which has a # lookup_member method, call it to find the field # corresponding to this iteration. 
- if hasattr(getattr(field, 'field', None), 'lookup_member'): + if hasattr(getattr(field, "field", None), "lookup_member"): new_field = field.field.lookup_member(field_name) # If the parent field is a DynamicField or if it's part of # a DynamicDocument, mark current field as a DynamicField # with db_name equal to the field name. - elif cls._dynamic and (isinstance(field, DynamicField) or - getattr(getattr(field, 'document_type', None), '_dynamic', None)): + elif cls._dynamic and ( + isinstance(field, DynamicField) + or getattr(getattr(field, "document_type", None), "_dynamic", None) + ): new_field = DynamicField(db_field=field_name) # Else, try to use the parent field's lookup_member method # to find the subfield. - elif hasattr(field, 'lookup_member'): + elif hasattr(field, "lookup_member"): new_field = field.lookup_member(field_name) # Raise a LookUpError if all the other conditions failed. else: raise LookUpError( - 'Cannot resolve subfield or operator {} ' - 'on the field {}'.format(field_name, field.name) + "Cannot resolve subfield or operator {} " + "on the field {}".format(field_name, field.name) ) # If current field still wasn't found and the parent field @@ -1081,23 +1138,24 @@ class BaseDocument(object): return fields @classmethod - def _translate_field_name(cls, field, sep='.'): + def _translate_field_name(cls, field, sep="."): """Translate a field attribute name to a database field name. """ parts = field.split(sep) parts = [f.db_field for f in cls._lookup_field(parts)] - return '.'.join(parts) + return ".".join(parts) def __set_field_display(self): """For each field that specifies choices, create a get__display method. 
""" - fields_with_choices = [(n, f) for n, f in self._fields.items() - if f.choices] + fields_with_choices = [(n, f) for n, f in self._fields.items() if f.choices] for attr_name, field in fields_with_choices: - setattr(self, - 'get_%s_display' % attr_name, - partial(self.__get_field_display, field=field)) + setattr( + self, + "get_%s_display" % attr_name, + partial(self.__get_field_display, field=field), + ) def __get_field_display(self, field): """Return the display value for a choice field""" @@ -1105,9 +1163,13 @@ class BaseDocument(object): if field.choices and isinstance(field.choices[0], (list, tuple)): if value is None: return None - sep = getattr(field, 'display_sep', ' ') - values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value] - return sep.join([ - six.text_type(dict(field.choices).get(val, val)) - for val in values or []]) + sep = getattr(field, "display_sep", " ") + values = ( + value + if field.__class__.__name__ in ("ListField", "SortedListField") + else [value] + ) + return sep.join( + [str(dict(field.choices).get(val, val)) for val in values or []] + ) return value diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index fe96f15b..7bab813c 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -4,25 +4,22 @@ import weakref from bson import DBRef, ObjectId, SON import pymongo -import six -from six import iteritems from mongoengine.base.common import UPDATE_OPERATORS -from mongoengine.base.datastructures import (BaseDict, BaseList, - EmbeddedDocumentList) +from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList from mongoengine.common import _import_class from mongoengine.errors import DeprecatedError, ValidationError -__all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', - 'GeoJsonBaseField') +__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") -class BaseField(object): +class BaseField: """A base class for 
fields in a MongoDB document. Instances of this class may be added to subclasses of `Document` to define a document's schema. .. versionchanged:: 0.5 - added verbose and help text """ + name = None _geo_index = False _auto_gen = False # Call `generate` to generate a value @@ -34,14 +31,23 @@ class BaseField(object): creation_counter = 0 auto_creation_counter = -1 - def __init__(self, db_field=None, name=None, required=False, default=None, - unique=False, unique_with=None, primary_key=False, - validation=None, choices=None, null=False, sparse=False, - **kwargs): + def __init__( + self, + db_field=None, + required=False, + default=None, + unique=False, + unique_with=None, + primary_key=False, + validation=None, + choices=None, + null=False, + sparse=False, + **kwargs + ): """ :param db_field: The database field to store this field in (defaults to the name of the field) - :param name: Deprecated - use db_field :param required: If the field is required. Whether it has to have a value or not. Defaults to False. :param default: (optional) The default value for this field if no value @@ -65,11 +71,8 @@ class BaseField(object): existing attributes. Common metadata includes `verbose_name` and `help_text`. """ - self.db_field = (db_field or name) if not primary_key else '_id' + self.db_field = db_field if not primary_key else "_id" - if name: - msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' - warnings.warn(msg, DeprecationWarning) self.required = required or primary_key self.default = default self.unique = bool(unique or unique_with) @@ -82,17 +85,14 @@ class BaseField(object): self._owner_document = None # Make sure db_field is a string (if it's explicitly defined). 
- if ( - self.db_field is not None and - not isinstance(self.db_field, six.string_types) - ): - raise TypeError('db_field should be a string.') + if self.db_field is not None and not isinstance(self.db_field, str): + raise TypeError("db_field should be a string.") # Make sure db_field doesn't contain any forbidden characters. - if isinstance(self.db_field, six.string_types) and ( - '.' in self.db_field or - '\0' in self.db_field or - self.db_field.startswith('$') + if isinstance(self.db_field, str) and ( + "." in self.db_field + or "\0" in self.db_field + or self.db_field.startswith("$") ): raise ValueError( 'field names cannot contain dots (".") or null characters ' @@ -102,15 +102,17 @@ class BaseField(object): # Detect and report conflicts between metadata and base properties. conflicts = set(dir(self)) & set(kwargs) if conflicts: - raise TypeError('%s already has attribute(s): %s' % ( - self.__class__.__name__, ', '.join(conflicts))) + raise TypeError( + "%s already has attribute(s): %s" + % (self.__class__.__name__, ", ".join(conflicts)) + ) # Assign metadata to the instance # This efficient method is available because no __slots__ are defined. self.__dict__.update(kwargs) # Adjust the appropriate creation counter, and save our local copy. - if self.db_field == '_id': + if self.db_field == "_id": self.creation_counter = BaseField.auto_creation_counter BaseField.auto_creation_counter -= 1 else: @@ -128,10 +130,9 @@ class BaseField(object): return instance._data.get(self.name) def __set__(self, instance, value): - """Descriptor for assigning a value to a field in a document. - """ - # If setting to None and there is a default - # Then set the value to the default value + """Descriptor for assigning a value to a field in a document.""" + # If setting to None and there is a default value provided for this + # field, then set the value to the default value. 
if value is None: if self.null: value = None @@ -142,24 +143,29 @@ class BaseField(object): if instance._initialised: try: - if (self.name not in instance._data or - instance._data[self.name] != value): + value_has_changed = ( + self.name not in instance._data + or instance._data[self.name] != value + ) + if value_has_changed: instance._mark_as_changed(self.name) except Exception: - # Values cant be compared eg: naive and tz datetimes - # So mark it as changed + # Some values can't be compared and throw an error when we + # attempt to do so (e.g. tz-naive and tz-aware datetimes). + # Mark the field as changed in such cases. instance._mark_as_changed(self.name) - EmbeddedDocument = _import_class('EmbeddedDocument') + EmbeddedDocument = _import_class("EmbeddedDocument") if isinstance(value, EmbeddedDocument): value._instance = weakref.proxy(instance) elif isinstance(value, (list, tuple)): for v in value: if isinstance(v, EmbeddedDocument): v._instance = weakref.proxy(instance) + instance._data[self.name] = value - def error(self, message='', errors=None, field_name=None): + def error(self, message="", errors=None, field_name=None): """Raise a ValidationError.""" field_name = field_name if field_name else self.name raise ValidationError(message, errors=errors, field_name=field_name) @@ -176,11 +182,11 @@ class BaseField(object): """Helper method to call to_mongo with proper inputs.""" f_inputs = self.to_mongo.__code__.co_varnames ex_vars = {} - if 'fields' in f_inputs: - ex_vars['fields'] = fields + if "fields" in f_inputs: + ex_vars["fields"] = fields - if 'use_db_field' in f_inputs: - ex_vars['use_db_field'] = use_db_field + if "use_db_field" in f_inputs: + ex_vars["use_db_field"] = use_db_field return self.to_mongo(value, **ex_vars) @@ -195,8 +201,8 @@ class BaseField(object): pass def _validate_choices(self, value): - Document = _import_class('Document') - EmbeddedDocument = _import_class('EmbeddedDocument') + Document = _import_class("Document") + 
EmbeddedDocument = _import_class("EmbeddedDocument") choice_list = self.choices if isinstance(next(iter(choice_list)), (list, tuple)): @@ -206,16 +212,12 @@ class BaseField(object): # Choices which are other types of Documents if isinstance(value, (Document, EmbeddedDocument)): if not any(isinstance(value, c) for c in choice_list): - self.error( - 'Value must be an instance of %s' % ( - six.text_type(choice_list) - ) - ) + self.error("Value must be an instance of %s" % (choice_list)) # Choices which are types other than Documents else: values = value if isinstance(value, (list, tuple)) else [value] if len(set(values) - set(choice_list)): - self.error('Value must be one of %s' % six.text_type(choice_list)) + self.error("Value must be one of %s" % str(choice_list)) def _validate(self, value, **kwargs): # Check the Choices Constraint @@ -231,13 +233,17 @@ class BaseField(object): # in favor of having validation raising a ValidationError ret = self.validation(value) if ret is not None: - raise DeprecatedError('validation argument for `%s` must not return anything, ' - 'it should raise a ValidationError if validation fails' % self.name) + raise DeprecatedError( + "validation argument for `%s` must not return anything, " + "it should raise a ValidationError if validation fails" + % self.name + ) except ValidationError as ex: self.error(str(ex)) else: - raise ValueError('validation argument for `"%s"` must be a ' - 'callable.' % self.name) + raise ValueError( + 'validation argument for `"%s"` must be a ' "callable." 
% self.name + ) self.validate(value, **kwargs) @@ -271,35 +277,41 @@ class ComplexBaseField(BaseField): # Document class being used rather than a document object return self - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') - EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") + EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") auto_dereference = instance._fields[self.name]._auto_dereference - dereference = (auto_dereference and - (self.field is None or isinstance(self.field, - (GenericReferenceField, ReferenceField)))) + dereference = auto_dereference and ( + self.field is None + or isinstance(self.field, (GenericReferenceField, ReferenceField)) + ) - _dereference = _import_class('DeReference')() + _dereference = _import_class("DeReference")() - if (instance._initialised and - dereference and - instance._data.get(self.name) and - not getattr(instance._data[self.name], '_dereferenced', False)): + if ( + instance._initialised + and dereference + and instance._data.get(self.name) + and not getattr(instance._data[self.name], "_dereferenced", False) + ): instance._data[self.name] = _dereference( - instance._data.get(self.name), max_depth=1, instance=instance, - name=self.name + instance._data.get(self.name), + max_depth=1, + instance=instance, + name=self.name, ) - if hasattr(instance._data[self.name], '_dereferenced'): + if hasattr(instance._data[self.name], "_dereferenced"): instance._data[self.name]._dereferenced = True - value = super(ComplexBaseField, self).__get__(instance, owner) + value = super().__get__(instance, owner) # Convert lists / values so we can watch for any changes on them if isinstance(value, (list, tuple)): - if (issubclass(type(self), EmbeddedDocumentListField) and - not isinstance(value, EmbeddedDocumentList)): + if 
issubclass(type(self), EmbeddedDocumentListField) and not isinstance( + value, EmbeddedDocumentList + ): value = EmbeddedDocumentList(value, instance, self.name) elif not isinstance(value, BaseList): value = BaseList(value, instance, self.name) @@ -308,12 +320,13 @@ class ComplexBaseField(BaseField): value = BaseDict(value, instance, self.name) instance._data[self.name] = value - if (auto_dereference and instance._initialised and - isinstance(value, (BaseList, BaseDict)) and - not value._dereferenced): - value = _dereference( - value, max_depth=1, instance=instance, name=self.name - ) + if ( + auto_dereference + and instance._initialised + and isinstance(value, (BaseList, BaseDict)) + and not value._dereferenced + ): + value = _dereference(value, max_depth=1, instance=instance, name=self.name) value._dereferenced = True instance._data[self.name] = value @@ -321,19 +334,19 @@ class ComplexBaseField(BaseField): def to_python(self, value): """Convert a MongoDB-compatible type to a Python type.""" - if isinstance(value, six.string_types): + if isinstance(value, str): return value - if hasattr(value, 'to_python'): + if hasattr(value, "to_python"): return value.to_python() - BaseDocument = _import_class('BaseDocument') + BaseDocument = _import_class("BaseDocument") if isinstance(value, BaseDocument): # Something is wrong, return the value as it is return value is_list = False - if not hasattr(value, 'items'): + if not hasattr(value, "items"): try: is_list = True value = {idx: v for idx, v in enumerate(value)} @@ -342,50 +355,54 @@ class ComplexBaseField(BaseField): if self.field: self.field._auto_dereference = self._auto_dereference - value_dict = {key: self.field.to_python(item) - for key, item in value.items()} + value_dict = { + key: self.field.to_python(item) for key, item in value.items() + } else: - Document = _import_class('Document') + Document = _import_class("Document") value_dict = {} for k, v in value.items(): if isinstance(v, Document): # We need the id from 
the saved object to create the DBRef if v.pk is None: - self.error('You can only reference documents once they' - ' have been saved to the database') + self.error( + "You can only reference documents once they" + " have been saved to the database" + ) collection = v._get_collection_name() value_dict[k] = DBRef(collection, v.pk) - elif hasattr(v, 'to_python'): + elif hasattr(v, "to_python"): value_dict[k] = v.to_python() else: value_dict[k] = self.to_python(v) if is_list: # Convert back to a list - return [v for _, v in sorted(value_dict.items(), - key=operator.itemgetter(0))] + return [ + v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) + ] return value_dict def to_mongo(self, value, use_db_field=True, fields=None): """Convert a Python type to a MongoDB-compatible type.""" - Document = _import_class('Document') - EmbeddedDocument = _import_class('EmbeddedDocument') - GenericReferenceField = _import_class('GenericReferenceField') + Document = _import_class("Document") + EmbeddedDocument = _import_class("EmbeddedDocument") + GenericReferenceField = _import_class("GenericReferenceField") - if isinstance(value, six.string_types): + if isinstance(value, str): return value - if hasattr(value, 'to_mongo'): + if hasattr(value, "to_mongo"): if isinstance(value, Document): return GenericReferenceField().to_mongo(value) cls = value.__class__ val = value.to_mongo(use_db_field, fields) # If it's a document that is not inherited add _cls if isinstance(value, EmbeddedDocument): - val['_cls'] = cls.__name__ + val["_cls"] = cls.__name__ return val is_list = False - if not hasattr(value, 'items'): + if not hasattr(value, "items"): try: is_list = True value = {k: v for k, v in enumerate(value)} @@ -395,48 +412,51 @@ class ComplexBaseField(BaseField): if self.field: value_dict = { key: self.field._to_mongo_safe_call(item, use_db_field, fields) - for key, item in iteritems(value) + for key, item in value.items() } else: value_dict = {} - for k, v in 
iteritems(value): + for k, v in value.items(): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: - self.error('You can only reference documents once they' - ' have been saved to the database') + self.error( + "You can only reference documents once they" + " have been saved to the database" + ) # If its a document that is not inheritable it won't have # any _cls data so make it a generic reference allows # us to dereference - meta = getattr(v, '_meta', {}) - allow_inheritance = meta.get('allow_inheritance') + meta = getattr(v, "_meta", {}) + allow_inheritance = meta.get("allow_inheritance") if not allow_inheritance and not self.field: value_dict[k] = GenericReferenceField().to_mongo(v) else: collection = v._get_collection_name() value_dict[k] = DBRef(collection, v.pk) - elif hasattr(v, 'to_mongo'): + elif hasattr(v, "to_mongo"): cls = v.__class__ val = v.to_mongo(use_db_field, fields) # If it's a document that is not inherited add _cls if isinstance(v, (Document, EmbeddedDocument)): - val['_cls'] = cls.__name__ + val["_cls"] = cls.__name__ value_dict[k] = val else: value_dict[k] = self.to_mongo(v, use_db_field, fields) if is_list: # Convert back to a list - return [v for _, v in sorted(value_dict.items(), - key=operator.itemgetter(0))] + return [ + v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) + ] return value_dict def validate(self, value): """If field is provided ensure the value is valid.""" errors = {} if self.field: - if hasattr(value, 'iteritems') or hasattr(value, 'items'): - sequence = iteritems(value) + if hasattr(value, "items"): + sequence = value.items() else: sequence = enumerate(value) for k, v in sequence: @@ -449,11 +469,12 @@ class ComplexBaseField(BaseField): if errors: field_class = self.field.__class__.__name__ - self.error('Invalid %s item (%s)' % (field_class, value), - errors=errors) + self.error( + "Invalid {} item ({})".format(field_class, value), errors=errors + ) 
# Don't allow empty values if required if self.required and not value: - self.error('Field is required and cannot be empty') + self.error("Field is required and cannot be empty") def prepare_query_value(self, op, value): return self.to_mongo(value) @@ -483,10 +504,9 @@ class ObjectIdField(BaseField): def to_mongo(self, value): if not isinstance(value, ObjectId): try: - return ObjectId(six.text_type(value)) + return ObjectId(str(value)) except Exception as e: - # e.message attribute has been deprecated since Python 2.6 - self.error(six.text_type(e)) + self.error(str(e)) return value def prepare_query_value(self, op, value): @@ -494,9 +514,9 @@ class ObjectIdField(BaseField): def validate(self, value): try: - ObjectId(six.text_type(value)) + ObjectId(str(value)) except Exception: - self.error('Invalid Object ID') + self.error("Invalid ObjectID") class GeoJsonBaseField(BaseField): @@ -506,72 +526,73 @@ class GeoJsonBaseField(BaseField): """ _geo_index = pymongo.GEOSPHERE - _type = 'GeoBase' + _type = "GeoBase" def __init__(self, auto_index=True, *args, **kwargs): """ :param bool auto_index: Automatically create a '2dsphere' index.\ Defaults to `True`. 
""" - self._name = '%sField' % self._type + self._name = "%sField" % self._type if not auto_index: self._geo_index = False - super(GeoJsonBaseField, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def validate(self, value): """Validate the GeoJson object based on its type.""" if isinstance(value, dict): - if set(value.keys()) == {'type', 'coordinates'}: - if value['type'] != self._type: - self.error('%s type must be "%s"' % - (self._name, self._type)) - return self.validate(value['coordinates']) + if set(value.keys()) == {"type", "coordinates"}: + if value["type"] != self._type: + self.error('{} type must be "{}"'.format(self._name, self._type)) + return self.validate(value["coordinates"]) else: - self.error('%s can only accept a valid GeoJson dictionary' - ' or lists of (x, y)' % self._name) + self.error( + "%s can only accept a valid GeoJson dictionary" + " or lists of (x, y)" % self._name + ) return elif not isinstance(value, (list, tuple)): - self.error('%s can only accept lists of [x, y]' % self._name) + self.error("%s can only accept lists of [x, y]" % self._name) return - validate = getattr(self, '_validate_%s' % self._type.lower()) + validate = getattr(self, "_validate_%s" % self._type.lower()) error = validate(value) if error: self.error(error) def _validate_polygon(self, value, top_level=True): if not isinstance(value, (list, tuple)): - return 'Polygons must contain list of linestrings' + return "Polygons must contain list of linestrings" # Quick and dirty validator try: value[0][0][0] except (TypeError, IndexError): - return 'Invalid Polygon must contain at least one valid linestring' + return "Invalid Polygon must contain at least one valid linestring" errors = [] for val in value: error = self._validate_linestring(val, False) if not error and val[0] != val[-1]: - error = 'LineStrings must start and end at the same point' + error = "LineStrings must start and end at the same point" if error and error not in errors: 
errors.append(error) if errors: if top_level: - return 'Invalid Polygon:\n%s' % ', '.join(errors) + return "Invalid Polygon:\n%s" % ", ".join(errors) else: - return '%s' % ', '.join(errors) + return "%s" % ", ".join(errors) def _validate_linestring(self, value, top_level=True): """Validate a linestring.""" if not isinstance(value, (list, tuple)): - return 'LineStrings must contain list of coordinate pairs' + return "LineStrings must contain list of coordinate pairs" # Quick and dirty validator try: value[0][0] except (TypeError, IndexError): - return 'Invalid LineString must contain at least one valid point' + return "Invalid LineString must contain at least one valid point" errors = [] for val in value: @@ -580,29 +601,30 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: if top_level: - return 'Invalid LineString:\n%s' % ', '.join(errors) + return "Invalid LineString:\n%s" % ", ".join(errors) else: - return '%s' % ', '.join(errors) + return "%s" % ", ".join(errors) def _validate_point(self, value): """Validate each set of coords""" if not isinstance(value, (list, tuple)): - return 'Points must be a list of coordinate pairs' + return "Points must be a list of coordinate pairs" elif not len(value) == 2: - return 'Value (%s) must be a two-dimensional point' % repr(value) - elif (not isinstance(value[0], (float, int)) or - not isinstance(value[1], (float, int))): - return 'Both values (%s) in point must be float or int' % repr(value) + return "Value (%s) must be a two-dimensional point" % repr(value) + elif not isinstance(value[0], (float, int)) or not isinstance( + value[1], (float, int) + ): + return "Both values (%s) in point must be float or int" % repr(value) def _validate_multipoint(self, value): if not isinstance(value, (list, tuple)): - return 'MultiPoint must be a list of Point' + return "MultiPoint must be a list of Point" # Quick and dirty validator try: value[0][0] except (TypeError, IndexError): - return 'Invalid MultiPoint must 
contain at least one valid point' + return "Invalid MultiPoint must contain at least one valid point" errors = [] for point in value: @@ -611,17 +633,17 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: - return '%s' % ', '.join(errors) + return "%s" % ", ".join(errors) def _validate_multilinestring(self, value, top_level=True): if not isinstance(value, (list, tuple)): - return 'MultiLineString must be a list of LineString' + return "MultiLineString must be a list of LineString" # Quick and dirty validator try: value[0][0][0] except (TypeError, IndexError): - return 'Invalid MultiLineString must contain at least one valid linestring' + return "Invalid MultiLineString must contain at least one valid linestring" errors = [] for linestring in value: @@ -631,19 +653,19 @@ class GeoJsonBaseField(BaseField): if errors: if top_level: - return 'Invalid MultiLineString:\n%s' % ', '.join(errors) + return "Invalid MultiLineString:\n%s" % ", ".join(errors) else: - return '%s' % ', '.join(errors) + return "%s" % ", ".join(errors) def _validate_multipolygon(self, value): if not isinstance(value, (list, tuple)): - return 'MultiPolygon must be a list of Polygon' + return "MultiPolygon must be a list of Polygon" # Quick and dirty validator try: value[0][0][0][0] except (TypeError, IndexError): - return 'Invalid MultiPolygon must contain at least one valid Polygon' + return "Invalid MultiPolygon must contain at least one valid Polygon" errors = [] for polygon in value: @@ -652,9 +674,9 @@ class GeoJsonBaseField(BaseField): errors.append(error) if errors: - return 'Invalid MultiPolygon:\n%s' % ', '.join(errors) + return "Invalid MultiPolygon:\n%s" % ", ".join(errors) def to_mongo(self, value): if isinstance(value, dict): return value - return SON([('type', self._type), ('coordinates', value)]) + return SON([("type", self._type), ("coordinates", value)]) diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 6f507eaa..b4479b97 100644 
--- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -1,18 +1,19 @@ +import itertools import warnings -import six -from six import iteritems, itervalues - from mongoengine.base.common import _document_registry from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField from mongoengine.common import _import_class from mongoengine.errors import InvalidDocumentError -from mongoengine.queryset import (DO_NOTHING, DoesNotExist, - MultipleObjectsReturned, - QuerySetManager) +from mongoengine.queryset import ( + DO_NOTHING, + DoesNotExist, + MultipleObjectsReturned, + QuerySetManager, +) -__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') +__all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass") class DocumentMetaclass(type): @@ -21,49 +22,51 @@ class DocumentMetaclass(type): # TODO lower complexity of this method def __new__(mcs, name, bases, attrs): flattened_bases = mcs._get_bases(bases) - super_new = super(DocumentMetaclass, mcs).__new__ + super_new = super().__new__ # If a base class just call super - metaclass = attrs.get('my_metaclass') + metaclass = attrs.get("my_metaclass") if metaclass and issubclass(metaclass, DocumentMetaclass): return super_new(mcs, name, bases, attrs) - attrs['_is_document'] = attrs.get('_is_document', False) - attrs['_cached_reference_fields'] = [] + attrs["_is_document"] = attrs.get("_is_document", False) + attrs["_cached_reference_fields"] = [] # EmbeddedDocuments could have meta data for inheritance - if 'meta' in attrs: - attrs['_meta'] = attrs.pop('meta') + if "meta" in attrs: + attrs["_meta"] = attrs.pop("meta") # EmbeddedDocuments should inherit meta data - if '_meta' not in attrs: + if "_meta" not in attrs: meta = MetaDict() for base in flattened_bases[::-1]: # Add any mixin metadata from plain objects - if hasattr(base, 'meta'): + if hasattr(base, "meta"): meta.merge(base.meta) - elif hasattr(base, '_meta'): + elif hasattr(base, "_meta"): meta.merge(base._meta) - 
attrs['_meta'] = meta - attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract + attrs["_meta"] = meta + attrs["_meta"][ + "abstract" + ] = False # 789: EmbeddedDocument shouldn't inherit abstract # If allow_inheritance is True, add a "_cls" string field to the attrs - if attrs['_meta'].get('allow_inheritance'): - StringField = _import_class('StringField') - attrs['_cls'] = StringField() + if attrs["_meta"].get("allow_inheritance"): + StringField = _import_class("StringField") + attrs["_cls"] = StringField() # Handle document Fields # Merge all fields from subclasses doc_fields = {} for base in flattened_bases[::-1]: - if hasattr(base, '_fields'): + if hasattr(base, "_fields"): doc_fields.update(base._fields) # Standard object mixin - merge in any Fields - if not hasattr(base, '_meta'): + if not hasattr(base, "_meta"): base_fields = {} - for attr_name, attr_value in iteritems(base.__dict__): + for attr_name, attr_value in base.__dict__.items(): if not isinstance(attr_value, BaseField): continue attr_value.name = attr_name @@ -75,7 +78,7 @@ class DocumentMetaclass(type): # Discover any document fields field_names = {} - for attr_name, attr_value in iteritems(attrs): + for attr_name, attr_value in attrs.items(): if not isinstance(attr_value, BaseField): continue attr_value.name = attr_name @@ -84,27 +87,29 @@ class DocumentMetaclass(type): doc_fields[attr_name] = attr_value # Count names to ensure no db_field redefinitions - field_names[attr_value.db_field] = field_names.get( - attr_value.db_field, 0) + 1 + field_names[attr_value.db_field] = ( + field_names.get(attr_value.db_field, 0) + 1 + ) # Ensure no duplicate db_fields duplicate_db_fields = [k for k, v in field_names.items() if v > 1] if duplicate_db_fields: - msg = ('Multiple db_fields defined for: %s ' % - ', '.join(duplicate_db_fields)) + msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields) raise InvalidDocumentError(msg) # Set _fields and db_field maps - 
attrs['_fields'] = doc_fields - attrs['_db_field_map'] = {k: getattr(v, 'db_field', k) - for k, v in doc_fields.items()} - attrs['_reverse_db_field_map'] = { - v: k for k, v in attrs['_db_field_map'].items() + attrs["_fields"] = doc_fields + attrs["_db_field_map"] = { + k: getattr(v, "db_field", k) for k, v in doc_fields.items() + } + attrs["_reverse_db_field_map"] = { + v: k for k, v in attrs["_db_field_map"].items() } - attrs['_fields_ordered'] = tuple(i[1] for i in sorted( - (v.creation_counter, v.name) - for v in itervalues(doc_fields))) + attrs["_fields_ordered"] = tuple( + i[1] + for i in sorted((v.creation_counter, v.name) for v in doc_fields.values()) + ) # # Set document hierarchy @@ -112,32 +117,34 @@ class DocumentMetaclass(type): superclasses = () class_name = [name] for base in flattened_bases: - if (not getattr(base, '_is_base_cls', True) and - not getattr(base, '_meta', {}).get('abstract', True)): + if not getattr(base, "_is_base_cls", True) and not getattr( + base, "_meta", {} + ).get("abstract", True): # Collate hierarchy for _cls and _subclasses class_name.append(base.__name__) - if hasattr(base, '_meta'): + if hasattr(base, "_meta"): # Warn if allow_inheritance isn't set and prevent # inheritance of classes where inheritance is set to False - allow_inheritance = base._meta.get('allow_inheritance') - if not allow_inheritance and not base._meta.get('abstract'): - raise ValueError('Document %s may not be subclassed. ' - 'To enable inheritance, use the "allow_inheritance" meta attribute.' % - base.__name__) + allow_inheritance = base._meta.get("allow_inheritance") + if not allow_inheritance and not base._meta.get("abstract"): + raise ValueError( + "Document %s may not be subclassed. " + 'To enable inheritance, use the "allow_inheritance" meta attribute.' 
+ % base.__name__ + ) # Get superclasses from last base superclass - document_bases = [b for b in flattened_bases - if hasattr(b, '_class_name')] + document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")] if document_bases: superclasses = document_bases[0]._superclasses - superclasses += (document_bases[0]._class_name, ) + superclasses += (document_bases[0]._class_name,) - _cls = '.'.join(reversed(class_name)) - attrs['_class_name'] = _cls - attrs['_superclasses'] = superclasses - attrs['_subclasses'] = (_cls, ) - attrs['_types'] = attrs['_subclasses'] # TODO depreciate _types + _cls = ".".join(reversed(class_name)) + attrs["_class_name"] = _cls + attrs["_superclasses"] = superclasses + attrs["_subclasses"] = (_cls,) + attrs["_types"] = attrs["_subclasses"] # TODO depreciate _types # Create the new_class new_class = super_new(mcs, name, bases, attrs) @@ -148,8 +155,12 @@ class DocumentMetaclass(type): base._subclasses += (_cls,) base._types = base._subclasses # TODO depreciate _types - (Document, EmbeddedDocument, DictField, - CachedReferenceField) = mcs._import_classes() + ( + Document, + EmbeddedDocument, + DictField, + CachedReferenceField, + ) = mcs._import_classes() if issubclass(new_class, Document): new_class._collection = None @@ -157,63 +168,50 @@ class DocumentMetaclass(type): # Add class to the _document_registry _document_registry[new_class._class_name] = new_class - # In Python 2, User-defined methods objects have special read-only - # attributes 'im_func' and 'im_self' which contain the function obj - # and class instance object respectively. With Python 3 these special - # attributes have been replaced by __func__ and __self__. The Blinker - # module continues to use im_func and im_self, so the code below - # copies __func__ into im_func and __self__ into im_self for - # classmethod objects in Document derived classes. 
- if six.PY3: - for val in new_class.__dict__.values(): - if isinstance(val, classmethod): - f = val.__get__(new_class) - if hasattr(f, '__func__') and not hasattr(f, 'im_func'): - f.__dict__.update({'im_func': getattr(f, '__func__')}) - if hasattr(f, '__self__') and not hasattr(f, 'im_self'): - f.__dict__.update({'im_self': getattr(f, '__self__')}) - # Handle delete rules - for field in itervalues(new_class._fields): + for field in new_class._fields.values(): f = field if f.owner_document is None: f.owner_document = new_class - delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) + delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING) if isinstance(f, CachedReferenceField): if issubclass(new_class, EmbeddedDocument): - raise InvalidDocumentError('CachedReferenceFields is not ' - 'allowed in EmbeddedDocuments') + raise InvalidDocumentError( + "CachedReferenceFields is not allowed in EmbeddedDocuments" + ) if f.auto_sync: f.start_listener() f.document_type._cached_reference_fields.append(f) - if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): - delete_rule = getattr(f.field, - 'reverse_delete_rule', - DO_NOTHING) + if isinstance(f, ComplexBaseField) and hasattr(f, "field"): + delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING) if isinstance(f, DictField) and delete_rule != DO_NOTHING: - msg = ('Reverse delete rules are not supported ' - 'for %s (field: %s)' % - (field.__class__.__name__, field.name)) + msg = ( + "Reverse delete rules are not supported " + "for %s (field: %s)" % (field.__class__.__name__, field.name) + ) raise InvalidDocumentError(msg) f = field.field if delete_rule != DO_NOTHING: if issubclass(new_class, EmbeddedDocument): - msg = ('Reverse delete rules are not supported for ' - 'EmbeddedDocuments (field: %s)' % field.name) + msg = ( + "Reverse delete rules are not supported for " + "EmbeddedDocuments (field: %s)" % field.name + ) raise InvalidDocumentError(msg) - f.document_type.register_delete_rule(new_class, - 
field.name, delete_rule) + f.document_type.register_delete_rule(new_class, field.name, delete_rule) - if (field.name and hasattr(Document, field.name) and - EmbeddedDocument not in new_class.mro()): - msg = ('%s is a document method and not a valid ' - 'field name' % field.name) + if ( + field.name + and hasattr(Document, field.name) + and EmbeddedDocument not in new_class.mro() + ): + msg = "%s is a document method and not a valid field name" % field.name raise InvalidDocumentError(msg) return new_class @@ -233,15 +231,14 @@ class DocumentMetaclass(type): if base is object: continue yield base - for child_base in mcs.__get_bases(base.__bases__): - yield child_base + yield from mcs.__get_bases(base.__bases__) @classmethod def _import_classes(mcs): - Document = _import_class('Document') - EmbeddedDocument = _import_class('EmbeddedDocument') - DictField = _import_class('DictField') - CachedReferenceField = _import_class('CachedReferenceField') + Document = _import_class("Document") + EmbeddedDocument = _import_class("EmbeddedDocument") + DictField = _import_class("DictField") + CachedReferenceField = _import_class("CachedReferenceField") return Document, EmbeddedDocument, DictField, CachedReferenceField @@ -252,68 +249,69 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): def __new__(mcs, name, bases, attrs): flattened_bases = mcs._get_bases(bases) - super_new = super(TopLevelDocumentMetaclass, mcs).__new__ + super_new = super().__new__ # Set default _meta data if base class, otherwise get user defined meta - if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: + if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: # defaults - attrs['_meta'] = { - 'abstract': True, - 'max_documents': None, - 'max_size': None, - 'ordering': [], # default ordering applied at runtime - 'indexes': [], # indexes to be ensured at runtime - 'id_field': None, - 'index_background': False, - 'index_drop_dups': False, - 'index_opts': None, - 'delete_rules': None, - + 
attrs["_meta"] = { + "abstract": True, + "max_documents": None, + "max_size": None, + "ordering": [], # default ordering applied at runtime + "indexes": [], # indexes to be ensured at runtime + "id_field": None, + "index_background": False, + "index_opts": None, + "delete_rules": None, # allow_inheritance can be True, False, and None. True means # "allow inheritance", False means "don't allow inheritance", # None means "do whatever your parent does, or don't allow # inheritance if you're a top-level class". - 'allow_inheritance': None, + "allow_inheritance": None, } - attrs['_is_base_cls'] = True - attrs['_meta'].update(attrs.get('meta', {})) + attrs["_is_base_cls"] = True + attrs["_meta"].update(attrs.get("meta", {})) else: - attrs['_meta'] = attrs.get('meta', {}) + attrs["_meta"] = attrs.get("meta", {}) # Explicitly set abstract to false unless set - attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) - attrs['_is_base_cls'] = False + attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False) + attrs["_is_base_cls"] = False # Set flag marking as document class - as opposed to an object mixin - attrs['_is_document'] = True + attrs["_is_document"] = True # Ensure queryset_class is inherited - if 'objects' in attrs: - manager = attrs['objects'] - if hasattr(manager, 'queryset_class'): - attrs['_meta']['queryset_class'] = manager.queryset_class + if "objects" in attrs: + manager = attrs["objects"] + if hasattr(manager, "queryset_class"): + attrs["_meta"]["queryset_class"] = manager.queryset_class # Clean up top level meta - if 'meta' in attrs: - del attrs['meta'] + if "meta" in attrs: + del attrs["meta"] # Find the parent document class - parent_doc_cls = [b for b in flattened_bases - if b.__class__ == TopLevelDocumentMetaclass] + parent_doc_cls = [ + b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass + ] parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] # Prevent classes setting collection different to 
their parents # If parent wasn't an abstract class - if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and - not parent_doc_cls._meta.get('abstract', True)): - msg = 'Trying to set a collection on a subclass (%s)' % name + if ( + parent_doc_cls + and "collection" in attrs.get("_meta", {}) + and not parent_doc_cls._meta.get("abstract", True) + ): + msg = "Trying to set a collection on a subclass (%s)" % name warnings.warn(msg, SyntaxWarning) - del attrs['_meta']['collection'] + del attrs["_meta"]["collection"] # Ensure abstract documents have abstract bases - if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): - if (parent_doc_cls and - not parent_doc_cls._meta.get('abstract', False)): - msg = 'Abstract document cannot have non-abstract base' + if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"): + if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False): + msg = "Abstract document cannot have non-abstract base" raise ValueError(msg) return super_new(mcs, name, bases, attrs) @@ -322,38 +320,43 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): meta = MetaDict() for base in flattened_bases[::-1]: # Add any mixin metadata from plain objects - if hasattr(base, 'meta'): + if hasattr(base, "meta"): meta.merge(base.meta) - elif hasattr(base, '_meta'): + elif hasattr(base, "_meta"): meta.merge(base._meta) # Set collection in the meta if its callable - if (getattr(base, '_is_document', False) and - not base._meta.get('abstract')): - collection = meta.get('collection', None) + if getattr(base, "_is_document", False) and not base._meta.get("abstract"): + collection = meta.get("collection", None) if callable(collection): - meta['collection'] = collection(base) + meta["collection"] = collection(base) - meta.merge(attrs.get('_meta', {})) # Top level meta + meta.merge(attrs.get("_meta", {})) # Top level meta # Only simple classes (i.e. direct subclasses of Document) may set # allow_inheritance to False. 
If the base Document allows inheritance, # none of its subclasses can override allow_inheritance to False. - simple_class = all([b._meta.get('abstract') - for b in flattened_bases if hasattr(b, '_meta')]) + simple_class = all( + [b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")] + ) if ( - not simple_class and - meta['allow_inheritance'] is False and - not meta['abstract'] + not simple_class + and meta["allow_inheritance"] is False + and not meta["abstract"] ): - raise ValueError('Only direct subclasses of Document may set ' - '"allow_inheritance" to False') + raise ValueError( + "Only direct subclasses of Document may set " + '"allow_inheritance" to False' + ) # Set default collection name - if 'collection' not in meta: - meta['collection'] = ''.join('_%s' % c if c.isupper() else c - for c in name).strip('_').lower() - attrs['_meta'] = meta + if "collection" not in meta: + meta["collection"] = ( + "".join("_%s" % c if c.isupper() else c for c in name) + .strip("_") + .lower() + ) + attrs["_meta"] = meta # Call super and get the new class new_class = super_new(mcs, name, bases, attrs) @@ -361,82 +364,96 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): meta = new_class._meta # Set index specifications - meta['index_specs'] = new_class._build_index_specs(meta['indexes']) + meta["index_specs"] = new_class._build_index_specs(meta["indexes"]) # If collection is a callable - call it and set the value - collection = meta.get('collection') + collection = meta.get("collection") if callable(collection): - new_class._meta['collection'] = collection(new_class) + new_class._meta["collection"] = collection(new_class) # Provide a default queryset unless exists or one has been set - if 'objects' not in dir(new_class): + if "objects" not in dir(new_class): new_class.objects = QuerySetManager() # Validate the fields and set primary key if needed - for field_name, field in iteritems(new_class._fields): + for field_name, field in new_class._fields.items(): 
if field.primary_key: # Ensure only one primary key is set - current_pk = new_class._meta.get('id_field') + current_pk = new_class._meta.get("id_field") if current_pk and current_pk != field_name: - raise ValueError('Cannot override primary key field') + raise ValueError("Cannot override primary key field") # Set primary key if not current_pk: - new_class._meta['id_field'] = field_name + new_class._meta["id_field"] = field_name new_class.id = field - # Set primary key if not defined by the document - new_class._auto_id_field = getattr(parent_doc_cls, - '_auto_id_field', False) - if not new_class._meta.get('id_field'): - # After 0.10, find not existing names, instead of overwriting + # If the document doesn't explicitly define a primary key field, create + # one. Make it an ObjectIdField and give it a non-clashing name ("id" + # by default, but can be different if that one's taken). + if not new_class._meta.get("id_field"): id_name, id_db_name = mcs.get_auto_id_names(new_class) - new_class._auto_id_field = True - new_class._meta['id_field'] = id_name + new_class._meta["id_field"] = id_name new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) new_class._fields[id_name].name = id_name new_class.id = new_class._fields[id_name] new_class._db_field_map[id_name] = id_db_name new_class._reverse_db_field_map[id_db_name] = id_name - # Prepend id field to _fields_ordered - new_class._fields_ordered = (id_name, ) + new_class._fields_ordered - # Merge in exceptions with parent hierarchy + # Prepend the ID field to _fields_ordered (so that it's *always* + # the first field). + new_class._fields_ordered = (id_name,) + new_class._fields_ordered + + # Merge in exceptions with parent hierarchy. 
exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) - module = attrs.get('__module__') + module = attrs.get("__module__") for exc in exceptions_to_merge: name = exc.__name__ - parents = tuple(getattr(base, name) for base in flattened_bases - if hasattr(base, name)) or (exc,) - # Create new exception and set to new_class - exception = type(name, parents, {'__module__': module}) + parents = tuple( + getattr(base, name) for base in flattened_bases if hasattr(base, name) + ) or (exc,) + + # Create a new exception and set it as an attribute on the new + # class. + exception = type(name, parents, {"__module__": module}) setattr(new_class, name, exception) return new_class @classmethod def get_auto_id_names(mcs, new_class): - id_name, id_db_name = ('id', '_id') - if id_name not in new_class._fields and \ - id_db_name not in (v.db_field for v in new_class._fields.values()): + """Find a name for the automatic ID field for the given new class. + + Return a two-element tuple where the first item is the field name (i.e. + the attribute name on the object) and the second element is the DB + field name (i.e. the name of the key stored in MongoDB). + + Defaults to ('id', '_id'), or generates a non-clashing name in the form + of ('auto_id_X', '_auto_id_X') if the default name is already taken. 
+ """ + id_name, id_db_name = ("id", "_id") + existing_fields = {field_name for field_name in new_class._fields} + existing_db_fields = {v.db_field for v in new_class._fields.values()} + if id_name not in existing_fields and id_db_name not in existing_db_fields: return id_name, id_db_name - id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0 - while id_name in new_class._fields or \ - id_db_name in (v.db_field for v in new_class._fields.values()): - id_name = '{0}_{1}'.format(id_basename, i) - id_db_name = '{0}_{1}'.format(id_db_basename, i) - i += 1 - return id_name, id_db_name + + id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) + for i in itertools.count(): + id_name = "{}_{}".format(id_basename, i) + id_db_name = "{}_{}".format(id_db_basename, i) + if id_name not in existing_fields and id_db_name not in existing_db_fields: + return id_name, id_db_name class MetaDict(dict): """Custom dictionary for meta classes. Handles the merging of set indexes """ - _merge_options = ('indexes',) + + _merge_options = ("indexes",) def merge(self, new_options): - for k, v in iteritems(new_options): + for k, v in new_options.items(): if k in self._merge_options: self[k] = self.get(k, []) + v else: @@ -445,4 +462,5 @@ class MetaDict(dict): class BasesTuple(tuple): """Special class to handle introspection of bases tuple in __new__""" + pass diff --git a/mongoengine/base/utils.py b/mongoengine/base/utils.py index 8f27ee14..7753ad50 100644 --- a/mongoengine/base/utils.py +++ b/mongoengine/base/utils.py @@ -1,7 +1,7 @@ import re -class LazyRegexCompiler(object): +class LazyRegexCompiler: """Descriptor to allow lazy compilation of regex""" def __init__(self, pattern, flags=0): diff --git a/mongoengine/common.py b/mongoengine/common.py index bcdea194..640384ec 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -19,34 +19,44 @@ def _import_class(cls_name): if cls_name in _class_registry_cache: return _class_registry_cache.get(cls_name) - doc_classes = 
('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', - 'MapReduceDocument') + doc_classes = ( + "Document", + "DynamicEmbeddedDocument", + "EmbeddedDocument", + "MapReduceDocument", + ) # Field Classes if not _field_list_cache: from mongoengine.fields import __all__ as fields + _field_list_cache.extend(fields) from mongoengine.base.fields import __all__ as fields + _field_list_cache.extend(fields) field_classes = _field_list_cache - deref_classes = ('DeReference',) + deref_classes = ("DeReference",) - if cls_name == 'BaseDocument': + if cls_name == "BaseDocument": from mongoengine.base import document as module - import_classes = ['BaseDocument'] + + import_classes = ["BaseDocument"] elif cls_name in doc_classes: from mongoengine import document as module + import_classes = doc_classes elif cls_name in field_classes: from mongoengine import fields as module + import_classes = field_classes elif cls_name in deref_classes: from mongoengine import dereference as module + import_classes = deref_classes else: - raise ValueError('No import set for: %s' % cls_name) + raise ValueError("No import set for: %s" % cls_name) for cls in import_classes: _class_registry_cache[cls] = getattr(module, cls) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 6a613a42..13d170ec 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -1,23 +1,22 @@ from pymongo import MongoClient, ReadPreference, uri_parser from pymongo.database import _check_name -import six __all__ = [ - 'DEFAULT_CONNECTION_NAME', - 'DEFAULT_DATABASE_NAME', - 'MongoEngineConnectionError', - 'connect', - 'disconnect', - 'disconnect_all', - 'get_connection', - 'get_db', - 'register_connection', + "DEFAULT_CONNECTION_NAME", + "DEFAULT_DATABASE_NAME", + "ConnectionFailure", + "connect", + "disconnect", + "disconnect_all", + "get_connection", + "get_db", + "register_connection", ] -DEFAULT_CONNECTION_NAME = 'default' -DEFAULT_DATABASE_NAME = 'test' -DEFAULT_HOST = 'localhost' 
+DEFAULT_CONNECTION_NAME = "default" +DEFAULT_DATABASE_NAME = "test" +DEFAULT_HOST = "localhost" DEFAULT_PORT = 27017 _connection_settings = {} @@ -27,10 +26,11 @@ _dbs = {} READ_PREFERENCE = ReadPreference.PRIMARY -class MongoEngineConnectionError(Exception): +class ConnectionFailure(Exception): """Error raised when the database connection can't be established or when a connection with a requested alias can't be retrieved. """ + pass @@ -38,19 +38,24 @@ def _check_db_name(name): """Check if a database name is valid. This functionality is copied from pymongo Database class constructor. """ - if not isinstance(name, six.string_types): - raise TypeError('name must be an instance of %s' % six.string_types) - elif name != '$external': + if not isinstance(name, str): + raise TypeError("name must be an instance of %s" % str) + elif name != "$external": _check_name(name) def _get_connection_settings( - db=None, name=None, host=None, port=None, - read_preference=READ_PREFERENCE, - username=None, password=None, - authentication_source=None, - authentication_mechanism=None, - **kwargs): + db=None, + name=None, + host=None, + port=None, + read_preference=READ_PREFERENCE, + username=None, + password=None, + authentication_source=None, + authentication_mechanism=None, + **kwargs +): """Get the connection settings as a dict : param db: the name of the database to use, for compatibility with connect @@ -73,53 +78,61 @@ def _get_connection_settings( .. 
versionchanged:: 0.10.6 - added mongomock support """ conn_settings = { - 'name': name or db or DEFAULT_DATABASE_NAME, - 'host': host or DEFAULT_HOST, - 'port': port or DEFAULT_PORT, - 'read_preference': read_preference, - 'username': username, - 'password': password, - 'authentication_source': authentication_source, - 'authentication_mechanism': authentication_mechanism + "name": name or db or DEFAULT_DATABASE_NAME, + "host": host or DEFAULT_HOST, + "port": port or DEFAULT_PORT, + "read_preference": read_preference, + "username": username, + "password": password, + "authentication_source": authentication_source, + "authentication_mechanism": authentication_mechanism, } - _check_db_name(conn_settings['name']) - conn_host = conn_settings['host'] + _check_db_name(conn_settings["name"]) + conn_host = conn_settings["host"] # Host can be a list or a string, so if string, force to a list. - if isinstance(conn_host, six.string_types): + if isinstance(conn_host, str): conn_host = [conn_host] resolved_hosts = [] for entity in conn_host: # Handle Mongomock - if entity.startswith('mongomock://'): - conn_settings['is_mock'] = True + if entity.startswith("mongomock://"): + conn_settings["is_mock"] = True # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` - resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1)) + new_entity = entity.replace("mongomock://", "mongodb://", 1) + resolved_hosts.append(new_entity) + + uri_dict = uri_parser.parse_uri(new_entity) + + database = uri_dict.get("database") + if database: + conn_settings["name"] = database # Handle URI style connections, only updating connection params which # were explicitly specified in the URI. 
- elif '://' in entity: + elif "://" in entity: uri_dict = uri_parser.parse_uri(entity) resolved_hosts.append(entity) - if uri_dict.get('database'): - conn_settings['name'] = uri_dict.get('database') + database = uri_dict.get("database") + if database: + conn_settings["name"] = database - for param in ('read_preference', 'username', 'password'): + for param in ("read_preference", "username", "password"): if uri_dict.get(param): conn_settings[param] = uri_dict[param] - uri_options = uri_dict['options'] - if 'replicaset' in uri_options: - conn_settings['replicaSet'] = uri_options['replicaset'] - if 'authsource' in uri_options: - conn_settings['authentication_source'] = uri_options['authsource'] - if 'authmechanism' in uri_options: - conn_settings['authentication_mechanism'] = uri_options['authmechanism'] - if 'readpreference' in uri_options: + uri_options = uri_dict["options"] + if "replicaset" in uri_options: + conn_settings["replicaSet"] = uri_options["replicaset"] + if "authsource" in uri_options: + conn_settings["authentication_source"] = uri_options["authsource"] + if "authmechanism" in uri_options: + conn_settings["authentication_mechanism"] = uri_options["authmechanism"] + if "readpreference" in uri_options: read_preferences = ( ReadPreference.NEAREST, ReadPreference.PRIMARY, @@ -133,40 +146,47 @@ def _get_connection_settings( # int (e.g. 3). # TODO simplify the code below once we drop support for # PyMongo v3.4. 
- read_pf_mode = uri_options['readpreference'] - if isinstance(read_pf_mode, six.string_types): + read_pf_mode = uri_options["readpreference"] + if isinstance(read_pf_mode, str): read_pf_mode = read_pf_mode.lower() for preference in read_preferences: if ( - preference.name.lower() == read_pf_mode or - preference.mode == read_pf_mode + preference.name.lower() == read_pf_mode + or preference.mode == read_pf_mode ): - conn_settings['read_preference'] = preference + conn_settings["read_preference"] = preference break else: resolved_hosts.append(entity) - conn_settings['host'] = resolved_hosts + conn_settings["host"] = resolved_hosts # Deprecated parameters that should not be passed on - kwargs.pop('slaves', None) - kwargs.pop('is_slave', None) + kwargs.pop("slaves", None) + kwargs.pop("is_slave", None) conn_settings.update(kwargs) return conn_settings -def register_connection(alias, db=None, name=None, host=None, port=None, - read_preference=READ_PREFERENCE, - username=None, password=None, - authentication_source=None, - authentication_mechanism=None, - **kwargs): +def register_connection( + alias, + db=None, + name=None, + host=None, + port=None, + read_preference=READ_PREFERENCE, + username=None, + password=None, + authentication_source=None, + authentication_mechanism=None, + **kwargs +): """Register the connection settings. : param alias: the name that will be used to refer to this connection throughout MongoEngine - : param name: the name of the specific database to use : param db: the name of the database to use, for compatibility with connect + : param name: the name of the specific database to use : param host: the host name of the: program: `mongod` instance to connect to : param port: the port that the: program: `mongod` instance is running on : param read_preference: The read preference for the collection @@ -185,12 +205,17 @@ def register_connection(alias, db=None, name=None, host=None, port=None, .. 
versionchanged:: 0.10.6 - added mongomock support """ conn_settings = _get_connection_settings( - db=db, name=name, host=host, port=port, + db=db, + name=name, + host=host, + port=port, read_preference=read_preference, - username=username, password=password, + username=username, + password=password, authentication_source=authentication_source, authentication_mechanism=authentication_mechanism, - **kwargs) + **kwargs + ) _connection_settings[alias] = conn_settings @@ -206,7 +231,7 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME): if alias in _dbs: # Detach all cached collections in Documents for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): - if issubclass(doc_cls, Document): # Skip EmbeddedDocument + if issubclass(doc_cls, Document): # Skip EmbeddedDocument doc_cls._disconnect() del _dbs[alias] @@ -234,22 +259,24 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): return _connections[alias] # Validate that the requested alias exists in the _connection_settings. - # Raise MongoEngineConnectionError if it doesn't. + # Raise ConnectionFailure if it doesn't. 
if alias not in _connection_settings: if alias == DEFAULT_CONNECTION_NAME: - msg = 'You have not defined a default connection' + msg = "You have not defined a default connection" else: msg = 'Connection with alias "%s" has not been defined' % alias - raise MongoEngineConnectionError(msg) + raise ConnectionFailure(msg) def _clean_settings(settings_dict): irrelevant_fields_set = { - 'name', 'username', 'password', - 'authentication_source', 'authentication_mechanism' + "name", + "username", + "password", + "authentication_source", + "authentication_mechanism", } return { - k: v for k, v in settings_dict.items() - if k not in irrelevant_fields_set + k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set } raw_conn_settings = _connection_settings[alias].copy() @@ -260,13 +287,12 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): conn_settings = _clean_settings(raw_conn_settings) # Determine if we should use PyMongo's or mongomock's MongoClient. - is_mock = conn_settings.pop('is_mock', False) + is_mock = conn_settings.pop("is_mock", False) if is_mock: try: import mongomock except ImportError: - raise RuntimeError('You need mongomock installed to mock ' - 'MongoEngine.') + raise RuntimeError("You need mongomock installed to mock MongoEngine.") connection_class = mongomock.MongoClient else: connection_class = MongoClient @@ -277,9 +303,7 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): connection = existing_connection else: connection = _create_connection( - alias=alias, - connection_class=connection_class, - **conn_settings + alias=alias, connection_class=connection_class, **conn_settings ) _connections[alias] = connection return _connections[alias] @@ -288,13 +312,12 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): def _create_connection(alias, connection_class, **connection_settings): """ Create the new connection for this alias. 
Raise - MongoEngineConnectionError if it can't be established. + ConnectionFailure if it can't be established. """ try: return connection_class(**connection_settings) except Exception as e: - raise MongoEngineConnectionError( - 'Cannot connect to database %s :\n%s' % (alias, e)) + raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e)) def _find_existing_connection(connection_settings): @@ -316,7 +339,7 @@ def _find_existing_connection(connection_settings): # Only remove the name but it's important to # keep the username/password/authentication_source/authentication_mechanism # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047) - return {k: v for k, v in settings_dict.items() if k != 'name'} + return {k: v for k, v in settings_dict.items() if k != "name"} cleaned_conn_settings = _clean_settings(connection_settings) for db_alias, connection_settings in connection_settings_bis: @@ -332,14 +355,18 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): if alias not in _dbs: conn = get_connection(alias) conn_settings = _connection_settings[alias] - db = conn[conn_settings['name']] - auth_kwargs = {'source': conn_settings['authentication_source']} - if conn_settings['authentication_mechanism'] is not None: - auth_kwargs['mechanism'] = conn_settings['authentication_mechanism'] + db = conn[conn_settings["name"]] + auth_kwargs = {"source": conn_settings["authentication_source"]} + if conn_settings["authentication_mechanism"] is not None: + auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"] # Authenticate if necessary - if conn_settings['username'] and (conn_settings['password'] or - conn_settings['authentication_mechanism'] == 'MONGODB-X509'): - db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs) + if conn_settings["username"] and ( + conn_settings["password"] + or conn_settings["authentication_mechanism"] == "MONGODB-X509" + ): + 
db.authenticate( + conn_settings["username"], conn_settings["password"], **auth_kwargs + ) _dbs[alias] = db return _dbs[alias] @@ -368,10 +395,10 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): if new_conn_settings != prev_conn_setting: err_msg = ( - u'A different connection with alias `{}` was already ' - u'registered. Use disconnect() first' + "A different connection with alias `{}` was already " + "registered. Use disconnect() first" ).format(alias) - raise MongoEngineConnectionError(err_msg) + raise ConnectionFailure(err_msg) else: register_connection(alias, db, **kwargs) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 98bd897b..5f2b5229 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,17 +1,24 @@ from contextlib import contextmanager +from pymongo.read_concern import ReadConcern from pymongo.write_concern import WriteConcern -from six import iteritems from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.pymongo_support import count_documents -__all__ = ('switch_db', 'switch_collection', 'no_dereference', - 'no_sub_classes', 'query_counter', 'set_write_concern') +__all__ = ( + "switch_db", + "switch_collection", + "no_dereference", + "no_sub_classes", + "query_counter", + "set_write_concern", + "set_read_write_concern", +) -class switch_db(object): +class switch_db: """switch_db alias context manager. 
Example :: @@ -38,21 +45,21 @@ class switch_db(object): self.cls = cls self.collection = cls._get_collection() self.db_alias = db_alias - self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME) + self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) def __enter__(self): """Change the db_alias and clear the cached collection.""" - self.cls._meta['db_alias'] = self.db_alias + self.cls._meta["db_alias"] = self.db_alias self.cls._collection = None return self.cls def __exit__(self, t, value, traceback): """Reset the db_alias and collection.""" - self.cls._meta['db_alias'] = self.ori_db_alias + self.cls._meta["db_alias"] = self.ori_db_alias self.cls._collection = self.collection -class switch_collection(object): +class switch_collection: """switch_collection alias context manager. Example :: @@ -94,7 +101,7 @@ class switch_collection(object): self.cls._get_collection_name = self.ori_get_collection_name -class no_dereference(object): +class no_dereference: """no_dereference context manager. 
Turns off all dereferencing in Documents for the duration of the context @@ -111,14 +118,15 @@ class no_dereference(object): """ self.cls = cls - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') - ComplexBaseField = _import_class('ComplexBaseField') + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") + ComplexBaseField = _import_class("ComplexBaseField") - self.deref_fields = [k for k, v in iteritems(self.cls._fields) - if isinstance(v, (ReferenceField, - GenericReferenceField, - ComplexBaseField))] + self.deref_fields = [ + k + for k, v in self.cls._fields.items() + if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) + ] def __enter__(self): """Change the objects default and _auto_dereference values.""" @@ -133,7 +141,7 @@ class no_dereference(object): return self.cls -class no_sub_classes(object): +class no_sub_classes: """no_sub_classes context manager. Only returns instances of this class and no sub (inherited) classes:: @@ -161,10 +169,10 @@ class no_sub_classes(object): self.cls._subclasses = self.cls_initial_subclasses -class query_counter(object): +class query_counter: """Query_counter context manager to get the number of queries. This works by updating the `profiling_level` of the database so that all queries get logged, - resetting the db.system.profile collection at the beginnig of the context and counting the new entries. + resetting the db.system.profile collection at the beginning of the context and counting the new entries. This was designed for debugging purpose. 
In fact it is a global counter so queries issued by other threads/processes can interfere with it @@ -175,20 +183,17 @@ class query_counter(object): - Some queries are ignored by default by the counter (killcursors, db.system.indexes) """ - def __init__(self): + def __init__(self, alias=DEFAULT_CONNECTION_NAME): """Construct the query_counter """ - self.db = get_db() + self.db = get_db(alias=alias) self.initial_profiling_level = None - self._ctx_query_counter = 0 # number of queries issued by the context + self._ctx_query_counter = 0 # number of queries issued by the context self._ignored_query = { - 'ns': - {'$ne': '%s.system.indexes' % self.db.name}, - 'op': # MONGODB < 3.2 - {'$ne': 'killcursors'}, - 'command.killCursors': # MONGODB >= 3.2 - {'$exists': False} + "ns": {"$ne": "%s.system.indexes" % self.db.name}, + "op": {"$ne": "killcursors"}, # MONGODB < 3.2 + "command.killCursors": {"$exists": False}, # MONGODB >= 3.2 } def _turn_on_profiling(self): @@ -231,15 +236,20 @@ class query_counter(object): def __repr__(self): """repr query_counter as the number of queries.""" - return u"%s" % self._get_count() + return "%s" % self._get_count() def _get_count(self): """Get the number of queries by counting the current number of entries in db.system.profile and substracting the queries issued by this context. 
In fact everytime this is called, 1 query is issued so we need to balance that """ - count = count_documents(self.db.system.profile, self._ignored_query) - self._ctx_query_counter - self._ctx_query_counter += 1 # Account for the query we just issued to gather the information + count = ( + count_documents(self.db.system.profile, self._ignored_query) + - self._ctx_query_counter + ) + self._ctx_query_counter += ( + 1 # Account for the query we just issued to gather the information + ) return count @@ -248,3 +258,21 @@ def set_write_concern(collection, write_concerns): combined_concerns = dict(collection.write_concern.document.items()) combined_concerns.update(write_concerns) yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) + + +@contextmanager +def set_read_write_concern(collection, write_concerns, read_concerns): + combined_write_concerns = dict(collection.write_concern.document.items()) + + if write_concerns is not None: + combined_write_concerns.update(write_concerns) + + combined_read_concerns = dict(collection.read_concern.document.items()) + + if read_concerns is not None: + combined_read_concerns.update(read_concerns) + + yield collection.with_options( + write_concern=WriteConcern(**combined_write_concerns), + read_concern=ReadConcern(**combined_read_concerns), + ) diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index eaebb56f..ff608a3b 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -1,9 +1,12 @@ from bson import DBRef, SON -import six -from six import iteritems -from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, - TopLevelDocumentMetaclass, get_document) +from mongoengine.base import ( + BaseDict, + BaseList, + EmbeddedDocumentList, + TopLevelDocumentMetaclass, + get_document, +) from mongoengine.base.datastructures import LazyReference from mongoengine.connection import get_db from mongoengine.document import Document, EmbeddedDocument @@ -11,7 +14,7 @@ from 
mongoengine.fields import DictField, ListField, MapField, ReferenceField from mongoengine.queryset import QuerySet -class DeReference(object): +class DeReference: def __call__(self, items, max_depth=1, instance=None, name=None): """ Cheaply dereferences the items to a set depth. @@ -25,7 +28,7 @@ class DeReference(object): :class:`~mongoengine.base.ComplexBaseField` :param get: A boolean determining if being called by __get__ """ - if items is None or isinstance(items, six.string_types): + if items is None or isinstance(items, str): return items # cheapest way to convert a queryset to a list @@ -36,21 +39,23 @@ class DeReference(object): self.max_depth = max_depth doc_type = None - if instance and isinstance(instance, (Document, EmbeddedDocument, - TopLevelDocumentMetaclass)): + if instance and isinstance( + instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass) + ): doc_type = instance._fields.get(name) - while hasattr(doc_type, 'field'): + while hasattr(doc_type, "field"): doc_type = doc_type.field if isinstance(doc_type, ReferenceField): field = doc_type doc_type = doc_type.document_type - is_list = not hasattr(items, 'items') + is_list = not hasattr(items, "items") if is_list and all([i.__class__ == doc_type for i in items]): return items elif not is_list and all( - [i.__class__ == doc_type for i in items.values()]): + [i.__class__ == doc_type for i in items.values()] + ): return items elif not field.dbref: # We must turn the ObjectIds into DBRefs @@ -72,7 +77,7 @@ class DeReference(object): def _get_items_from_dict(items): new_items = {} - for k, v in iteritems(items): + for k, v in items.items(): value = v if isinstance(v, list): value = _get_items_from_list(v) @@ -83,7 +88,7 @@ class DeReference(object): new_items[k] = value return new_items - if not hasattr(items, 'items'): + if not hasattr(items, "items"): items = _get_items_from_list(items) else: items = _get_items_from_dict(items) @@ -113,20 +118,26 @@ class DeReference(object): depth += 1 for 
item in iterator: if isinstance(item, (Document, EmbeddedDocument)): - for field_name, field in iteritems(item._fields): + for field_name, field in item._fields.items(): v = item._data.get(field_name, None) if isinstance(v, LazyReference): # LazyReference inherits DBRef but should not be dereferenced here ! continue elif isinstance(v, DBRef): reference_map.setdefault(field.document_type, set()).add(v.id) - elif isinstance(v, (dict, SON)) and '_ref' in v: - reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) + elif isinstance(v, (dict, SON)) and "_ref" in v: + reference_map.setdefault(get_document(v["_cls"]), set()).add( + v["_ref"].id + ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - field_cls = getattr(getattr(field, 'field', None), 'document_type', None) + field_cls = getattr( + getattr(field, "field", None), "document_type", None + ) references = self._find_references(v, depth) - for key, refs in iteritems(references): - if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): + for key, refs in references.items(): + if isinstance( + field_cls, (Document, TopLevelDocumentMetaclass) + ): key = field_cls reference_map.setdefault(key, set()).update(refs) elif isinstance(item, LazyReference): @@ -134,11 +145,13 @@ class DeReference(object): continue elif isinstance(item, DBRef): reference_map.setdefault(item.collection, set()).add(item.id) - elif isinstance(item, (dict, SON)) and '_ref' in item: - reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) + elif isinstance(item, (dict, SON)) and "_ref" in item: + reference_map.setdefault(get_document(item["_cls"]), set()).add( + item["_ref"].id + ) elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: references = self._find_references(item, depth - 1) - for key, refs in iteritems(references): + for key, refs in references.items(): reference_map.setdefault(key, set()).update(refs) return reference_map @@ -147,40 
+160,44 @@ class DeReference(object): """Fetch all references and convert to their document objects """ object_map = {} - for collection, dbrefs in iteritems(self.reference_map): + for collection, dbrefs in self.reference_map.items(): # we use getattr instead of hasattr because hasattr swallows any exception under python2 # so it could hide nasty things without raising exceptions (cfr bug #1688)) - ref_document_cls_exists = (getattr(collection, 'objects', None) is not None) + ref_document_cls_exists = getattr(collection, "objects", None) is not None if ref_document_cls_exists: col_name = collection._get_collection_name() - refs = [dbref for dbref in dbrefs - if (col_name, dbref) not in object_map] + refs = [ + dbref for dbref in dbrefs if (col_name, dbref) not in object_map + ] references = collection.objects.in_bulk(refs) - for key, doc in iteritems(references): + for key, doc in references.items(): object_map[(col_name, key)] = doc else: # Generic reference: use the refs data to convert to document if isinstance(doc_type, (ListField, DictField, MapField)): continue - refs = [dbref for dbref in dbrefs - if (collection, dbref) not in object_map] + refs = [ + dbref for dbref in dbrefs if (collection, dbref) not in object_map + ] if doc_type: - references = doc_type._get_db()[collection].find({'_id': {'$in': refs}}) + references = doc_type._get_db()[collection].find( + {"_id": {"$in": refs}} + ) for ref in references: doc = doc_type._from_son(ref) object_map[(collection, doc.id)] = doc else: - references = get_db()[collection].find({'_id': {'$in': refs}}) + references = get_db()[collection].find({"_id": {"$in": refs}}) for ref in references: - if '_cls' in ref: - doc = get_document(ref['_cls'])._from_son(ref) + if "_cls" in ref: + doc = get_document(ref["_cls"])._from_son(ref) elif doc_type is None: doc = get_document( - ''.join(x.capitalize() - for x in collection.split('_')))._from_son(ref) + "".join(x.capitalize() for x in collection.split("_")) + )._from_son(ref) 
else: doc = doc_type._from_son(ref) object_map[(collection, doc.id)] = doc @@ -208,19 +225,20 @@ class DeReference(object): return BaseList(items, instance, name) if isinstance(items, (dict, SON)): - if '_ref' in items: + if "_ref" in items: return self.object_map.get( - (items['_ref'].collection, items['_ref'].id), items) - elif '_cls' in items: - doc = get_document(items['_cls'])._from_son(items) - _cls = doc._data.pop('_cls', None) - del items['_cls'] + (items["_ref"].collection, items["_ref"].id), items + ) + elif "_cls" in items: + doc = get_document(items["_cls"])._from_son(items) + _cls = doc._data.pop("_cls", None) + del items["_cls"] doc._data = self._attach_objects(doc._data, depth, doc, None) if _cls is not None: - doc._data['_cls'] = _cls + doc._data["_cls"] = _cls return doc - if not hasattr(items, 'items'): + if not hasattr(items, "items"): is_list = True list_type = BaseList if isinstance(items, EmbeddedDocumentList): @@ -230,7 +248,7 @@ class DeReference(object): data = [] else: is_list = False - iterator = iteritems(items) + iterator = items.items() data = {} depth += 1 @@ -247,17 +265,23 @@ class DeReference(object): v = data[k]._data.get(field_name, None) if isinstance(v, DBRef): data[k]._data[field_name] = self.object_map.get( - (v.collection, v.id), v) - elif isinstance(v, (dict, SON)) and '_ref' in v: + (v.collection, v.id), v + ) + elif isinstance(v, (dict, SON)) and "_ref" in v: data[k]._data[field_name] = self.object_map.get( - (v['_ref'].collection, v['_ref'].id), v) + (v["_ref"].collection, v["_ref"].id), v + ) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: - item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name) - data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) + item_name = "{}.{}.{}".format(name, k, field_name) + data[k]._data[field_name] = self._attach_objects( + v, depth, instance=instance, name=item_name + ) elif isinstance(v, (dict, list, tuple)) and 
depth <= self.max_depth: - item_name = '%s.%s' % (name, k) if name else name - data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) - elif isinstance(v, DBRef) and hasattr(v, 'id'): + item_name = "{}.{}".format(name, k) if name else name + data[k] = self._attach_objects( + v, depth - 1, instance=instance, name=item_name + ) + elif isinstance(v, DBRef) and hasattr(v, "id"): data[k] = self.object_map.get((v.collection, v.id), v) if instance and name: diff --git a/mongoengine/document.py b/mongoengine/document.py index cc35c440..4a57d511 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -4,46 +4,57 @@ import warnings from bson.dbref import DBRef import pymongo from pymongo.read_preferences import ReadPreference -import six -from six import iteritems from mongoengine import signals -from mongoengine.base import (BaseDict, BaseDocument, BaseList, - DocumentMetaclass, EmbeddedDocumentList, - TopLevelDocumentMetaclass, get_document) +from mongoengine.base import ( + BaseDict, + BaseDocument, + BaseList, + DocumentMetaclass, + EmbeddedDocumentList, + TopLevelDocumentMetaclass, + get_document, +) from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db -from mongoengine.context_managers import (set_write_concern, - switch_collection, - switch_db) -from mongoengine.errors import (InvalidDocumentError, InvalidQueryError, - SaveConditionError) +from mongoengine.context_managers import set_write_concern, switch_collection, switch_db +from mongoengine.errors import ( + InvalidDocumentError, + InvalidQueryError, + SaveConditionError, +) from mongoengine.pymongo_support import list_collection_names -from mongoengine.queryset import (NotUniqueError, OperationError, - QuerySet, transform) +from mongoengine.queryset import NotUniqueError, OperationError, QuerySet, transform -__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', - 'DynamicEmbeddedDocument', 'OperationError', - 
'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument') +__all__ = ( + "Document", + "EmbeddedDocument", + "DynamicDocument", + "DynamicEmbeddedDocument", + "OperationError", + "InvalidCollectionError", + "NotUniqueError", + "MapReduceDocument", +) def includes_cls(fields): """Helper function used for ensuring and comparing indexes.""" first_field = None if len(fields): - if isinstance(fields[0], six.string_types): + if isinstance(fields[0], str): first_field = fields[0] elif isinstance(fields[0], (list, tuple)) and len(fields[0]): first_field = fields[0][0] - return first_field == '_cls' + return first_field == "_cls" class InvalidCollectionError(Exception): pass -class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): - """A :class:`~mongoengine.Document` that isn't stored in its own +class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): + r"""A :class:`~mongoengine.Document` that isn't stored in its own collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as fields on :class:`~mongoengine.Document`\ s through the :class:`~mongoengine.EmbeddedDocumentField` field type. @@ -56,9 +67,8 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): :attr:`meta` dictionary. 
""" - __slots__ = ('_instance', ) + __slots__ = ("_instance",) - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass @@ -69,7 +79,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): __hash__ = None def __init__(self, *args, **kwargs): - super(EmbeddedDocument, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._instance = None self._changed_fields = [] @@ -82,16 +92,16 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): return not self.__eq__(other) def to_mongo(self, *args, **kwargs): - data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) + data = super().to_mongo(*args, **kwargs) # remove _id from the SON if it's in it and it's None - if '_id' in data and data['_id'] is None: - del data['_id'] + if "_id" in data and data["_id"] is None: + del data["_id"] return data -class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): +class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): """The base class used for defining the structure and properties of collections of documents stored in MongoDB. Inherit from this class, and add fields as class attributes to define a document's structure. @@ -143,23 +153,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): in the :attr:`meta` dictionary. 
""" - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass - __slots__ = ('__objects',) + __slots__ = ("__objects",) @property def pk(self): """Get the primary key.""" - if 'id_field' not in self._meta: + if "id_field" not in self._meta: return None - return getattr(self, self._meta['id_field']) + return getattr(self, self._meta["id_field"]) @pk.setter def pk(self, value): """Set the primary key.""" - return setattr(self, self._meta['id_field'], value) + return setattr(self, self._meta["id_field"], value) def __hash__(self): """Return the hash based on the PK of this document. If it's new @@ -173,7 +182,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): @classmethod def _get_db(cls): """Some Model using other db_alias""" - return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)) + return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)) @classmethod def _disconnect(cls): @@ -190,9 +199,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): 2. Creates indexes defined in this document's :attr:`meta` dictionary. This happens only if `auto_create_index` is True. """ - if not hasattr(cls, '_collection') or cls._collection is None: + if not hasattr(cls, "_collection") or cls._collection is None: # Get the collection, either capped or regular. - if cls._meta.get('max_size') or cls._meta.get('max_documents'): + if cls._meta.get("max_size") or cls._meta.get("max_documents"): cls._collection = cls._get_capped_collection() else: db = cls._get_db() @@ -203,8 +212,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # set to False. # Also there is no need to ensure indexes on slave. 
db = cls._get_db() - if cls._meta.get('auto_create_index', True) and\ - db.client.is_primary: + if cls._meta.get("auto_create_index", True) and db.client.is_primary: cls.ensure_indexes() return cls._collection @@ -216,8 +224,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): collection_name = cls._get_collection_name() # Get max document limit and max byte size from meta. - max_size = cls._meta.get('max_size') or 10 * 2 ** 20 # 10MB default - max_documents = cls._meta.get('max_documents') + max_size = cls._meta.get("max_size") or 10 * 2 ** 20 # 10MB default + max_documents = cls._meta.get("max_documents") # MongoDB will automatically raise the size to make it a multiple of # 256 bytes. We raise it here ourselves to be able to reliably compare @@ -227,37 +235,36 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # If the collection already exists and has different options # (i.e. isn't capped or has different max/size), raise an error. - if collection_name in list_collection_names(db, include_system_collections=True): + if collection_name in list_collection_names( + db, include_system_collections=True + ): collection = db[collection_name] options = collection.options() - if ( - options.get('max') != max_documents or - options.get('size') != max_size - ): + if options.get("max") != max_documents or options.get("size") != max_size: raise InvalidCollectionError( 'Cannot create collection "{}" as a capped ' - 'collection as it already exists'.format(cls._collection) + "collection as it already exists".format(cls._collection) ) return collection # Create a new capped collection. 
- opts = {'capped': True, 'size': max_size} + opts = {"capped": True, "size": max_size} if max_documents: - opts['max'] = max_documents + opts["max"] = max_documents return db.create_collection(collection_name, **opts) def to_mongo(self, *args, **kwargs): - data = super(Document, self).to_mongo(*args, **kwargs) + data = super().to_mongo(*args, **kwargs) # If '_id' is None, try and set it from self._data. If that # doesn't exist either, remove '_id' from the SON completely. - if data['_id'] is None: - if self._data.get('id') is None: - del data['_id'] + if data["_id"] is None: + if self._data.get("id") is None: + del data["_id"] else: - data['_id'] = self._data['id'] + data["_id"] = self._data["id"] return data @@ -279,15 +286,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): query = {} if self.pk is None: - raise InvalidDocumentError('The document does not have a primary key.') + raise InvalidDocumentError("The document does not have a primary key.") - id_field = self._meta['id_field'] + id_field = self._meta["id_field"] query = query.copy() if isinstance(query, dict) else query.to_query(self) if id_field not in query: query[id_field] = self.pk elif query[id_field] != self.pk: - raise InvalidQueryError('Invalid document modify query: it must modify only this document.') + raise InvalidQueryError( + "Invalid document modify query: it must modify only this document." 
+ ) # Need to add shard key to query, or you get an error query.update(self._object_key) @@ -304,12 +313,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): return True - def save(self, force_insert=False, validate=True, clean=True, - write_concern=None, cascade=None, cascade_kwargs=None, - _refs=None, save_condition=None, signal_kwargs=None, **kwargs): + def save( + self, + force_insert=False, + validate=True, + clean=True, + write_concern=None, + cascade=None, + cascade_kwargs=None, + _refs=None, + save_condition=None, + signal_kwargs=None, + **kwargs + ): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be - created. + created. Returns the saved object instance. :param force_insert: only try to create a new document, don't allow updates of existing documents. @@ -360,8 +379,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """ signal_kwargs = signal_kwargs or {} - if self._meta.get('abstract'): - raise InvalidDocumentError('Cannot save an abstract document.') + if self._meta.get("abstract"): + raise InvalidDocumentError("Cannot save an abstract document.") signals.pre_save.send(self.__class__, document=self, **signal_kwargs) @@ -371,15 +390,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): if write_concern is None: write_concern = {} - doc_id = self.to_mongo(fields=[self._meta['id_field']]) - created = ('_id' not in doc_id or self._created or force_insert) + doc_id = self.to_mongo(fields=[self._meta["id_field"]]) + created = "_id" not in doc_id or self._created or force_insert - signals.pre_save_post_validation.send(self.__class__, document=self, - created=created, **signal_kwargs) + signals.pre_save_post_validation.send( + self.__class__, document=self, created=created, **signal_kwargs + ) # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation doc = 
self.to_mongo() - if self._meta.get('auto_create_index', True): + if self._meta.get("auto_create_index", True): self.ensure_indexes() try: @@ -387,44 +407,45 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): if created: object_id = self._save_create(doc, force_insert, write_concern) else: - object_id, created = self._save_update(doc, save_condition, - write_concern) + object_id, created = self._save_update( + doc, save_condition, write_concern + ) if cascade is None: - cascade = (self._meta.get('cascade', False) or - cascade_kwargs is not None) + cascade = self._meta.get("cascade", False) or cascade_kwargs is not None if cascade: kwargs = { - 'force_insert': force_insert, - 'validate': validate, - 'write_concern': write_concern, - 'cascade': cascade + "force_insert": force_insert, + "validate": validate, + "write_concern": write_concern, + "cascade": cascade, } if cascade_kwargs: # Allow granular control over cascades kwargs.update(cascade_kwargs) - kwargs['_refs'] = _refs + kwargs["_refs"] = _refs self.cascade_save(**kwargs) except pymongo.errors.DuplicateKeyError as err: - message = u'Tried to save duplicate unique keys (%s)' - raise NotUniqueError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) except pymongo.errors.OperationFailure as err: - message = 'Could not save document (%s)' - if re.match('^E1100[01] duplicate key', six.text_type(err)): + message = "Could not save document (%s)" + if re.match("^E1100[01] duplicate key", str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u'Tried to save duplicate unique keys (%s)' - raise NotUniqueError(message % six.text_type(err)) - raise OperationError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) + raise OperationError(message % err) # Make sure we store the PK on this document now that it's 
saved - id_field = self._meta['id_field'] - if created or id_field not in self._meta.get('shard_key', []): + id_field = self._meta["id_field"] + if created or id_field not in self._meta.get("shard_key", []): self[id_field] = self._fields[id_field].to_python(object_id) - signals.post_save.send(self.__class__, document=self, - created=created, **signal_kwargs) + signals.post_save.send( + self.__class__, document=self, created=created, **signal_kwargs + ) self._clear_changed_fields() self._created = False @@ -442,11 +463,12 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): return wc_collection.insert_one(doc).inserted_id # insert_one will provoke UniqueError alongside save does not # therefore, it need to catch and call replace_one. - if '_id' in doc: + if "_id" in doc: raw_object = wc_collection.find_one_and_replace( - {'_id': doc['_id']}, doc) + {"_id": doc["_id"]}, doc + ) if raw_object: - return doc['_id'] + return doc["_id"] object_id = wc_collection.insert_one(doc).inserted_id @@ -461,9 +483,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): update_doc = {} if updates: - update_doc['$set'] = updates + update_doc["$set"] = updates if removals: - update_doc['$unset'] = removals + update_doc["$unset"] = removals return update_doc @@ -473,39 +495,38 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): Helper method, should only be used inside save(). 
""" collection = self._get_collection() - object_id = doc['_id'] + object_id = doc["_id"] created = False select_dict = {} if save_condition is not None: select_dict = transform.query(self.__class__, **save_condition) - select_dict['_id'] = object_id + select_dict["_id"] = object_id # Need to add shard key to query, or you get an error - shard_key = self._meta.get('shard_key', tuple()) + shard_key = self._meta.get("shard_key", tuple()) for k in shard_key: - path = self._lookup_field(k.split('.')) + path = self._lookup_field(k.split(".")) actual_key = [p.db_field for p in path] val = doc for ak in actual_key: val = val[ak] - select_dict['.'.join(actual_key)] = val + select_dict[".".join(actual_key)] = val update_doc = self._get_update_doc() if update_doc: upsert = save_condition is None with set_write_concern(collection, write_concern) as wc_collection: last_error = wc_collection.update_one( - select_dict, - update_doc, - upsert=upsert + select_dict, update_doc, upsert=upsert ).raw_result - if not upsert and last_error['n'] == 0: - raise SaveConditionError('Race condition preventing' - ' document update detected') + if not upsert and last_error["n"] == 0: + raise SaveConditionError( + "Race condition preventing document update detected" + ) if last_error is not None: - updated_existing = last_error.get('updatedExisting') + updated_existing = last_error.get("updatedExisting") if updated_existing is False: created = True # !!! This is bad, means we accidentally created a new, @@ -518,24 +539,23 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """Recursively save any references and generic references on the document. 
""" - _refs = kwargs.get('_refs') or [] + _refs = kwargs.get("_refs") or [] - ReferenceField = _import_class('ReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') + ReferenceField = _import_class("ReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") for name, cls in self._fields.items(): - if not isinstance(cls, (ReferenceField, - GenericReferenceField)): + if not isinstance(cls, (ReferenceField, GenericReferenceField)): continue ref = self._data.get(name) if not ref or isinstance(ref, DBRef): continue - if not getattr(ref, '_changed_fields', True): + if not getattr(ref, "_changed_fields", True): continue - ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) + ref_id = "{},{}".format(ref.__class__.__name__, str(ref._data)) if ref and ref_id not in _refs: _refs.append(ref_id) kwargs["_refs"] = _refs @@ -544,27 +564,31 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): @property def _qs(self): - """Return the queryset to use for updating / reloading / deletions.""" - if not hasattr(self, '__objects'): + """Return the default queryset corresponding to this document.""" + if not hasattr(self, "__objects"): self.__objects = QuerySet(self, self._get_collection()) return self.__objects @property def _object_key(self): - """Get the query dict that can be used to fetch this object from - the database. Most of the time it's a simple PK lookup, but in - case of a sharded collection with a compound shard key, it can - contain a more complex query. + """Return a query dict that can be used to fetch this document. + + Most of the time the dict is a simple PK lookup, but in case of + a sharded collection with a compound shard key, it can contain a more + complex query. + + Note that the dict returned by this method uses MongoEngine field + names instead of PyMongo field names (e.g. "pk" instead of "_id", + "some__nested__field" instead of "some.nested.field", etc.). 
""" - select_dict = {'pk': self.pk} - shard_key = self.__class__._meta.get('shard_key', tuple()) + select_dict = {"pk": self.pk} + shard_key = self.__class__._meta.get("shard_key", tuple()) for k in shard_key: - path = self._lookup_field(k.split('.')) - actual_key = [p.db_field for p in path] val = self - for ak in actual_key: - val = getattr(val, ak) - select_dict['__'.join(actual_key)] = val + field_parts = k.split(".") + for part in field_parts: + val = getattr(val, part) + select_dict["__".join(field_parts)] = val return select_dict def update(self, **kwargs): @@ -575,14 +599,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): been saved. """ if self.pk is None: - if kwargs.get('upsert', False): + if kwargs.get("upsert", False): query = self.to_mongo() - if '_cls' in query: - del query['_cls'] + if "_cls" in query: + del query["_cls"] return self._qs.filter(**query).update_one(**kwargs) else: - raise OperationError( - 'attempt to update a document not yet saved') + raise OperationError("attempt to update a document not yet saved") # Need to add shard key to query, or you get an error return self._qs.filter(**self._object_key).update_one(**kwargs) @@ -606,16 +629,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): signals.pre_delete.send(self.__class__, document=self, **signal_kwargs) # Delete FileFields separately - FileField = _import_class('FileField') - for name, field in iteritems(self._fields): + FileField = _import_class("FileField") + for name, field in self._fields.items(): if isinstance(field, FileField): getattr(self, name).delete() try: - self._qs.filter( - **self._object_key).delete(write_concern=write_concern, _from_doc_delete=True) + self._qs.filter(**self._object_key).delete( + write_concern=write_concern, _from_doc_delete=True + ) except pymongo.errors.OperationFailure as err: - message = u'Could not delete document (%s)' % err.message + message = "Could not delete document (%s)" % 
err.args raise OperationError(message) signals.post_delete.send(self.__class__, document=self, **signal_kwargs) @@ -684,7 +708,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): .. versionadded:: 0.5 """ - DeReference = _import_class('DeReference') + DeReference = _import_class("DeReference") DeReference()([self], max_depth + 1) return self @@ -702,20 +726,24 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): if fields and isinstance(fields[0], int): max_depth = fields[0] fields = fields[1:] - elif 'max_depth' in kwargs: - max_depth = kwargs['max_depth'] + elif "max_depth" in kwargs: + max_depth = kwargs["max_depth"] if self.pk is None: - raise self.DoesNotExist('Document does not exist') + raise self.DoesNotExist("Document does not exist") - obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( - **self._object_key).only(*fields).limit( - 1).select_related(max_depth=max_depth) + obj = ( + self._qs.read_preference(ReadPreference.PRIMARY) + .filter(**self._object_key) + .only(*fields) + .limit(1) + .select_related(max_depth=max_depth) + ) if obj: obj = obj[0] else: - raise self.DoesNotExist('Document does not exist') + raise self.DoesNotExist("Document does not exist") for field in obj._data: if not fields or field in fields: try: @@ -731,9 +759,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): # i.e. 
obj.update(unset__field=1) followed by obj.reload() delattr(self, field) - self._changed_fields = list( - set(self._changed_fields) - set(fields) - ) if fields else obj._changed_fields + self._changed_fields = ( + list(set(self._changed_fields) - set(fields)) + if fields + else obj._changed_fields + ) self._created = False return self @@ -759,7 +789,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """Returns an instance of :class:`~bson.dbref.DBRef` useful in `__raw__` queries.""" if self.pk is None: - msg = 'Only saved documents can have a valid dbref' + msg = "Only saved documents can have a valid dbref" raise OperationError(msg) return DBRef(self.__class__._get_collection_name(), self.pk) @@ -768,18 +798,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """This method registers the delete rules to apply when removing this object. """ - classes = [get_document(class_name) - for class_name in cls._subclasses - if class_name != cls.__name__] + [cls] - documents = [get_document(class_name) - for class_name in document_cls._subclasses - if class_name != document_cls.__name__] + [document_cls] + classes = [ + get_document(class_name) + for class_name in cls._subclasses + if class_name != cls.__name__ + ] + [cls] + documents = [ + get_document(class_name) + for class_name in document_cls._subclasses + if class_name != document_cls.__name__ + ] + [document_cls] for klass in classes: for document_cls in documents: - delete_rules = klass._meta.get('delete_rules') or {} + delete_rules = klass._meta.get("delete_rules") or {} delete_rules[(document_cls, field_name)] = rule - klass._meta['delete_rules'] = delete_rules + klass._meta["delete_rules"] = delete_rules @classmethod def drop_collection(cls): @@ -794,8 +828,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """ coll_name = cls._get_collection_name() if not coll_name: - raise OperationError('Document %s has no collection 
defined ' - '(is it abstract ?)' % cls) + raise OperationError( + "Document %s has no collection defined (is it abstract ?)" % cls + ) cls._collection = None db = cls._get_db() db.drop_collection(coll_name) @@ -811,19 +846,14 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): """ index_spec = cls._build_index_spec(keys) index_spec = index_spec.copy() - fields = index_spec.pop('fields') - drop_dups = kwargs.get('drop_dups', False) - if drop_dups: - msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' - warnings.warn(msg, DeprecationWarning) - index_spec['background'] = background + fields = index_spec.pop("fields") + index_spec["background"] = background index_spec.update(kwargs) return cls._get_collection().create_index(fields, **index_spec) @classmethod - def ensure_index(cls, key_or_list, drop_dups=False, background=False, - **kwargs): + def ensure_index(cls, key_or_list, background=False, **kwargs): """Ensure that the given indexes are in place. Deprecated in favour of create_index. @@ -831,12 +861,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering :param background: Allows index creation in the background - :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value - will be removed if PyMongo3+ is used """ - if drop_dups: - msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' - warnings.warn(msg, DeprecationWarning) return cls.create_index(key_or_list, background=background, **kwargs) @classmethod @@ -848,13 +873,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): .. 
note:: You can disable automatic index creation by setting `auto_create_index` to False in the documents meta data """ - background = cls._meta.get('index_background', False) - drop_dups = cls._meta.get('index_drop_dups', False) - index_opts = cls._meta.get('index_opts') or {} - index_cls = cls._meta.get('index_cls', True) - if drop_dups: - msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' - warnings.warn(msg, DeprecationWarning) + background = cls._meta.get("index_background", False) + index_opts = cls._meta.get("index_opts") or {} + index_cls = cls._meta.get("index_cls", True) collection = cls._get_collection() # 746: when connection is via mongos, the read preference is not necessarily an indication that @@ -869,40 +890,39 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): cls_indexed = False # Ensure document-defined indexes are created - if cls._meta['index_specs']: - index_spec = cls._meta['index_specs'] + if cls._meta["index_specs"]: + index_spec = cls._meta["index_specs"] for spec in index_spec: spec = spec.copy() - fields = spec.pop('fields') + fields = spec.pop("fields") cls_indexed = cls_indexed or includes_cls(fields) opts = index_opts.copy() opts.update(spec) # we shouldn't pass 'cls' to the collection.ensureIndex options # because of https://jira.mongodb.org/browse/SERVER-769 - if 'cls' in opts: - del opts['cls'] + if "cls" in opts: + del opts["cls"] collection.create_index(fields, background=background, **opts) # If _cls is being used (for polymorphism), it needs an index, # only if another index doesn't begin with _cls - if index_cls and not cls_indexed and cls._meta.get('allow_inheritance'): + if index_cls and not cls_indexed and cls._meta.get("allow_inheritance"): # we shouldn't pass 'cls' to the collection.ensureIndex options # because of https://jira.mongodb.org/browse/SERVER-769 - if 'cls' in index_opts: - del index_opts['cls'] + if "cls" in index_opts: + del index_opts["cls"] - 
collection.create_index('_cls', background=background, - **index_opts) + collection.create_index("_cls", background=background, **index_opts) @classmethod def list_indexes(cls): """ Lists all of the indexes that should be created for given collection. It includes all the indexes from super- and sub-classes. """ - if cls._meta.get('abstract'): + if cls._meta.get("abstract"): return [] # get all the base classes, subclasses and siblings @@ -910,22 +930,27 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): def get_classes(cls): - if (cls not in classes and - isinstance(cls, TopLevelDocumentMetaclass)): + if cls not in classes and isinstance(cls, TopLevelDocumentMetaclass): classes.append(cls) for base_cls in cls.__bases__: - if (isinstance(base_cls, TopLevelDocumentMetaclass) and - base_cls != Document and - not base_cls._meta.get('abstract') and - base_cls._get_collection().full_name == cls._get_collection().full_name and - base_cls not in classes): + if ( + isinstance(base_cls, TopLevelDocumentMetaclass) + and base_cls != Document + and not base_cls._meta.get("abstract") + and base_cls._get_collection().full_name + == cls._get_collection().full_name + and base_cls not in classes + ): classes.append(base_cls) get_classes(base_cls) for subclass in cls.__subclasses__(): - if (isinstance(base_cls, TopLevelDocumentMetaclass) and - subclass._get_collection().full_name == cls._get_collection().full_name and - subclass not in classes): + if ( + isinstance(base_cls, TopLevelDocumentMetaclass) + and subclass._get_collection().full_name + == cls._get_collection().full_name + and subclass not in classes + ): classes.append(subclass) get_classes(subclass) @@ -935,11 +960,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): def get_indexes_spec(cls): indexes = [] - if cls._meta['index_specs']: - index_spec = cls._meta['index_specs'] + if cls._meta["index_specs"]: + index_spec = cls._meta["index_specs"] for spec in 
index_spec: spec = spec.copy() - fields = spec.pop('fields') + fields = spec.pop("fields") indexes.append(fields) return indexes @@ -950,10 +975,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): indexes.append(index) # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed - if [(u'_id', 1)] not in indexes: - indexes.append([(u'_id', 1)]) - if cls._meta.get('index_cls', True) and cls._meta.get('allow_inheritance'): - indexes.append([(u'_cls', 1)]) + if [("_id", 1)] not in indexes: + indexes.append([("_id", 1)]) + if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"): + indexes.append([("_cls", 1)]) return indexes @@ -967,30 +992,29 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): existing = [] for info in cls._get_collection().index_information().values(): - if '_fts' in info['key'][0]: - index_type = info['key'][0][1] - text_index_fields = info.get('weights').keys() - existing.append( - [(key, index_type) for key in text_index_fields]) + if "_fts" in info["key"][0]: + index_type = info["key"][0][1] + text_index_fields = info.get("weights").keys() + existing.append([(key, index_type) for key in text_index_fields]) else: - existing.append(info['key']) + existing.append(info["key"]) missing = [index for index in required if index not in existing] extra = [index for index in existing if index not in required] # if { _cls: 1 } is missing, make sure it's *really* necessary - if [(u'_cls', 1)] in missing: + if [("_cls", 1)] in missing: cls_obsolete = False for index in existing: if includes_cls(index) and index not in extra: cls_obsolete = True break if cls_obsolete: - missing.remove([(u'_cls', 1)]) + missing.remove([("_cls", 1)]) - return {'missing': missing, 'extra': extra} + return {"missing": missing, "extra": extra} -class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): +class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): """A 
Dynamic Document class allowing flexible, expandable and uncontrolled schemas. As a :class:`~mongoengine.Document` subclass, acts in the same way as an ordinary document but has expanded style properties. Any data @@ -1004,7 +1028,6 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = TopLevelDocumentMetaclass @@ -1019,16 +1042,15 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): setattr(self, field_name, None) self._dynamic_fields[field_name].null = False else: - super(DynamicDocument, self).__delattr__(*args, **kwargs) + super().__delattr__(*args, **kwargs) -class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): +class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): """A Dynamic Embedded Document class allowing flexible, expandable and uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more information about dynamic documents. """ - # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass @@ -1048,7 +1070,7 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu setattr(self, field_name, None) -class MapReduceDocument(object): +class MapReduceDocument: """A document returned from a map/reduce query. :param collection: An instance of :class:`~pymongo.Collection` @@ -1072,17 +1094,16 @@ class MapReduceDocument(object): """Lazy-load the object referenced by ``self.key``. ``self.key`` should be the ``primary_key``. 
""" - id_field = self._document()._meta['id_field'] + id_field = self._document()._meta["id_field"] id_field_type = type(id_field) if not isinstance(self.key, id_field_type): try: self.key = id_field_type(self.key) except Exception: - raise Exception('Could not cast key as %s' % - id_field_type.__name__) + raise Exception("Could not cast key as %s" % id_field_type.__name__) - if not hasattr(self, '_key_object'): + if not hasattr(self, "_key_object"): self._key_object = self._document.objects.with_id(self.key) return self._key_object return self._key_object diff --git a/mongoengine/errors.py b/mongoengine/errors.py index bea1d3dc..95564ff9 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -1,12 +1,21 @@ from collections import defaultdict -import six -from six import iteritems -__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', - 'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', - 'OperationError', 'NotUniqueError', 'FieldDoesNotExist', - 'ValidationError', 'SaveConditionError', 'DeprecatedError') +__all__ = ( + "NotRegistered", + "InvalidDocumentError", + "LookUpError", + "DoesNotExist", + "MultipleObjectsReturned", + "InvalidQueryError", + "OperationError", + "NotUniqueError", + "BulkWriteError", + "FieldDoesNotExist", + "ValidationError", + "SaveConditionError", + "DeprecatedError", +) class NotRegistered(Exception): @@ -41,6 +50,10 @@ class NotUniqueError(OperationError): pass +class BulkWriteError(OperationError): + pass + + class SaveConditionError(OperationError): pass @@ -71,25 +84,25 @@ class ValidationError(AssertionError): field_name = None _message = None - def __init__(self, message='', **kwargs): - super(ValidationError, self).__init__(message) - self.errors = kwargs.get('errors', {}) - self.field_name = kwargs.get('field_name') + def __init__(self, message="", **kwargs): + super().__init__(message) + self.errors = kwargs.get("errors", {}) + self.field_name = kwargs.get("field_name") self.message = message 
def __str__(self): - return six.text_type(self.message) + return str(self.message) def __repr__(self): - return '%s(%s,)' % (self.__class__.__name__, self.message) + return "{}({},)".format(self.__class__.__name__, self.message) def __getattribute__(self, name): - message = super(ValidationError, self).__getattribute__(name) - if name == 'message': + message = super().__getattribute__(name) + if name == "message": if self.field_name: - message = '%s' % message + message = "%s" % message if self.errors: - message = '%s(%s)' % (message, self._format_errors()) + message = "{}({})".format(message, self._format_errors()) return message def _get_message(self): @@ -111,12 +124,12 @@ class ValidationError(AssertionError): def build_dict(source): errors_dict = {} if isinstance(source, dict): - for field_name, error in iteritems(source): + for field_name, error in source.items(): errors_dict[field_name] = build_dict(error) elif isinstance(source, ValidationError) and source.errors: return build_dict(source.errors) else: - return six.text_type(source) + return str(source) return errors_dict @@ -128,22 +141,22 @@ class ValidationError(AssertionError): def _format_errors(self): """Returns a string listing all errors within a document""" - def generate_key(value, prefix=''): + def generate_key(value, prefix=""): if isinstance(value, list): - value = ' '.join([generate_key(k) for k in value]) + value = " ".join([generate_key(k) for k in value]) elif isinstance(value, dict): - value = ' '.join( - [generate_key(v, k) for k, v in iteritems(value)]) + value = " ".join([generate_key(v, k) for k, v in value.items()]) - results = '%s.%s' % (prefix, value) if prefix else value + results = "{}.{}".format(prefix, value) if prefix else value return results error_dict = defaultdict(list) - for k, v in iteritems(self.to_dict()): + for k, v in self.to_dict().items(): error_dict[generate_key(v)].append(k) - return ' '.join(['%s: %s' % (k, v) for k, v in iteritems(error_dict)]) + return " 
".join(["{}: {}".format(k, v) for k, v in error_dict.items()]) class DeprecatedError(Exception): """Raise when a user uses a feature that has been Deprecated""" + pass diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 591cf01a..c5926cbd 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -5,13 +5,14 @@ import re import socket import time import uuid +from io import BytesIO from operator import itemgetter from bson import Binary, DBRef, ObjectId, SON +from bson.int64 import Int64 import gridfs import pymongo -import six -from six import iteritems +from pymongo import ReturnDocument try: import dateutil @@ -20,21 +21,22 @@ except ImportError: else: import dateutil.parser -try: - from bson.int64 import Int64 -except ImportError: - Int64 = long - -from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField, - GeoJsonBaseField, LazyReference, ObjectIdField, - get_document) +from mongoengine.base import ( + BaseDocument, + BaseField, + ComplexBaseField, + GeoJsonBaseField, + LazyReference, + ObjectIdField, + get_document, +) from mongoengine.base.utils import LazyRegexCompiler from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db from mongoengine.document import Document, EmbeddedDocument from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError -from mongoengine.python_support import StringIO +from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version from mongoengine.queryset import DO_NOTHING from mongoengine.queryset.base import BaseQuerySet from mongoengine.queryset.transform import STRING_OPERATORS @@ -45,28 +47,53 @@ except ImportError: Image = None ImageOps = None -if six.PY3: - # Useless as long as 2to3 gets executed - # as it turns `long` into `int` blindly - long = int - __all__ = ( - 'StringField', 'URLField', 'EmailField', 'IntField', 'LongField', - 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', 'DateField', - 
'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', - 'GenericEmbeddedDocumentField', 'DynamicField', 'ListField', - 'SortedListField', 'EmbeddedDocumentListField', 'DictField', - 'MapField', 'ReferenceField', 'CachedReferenceField', - 'LazyReferenceField', 'GenericLazyReferenceField', - 'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy', - 'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField', - 'GeoPointField', 'PointField', 'LineStringField', 'PolygonField', - 'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField', - 'MultiPolygonField', 'GeoJsonBaseField' + "StringField", + "URLField", + "EmailField", + "IntField", + "LongField", + "FloatField", + "DecimalField", + "BooleanField", + "DateTimeField", + "DateField", + "ComplexDateTimeField", + "EmbeddedDocumentField", + "ObjectIdField", + "GenericEmbeddedDocumentField", + "DynamicField", + "ListField", + "SortedListField", + "EmbeddedDocumentListField", + "DictField", + "MapField", + "ReferenceField", + "CachedReferenceField", + "LazyReferenceField", + "GenericLazyReferenceField", + "GenericReferenceField", + "BinaryField", + "GridFSError", + "GridFSProxy", + "FileField", + "ImageGridFsProxy", + "ImproperlyConfigured", + "ImageField", + "GeoPointField", + "PointField", + "LineStringField", + "PolygonField", + "SequenceField", + "UUIDField", + "MultiPointField", + "MultiLineStringField", + "MultiPolygonField", + "GeoJsonBaseField", ) -RECURSIVE_REFERENCE_CONSTANT = 'self' +RECURSIVE_REFERENCE_CONSTANT = "self" class StringField(BaseField): @@ -76,55 +103,55 @@ class StringField(BaseField): self.regex = re.compile(regex) if regex else None self.max_length = max_length self.min_length = min_length - super(StringField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): - if isinstance(value, six.text_type): + if isinstance(value, str): return value try: - value = value.decode('utf-8') + value = value.decode("utf-8") except 
Exception: pass return value def validate(self, value): - if not isinstance(value, six.string_types): - self.error('StringField only accepts string values') + if not isinstance(value, str): + self.error("StringField only accepts string values") if self.max_length is not None and len(value) > self.max_length: - self.error('String value is too long') + self.error("String value is too long") if self.min_length is not None and len(value) < self.min_length: - self.error('String value is too short') + self.error("String value is too short") if self.regex is not None and self.regex.match(value) is None: - self.error('String value did not match validation regex') + self.error("String value did not match validation regex") def lookup_member(self, member_name): return None def prepare_query_value(self, op, value): - if not isinstance(op, six.string_types): + if not isinstance(op, str): return value if op in STRING_OPERATORS: - case_insensitive = op.startswith('i') - op = op.lstrip('i') + case_insensitive = op.startswith("i") + op = op.lstrip("i") flags = re.IGNORECASE if case_insensitive else 0 - regex = r'%s' - if op == 'startswith': - regex = r'^%s' - elif op == 'endswith': - regex = r'%s$' - elif op == 'exact': - regex = r'^%s$' + regex = r"%s" + if op == "startswith": + regex = r"^%s" + elif op == "endswith": + regex = r"%s$" + elif op == "exact": + regex = r"^%s$" # escape unsafe characters which could lead to a re.error value = re.escape(value) value = re.compile(regex % value, flags) - return super(StringField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class URLField(StringField): @@ -134,29 +161,31 @@ class URLField(StringField): """ _URL_REGEX = LazyRegexCompiler( - r'^(?:[a-z0-9\.\-]*)://' # scheme is validated separately - r'(?:(?:[A-Z0-9](?:[A-Z0-9-_]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(? 
self.max_value: - self.error('Integer value is too large') + self.error("Integer value is too large") def prepare_query_value(self, op, value): if value is None: return value - return super(IntField, self).prepare_query_value(op, int(value)) + return super().prepare_query_value(op, int(value)) class LongField(BaseField): - """64-bit integer field.""" + """64-bit integer field. (Equivalent to IntField since the support to Python2 was dropped)""" def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(LongField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: - value = long(value) + value = int(value) except (TypeError, ValueError): pass return value @@ -316,21 +357,21 @@ class LongField(BaseField): def validate(self, value): try: - value = long(value) + value = int(value) except (TypeError, ValueError): - self.error('%s could not be converted to long' % value) + self.error("%s could not be converted to long" % value) if self.min_value is not None and value < self.min_value: - self.error('Long value is too small') + self.error("Long value is too small") if self.max_value is not None and value > self.max_value: - self.error('Long value is too large') + self.error("Long value is too large") def prepare_query_value(self, op, value): if value is None: return value - return super(LongField, self).prepare_query_value(op, long(value)) + return super().prepare_query_value(op, int(value)) class FloatField(BaseField): @@ -338,7 +379,7 @@ class FloatField(BaseField): def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value - super(FloatField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): try: @@ -348,26 +389,26 @@ class FloatField(BaseField): return value def validate(self, value): - if isinstance(value, six.integer_types): + if isinstance(value, int): try: value = float(value) 
except OverflowError: - self.error('The value is too large to be converted to float') + self.error("The value is too large to be converted to float") if not isinstance(value, float): - self.error('FloatField only accepts float and integer values') + self.error("FloatField only accepts float and integer values") if self.min_value is not None and value < self.min_value: - self.error('Float value is too small') + self.error("Float value is too small") if self.max_value is not None and value > self.max_value: - self.error('Float value is too large') + self.error("Float value is too large") def prepare_query_value(self, op, value): if value is None: return value - return super(FloatField, self).prepare_query_value(op, float(value)) + return super().prepare_query_value(op, float(value)) class DecimalField(BaseField): @@ -378,8 +419,15 @@ class DecimalField(BaseField): .. versionadded:: 0.3 """ - def __init__(self, min_value=None, max_value=None, force_string=False, - precision=2, rounding=decimal.ROUND_HALF_UP, **kwargs): + def __init__( + self, + min_value=None, + max_value=None, + force_string=False, + precision=2, + rounding=decimal.ROUND_HALF_UP, + **kwargs + ): """ :param min_value: Validation rule for the minimum acceptable value. :param max_value: Validation rule for the maximum acceptable value. 
@@ -407,7 +455,7 @@ class DecimalField(BaseField): self.precision = precision self.rounding = rounding - super(DecimalField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if value is None: @@ -415,35 +463,37 @@ class DecimalField(BaseField): # Convert to string for python 2.6 before casting to Decimal try: - value = decimal.Decimal('%s' % value) + value = decimal.Decimal("%s" % value) except (TypeError, ValueError, decimal.InvalidOperation): return value - return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding) + return value.quantize( + decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding + ) def to_mongo(self, value): if value is None: return value if self.force_string: - return six.text_type(self.to_python(value)) + return str(self.to_python(value)) return float(self.to_python(value)) def validate(self, value): if not isinstance(value, decimal.Decimal): - if not isinstance(value, six.string_types): - value = six.text_type(value) + if not isinstance(value, str): + value = str(value) try: value = decimal.Decimal(value) except (TypeError, ValueError, decimal.InvalidOperation) as exc: - self.error('Could not convert value to decimal: %s' % exc) + self.error("Could not convert value to decimal: %s" % exc) if self.min_value is not None and value < self.min_value: - self.error('Decimal value is too small') + self.error("Decimal value is too small") if self.max_value is not None and value > self.max_value: - self.error('Decimal value is too large') + self.error("Decimal value is too large") def prepare_query_value(self, op, value): - return super(DecimalField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class BooleanField(BaseField): @@ -461,7 +511,7 @@ class BooleanField(BaseField): def validate(self, value): if not isinstance(value, bool): - self.error('BooleanField only accepts boolean values') + 
self.error("BooleanField only accepts boolean values") class DateTimeField(BaseField): @@ -483,7 +533,7 @@ class DateTimeField(BaseField): def validate(self, value): new_value = self.to_mongo(value) if not isinstance(new_value, (datetime.datetime, datetime.date)): - self.error(u'cannot parse date "%s"' % value) + self.error('cannot parse date "%s"' % value) def to_mongo(self, value): if value is None: @@ -495,7 +545,7 @@ class DateTimeField(BaseField): if callable(value): return value() - if not isinstance(value, six.string_types): + if not isinstance(value, str): return None return self._parse_datetime(value) @@ -513,43 +563,46 @@ class DateTimeField(BaseField): return None # split usecs, because they are not recognized by strptime. - if '.' in value: + if "." in value: try: - value, usecs = value.split('.') + value, usecs = value.split(".") usecs = int(usecs) except ValueError: return None else: usecs = 0 - kwargs = {'microsecond': usecs} + kwargs = {"microsecond": usecs} try: # Seconds are optional, so try converting seconds first. - return datetime.datetime(*time.strptime(value, - '%Y-%m-%d %H:%M:%S')[:6], **kwargs) + return datetime.datetime( + *time.strptime(value, "%Y-%m-%d %H:%M:%S")[:6], **kwargs + ) except ValueError: try: # Try without seconds. - return datetime.datetime(*time.strptime(value, - '%Y-%m-%d %H:%M')[:5], **kwargs) + return datetime.datetime( + *time.strptime(value, "%Y-%m-%d %H:%M")[:5], **kwargs + ) except ValueError: # Try without hour/minutes/seconds. 
try: - return datetime.datetime(*time.strptime(value, - '%Y-%m-%d')[:3], **kwargs) + return datetime.datetime( + *time.strptime(value, "%Y-%m-%d")[:3], **kwargs + ) except ValueError: return None def prepare_query_value(self, op, value): - return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class DateField(DateTimeField): def to_mongo(self, value): - value = super(DateField, self).to_mongo(value) + value = super().to_mongo(value) # drop hours, minutes, seconds if isinstance(value, datetime.datetime): value = datetime.datetime(value.year, value.month, value.day) return value def to_python(self, value): - value = super(DateField, self).to_python(value) + value = super().to_python(value) # convert datetime to date if isinstance(value, datetime.datetime): value = datetime.date(value.year, value.month, value.day) @@ -577,13 +630,13 @@ class ComplexDateTimeField(StringField): .. versionadded:: 0.5 """ - def __init__(self, separator=',', **kwargs): + def __init__(self, separator=",", **kwargs): """ :param separator: Allows to customize the separator used for storage (default ``,``) """ self.separator = separator - self.format = separator.join(['%Y', '%m', '%d', '%H', '%M', '%S', '%f']) - super(ComplexDateTimeField, self).__init__(**kwargs) + self.format = separator.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"]) + super().__init__(**kwargs) def _convert_from_datetime(self, val): """ @@ -614,23 +667,25 @@ class ComplexDateTimeField(StringField): if instance is None: return self - data = super(ComplexDateTimeField, self).__get__(instance, owner) + data = super().__get__(instance, owner) if isinstance(data, datetime.datetime) or data is None: return data return self._convert_from_string(data) def __set__(self, instance, value): - super(ComplexDateTimeField, self).__set__(instance, value) + super().__set__(instance, value) value = instance._data[self.name] if value is not None: - 
instance._data[self.name] = self._convert_from_datetime(value) + if isinstance(value, datetime.datetime): + instance._data[self.name] = self._convert_from_datetime(value) + else: + instance._data[self.name] = value def validate(self, value): value = self.to_python(value) if not isinstance(value, datetime.datetime): - self.error('Only datetime objects may used in a ' - 'ComplexDateTimeField') + self.error("Only datetime objects may used in a ComplexDateTimeField") def to_python(self, value): original_value = value @@ -644,7 +699,7 @@ class ComplexDateTimeField(StringField): return self._convert_from_datetime(value) def prepare_query_value(self, op, value): - return super(ComplexDateTimeField, self).prepare_query_value(op, self._convert_from_datetime(value)) + return super().prepare_query_value(op, self._convert_from_datetime(value)) class EmbeddedDocumentField(BaseField): @@ -655,18 +710,20 @@ class EmbeddedDocumentField(BaseField): def __init__(self, document_type, **kwargs): # XXX ValidationError raised outside of the "validate" method. 
if not ( - isinstance(document_type, six.string_types) or - issubclass(document_type, EmbeddedDocument) + isinstance(document_type, str) + or issubclass(document_type, EmbeddedDocument) ): - self.error('Invalid embedded document class provided to an ' - 'EmbeddedDocumentField') + self.error( + "Invalid embedded document class provided to an " + "EmbeddedDocumentField" + ) self.document_type_obj = document_type - super(EmbeddedDocumentField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: resolved_document_type = self.owner_document else: @@ -675,15 +732,19 @@ class EmbeddedDocumentField(BaseField): if not issubclass(resolved_document_type, EmbeddedDocument): # Due to the late resolution of the document_type # There is a chance that it won't be an EmbeddedDocument (#1661) - self.error('Invalid embedded document class provided to an ' - 'EmbeddedDocumentField') + self.error( + "Invalid embedded document class provided to an " + "EmbeddedDocumentField" + ) self.document_type_obj = resolved_document_type return self.document_type_obj def to_python(self, value): if not isinstance(value, self.document_type): - return self.document_type._from_son(value, _auto_dereference=self._auto_dereference) + return self.document_type._from_son( + value, _auto_dereference=self._auto_dereference + ) return value def to_mongo(self, value, use_db_field=True, fields=None): @@ -697,8 +758,10 @@ class EmbeddedDocumentField(BaseField): """ # Using isinstance also works for subclasses of self.document if not isinstance(value, self.document_type): - self.error('Invalid embedded document instance provided to an ' - 'EmbeddedDocumentField') + self.error( + "Invalid embedded document instance provided to an " + "EmbeddedDocumentField" + ) self.document_type.validate(value, clean) def 
lookup_member(self, member_name): @@ -710,12 +773,17 @@ class EmbeddedDocumentField(BaseField): def prepare_query_value(self, op, value): if value is not None and not isinstance(value, self.document_type): + # Short circuit for special operators, returning them as is + if isinstance(value, dict) and all(k.startswith("$") for k in value.keys()): + return value try: value = self.document_type._from_son(value) except ValueError: - raise InvalidQueryError("Querying the embedded document '%s' failed, due to an invalid query value" % - (self.document_type._class_name,)) - super(EmbeddedDocumentField, self).prepare_query_value(op, value) + raise InvalidQueryError( + "Querying the embedded document '%s' failed, due to an invalid query value" + % (self.document_type._class_name,) + ) + super().prepare_query_value(op, value) return self.to_mongo(value) @@ -731,11 +799,11 @@ class GenericEmbeddedDocumentField(BaseField): """ def prepare_query_value(self, op, value): - return super(GenericEmbeddedDocumentField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) def to_python(self, value): if isinstance(value, dict): - doc_cls = get_document(value['_cls']) + doc_cls = get_document(value["_cls"]) value = doc_cls._from_son(value) return value @@ -743,12 +811,14 @@ class GenericEmbeddedDocumentField(BaseField): def validate(self, value, clean=True): if self.choices and isinstance(value, SON): for choice in self.choices: - if value['_cls'] == choice._class_name: + if value["_cls"] == choice._class_name: return True if not isinstance(value, EmbeddedDocument): - self.error('Invalid embedded document instance provided to an ' - 'GenericEmbeddedDocumentField') + self.error( + "Invalid embedded document instance provided to an " + "GenericEmbeddedDocumentField" + ) value.validate(clean=clean) @@ -765,8 +835,8 @@ class GenericEmbeddedDocumentField(BaseField): if document is None: return None data = 
document.to_mongo(use_db_field, fields) - if '_cls' not in data: - data['_cls'] = document._class_name + if "_cls" not in data: + data["_cls"] = document._class_name return data @@ -780,55 +850,55 @@ class DynamicField(BaseField): """Convert a Python type to a MongoDB compatible type. """ - if isinstance(value, six.string_types): + if isinstance(value, str): return value - if hasattr(value, 'to_mongo'): + if hasattr(value, "to_mongo"): cls = value.__class__ val = value.to_mongo(use_db_field, fields) # If we its a document thats not inherited add _cls if isinstance(value, Document): - val = {'_ref': value.to_dbref(), '_cls': cls.__name__} + val = {"_ref": value.to_dbref(), "_cls": cls.__name__} if isinstance(value, EmbeddedDocument): - val['_cls'] = cls.__name__ + val["_cls"] = cls.__name__ return val if not isinstance(value, (dict, list, tuple)): return value is_list = False - if not hasattr(value, 'items'): + if not hasattr(value, "items"): is_list = True value = {k: v for k, v in enumerate(value)} data = {} - for k, v in iteritems(value): + for k, v in value.items(): data[k] = self.to_mongo(v, use_db_field, fields) value = data if is_list: # Convert back to a list - value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))] + value = [v for k, v in sorted(data.items(), key=itemgetter(0))] return value def to_python(self, value): - if isinstance(value, dict) and '_cls' in value: - doc_cls = get_document(value['_cls']) - if '_ref' in value: - value = doc_cls._get_db().dereference(value['_ref']) + if isinstance(value, dict) and "_cls" in value: + doc_cls = get_document(value["_cls"]) + if "_ref" in value: + value = doc_cls._get_db().dereference(value["_ref"]) return doc_cls._from_son(value) - return super(DynamicField, self).to_python(value) + return super().to_python(value) def lookup_member(self, member_name): return member_name def prepare_query_value(self, op, value): - if isinstance(value, six.string_types): + if isinstance(value, str): return 
StringField().prepare_query_value(op, value) - return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) def validate(self, value, clean=True): - if hasattr(value, 'validate'): + if hasattr(value, "validate"): value.validate(clean=clean) @@ -842,44 +912,60 @@ class ListField(ComplexBaseField): Required means it cannot be empty - as the default for ListFields is [] """ - def __init__(self, field=None, **kwargs): + def __init__(self, field=None, max_length=None, **kwargs): self.field = field - kwargs.setdefault('default', lambda: []) - super(ListField, self).__init__(**kwargs) + self.max_length = max_length + kwargs.setdefault("default", lambda: []) + super().__init__(**kwargs) def __get__(self, instance, owner): if instance is None: # Document class being used rather than a document object return self value = instance._data.get(self.name) - LazyReferenceField = _import_class('LazyReferenceField') - GenericLazyReferenceField = _import_class('GenericLazyReferenceField') - if isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField)) and value: + LazyReferenceField = _import_class("LazyReferenceField") + GenericLazyReferenceField = _import_class("GenericLazyReferenceField") + if ( + isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField)) + and value + ): instance._data[self.name] = [self.field.build_lazyref(x) for x in value] - return super(ListField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): """Make sure that a list of valid fields is being used.""" if not isinstance(value, (list, tuple, BaseQuerySet)): - self.error('Only lists and tuples may be used in a list field') - super(ListField, self).validate(value) + self.error("Only lists and tuples may be used in a list field") + + # Validate that max_length is not exceeded. + # NOTE It's still possible to bypass this enforcement by using $push. 
+ # However, if the document is reloaded after $push and then re-saved, + # the validation error will be raised. + if self.max_length is not None and len(value) > self.max_length: + self.error("List is too long") + + super().validate(value) def prepare_query_value(self, op, value): + # Validate that the `set` operator doesn't contain more items than `max_length`. + if op == "set" and self.max_length is not None and len(value) > self.max_length: + self.error("List is too long") + if self.field: # If the value is iterable and it's not a string nor a # BaseDocument, call prepare_query_value for each of its items. if ( - op in ('set', 'unset', None) and - hasattr(value, '__iter__') and - not isinstance(value, six.string_types) and - not isinstance(value, BaseDocument) + op in ("set", "unset", None) + and hasattr(value, "__iter__") + and not isinstance(value, str) + and not isinstance(value, BaseDocument) ): return [self.field.prepare_query_value(op, v) for v in value] return self.field.prepare_query_value(op, value) - return super(ListField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class EmbeddedDocumentListField(ListField): @@ -900,9 +986,7 @@ class EmbeddedDocumentListField(ListField): :param kwargs: Keyword arguments passed directly into the parent :class:`~mongoengine.ListField`. 
""" - super(EmbeddedDocumentListField, self).__init__( - field=EmbeddedDocumentField(document_type), **kwargs - ) + super().__init__(field=EmbeddedDocumentField(document_type), **kwargs) class SortedListField(ListField): @@ -924,17 +1008,18 @@ class SortedListField(ListField): _order_reverse = False def __init__(self, field, **kwargs): - if 'ordering' in kwargs.keys(): - self._ordering = kwargs.pop('ordering') - if 'reverse' in kwargs.keys(): - self._order_reverse = kwargs.pop('reverse') - super(SortedListField, self).__init__(field, **kwargs) + if "ordering" in kwargs.keys(): + self._ordering = kwargs.pop("ordering") + if "reverse" in kwargs.keys(): + self._order_reverse = kwargs.pop("reverse") + super().__init__(field, **kwargs) def to_mongo(self, value, use_db_field=True, fields=None): - value = super(SortedListField, self).to_mongo(value, use_db_field, fields) + value = super().to_mongo(value, use_db_field, fields) if self._ordering is not None: - return sorted(value, key=itemgetter(self._ordering), - reverse=self._order_reverse) + return sorted( + value, key=itemgetter(self._ordering), reverse=self._order_reverse + ) return sorted(value, reverse=self._order_reverse) @@ -943,7 +1028,7 @@ def key_not_string(d): dictionary is not a string. """ for k, v in d.items(): - if not isinstance(k, six.string_types) or (isinstance(v, dict) and key_not_string(v)): + if not isinstance(k, str) or (isinstance(v, dict) and key_not_string(v)): return True @@ -952,7 +1037,18 @@ def key_has_dot_or_dollar(d): dictionary contains a dot or a dollar sign. """ for k, v in d.items(): - if ('.' in k or k.startswith('$')) or (isinstance(v, dict) and key_has_dot_or_dollar(v)): + if ("." 
in k or k.startswith("$")) or ( + isinstance(v, dict) and key_has_dot_or_dollar(v) + ): + return True + + +def key_starts_with_dollar(d): + """Helper function to recursively determine if any key in a + dictionary starts with a dollar + """ + for k, v in d.items(): + if (k.startswith("$")) or (isinstance(v, dict) and key_starts_with_dollar(v)): return True @@ -971,43 +1067,57 @@ class DictField(ComplexBaseField): self.field = field self._auto_dereference = False - kwargs.setdefault('default', lambda: {}) - super(DictField, self).__init__(*args, **kwargs) + kwargs.setdefault("default", lambda: {}) + super().__init__(*args, **kwargs) def validate(self, value): """Make sure that a list of valid fields is being used.""" if not isinstance(value, dict): - self.error('Only dictionaries may be used in a DictField') + self.error("Only dictionaries may be used in a DictField") if key_not_string(value): - msg = ('Invalid dictionary key - documents must ' - 'have only string keys') + msg = "Invalid dictionary key - documents must have only string keys" self.error(msg) - if key_has_dot_or_dollar(value): - self.error('Invalid dictionary key name - keys may not contain "."' - ' or startswith "$" characters') - super(DictField, self).validate(value) + + # Following condition applies to MongoDB >= 3.6 + # older Mongo has stricter constraints but + # it will be rejected upon insertion anyway + # Having a validation that depends on the MongoDB version + # is not straightforward as the field isn't aware of the connected Mongo + if key_starts_with_dollar(value): + self.error( + 'Invalid dictionary key name - keys may not startswith "$" characters' + ) + super().validate(value) def lookup_member(self, member_name): return DictField(db_field=member_name) def prepare_query_value(self, op, value): - match_operators = ['contains', 'icontains', 'startswith', - 'istartswith', 'endswith', 'iendswith', - 'exact', 'iexact'] + match_operators = [ + "contains", + "icontains", + "startswith", + 
"istartswith", + "endswith", + "iendswith", + "exact", + "iexact", + ] - if op in match_operators and isinstance(value, six.string_types): + if op in match_operators and isinstance(value, str): return StringField().prepare_query_value(op, value) - if hasattr(self.field, 'field'): # Used for instance when using DictField(ListField(IntField())) - if op in ('set', 'unset') and isinstance(value, dict): + if hasattr( + self.field, "field" + ): # Used for instance when using DictField(ListField(IntField())) + if op in ("set", "unset") and isinstance(value, dict): return { - k: self.field.prepare_query_value(op, v) - for k, v in value.items() + k: self.field.prepare_query_value(op, v) for k, v in value.items() } return self.field.prepare_query_value(op, value) - return super(DictField, self).prepare_query_value(op, value) + return super().prepare_query_value(op, value) class MapField(DictField): @@ -1021,9 +1131,8 @@ class MapField(DictField): def __init__(self, field=None, *args, **kwargs): # XXX ValidationError raised outside of the "validate" method. if not isinstance(field, BaseField): - self.error('Argument to MapField constructor must be a valid ' - 'field') - super(MapField, self).__init__(field=field, *args, **kwargs) + self.error("Argument to MapField constructor must be a valid field") + super().__init__(field=field, *args, **kwargs) class ReferenceField(BaseField): @@ -1068,8 +1177,9 @@ class ReferenceField(BaseField): .. versionchanged:: 0.5 added `reverse_delete_rule` """ - def __init__(self, document_type, dbref=False, - reverse_delete_rule=DO_NOTHING, **kwargs): + def __init__( + self, document_type, dbref=False, reverse_delete_rule=DO_NOTHING, **kwargs + ): """Initialises the Reference Field. :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef` @@ -1082,21 +1192,22 @@ class ReferenceField(BaseField): :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`. """ # XXX ValidationError raised outside of the "validate" method. 
- if ( - not isinstance(document_type, six.string_types) and - not issubclass(document_type, Document) + if not isinstance(document_type, str) and not issubclass( + document_type, Document ): - self.error('Argument to ReferenceField constructor must be a ' - 'document class or a string') + self.error( + "Argument to ReferenceField constructor must be a " + "document class or a string" + ) self.dbref = dbref self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule - super(ReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1114,18 +1225,18 @@ class ReferenceField(BaseField): auto_dereference = instance._fields[self.name]._auto_dereference # Dereference DBRefs if auto_dereference and isinstance(value, DBRef): - if hasattr(value, 'cls'): + if hasattr(value, "cls"): # Dereference using the class type specified in the reference cls = get_document(value.cls) else: cls = self.document_type dereferenced = cls._get_db().dereference(value) if dereferenced is None: - raise DoesNotExist('Trying to dereference unknown document %s' % value) + raise DoesNotExist("Trying to dereference unknown document %s" % value) else: instance._data[self.name] = cls._from_son(dereferenced) - return super(ReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, document): if isinstance(document, DBRef): @@ -1139,8 +1250,10 @@ class ReferenceField(BaseField): # XXX ValidationError raised outside of the "validate" method. 
if id_ is None: - self.error('You can only reference documents once they have' - ' been saved to the database') + self.error( + "You can only reference documents once they have" + " been saved to the database" + ) # Use the attributes from the document instance, so that they # override the attributes of this field's document type @@ -1149,11 +1262,11 @@ class ReferenceField(BaseField): id_ = document cls = self.document_type - id_field_name = cls._meta['id_field'] + id_field_name = cls._meta["id_field"] id_field = cls._fields[id_field_name] id_ = id_field.to_mongo(id_) - if self.document_type._meta.get('abstract'): + if self.document_type._meta.get("abstract"): collection = cls._get_collection_name() return DBRef(collection, id_, cls=cls._class_name) elif self.dbref: @@ -1164,8 +1277,9 @@ class ReferenceField(BaseField): def to_python(self, value): """Convert a MongoDB-compatible type to a Python type.""" - if (not self.dbref and - not isinstance(value, (DBRef, Document, EmbeddedDocument))): + if not self.dbref and not isinstance( + value, (DBRef, Document, EmbeddedDocument) + ): collection = self.document_type._get_collection_name() value = DBRef(collection, self.document_type.id.to_python(value)) return value @@ -1173,16 +1287,20 @@ class ReferenceField(BaseField): def prepare_query_value(self, op, value): if value is None: return None - super(ReferenceField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) def validate(self, value): if not isinstance(value, (self.document_type, LazyReference, DBRef, ObjectId)): - self.error('A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents') + self.error( + "A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents" + ) if isinstance(value, Document) and value.id is None: - self.error('You can only reference documents once they have been ' - 'saved to the database') + self.error( + "You can only reference documents once they have 
been " + "saved to the database" + ) def lookup_member(self, member_name): return self.document_type._fields.get(member_name) @@ -1205,30 +1323,30 @@ class CachedReferenceField(BaseField): fields = [] # XXX ValidationError raised outside of the "validate" method. - if ( - not isinstance(document_type, six.string_types) and - not issubclass(document_type, Document) + if not isinstance(document_type, str) and not issubclass( + document_type, Document ): - self.error('Argument to CachedReferenceField constructor must be a' - ' document class or a string') + self.error( + "Argument to CachedReferenceField constructor must be a" + " document class or a string" + ) self.auto_sync = auto_sync self.document_type_obj = document_type self.fields = fields - super(CachedReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) def start_listener(self): from mongoengine import signals - signals.post_save.connect(self.on_document_pre_save, - sender=self.document_type) + signals.post_save.connect(self.on_document_pre_save, sender=self.document_type) def on_document_pre_save(self, sender, document, created, **kwargs): if created: return None update_kwargs = { - 'set__%s__%s' % (self.name, key): val + "set__{}__{}".format(self.name, key): val for key, val in document._delta()[0].items() if key in self.fields } @@ -1236,21 +1354,21 @@ class CachedReferenceField(BaseField): filter_kwargs = {} filter_kwargs[self.name] = document - self.owner_document.objects( - **filter_kwargs).update(**update_kwargs) + self.owner_document.objects(**filter_kwargs).update(**update_kwargs) def to_python(self, value): if isinstance(value, dict): collection = self.document_type._get_collection_name() - value = DBRef( - collection, self.document_type.id.to_python(value['_id'])) - return self.document_type._from_son(self.document_type._get_db().dereference(value)) + value = DBRef(collection, self.document_type.id.to_python(value["_id"])) + return self.document_type._from_son( + 
self.document_type._get_db().dereference(value) + ) return value @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -1270,14 +1388,14 @@ class CachedReferenceField(BaseField): if auto_dereference and isinstance(value, DBRef): dereferenced = self.document_type._get_db().dereference(value) if dereferenced is None: - raise DoesNotExist('Trying to dereference unknown document %s' % value) + raise DoesNotExist("Trying to dereference unknown document %s" % value) else: instance._data[self.name] = self.document_type._from_son(dereferenced) - return super(CachedReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, document, use_db_field=True, fields=None): - id_field_name = self.document_type._meta['id_field'] + id_field_name = self.document_type._meta["id_field"] id_field = self.document_type._fields[id_field_name] # XXX ValidationError raised outside of the "validate" method. @@ -1285,14 +1403,14 @@ class CachedReferenceField(BaseField): # We need the id from the saved object to create the DBRef id_ = document.pk if id_ is None: - self.error('You can only reference documents once they have' - ' been saved to the database') + self.error( + "You can only reference documents once they have" + " been saved to the database" + ) else: - self.error('Only accept a document object') + self.error("Only accept a document object") - value = SON(( - ('_id', id_field.to_mongo(id_)), - )) + value = SON((("_id", id_field.to_mongo(id_)),)) if fields: new_fields = [f for f in self.fields if f in fields] @@ -1309,9 +1427,11 @@ class CachedReferenceField(BaseField): # XXX ValidationError raised outside of the "validate" method. 
if isinstance(value, Document): if value.pk is None: - self.error('You can only reference documents once they have' - ' been saved to the database') - value_dict = {'_id': value.pk} + self.error( + "You can only reference documents once they have" + " been saved to the database" + ) + value_dict = {"_id": value.pk} for field in self.fields: value_dict.update({field: value[field]}) @@ -1321,11 +1441,13 @@ class CachedReferenceField(BaseField): def validate(self, value): if not isinstance(value, self.document_type): - self.error('A CachedReferenceField only accepts documents') + self.error("A CachedReferenceField only accepts documents") if isinstance(value, Document) and value.id is None: - self.error('You can only reference documents once they have been ' - 'saved to the database') + self.error( + "You can only reference documents once they have been " + "saved to the database" + ) def lookup_member(self, member_name): return self.document_type._fields.get(member_name) @@ -1335,7 +1457,7 @@ class CachedReferenceField(BaseField): Sync all cached fields on demand. Caution: this operation may be slower. 
""" - update_key = 'set__%s' % self.name + update_key = "set__%s" % self.name for doc in self.document_type.objects: filter_kwargs = {} @@ -1344,8 +1466,7 @@ class CachedReferenceField(BaseField): update_kwargs = {} update_kwargs[update_key] = doc - self.owner_document.objects( - **filter_kwargs).update(**update_kwargs) + self.owner_document.objects(**filter_kwargs).update(**update_kwargs) class GenericReferenceField(BaseField): @@ -1369,30 +1490,32 @@ class GenericReferenceField(BaseField): """ def __init__(self, *args, **kwargs): - choices = kwargs.pop('choices', None) - super(GenericReferenceField, self).__init__(*args, **kwargs) + choices = kwargs.pop("choices", None) + super().__init__(*args, **kwargs) self.choices = [] # Keep the choices as a list of allowed Document class names if choices: for choice in choices: - if isinstance(choice, six.string_types): + if isinstance(choice, str): self.choices.append(choice) elif isinstance(choice, type) and issubclass(choice, Document): self.choices.append(choice._class_name) else: # XXX ValidationError raised outside of the "validate" # method. 
- self.error('Invalid choices provided: must be a list of' - 'Document subclasses and/or six.string_typess') + self.error( + "Invalid choices provided: must be a list of" + "Document subclasses and/or str" + ) def _validate_choices(self, value): if isinstance(value, dict): # If the field has not been dereferenced, it is still a dict # of class and DBRef - value = value.get('_cls') + value = value.get("_cls") elif isinstance(value, Document): value = value._class_name - super(GenericReferenceField, self)._validate_choices(value) + super()._validate_choices(value) def __get__(self, instance, owner): if instance is None: @@ -1404,28 +1527,30 @@ class GenericReferenceField(BaseField): if auto_dereference and isinstance(value, (dict, SON)): dereferenced = self.dereference(value) if dereferenced is None: - raise DoesNotExist('Trying to dereference unknown document %s' % value) + raise DoesNotExist("Trying to dereference unknown document %s" % value) else: instance._data[self.name] = dereferenced - return super(GenericReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): if not isinstance(value, (Document, DBRef, dict, SON)): - self.error('GenericReferences can only contain documents') + self.error("GenericReferences can only contain documents") if isinstance(value, (dict, SON)): - if '_ref' not in value or '_cls' not in value: - self.error('GenericReferences can only contain documents') + if "_ref" not in value or "_cls" not in value: + self.error("GenericReferences can only contain documents") # We need the id from the saved object to create the DBRef elif isinstance(value, Document) and value.id is None: - self.error('You can only reference documents once they have been' - ' saved to the database') + self.error( + "You can only reference documents once they have been" + " saved to the database" + ) def dereference(self, value): - doc_cls = get_document(value['_cls']) - reference = value['_ref'] + doc_cls = 
get_document(value["_cls"]) + reference = value["_ref"] doc = doc_cls._get_db().dereference(reference) if doc is not None: doc = doc_cls._from_son(doc) @@ -1438,7 +1563,7 @@ class GenericReferenceField(BaseField): if isinstance(document, (dict, SON, ObjectId, DBRef)): return document - id_field_name = document.__class__._meta['id_field'] + id_field_name = document.__class__._meta["id_field"] id_field = document.__class__._fields[id_field_name] if isinstance(document, Document): @@ -1446,18 +1571,17 @@ class GenericReferenceField(BaseField): id_ = document.id if id_ is None: # XXX ValidationError raised outside of the "validate" method. - self.error('You can only reference documents once they have' - ' been saved to the database') + self.error( + "You can only reference documents once they have" + " been saved to the database" + ) else: id_ = document id_ = id_field.to_mongo(id_) collection = document._get_collection_name() ref = DBRef(collection, id_) - return SON(( - ('_cls', document._class_name), - ('_ref', ref) - )) + return SON((("_cls", document._class_name), ("_ref", ref))) def prepare_query_value(self, op, value): if value is None: @@ -1471,38 +1595,38 @@ class BinaryField(BaseField): def __init__(self, max_bytes=None, **kwargs): self.max_bytes = max_bytes - super(BinaryField, self).__init__(**kwargs) + super().__init__(**kwargs) def __set__(self, instance, value): """Handle bytearrays in python 3.1""" - if six.PY3 and isinstance(value, bytearray): - value = six.binary_type(value) - return super(BinaryField, self).__set__(instance, value) + if isinstance(value, bytearray): + value = bytes(value) + return super().__set__(instance, value) def to_mongo(self, value): return Binary(value) def validate(self, value): - if not isinstance(value, (six.binary_type, Binary)): - self.error('BinaryField only accepts instances of ' - '(%s, %s, Binary)' % ( - six.binary_type.__name__, Binary.__name__)) + if not isinstance(value, (bytes, Binary)): + self.error( + 
"BinaryField only accepts instances of " + "(%s, %s, Binary)" % (bytes.__name__, Binary.__name__) + ) if self.max_bytes is not None and len(value) > self.max_bytes: - self.error('Binary value is too long') + self.error("Binary value is too long") def prepare_query_value(self, op, value): if value is None: return value - return super(BinaryField, self).prepare_query_value( - op, self.to_mongo(value)) + return super().prepare_query_value(op, self.to_mongo(value)) class GridFSError(Exception): pass -class GridFSProxy(object): +class GridFSProxy: """Proxy object to handle writing and reading of files to and from GridFS .. versionadded:: 0.4 @@ -1512,10 +1636,14 @@ class GridFSProxy(object): _fs = None - def __init__(self, grid_id=None, key=None, - instance=None, - db_alias=DEFAULT_CONNECTION_NAME, - collection_name='fs'): + def __init__( + self, + grid_id=None, + key=None, + instance=None, + db_alias=DEFAULT_CONNECTION_NAME, + collection_name="fs", + ): self.grid_id = grid_id # Store GridFS id for file self.key = key self.instance = instance @@ -1525,8 +1653,16 @@ class GridFSProxy(object): self.gridout = None def __getattr__(self, name): - attrs = ('_fs', 'grid_id', 'key', 'instance', 'db_alias', - 'collection_name', 'newfile', 'gridout') + attrs = ( + "_fs", + "grid_id", + "key", + "instance", + "db_alias", + "collection_name", + "newfile", + "gridout", + ) if name in attrs: return self.__getattribute__(name) obj = self.get() @@ -1540,11 +1676,9 @@ class GridFSProxy(object): def __bool__(self): return bool(self.grid_id) - __nonzero__ = __bool__ # For Py2 support - def __getstate__(self): self_dict = self.__dict__ - self_dict['_fs'] = None + self_dict["_fs"] = None return self_dict def __copy__(self): @@ -1556,18 +1690,20 @@ class GridFSProxy(object): return self.__copy__() def __repr__(self): - return '<%s: %s>' % (self.__class__.__name__, self.grid_id) + return "<{}: {}>".format(self.__class__.__name__, self.grid_id) def __str__(self): gridout = self.get() - 
filename = getattr(gridout, 'filename') if gridout else '' - return '<%s: %s (%s)>' % (self.__class__.__name__, filename, self.grid_id) + filename = getattr(gridout, "filename") if gridout else "" + return "<{}: {} ({})>".format(self.__class__.__name__, filename, self.grid_id) def __eq__(self, other): if isinstance(other, GridFSProxy): - return ((self.grid_id == other.grid_id) and - (self.collection_name == other.collection_name) and - (self.db_alias == other.db_alias)) + return ( + (self.grid_id == other.grid_id) + and (self.collection_name == other.collection_name) + and (self.db_alias == other.db_alias) + ) else: return False @@ -1577,8 +1713,7 @@ class GridFSProxy(object): @property def fs(self): if not self._fs: - self._fs = gridfs.GridFS( - get_db(self.db_alias), self.collection_name) + self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name) return self._fs def get(self, grid_id=None): @@ -1603,16 +1738,20 @@ class GridFSProxy(object): def put(self, file_obj, **kwargs): if self.grid_id: - raise GridFSError('This document already has a file. Either delete ' - 'it or call replace to overwrite it') + raise GridFSError( + "This document already has a file. Either delete " + "it or call replace to overwrite it" + ) self.grid_id = self.fs.put(file_obj, **kwargs) self._mark_as_changed() def write(self, string): if self.grid_id: if not self.newfile: - raise GridFSError('This document already has a file. Either ' - 'delete it or call replace to overwrite it') + raise GridFSError( + "This document already has a file. Either " + "delete it or call replace to overwrite it" + ) else: self.new_file() self.newfile.write(string) @@ -1631,7 +1770,7 @@ class GridFSProxy(object): try: return gridout.read(size) except Exception: - return '' + return "" def delete(self): # Delete file from GridFS, FileField still remains @@ -1661,11 +1800,13 @@ class FileField(BaseField): .. versionchanged:: 0.5 added optional size param for read .. 
versionchanged:: 0.6 added db_alias for multidb support """ + proxy_class = GridFSProxy - def __init__(self, db_alias=DEFAULT_CONNECTION_NAME, collection_name='fs', - **kwargs): - super(FileField, self).__init__(**kwargs) + def __init__( + self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs + ): + super().__init__(**kwargs) self.collection_name = collection_name self.db_alias = db_alias @@ -1687,9 +1828,8 @@ class FileField(BaseField): def __set__(self, instance, value): key = self.name if ( - (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or - isinstance(value, (six.binary_type, six.string_types)) - ): + hasattr(value, "read") and not isinstance(value, GridFSProxy) + ) or isinstance(value, (bytes, str)): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) # If a file already exists, delete it @@ -1700,8 +1840,7 @@ class FileField(BaseField): pass # Create a new proxy object as we don't already have one - instance._data[key] = self.get_proxy_obj( - key=key, instance=instance) + instance._data[key] = self.get_proxy_obj(key=key, instance=instance) instance._data[key].put(value) else: instance._data[key] = value @@ -1714,9 +1853,12 @@ class FileField(BaseField): if collection_name is None: collection_name = self.collection_name - return self.proxy_class(key=key, instance=instance, - db_alias=db_alias, - collection_name=collection_name) + return self.proxy_class( + key=key, + instance=instance, + db_alias=db_alias, + collection_name=collection_name, + ) def to_mongo(self, value): # Store the GridFS file id in MongoDB @@ -1726,16 +1868,16 @@ class FileField(BaseField): def to_python(self, value): if value is not None: - return self.proxy_class(value, - collection_name=self.collection_name, - db_alias=self.db_alias) + return self.proxy_class( + value, collection_name=self.collection_name, db_alias=self.db_alias + ) def validate(self, value): if value.grid_id is not None: if not isinstance(value, 
self.proxy_class): - self.error('FileField only accepts GridFSProxy values') + self.error("FileField only accepts GridFSProxy values") if not isinstance(value.grid_id, ObjectId): - self.error('Invalid GridFSProxy value') + self.error("Invalid GridFSProxy value") class ImageGridFsProxy(GridFSProxy): @@ -1752,52 +1894,51 @@ class ImageGridFsProxy(GridFSProxy): """ field = self.instance._fields[self.key] # Handle nested fields - if hasattr(field, 'field') and isinstance(field.field, FileField): + if hasattr(field, "field") and isinstance(field.field, FileField): field = field.field try: img = Image.open(file_obj) img_format = img.format except Exception as e: - raise ValidationError('Invalid image: %s' % e) + raise ValidationError("Invalid image: %s" % e) # Progressive JPEG # TODO: fixme, at least unused, at worst bad implementation - progressive = img.info.get('progressive') or False + progressive = img.info.get("progressive") or False - if (kwargs.get('progressive') and - isinstance(kwargs.get('progressive'), bool) and - img_format == 'JPEG'): + if ( + kwargs.get("progressive") + and isinstance(kwargs.get("progressive"), bool) + and img_format == "JPEG" + ): progressive = True else: progressive = False - if (field.size and (img.size[0] > field.size['width'] or - img.size[1] > field.size['height'])): + if field.size and ( + img.size[0] > field.size["width"] or img.size[1] > field.size["height"] + ): size = field.size - if size['force']: - img = ImageOps.fit(img, - (size['width'], - size['height']), - Image.ANTIALIAS) + if size["force"]: + img = ImageOps.fit( + img, (size["width"], size["height"]), Image.ANTIALIAS + ) else: - img.thumbnail((size['width'], - size['height']), - Image.ANTIALIAS) + img.thumbnail((size["width"], size["height"]), Image.ANTIALIAS) thumbnail = None if field.thumbnail_size: size = field.thumbnail_size - if size['force']: + if size["force"]: thumbnail = ImageOps.fit( - img, (size['width'], size['height']), Image.ANTIALIAS) + img, 
(size["width"], size["height"]), Image.ANTIALIAS + ) else: thumbnail = img.copy() - thumbnail.thumbnail((size['width'], - size['height']), - Image.ANTIALIAS) + thumbnail.thumbnail((size["width"], size["height"]), Image.ANTIALIAS) if thumbnail: thumb_id = self._put_thumbnail(thumbnail, img_format, progressive) @@ -1806,16 +1947,13 @@ class ImageGridFsProxy(GridFSProxy): w, h = img.size - io = StringIO() + io = BytesIO() img.save(io, img_format, progressive=progressive) io.seek(0) - return super(ImageGridFsProxy, self).put(io, - width=w, - height=h, - format=img_format, - thumbnail_id=thumb_id, - **kwargs) + return super().put( + io, width=w, height=h, format=img_format, thumbnail_id=thumb_id, **kwargs + ) def delete(self, *args, **kwargs): # deletes thumbnail @@ -1823,19 +1961,16 @@ class ImageGridFsProxy(GridFSProxy): if out and out.thumbnail_id: self.fs.delete(out.thumbnail_id) - return super(ImageGridFsProxy, self).delete() + return super().delete() def _put_thumbnail(self, thumbnail, format, progressive, **kwargs): w, h = thumbnail.size - io = StringIO() + io = BytesIO() thumbnail.save(io, format, progressive=progressive) io.seek(0) - return self.fs.put(io, width=w, - height=h, - format=format, - **kwargs) + return self.fs.put(io, width=w, height=h, format=format, **kwargs) @property def size(self): @@ -1887,37 +2022,30 @@ class ImageField(FileField): .. 
versionadded:: 0.6 """ + proxy_class = ImageGridFsProxy - def __init__(self, size=None, thumbnail_size=None, - collection_name='images', **kwargs): + def __init__( + self, size=None, thumbnail_size=None, collection_name="images", **kwargs + ): if not Image: - raise ImproperlyConfigured('PIL library was not found') + raise ImproperlyConfigured("PIL library was not found") - params_size = ('width', 'height', 'force') - extra_args = { - 'size': size, - 'thumbnail_size': thumbnail_size - } + params_size = ("width", "height", "force") + extra_args = {"size": size, "thumbnail_size": thumbnail_size} for att_name, att in extra_args.items(): value = None if isinstance(att, (tuple, list)): - if six.PY3: - value = dict(itertools.zip_longest(params_size, att, - fillvalue=None)) - else: - value = dict(map(None, params_size, att)) + value = dict(itertools.zip_longest(params_size, att, fillvalue=None)) setattr(self, att_name, value) - super(ImageField, self).__init__( - collection_name=collection_name, - **kwargs) + super().__init__(collection_name=collection_name, **kwargs) class SequenceField(BaseField): """Provides a sequential counter see: - http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers + https://docs.mongodb.com/manual/reference/method/ObjectId/#ObjectIDs-SequenceNumbers .. 
note:: @@ -1946,40 +2074,54 @@ class SequenceField(BaseField): """ _auto_gen = True - COLLECTION_NAME = 'mongoengine.counters' + COLLECTION_NAME = "mongoengine.counters" VALUE_DECORATOR = int - def __init__(self, collection_name=None, db_alias=None, sequence_name=None, - value_decorator=None, *args, **kwargs): + def __init__( + self, + collection_name=None, + db_alias=None, + sequence_name=None, + value_decorator=None, + *args, + **kwargs + ): self.collection_name = collection_name or self.COLLECTION_NAME self.db_alias = db_alias or DEFAULT_CONNECTION_NAME self.sequence_name = sequence_name - self.value_decorator = value_decorator if callable(value_decorator) else self.VALUE_DECORATOR - super(SequenceField, self).__init__(*args, **kwargs) + self.value_decorator = ( + value_decorator if callable(value_decorator) else self.VALUE_DECORATOR + ) + super().__init__(*args, **kwargs) def generate(self): """ Generate and Increment the counter """ sequence_name = self.get_sequence_name() - sequence_id = '%s.%s' % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] - counter = collection.find_and_modify(query={'_id': sequence_id}, - update={'$inc': {'next': 1}}, - new=True, - upsert=True) - return self.value_decorator(counter['next']) + + counter = collection.find_one_and_update( + filter={"_id": sequence_id}, + update={"$inc": {"next": 1}}, + return_document=ReturnDocument.AFTER, + upsert=True, + ) + return self.value_decorator(counter["next"]) def set_next_value(self, value): """Helper method to set the next sequence value""" sequence_name = self.get_sequence_name() - sequence_id = "%s.%s" % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] - counter = collection.find_and_modify(query={"_id": sequence_id}, - update={"$set": {"next": value}}, - new=True, - upsert=True) - return 
self.value_decorator(counter['next']) + counter = collection.find_one_and_update( + filter={"_id": sequence_id}, + update={"$set": {"next": value}}, + return_document=ReturnDocument.AFTER, + upsert=True, + ) + return self.value_decorator(counter["next"]) def get_next_value(self): """Helper method to get the next value for previewing. @@ -1988,12 +2130,12 @@ class SequenceField(BaseField): as it is only fixed on set. """ sequence_name = self.get_sequence_name() - sequence_id = '%s.%s' % (sequence_name, self.name) + sequence_id = "{}.{}".format(sequence_name, self.name) collection = get_db(alias=self.db_alias)[self.collection_name] - data = collection.find_one({'_id': sequence_id}) + data = collection.find_one({"_id": sequence_id}) if data: - return self.value_decorator(data['next'] + 1) + return self.value_decorator(data["next"] + 1) return self.value_decorator(1) @@ -2001,14 +2143,17 @@ class SequenceField(BaseField): if self.sequence_name: return self.sequence_name owner = self.owner_document - if issubclass(owner, Document) and not owner._meta.get('abstract'): + if issubclass(owner, Document) and not owner._meta.get("abstract"): return owner._get_collection_name() else: - return ''.join('_%s' % c if c.isupper() else c - for c in owner._class_name).strip('_').lower() + return ( + "".join("_%s" % c if c.isupper() else c for c in owner._class_name) + .strip("_") + .lower() + ) def __get__(self, instance, owner): - value = super(SequenceField, self).__get__(instance, owner) + value = super().__get__(instance, owner) if value is None and instance._initialised: value = self.generate() instance._data[self.name] = value @@ -2021,7 +2166,7 @@ class SequenceField(BaseField): if value is None and instance._initialised: value = self.generate() - return super(SequenceField, self).__set__(instance, value) + return super().__set__(instance, value) def prepare_query_value(self, op, value): """ @@ -2042,6 +2187,7 @@ class UUIDField(BaseField): .. 
versionadded:: 0.6 """ + _binary = None def __init__(self, binary=True, **kwargs): @@ -2054,14 +2200,14 @@ class UUIDField(BaseField): .. versionchanged:: 0.6.19 """ self._binary = binary - super(UUIDField, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python(self, value): if not self._binary: original_value = value try: - if not isinstance(value, six.string_types): - value = six.text_type(value) + if not isinstance(value, str): + value = str(value) return uuid.UUID(value) except (ValueError, TypeError, AttributeError): return original_value @@ -2069,8 +2215,8 @@ class UUIDField(BaseField): def to_mongo(self, value): if not self._binary: - return six.text_type(value) - elif isinstance(value, six.string_types): + return str(value) + elif isinstance(value, str): return uuid.UUID(value) return value @@ -2081,12 +2227,12 @@ class UUIDField(BaseField): def validate(self, value): if not isinstance(value, uuid.UUID): - if not isinstance(value, six.string_types): + if not isinstance(value, str): value = str(value) try: uuid.UUID(value) except (ValueError, TypeError, AttributeError) as exc: - self.error('Could not convert to UUID: %s' % exc) + self.error("Could not convert to UUID: %s" % exc) class GeoPointField(BaseField): @@ -2105,16 +2251,14 @@ class GeoPointField(BaseField): def validate(self, value): """Make sure that a geo-value is of type (x, y)""" if not isinstance(value, (list, tuple)): - self.error('GeoPointField can only accept tuples or lists ' - 'of (x, y)') + self.error("GeoPointField can only accept tuples or lists of (x, y)") if not len(value) == 2: - self.error('Value (%s) must be a two-dimensional point' % - repr(value)) - elif (not isinstance(value[0], (float, int)) or - not isinstance(value[1], (float, int))): - self.error( - 'Both values (%s) in point must be float or int' % repr(value)) + self.error("Value (%s) must be a two-dimensional point" % repr(value)) + elif not isinstance(value[0], (float, int)) or not isinstance( + value[1], 
(float, int) + ): + self.error("Both values (%s) in point must be float or int" % repr(value)) class PointField(GeoJsonBaseField): @@ -2134,7 +2278,8 @@ class PointField(GeoJsonBaseField): .. versionadded:: 0.8 """ - _type = 'Point' + + _type = "Point" class LineStringField(GeoJsonBaseField): @@ -2145,7 +2290,7 @@ class LineStringField(GeoJsonBaseField): .. code-block:: js {'type' : 'LineString' , - 'coordinates' : [[x1, y1], [x1, y1] ... [xn, yn]]} + 'coordinates' : [[x1, y1], [x2, y2] ... [xn, yn]]} You can either pass a dict with the full information or a list of points. @@ -2153,7 +2298,8 @@ class LineStringField(GeoJsonBaseField): .. versionadded:: 0.8 """ - _type = 'LineString' + + _type = "LineString" class PolygonField(GeoJsonBaseField): @@ -2175,7 +2321,8 @@ class PolygonField(GeoJsonBaseField): .. versionadded:: 0.8 """ - _type = 'Polygon' + + _type = "Polygon" class MultiPointField(GeoJsonBaseField): @@ -2195,7 +2342,8 @@ class MultiPointField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = 'MultiPoint' + + _type = "MultiPoint" class MultiLineStringField(GeoJsonBaseField): @@ -2215,7 +2363,8 @@ class MultiLineStringField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = 'MultiLineString' + + _type = "MultiLineString" class MultiPolygonField(GeoJsonBaseField): @@ -2242,7 +2391,8 @@ class MultiPolygonField(GeoJsonBaseField): .. versionadded:: 0.9 """ - _type = 'MultiPolygon' + + _type = "MultiPolygon" class LazyReferenceField(BaseField): @@ -2256,8 +2406,14 @@ class LazyReferenceField(BaseField): .. versionadded:: 0.15 """ - def __init__(self, document_type, passthrough=False, dbref=False, - reverse_delete_rule=DO_NOTHING, **kwargs): + def __init__( + self, + document_type, + passthrough=False, + dbref=False, + reverse_delete_rule=DO_NOTHING, + **kwargs + ): """Initialises the Reference Field. :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef` @@ -2270,22 +2426,23 @@ class LazyReferenceField(BaseField): document. 
Note this only work getting field (not setting or deleting). """ # XXX ValidationError raised outside of the "validate" method. - if ( - not isinstance(document_type, six.string_types) and - not issubclass(document_type, Document) + if not isinstance(document_type, str) and not issubclass( + document_type, Document ): - self.error('Argument to LazyReferenceField constructor must be a ' - 'document class or a string') + self.error( + "Argument to LazyReferenceField constructor must be a " + "document class or a string" + ) self.dbref = dbref self.passthrough = passthrough self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule - super(LazyReferenceField, self).__init__(**kwargs) + super().__init__(**kwargs) @property def document_type(self): - if isinstance(self.document_type_obj, six.string_types): + if isinstance(self.document_type_obj, str): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: @@ -2295,15 +2452,23 @@ class LazyReferenceField(BaseField): def build_lazyref(self, value): if isinstance(value, LazyReference): if value.passthrough != self.passthrough: - value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough) + value = LazyReference( + value.document_type, value.pk, passthrough=self.passthrough + ) elif value is not None: if isinstance(value, self.document_type): - value = LazyReference(self.document_type, value.pk, passthrough=self.passthrough) + value = LazyReference( + self.document_type, value.pk, passthrough=self.passthrough + ) elif isinstance(value, DBRef): - value = LazyReference(self.document_type, value.id, passthrough=self.passthrough) + value = LazyReference( + self.document_type, value.id, passthrough=self.passthrough + ) else: # value is the primary key of the referenced document - value = LazyReference(self.document_type, value, passthrough=self.passthrough) + value = LazyReference( + self.document_type, value, 
passthrough=self.passthrough + ) return value def __get__(self, instance, owner): @@ -2316,7 +2481,7 @@ class LazyReferenceField(BaseField): if value: instance._data[self.name] = value - return super(LazyReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def to_mongo(self, value): if isinstance(value, LazyReference): @@ -2328,7 +2493,7 @@ class LazyReferenceField(BaseField): else: # value is the primary key of the referenced document pk = value - id_field_name = self.document_type._meta['id_field'] + id_field_name = self.document_type._meta["id_field"] id_field = self.document_type._fields[id_field_name] pk = id_field.to_mongo(pk) if self.dbref: @@ -2336,10 +2501,17 @@ class LazyReferenceField(BaseField): else: return pk + def to_python(self, value): + """Convert a MongoDB-compatible type to a Python type.""" + if not isinstance(value, (DBRef, Document, EmbeddedDocument)): + collection = self.document_type._get_collection_name() + value = DBRef(collection, self.document_type.id.to_python(value)) + return value + def validate(self, value): if isinstance(value, LazyReference): if value.collection != self.document_type._get_collection_name(): - self.error('Reference must be on a `%s` document.' % self.document_type) + self.error("Reference must be on a `%s` document." % self.document_type) pk = value.pk elif isinstance(value, self.document_type): pk = value.pk @@ -2351,7 +2523,7 @@ class LazyReferenceField(BaseField): pk = value.id else: # value is the primary key of the referenced document - id_field_name = self.document_type._meta['id_field'] + id_field_name = self.document_type._meta["id_field"] id_field = getattr(self.document_type, id_field_name) pk = value try: @@ -2360,16 +2532,20 @@ class LazyReferenceField(BaseField): self.error( "value should be `{0}` document, LazyReference or DBRef on `{0}` " "or `{0}`'s primary key (i.e. 
`{1}`)".format( - self.document_type.__name__, type(id_field).__name__)) + self.document_type.__name__, type(id_field).__name__ + ) + ) if pk is None: - self.error('You can only reference documents once they have been ' - 'saved to the database') + self.error( + "You can only reference documents once they have been " + "saved to the database" + ) def prepare_query_value(self, op, value): if value is None: return None - super(LazyReferenceField, self).prepare_query_value(op, value) + super().prepare_query_value(op, value) return self.to_mongo(value) def lookup_member(self, member_name): @@ -2395,23 +2571,31 @@ class GenericLazyReferenceField(GenericReferenceField): """ def __init__(self, *args, **kwargs): - self.passthrough = kwargs.pop('passthrough', False) - super(GenericLazyReferenceField, self).__init__(*args, **kwargs) + self.passthrough = kwargs.pop("passthrough", False) + super().__init__(*args, **kwargs) def _validate_choices(self, value): if isinstance(value, LazyReference): value = value.document_type._class_name - super(GenericLazyReferenceField, self)._validate_choices(value) + super()._validate_choices(value) def build_lazyref(self, value): if isinstance(value, LazyReference): if value.passthrough != self.passthrough: - value = LazyReference(value.document_type, value.pk, passthrough=self.passthrough) + value = LazyReference( + value.document_type, value.pk, passthrough=self.passthrough + ) elif value is not None: if isinstance(value, (dict, SON)): - value = LazyReference(get_document(value['_cls']), value['_ref'].id, passthrough=self.passthrough) + value = LazyReference( + get_document(value["_cls"]), + value["_ref"].id, + passthrough=self.passthrough, + ) elif isinstance(value, Document): - value = LazyReference(type(value), value.pk, passthrough=self.passthrough) + value = LazyReference( + type(value), value.pk, passthrough=self.passthrough + ) return value def __get__(self, instance, owner): @@ -2422,22 +2606,31 @@ class 
GenericLazyReferenceField(GenericReferenceField): if value: instance._data[self.name] = value - return super(GenericLazyReferenceField, self).__get__(instance, owner) + return super().__get__(instance, owner) def validate(self, value): if isinstance(value, LazyReference) and value.pk is None: - self.error('You can only reference documents once they have been' - ' saved to the database') - return super(GenericLazyReferenceField, self).validate(value) + self.error( + "You can only reference documents once they have been" + " saved to the database" + ) + return super().validate(value) def to_mongo(self, document): if document is None: return None if isinstance(document, LazyReference): - return SON(( - ('_cls', document.document_type._class_name), - ('_ref', DBRef(document.document_type._get_collection_name(), document.pk)) - )) + return SON( + ( + ("_cls", document.document_type._class_name), + ( + "_ref", + DBRef( + document.document_type._get_collection_name(), document.pk + ), + ), + ) + ) else: - return super(GenericLazyReferenceField, self).to_mongo(document) + return super().to_mongo(document) diff --git a/mongoengine/mongodb_support.py b/mongoengine/mongodb_support.py index 423cd92b..522f064e 100644 --- a/mongoengine/mongodb_support.py +++ b/mongoengine/mongodb_support.py @@ -11,9 +11,9 @@ MONGODB_36 = (3, 6) def get_mongodb_version(): - """Return the version of the connected mongoDB (first 2 digits) + """Return the version of the default connected mongoDB (first 2 digits) :return: tuple(int, int) """ - version_list = get_connection().server_info()['versionArray'][:2] # e.g. 
(3, 2) + version_list = get_connection().server_info()["versionArray"][:2] # e.g: (3, 2) return tuple(version_list) diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py index f66c038e..9cf9e2ae 100644 --- a/mongoengine/pymongo_support.py +++ b/mongoengine/pymongo_support.py @@ -2,6 +2,7 @@ Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. """ import pymongo +from pymongo.errors import OperationFailure _PYMONGO_37 = (3, 7) @@ -10,13 +11,41 @@ PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 -def count_documents(collection, filter): - """Pymongo>3.7 deprecates count in favour of count_documents""" +def count_documents( + collection, filter, skip=None, limit=None, hint=None, collation=None +): + """Pymongo>3.7 deprecates count in favour of count_documents + """ + if limit == 0: + return 0 # Pymongo raises an OperationFailure if called with limit=0 + + kwargs = {} + if skip is not None: + kwargs["skip"] = skip + if limit is not None: + kwargs["limit"] = limit + if hint not in (-1, None): + kwargs["hint"] = hint + if collation is not None: + kwargs["collation"] = collation + + # count_documents appeared in pymongo 3.7 if IS_PYMONGO_GTE_37: - return collection.count_documents(filter) - else: - count = collection.find(filter).count() - return count + try: + return collection.count_documents(filter=filter, **kwargs) + except OperationFailure: + # OperationFailure - accounts for some operators that used to work + # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere) + # fallback to deprecated Cursor.count + # Keeping this should be reevaluated the day pymongo removes .count entirely + pass + + cursor = collection.find(filter) + for option, option_value in kwargs.items(): + cursor_method = getattr(cursor, option) + cursor = cursor_method(option_value) + with_limit_and_skip = "skip" in kwargs or "limit" in kwargs + return 
cursor.count(with_limit_and_skip=with_limit_and_skip) def list_collection_names(db, include_system_collections=False): @@ -27,6 +56,6 @@ def list_collection_names(db, include_system_collections=False): collections = db.collection_names() if not include_system_collections: - collections = [c for c in collections if not c.startswith('system.')] + collections = [c for c in collections if not c.startswith("system.")] return collections diff --git a/mongoengine/python_support.py b/mongoengine/python_support.py deleted file mode 100644 index 57e467db..00000000 --- a/mongoengine/python_support.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Helper functions, constants, and types to aid with Python v2.7 - v3.x support -""" -import six - -# six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. -StringIO = six.BytesIO - -# Additionally for Py2, try to use the faster cStringIO, if available -if not six.PY3: - try: - import cStringIO - except ImportError: - pass - else: - StringIO = cStringIO.StringIO - - -if six.PY3: - from collections.abc import Hashable -else: - # raises DeprecationWarnings in Python >=3.7 - from collections import Hashable diff --git a/mongoengine/queryset/__init__.py b/mongoengine/queryset/__init__.py index 5219c39e..f041d07b 100644 --- a/mongoengine/queryset/__init__.py +++ b/mongoengine/queryset/__init__.py @@ -7,11 +7,22 @@ from mongoengine.queryset.visitor import * # Expose just the public subset of all imported objects and constants. 
__all__ = ( - 'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager', - 'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL', - + "QuerySet", + "QuerySetNoCache", + "Q", + "queryset_manager", + "QuerySetManager", + "QueryFieldList", + "DO_NOTHING", + "NULLIFY", + "CASCADE", + "DENY", + "PULL", # Errors that might be related to a queryset, mostly here for backward # compatibility - 'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned', - 'NotUniqueError', 'OperationError', + "DoesNotExist", + "InvalidQueryError", + "MultipleObjectsReturned", + "NotUniqueError", + "OperationError", ) diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 49e154fb..33ab6e2a 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -1,33 +1,41 @@ -from __future__ import absolute_import - import copy import itertools -import pprint import re import warnings +from collections.abc import Mapping + from bson import SON, json_util from bson.code import Code import pymongo import pymongo.errors from pymongo.collection import ReturnDocument from pymongo.common import validate_read_preference -import six -from six import iteritems +from pymongo.read_concern import ReadConcern from mongoengine import signals from mongoengine.base import get_document from mongoengine.common import _import_class from mongoengine.connection import get_db -from mongoengine.context_managers import set_write_concern, switch_db -from mongoengine.errors import (InvalidQueryError, LookUpError, - NotUniqueError, OperationError) +from mongoengine.context_managers import ( + set_read_write_concern, + set_write_concern, + switch_db, +) +from mongoengine.errors import ( + BulkWriteError, + InvalidQueryError, + LookUpError, + NotUniqueError, + OperationError, +) +from mongoengine.pymongo_support import count_documents from mongoengine.queryset import transform from mongoengine.queryset.field_list import QueryFieldList from 
mongoengine.queryset.visitor import Q, QNode -__all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') +__all__ = ("BaseQuerySet", "DO_NOTHING", "NULLIFY", "CASCADE", "DENY", "PULL") # Delete rules DO_NOTHING = 0 @@ -37,10 +45,11 @@ DENY = 3 PULL = 4 -class BaseQuerySet(object): +class BaseQuerySet: """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. """ + __dereference = False _auto_dereference = True @@ -49,15 +58,14 @@ class BaseQuerySet(object): self._collection_obj = collection self._mongo_query = None self._query_obj = Q() - self._initial_query = {} + self._cls_query = {} self._where_clause = None self._loaded_fields = QueryFieldList() self._ordering = None self._snapshot = False self._timeout = True - self._class_check = True - self._slave_okay = False self._read_preference = None + self._read_concern = None self._iter = False self._scalar = [] self._none = False @@ -66,56 +74,56 @@ class BaseQuerySet(object): # If inheritance is allowed, only return instances and instances of # subclasses of the class being used - if document._meta.get('allow_inheritance') is True: + if document._meta.get("allow_inheritance") is True: if len(self._document._subclasses) == 1: - self._initial_query = {'_cls': self._document._subclasses[0]} + self._cls_query = {"_cls": self._document._subclasses[0]} else: - self._initial_query = { - '_cls': {'$in': self._document._subclasses}} - self._loaded_fields = QueryFieldList(always_include=['_cls']) + self._cls_query = {"_cls": {"$in": self._document._subclasses}} + self._loaded_fields = QueryFieldList(always_include=["_cls"]) + self._cursor_obj = None self._limit = None self._skip = None + self._hint = -1 # Using -1 as None is a valid value for hint + self._collation = None self._batch_size = None self.only_fields = [] self._max_time_ms = None self._comment = None - def __call__(self, q_obj=None, class_check=True, read_preference=None, 
- **query): + # Hack - As people expect cursor[5:5] to return + # an empty result set. It's hard to do that right, though, because the + # server uses limit(0) to mean 'no limit'. So we set _empty + # in that case and check for it when iterating. We also unset + # it anytime we change _limit. Inspired by how it is done in pymongo.Cursor + self._empty = False + + def __call__(self, q_obj=None, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. :param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in the query; the :class:`~mongoengine.queryset.QuerySet` is filtered multiple times with different :class:`~mongoengine.queryset.Q` - objects, only the last one will be used - :param class_check: If set to False bypass class name check when - querying collection - :param read_preference: if set, overrides connection-level - read_preference from `ReplicaSetConnection`. - :param query: Django-style query keyword arguments + objects, only the last one will be used. + :param query: Django-style query keyword arguments. """ query = Q(**query) if q_obj: - # make sure proper query object is passed + # Make sure proper query object is passed. if not isinstance(q_obj, QNode): - msg = ('Not a query object: %s. ' - 'Did you intend to use key=value?' % q_obj) + msg = ( + "Not a query object: %s. " + "Did you intend to use key=value?" 
% q_obj + ) raise InvalidQueryError(msg) query &= q_obj - if read_preference is None: - queryset = self.clone() - else: - # Use the clone provided when setting read_preference - queryset = self.read_preference(read_preference) - + queryset = self.clone() queryset._query_obj &= query queryset._mongo_query = None queryset._cursor_obj = None - queryset._class_check = class_check return queryset @@ -129,10 +137,10 @@ class BaseQuerySet(object): obj_dict = self.__dict__.copy() # don't picke collection, instead pickle collection params - obj_dict.pop('_collection_obj') + obj_dict.pop("_collection_obj") # don't pickle cursor - obj_dict['_cursor_obj'] = None + obj_dict["_cursor_obj"] = None return obj_dict @@ -143,7 +151,7 @@ class BaseQuerySet(object): See https://github.com/MongoEngine/mongoengine/issues/442 """ - obj_dict['_collection_obj'] = obj_dict['_document']._get_collection() + obj_dict["_collection_obj"] = obj_dict["_document"]._get_collection() # update attributes self.__dict__.update(obj_dict) @@ -163,6 +171,7 @@ class BaseQuerySet(object): [, ] """ queryset = self.clone() + queryset._empty = False # Handle a slice if isinstance(key, slice): @@ -170,6 +179,8 @@ class BaseQuerySet(object): queryset._skip, queryset._limit = key.start, key.stop if key.start and key.stop: queryset._limit = key.stop - key.start + if queryset._limit == 0: + queryset._empty = True # Allow further QuerySet modifications to be performed return queryset @@ -181,7 +192,7 @@ class BaseQuerySet(object): queryset._document._from_son( queryset._cursor[key], _auto_dereference=self._auto_dereference, - only_fields=self.only_fields + only_fields=self.only_fields, ) ) @@ -191,10 +202,10 @@ class BaseQuerySet(object): return queryset._document._from_son( queryset._cursor[key], _auto_dereference=self._auto_dereference, - only_fields=self.only_fields + only_fields=self.only_fields, ) - raise TypeError('Provide a slice or an integer index') + raise TypeError("Provide a slice or an integer index") def 
__iter__(self): raise NotImplementedError @@ -208,8 +219,6 @@ class BaseQuerySet(object): """Avoid to open all records in an if stmt in Py3.""" return self._has_data() - __nonzero__ = __bool__ # For Py2 support - # Core functions def all(self): @@ -217,8 +226,7 @@ class BaseQuerySet(object): return self.__call__() def filter(self, *q_objs, **query): - """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` - """ + """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__`""" return self.__call__(*q_objs, **query) def search_text(self, text, language=None): @@ -234,14 +242,13 @@ class BaseQuerySet(object): """ queryset = self.clone() if queryset._search_text: - raise OperationError( - 'It is not possible to use search_text two times.') + raise OperationError("It is not possible to use search_text two times.") - query_kwargs = SON({'$search': text}) + query_kwargs = SON({"$search": text}) if language: - query_kwargs['$language'] = language + query_kwargs["$language"] = language - queryset._query_obj &= Q(__raw__={'$text': query_kwargs}) + queryset._query_obj &= Q(__raw__={"$text": query_kwargs}) queryset._mongo_query = None queryset._cursor_obj = None queryset._search_text = text @@ -262,21 +269,21 @@ class BaseQuerySet(object): queryset = queryset.filter(*q_objs, **query) try: - result = six.next(queryset) + result = next(queryset) except StopIteration: - msg = ('%s matching query does not exist.' - % queryset._document._class_name) + msg = "%s matching query does not exist." % queryset._document._class_name raise queryset._document.DoesNotExist(msg) + try: - six.next(queryset) + # Check if there is another match + next(queryset) except StopIteration: return result - # If we were able to retrieve the 2nd doc, rewind the cursor and - # raise the MultipleObjectsReturned exception. 
- queryset.rewind() - message = u'%d items returned, instead of 1' % queryset.count() - raise queryset._document.MultipleObjectsReturned(message) + # If we were able to retrieve the 2nd doc, raise the MultipleObjectsReturned exception. + raise queryset._document.MultipleObjectsReturned( + "2 or more items returned, instead of 1" + ) def create(self, **kwargs): """Create new object. Returns the saved object instance. @@ -294,8 +301,9 @@ class BaseQuerySet(object): result = None return result - def insert(self, doc_or_docs, load_bulk=True, - write_concern=None, signal_kwargs=None): + def insert( + self, doc_or_docs, load_bulk=True, write_concern=None, signal_kwargs=None + ): """bulk insert documents :param doc_or_docs: a document or list of documents to be inserted @@ -308,7 +316,7 @@ class BaseQuerySet(object): ``insert(..., {w: 2, fsync: True})`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to. - :parm signal_kwargs: (optional) kwargs dictionary to be passed to + :param signal_kwargs: (optional) kwargs dictionary to be passed to the signal calls. By default returns document instances, set ``load_bulk`` to False to @@ -318,7 +326,7 @@ class BaseQuerySet(object): .. 
versionchanged:: 0.10.7 Add signal_kwargs argument """ - Document = _import_class('Document') + Document = _import_class("Document") if write_concern is None: write_concern = {} @@ -331,16 +339,16 @@ class BaseQuerySet(object): for doc in docs: if not isinstance(doc, self._document): - msg = ("Some documents inserted aren't instances of %s" - % str(self._document)) + msg = "Some documents inserted aren't instances of %s" % str( + self._document + ) raise OperationError(msg) if doc.pk and not doc._created: - msg = 'Some documents have ObjectIds, use doc.update() instead' + msg = "Some documents have ObjectIds, use doc.update() instead" raise OperationError(msg) signal_kwargs = signal_kwargs or {} - signals.pre_bulk_insert.send(self._document, - documents=docs, **signal_kwargs) + signals.pre_bulk_insert.send(self._document, documents=docs, **signal_kwargs) raw = [doc.to_mongo() for doc in docs] @@ -352,23 +360,27 @@ class BaseQuerySet(object): try: inserted_result = insert_func(raw) - ids = [inserted_result.inserted_id] if return_one else inserted_result.inserted_ids + ids = ( + [inserted_result.inserted_id] + if return_one + else inserted_result.inserted_ids + ) except pymongo.errors.DuplicateKeyError as err: - message = 'Could not save document (%s)' - raise NotUniqueError(message % six.text_type(err)) + message = "Could not save document (%s)" + raise NotUniqueError(message % err) except pymongo.errors.BulkWriteError as err: # inserting documents that already have an _id field will # give huge performance debt or raise - message = u'Document must not have _id value before bulk write (%s)' - raise NotUniqueError(message % six.text_type(err)) + message = "Bulk write error: (%s)" + raise BulkWriteError(message % err.details) except pymongo.errors.OperationFailure as err: - message = 'Could not save document (%s)' - if re.match('^E1100[01] duplicate key', six.text_type(err)): + message = "Could not save document (%s)" + if re.match("^E1100[01] duplicate key", 
str(err)): # E11000 - duplicate key error index # E11001 - duplicate key on update - message = u'Tried to save duplicate unique keys (%s)' - raise NotUniqueError(message % six.text_type(err)) - raise OperationError(message % six.text_type(err)) + message = "Tried to save duplicate unique keys (%s)" + raise NotUniqueError(message % err) + raise OperationError(message % err) # Apply inserted_ids to documents for doc, doc_id in zip(docs, ids): @@ -376,13 +388,15 @@ class BaseQuerySet(object): if not load_bulk: signals.post_bulk_insert.send( - self._document, documents=docs, loaded=False, **signal_kwargs) + self._document, documents=docs, loaded=False, **signal_kwargs + ) return ids[0] if return_one else ids documents = self.in_bulk(ids) results = [documents.get(obj_id) for obj_id in ids] signals.post_bulk_insert.send( - self._document, documents=results, loaded=True, **signal_kwargs) + self._document, documents=results, loaded=True, **signal_kwargs + ) return results[0] if return_one else results def count(self, with_limit_and_skip=False): @@ -392,14 +406,40 @@ class BaseQuerySet(object): :meth:`skip` that has been applied to this cursor into account when getting the count """ - if self._limit == 0 and with_limit_and_skip is False or self._none: + # mimic the fact that setting .limit(0) in pymongo sets no limit + # https://docs.mongodb.com/manual/reference/method/cursor.limit/#zero-value + if ( + self._limit == 0 + and with_limit_and_skip is False + or self._none + or self._empty + ): return 0 - count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) + + kwargs = ( + {"limit": self._limit, "skip": self._skip} if with_limit_and_skip else {} + ) + + if self._limit == 0: + # mimic the fact that historically .limit(0) sets no limit + kwargs.pop("limit", None) + + if self._hint not in (-1, None): + kwargs["hint"] = self._hint + + if self._collation: + kwargs["collation"] = self._collation + + count = count_documents( + collection=self._cursor.collection, + 
filter=self._cursor._Cursor__spec, + **kwargs + ) + self._cursor_obj = None return count - def delete(self, write_concern=None, _from_doc_delete=False, - cascade_refs=None): + def delete(self, write_concern=None, _from_doc_delete=False, cascade_refs=None): """Delete the documents matched by the query. :param write_concern: Extra keyword arguments are passed down which @@ -422,12 +462,13 @@ class BaseQuerySet(object): # Handle deletes where skips or limits have been applied or # there is an untriggered delete signal has_delete_signal = signals.signals_available and ( - signals.pre_delete.has_receivers_for(doc) or - signals.post_delete.has_receivers_for(doc) + signals.pre_delete.has_receivers_for(doc) + or signals.post_delete.has_receivers_for(doc) ) - call_document_delete = (queryset._skip or queryset._limit or - has_delete_signal) and not _from_doc_delete + call_document_delete = ( + queryset._skip or queryset._limit or has_delete_signal + ) and not _from_doc_delete if call_document_delete: cnt = 0 @@ -436,28 +477,28 @@ class BaseQuerySet(object): cnt += 1 return cnt - delete_rules = doc._meta.get('delete_rules') or {} + delete_rules = doc._meta.get("delete_rules") or {} delete_rules = list(delete_rules.items()) # Check for DENY rules before actually deleting/nullifying any other # references for rule_entry, rule in delete_rules: document_cls, field_name = rule_entry - if document_cls._meta.get('abstract'): + if document_cls._meta.get("abstract"): continue if rule == DENY: - refs = document_cls.objects(**{field_name + '__in': self}) + refs = document_cls.objects(**{field_name + "__in": self}) if refs.limit(1).count() > 0: raise OperationError( - 'Could not delete document (%s.%s refers to it)' + "Could not delete document (%s.%s refers to it)" % (document_cls.__name__, field_name) ) # Check all the other rules for rule_entry, rule in delete_rules: document_cls, field_name = rule_entry - if document_cls._meta.get('abstract'): + if document_cls._meta.get("abstract"): 
continue if rule == CASCADE: @@ -466,26 +507,38 @@ class BaseQuerySet(object): if doc._collection == document_cls._collection: for ref in queryset: cascade_refs.add(ref.id) - refs = document_cls.objects(**{field_name + '__in': self, - 'pk__nin': cascade_refs}) + refs = document_cls.objects( + **{field_name + "__in": self, "pk__nin": cascade_refs} + ) if refs.count() > 0: - refs.delete(write_concern=write_concern, - cascade_refs=cascade_refs) + refs.delete(write_concern=write_concern, cascade_refs=cascade_refs) elif rule == NULLIFY: - document_cls.objects(**{field_name + '__in': self}).update( - write_concern=write_concern, - **{'unset__%s' % field_name: 1}) + document_cls.objects(**{field_name + "__in": self}).update( + write_concern=write_concern, **{"unset__%s" % field_name: 1} + ) elif rule == PULL: - document_cls.objects(**{field_name + '__in': self}).update( - write_concern=write_concern, - **{'pull_all__%s' % field_name: self}) + document_cls.objects(**{field_name + "__in": self}).update( + write_concern=write_concern, **{"pull_all__%s" % field_name: self} + ) - result = queryset._collection.remove(queryset._query, **write_concern) - if result: - return result.get('n') + with set_write_concern(queryset._collection, write_concern) as collection: + result = collection.delete_many(queryset._query) - def update(self, upsert=False, multi=True, write_concern=None, - full_result=False, **update): + # If we're using an unack'd write concern, we don't really know how + # many items have been deleted at this point, hence we only return + # the count for ack'd ops. + if result.acknowledged: + return result.deleted_count + + def update( + self, + upsert=False, + multi=True, + write_concern=None, + read_concern=None, + full_result=False, + **update + ): """Perform an atomic update on the fields matched by the query. 
:param upsert: insert if document doesn't exist (default ``False``) @@ -496,6 +549,7 @@ class BaseQuerySet(object): ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. + :param read_concern: Override the read concern for the operation :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number updated items :param update: Django-style update keyword arguments @@ -505,7 +559,7 @@ class BaseQuerySet(object): .. versionadded:: 0.2 """ if not update and not upsert: - raise OperationError('No update parameters, would remove data') + raise OperationError("No update parameters, would remove data") if write_concern is None: write_concern = {} @@ -516,13 +570,15 @@ class BaseQuerySet(object): # If doing an atomic upsert on an inheritable class # then ensure we add _cls to the update operation - if upsert and '_cls' in query: - if '$set' in update: - update['$set']['_cls'] = queryset._document._class_name + if upsert and "_cls" in query: + if "$set" in update: + update["$set"]["_cls"] = queryset._document._class_name else: - update['$set'] = {'_cls': queryset._document._class_name} + update["$set"] = {"_cls": queryset._document._class_name} try: - with set_write_concern(queryset._collection, write_concern) as collection: + with set_read_write_concern( + queryset._collection, write_concern, read_concern + ) as collection: update_func = collection.update_one if multi: update_func = collection.update_many @@ -530,16 +586,16 @@ class BaseQuerySet(object): if full_result: return result elif result.raw_result: - return result.raw_result['n'] + return result.raw_result["n"] except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u'Update failed (%s)' % six.text_type(err)) + raise NotUniqueError("Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - if six.text_type(err) == u'multi not coded yet': - 
message = u'update() method requires MongoDB 1.1.3+' + if str(err) == "multi not coded yet": + message = "update() method requires MongoDB 1.1.3+" raise OperationError(message) - raise OperationError(u'Update failed (%s)' % six.text_type(err)) + raise OperationError("Update failed (%s)" % err) - def upsert_one(self, write_concern=None, **update): + def upsert_one(self, write_concern=None, read_concern=None, **update): """Overwrite or add the first document matched by the query. :param write_concern: Extra keyword arguments are passed down which @@ -548,6 +604,7 @@ class BaseQuerySet(object): ``save(..., write_concern={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on the primary server. + :param read_concern: Override the read concern for the operation :param update: Django-style update keyword arguments :returns the new or overwritten document @@ -555,11 +612,16 @@ class BaseQuerySet(object): .. versionadded:: 0.10.2 """ - atomic_update = self.update(multi=False, upsert=True, - write_concern=write_concern, - full_result=True, **update) + atomic_update = self.update( + multi=False, + upsert=True, + write_concern=write_concern, + read_concern=read_concern, + full_result=True, + **update + ) - if atomic_update.raw_result['updatedExisting']: + if atomic_update.raw_result["updatedExisting"]: document = self.get() else: document = self._document.objects.with_id(atomic_update.upserted_id) @@ -588,9 +650,12 @@ class BaseQuerySet(object): multi=False, write_concern=write_concern, full_result=full_result, - **update) + **update + ) - def modify(self, upsert=False, full_response=False, remove=False, new=False, **update): + def modify( + self, upsert=False, full_response=False, remove=False, new=False, **update + ): """Update and return the updated document. 
Returns either the document before or after modification based on `new` @@ -615,11 +680,10 @@ class BaseQuerySet(object): """ if remove and new: - raise OperationError('Conflicting parameters: remove and new') + raise OperationError("Conflicting parameters: remove and new") if not update and not upsert and not remove: - raise OperationError( - 'No update parameters, must either update or remove') + raise OperationError("No update parameters, must either update or remove") queryset = self.clone() query = queryset._query @@ -629,27 +693,35 @@ class BaseQuerySet(object): try: if full_response: - msg = 'With PyMongo 3+, it is not possible anymore to get the full response.' + msg = "With PyMongo 3+, it is not possible anymore to get the full response." warnings.warn(msg, DeprecationWarning) if remove: result = queryset._collection.find_one_and_delete( - query, sort=sort, **self._cursor_args) + query, sort=sort, **self._cursor_args + ) else: if new: return_doc = ReturnDocument.AFTER else: return_doc = ReturnDocument.BEFORE result = queryset._collection.find_one_and_update( - query, update, upsert=upsert, sort=sort, return_document=return_doc, - **self._cursor_args) + query, + update, + upsert=upsert, + sort=sort, + return_document=return_doc, + **self._cursor_args + ) except pymongo.errors.DuplicateKeyError as err: - raise NotUniqueError(u'Update failed (%s)' % err) + raise NotUniqueError("Update failed (%s)" % err) except pymongo.errors.OperationFailure as err: - raise OperationError(u'Update failed (%s)' % err) + raise OperationError("Update failed (%s)" % err) if full_response: - if result['value'] is not None: - result['value'] = self._document._from_son(result['value'], only_fields=self.only_fields) + if result["value"] is not None: + result["value"] = self._document._from_son( + result["value"], only_fields=self.only_fields + ) else: if result is not None: result = self._document._from_son(result, only_fields=self.only_fields) @@ -667,51 +739,55 @@ class 
BaseQuerySet(object): """ queryset = self.clone() if not queryset._query_obj.empty: - msg = 'Cannot use a filter whilst using `with_id`' + msg = "Cannot use a filter whilst using `with_id`" raise InvalidQueryError(msg) return queryset.filter(pk=object_id).first() def in_bulk(self, object_ids): - """Retrieve a set of documents by their ids. + """Retrieve a set of documents by their ids. - :param object_ids: a list or tuple of ``ObjectId``\ s - :rtype: dict of ObjectIds as keys and collection-specific + :param object_ids: a list or tuple of ObjectId's + :rtype: dict of ObjectId's as keys and collection-specific Document subclasses as values. .. versionadded:: 0.3 """ doc_map = {} - docs = self._collection.find({'_id': {'$in': object_ids}}, - **self._cursor_args) + docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args) if self._scalar: for doc in docs: - doc_map[doc['_id']] = self._get_scalar( - self._document._from_son(doc, only_fields=self.only_fields)) + doc_map[doc["_id"]] = self._get_scalar( + self._document._from_son(doc, only_fields=self.only_fields) + ) elif self._as_pymongo: for doc in docs: - doc_map[doc['_id']] = doc + doc_map[doc["_id"]] = doc else: for doc in docs: - doc_map[doc['_id']] = self._document._from_son( + doc_map[doc["_id"]] = self._document._from_son( doc, only_fields=self.only_fields, - _auto_dereference=self._auto_dereference) + _auto_dereference=self._auto_dereference, + ) return doc_map def none(self): - """Helper that just returns a list""" + """Returns a queryset that never returns any objects and no query will be executed when accessing the results + inspired by django none() https://docs.djangoproject.com/en/dev/ref/models/querysets/#none + """ queryset = self.clone() queryset._none = True return queryset def no_sub_classes(self): + """Filter for only the instances of this specific document. + + Do NOT return any inherited documents. 
""" - Only return instances of this document and not any inherited documents - """ - if self._document._meta.get('allow_inheritance') is True: - self._initial_query = {'_cls': self._document._class_name} + if self._document._meta.get("allow_inheritance") is True: + self._cls_query = {"_cls": self._document._class_name} return self @@ -740,15 +816,36 @@ class BaseQuerySet(object): """ if not isinstance(new_qs, BaseQuerySet): raise OperationError( - '%s is not a subclass of BaseQuerySet' % new_qs.__name__) + "%s is not a subclass of BaseQuerySet" % new_qs.__name__ + ) - copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', - '_where_clause', '_loaded_fields', '_ordering', - '_snapshot', '_timeout', '_class_check', '_slave_okay', - '_read_preference', '_iter', '_scalar', '_as_pymongo', - '_limit', '_skip', '_hint', '_auto_dereference', - '_search_text', 'only_fields', '_max_time_ms', - '_comment', '_batch_size') + copy_props = ( + "_mongo_query", + "_cls_query", + "_none", + "_query_obj", + "_where_clause", + "_loaded_fields", + "_ordering", + "_snapshot", + "_timeout", + "_read_preference", + "_read_concern", + "_iter", + "_scalar", + "_as_pymongo", + "_limit", + "_skip", + "_empty", + "_hint", + "_collation", + "_auto_dereference", + "_search_text", + "only_fields", + "_max_time_ms", + "_comment", + "_batch_size", + ) for prop in copy_props: val = getattr(self, prop) @@ -780,6 +877,7 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._limit = n + queryset._empty = False # cancels the effect of empty # If a cursor object has already been created, apply the limit to it. if queryset._cursor_obj: @@ -824,6 +922,32 @@ class BaseQuerySet(object): return queryset + def collation(self, collation=None): + """ + Collation allows users to specify language-specific rules for string + comparison, such as rules for lettercase and accent marks. 
+ :param collation: `~pymongo.collation.Collation` or dict with + following fields: + { + locale: str, + caseLevel: bool, + caseFirst: str, + strength: int, + numericOrdering: bool, + alternate: str, + maxVariable: str, + backwards: str + } + Collation should be added to indexes like in test example + """ + queryset = self.clone() + queryset._collation = collation + + if queryset._cursor_obj: + queryset._cursor_obj.collation(collation) + + return queryset + def batch_size(self, size): """Limit the number of documents returned in a single batch (each batch requires a round trip to the server). @@ -861,37 +985,43 @@ class BaseQuerySet(object): except LookUpError: pass - distinct = self._dereference(queryset._cursor.distinct(field), 1, - name=field, instance=self._document) + distinct = self._dereference( + queryset._cursor.distinct(field), 1, name=field, instance=self._document + ) - doc_field = self._document._fields.get(field.split('.', 1)[0]) + doc_field = self._document._fields.get(field.split(".", 1)[0]) instance = None # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) - EmbeddedDocumentField = _import_class('EmbeddedDocumentField') - ListField = _import_class('ListField') - GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField') + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") + ListField = _import_class("ListField") + GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField") if isinstance(doc_field, ListField): - doc_field = getattr(doc_field, 'field', doc_field) + doc_field = getattr(doc_field, "field", doc_field) if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): - instance = getattr(doc_field, 'document_type', None) + instance = getattr(doc_field, "document_type", None) # handle distinct on subdocuments - if '.' in field: - for field_part in field.split('.')[1:]: + if "." 
in field: + for field_part in field.split(".")[1:]: # if looping on embedded document, get the document type instance - if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): + if instance and isinstance( + doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField) + ): doc_field = instance # now get the subdocument doc_field = getattr(doc_field, field_part, doc_field) # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) if isinstance(doc_field, ListField): - doc_field = getattr(doc_field, 'field', doc_field) - if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)): - instance = getattr(doc_field, 'document_type', None) + doc_field = getattr(doc_field, "field", doc_field) + if isinstance( + doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField) + ): + instance = getattr(doc_field, "document_type", None) - if instance and isinstance(doc_field, (EmbeddedDocumentField, - GenericEmbeddedDocumentField)): + if instance and isinstance( + doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField) + ): distinct = [instance(**doc) for doc in distinct] return distinct @@ -915,7 +1045,7 @@ class BaseQuerySet(object): .. versionchanged:: 0.5 - Added subfield support """ fields = {f: QueryFieldList.ONLY for f in fields} - self.only_fields = fields.keys() + self.only_fields = list(fields.keys()) return self.fields(True, **fields) def exclude(self, *fields): @@ -952,9 +1082,11 @@ class BaseQuerySet(object): posts = BlogPost.objects(...).fields(comments=0) - To retrieve a subrange of array elements: + To retrieve a subrange or sublist of array elements, + support exist for both the `slice` and `elemMatch` projection operator: posts = BlogPost.objects(...).fields(slice__comments=5) + posts = BlogPost.objects(...).fields(elemMatch__comments="test") :param kwargs: A set of keyword arguments identifying what to include, exclude, or slice. 
@@ -963,14 +1095,14 @@ class BaseQuerySet(object): """ # Check for an operator and transform to mongo-style if there is - operators = ['slice'] + operators = ["slice", "elemMatch"] cleaned_fields = [] for key, value in kwargs.items(): - parts = key.split('__') + parts = key.split("__") if parts[0] in operators: op = parts.pop(0) - value = {'$' + op: value} - key = '.'.join(parts) + value = {"$" + op: value} + key = ".".join(parts) cleaned_fields.append((key, value)) # Sort fields by their values, explicitly excluded fields first, then @@ -991,7 +1123,8 @@ class BaseQuerySet(object): fields = [field for field, value in group] fields = queryset._fields_to_dbfields(fields) queryset._loaded_fields += QueryFieldList( - fields, value=value, _only_called=_only_called) + fields, value=value, _only_called=_only_called + ) return queryset @@ -1005,7 +1138,8 @@ class BaseQuerySet(object): """ queryset = self.clone() queryset._loaded_fields = QueryFieldList( - always_include=queryset._loaded_fields.always_include) + always_include=queryset._loaded_fields.always_include + ) return queryset def order_by(self, *keys): @@ -1040,31 +1174,33 @@ class BaseQuerySet(object): return queryset + def clear_cls_query(self): + """Clear the default "_cls" query. + + By default, all queries generated for documents that allow inheritance + include an extra "_cls" clause. In most cases this is desirable, but + sometimes you might achieve better performance if you clear that + default query. + + Scan the code for `_cls_query` to get more details. + """ + queryset = self.clone() + queryset._cls_query = {} + return queryset + def comment(self, text): """Add a comment to the query. See https://docs.mongodb.com/manual/reference/method/cursor.comment/#cursor.comment for details. 
""" - return self._chainable_method('comment', text) + return self._chainable_method("comment", text) - def explain(self, format=False): + def explain(self): """Return an explain plan record for the - :class:`~mongoengine.queryset.QuerySet`\ 's cursor. - - :param format: format the plan before returning it + :class:`~mongoengine.queryset.QuerySet` cursor. """ - plan = self._cursor.explain() - - # TODO remove this option completely - it's useless. If somebody - # wants to pretty-print the output, they easily can. - if format: - msg = ('"format" param of BaseQuerySet.explain has been ' - 'deprecated and will be removed in future versions.') - warnings.warn(msg, DeprecationWarning) - plan = pprint.pformat(plan) - - return plan + return self._cursor.explain() # DEPRECATED. Has no more impact on PyMongo 3+ def snapshot(self, enabled): @@ -1075,7 +1211,7 @@ class BaseQuerySet(object): ..versionchanged:: 0.5 - made chainable .. deprecated:: Ignored with PyMongo 3+ """ - msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.' + msg = "snapshot is deprecated as it has no impact when using PyMongo 3+." warnings.warn(msg, DeprecationWarning) queryset = self.clone() queryset._snapshot = enabled @@ -1092,32 +1228,34 @@ class BaseQuerySet(object): queryset._timeout = enabled return queryset - # DEPRECATED. Has no more impact on PyMongo 3+ - def slave_okay(self, enabled): - """Enable or disable the slave_okay when querying. - - :param enabled: whether or not the slave_okay is enabled - - .. deprecated:: Ignored with PyMongo 3+ - """ - msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.' - warnings.warn(msg, DeprecationWarning) - queryset = self.clone() - queryset._slave_okay = enabled - return queryset - def read_preference(self, read_preference): """Change the read_preference when querying. :param read_preference: override ReplicaSetConnection-level preference. 
""" - validate_read_preference('read_preference', read_preference) + validate_read_preference("read_preference", read_preference) queryset = self.clone() queryset._read_preference = read_preference queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_preference return queryset + def read_concern(self, read_concern): + """Change the read_concern when querying. + + :param read_concern: override ReplicaSetConnection-level + preference. + """ + if read_concern is not None and not isinstance(read_concern, Mapping): + raise TypeError("%r is not a valid read concern." % (read_concern,)) + + queryset = self.clone() + queryset._read_concern = ( + ReadConcern(**read_concern) if read_concern is not None else None + ) + queryset._cursor_obj = None # we need to re-create the cursor object whenever we apply read_concern + return queryset + def scalar(self, *fields): """Instead of returning Document instances, return either a specific value or a tuple of values in order. 
@@ -1161,7 +1299,7 @@ class BaseQuerySet(object): :param ms: the number of milliseconds before killing the query on the server """ - return self._chainable_method('max_time_ms', ms) + return self._chainable_method("max_time_ms", ms) # JSON Helpers @@ -1172,45 +1310,62 @@ class BaseQuerySet(object): def from_json(self, json_data): """Converts json data to unsaved objects""" son_data = json_util.loads(json_data) - return [self._document._from_son(data, only_fields=self.only_fields) for data in son_data] + return [ + self._document._from_son(data, only_fields=self.only_fields) + for data in son_data + ] + + def aggregate(self, pipeline, *suppl_pipeline, **kwargs): + """Perform a aggregate function based in your queryset params - def aggregate(self, *pipeline, **kwargs): - """ - Perform a aggregate function based in your queryset params :param pipeline: list of aggregation commands,\ see: http://docs.mongodb.org/manual/core/aggregation-pipeline/ - + :param suppl_pipeline: unpacked list of pipeline (added to support deprecation of the old interface) + parameter will be removed shortly + :param kwargs: (optional) kwargs dictionary to be passed to pymongo's aggregate call + See https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.aggregate .. 
versionadded:: 0.9 """ - initial_pipeline = [] + using_deprecated_interface = isinstance(pipeline, dict) or bool(suppl_pipeline) + user_pipeline = [pipeline] if isinstance(pipeline, dict) else list(pipeline) + if using_deprecated_interface: + msg = "Calling .aggregate() with un unpacked list (*pipeline) is deprecated, it will soon change and will expect a list (similar to pymongo.Collection.aggregate interface), see documentation" + warnings.warn(msg, DeprecationWarning) + + user_pipeline += suppl_pipeline + + initial_pipeline = [] if self._query: - initial_pipeline.append({'$match': self._query}) + initial_pipeline.append({"$match": self._query}) if self._ordering: - initial_pipeline.append({'$sort': dict(self._ordering)}) + initial_pipeline.append({"$sort": dict(self._ordering)}) if self._limit is not None: # As per MongoDB Documentation (https://docs.mongodb.com/manual/reference/operator/aggregation/limit/), # keeping limit stage right after sort stage is more efficient. But this leads to wrong set of documents # for a skip stage that might succeed these. So we need to maintain more documents in memory in such a # case (https://stackoverflow.com/a/24161461). 
- initial_pipeline.append({'$limit': self._limit + (self._skip or 0)}) + initial_pipeline.append({"$limit": self._limit + (self._skip or 0)}) if self._skip is not None: - initial_pipeline.append({'$skip': self._skip}) + initial_pipeline.append({"$skip": self._skip}) - pipeline = initial_pipeline + list(pipeline) + final_pipeline = initial_pipeline + user_pipeline - if self._read_preference is not None: - return self._collection.with_options(read_preference=self._read_preference) \ - .aggregate(pipeline, cursor={}, **kwargs) + collection = self._collection + if self._read_preference is not None or self._read_concern is not None: + collection = self._collection.with_options( + read_preference=self._read_preference, read_concern=self._read_concern + ) - return self._collection.aggregate(pipeline, cursor={}, **kwargs) + return collection.aggregate(final_pipeline, cursor={}, **kwargs) # JS functionality - def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, - scope=None): + def map_reduce( + self, map_f, reduce_f, output, finalize_f=None, limit=None, scope=None + ): """Perform a map/reduce query using the current query spec and ordering. 
While ``map_reduce`` respects ``QuerySet`` chaining, it must be the last call made, as it does not return a maleable @@ -1250,66 +1405,66 @@ class BaseQuerySet(object): """ queryset = self.clone() - MapReduceDocument = _import_class('MapReduceDocument') + MapReduceDocument = _import_class("MapReduceDocument") - if not hasattr(self._collection, 'map_reduce'): - raise NotImplementedError('Requires MongoDB >= 1.7.1') + if not hasattr(self._collection, "map_reduce"): + raise NotImplementedError("Requires MongoDB >= 1.7.1") map_f_scope = {} if isinstance(map_f, Code): map_f_scope = map_f.scope - map_f = six.text_type(map_f) + map_f = str(map_f) map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) reduce_f_scope = {} if isinstance(reduce_f, Code): reduce_f_scope = reduce_f.scope - reduce_f = six.text_type(reduce_f) + reduce_f = str(reduce_f) reduce_f_code = queryset._sub_js_fields(reduce_f) reduce_f = Code(reduce_f_code, reduce_f_scope) - mr_args = {'query': queryset._query} + mr_args = {"query": queryset._query} if finalize_f: finalize_f_scope = {} if isinstance(finalize_f, Code): finalize_f_scope = finalize_f.scope - finalize_f = six.text_type(finalize_f) + finalize_f = str(finalize_f) finalize_f_code = queryset._sub_js_fields(finalize_f) finalize_f = Code(finalize_f_code, finalize_f_scope) - mr_args['finalize'] = finalize_f + mr_args["finalize"] = finalize_f if scope: - mr_args['scope'] = scope + mr_args["scope"] = scope if limit: - mr_args['limit'] = limit + mr_args["limit"] = limit - if output == 'inline' and not queryset._ordering: - map_reduce_function = 'inline_map_reduce' + if output == "inline" and not queryset._ordering: + map_reduce_function = "inline_map_reduce" else: - map_reduce_function = 'map_reduce' + map_reduce_function = "map_reduce" - if isinstance(output, six.string_types): - mr_args['out'] = output + if isinstance(output, str): + mr_args["out"] = output elif isinstance(output, dict): ordered_output = [] - for part in ('replace', 'merge', 
'reduce'): + for part in ("replace", "merge", "reduce"): value = output.get(part) if value: ordered_output.append((part, value)) break else: - raise OperationError('actionData not specified for output') + raise OperationError("actionData not specified for output") - db_alias = output.get('db_alias') - remaing_args = ['db', 'sharded', 'nonAtomic'] + db_alias = output.get("db_alias") + remaing_args = ["db", "sharded", "nonAtomic"] if db_alias: - ordered_output.append(('db', get_db(db_alias).name)) + ordered_output.append(("db", get_db(db_alias).name)) del remaing_args[0] for part in remaing_args: @@ -1317,20 +1472,22 @@ class BaseQuerySet(object): if value: ordered_output.append((part, value)) - mr_args['out'] = SON(ordered_output) + mr_args["out"] = SON(ordered_output) results = getattr(queryset._collection, map_reduce_function)( - map_f, reduce_f, **mr_args) + map_f, reduce_f, **mr_args + ) - if map_reduce_function == 'map_reduce': + if map_reduce_function == "map_reduce": results = results.find() if queryset._ordering: results = results.sort(queryset._ordering) for doc in results: - yield MapReduceDocument(queryset._document, queryset._collection, - doc['_id'], doc['value']) + yield MapReduceDocument( + queryset._document, queryset._collection, doc["_id"], doc["value"] + ) def exec_js(self, code, *fields, **options): """Execute a Javascript function on the server. 
A list of fields may be @@ -1361,16 +1518,13 @@ class BaseQuerySet(object): fields = [queryset._document._translate_field_name(f) for f in fields] collection = queryset._document._get_collection_name() - scope = { - 'collection': collection, - 'options': options or {}, - } + scope = {"collection": collection, "options": options or {}} query = queryset._query if queryset._where_clause: - query['$where'] = queryset._where_clause + query["$where"] = queryset._where_clause - scope['query'] = query + scope["query"] = query code = Code(code, scope=scope) db = queryset._document._get_db() @@ -1400,22 +1554,22 @@ class BaseQuerySet(object): """ db_field = self._fields_to_dbfields([field]).pop() pipeline = [ - {'$match': self._query}, - {'$group': {'_id': 'sum', 'total': {'$sum': '$' + db_field}}} + {"$match": self._query}, + {"$group": {"_id": "sum", "total": {"$sum": "$" + db_field}}}, ] # if we're performing a sum over a list field, we sum up all the # elements in the list, hence we need to $unwind the arrays first - ListField = _import_class('ListField') - field_parts = field.split('.') + ListField = _import_class("ListField") + field_parts = field.split(".") field_instances = self._document._lookup_field(field_parts) if isinstance(field_instances[-1], ListField): - pipeline.insert(1, {'$unwind': '$' + field}) + pipeline.insert(1, {"$unwind": "$" + field}) result = tuple(self._document._get_collection().aggregate(pipeline)) if result: - return result[0]['total'] + return result[0]["total"] return 0 def average(self, field): @@ -1426,22 +1580,22 @@ class BaseQuerySet(object): """ db_field = self._fields_to_dbfields([field]).pop() pipeline = [ - {'$match': self._query}, - {'$group': {'_id': 'avg', 'total': {'$avg': '$' + db_field}}} + {"$match": self._query}, + {"$group": {"_id": "avg", "total": {"$avg": "$" + db_field}}}, ] # if we're performing an average over a list field, we average out # all the elements in the list, hence we need to $unwind the arrays # first - 
ListField = _import_class('ListField') - field_parts = field.split('.') + ListField = _import_class("ListField") + field_parts = field.split(".") field_instances = self._document._lookup_field(field_parts) if isinstance(field_instances[-1], ListField): - pipeline.insert(1, {'$unwind': '$' + field}) + pipeline.insert(1, {"$unwind": "$" + field}) result = tuple(self._document._get_collection().aggregate(pipeline)) if result: - return result[0]['total'] + return result[0]["total"] return 0 def item_frequencies(self, field, normalize=False, map_reduce=True): @@ -1467,8 +1621,7 @@ class BaseQuerySet(object): document lookups """ if map_reduce: - return self._item_frequencies_map_reduce(field, - normalize=normalize) + return self._item_frequencies_map_reduce(field, normalize=normalize) return self._item_frequencies_exec_js(field, normalize=normalize) # Iterator helpers @@ -1476,25 +1629,25 @@ class BaseQuerySet(object): def __next__(self): """Wrap the result in a :class:`~mongoengine.Document` object. """ - if self._limit == 0 or self._none: + if self._none or self._empty: raise StopIteration - raw_doc = six.next(self._cursor) + raw_doc = next(self._cursor) if self._as_pymongo: return raw_doc doc = self._document._from_son( - raw_doc, _auto_dereference=self._auto_dereference, - only_fields=self.only_fields) + raw_doc, + _auto_dereference=self._auto_dereference, + only_fields=self.only_fields, + ) if self._scalar: return self._get_scalar(doc) return doc - next = __next__ # For Python2 support - def rewind(self): """Rewind the cursor to its unevaluated state. 
@@ -1514,15 +1667,16 @@ class BaseQuerySet(object): @property def _cursor_args(self): - fields_name = 'projection' + fields_name = "projection" # snapshot is not handled at all by PyMongo 3+ # TODO: evaluate similar possibilities using modifiers if self._snapshot: - msg = 'The snapshot option is not anymore available with PyMongo 3+' + msg = "The snapshot option is not anymore available with PyMongo 3+" warnings.warn(msg, DeprecationWarning) - cursor_args = { - 'no_cursor_timeout': not self._timeout - } + + cursor_args = {} + if not self._timeout: + cursor_args["no_cursor_timeout"] = True if self._loaded_fields: cursor_args[fields_name] = self._loaded_fields.as_dict() @@ -1531,7 +1685,7 @@ class BaseQuerySet(object): if fields_name not in cursor_args: cursor_args[fields_name] = {} - cursor_args[fields_name]['_text_score'] = {'$meta': 'textScore'} + cursor_args[fields_name]["_text_score"] = {"$meta": "textScore"} return cursor_args @@ -1547,13 +1701,13 @@ class BaseQuerySet(object): # XXX In PyMongo 3+, we define the read preference on a collection # level, not a cursor level. Thus, we need to get a cloned collection # object using `with_options` first. 
- if self._read_preference is not None: - self._cursor_obj = self._collection\ - .with_options(read_preference=self._read_preference)\ - .find(self._query, **self._cursor_args) + if self._read_preference is not None or self._read_concern is not None: + self._cursor_obj = self._collection.with_options( + read_preference=self._read_preference, read_concern=self._read_concern + ).find(self._query, **self._cursor_args) else: - self._cursor_obj = self._collection.find(self._query, - **self._cursor_args) + self._cursor_obj = self._collection.find(self._query, **self._cursor_args) + # Apply "where" clauses to cursor if self._where_clause: where_clause = self._sub_js_fields(self._where_clause) @@ -1569,9 +1723,9 @@ class BaseQuerySet(object): if self._ordering: # explicit ordering self._cursor_obj.sort(self._ordering) - elif self._ordering is None and self._document._meta['ordering']: + elif self._ordering is None and self._document._meta["ordering"]: # default ordering - order = self._get_order_by(self._document._meta['ordering']) + order = self._get_order_by(self._document._meta["ordering"]) self._cursor_obj.sort(order) if self._limit is not None: @@ -1583,6 +1737,9 @@ class BaseQuerySet(object): if self._hint != -1: self._cursor_obj.hint(self._hint) + if self._collation is not None: + self._cursor_obj.collation(self._collation) + if self._batch_size is not None: self._cursor_obj.batch_size(self._batch_size) @@ -1599,17 +1756,17 @@ class BaseQuerySet(object): def _query(self): if self._mongo_query is None: self._mongo_query = self._query_obj.to_query(self._document) - if self._class_check and self._initial_query: - if '_cls' in self._mongo_query: - self._mongo_query = {'$and': [self._initial_query, self._mongo_query]} + if self._cls_query: + if "_cls" in self._mongo_query: + self._mongo_query = {"$and": [self._cls_query, self._mongo_query]} else: - self._mongo_query.update(self._initial_query) + self._mongo_query.update(self._cls_query) return self._mongo_query @property 
def _dereference(self): if not self.__dereference: - self.__dereference = _import_class('DeReference')() + self.__dereference = _import_class("DeReference")() return self.__dereference def no_dereference(self): @@ -1642,7 +1799,9 @@ class BaseQuerySet(object): emit(null, 1); } } - """ % {'field': field} + """ % { + "field": field + } reduce_func = """ function(key, values) { var total = 0; @@ -1653,7 +1812,7 @@ class BaseQuerySet(object): return total; } """ - values = self.map_reduce(map_func, reduce_func, 'inline') + values = self.map_reduce(map_func, reduce_func, "inline") frequencies = {} for f in values: key = f.key @@ -1664,8 +1823,7 @@ class BaseQuerySet(object): if normalize: count = sum(frequencies.values()) - frequencies = {k: float(v) / count - for k, v in frequencies.items()} + frequencies = {k: float(v) / count for k, v in frequencies.items()} return frequencies @@ -1717,13 +1875,13 @@ class BaseQuerySet(object): } """ total, data, types = self.exec_js(freq_func, field) - values = {types.get(k): int(v) for k, v in iteritems(data)} + values = {types.get(k): int(v) for k, v in data.items()} if normalize: values = {k: float(v) / total for k, v in values.items()} frequencies = {} - for k, v in iteritems(values): + for k, v in values.items(): if isinstance(k, float): if int(k) == k: k = int(k) @@ -1735,16 +1893,15 @@ class BaseQuerySet(object): def _fields_to_dbfields(self, fields): """Translate fields' paths to their db equivalents.""" subclasses = [] - if self._document._meta['allow_inheritance']: - subclasses = [get_document(x) - for x in self._document._subclasses][1:] + if self._document._meta["allow_inheritance"]: + subclasses = [get_document(x) for x in self._document._subclasses][1:] db_field_paths = [] for field in fields: - field_parts = field.split('.') + field_parts = field.split(".") try: - field = '.'.join( - f if isinstance(f, six.string_types) else f.db_field + field = ".".join( + f if isinstance(f, str) else f.db_field for f in 
self._document._lookup_field(field_parts) ) db_field_paths.append(field) @@ -1755,8 +1912,8 @@ class BaseQuerySet(object): # through its subclasses and see if it exists on any of them. for subdoc in subclasses: try: - subfield = '.'.join( - f if isinstance(f, six.string_types) else f.db_field + subfield = ".".join( + f if isinstance(f, str) else f.db_field for f in subdoc._lookup_field(field_parts) ) db_field_paths.append(subfield) @@ -1783,18 +1940,18 @@ class BaseQuerySet(object): if not key: continue - if key == '$text_score': - key_list.append(('_text_score', {'$meta': 'textScore'})) + if key == "$text_score": + key_list.append(("_text_score", {"$meta": "textScore"})) continue direction = pymongo.ASCENDING - if key[0] == '-': + if key[0] == "-": direction = pymongo.DESCENDING - if key[0] in ('-', '+'): + if key[0] in ("-", "+"): key = key[1:] - key = key.replace('__', '.') + key = key.replace("__", ".") try: key = self._document._translate_field_name(key) except Exception: @@ -1806,9 +1963,8 @@ class BaseQuerySet(object): return key_list def _get_scalar(self, doc): - def lookup(obj, name): - chunks = name.split('__') + chunks = name.split("__") for chunk in chunks: obj = getattr(obj, chunk) return obj @@ -1828,21 +1984,20 @@ class BaseQuerySet(object): def field_sub(match): # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') + field_name = match.group(1).split(".") fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript - return u'["%s"]' % fields[-1].db_field + return '["%s"]' % fields[-1].db_field def field_path_sub(match): # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') + field_name = match.group(1).split(".") fields = self._document._lookup_field(field_name) # Substitute the correct name for the field into the javascript - return '.'.join([f.db_field for f in fields]) + return ".".join([f.db_field 
for f in fields]) - code = re.sub(r'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) - code = re.sub(r'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, - code) + code = re.sub(r"\[\s*~([A-z_][A-z_0-9.]+?)\s*\]", field_sub, code) + code = re.sub(r"\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}", field_path_sub, code) return code def _chainable_method(self, method_name, val): @@ -1859,22 +2014,6 @@ class BaseQuerySet(object): getattr(cursor, method_name)(val) # Cache the value on the queryset._{method_name} - setattr(queryset, '_' + method_name, val) + setattr(queryset, "_" + method_name, val) return queryset - - # Deprecated - def ensure_index(self, **kwargs): - """Deprecated use :func:`Document.ensure_index`""" - msg = ('Doc.objects()._ensure_index() is deprecated. ' - 'Use Doc.ensure_index() instead.') - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_index(**kwargs) - return self - - def _ensure_indexes(self): - """Deprecated use :func:`~Document.ensure_indexes`""" - msg = ('Doc.objects()._ensure_indexes() is deprecated. 
' - 'Use Doc.ensure_indexes() instead.') - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_indexes() diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index dba724af..443c895c 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -1,12 +1,15 @@ -__all__ = ('QueryFieldList',) +__all__ = ("QueryFieldList",) -class QueryFieldList(object): +class QueryFieldList: """Object that handles combinations of .only() and .exclude() calls""" + ONLY = 1 EXCLUDE = 0 - def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False): + def __init__( + self, fields=None, value=ONLY, always_include=None, _only_called=False + ): """The QueryFieldList builder :param fields: A list of fields used in `.only()` or `.exclude()` @@ -49,7 +52,7 @@ class QueryFieldList(object): self.fields = f.fields - self.fields self._clean_slice() - if '_id' in f.fields: + if "_id" in f.fields: self._id = f.value if self.always_include: @@ -59,25 +62,23 @@ class QueryFieldList(object): else: self.fields -= self.always_include - if getattr(f, '_only_called', False): + if getattr(f, "_only_called", False): self._only_called = True return self def __bool__(self): return bool(self.fields) - __nonzero__ = __bool__ # For Py2 support - def as_dict(self): field_list = {field: self.value for field in self.fields} if self.slice: field_list.update(self.slice) if self._id is not None: - field_list['_id'] = self._id + field_list["_id"] = self._id return field_list def reset(self): - self.fields = set([]) + self.fields = set() self.slice = {} self.value = self.ONLY diff --git a/mongoengine/queryset/manager.py b/mongoengine/queryset/manager.py index f93dbb43..699526fd 100644 --- a/mongoengine/queryset/manager.py +++ b/mongoengine/queryset/manager.py @@ -1,10 +1,10 @@ from functools import partial from mongoengine.queryset.queryset import QuerySet -__all__ = ('queryset_manager', 'QuerySetManager') +__all__ 
= ("queryset_manager", "QuerySetManager") -class QuerySetManager(object): +class QuerySetManager: """ The default QuerySet Manager. @@ -33,7 +33,7 @@ class QuerySetManager(object): return self # owner is the document that contains the QuerySetManager - queryset_class = owner._meta.get('queryset_class', self.default) + queryset_class = owner._meta.get("queryset_class", self.default) queryset = queryset_class(owner, owner._get_collection()) if self.get_queryset: arg_count = self.get_queryset.__code__.co_argcount diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index c7c593b1..e2db8f0d 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,11 +1,22 @@ -import six - from mongoengine.errors import OperationError -from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, - NULLIFY, PULL) +from mongoengine.queryset.base import ( + BaseQuerySet, + CASCADE, + DENY, + DO_NOTHING, + NULLIFY, + PULL, +) -__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', - 'DENY', 'PULL') +__all__ = ( + "QuerySet", + "QuerySetNoCache", + "DO_NOTHING", + "NULLIFY", + "CASCADE", + "DENY", + "PULL", +) # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 @@ -57,12 +68,12 @@ class QuerySet(BaseQuerySet): def __repr__(self): """Provide a string representation of the QuerySet""" if self._iter: - return '.. queryset mid-iteration ..' + return ".. queryset mid-iteration .." self._populate_cache() - data = self._result_cache[:REPR_OUTPUT_SIZE + 1] + data = self._result_cache[: REPR_OUTPUT_SIZE + 1] if len(data) > REPR_OUTPUT_SIZE: - data[-1] = '...(remaining elements truncated)...' + data[-1] = "...(remaining elements truncated)..." return repr(data) def _iter_results(self): @@ -114,8 +125,8 @@ class QuerySet(BaseQuerySet): # Pull in ITER_CHUNK_SIZE docs from the database and store them in # the result cache. 
try: - for _ in six.moves.range(ITER_CHUNK_SIZE): - self._result_cache.append(six.next(self)) + for _ in range(ITER_CHUNK_SIZE): + self._result_cache.append(next(self)) except StopIteration: # Getting this exception means there are no more docs in the # db cursor. Set _has_more to False so that we can use that @@ -130,10 +141,11 @@ class QuerySet(BaseQuerySet): getting the count """ if with_limit_and_skip is False: - return super(QuerySet, self).count(with_limit_and_skip) + return super().count(with_limit_and_skip) if self._len is None: - self._len = super(QuerySet, self).count(with_limit_and_skip) + # cache the length + self._len = super().count(with_limit_and_skip) return self._len @@ -143,10 +155,9 @@ class QuerySet(BaseQuerySet): .. versionadded:: 0.8.3 Convert to non caching queryset """ if self._result_cache is not None: - raise OperationError('QuerySet already cached') + raise OperationError("QuerySet already cached") - return self._clone_into(QuerySetNoCache(self._document, - self._collection)) + return self._clone_into(QuerySetNoCache(self._document, self._collection)) class QuerySetNoCache(BaseQuerySet): @@ -165,17 +176,17 @@ class QuerySetNoCache(BaseQuerySet): .. versionchanged:: 0.6.13 Now doesnt modify the cursor """ if self._iter: - return '.. queryset mid-iteration ..' + return ".. queryset mid-iteration .." data = [] - for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): + for _ in range(REPR_OUTPUT_SIZE + 1): try: - data.append(six.next(self)) + data.append(next(self)) except StopIteration: break if len(data) > REPR_OUTPUT_SIZE: - data[-1] = '...(remaining elements truncated)...' + data[-1] = "...(remaining elements truncated)..." 
self.rewind() return repr(data) diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 128a4e44..3f1db8fa 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -3,28 +3,59 @@ from collections import defaultdict from bson import ObjectId, SON from bson.dbref import DBRef import pymongo -import six -from six import iteritems from mongoengine.base import UPDATE_OPERATORS from mongoengine.common import _import_class from mongoengine.errors import InvalidQueryError -__all__ = ('query', 'update') +__all__ = ("query", "update", "STRING_OPERATORS") -COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', - 'all', 'size', 'exists', 'not', 'elemMatch', 'type') -GEO_OPERATORS = ('within_distance', 'within_spherical_distance', - 'within_box', 'within_polygon', 'near', 'near_sphere', - 'max_distance', 'min_distance', 'geo_within', 'geo_within_box', - 'geo_within_polygon', 'geo_within_center', - 'geo_within_sphere', 'geo_intersects') -STRING_OPERATORS = ('contains', 'icontains', 'startswith', - 'istartswith', 'endswith', 'iendswith', - 'exact', 'iexact') -CUSTOM_OPERATORS = ('match',) -MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + - STRING_OPERATORS + CUSTOM_OPERATORS) +COMPARISON_OPERATORS = ( + "ne", + "gt", + "gte", + "lt", + "lte", + "in", + "nin", + "mod", + "all", + "size", + "exists", + "not", + "elemMatch", + "type", +) +GEO_OPERATORS = ( + "within_distance", + "within_spherical_distance", + "within_box", + "within_polygon", + "near", + "near_sphere", + "max_distance", + "min_distance", + "geo_within", + "geo_within_box", + "geo_within_polygon", + "geo_within_center", + "geo_within_sphere", + "geo_intersects", +) +STRING_OPERATORS = ( + "contains", + "icontains", + "startswith", + "istartswith", + "endswith", + "iendswith", + "exact", + "iexact", +) +CUSTOM_OPERATORS = ("match",) +MATCH_OPERATORS = ( + COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + 
CUSTOM_OPERATORS +) # TODO make this less complex @@ -33,11 +64,11 @@ def query(_doc_cls=None, **kwargs): mongo_query = {} merge_query = defaultdict(list) for key, value in sorted(kwargs.items()): - if key == '__raw__': + if key == "__raw__": mongo_query.update(value) continue - parts = key.rsplit('__') + parts = key.rsplit("__") indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] parts = [part for part in parts if not part.isdigit()] # Check for an operator and transform to mongo-style if there is @@ -46,11 +77,11 @@ def query(_doc_cls=None, **kwargs): op = parts.pop() # Allow to escape operator-like field name by __ - if len(parts) > 1 and parts[-1] == '': + if len(parts) > 1 and parts[-1] == "": parts.pop() negate = False - if len(parts) > 1 and parts[-1] == 'not': + if len(parts) > 1 and parts[-1] == "not": parts.pop() negate = True @@ -62,18 +93,18 @@ def query(_doc_cls=None, **kwargs): raise InvalidQueryError(e) parts = [] - CachedReferenceField = _import_class('CachedReferenceField') - GenericReferenceField = _import_class('GenericReferenceField') + CachedReferenceField = _import_class("CachedReferenceField") + GenericReferenceField = _import_class("GenericReferenceField") cleaned_fields = [] for field in fields: append_field = True - if isinstance(field, six.string_types): + if isinstance(field, str): parts.append(field) append_field = False # is last and CachedReferenceField elif isinstance(field, CachedReferenceField) and fields[-1] == field: - parts.append('%s._id' % field.db_field) + parts.append("%s._id" % field.db_field) else: parts.append(field.db_field) @@ -83,15 +114,15 @@ def query(_doc_cls=None, **kwargs): # Convert value to proper value field = cleaned_fields[-1] - singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] + singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"] singular_ops += STRING_OPERATORS if op in singular_ops: value = field.prepare_query_value(op, value) if isinstance(field, CachedReferenceField) and 
value: - value = value['_id'] + value = value["_id"] - elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): + elif op in ("in", "nin", "all", "near") and not isinstance(value, dict): # Raise an error if the in/nin/all/near param is not iterable. value = _prepare_query_for_iterable(field, op, value) @@ -101,71 +132,77 @@ def query(_doc_cls=None, **kwargs): # * If the value is an ObjectId, the key should be "field_name._ref.$id". if isinstance(field, GenericReferenceField): if isinstance(value, DBRef): - parts[-1] += '._ref' + parts[-1] += "._ref" elif isinstance(value, ObjectId): - parts[-1] += '._ref.$id' + parts[-1] += "._ref.$id" # if op and op not in COMPARISON_OPERATORS: if op: if op in GEO_OPERATORS: value = _geo_operator(field, op, value) - elif op in ('match', 'elemMatch'): - ListField = _import_class('ListField') - EmbeddedDocumentField = _import_class('EmbeddedDocumentField') + elif op in ("match", "elemMatch"): + ListField = _import_class("ListField") + EmbeddedDocumentField = _import_class("EmbeddedDocumentField") if ( - isinstance(value, dict) and - isinstance(field, ListField) and - isinstance(field.field, EmbeddedDocumentField) + isinstance(value, dict) + and isinstance(field, ListField) + and isinstance(field.field, EmbeddedDocumentField) ): value = query(field.field.document_type, **value) else: value = field.prepare_query_value(op, value) - value = {'$elemMatch': value} + value = {"$elemMatch": value} elif op in CUSTOM_OPERATORS: - NotImplementedError('Custom method "%s" has not ' - 'been implemented' % op) + NotImplementedError( + 'Custom method "%s" has not ' "been implemented" % op + ) elif op not in STRING_OPERATORS: - value = {'$' + op: value} + value = {"$" + op: value} if negate: - value = {'$not': value} + value = {"$not": value} for i, part in indices: parts.insert(i, part) - key = '.'.join(parts) + key = ".".join(parts) - if op is None or key not in mongo_query: + if key not in mongo_query: mongo_query[key] = value - 
elif key in mongo_query: + else: if isinstance(mongo_query[key], dict) and isinstance(value, dict): mongo_query[key].update(value) # $max/minDistance needs to come last - convert to SON value_dict = mongo_query[key] - if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ - ('$near' in value_dict or '$nearSphere' in value_dict): + if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and ( + "$near" in value_dict or "$nearSphere" in value_dict + ): value_son = SON() - for k, v in iteritems(value_dict): - if k == '$maxDistance' or k == '$minDistance': + for k, v in value_dict.items(): + if k == "$maxDistance" or k == "$minDistance": continue value_son[k] = v # Required for MongoDB >= 2.6, may fail when combining # PyMongo 3+ and MongoDB < 2.6 near_embedded = False - for near_op in ('$near', '$nearSphere'): + for near_op in ("$near", "$nearSphere"): if isinstance(value_dict.get(near_op), dict): value_son[near_op] = SON(value_son[near_op]) - if '$maxDistance' in value_dict: - value_son[near_op]['$maxDistance'] = value_dict['$maxDistance'] - if '$minDistance' in value_dict: - value_son[near_op]['$minDistance'] = value_dict['$minDistance'] + if "$maxDistance" in value_dict: + value_son[near_op]["$maxDistance"] = value_dict[ + "$maxDistance" + ] + if "$minDistance" in value_dict: + value_son[near_op]["$minDistance"] = value_dict[ + "$minDistance" + ] near_embedded = True if not near_embedded: - if '$maxDistance' in value_dict: - value_son['$maxDistance'] = value_dict['$maxDistance'] - if '$minDistance' in value_dict: - value_son['$minDistance'] = value_dict['$minDistance'] + if "$maxDistance" in value_dict: + value_son["$maxDistance"] = value_dict["$maxDistance"] + if "$minDistance" in value_dict: + value_son["$minDistance"] = value_dict["$minDistance"] mongo_query[key] = value_son else: # Store for manually merging later @@ -177,10 +214,10 @@ def query(_doc_cls=None, **kwargs): del mongo_query[k] if isinstance(v, list): value = [{k: val} for 
val in v] - if '$and' in mongo_query.keys(): - mongo_query['$and'].extend(value) + if "$and" in mongo_query.keys(): + mongo_query["$and"].extend(value) else: - mongo_query['$and'] = value + mongo_query["$and"] = value return mongo_query @@ -192,15 +229,15 @@ def update(_doc_cls=None, **update): mongo_update = {} for key, value in update.items(): - if key == '__raw__': + if key == "__raw__": mongo_update.update(value) continue - parts = key.split('__') + parts = key.split("__") # if there is no operator, default to 'set' if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: - parts.insert(0, 'set') + parts.insert(0, "set") # Check for an operator and transform to mongo-style if there is op = None @@ -208,13 +245,13 @@ def update(_doc_cls=None, **update): op = parts.pop(0) # Convert Pythonic names to Mongo equivalents operator_map = { - 'push_all': 'pushAll', - 'pull_all': 'pullAll', - 'dec': 'inc', - 'add_to_set': 'addToSet', - 'set_on_insert': 'setOnInsert' + "push_all": "pushAll", + "pull_all": "pullAll", + "dec": "inc", + "add_to_set": "addToSet", + "set_on_insert": "setOnInsert", } - if op == 'dec': + if op == "dec": # Support decrement by flipping a positive value's sign # and using 'inc' value = -value @@ -227,7 +264,7 @@ def update(_doc_cls=None, **update): match = parts.pop() # Allow to escape operator-like field name by __ - if len(parts) > 1 and parts[-1] == '': + if len(parts) > 1 and parts[-1] == "": parts.pop() if _doc_cls: @@ -242,10 +279,10 @@ def update(_doc_cls=None, **update): appended_sub_field = False for field in fields: append_field = True - if isinstance(field, six.string_types): + if isinstance(field, str): # Convert the S operator to $ - if field == 'S': - field = '$' + if field == "S": + field = "$" parts.append(field) append_field = False else: @@ -253,7 +290,7 @@ def update(_doc_cls=None, **update): if append_field: appended_sub_field = False cleaned_fields.append(field) - if hasattr(field, 'field'): + if hasattr(field, "field"): 
cleaned_fields.append(field.field) appended_sub_field = True @@ -263,52 +300,53 @@ def update(_doc_cls=None, **update): else: field = cleaned_fields[-1] - GeoJsonBaseField = _import_class('GeoJsonBaseField') + GeoJsonBaseField = _import_class("GeoJsonBaseField") if isinstance(field, GeoJsonBaseField): value = field.to_mongo(value) - if op == 'pull': + if op == "pull": if field.required or value is not None: - if match in ('in', 'nin') and not isinstance(value, dict): + if match in ("in", "nin") and not isinstance(value, dict): value = _prepare_query_for_iterable(field, op, value) else: value = field.prepare_query_value(op, value) - elif op == 'push' and isinstance(value, (list, tuple, set)): + elif op == "push" and isinstance(value, (list, tuple, set)): value = [field.prepare_query_value(op, v) for v in value] - elif op in (None, 'set', 'push'): + elif op in (None, "set", "push"): if field.required or value is not None: value = field.prepare_query_value(op, value) - elif op in ('pushAll', 'pullAll'): + elif op in ("pushAll", "pullAll"): value = [field.prepare_query_value(op, v) for v in value] - elif op in ('addToSet', 'setOnInsert'): + elif op in ("addToSet", "setOnInsert"): if isinstance(value, (list, tuple, set)): value = [field.prepare_query_value(op, v) for v in value] elif field.required or value is not None: value = field.prepare_query_value(op, value) - elif op == 'unset': + elif op == "unset": value = 1 - elif op == 'inc': + elif op == "inc": value = field.prepare_query_value(op, value) if match: - match = '$' + match + match = "$" + match value = {match: value} - key = '.'.join(parts) + key = ".".join(parts) - if 'pull' in op and '.' in key: + if "pull" in op and "." 
in key: # Dot operators don't work on pull operations # unless they point to a list field # Otherwise it uses nested dict syntax - if op == 'pullAll': - raise InvalidQueryError('pullAll operations only support ' - 'a single field depth') + if op == "pullAll": + raise InvalidQueryError( + "pullAll operations only support a single field depth" + ) # Look for the last list field and use dot notation until there field_classes = [c.__class__ for c in cleaned_fields] field_classes.reverse() - ListField = _import_class('ListField') - EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') + ListField = _import_class("ListField") + EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") if ListField in field_classes or EmbeddedDocumentListField in field_classes: # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField # Then process as normal @@ -317,37 +355,36 @@ def update(_doc_cls=None, **update): else: _check_field = EmbeddedDocumentListField - last_listField = len( - cleaned_fields) - field_classes.index(_check_field) - key = '.'.join(parts[:last_listField]) + last_listField = len(cleaned_fields) - field_classes.index(_check_field) + key = ".".join(parts[:last_listField]) parts = parts[last_listField:] parts.insert(0, key) parts.reverse() for key in parts: value = {key: value} - elif op == 'addToSet' and isinstance(value, list): - value = {key: {'$each': value}} - elif op in ('push', 'pushAll'): + elif op == "addToSet" and isinstance(value, list): + value = {key: {"$each": value}} + elif op in ("push", "pushAll"): if parts[-1].isdigit(): - key = '.'.join(parts[0:-1]) + key = ".".join(parts[0:-1]) position = int(parts[-1]) # $position expects an iterable. If pushing a single value, # wrap it in a list. 
if not isinstance(value, (set, tuple, list)): value = [value] - value = {key: {'$each': value, '$position': position}} + value = {key: {"$each": value, "$position": position}} else: - if op == 'pushAll': - op = 'push' # convert to non-deprecated keyword + if op == "pushAll": + op = "push" # convert to non-deprecated keyword if not isinstance(value, (set, tuple, list)): value = [value] - value = {key: {'$each': value}} + value = {key: {"$each": value}} else: value = {key: value} else: value = {key: value} - key = '$' + op + key = "$" + op if key not in mongo_update: mongo_update[key] = value elif key in mongo_update and isinstance(mongo_update[key], dict): @@ -358,45 +395,47 @@ def update(_doc_cls=None, **update): def _geo_operator(field, op, value): """Helper to return the query for a given geo query.""" - if op == 'max_distance': - value = {'$maxDistance': value} - elif op == 'min_distance': - value = {'$minDistance': value} + if op == "max_distance": + value = {"$maxDistance": value} + elif op == "min_distance": + value = {"$minDistance": value} elif field._geo_index == pymongo.GEO2D: - if op == 'within_distance': - value = {'$within': {'$center': value}} - elif op == 'within_spherical_distance': - value = {'$within': {'$centerSphere': value}} - elif op == 'within_polygon': - value = {'$within': {'$polygon': value}} - elif op == 'near': - value = {'$near': value} - elif op == 'near_sphere': - value = {'$nearSphere': value} - elif op == 'within_box': - value = {'$within': {'$box': value}} - else: - raise NotImplementedError('Geo method "%s" has not been ' - 'implemented for a GeoPointField' % op) - else: - if op == 'geo_within': - value = {'$geoWithin': _infer_geometry(value)} - elif op == 'geo_within_box': - value = {'$geoWithin': {'$box': value}} - elif op == 'geo_within_polygon': - value = {'$geoWithin': {'$polygon': value}} - elif op == 'geo_within_center': - value = {'$geoWithin': {'$center': value}} - elif op == 'geo_within_sphere': - value = {'$geoWithin': 
{'$centerSphere': value}} - elif op == 'geo_intersects': - value = {'$geoIntersects': _infer_geometry(value)} - elif op == 'near': - value = {'$near': _infer_geometry(value)} + if op == "within_distance": + value = {"$within": {"$center": value}} + elif op == "within_spherical_distance": + value = {"$within": {"$centerSphere": value}} + elif op == "within_polygon": + value = {"$within": {"$polygon": value}} + elif op == "near": + value = {"$near": value} + elif op == "near_sphere": + value = {"$nearSphere": value} + elif op == "within_box": + value = {"$within": {"$box": value}} else: raise NotImplementedError( - 'Geo method "%s" has not been implemented for a %s ' - % (op, field._name) + 'Geo method "%s" has not been ' "implemented for a GeoPointField" % op + ) + else: + if op == "geo_within": + value = {"$geoWithin": _infer_geometry(value)} + elif op == "geo_within_box": + value = {"$geoWithin": {"$box": value}} + elif op == "geo_within_polygon": + value = {"$geoWithin": {"$polygon": value}} + elif op == "geo_within_center": + value = {"$geoWithin": {"$center": value}} + elif op == "geo_within_sphere": + value = {"$geoWithin": {"$centerSphere": value}} + elif op == "geo_intersects": + value = {"$geoIntersects": _infer_geometry(value)} + elif op == "near": + value = {"$near": _infer_geometry(value)} + else: + raise NotImplementedError( + 'Geo method "{}" has not been implemented for a {} '.format( + op, field._name + ) ) return value @@ -406,51 +445,58 @@ def _infer_geometry(value): given value. 
""" if isinstance(value, dict): - if '$geometry' in value: + if "$geometry" in value: return value - elif 'coordinates' in value and 'type' in value: - return {'$geometry': value} - raise InvalidQueryError('Invalid $geometry dictionary should have ' - 'type and coordinates keys') + elif "coordinates" in value and "type" in value: + return {"$geometry": value} + raise InvalidQueryError( + "Invalid $geometry dictionary should have type and coordinates keys" + ) elif isinstance(value, (list, set)): # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? try: value[0][0][0] - return {'$geometry': {'type': 'Polygon', 'coordinates': value}} + return {"$geometry": {"type": "Polygon", "coordinates": value}} except (TypeError, IndexError): pass try: value[0][0] - return {'$geometry': {'type': 'LineString', 'coordinates': value}} + return {"$geometry": {"type": "LineString", "coordinates": value}} except (TypeError, IndexError): pass try: value[0] - return {'$geometry': {'type': 'Point', 'coordinates': value}} + return {"$geometry": {"type": "Point", "coordinates": value}} except (TypeError, IndexError): pass - raise InvalidQueryError('Invalid $geometry data. Can be either a ' - 'dictionary or (nested) lists of coordinate(s)') + raise InvalidQueryError( + "Invalid $geometry data. Can be either a " + "dictionary or (nested) lists of coordinate(s)" + ) def _prepare_query_for_iterable(field, op, value): # We need a special check for BaseDocument, because - although it's iterable - using # it as such in the context of this method is most definitely a mistake. 
- BaseDocument = _import_class('BaseDocument') + BaseDocument = _import_class("BaseDocument") if isinstance(value, BaseDocument): - raise TypeError("When using the `in`, `nin`, `all`, or " - "`near`-operators you can\'t use a " - "`Document`, you must wrap your object " - "in a list (object -> [object]).") + raise TypeError( + "When using the `in`, `nin`, `all`, or " + "`near`-operators you can't use a " + "`Document`, you must wrap your object " + "in a list (object -> [object])." + ) - if not hasattr(value, '__iter__'): - raise TypeError("The `in`, `nin`, `all`, or " - "`near`-operators must be applied to an " - "iterable (e.g. a list).") + if not hasattr(value, "__iter__"): + raise TypeError( + "The `in`, `nin`, `all`, or " + "`near`-operators must be applied to an " + "iterable (e.g. a list)." + ) return [field.prepare_query_value(op, v) for v in value] diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 9d97094b..a2448f28 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -1,12 +1,18 @@ import copy +import warnings from mongoengine.errors import InvalidQueryError from mongoengine.queryset import transform -__all__ = ('Q', 'QNode') +__all__ = ("Q", "QNode") -class QNodeVisitor(object): +def warn_empty_is_deprecated(): + msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" + warnings.warn(msg, DeprecationWarning, stacklevel=2) + + +class QNodeVisitor: """Base visitor class for visiting Q-object nodes in a query tree. 
""" @@ -69,16 +75,16 @@ class QueryCompilerVisitor(QNodeVisitor): self.document = document def visit_combination(self, combination): - operator = '$and' + operator = "$and" if combination.operation == combination.OR: - operator = '$or' + operator = "$or" return {operator: combination.children} def visit_query(self, query): return transform.query(self.document, **query.query) -class QNode(object): +class QNode: """Base class for nodes in query trees.""" AND = 0 @@ -96,16 +102,19 @@ class QNode(object): """Combine this node with another node into a QCombination object. """ - if getattr(other, 'empty', True): + # If the other Q() is empty, ignore it and just use `self`. + if not bool(other): return self - if self.empty: + # Or if this Q is empty, ignore it and just use `other`. + if not bool(self): return other return QCombination(operation, [self, other]) @property def empty(self): + warn_empty_is_deprecated() return False def __or__(self, other): @@ -132,8 +141,11 @@ class QCombination(QNode): self.children.append(node) def __repr__(self): - op = ' & ' if self.operation is self.AND else ' | ' - return '(%s)' % op.join([repr(node) for node in self.children]) + op = " & " if self.operation is self.AND else " | " + return "(%s)" % op.join([repr(node) for node in self.children]) + + def __bool__(self): + return bool(self.children) def accept(self, visitor): for i in range(len(self.children)): @@ -144,8 +156,16 @@ class QCombination(QNode): @property def empty(self): + warn_empty_is_deprecated() return not bool(self.children) + def __eq__(self, other): + return ( + self.__class__ == other.__class__ + and self.operation == other.operation + and self.children == other.children + ) + class Q(QNode): """A simple query object, used in a query tree to build up more complex @@ -156,11 +176,18 @@ class Q(QNode): self.query = query def __repr__(self): - return 'Q(**%s)' % repr(self.query) + return "Q(**%s)" % repr(self.query) + + def __bool__(self): + return bool(self.query) + + 
def __eq__(self, other): + return self.__class__ == other.__class__ and self.query == other.query def accept(self, visitor): return visitor.visit_query(self) @property def empty(self): + warn_empty_is_deprecated() return not bool(self.query) diff --git a/mongoengine/signals.py b/mongoengine/signals.py index a892dec0..582b533d 100644 --- a/mongoengine/signals.py +++ b/mongoengine/signals.py @@ -1,5 +1,12 @@ -__all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', - 'post_save', 'pre_delete', 'post_delete') +__all__ = ( + "pre_init", + "post_init", + "pre_save", + "pre_save_post_validation", + "post_save", + "pre_delete", + "post_delete", +) signals_available = False try: @@ -7,11 +14,12 @@ try: signals_available = True except ImportError: - class Namespace(object): + + class Namespace: def signal(self, name, doc=None): return _FakeSignal(name, doc) - class _FakeSignal(object): + class _FakeSignal: """If blinker is unavailable, create a fake class with the same interface that allows sending of signals but will fail with an error on anything else. Instead of doing anything on send, it @@ -23,13 +31,16 @@ except ImportError: self.__doc__ = doc def _fail(self, *args, **kwargs): - raise RuntimeError('signalling support is unavailable ' - 'because the blinker library is ' - 'not installed.') + raise RuntimeError( + "signalling support is unavailable " + "because the blinker library is " + "not installed." + ) send = lambda *a, **kw: None # noqa - connect = disconnect = has_receivers_for = receivers_for = \ - temporarily_connected_to = _fail + connect = ( + disconnect + ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail del _fail @@ -37,12 +48,12 @@ except ImportError: # not put signals in here. Create your own namespace instead. 
_signals = Namespace() -pre_init = _signals.signal('pre_init') -post_init = _signals.signal('post_init') -pre_save = _signals.signal('pre_save') -pre_save_post_validation = _signals.signal('pre_save_post_validation') -post_save = _signals.signal('post_save') -pre_delete = _signals.signal('pre_delete') -post_delete = _signals.signal('post_delete') -pre_bulk_insert = _signals.signal('pre_bulk_insert') -post_bulk_insert = _signals.signal('post_bulk_insert') +pre_init = _signals.signal("pre_init") +post_init = _signals.signal("post_init") +pre_save = _signals.signal("pre_save") +pre_save_post_validation = _signals.signal("pre_save_post_validation") +post_save = _signals.signal("post_save") +pre_delete = _signals.signal("pre_delete") +post_delete = _signals.signal("post_delete") +pre_bulk_insert = _signals.signal("pre_bulk_insert") +post_bulk_insert = _signals.signal("post_bulk_insert") diff --git a/python-mongoengine.spec b/python-mongoengine.spec index eddb488d..635c779f 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT # %{python_sitearch}/* %changelog -* See: http://docs.mongoengine.org/en/latest/changelog.html \ No newline at end of file +* See: http://docs.mongoengine.org/en/latest/changelog.html diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..ee788e7a --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,8 @@ +black +flake8 +flake8-import-order +pre-commit +pytest +ipdb +ipython +tox diff --git a/requirements.txt b/requirements.txt index 9bb319a5..0ce39f74 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,3 @@ -nose pymongo>=3.4 -six==1.10.0 -flake8 -flake8-import-order Sphinx==1.5.5 sphinx-rtd-theme==0.2.4 diff --git a/setup.cfg b/setup.cfg index 84086601..ae1b4f7e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,11 +1,10 @@ -[nosetests] -verbosity=2 -detailed-errors=1 -#tests=tests -cover-package=mongoengine - [flake8] 
-ignore=E501,F401,F403,F405,I201,I202,W504, W605 +ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503 exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests max-complexity=47 application-import-names=mongoengine,tests + +[tool:pytest] +# Limits the discovery to tests directory +# avoids that it runs for instance the benchmark +testpaths = tests diff --git a/setup.py b/setup.py index f1f5dea7..393de9c7 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,9 @@ import os import sys + +from pkg_resources import normalize_path from setuptools import find_packages, setup +from setuptools.command.test import test as TestCommand # Hack to silence atexit traceback in newer python versions try: @@ -8,13 +11,10 @@ try: except ImportError: pass -DESCRIPTION = ( - 'MongoEngine is a Python Object-Document ' - 'Mapper for working with MongoDB.' -) +DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." try: - with open('README.rst') as fin: + with open("README.rst") as fin: LONG_DESCRIPTION = fin.read() except Exception: LONG_DESCRIPTION = None @@ -24,63 +24,124 @@ def get_version(version_tuple): """Return the version tuple as a string, e.g. for (0, 10, 7), return '0.10.7'. """ - return '.'.join(map(str, version_tuple)) + return ".".join(map(str, version_tuple)) + + +class PyTest(TestCommand): + """Will force pytest to search for tests inside the build directory + for 2to3 converted code (used by tox), instead of the current directory. 
+ Required as long as we need 2to3 + + Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations + Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html + """ + + # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands + # Allows to provide pytest command argument through the test runner command `python setup.py test` + # e.g: `python setup.py test -a "-k=test"` + # This only works for 1 argument though + user_options = [("pytest-args=", "a", "Arguments to pass to py.test")] + + def initialize_options(self): + TestCommand.initialize_options(self) + self.pytest_args = "" + + def finalize_options(self): + TestCommand.finalize_options(self) + self.test_args = ["tests"] + self.test_suite = True + + def run_tests(self): + # import here, cause outside the eggs aren't loaded + from pkg_resources import _namespace_packages + import pytest + + # Purge modules under test from sys.modules. The test loader will + # re-import them from the build location. Required when 2to3 is used + # with namespace packages. + if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False): + module = self.test_args[-1].split(".")[0] + if module in _namespace_packages: + del_modules = [] + if module in sys.modules: + del_modules.append(module) + module += "." + for name in sys.modules: + if name.startswith(module): + del_modules.append(name) + map(sys.modules.__delitem__, del_modules) + + # Run on the build directory for 2to3-built code + # This will prevent the old 2.x code from being found + # by py.test discovery mechanism, that apparently + # ignores sys.path.. 
+ ei_cmd = self.get_finalized_command("egg_info") + self.test_args = [normalize_path(ei_cmd.egg_base)] + + cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else []) + errno = pytest.main(cmd_args) + sys.exit(errno) # Dirty hack to get version number from monogengine/__init__.py - we can't # import it as it depends on PyMongo and PyMongo isn't installed until this # file is read -init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') -version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] +init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py") +version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0] -VERSION = get_version(eval(version_line.split('=')[-1])) +VERSION = get_version(eval(version_line.split("=")[-1])) CLASSIFIERS = [ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", - 'Topic :: Database', - 'Topic :: Software Development :: Libraries :: Python Modules', + "Topic :: Database", + "Topic :: Software Development :: Libraries :: Python Modules", ] extra_opts = { - 'packages': find_packages(exclude=['tests', 'tests.*']), - 'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0'] + "packages": 
find_packages(exclude=["tests", "tests.*"]), + "tests_require": [ + "pytest<5.0", + "pytest-cov", + "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls + "blinker", + "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support + ], } -if sys.version_info[0] == 3: - extra_opts['use_2to3'] = True - if 'test' in sys.argv or 'nosetests' in sys.argv: - extra_opts['packages'] = find_packages() - extra_opts['package_data'] = { - 'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']} -else: - extra_opts['tests_require'] += ['python-dateutil'] + +if "test" in sys.argv: + extra_opts["packages"] = find_packages() + extra_opts["package_data"] = { + "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] + } setup( - name='mongoengine', + name="mongoengine", version=VERSION, - author='Harry Marr', - author_email='harry.marr@gmail.com', + author="Harry Marr", + author_email="harry.marr@gmail.com", maintainer="Stefan Wojcik", maintainer_email="wojcikstefan@gmail.com", - url='http://mongoengine.org/', - download_url='https://github.com/MongoEngine/mongoengine/tarball/master', - license='MIT', + url="http://mongoengine.org/", + download_url="https://github.com/MongoEngine/mongoengine/tarball/master", + license="MIT", include_package_data=True, description=DESCRIPTION, long_description=LONG_DESCRIPTION, - platforms=['any'], + platforms=["any"], classifiers=CLASSIFIERS, - install_requires=['pymongo>=3.4', 'six'], - test_suite='nose.collector', + python_requires=">=3.5", + install_requires=["pymongo>=3.4, <4.0"], + cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/__init__.py b/tests/__init__.py index 08db7186..e69de29b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +0,0 @@ -from .all_warnings import AllWarnings -from .document import * -from .queryset import * -from .fields import * diff --git a/tests/all_warnings/__init__.py b/tests/all_warnings/__init__.py index 
3aebe4ba..e69de29b 100644 --- a/tests/all_warnings/__init__.py +++ b/tests/all_warnings/__init__.py @@ -1,42 +0,0 @@ -""" -This test has been put into a module. This is because it tests warnings that -only get triggered on first hit. This way we can ensure its imported into the -top level and called first by the test suite. -""" -import unittest -import warnings - -from mongoengine import * - - -__all__ = ('AllWarnings', ) - - -class AllWarnings(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.warning_list = [] - self.showwarning_default = warnings.showwarning - warnings.showwarning = self.append_to_warning_list - - def append_to_warning_list(self, message, category, *args): - self.warning_list.append({"message": message, - "category": category}) - - def tearDown(self): - # restore default handling of warnings - warnings.showwarning = self.showwarning_default - - def test_document_collection_syntax_warning(self): - - class NonAbstractBase(Document): - meta = {'allow_inheritance': True} - - class InheritedDocumentFailTest(NonAbstractBase): - meta = {'collection': 'fail'} - - warning = self.warning_list[0] - self.assertEqual(SyntaxWarning, warning["category"]) - self.assertEqual('non_abstract_base', - InheritedDocumentFailTest._get_collection_name()) diff --git a/tests/all_warnings/test_warnings.py b/tests/all_warnings/test_warnings.py new file mode 100644 index 00000000..a9910121 --- /dev/null +++ b/tests/all_warnings/test_warnings.py @@ -0,0 +1,35 @@ +""" +This test has been put into a module. This is because it tests warnings that +only get triggered on first hit. This way we can ensure its imported into the +top level and called first by the test suite. 
+""" +import unittest +import warnings + +from mongoengine import * + + +class TestAllWarnings(unittest.TestCase): + def setUp(self): + connect(db="mongoenginetest") + self.warning_list = [] + self.showwarning_default = warnings.showwarning + warnings.showwarning = self.append_to_warning_list + + def append_to_warning_list(self, message, category, *args): + self.warning_list.append({"message": message, "category": category}) + + def tearDown(self): + # restore default handling of warnings + warnings.showwarning = self.showwarning_default + + def test_document_collection_syntax_warning(self): + class NonAbstractBase(Document): + meta = {"allow_inheritance": True} + + class InheritedDocumentFailTest(NonAbstractBase): + meta = {"collection": "fail"} + + warning = self.warning_list[0] + assert SyntaxWarning == warning["category"] + assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name() diff --git a/tests/document/__init__.py b/tests/document/__init__.py index dc35c969..e69de29b 100644 --- a/tests/document/__init__.py +++ b/tests/document/__init__.py @@ -1,13 +0,0 @@ -import unittest - -from .class_methods import * -from .delta import * -from .dynamic import * -from .indexes import * -from .inheritance import * -from .instance import * -from .json_serialisation import * -from .validation import * - -if __name__ == '__main__': - unittest.main() diff --git a/tests/document/delta.py b/tests/document/delta.py deleted file mode 100644 index 504c1707..00000000 --- a/tests/document/delta.py +++ /dev/null @@ -1,864 +0,0 @@ -# -*- coding: utf-8 -*- -import unittest - -from bson import SON -from mongoengine import * -from mongoengine.pymongo_support import list_collection_names -from tests.utils import MongoDBTestCase - - -class DeltaTest(MongoDBTestCase): - - def setUp(self): - super(DeltaTest, self).setUp() - - class Person(Document): - name = StringField() - age = IntField() - - non_field = True - - meta = {"allow_inheritance": True} - - self.Person = 
Person - - def tearDown(self): - for collection in list_collection_names(self.db): - self.db.drop_collection(collection) - - def test_delta(self): - self.delta(Document) - self.delta(DynamicDocument) - - def delta(self, DocClass): - - class Doc(DocClass): - string_field = StringField() - int_field = IntField() - dict_field = DictField() - list_field = ListField() - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['string_field']) - self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) - - doc._changed_fields = [] - doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['int_field']) - self.assertEqual(doc._delta(), ({'int_field': 1}, {})) - - doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} - doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) - - doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] - doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) - - # Test unsetting - doc._changed_fields = [] - doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) - - doc._changed_fields = [] - doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({}, {'list_field': 1})) - - def test_delta_recursive(self): - self.delta_recursive(Document, EmbeddedDocument) - self.delta_recursive(DynamicDocument, EmbeddedDocument) - self.delta_recursive(Document, DynamicEmbeddedDocument) - self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) - - def delta_recursive(self, 
DocClass, EmbeddedClass): - - class Embedded(EmbeddedClass): - id = StringField() - string_field = StringField() - int_field = IntField() - dict_field = DictField() - list_field = ListField() - - class Doc(DocClass): - string_field = StringField() - int_field = IntField() - dict_field = DictField() - list_field = ListField() - embedded_field = EmbeddedDocumentField(Embedded) - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - embedded_1 = Embedded() - embedded_1.id = "010101" - embedded_1.string_field = 'hello' - embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] - doc.embedded_field = embedded_1 - - self.assertEqual(doc._get_changed_fields(), ['embedded_field']) - - embedded_delta = { - 'id': "010101", - 'string_field': 'hello', - 'int_field': 1, - 'dict_field': {'hello': 'world'}, - 'list_field': ['1', 2, {'hello': 'world'}] - } - self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - self.assertEqual(doc._delta(), - ({'embedded_field': embedded_delta}, {})) - - doc.save() - doc = doc.reload(10) - - doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.dict_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) - self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.dict_field, {}) - - doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field']) - self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) - self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field, []) - - embedded_2 = Embedded() - embedded_2.string_field = 
'hello' - embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] - - doc.embedded_field.list_field = ['1', 2, embedded_2] - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field']) - - self.assertEqual(doc.embedded_field._delta(), ({ - 'list_field': ['1', 2, { - '_cls': 'Embedded', - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - - self.assertEqual(doc._delta(), ({ - 'embedded_field.list_field': ['1', 2, { - '_cls': 'Embedded', - 'string_field': 'hello', - 'dict_field': {'hello': 'world'}, - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - doc.save() - doc = doc.reload(10) - - self.assertEqual(doc.embedded_field.list_field[0], '1') - self.assertEqual(doc.embedded_field.list_field[1], 2) - for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], - embedded_2[k]) - - doc.embedded_field.list_field[2].string_field = 'world' - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field.2.string_field']) - self.assertEqual(doc.embedded_field._delta(), - ({'list_field.2.string_field': 'world'}, {})) - self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.string_field': 'world'}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'world') - - # Test multiple assignments - doc.embedded_field.list_field[2].string_field = 'hello world' - doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual(doc._get_changed_fields(), - ['embedded_field.list_field.2']) - self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': { - '_cls': 'Embedded', - 'string_field': 'hello world', - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - 'dict_field': {'hello': 'world'}} - }, {})) - 
self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': { - '_cls': 'Embedded', - 'string_field': 'hello world', - 'int_field': 1, - 'list_field': ['1', 2, {'hello': 'world'}], - 'dict_field': {'hello': 'world'}} - }, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'hello world') - - # Test list native methods - doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.list_field': - [2, {'hello': 'world'}]}, {})) - doc.save() - doc = doc.reload(10) - - doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual(doc._delta(), - ({'embedded_field.list_field.2.list_field': - [2, {'hello': 'world'}, 1]}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, - [2, {'hello': 'world'}, 1]) - - doc.embedded_field.list_field[2].list_field.sort(key=str) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, - [1, 2, {'hello': 'world'}]) - - del doc.embedded_field.list_field[2].list_field[2]['hello'] - self.assertEqual(doc._delta(), - ({}, {'embedded_field.list_field.2.list_field.2.hello': 1})) - doc.save() - doc = doc.reload(10) - - del doc.embedded_field.list_field[2].list_field - self.assertEqual(doc._delta(), - ({}, {'embedded_field.list_field.2.list_field': 1})) - - doc.save() - doc = doc.reload(10) - - doc.dict_field['Embedded'] = embedded_1 - doc.save() - doc = doc.reload(10) - - doc.dict_field['Embedded'].string_field = 'Hello World' - self.assertEqual(doc._get_changed_fields(), - ['dict_field.Embedded.string_field']) - self.assertEqual(doc._delta(), - ({'dict_field.Embedded.string_field': 'Hello World'}, {})) - - def test_circular_reference_deltas(self): - self.circular_reference_deltas(Document, Document) - self.circular_reference_deltas(Document, DynamicDocument) - self.circular_reference_deltas(DynamicDocument, Document) - 
self.circular_reference_deltas(DynamicDocument, DynamicDocument) - - def circular_reference_deltas(self, DocClass1, DocClass2): - - class Person(DocClass1): - name = StringField() - owns = ListField(ReferenceField('Organization')) - - class Organization(DocClass2): - name = StringField() - owner = ReferenceField('Person') - - Person.drop_collection() - Organization.drop_collection() - - person = Person(name="owner").save() - organization = Organization(name="company").save() - - person.owns.append(organization) - organization.owner = person - - person.save() - organization.save() - - p = Person.objects[0].select_related() - o = Organization.objects.first() - self.assertEqual(p.owns[0], o) - self.assertEqual(o.owner, p) - - def test_circular_reference_deltas_2(self): - self.circular_reference_deltas_2(Document, Document) - self.circular_reference_deltas_2(Document, DynamicDocument) - self.circular_reference_deltas_2(DynamicDocument, Document) - self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) - - def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True): - - class Person(DocClass1): - name = StringField() - owns = ListField(ReferenceField('Organization', dbref=dbref)) - employer = ReferenceField('Organization', dbref=dbref) - - class Organization(DocClass2): - name = StringField() - owner = ReferenceField('Person', dbref=dbref) - employees = ListField(ReferenceField('Person', dbref=dbref)) - - Person.drop_collection() - Organization.drop_collection() - - person = Person(name="owner").save() - employee = Person(name="employee").save() - organization = Organization(name="company").save() - - person.owns.append(organization) - organization.owner = person - - organization.employees.append(employee) - employee.employer = organization - - person.save() - organization.save() - employee.save() - - p = Person.objects.get(name="owner") - e = Person.objects.get(name="employee") - o = Organization.objects.first() - - self.assertEqual(p.owns[0], 
o) - self.assertEqual(o.owner, p) - self.assertEqual(e.employer, o) - - return person, organization, employee - - def test_delta_db_field(self): - self.delta_db_field(Document) - self.delta_db_field(DynamicDocument) - - def delta_db_field(self, DocClass): - - class Doc(DocClass): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = ListField(db_field='db_list_field') - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['db_string_field']) - self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {})) - - doc._changed_fields = [] - doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['db_int_field']) - self.assertEqual(doc._delta(), ({'db_int_field': 1}, {})) - - doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} - doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) - self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {})) - - doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] - doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['db_list_field']) - self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {})) - - # Test unsetting - doc._changed_fields = [] - doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) - self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1})) - - doc._changed_fields = [] - doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['db_list_field']) - self.assertEqual(doc._delta(), ({}, {'db_list_field': 1})) - - # Test it saves that data - doc = Doc() - doc.save() - - doc.string_field = 'hello' - doc.int_field = 1 - doc.dict_field = {'hello': 
'world'} - doc.list_field = ['1', 2, {'hello': 'world'}] - doc.save() - doc = doc.reload(10) - - self.assertEqual(doc.string_field, 'hello') - self.assertEqual(doc.int_field, 1) - self.assertEqual(doc.dict_field, {'hello': 'world'}) - self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}]) - - def test_delta_recursive_db_field(self): - self.delta_recursive_db_field(Document, EmbeddedDocument) - self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) - self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) - self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) - - def delta_recursive_db_field(self, DocClass, EmbeddedClass): - - class Embedded(EmbeddedClass): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = ListField(db_field='db_list_field') - - class Doc(DocClass): - string_field = StringField(db_field='db_string_field') - int_field = IntField(db_field='db_int_field') - dict_field = DictField(db_field='db_dict_field') - list_field = ListField(db_field='db_list_field') - embedded_field = EmbeddedDocumentField(Embedded, - db_field='db_embedded_field') - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - embedded_1 = Embedded() - embedded_1.string_field = 'hello' - embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] - doc.embedded_field = embedded_1 - - self.assertEqual(doc._get_changed_fields(), ['db_embedded_field']) - - embedded_delta = { - 'db_string_field': 'hello', - 'db_int_field': 1, - 'db_dict_field': {'hello': 'world'}, - 'db_list_field': ['1', 2, {'hello': 'world'}] - } - self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) - self.assertEqual(doc._delta(), - ({'db_embedded_field': 
embedded_delta}, {})) - - doc.save() - doc = doc.reload(10) - - doc.embedded_field.dict_field = {} - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_dict_field']) - self.assertEqual(doc.embedded_field._delta(), - ({}, {'db_dict_field': 1})) - self.assertEqual(doc._delta(), - ({}, {'db_embedded_field.db_dict_field': 1})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.dict_field, {}) - - doc.embedded_field.list_field = [] - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_list_field']) - self.assertEqual(doc.embedded_field._delta(), - ({}, {'db_list_field': 1})) - self.assertEqual(doc._delta(), - ({}, {'db_embedded_field.db_list_field': 1})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field, []) - - embedded_2 = Embedded() - embedded_2.string_field = 'hello' - embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] - - doc.embedded_field.list_field = ['1', 2, embedded_2] - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_list_field']) - self.assertEqual(doc.embedded_field._delta(), ({ - 'db_list_field': ['1', 2, { - '_cls': 'Embedded', - 'db_string_field': 'hello', - 'db_dict_field': {'hello': 'world'}, - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - - self.assertEqual(doc._delta(), ({ - 'db_embedded_field.db_list_field': ['1', 2, { - '_cls': 'Embedded', - 'db_string_field': 'hello', - 'db_dict_field': {'hello': 'world'}, - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - }] - }, {})) - doc.save() - doc = doc.reload(10) - - self.assertEqual(doc.embedded_field.list_field[0], '1') - self.assertEqual(doc.embedded_field.list_field[1], 2) - for k in doc.embedded_field.list_field[2]._fields: - self.assertEqual(doc.embedded_field.list_field[2][k], - embedded_2[k]) - - doc.embedded_field.list_field[2].string_field = 'world' - 
self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_list_field.2.db_string_field']) - self.assertEqual(doc.embedded_field._delta(), - ({'db_list_field.2.db_string_field': 'world'}, {})) - self.assertEqual(doc._delta(), - ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, - {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'world') - - # Test multiple assignments - doc.embedded_field.list_field[2].string_field = 'hello world' - doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] - self.assertEqual(doc._get_changed_fields(), - ['db_embedded_field.db_list_field.2']) - self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': { - '_cls': 'Embedded', - 'db_string_field': 'hello world', - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - 'db_dict_field': {'hello': 'world'}}}, {})) - self.assertEqual(doc._delta(), ({ - 'db_embedded_field.db_list_field.2': { - '_cls': 'Embedded', - 'db_string_field': 'hello world', - 'db_int_field': 1, - 'db_list_field': ['1', 2, {'hello': 'world'}], - 'db_dict_field': {'hello': 'world'}} - }, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].string_field, - 'hello world') - - # Test list native methods - doc.embedded_field.list_field[2].list_field.pop(0) - self.assertEqual(doc._delta(), - ({'db_embedded_field.db_list_field.2.db_list_field': - [2, {'hello': 'world'}]}, {})) - doc.save() - doc = doc.reload(10) - - doc.embedded_field.list_field[2].list_field.append(1) - self.assertEqual(doc._delta(), - ({'db_embedded_field.db_list_field.2.db_list_field': - [2, {'hello': 'world'}, 1]}, {})) - doc.save() - doc = doc.reload(10) - self.assertEqual(doc.embedded_field.list_field[2].list_field, - [2, {'hello': 'world'}, 1]) - - doc.embedded_field.list_field[2].list_field.sort(key=str) - doc.save() - doc = doc.reload(10) - 
self.assertEqual(doc.embedded_field.list_field[2].list_field, - [1, 2, {'hello': 'world'}]) - - del doc.embedded_field.list_field[2].list_field[2]['hello'] - self.assertEqual(doc._delta(), - ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1})) - doc.save() - doc = doc.reload(10) - - del doc.embedded_field.list_field[2].list_field - self.assertEqual(doc._delta(), ({}, - {'db_embedded_field.db_list_field.2.db_list_field': 1})) - - def test_delta_for_dynamic_documents(self): - class Person(DynamicDocument): - name = StringField() - meta = {'allow_inheritance': True} - - Person.drop_collection() - - p = Person(name="James", age=34) - self.assertEqual(p._delta(), ( - SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) - - p.doc = 123 - del p.doc - self.assertEqual(p._delta(), ( - SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) - - p = Person() - p.name = "Dean" - p.age = 22 - p.save() - - p.age = 24 - self.assertEqual(p.age, 24) - self.assertEqual(p._get_changed_fields(), ['age']) - self.assertEqual(p._delta(), ({'age': 24}, {})) - - p = Person.objects(age=22).get() - p.age = 24 - self.assertEqual(p.age, 24) - self.assertEqual(p._get_changed_fields(), ['age']) - self.assertEqual(p._delta(), ({'age': 24}, {})) - - p.save() - self.assertEqual(1, Person.objects(age=24).count()) - - def test_dynamic_delta(self): - - class Doc(DynamicDocument): - pass - - Doc.drop_collection() - doc = Doc() - doc.save() - - doc = Doc.objects.first() - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(doc._delta(), ({}, {})) - - doc.string_field = 'hello' - self.assertEqual(doc._get_changed_fields(), ['string_field']) - self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) - - doc._changed_fields = [] - doc.int_field = 1 - self.assertEqual(doc._get_changed_fields(), ['int_field']) - self.assertEqual(doc._delta(), ({'int_field': 1}, {})) - - doc._changed_fields = [] - dict_value = {'hello': 'world', 'ping': 'pong'} - 
doc.dict_field = dict_value - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) - - doc._changed_fields = [] - list_value = ['1', 2, {'hello': 'world'}] - doc.list_field = list_value - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) - - # Test unsetting - doc._changed_fields = [] - doc.dict_field = {} - self.assertEqual(doc._get_changed_fields(), ['dict_field']) - self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) - - doc._changed_fields = [] - doc.list_field = [] - self.assertEqual(doc._get_changed_fields(), ['list_field']) - self.assertEqual(doc._delta(), ({}, {'list_field': 1})) - - def test_delta_with_dbref_true(self): - person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) - employee.name = 'test' - - self.assertEqual(organization._get_changed_fields(), []) - - updates, removals = organization._delta() - self.assertEqual({}, removals) - self.assertEqual({}, updates) - - organization.employees.append(person) - updates, removals = organization._delta() - self.assertEqual({}, removals) - self.assertIn('employees', updates) - - def test_delta_with_dbref_false(self): - person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) - employee.name = 'test' - - self.assertEqual(organization._get_changed_fields(), []) - - updates, removals = organization._delta() - self.assertEqual({}, removals) - self.assertEqual({}, updates) - - organization.employees.append(person) - updates, removals = organization._delta() - self.assertEqual({}, removals) - self.assertIn('employees', updates) - - def test_nested_nested_fields_mark_as_changed(self): - class EmbeddedDoc(EmbeddedDocument): - name = StringField() - - class MyDoc(Document): - subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) - name = StringField() - - MyDoc.drop_collection() - - 
mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save() - - mydoc = MyDoc.objects.first() - subdoc = mydoc.subs['a']['b'] - subdoc.name = 'bar' - - self.assertEqual(["name"], subdoc._get_changed_fields()) - self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields()) - - mydoc._clear_changed_fields() - self.assertEqual([], mydoc._get_changed_fields()) - - def test_lower_level_mark_as_changed(self): - class EmbeddedDoc(EmbeddedDocument): - name = StringField() - - class MyDoc(Document): - subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) - - MyDoc.drop_collection() - - MyDoc().save() - - mydoc = MyDoc.objects.first() - mydoc.subs['a'] = EmbeddedDoc() - self.assertEqual(["subs.a"], mydoc._get_changed_fields()) - - subdoc = mydoc.subs['a'] - subdoc.name = 'bar' - - self.assertEqual(["name"], subdoc._get_changed_fields()) - self.assertEqual(["subs.a"], mydoc._get_changed_fields()) - mydoc.save() - - mydoc._clear_changed_fields() - self.assertEqual([], mydoc._get_changed_fields()) - - def test_upper_level_mark_as_changed(self): - class EmbeddedDoc(EmbeddedDocument): - name = StringField() - - class MyDoc(Document): - subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) - - MyDoc.drop_collection() - - MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save() - - mydoc = MyDoc.objects.first() - subdoc = mydoc.subs['a'] - subdoc.name = 'bar' - - self.assertEqual(["name"], subdoc._get_changed_fields()) - self.assertEqual(["subs.a.name"], mydoc._get_changed_fields()) - - mydoc.subs['a'] = EmbeddedDoc() - self.assertEqual(["subs.a"], mydoc._get_changed_fields()) - mydoc.save() - - mydoc._clear_changed_fields() - self.assertEqual([], mydoc._get_changed_fields()) - - def test_referenced_object_changed_attributes(self): - """Ensures that when you save a new reference to a field, the referenced object isn't altered""" - - class Organization(Document): - name = StringField() - - class User(Document): - name = StringField() - org = 
ReferenceField('Organization', required=True) - - Organization.drop_collection() - User.drop_collection() - - org1 = Organization(name='Org 1') - org1.save() - - org2 = Organization(name='Org 2') - org2.save() - - user = User(name='Fred', org=org1) - user.save() - - org1.reload() - org2.reload() - user.reload() - self.assertEqual(org1.name, 'Org 1') - self.assertEqual(org2.name, 'Org 2') - self.assertEqual(user.name, 'Fred') - - user.name = 'Harold' - user.org = org2 - - org2.name = 'New Org 2' - self.assertEqual(org2.name, 'New Org 2') - - user.save() - org2.save() - - self.assertEqual(org2.name, 'New Org 2') - org2.reload() - self.assertEqual(org2.name, 'New Org 2') - - def test_delta_for_nested_map_fields(self): - class UInfoDocument(Document): - phone = StringField() - - class EmbeddedRole(EmbeddedDocument): - type = StringField() - - class EmbeddedUser(EmbeddedDocument): - name = StringField() - roles = MapField(field=EmbeddedDocumentField(EmbeddedRole)) - rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole)) - info = ReferenceField(UInfoDocument) - - class Doc(Document): - users = MapField(field=EmbeddedDocumentField(EmbeddedUser)) - num = IntField(default=-1) - - Doc.drop_collection() - - doc = Doc(num=1) - doc.users["007"] = EmbeddedUser(name="Agent007") - doc.save() - - uinfo = UInfoDocument(phone="79089269066") - uinfo.save() - - d = Doc.objects(num=1).first() - d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin") - d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) - d.users["007"]["info"] = uinfo - delta = d._delta() - self.assertEqual(True, "users.007.roles.666" in delta[0]) - self.assertEqual(True, "users.007.rolist" in delta[0]) - self.assertEqual(True, "users.007.info" in delta[0]) - self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"]) - self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) - self.assertEqual(uinfo.id, delta[0]["users.007.info"]) - - -if __name__ == '__main__': - 
unittest.main() diff --git a/tests/document/inheritance.py b/tests/document/inheritance.py deleted file mode 100644 index d81039f4..00000000 --- a/tests/document/inheritance.py +++ /dev/null @@ -1,564 +0,0 @@ -# -*- coding: utf-8 -*- -import unittest -import warnings - -from six import iteritems - -from mongoengine import (BooleanField, Document, EmbeddedDocument, - EmbeddedDocumentField, GenericReferenceField, - IntField, ReferenceField, StringField) -from mongoengine.pymongo_support import list_collection_names -from tests.utils import MongoDBTestCase -from tests.fixtures import Base - -__all__ = ('InheritanceTest', ) - - -class InheritanceTest(MongoDBTestCase): - - def tearDown(self): - for collection in list_collection_names(self.db): - self.db.drop_collection(collection) - - def test_constructor_cls(self): - # Ensures _cls is properly set during construction - # and when object gets reloaded (prevent regression of #1950) - class EmbedData(EmbeddedDocument): - data = StringField() - meta = {'allow_inheritance': True} - - class DataDoc(Document): - name = StringField() - embed = EmbeddedDocumentField(EmbedData) - meta = {'allow_inheritance': True} - - test_doc = DataDoc(name='test', embed=EmbedData(data='data')) - self.assertEqual(test_doc._cls, 'DataDoc') - self.assertEqual(test_doc.embed._cls, 'EmbedData') - test_doc.save() - saved_doc = DataDoc.objects.with_id(test_doc.id) - self.assertEqual(test_doc._cls, saved_doc._cls) - self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls) - test_doc.delete() - - def test_superclasses(self): - """Ensure that the correct list of superclasses is assembled. 
- """ - class Animal(Document): - meta = {'allow_inheritance': True} - class Fish(Animal): pass - class Guppy(Fish): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass - - self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Fish._superclasses, ('Animal',)) - self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish')) - self.assertEqual(Mammal._superclasses, ('Animal',)) - self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal')) - self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal')) - - def test_external_superclasses(self): - """Ensure that the correct list of super classes is assembled when - importing part of the model. - """ - class Animal(Base): pass - class Fish(Animal): pass - class Guppy(Fish): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass - - self.assertEqual(Animal._superclasses, ('Base', )) - self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',)) - self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal', - 'Base.Animal.Fish')) - self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',)) - self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal', - 'Base.Animal.Mammal')) - self.assertEqual(Human._superclasses, ('Base', 'Base.Animal', - 'Base.Animal.Mammal')) - - def test_subclasses(self): - """Ensure that the correct list of _subclasses (subclasses) is - assembled. 
- """ - class Animal(Document): - meta = {'allow_inheritance': True} - class Fish(Animal): pass - class Guppy(Fish): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass - - self.assertEqual(Animal._subclasses, ('Animal', - 'Animal.Fish', - 'Animal.Fish.Guppy', - 'Animal.Mammal', - 'Animal.Mammal.Dog', - 'Animal.Mammal.Human')) - self.assertEqual(Fish._subclasses, ('Animal.Fish', - 'Animal.Fish.Guppy',)) - self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',)) - self.assertEqual(Mammal._subclasses, ('Animal.Mammal', - 'Animal.Mammal.Dog', - 'Animal.Mammal.Human')) - self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',)) - - def test_external_subclasses(self): - """Ensure that the correct list of _subclasses (subclasses) is - assembled when importing part of the model. - """ - class Animal(Base): pass - class Fish(Animal): pass - class Guppy(Fish): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass - - self.assertEqual(Animal._subclasses, ('Base.Animal', - 'Base.Animal.Fish', - 'Base.Animal.Fish.Guppy', - 'Base.Animal.Mammal', - 'Base.Animal.Mammal.Dog', - 'Base.Animal.Mammal.Human')) - self.assertEqual(Fish._subclasses, ('Base.Animal.Fish', - 'Base.Animal.Fish.Guppy',)) - self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',)) - self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal', - 'Base.Animal.Mammal.Dog', - 'Base.Animal.Mammal.Human')) - self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',)) - - def test_dynamic_declarations(self): - """Test that declaring an extra class updates meta data""" - - class Animal(Document): - meta = {'allow_inheritance': True} - - self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Animal._subclasses, ('Animal',)) - - # Test dynamically adding a class changes the meta data - class Fish(Animal): - pass - - self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish')) - 
- self.assertEqual(Fish._superclasses, ('Animal', )) - self.assertEqual(Fish._subclasses, ('Animal.Fish',)) - - # Test dynamically adding an inherited class changes the meta data - class Pike(Fish): - pass - - self.assertEqual(Animal._superclasses, ()) - self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish', - 'Animal.Fish.Pike')) - - self.assertEqual(Fish._superclasses, ('Animal', )) - self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike')) - - self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish')) - self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',)) - - def test_inheritance_meta_data(self): - """Ensure that document may inherit fields from a superclass document. - """ - class Person(Document): - name = StringField() - age = IntField() - - meta = {'allow_inheritance': True} - - class Employee(Person): - salary = IntField() - - self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], - sorted(Employee._fields.keys())) - self.assertEqual(Employee._get_collection_name(), - Person._get_collection_name()) - - def test_inheritance_to_mongo_keys(self): - """Ensure that document may inherit fields from a superclass document. - """ - class Person(Document): - name = StringField() - age = IntField() - - meta = {'allow_inheritance': True} - - class Employee(Person): - salary = IntField() - - self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], - sorted(Employee._fields.keys())) - self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), - ['_cls', 'name', 'age']) - self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(), - ['_cls', 'name', 'age', 'salary']) - self.assertEqual(Employee._get_collection_name(), - Person._get_collection_name()) - - def test_indexes_and_multiple_inheritance(self): - """ Ensure that all of the indexes are created for a document with - multiple inheritance. 
- """ - - class A(Document): - a = StringField() - - meta = { - 'allow_inheritance': True, - 'indexes': ['a'] - } - - class B(Document): - b = StringField() - - meta = { - 'allow_inheritance': True, - 'indexes': ['b'] - } - - class C(A, B): - pass - - A.drop_collection() - B.drop_collection() - C.drop_collection() - - C.ensure_indexes() - - self.assertEqual( - sorted([idx['key'] for idx in C._get_collection().index_information().values()]), - sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]]) - ) - - def test_polymorphic_queries(self): - """Ensure that the correct subclasses are returned from a query - """ - - class Animal(Document): - meta = {'allow_inheritance': True} - class Fish(Animal): pass - class Mammal(Animal): pass - class Dog(Mammal): pass - class Human(Mammal): pass - - Animal.drop_collection() - - Animal().save() - Fish().save() - Mammal().save() - Dog().save() - Human().save() - - classes = [obj.__class__ for obj in Animal.objects] - self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) - - classes = [obj.__class__ for obj in Mammal.objects] - self.assertEqual(classes, [Mammal, Dog, Human]) - - classes = [obj.__class__ for obj in Human.objects] - self.assertEqual(classes, [Human]) - - def test_allow_inheritance(self): - """Ensure that inheritance is disabled by default on simple - classes and that _cls will not be used. 
- """ - class Animal(Document): - name = StringField() - - # can't inherit because Animal didn't explicitly allow inheritance - with self.assertRaises(ValueError) as cm: - class Dog(Animal): - pass - self.assertIn("Document Animal may not be subclassed", str(cm.exception)) - - # Check that _cls etc aren't present on simple documents - dog = Animal(name='dog').save() - self.assertEqual(dog.to_mongo().keys(), ['_id', 'name']) - - collection = self.db[Animal._get_collection_name()] - obj = collection.find_one() - self.assertNotIn('_cls', obj) - - def test_cant_turn_off_inheritance_on_subclass(self): - """Ensure if inheritance is on in a subclass you cant turn it off. - """ - class Animal(Document): - name = StringField() - meta = {'allow_inheritance': True} - - with self.assertRaises(ValueError) as cm: - class Mammal(Animal): - meta = {'allow_inheritance': False} - self.assertEqual(str(cm.exception), 'Only direct subclasses of Document may set "allow_inheritance" to False') - - def test_allow_inheritance_abstract_document(self): - """Ensure that abstract documents can set inheritance rules and that - _cls will not be used. 
- """ - class FinalDocument(Document): - meta = {'abstract': True, - 'allow_inheritance': False} - - class Animal(FinalDocument): - name = StringField() - - with self.assertRaises(ValueError) as cm: - class Mammal(Animal): - pass - - # Check that _cls isn't present in simple documents - doc = Animal(name='dog') - self.assertNotIn('_cls', doc.to_mongo()) - - def test_using_abstract_class_in_reference_field(self): - # Ensures no regression of #1920 - class AbstractHuman(Document): - meta = {'abstract': True} - - class Dad(AbstractHuman): - name = StringField() - - class Home(Document): - dad = ReferenceField(AbstractHuman) # Referencing the abstract class - address = StringField() - - dad = Dad(name='5').save() - Home(dad=dad, address='street').save() - - home = Home.objects.first() - home.address = 'garbage' - home.save() # Was failing with ValidationError - - def test_abstract_class_referencing_self(self): - # Ensures no regression of #1920 - class Human(Document): - meta = {'abstract': True} - creator = ReferenceField('self', dbref=True) - - class User(Human): - name = StringField() - - user = User(name='John').save() - user2 = User(name='Foo', creator=user).save() - - user2 = User.objects.with_id(user2.id) - user2.name = 'Bar' - user2.save() # Was failing with ValidationError - - def test_abstract_handle_ids_in_metaclass_properly(self): - - class City(Document): - continent = StringField() - meta = {'abstract': True, - 'allow_inheritance': False} - - class EuropeanCity(City): - name = StringField() - - berlin = EuropeanCity(name='Berlin', continent='Europe') - self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._fields_ordered), 3) - self.assertEqual(berlin._fields_ordered[0], 'id') - - def test_auto_id_not_set_if_specific_in_parent_class(self): - - class City(Document): - continent = StringField() - city_id = 
IntField(primary_key=True) - meta = {'abstract': True, - 'allow_inheritance': False} - - class EuropeanCity(City): - name = StringField() - - berlin = EuropeanCity(name='Berlin', continent='Europe') - self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._fields_ordered), 3) - self.assertEqual(berlin._fields_ordered[0], 'city_id') - - def test_auto_id_vs_non_pk_id_field(self): - - class City(Document): - continent = StringField() - id = IntField() - meta = {'abstract': True, - 'allow_inheritance': False} - - class EuropeanCity(City): - name = StringField() - - berlin = EuropeanCity(name='Berlin', continent='Europe') - self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) - self.assertEqual(len(berlin._fields_ordered), 4) - self.assertEqual(berlin._fields_ordered[0], 'auto_id_0') - berlin.save() - self.assertEqual(berlin.pk, berlin.auto_id_0) - - def test_abstract_document_creation_does_not_fail(self): - class City(Document): - continent = StringField() - meta = {'abstract': True, - 'allow_inheritance': False} - - city = City(continent='asia') - self.assertEqual(None, city.pk) - # TODO: expected error? Shouldn't we create a new error type? 
- with self.assertRaises(KeyError): - setattr(city, 'pk', 1) - - def test_allow_inheritance_embedded_document(self): - """Ensure embedded documents respect inheritance.""" - class Comment(EmbeddedDocument): - content = StringField() - - with self.assertRaises(ValueError): - class SpecialComment(Comment): - pass - - doc = Comment(content='test') - self.assertNotIn('_cls', doc.to_mongo()) - - class Comment(EmbeddedDocument): - content = StringField() - meta = {'allow_inheritance': True} - - doc = Comment(content='test') - self.assertIn('_cls', doc.to_mongo()) - - def test_document_inheritance(self): - """Ensure mutliple inheritance of abstract documents - """ - class DateCreatedDocument(Document): - meta = { - 'allow_inheritance': True, - 'abstract': True, - } - - class DateUpdatedDocument(Document): - meta = { - 'allow_inheritance': True, - 'abstract': True, - } - - try: - class MyDocument(DateCreatedDocument, DateUpdatedDocument): - pass - except Exception: - self.assertTrue(False, "Couldn't create MyDocument class") - - def test_abstract_documents(self): - """Ensure that a document superclass can be marked as abstract - thereby not using it as the name for the collection.""" - - defaults = {'index_background': True, - 'index_drop_dups': True, - 'index_opts': {'hello': 'world'}, - 'allow_inheritance': True, - 'queryset_class': 'QuerySet', - 'db_alias': 'myDB', - 'shard_key': ('hello', 'world')} - - meta_settings = {'abstract': True} - meta_settings.update(defaults) - - class Animal(Document): - name = StringField() - meta = meta_settings - - class Fish(Animal): pass - class Guppy(Fish): pass - - class Mammal(Animal): - meta = {'abstract': True} - class Human(Mammal): pass - - for k, v in iteritems(defaults): - for cls in [Animal, Fish, Guppy]: - self.assertEqual(cls._meta[k], v) - - self.assertNotIn('collection', Animal._meta) - self.assertNotIn('collection', Mammal._meta) - - self.assertEqual(Animal._get_collection_name(), None) - 
self.assertEqual(Mammal._get_collection_name(), None) - - self.assertEqual(Fish._get_collection_name(), 'fish') - self.assertEqual(Guppy._get_collection_name(), 'fish') - self.assertEqual(Human._get_collection_name(), 'human') - - # ensure that a subclass of a non-abstract class can't be abstract - with self.assertRaises(ValueError): - class EvilHuman(Human): - evil = BooleanField(default=True) - meta = {'abstract': True} - - def test_abstract_embedded_documents(self): - # 789: EmbeddedDocument shouldn't inherit abstract - class A(EmbeddedDocument): - meta = {"abstract": True} - - class B(A): - pass - - self.assertFalse(B._meta["abstract"]) - - def test_inherited_collections(self): - """Ensure that subclassed documents don't override parents' - collections - """ - - class Drink(Document): - name = StringField() - meta = {'allow_inheritance': True} - - class Drinker(Document): - drink = GenericReferenceField() - - try: - warnings.simplefilter("error") - - class AcloholicDrink(Drink): - meta = {'collection': 'booze'} - - except SyntaxWarning: - warnings.simplefilter("ignore") - - class AlcoholicDrink(Drink): - meta = {'collection': 'booze'} - - else: - raise AssertionError("SyntaxWarning should be triggered") - - warnings.resetwarnings() - - Drink.drop_collection() - AlcoholicDrink.drop_collection() - Drinker.drop_collection() - - red_bull = Drink(name='Red Bull') - red_bull.save() - - programmer = Drinker(drink=red_bull) - programmer.save() - - beer = AlcoholicDrink(name='Beer') - beer.save() - real_person = Drinker(drink=beer) - real_person.save() - - self.assertEqual(Drinker.objects[0].drink.name, red_bull.name) - self.assertEqual(Drinker.objects[1].drink.name, beer.name) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/document/class_methods.py b/tests/document/test_class_methods.py similarity index 50% rename from tests/document/class_methods.py rename to tests/document/test_class_methods.py index 4fc648b7..be883b2a 100644 --- 
a/tests/document/class_methods.py +++ b/tests/document/test_class_methods.py @@ -2,18 +2,14 @@ import unittest from mongoengine import * -from mongoengine.pymongo_support import list_collection_names - -from mongoengine.queryset import NULLIFY, PULL from mongoengine.connection import get_db - -__all__ = ("ClassMethodsTest", ) +from mongoengine.pymongo_support import list_collection_names +from mongoengine.queryset import NULLIFY, PULL -class ClassMethodsTest(unittest.TestCase): - +class TestClassMethods(unittest.TestCase): def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") self.db = get_db() class Person(Document): @@ -33,54 +29,53 @@ class ClassMethodsTest(unittest.TestCase): def test_definition(self): """Ensure that document may be defined using fields. """ - self.assertEqual(['_cls', 'age', 'id', 'name'], - sorted(self.Person._fields.keys())) - self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"], - sorted([x.__class__.__name__ for x in - self.Person._fields.values()])) + assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys()) + assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted( + [x.__class__.__name__ for x in self.Person._fields.values()] + ) def test_get_db(self): """Ensure that get_db returns the expected db. """ db = self.Person._get_db() - self.assertEqual(self.db, db) + assert self.db == db def test_get_collection_name(self): """Ensure that get_collection_name returns the expected collection name. """ - collection_name = 'person' - self.assertEqual(collection_name, self.Person._get_collection_name()) + collection_name = "person" + assert collection_name == self.Person._get_collection_name() def test_get_collection(self): """Ensure that get_collection returns the expected collection. 
""" - collection_name = 'person' + collection_name = "person" collection = self.Person._get_collection() - self.assertEqual(self.db[collection_name], collection) + assert self.db[collection_name] == collection def test_drop_collection(self): """Ensure that the collection may be dropped from the database. """ - collection_name = 'person' - self.Person(name='Test').save() - self.assertIn(collection_name, list_collection_names(self.db)) + collection_name = "person" + self.Person(name="Test").save() + assert collection_name in list_collection_names(self.db) self.Person.drop_collection() - self.assertNotIn(collection_name, list_collection_names(self.db)) + assert collection_name not in list_collection_names(self.db) def test_register_delete_rule(self): """Ensure that register delete rule adds a delete rule to the document meta. """ + class Job(Document): employee = ReferenceField(self.Person) - self.assertEqual(self.Person._meta.get('delete_rules'), None) + assert self.Person._meta.get("delete_rules") is None - self.Person.register_delete_rule(Job, 'employee', NULLIFY) - self.assertEqual(self.Person._meta['delete_rules'], - {(Job, 'employee'): NULLIFY}) + self.Person.register_delete_rule(Job, "employee", NULLIFY) + assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY} def test_compare_indexes(self): """ Ensure that the indexes are properly created and that @@ -93,23 +88,27 @@ class ClassMethodsTest(unittest.TestCase): description = StringField() tags = StringField() - meta = { - 'indexes': [('author', 'title')] - } + meta = {"indexes": [("author", "title")]} BlogPost.drop_collection() BlogPost.ensure_indexes() - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} - BlogPost.ensure_index(['author', 'description']) - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('author', 1), ('description', 1)]]}) + BlogPost.ensure_index(["author", 
"description"]) + assert BlogPost.compare_indexes() == { + "missing": [], + "extra": [[("author", 1), ("description", 1)]], + } - BlogPost._get_collection().drop_index('author_1_description_1') - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) + BlogPost._get_collection().drop_index("author_1_description_1") + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} - BlogPost._get_collection().drop_index('author_1_title_1') - self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('author', 1), ('title', 1)]], 'extra': []}) + BlogPost._get_collection().drop_index("author_1_title_1") + assert BlogPost.compare_indexes() == { + "missing": [[("author", 1), ("title", 1)]], + "extra": [], + } def test_compare_indexes_inheritance(self): """ Ensure that the indexes are properly created and that @@ -122,32 +121,34 @@ class ClassMethodsTest(unittest.TestCase): title = StringField() description = StringField() - meta = { - 'allow_inheritance': True - } + meta = {"allow_inheritance": True} class BlogPostWithTags(BlogPost): tags = StringField() tag_list = ListField(StringField()) - meta = { - 'indexes': [('author', 'tags')] - } + meta = {"indexes": [("author", "tags")]} BlogPost.drop_collection() BlogPost.ensure_indexes() BlogPostWithTags.ensure_indexes() - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} - BlogPostWithTags.ensure_index(['author', 'tag_list']) - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]]}) + BlogPostWithTags.ensure_index(["author", "tag_list"]) + assert BlogPost.compare_indexes() == { + "missing": [], + "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]], + } - BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) + 
BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1") + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} - BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') - self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': []}) + BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1") + assert BlogPost.compare_indexes() == { + "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]], + "extra": [], + } def test_compare_indexes_multiple_subclasses(self): """ Ensure that compare_indexes behaves correctly if called from a @@ -159,32 +160,26 @@ class ClassMethodsTest(unittest.TestCase): title = StringField() description = StringField() - meta = { - 'allow_inheritance': True - } + meta = {"allow_inheritance": True} class BlogPostWithTags(BlogPost): tags = StringField() tag_list = ListField(StringField()) - meta = { - 'indexes': [('author', 'tags')] - } + meta = {"indexes": [("author", "tags")]} class BlogPostWithCustomField(BlogPost): custom = DictField() - meta = { - 'indexes': [('author', 'custom')] - } + meta = {"indexes": [("author", "custom")]} BlogPost.ensure_indexes() BlogPostWithTags.ensure_indexes() BlogPostWithCustomField.ensure_indexes() - self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) - self.assertEqual(BlogPostWithTags.compare_indexes(), {'missing': [], 'extra': []}) - self.assertEqual(BlogPostWithCustomField.compare_indexes(), {'missing': [], 'extra': []}) + assert BlogPost.compare_indexes() == {"missing": [], "extra": []} + assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []} + assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []} def test_compare_indexes_for_text_indexes(self): """ Ensure that compare_indexes behaves correctly for text indexes """ @@ -192,18 +187,21 @@ class ClassMethodsTest(unittest.TestCase): class Doc(Document): a = StringField() b = 
StringField() - meta = {'indexes': [ - {'fields': ['$a', "$b"], - 'default_language': 'english', - 'weights': {'a': 10, 'b': 2} - } - ]} + meta = { + "indexes": [ + { + "fields": ["$a", "$b"], + "default_language": "english", + "weights": {"a": 10, "b": 2}, + } + ] + } Doc.drop_collection() Doc.ensure_indexes() actual = Doc.compare_indexes() - expected = {'missing': [], 'extra': []} - self.assertEqual(actual, expected) + expected = {"missing": [], "extra": []} + assert actual == expected def test_list_indexes_inheritance(self): """ ensure that all of the indexes are listed regardless of the super- @@ -215,23 +213,17 @@ class ClassMethodsTest(unittest.TestCase): title = StringField() description = StringField() - meta = { - 'allow_inheritance': True - } + meta = {"allow_inheritance": True} class BlogPostWithTags(BlogPost): tags = StringField() - meta = { - 'indexes': [('author', 'tags')] - } + meta = {"indexes": [("author", "tags")]} class BlogPostWithTagsAndExtraText(BlogPostWithTags): extra_text = StringField() - meta = { - 'indexes': [('author', 'tags', 'extra_text')] - } + meta = {"indexes": [("author", "tags", "extra_text")]} BlogPost.drop_collection() @@ -239,17 +231,16 @@ class ClassMethodsTest(unittest.TestCase): BlogPostWithTags.ensure_indexes() BlogPostWithTagsAndExtraText.ensure_indexes() - self.assertEqual(BlogPost.list_indexes(), - BlogPostWithTags.list_indexes()) - self.assertEqual(BlogPost.list_indexes(), - BlogPostWithTagsAndExtraText.list_indexes()) - self.assertEqual(BlogPost.list_indexes(), - [[('_cls', 1), ('author', 1), ('tags', 1)], - [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)], - [(u'_id', 1)], [('_cls', 1)]]) + assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes() + assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes() + assert BlogPost.list_indexes() == [ + [("_cls", 1), ("author", 1), ("tags", 1)], + [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)], + [(u"_id", 1)], + 
[("_cls", 1)], + ] def test_register_delete_rule_inherited(self): - class Vaccine(Document): name = StringField(required=True) @@ -257,15 +248,17 @@ class ClassMethodsTest(unittest.TestCase): class Animal(Document): family = StringField(required=True) - vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL)) + vaccine_made = ListField( + ReferenceField("Vaccine", reverse_delete_rule=PULL) + ) meta = {"allow_inheritance": True, "indexes": ["family"]} class Cat(Animal): name = StringField(required=True) - self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL) - self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL) + assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL + assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL def test_collection_naming(self): """Ensure that a collection with a specified name may be used. @@ -273,80 +266,76 @@ class ClassMethodsTest(unittest.TestCase): class DefaultNamingTest(Document): pass - self.assertEqual('default_naming_test', - DefaultNamingTest._get_collection_name()) + + assert "default_naming_test" == DefaultNamingTest._get_collection_name() class CustomNamingTest(Document): - meta = {'collection': 'pimp_my_collection'} + meta = {"collection": "pimp_my_collection"} - self.assertEqual('pimp_my_collection', - CustomNamingTest._get_collection_name()) + assert "pimp_my_collection" == CustomNamingTest._get_collection_name() class DynamicNamingTest(Document): - meta = {'collection': lambda c: "DYNAMO"} - self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name()) + meta = {"collection": lambda c: "DYNAMO"} + + assert "DYNAMO" == DynamicNamingTest._get_collection_name() # Use Abstract class to handle backwards compatibility class BaseDocument(Document): - meta = { - 'abstract': True, - 'collection': lambda c: c.__name__.lower() - } + meta = {"abstract": True, "collection": lambda c: c.__name__.lower()} class 
OldNamingConvention(BaseDocument): pass - self.assertEqual('oldnamingconvention', - OldNamingConvention._get_collection_name()) + + assert "oldnamingconvention" == OldNamingConvention._get_collection_name() class InheritedAbstractNamingTest(BaseDocument): - meta = {'collection': 'wibble'} - self.assertEqual('wibble', - InheritedAbstractNamingTest._get_collection_name()) + meta = {"collection": "wibble"} + + assert "wibble" == InheritedAbstractNamingTest._get_collection_name() # Mixin tests class BaseMixin(object): - meta = { - 'collection': lambda c: c.__name__.lower() - } + meta = {"collection": lambda c: c.__name__.lower()} class OldMixinNamingConvention(Document, BaseMixin): pass - self.assertEqual('oldmixinnamingconvention', - OldMixinNamingConvention._get_collection_name()) + + assert ( + "oldmixinnamingconvention" + == OldMixinNamingConvention._get_collection_name() + ) class BaseMixin(object): - meta = { - 'collection': lambda c: c.__name__.lower() - } + meta = {"collection": lambda c: c.__name__.lower()} class BaseDocument(Document, BaseMixin): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class MyDocument(BaseDocument): pass - self.assertEqual('basedocument', MyDocument._get_collection_name()) + assert "basedocument" == MyDocument._get_collection_name() def test_custom_collection_name_operations(self): """Ensure that a collection with a specified name is used as expected. 
""" - collection_name = 'personCollTest' + collection_name = "personCollTest" class Person(Document): name = StringField() - meta = {'collection': collection_name} + meta = {"collection": collection_name} Person(name="Test User").save() - self.assertIn(collection_name, list_collection_names(self.db)) + assert collection_name in list_collection_names(self.db) user_obj = self.db[collection_name].find_one() - self.assertEqual(user_obj['name'], "Test User") + assert user_obj["name"] == "Test User" user_obj = Person.objects[0] - self.assertEqual(user_obj.name, "Test User") + assert user_obj.name == "Test User" Person.drop_collection() - self.assertNotIn(collection_name, list_collection_names(self.db)) + assert collection_name not in list_collection_names(self.db) def test_collection_name_and_primary(self): """Ensure that a collection with a specified name may be used. @@ -354,15 +343,15 @@ class ClassMethodsTest(unittest.TestCase): class Person(Document): name = StringField(primary_key=True) - meta = {'collection': 'app'} + meta = {"collection": "app"} Person(name="Test User").save() user_obj = Person.objects.first() - self.assertEqual(user_obj.name, "Test User") + assert user_obj.name == "Test User" Person.drop_collection() -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/test_delta.py b/tests/document/test_delta.py new file mode 100644 index 00000000..e7baaa23 --- /dev/null +++ b/tests/document/test_delta.py @@ -0,0 +1,926 @@ +# -*- coding: utf-8 -*- +import unittest + +from bson import SON +from mongoengine import * +from mongoengine.pymongo_support import list_collection_names +from tests.utils import MongoDBTestCase + + +class TestDelta(MongoDBTestCase): + def setUp(self): + super(TestDelta, self).setUp() + + class Person(Document): + name = StringField() + age = IntField() + + non_field = True + + meta = {"allow_inheritance": True} + + self.Person = Person + + def tearDown(self): + for collection in 
list_collection_names(self.db): + self.db.drop_collection(collection) + + def test_delta(self): + self.delta(Document) + self.delta(DynamicDocument) + + @staticmethod + def delta(DocClass): + class Doc(DocClass): + string_field = StringField() + int_field = IntField() + dict_field = DictField() + list_field = ListField() + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) + + doc.string_field = "hello" + assert doc._get_changed_fields() == ["string_field"] + assert doc._delta() == ({"string_field": "hello"}, {}) + + doc._changed_fields = [] + doc.int_field = 1 + assert doc._get_changed_fields() == ["int_field"] + assert doc._delta() == ({"int_field": 1}, {}) + + doc._changed_fields = [] + dict_value = {"hello": "world", "ping": "pong"} + doc.dict_field = dict_value + assert doc._get_changed_fields() == ["dict_field"] + assert doc._delta() == ({"dict_field": dict_value}, {}) + + doc._changed_fields = [] + list_value = ["1", 2, {"hello": "world"}] + doc.list_field = list_value + assert doc._get_changed_fields() == ["list_field"] + assert doc._delta() == ({"list_field": list_value}, {}) + + # Test unsetting + doc._changed_fields = [] + doc.dict_field = {} + assert doc._get_changed_fields() == ["dict_field"] + assert doc._delta() == ({}, {"dict_field": 1}) + + doc._changed_fields = [] + doc.list_field = [] + assert doc._get_changed_fields() == ["list_field"] + assert doc._delta() == ({}, {"list_field": 1}) + + def test_delta_recursive(self): + self.delta_recursive(Document, EmbeddedDocument) + self.delta_recursive(DynamicDocument, EmbeddedDocument) + self.delta_recursive(Document, DynamicEmbeddedDocument) + self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) + + def delta_recursive(self, DocClass, EmbeddedClass): + class Embedded(EmbeddedClass): + id = StringField() + string_field = StringField() + int_field = IntField() + dict_field = DictField() + 
list_field = ListField() + + class Doc(DocClass): + string_field = StringField() + int_field = IntField() + dict_field = DictField() + list_field = ListField() + embedded_field = EmbeddedDocumentField(Embedded) + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) + + embedded_1 = Embedded() + embedded_1.id = "010101" + embedded_1.string_field = "hello" + embedded_1.int_field = 1 + embedded_1.dict_field = {"hello": "world"} + embedded_1.list_field = ["1", 2, {"hello": "world"}] + doc.embedded_field = embedded_1 + + assert doc._get_changed_fields() == ["embedded_field"] + + embedded_delta = { + "id": "010101", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ["1", 2, {"hello": "world"}], + } + assert doc.embedded_field._delta() == (embedded_delta, {}) + assert doc._delta() == ({"embedded_field": embedded_delta}, {}) + + doc.save() + doc = doc.reload(10) + + doc.embedded_field.dict_field = {} + assert doc._get_changed_fields() == ["embedded_field.dict_field"] + assert doc.embedded_field._delta() == ({}, {"dict_field": 1}) + assert doc._delta() == ({}, {"embedded_field.dict_field": 1}) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.dict_field == {} + + doc.embedded_field.list_field = [] + assert doc._get_changed_fields() == ["embedded_field.list_field"] + assert doc.embedded_field._delta() == ({}, {"list_field": 1}) + assert doc._delta() == ({}, {"embedded_field.list_field": 1}) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field == [] + + embedded_2 = Embedded() + embedded_2.string_field = "hello" + embedded_2.int_field = 1 + embedded_2.dict_field = {"hello": "world"} + embedded_2.list_field = ["1", 2, {"hello": "world"}] + + doc.embedded_field.list_field = ["1", 2, embedded_2] + assert doc._get_changed_fields() == ["embedded_field.list_field"] + + assert 
doc.embedded_field._delta() == ( + { + "list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "dict_field": {"hello": "world"}, + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, + ) + + assert doc._delta() == ( + { + "embedded_field.list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "dict_field": {"hello": "world"}, + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, + ) + doc.save() + doc = doc.reload(10) + + assert doc.embedded_field.list_field[0] == "1" + assert doc.embedded_field.list_field[1] == 2 + for k in doc.embedded_field.list_field[2]._fields: + assert doc.embedded_field.list_field[2][k] == embedded_2[k] + + doc.embedded_field.list_field[2].string_field = "world" + assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"] + assert doc.embedded_field._delta() == ( + {"list_field.2.string_field": "world"}, + {}, + ) + assert doc._delta() == ( + {"embedded_field.list_field.2.string_field": "world"}, + {}, + ) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field[2].string_field == "world" + + # Test multiple assignments + doc.embedded_field.list_field[2].string_field = "hello world" + doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] + assert doc._get_changed_fields() == ["embedded_field.list_field.2"] + assert doc.embedded_field._delta() == ( + { + "list_field.2": { + "_cls": "Embedded", + "string_field": "hello world", + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + "dict_field": {"hello": "world"}, + } + }, + {}, + ) + assert doc._delta() == ( + { + "embedded_field.list_field.2": { + "_cls": "Embedded", + "string_field": "hello world", + "int_field": 1, + "list_field": ["1", 2, {"hello": "world"}], + "dict_field": {"hello": "world"}, + } + }, + {}, + ) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field[2].string_field == "hello 
world" + + # Test list native methods + doc.embedded_field.list_field[2].list_field.pop(0) + assert doc._delta() == ( + {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]}, + {}, + ) + doc.save() + doc = doc.reload(10) + + doc.embedded_field.list_field[2].list_field.append(1) + assert doc._delta() == ( + {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]}, + {}, + ) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1] + + doc.embedded_field.list_field[2].list_field.sort(key=str) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}] + + del doc.embedded_field.list_field[2].list_field[2]["hello"] + assert doc._delta() == ( + {}, + {"embedded_field.list_field.2.list_field.2.hello": 1}, + ) + doc.save() + doc = doc.reload(10) + + del doc.embedded_field.list_field[2].list_field + assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1}) + + doc.save() + doc = doc.reload(10) + + doc.dict_field["Embedded"] = embedded_1 + doc.save() + doc = doc.reload(10) + + doc.dict_field["Embedded"].string_field = "Hello World" + assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"] + assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {}) + + def test_circular_reference_deltas(self): + self.circular_reference_deltas(Document, Document) + self.circular_reference_deltas(Document, DynamicDocument) + self.circular_reference_deltas(DynamicDocument, Document) + self.circular_reference_deltas(DynamicDocument, DynamicDocument) + + def circular_reference_deltas(self, DocClass1, DocClass2): + class Person(DocClass1): + name = StringField() + owns = ListField(ReferenceField("Organization")) + + class Organization(DocClass2): + name = StringField() + owner = ReferenceField("Person") + + Person.drop_collection() + Organization.drop_collection() + + person = 
Person(name="owner").save() + organization = Organization(name="company").save() + + person.owns.append(organization) + organization.owner = person + + person.save() + organization.save() + + p = Person.objects[0].select_related() + o = Organization.objects.first() + assert p.owns[0] == o + assert o.owner == p + + def test_circular_reference_deltas_2(self): + self.circular_reference_deltas_2(Document, Document) + self.circular_reference_deltas_2(Document, DynamicDocument) + self.circular_reference_deltas_2(DynamicDocument, Document) + self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) + + def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True): + class Person(DocClass1): + name = StringField() + owns = ListField(ReferenceField("Organization", dbref=dbref)) + employer = ReferenceField("Organization", dbref=dbref) + + class Organization(DocClass2): + name = StringField() + owner = ReferenceField("Person", dbref=dbref) + employees = ListField(ReferenceField("Person", dbref=dbref)) + + Person.drop_collection() + Organization.drop_collection() + + person = Person(name="owner").save() + employee = Person(name="employee").save() + organization = Organization(name="company").save() + + person.owns.append(organization) + organization.owner = person + + organization.employees.append(employee) + employee.employer = organization + + person.save() + organization.save() + employee.save() + + p = Person.objects.get(name="owner") + e = Person.objects.get(name="employee") + o = Organization.objects.first() + + assert p.owns[0] == o + assert o.owner == p + assert e.employer == o + + return person, organization, employee + + def test_delta_db_field(self): + self.delta_db_field(Document) + self.delta_db_field(DynamicDocument) + + def delta_db_field(self, DocClass): + class Doc(DocClass): + string_field = StringField(db_field="db_string_field") + int_field = IntField(db_field="db_int_field") + dict_field = DictField(db_field="db_dict_field") + list_field 
= ListField(db_field="db_list_field") + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) + + doc.string_field = "hello" + assert doc._get_changed_fields() == ["db_string_field"] + assert doc._delta() == ({"db_string_field": "hello"}, {}) + + doc._changed_fields = [] + doc.int_field = 1 + assert doc._get_changed_fields() == ["db_int_field"] + assert doc._delta() == ({"db_int_field": 1}, {}) + + doc._changed_fields = [] + dict_value = {"hello": "world", "ping": "pong"} + doc.dict_field = dict_value + assert doc._get_changed_fields() == ["db_dict_field"] + assert doc._delta() == ({"db_dict_field": dict_value}, {}) + + doc._changed_fields = [] + list_value = ["1", 2, {"hello": "world"}] + doc.list_field = list_value + assert doc._get_changed_fields() == ["db_list_field"] + assert doc._delta() == ({"db_list_field": list_value}, {}) + + # Test unsetting + doc._changed_fields = [] + doc.dict_field = {} + assert doc._get_changed_fields() == ["db_dict_field"] + assert doc._delta() == ({}, {"db_dict_field": 1}) + + doc._changed_fields = [] + doc.list_field = [] + assert doc._get_changed_fields() == ["db_list_field"] + assert doc._delta() == ({}, {"db_list_field": 1}) + + # Test it saves that data + doc = Doc() + doc.save() + + doc.string_field = "hello" + doc.int_field = 1 + doc.dict_field = {"hello": "world"} + doc.list_field = ["1", 2, {"hello": "world"}] + doc.save() + doc = doc.reload(10) + + assert doc.string_field == "hello" + assert doc.int_field == 1 + assert doc.dict_field == {"hello": "world"} + assert doc.list_field == ["1", 2, {"hello": "world"}] + + def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self): + self.delta_recursive_db_field(Document, EmbeddedDocument) + + def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self): + self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) + + def 
test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self): + self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) + + def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self): + self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) + + @staticmethod + def delta_recursive_db_field(DocClass, EmbeddedClass): + class Embedded(EmbeddedClass): + string_field = StringField(db_field="db_string_field") + int_field = IntField(db_field="db_int_field") + dict_field = DictField(db_field="db_dict_field") + list_field = ListField(db_field="db_list_field") + + class Doc(DocClass): + string_field = StringField(db_field="db_string_field") + int_field = IntField(db_field="db_int_field") + dict_field = DictField(db_field="db_dict_field") + list_field = ListField(db_field="db_list_field") + embedded_field = EmbeddedDocumentField( + Embedded, db_field="db_embedded_field" + ) + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) + + embedded_1 = Embedded() + embedded_1.string_field = "hello" + embedded_1.int_field = 1 + embedded_1.dict_field = {"hello": "world"} + embedded_1.list_field = ["1", 2, {"hello": "world"}] + doc.embedded_field = embedded_1 + + assert doc._get_changed_fields() == ["db_embedded_field"] + + embedded_delta = { + "db_string_field": "hello", + "db_int_field": 1, + "db_dict_field": {"hello": "world"}, + "db_list_field": ["1", 2, {"hello": "world"}], + } + assert doc.embedded_field._delta() == (embedded_delta, {}) + assert doc._delta() == ({"db_embedded_field": embedded_delta}, {}) + + doc.save() + doc = doc.reload(10) + + doc.embedded_field.dict_field = {} + assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"] + assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1}) + assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1}) + doc.save() + doc = doc.reload(10) + assert 
doc.embedded_field.dict_field == {} + + assert doc._get_changed_fields() == [] + doc.embedded_field.list_field = [] + assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] + assert doc.embedded_field._delta() == ({}, {"db_list_field": 1}) + assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1}) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field == [] + + embedded_2 = Embedded() + embedded_2.string_field = "hello" + embedded_2.int_field = 1 + embedded_2.dict_field = {"hello": "world"} + embedded_2.list_field = ["1", 2, {"hello": "world"}] + + doc.embedded_field.list_field = ["1", 2, embedded_2] + assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] + assert doc.embedded_field._delta() == ( + { + "db_list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "db_string_field": "hello", + "db_dict_field": {"hello": "world"}, + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, + ) + + assert doc._delta() == ( + { + "db_embedded_field.db_list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "db_string_field": "hello", + "db_dict_field": {"hello": "world"}, + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + }, + ] + }, + {}, + ) + doc.save() + doc = doc.reload(10) + + assert doc.embedded_field.list_field[0] == "1" + assert doc.embedded_field.list_field[1] == 2 + for k in doc.embedded_field.list_field[2]._fields: + assert doc.embedded_field.list_field[2][k] == embedded_2[k] + + doc.embedded_field.list_field[2].string_field = "world" + assert doc._get_changed_fields() == [ + "db_embedded_field.db_list_field.2.db_string_field" + ] + assert doc.embedded_field._delta() == ( + {"db_list_field.2.db_string_field": "world"}, + {}, + ) + assert doc._delta() == ( + {"db_embedded_field.db_list_field.2.db_string_field": "world"}, + {}, + ) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field[2].string_field == "world" + + # Test 
multiple assignments + doc.embedded_field.list_field[2].string_field = "hello world" + doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] + assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"] + assert doc.embedded_field._delta() == ( + { + "db_list_field.2": { + "_cls": "Embedded", + "db_string_field": "hello world", + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + "db_dict_field": {"hello": "world"}, + } + }, + {}, + ) + assert doc._delta() == ( + { + "db_embedded_field.db_list_field.2": { + "_cls": "Embedded", + "db_string_field": "hello world", + "db_int_field": 1, + "db_list_field": ["1", 2, {"hello": "world"}], + "db_dict_field": {"hello": "world"}, + } + }, + {}, + ) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field[2].string_field == "hello world" + + # Test list native methods + doc.embedded_field.list_field[2].list_field.pop(0) + assert doc._delta() == ( + { + "db_embedded_field.db_list_field.2.db_list_field": [ + 2, + {"hello": "world"}, + ] + }, + {}, + ) + doc.save() + doc = doc.reload(10) + + doc.embedded_field.list_field[2].list_field.append(1) + assert doc._delta() == ( + { + "db_embedded_field.db_list_field.2.db_list_field": [ + 2, + {"hello": "world"}, + 1, + ] + }, + {}, + ) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1] + + doc.embedded_field.list_field[2].list_field.sort(key=str) + doc.save() + doc = doc.reload(10) + assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}] + + del doc.embedded_field.list_field[2].list_field[2]["hello"] + assert doc._delta() == ( + {}, + {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1}, + ) + doc.save() + doc = doc.reload(10) + + assert doc._delta() == ({}, {},) + del doc.embedded_field.list_field[2].list_field + assert doc._delta() == ( + {}, + {"db_embedded_field.db_list_field.2.db_list_field": 1}, + ) + + def 
test_delta_for_dynamic_documents(self): + class Person(DynamicDocument): + name = StringField() + meta = {"allow_inheritance": True} + + Person.drop_collection() + + p = Person(name="James", age=34) + assert p._delta() == ( + SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), + {}, + ) + + p.doc = 123 + del p.doc + assert p._delta() == ( + SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), + {}, + ) + + p = Person() + p.name = "Dean" + p.age = 22 + p.save() + + p.age = 24 + assert p.age == 24 + assert p._get_changed_fields() == ["age"] + assert p._delta() == ({"age": 24}, {}) + + p = Person.objects(age=22).get() + p.age = 24 + assert p.age == 24 + assert p._get_changed_fields() == ["age"] + assert p._delta() == ({"age": 24}, {}) + + p.save() + assert 1 == Person.objects(age=24).count() + + def test_dynamic_delta(self): + class Doc(DynamicDocument): + pass + + Doc.drop_collection() + doc = Doc() + doc.save() + + doc = Doc.objects.first() + assert doc._get_changed_fields() == [] + assert doc._delta() == ({}, {}) + + doc.string_field = "hello" + assert doc._get_changed_fields() == ["string_field"] + assert doc._delta() == ({"string_field": "hello"}, {}) + + doc._changed_fields = [] + doc.int_field = 1 + assert doc._get_changed_fields() == ["int_field"] + assert doc._delta() == ({"int_field": 1}, {}) + + doc._changed_fields = [] + dict_value = {"hello": "world", "ping": "pong"} + doc.dict_field = dict_value + assert doc._get_changed_fields() == ["dict_field"] + assert doc._delta() == ({"dict_field": dict_value}, {}) + + doc._changed_fields = [] + list_value = ["1", 2, {"hello": "world"}] + doc.list_field = list_value + assert doc._get_changed_fields() == ["list_field"] + assert doc._delta() == ({"list_field": list_value}, {}) + + # Test unsetting + doc._changed_fields = [] + doc.dict_field = {} + assert doc._get_changed_fields() == ["dict_field"] + assert doc._delta() == ({}, {"dict_field": 1}) + + doc._changed_fields = [] + doc.list_field = [] + assert 
doc._get_changed_fields() == ["list_field"] + assert doc._delta() == ({}, {"list_field": 1}) + + def test_delta_with_dbref_true(self): + person, organization, employee = self.circular_reference_deltas_2( + Document, Document, True + ) + employee.name = "test" + + assert organization._get_changed_fields() == [] + + updates, removals = organization._delta() + assert removals == {} + assert updates == {} + + organization.employees.append(person) + updates, removals = organization._delta() + assert removals == {} + assert "employees" in updates + + def test_delta_with_dbref_false(self): + person, organization, employee = self.circular_reference_deltas_2( + Document, Document, False + ) + employee.name = "test" + + assert organization._get_changed_fields() == [] + + updates, removals = organization._delta() + assert removals == {} + assert updates == {} + + organization.employees.append(person) + updates, removals = organization._delta() + assert removals == {} + assert "employees" in updates + + def test_nested_nested_fields_mark_as_changed(self): + class EmbeddedDoc(EmbeddedDocument): + name = StringField() + + class MyDoc(Document): + subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) + name = StringField() + + MyDoc.drop_collection() + + mydoc = MyDoc( + name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}} + ).save() + + mydoc = MyDoc.objects.first() + subdoc = mydoc.subs["a"]["b"] + subdoc.name = "bar" + + assert subdoc._get_changed_fields() == ["name"] + assert mydoc._get_changed_fields() == ["subs.a.b.name"] + + mydoc._clear_changed_fields() + assert mydoc._get_changed_fields() == [] + + def test_lower_level_mark_as_changed(self): + class EmbeddedDoc(EmbeddedDocument): + name = StringField() + + class MyDoc(Document): + subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) + + MyDoc.drop_collection() + + MyDoc().save() + + mydoc = MyDoc.objects.first() + mydoc.subs["a"] = EmbeddedDoc() + assert mydoc._get_changed_fields() == ["subs.a"] + + subdoc 
= mydoc.subs["a"] + subdoc.name = "bar" + + assert subdoc._get_changed_fields() == ["name"] + assert mydoc._get_changed_fields() == ["subs.a"] + mydoc.save() + + mydoc._clear_changed_fields() + assert mydoc._get_changed_fields() == [] + + def test_upper_level_mark_as_changed(self): + class EmbeddedDoc(EmbeddedDocument): + name = StringField() + + class MyDoc(Document): + subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) + + MyDoc.drop_collection() + + MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save() + + mydoc = MyDoc.objects.first() + subdoc = mydoc.subs["a"] + subdoc.name = "bar" + + assert subdoc._get_changed_fields() == ["name"] + assert mydoc._get_changed_fields() == ["subs.a.name"] + + mydoc.subs["a"] = EmbeddedDoc() + assert mydoc._get_changed_fields() == ["subs.a"] + mydoc.save() + + mydoc._clear_changed_fields() + assert mydoc._get_changed_fields() == [] + + def test_referenced_object_changed_attributes(self): + """Ensures that when you save a new reference to a field, the referenced object isn't altered""" + + class Organization(Document): + name = StringField() + + class User(Document): + name = StringField() + org = ReferenceField("Organization", required=True) + + Organization.drop_collection() + User.drop_collection() + + org1 = Organization(name="Org 1") + org1.save() + + org2 = Organization(name="Org 2") + org2.save() + + user = User(name="Fred", org=org1) + user.save() + + org1.reload() + org2.reload() + user.reload() + assert org1.name == "Org 1" + assert org2.name == "Org 2" + assert user.name == "Fred" + + user.name = "Harold" + user.org = org2 + + org2.name = "New Org 2" + assert org2.name == "New Org 2" + + user.save() + org2.save() + + assert org2.name == "New Org 2" + org2.reload() + assert org2.name == "New Org 2" + + def test_delta_for_nested_map_fields(self): + class UInfoDocument(Document): + phone = StringField() + + class EmbeddedRole(EmbeddedDocument): + type = StringField() + + class EmbeddedUser(EmbeddedDocument): + name = 
StringField() + roles = MapField(field=EmbeddedDocumentField(EmbeddedRole)) + rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole)) + info = ReferenceField(UInfoDocument) + + class Doc(Document): + users = MapField(field=EmbeddedDocumentField(EmbeddedUser)) + num = IntField(default=-1) + + Doc.drop_collection() + + doc = Doc(num=1) + doc.users["007"] = EmbeddedUser(name="Agent007") + doc.save() + + uinfo = UInfoDocument(phone="79089269066") + uinfo.save() + + d = Doc.objects(num=1).first() + d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin") + d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) + d.users["007"]["info"] = uinfo + delta = d._delta() + assert True == ("users.007.roles.666" in delta[0]) + assert True == ("users.007.rolist" in delta[0]) + assert True == ("users.007.info" in delta[0]) + assert "superadmin" == delta[0]["users.007.roles.666"]["type"] + assert "oops" == delta[0]["users.007.rolist"][0]["type"] + assert uinfo.id == delta[0]["users.007.info"] + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/document/dynamic.py b/tests/document/test_dynamic.py similarity index 55% rename from tests/document/dynamic.py rename to tests/document/test_dynamic.py index 44548d27..0032dfd9 100644 --- a/tests/document/dynamic.py +++ b/tests/document/test_dynamic.py @@ -1,19 +1,20 @@ import unittest +import pytest + from mongoengine import * from tests.utils import MongoDBTestCase -__all__ = ("TestDynamicDocument", ) +__all__ = ("TestDynamicDocument",) class TestDynamicDocument(MongoDBTestCase): - def setUp(self): super(TestDynamicDocument, self).setUp() class Person(DynamicDocument): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} Person.drop_collection() @@ -26,16 +27,15 @@ class TestDynamicDocument(MongoDBTestCase): p.name = "James" p.age = 34 - self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", - "age": 34}) - self.assertEqual(p.to_mongo().keys(), ["_cls", 
"name", "age"]) + assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34} + assert p.to_mongo().keys() == ["_cls", "name", "age"] p.save() - self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"]) + assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"] - self.assertEqual(self.Person.objects.first().age, 34) + assert self.Person.objects.first().age == 34 # Confirm no changes to self.Person - self.assertFalse(hasattr(self.Person, 'age')) + assert not hasattr(self.Person, "age") def test_change_scope_of_variable(self): """Test changing the scope of a dynamic field has no adverse effects""" @@ -45,11 +45,11 @@ class TestDynamicDocument(MongoDBTestCase): p.save() p = self.Person.objects.get() - p.misc = {'hello': 'world'} + p.misc = {"hello": "world"} p.save() p = self.Person.objects.get() - self.assertEqual(p.misc, {'hello': 'world'}) + assert p.misc == {"hello": "world"} def test_delete_dynamic_field(self): """Test deleting a dynamic field works""" @@ -60,23 +60,23 @@ class TestDynamicDocument(MongoDBTestCase): p.save() p = self.Person.objects.get() - p.misc = {'hello': 'world'} + p.misc = {"hello": "world"} p.save() p = self.Person.objects.get() - self.assertEqual(p.misc, {'hello': 'world'}) + assert p.misc == {"hello": "world"} collection = self.db[self.Person._get_collection_name()] obj = collection.find_one() - self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) + assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"] del p.misc p.save() p = self.Person.objects.get() - self.assertFalse(hasattr(p, 'misc')) + assert not hasattr(p, "misc") obj = collection.find_one() - self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) + assert sorted(obj.keys()) == ["_cls", "_id", "name"] def test_reload_after_unsetting(self): p = self.Person() @@ -90,78 +90,55 @@ class TestDynamicDocument(MongoDBTestCase): p = self.Person.objects.create() p.update(age=1) - self.assertEqual(len(p._data), 3) - 
self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name']) + assert len(p._data) == 3 + assert sorted(p._data.keys()) == ["_cls", "id", "name"] p.reload() - self.assertEqual(len(p._data), 4) - self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) + assert len(p._data) == 4 + assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"] def test_fields_without_underscore(self): """Ensure we can query dynamic fields""" Person = self.Person - p = self.Person(name='Dean') + p = self.Person(name="Dean") p.save() raw_p = Person.objects.as_pymongo().get(id=p.id) - self.assertEqual( - raw_p, - { - '_cls': u'Person', - '_id': p.id, - 'name': u'Dean' - } - ) + assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"} - p.name = 'OldDean' - p.newattr = 'garbage' + p.name = "OldDean" + p.newattr = "garbage" p.save() raw_p = Person.objects.as_pymongo().get(id=p.id) - self.assertEqual( - raw_p, - { - '_cls': u'Person', - '_id': p.id, - 'name': 'OldDean', - 'newattr': u'garbage' - } - ) + assert raw_p == { + "_cls": u"Person", + "_id": p.id, + "name": "OldDean", + "newattr": u"garbage", + } def test_fields_containing_underscore(self): """Ensure we can query dynamic fields""" + class WeirdPerson(DynamicDocument): name = StringField() _name = StringField() WeirdPerson.drop_collection() - p = WeirdPerson(name='Dean', _name='Dean') + p = WeirdPerson(name="Dean", _name="Dean") p.save() raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) - self.assertEqual( - raw_p, - { - '_id': p.id, - '_name': u'Dean', - 'name': u'Dean' - } - ) + assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"} - p.name = 'OldDean' - p._name = 'NewDean' - p._newattr1 = 'garbage' # Unknown fields won't be added + p.name = "OldDean" + p._name = "NewDean" + p._newattr1 = "garbage" # Unknown fields won't be added p.save() raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) - self.assertEqual( - raw_p, - { - '_id': p.id, - '_name': u'NewDean', - 'name': u'OldDean', - } 
- ) + assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"} def test_dynamic_document_queries(self): """Ensure we can query dynamic fields""" @@ -170,10 +147,10 @@ class TestDynamicDocument(MongoDBTestCase): p.age = 22 p.save() - self.assertEqual(1, self.Person.objects(age=22).count()) + assert 1 == self.Person.objects(age=22).count() p = self.Person.objects(age=22) p = p.get() - self.assertEqual(22, p.age) + assert 22 == p.age def test_complex_dynamic_document_queries(self): class Person(DynamicDocument): @@ -193,26 +170,25 @@ class TestDynamicDocument(MongoDBTestCase): p2.age = 10 p2.save() - self.assertEqual(Person.objects(age__icontains='ten').count(), 2) - self.assertEqual(Person.objects(age__gte=10).count(), 1) + assert Person.objects(age__icontains="ten").count() == 2 + assert Person.objects(age__gte=10).count() == 1 def test_complex_data_lookups(self): """Ensure you can query dynamic document dynamic fields""" p = self.Person() - p.misc = {'hello': 'world'} + p.misc = {"hello": "world"} p.save() - self.assertEqual(1, self.Person.objects(misc__hello='world').count()) + assert 1 == self.Person.objects(misc__hello="world").count() def test_three_level_complex_data_lookups(self): """Ensure you can query three level document dynamic fields""" - p = self.Person.objects.create( - misc={'hello': {'hello2': 'world'}} - ) - self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count()) + self.Person.objects.create(misc={"hello": {"hello2": "world"}}) + assert 1 == self.Person.objects(misc__hello__hello2="world").count() def test_complex_embedded_document_validation(self): """Ensure embedded dynamic documents may be validated""" + class Embedded(DynamicEmbeddedDocument): content = URLField() @@ -222,27 +198,29 @@ class TestDynamicDocument(MongoDBTestCase): Doc.drop_collection() doc = Doc() - embedded_doc_1 = Embedded(content='http://mongoengine.org') + embedded_doc_1 = Embedded(content="http://mongoengine.org") embedded_doc_1.validate() 
- embedded_doc_2 = Embedded(content='this is not a url') - self.assertRaises(ValidationError, embedded_doc_2.validate) + embedded_doc_2 = Embedded(content="this is not a url") + with pytest.raises(ValidationError): + embedded_doc_2.validate() doc.embedded_field_1 = embedded_doc_1 doc.embedded_field_2 = embedded_doc_2 - self.assertRaises(ValidationError, doc.validate) + with pytest.raises(ValidationError): + doc.validate() def test_inheritance(self): """Ensure that dynamic document plays nice with inheritance""" + class Employee(self.Person): salary = IntField() Employee.drop_collection() - self.assertIn('name', Employee._fields) - self.assertIn('salary', Employee._fields) - self.assertEqual(Employee._get_collection_name(), - self.Person._get_collection_name()) + assert "name" in Employee._fields + assert "salary" in Employee._fields + assert Employee._get_collection_name() == self.Person._get_collection_name() joe_bloggs = Employee() joe_bloggs.name = "Joe Bloggs" @@ -250,14 +228,15 @@ class TestDynamicDocument(MongoDBTestCase): joe_bloggs.age = 20 joe_bloggs.save() - self.assertEqual(1, self.Person.objects(age=20).count()) - self.assertEqual(1, Employee.objects(age=20).count()) + assert 1 == self.Person.objects(age=20).count() + assert 1 == Employee.objects(age=20).count() joe_bloggs = self.Person.objects.first() - self.assertIsInstance(joe_bloggs, Employee) + assert isinstance(joe_bloggs, Employee) def test_embedded_dynamic_document(self): """Test dynamic embedded documents""" + class Embedded(DynamicEmbeddedDocument): pass @@ -268,33 +247,33 @@ class TestDynamicDocument(MongoDBTestCase): doc = Doc() embedded_1 = Embedded() - embedded_1.string_field = 'hello' + embedded_1.string_field = "hello" embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] + embedded_1.dict_field = {"hello": "world"} + embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 - 
self.assertEqual(doc.to_mongo(), { + assert doc.to_mongo() == { "embedded_field": { "_cls": "Embedded", "string_field": "hello", "int_field": 1, "dict_field": {"hello": "world"}, - "list_field": ['1', 2, {'hello': 'world'}] + "list_field": ["1", 2, {"hello": "world"}], } - }) + } doc.save() doc = Doc.objects.first() - self.assertEqual(doc.embedded_field.__class__, Embedded) - self.assertEqual(doc.embedded_field.string_field, "hello") - self.assertEqual(doc.embedded_field.int_field, 1) - self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(doc.embedded_field.list_field, - ['1', 2, {'hello': 'world'}]) + assert doc.embedded_field.__class__ == Embedded + assert doc.embedded_field.string_field == "hello" + assert doc.embedded_field.int_field == 1 + assert doc.embedded_field.dict_field == {"hello": "world"} + assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}] def test_complex_embedded_documents(self): """Test complex dynamic embedded documents setups""" + class Embedded(DynamicEmbeddedDocument): pass @@ -305,51 +284,54 @@ class TestDynamicDocument(MongoDBTestCase): doc = Doc() embedded_1 = Embedded() - embedded_1.string_field = 'hello' + embedded_1.string_field = "hello" embedded_1.int_field = 1 - embedded_1.dict_field = {'hello': 'world'} + embedded_1.dict_field = {"hello": "world"} embedded_2 = Embedded() - embedded_2.string_field = 'hello' + embedded_2.string_field = "hello" embedded_2.int_field = 1 - embedded_2.dict_field = {'hello': 'world'} - embedded_2.list_field = ['1', 2, {'hello': 'world'}] + embedded_2.dict_field = {"hello": "world"} + embedded_2.list_field = ["1", 2, {"hello": "world"}] - embedded_1.list_field = ['1', 2, embedded_2] + embedded_1.list_field = ["1", 2, embedded_2] doc.embedded_field = embedded_1 - self.assertEqual(doc.to_mongo(), { + assert doc.to_mongo() == { "embedded_field": { "_cls": "Embedded", "string_field": "hello", "int_field": 1, "dict_field": {"hello": "world"}, - "list_field": 
['1', 2, - {"_cls": "Embedded", - "string_field": "hello", - "int_field": 1, - "dict_field": {"hello": "world"}, - "list_field": ['1', 2, {'hello': 'world'}]} - ] + "list_field": [ + "1", + 2, + { + "_cls": "Embedded", + "string_field": "hello", + "int_field": 1, + "dict_field": {"hello": "world"}, + "list_field": ["1", 2, {"hello": "world"}], + }, + ], } - }) + } doc.save() doc = Doc.objects.first() - self.assertEqual(doc.embedded_field.__class__, Embedded) - self.assertEqual(doc.embedded_field.string_field, "hello") - self.assertEqual(doc.embedded_field.int_field, 1) - self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(doc.embedded_field.list_field[0], '1') - self.assertEqual(doc.embedded_field.list_field[1], 2) + assert doc.embedded_field.__class__ == Embedded + assert doc.embedded_field.string_field == "hello" + assert doc.embedded_field.int_field == 1 + assert doc.embedded_field.dict_field == {"hello": "world"} + assert doc.embedded_field.list_field[0] == "1" + assert doc.embedded_field.list_field[1] == 2 embedded_field = doc.embedded_field.list_field[2] - self.assertEqual(embedded_field.__class__, Embedded) - self.assertEqual(embedded_field.string_field, "hello") - self.assertEqual(embedded_field.int_field, 1) - self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) - self.assertEqual(embedded_field.list_field, ['1', 2, - {'hello': 'world'}]) + assert embedded_field.__class__ == Embedded + assert embedded_field.string_field == "hello" + assert embedded_field.int_field == 1 + assert embedded_field.dict_field == {"hello": "world"} + assert embedded_field.list_field == ["1", 2, {"hello": "world"}] def test_dynamic_and_embedded(self): """Ensure embedded documents play nicely""" @@ -368,18 +350,18 @@ class TestDynamicDocument(MongoDBTestCase): person.address.city = "Lundenne" person.save() - self.assertEqual(Person.objects.first().address.city, "Lundenne") + assert Person.objects.first().address.city == "Lundenne" 
person = Person.objects.first() person.address = Address(city="Londinium") person.save() - self.assertEqual(Person.objects.first().address.city, "Londinium") + assert Person.objects.first().address.city == "Londinium" person = Person.objects.first() person.age = 35 person.save() - self.assertEqual(Person.objects.first().age, 35) + assert Person.objects.first().age == 35 def test_dynamic_embedded_works_with_only(self): """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()""" @@ -392,10 +374,15 @@ class TestDynamicDocument(MongoDBTestCase): Person.drop_collection() - Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save() + Person( + name="Eric", address=Address(city="San Francisco", street_number="1337") + ).save() - self.assertEqual(Person.objects.first().address.street_number, '1337') - self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337') + assert Person.objects.first().address.street_number == "1337" + assert ( + Person.objects.only("address__street_number").first().address.street_number + == "1337" + ) def test_dynamic_and_embedded_dict_access(self): """Ensure embedded dynamic documents work with dict[] style access""" @@ -419,21 +406,21 @@ class TestDynamicDocument(MongoDBTestCase): person["address"]["city"] = "Lundenne" person.save() - self.assertEqual(Person.objects.first().address.city, "Lundenne") + assert Person.objects.first().address.city == "Lundenne" - self.assertEqual(Person.objects.first().phone, "555-1212") + assert Person.objects.first().phone == "555-1212" person = Person.objects.first() person.address = Address(city="Londinium") person.save() - self.assertEqual(Person.objects.first().address.city, "Londinium") + assert Person.objects.first().address.city == "Londinium" person = Person.objects.first() person["age"] = 35 person.save() - self.assertEqual(Person.objects.first().age, 35) + assert Person.objects.first().age == 35 -if __name__ == 
'__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/indexes.py b/tests/document/test_indexes.py similarity index 51% rename from tests/document/indexes.py rename to tests/document/test_indexes.py index 764ef0c5..45d1cd23 100644 --- a/tests/document/indexes.py +++ b/tests/document/test_indexes.py @@ -2,21 +2,17 @@ import unittest from datetime import datetime -from nose.plugins.skip import SkipTest +from pymongo.collation import Collation from pymongo.errors import OperationFailure -import pymongo -from six import iteritems +import pytest from mongoengine import * from mongoengine.connection import get_db -__all__ = ("IndexesTest", ) - - -class IndexesTest(unittest.TestCase): +class TestIndexes(unittest.TestCase): def setUp(self): - self.connection = connect(db='mongoenginetest') + self.connection = connect(db="mongoenginetest") self.db = get_db() class Person(Document): @@ -45,52 +41,43 @@ class IndexesTest(unittest.TestCase): self._index_test(DynamicDocument) def _index_test(self, InheritFrom): - class BlogPost(InheritFrom): - date = DateTimeField(db_field='addDate', default=datetime.now) + date = DateTimeField(db_field="addDate", default=datetime.now) category = StringField() tags = ListField(StringField()) - meta = { - 'indexes': [ - '-date', - 'tags', - ('category', '-date') - ] - } + meta = {"indexes": ["-date", "tags", ("category", "-date")]} - expected_specs = [{'fields': [('addDate', -1)]}, - {'fields': [('tags', 1)]}, - {'fields': [('category', 1), ('addDate', -1)]}] - self.assertEqual(expected_specs, BlogPost._meta['index_specs']) + expected_specs = [ + {"fields": [("addDate", -1)]}, + {"fields": [("tags", 1)]}, + {"fields": [("category", 1), ("addDate", -1)]}, + ] + assert expected_specs == BlogPost._meta["index_specs"] BlogPost.ensure_indexes() info = BlogPost.objects._collection.index_information() # _id, '-date', 'tags', ('cat', 'date') - self.assertEqual(len(info), 4) - info = [value['key'] for key, value in 
iteritems(info)] + assert len(info) == 4 + info = [value["key"] for key, value in info.items()] for expected in expected_specs: - self.assertIn(expected['fields'], info) + assert expected["fields"] in info def _index_test_inheritance(self, InheritFrom): - class BlogPost(InheritFrom): - date = DateTimeField(db_field='addDate', default=datetime.now) + date = DateTimeField(db_field="addDate", default=datetime.now) category = StringField() tags = ListField(StringField()) meta = { - 'indexes': [ - '-date', - 'tags', - ('category', '-date') - ], - 'allow_inheritance': True + "indexes": ["-date", "tags", ("category", "-date")], + "allow_inheritance": True, } - expected_specs = [{'fields': [('_cls', 1), ('addDate', -1)]}, - {'fields': [('_cls', 1), ('tags', 1)]}, - {'fields': [('_cls', 1), ('category', 1), - ('addDate', -1)]}] - self.assertEqual(expected_specs, BlogPost._meta['index_specs']) + expected_specs = [ + {"fields": [("_cls", 1), ("addDate", -1)]}, + {"fields": [("_cls", 1), ("tags", 1)]}, + {"fields": [("_cls", 1), ("category", 1), ("addDate", -1)]}, + ] + assert expected_specs == BlogPost._meta["index_specs"] BlogPost.ensure_indexes() info = BlogPost.objects._collection.index_information() @@ -98,25 +85,25 @@ class IndexesTest(unittest.TestCase): # NB: there is no index on _cls by itself, since # the indices on -date and tags will both contain # _cls as first element in the key - self.assertEqual(len(info), 4) - info = [value['key'] for key, value in iteritems(info)] + assert len(info) == 4 + info = [value["key"] for key, value in info.items()] for expected in expected_specs: - self.assertIn(expected['fields'], info) + assert expected["fields"] in info class ExtendedBlogPost(BlogPost): title = StringField() - meta = {'indexes': ['title']} + meta = {"indexes": ["title"]} - expected_specs.append({'fields': [('_cls', 1), ('title', 1)]}) - self.assertEqual(expected_specs, ExtendedBlogPost._meta['index_specs']) + expected_specs.append({"fields": [("_cls", 1), 
("title", 1)]}) + assert expected_specs == ExtendedBlogPost._meta["index_specs"] BlogPost.drop_collection() ExtendedBlogPost.ensure_indexes() info = ExtendedBlogPost.objects._collection.index_information() - info = [value['key'] for key, value in iteritems(info)] + info = [value["key"] for key, value in info.items()] for expected in expected_specs: - self.assertIn(expected['fields'], info) + assert expected["fields"] in info def test_indexes_document_inheritance(self): """Ensure that indexes are used when meta[indexes] is specified for @@ -135,21 +122,13 @@ class IndexesTest(unittest.TestCase): class A(Document): title = StringField() - meta = { - 'indexes': [ - { - 'fields': ('title',), - }, - ], - 'allow_inheritance': True, - } + meta = {"indexes": [{"fields": ("title",)}], "allow_inheritance": True} class B(A): description = StringField() - self.assertEqual(A._meta['index_specs'], B._meta['index_specs']) - self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}], - A._meta['index_specs']) + assert A._meta["index_specs"] == B._meta["index_specs"] + assert [{"fields": [("_cls", 1), ("title", 1)]}] == A._meta["index_specs"] def test_index_no_cls(self): """Ensure index specs are inhertited correctly""" @@ -157,51 +136,45 @@ class IndexesTest(unittest.TestCase): class A(Document): title = StringField() meta = { - 'indexes': [ - {'fields': ('title',), 'cls': False}, - ], - 'allow_inheritance': True, - 'index_cls': False - } + "indexes": [{"fields": ("title",), "cls": False}], + "allow_inheritance": True, + "index_cls": False, + } - self.assertEqual([('title', 1)], A._meta['index_specs'][0]['fields']) + assert [("title", 1)] == A._meta["index_specs"][0]["fields"] A._get_collection().drop_indexes() A.ensure_indexes() info = A._get_collection().index_information() - self.assertEqual(len(info.keys()), 2) + assert len(info.keys()) == 2 class B(A): c = StringField() d = StringField() meta = { - 'indexes': [{'fields': ['c']}, {'fields': ['d'], 'cls': True}], - 
'allow_inheritance': True + "indexes": [{"fields": ["c"]}, {"fields": ["d"], "cls": True}], + "allow_inheritance": True, } - self.assertEqual([('c', 1)], B._meta['index_specs'][1]['fields']) - self.assertEqual([('_cls', 1), ('d', 1)], B._meta['index_specs'][2]['fields']) + + assert [("c", 1)] == B._meta["index_specs"][1]["fields"] + assert [("_cls", 1), ("d", 1)] == B._meta["index_specs"][2]["fields"] def test_build_index_spec_is_not_destructive(self): - class MyDoc(Document): keywords = StringField() - meta = { - 'indexes': ['keywords'], - 'allow_inheritance': False - } + meta = {"indexes": ["keywords"], "allow_inheritance": False} - self.assertEqual(MyDoc._meta['index_specs'], - [{'fields': [('keywords', 1)]}]) + assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}] # Force index creation MyDoc.ensure_indexes() - self.assertEqual(MyDoc._meta['index_specs'], - [{'fields': [('keywords', 1)]}]) + assert MyDoc._meta["index_specs"] == [{"fields": [("keywords", 1)]}] def test_embedded_document_index_meta(self): """Ensure that embedded document indexes are created explicitly """ + class Rank(EmbeddedDocument): title = StringField(required=True) @@ -209,190 +182,169 @@ class IndexesTest(unittest.TestCase): name = StringField(required=True) rank = EmbeddedDocumentField(Rank, required=False) - meta = { - 'indexes': [ - 'rank.title', - ], - 'allow_inheritance': False - } + meta = {"indexes": ["rank.title"], "allow_inheritance": False} - self.assertEqual([{'fields': [('rank.title', 1)]}], - Person._meta['index_specs']) + assert [{"fields": [("rank.title", 1)]}] == Person._meta["index_specs"] Person.drop_collection() # Indexes are lazy so use list() to perform query list(Person.objects) info = Person.objects._collection.index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('rank.title', 1)], info) + info = [value["key"] for key, value in info.items()] + assert [("rank.title", 1)] in info def 
test_explicit_geo2d_index(self): """Ensure that geo2d indexes work when created via meta[indexes] """ + class Place(Document): location = DictField() - meta = { - 'allow_inheritance': True, - 'indexes': [ - '*location.point', - ] - } + meta = {"allow_inheritance": True, "indexes": ["*location.point"]} - self.assertEqual([{'fields': [('location.point', '2d')]}], - Place._meta['index_specs']) + assert [{"fields": [("location.point", "2d")]}] == Place._meta["index_specs"] Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('location.point', '2d')], info) + info = [value["key"] for key, value in info.items()] + assert [("location.point", "2d")] in info def test_explicit_geo2d_index_embedded(self): """Ensure that geo2d indexes work when created via meta[indexes] """ + class EmbeddedLocation(EmbeddedDocument): location = DictField() class Place(Document): - current = DictField(field=EmbeddedDocumentField('EmbeddedLocation')) - meta = { - 'allow_inheritance': True, - 'indexes': [ - '*current.location.point', - ] - } + current = DictField(field=EmbeddedDocumentField("EmbeddedLocation")) + meta = {"allow_inheritance": True, "indexes": ["*current.location.point"]} - self.assertEqual([{'fields': [('current.location.point', '2d')]}], - Place._meta['index_specs']) + assert [{"fields": [("current.location.point", "2d")]}] == Place._meta[ + "index_specs" + ] Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('current.location.point', '2d')], info) + info = [value["key"] for key, value in info.items()] + assert [("current.location.point", "2d")] in info def test_explicit_geosphere_index(self): """Ensure that geosphere indexes work when created via meta[indexes] """ + class Place(Document): location = DictField() - meta = { - 'allow_inheritance': True, - 'indexes': [ - '(location.point', 
- ] - } + meta = {"allow_inheritance": True, "indexes": ["(location.point"]} - self.assertEqual([{'fields': [('location.point', '2dsphere')]}], - Place._meta['index_specs']) + assert [{"fields": [("location.point", "2dsphere")]}] == Place._meta[ + "index_specs" + ] Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('location.point', '2dsphere')], info) + info = [value["key"] for key, value in info.items()] + assert [("location.point", "2dsphere")] in info def test_explicit_geohaystack_index(self): """Ensure that geohaystack indexes work when created via meta[indexes] """ - raise SkipTest('GeoHaystack index creation is not supported for now' - 'from meta, as it requires a bucketSize parameter.') + pytest.skip( + "GeoHaystack index creation is not supported for now" + "from meta, as it requires a bucketSize parameter." + ) class Place(Document): location = DictField() name = StringField() - meta = { - 'indexes': [ - (')location.point', 'name') - ] - } - self.assertEqual([{'fields': [('location.point', 'geoHaystack'), ('name', 1)]}], - Place._meta['index_specs']) + meta = {"indexes": [(")location.point", "name")]} + + assert [ + {"fields": [("location.point", "geoHaystack"), ("name", 1)]} + ] == Place._meta["index_specs"] Place.ensure_indexes() info = Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('location.point', 'geoHaystack')], info) + info = [value["key"] for key, value in info.items()] + assert [("location.point", "geoHaystack")] in info def test_create_geohaystack_index(self): """Ensure that geohaystack indexes can be created """ + class Place(Document): location = DictField() name = StringField() - Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) + Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) info = 
Place._get_collection().index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('location.point', 'geoHaystack'), ('name', 1)], info) + info = [value["key"] for key, value in info.items()] + assert [("location.point", "geoHaystack"), ("name", 1)] in info def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains dictionaries instead of lists. """ + class BlogPost(Document): - date = DateTimeField(db_field='addDate', default=datetime.now) + date = DateTimeField(db_field="addDate", default=datetime.now) category = StringField() tags = ListField(StringField()) - meta = { - 'indexes': [ - {'fields': ['-date'], 'unique': True, 'sparse': True}, - ], - } + meta = {"indexes": [{"fields": ["-date"], "unique": True, "sparse": True}]} - self.assertEqual([{'fields': [('addDate', -1)], 'unique': True, - 'sparse': True}], - BlogPost._meta['index_specs']) + assert [ + {"fields": [("addDate", -1)], "unique": True, "sparse": True} + ] == BlogPost._meta["index_specs"] BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() # _id, '-date' - self.assertEqual(len(info), 2) + assert len(info) == 2 # Indexes are lazy so use list() to perform query list(BlogPost.objects) info = BlogPost.objects._collection.index_information() - info = [(value['key'], - value.get('unique', False), - value.get('sparse', False)) - for key, value in iteritems(info)] - self.assertIn(([('addDate', -1)], True, True), info) + info = [ + (value["key"], value.get("unique", False), value.get("sparse", False)) + for key, value in info.items() + ] + assert ([("addDate", -1)], True, True) in info BlogPost.drop_collection() def test_abstract_index_inheritance(self): - class UserBase(Document): user_guid = StringField(required=True) meta = { - 'abstract': True, - 'indexes': ['user_guid'], - 'allow_inheritance': True + "abstract": True, + "indexes": ["user_guid"], + "allow_inheritance": True, } class Person(UserBase): 
name = StringField() - meta = { - 'indexes': ['name'], - } + meta = {"indexes": ["name"]} + Person.drop_collection() - Person(name="test", user_guid='123').save() + Person(name="test", user_guid="123").save() - self.assertEqual(1, Person.objects.count()) + assert 1 == Person.objects.count() info = Person.objects._collection.index_information() - self.assertEqual(sorted(info.keys()), - ['_cls_1_name_1', '_cls_1_user_guid_1', '_id_']) + assert sorted(info.keys()) == ["_cls_1_name_1", "_cls_1_user_guid_1", "_id_"] def test_disable_index_creation(self): """Tests setting auto_create_index to False on the connection will disable any index generation. """ + class User(Document): meta = { - 'allow_inheritance': True, - 'indexes': ['user_guid'], - 'auto_create_index': False + "allow_inheritance": True, + "indexes": ["user_guid"], + "auto_create_index": False, } user_guid = StringField(required=True) @@ -401,88 +353,81 @@ class IndexesTest(unittest.TestCase): User.drop_collection() - User(user_guid='123').save() - MongoUser(user_guid='123').save() + User(user_guid="123").save() + MongoUser(user_guid="123").save() - self.assertEqual(2, User.objects.count()) + assert 2 == User.objects.count() info = User.objects._collection.index_information() - self.assertEqual(list(info.keys()), ['_id_']) + assert list(info.keys()) == ["_id_"] User.ensure_indexes() info = User.objects._collection.index_information() - self.assertEqual(sorted(info.keys()), ['_cls_1_user_guid_1', '_id_']) + assert sorted(info.keys()) == ["_cls_1_user_guid_1", "_id_"] def test_embedded_document_index(self): """Tests settings an index on an embedded document """ + class Date(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") class BlogPost(Document): title = StringField() date = EmbeddedDocumentField(Date) - meta = { - 'indexes': [ - '-date.year' - ], - } + meta = {"indexes": ["-date.year"]} BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() - 
self.assertEqual(sorted(info.keys()), ['_id_', 'date.yr_-1']) + assert sorted(info.keys()) == ["_id_", "date.yr_-1"] def test_list_embedded_document_index(self): """Ensure list embedded documents can be indexed """ + class Tag(EmbeddedDocument): - name = StringField(db_field='tag') + name = StringField(db_field="tag") class BlogPost(Document): title = StringField() tags = ListField(EmbeddedDocumentField(Tag)) - meta = { - 'indexes': [ - 'tags.name' - ] - } + meta = {"indexes": ["tags.name"]} BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() # we don't use _cls in with list fields by default - self.assertEqual(sorted(info.keys()), ['_id_', 'tags.tag_1']) + assert sorted(info.keys()) == ["_id_", "tags.tag_1"] - post1 = BlogPost(title="Embedded Indexes tests in place", - tags=[Tag(name="about"), Tag(name="time")]) + post1 = BlogPost( + title="Embedded Indexes tests in place", + tags=[Tag(name="about"), Tag(name="time")], + ) post1.save() def test_recursive_embedded_objects_dont_break_indexes(self): - class RecursiveObject(EmbeddedDocument): - obj = EmbeddedDocumentField('self') + obj = EmbeddedDocumentField("self") class RecursiveDocument(Document): recursive_obj = EmbeddedDocumentField(RecursiveObject) - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} RecursiveDocument.ensure_indexes() info = RecursiveDocument._get_collection().index_information() - self.assertEqual(sorted(info.keys()), ['_cls_1', '_id_']) + assert sorted(info.keys()) == ["_cls_1", "_id_"] def test_covered_index(self): """Ensure that covered indexes can be used """ + class Test(Document): a = IntField() b = IntField() - meta = { - 'indexes': ['a'], - 'allow_inheritance': False - } + meta = {"indexes": ["a"], "allow_inheritance": False} Test.drop_collection() @@ -491,45 +436,50 @@ class IndexesTest(unittest.TestCase): # Need to be explicit about covered indexes as mongoDB doesn't know if # the documents returned might have more keys in that 
here. - query_plan = Test.objects(id=obj.id).exclude('a').explain() - self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), - 'IDHACK' + query_plan = Test.objects(id=obj.id).exclude("a").explain() + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage") + == "IDHACK" ) - query_plan = Test.objects(id=obj.id).only('id').explain() - self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), - 'IDHACK' + query_plan = Test.objects(id=obj.id).only("id").explain() + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage") + == "IDHACK" ) - query_plan = Test.objects(a=1).only('a').exclude('id').explain() - self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), - 'IXSCAN' + query_plan = Test.objects(a=1).only("a").exclude("id").explain() + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage") + == "IXSCAN" ) - self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('stage'), - 'PROJECTION' + assert ( + query_plan.get("queryPlanner").get("winningPlan").get("stage") + == "PROJECTION" ) query_plan = Test.objects(a=1).explain() - self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), - 'IXSCAN' - ) - self.assertEqual( - query_plan.get('queryPlanner').get('winningPlan').get('stage'), - 'FETCH' + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage") + == "IXSCAN" ) + assert query_plan.get("queryPlanner").get("winningPlan").get("stage") == "FETCH" def test_index_on_id(self): class BlogPost(Document): - meta = { - 'indexes': [ - ['categories', 'id'] - ] - } + meta = {"indexes": [["categories", "id"]]} title = StringField(required=True) description = StringField(required=True) @@ -538,22 +488,14 @@ 
class IndexesTest(unittest.TestCase): BlogPost.drop_collection() indexes = BlogPost.objects._collection.index_information() - self.assertEqual(indexes['categories_1__id_1']['key'], - [('categories', 1), ('_id', 1)]) + assert indexes["categories_1__id_1"]["key"] == [("categories", 1), ("_id", 1)] def test_hint(self): - TAGS_INDEX_NAME = 'tags_1' + TAGS_INDEX_NAME = "tags_1" class BlogPost(Document): tags = ListField(StringField()) - meta = { - 'indexes': [ - { - 'fields': ['tags'], - 'name': TAGS_INDEX_NAME - } - ], - } + meta = {"indexes": [{"fields": ["tags"], "name": TAGS_INDEX_NAME}]} BlogPost.drop_collection() @@ -562,97 +504,141 @@ class IndexesTest(unittest.TestCase): BlogPost(tags=tags).save() # Hinting by shape should work. - self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) + assert BlogPost.objects.hint([("tags", 1)]).count() == 10 # Hinting by index name should work. - self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10) + assert BlogPost.objects.hint(TAGS_INDEX_NAME).count() == 10 # Clearing the hint should work fine. - self.assertEqual(BlogPost.objects.hint().count(), 10) - self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).hint().count(), 10) + assert BlogPost.objects.hint().count() == 10 + assert BlogPost.objects.hint([("ZZ", 1)]).hint().count() == 10 # Hinting on a non-existent index shape should fail. - with self.assertRaises(OperationFailure): - BlogPost.objects.hint([('ZZ', 1)]).count() + with pytest.raises(OperationFailure): + BlogPost.objects.hint([("ZZ", 1)]).count() # Hinting on a non-existent index name should fail. - with self.assertRaises(OperationFailure): - BlogPost.objects.hint('Bad Name').count() + with pytest.raises(OperationFailure): + BlogPost.objects.hint("Bad Name").count() # Invalid shape argument (missing list brackets) should fail. 
- with self.assertRaises(ValueError): - BlogPost.objects.hint(('tags', 1)).count() + with pytest.raises(ValueError): + BlogPost.objects.hint(("tags", 1)).count() + + def test_collation(self): + base = {"locale": "en", "strength": 2} + + class BlogPost(Document): + name = StringField() + meta = { + "indexes": [ + {"fields": ["name"], "name": "name_index", "collation": base} + ] + } + + BlogPost.drop_collection() + + names = ["tag1", "Tag2", "tag3", "Tag4", "tag5"] + for name in names: + BlogPost(name=name).save() + + query_result = BlogPost.objects.collation(base).order_by("name") + assert [x.name for x in query_result] == sorted(names, key=lambda x: x.lower()) + assert 5 == query_result.count() + + query_result = BlogPost.objects.collation(Collation(**base)).order_by("name") + assert [x.name for x in query_result] == sorted(names, key=lambda x: x.lower()) + assert 5 == query_result.count() + + incorrect_collation = {"arndom": "wrdo"} + with pytest.raises(OperationFailure) as exc_info: + BlogPost.objects.collation(incorrect_collation).count() + assert "Missing expected field" in str(exc_info.value) + + query_result = BlogPost.objects.collation({}).order_by("name") + assert [x.name for x in query_result] == sorted(names) def test_unique(self): """Ensure that uniqueness constraints are applied to fields. 
""" + class BlogPost(Document): title = StringField() slug = StringField(unique=True) BlogPost.drop_collection() - post1 = BlogPost(title='test1', slug='test') + post1 = BlogPost(title="test1", slug="test") post1.save() # Two posts with the same slug is not allowed - post2 = BlogPost(title='test2', slug='test') - self.assertRaises(NotUniqueError, post2.save) - self.assertRaises(NotUniqueError, BlogPost.objects.insert, post2) + post2 = BlogPost(title="test2", slug="test") + with pytest.raises(NotUniqueError): + post2.save() + with pytest.raises(NotUniqueError): + BlogPost.objects.insert(post2) # Ensure backwards compatibility for errors - self.assertRaises(OperationError, post2.save) + with pytest.raises(OperationError): + post2.save() def test_primary_key_unique_not_working(self): """Relates to #1445""" + class Blog(Document): id = StringField(primary_key=True, unique=True) Blog.drop_collection() - with self.assertRaises(OperationFailure) as ctx_err: - Blog(id='garbage').save() + with pytest.raises(OperationFailure) as exc_info: + Blog(id="garbage").save() # One of the errors below should happen. Which one depends on the # PyMongo version and dict order. - err_msg = str(ctx_err.exception) - self.assertTrue( - any([ - "The field 'unique' is not valid for an _id index specification" in err_msg, - "The field 'background' is not valid for an _id index specification" in err_msg, - "The field 'sparse' is not valid for an _id index specification" in err_msg, - ]) + err_msg = str(exc_info.value) + assert any( + [ + "The field 'unique' is not valid for an _id index specification" + in err_msg, + "The field 'background' is not valid for an _id index specification" + in err_msg, + "The field 'sparse' is not valid for an _id index specification" + in err_msg, + ] ) def test_unique_with(self): """Ensure that unique_with constraints are applied to fields. 
""" + class Date(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") class BlogPost(Document): title = StringField() date = EmbeddedDocumentField(Date) - slug = StringField(unique_with='date.year') + slug = StringField(unique_with="date.year") BlogPost.drop_collection() - post1 = BlogPost(title='test1', date=Date(year=2009), slug='test') + post1 = BlogPost(title="test1", date=Date(year=2009), slug="test") post1.save() # day is different so won't raise exception - post2 = BlogPost(title='test2', date=Date(year=2010), slug='test') + post2 = BlogPost(title="test2", date=Date(year=2010), slug="test") post2.save() # Now there will be two docs with the same slug and the same day: fail - post3 = BlogPost(title='test3', date=Date(year=2010), slug='test') - self.assertRaises(OperationError, post3.save) + post3 = BlogPost(title="test3", date=Date(year=2010), slug="test") + with pytest.raises(OperationError): + post3.save() def test_unique_embedded_document(self): """Ensure that uniqueness constraints are applied to fields on embedded documents. 
""" + class SubDocument(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") slug = StringField(unique=True) class BlogPost(Document): @@ -661,19 +647,17 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() - post1 = BlogPost(title='test1', - sub=SubDocument(year=2009, slug="test")) + post1 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test")) post1.save() # sub.slug is different so won't raise exception - post2 = BlogPost(title='test2', - sub=SubDocument(year=2010, slug='another-slug')) + post2 = BlogPost(title="test2", sub=SubDocument(year=2010, slug="another-slug")) post2.save() # Now there will be two docs with the same sub.slug - post3 = BlogPost(title='test3', - sub=SubDocument(year=2010, slug='test')) - self.assertRaises(NotUniqueError, post3.save) + post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test")) + with pytest.raises(NotUniqueError): + post3.save() def test_unique_embedded_document_in_list(self): """ @@ -681,8 +665,9 @@ class IndexesTest(unittest.TestCase): embedded documents, even when the embedded documents in in a list field. 
""" + class SubDocument(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") slug = StringField(unique=True) class BlogPost(Document): @@ -692,18 +677,18 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() post1 = BlogPost( - title='test1', subs=[ - SubDocument(year=2009, slug='conflict'), - SubDocument(year=2009, slug='conflict') - ] + title="test1", + subs=[ + SubDocument(year=2009, slug="conflict"), + SubDocument(year=2009, slug="conflict"), + ], ) post1.save() - post2 = BlogPost( - title='test2', subs=[SubDocument(year=2014, slug='conflict')] - ) + post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")]) - self.assertRaises(NotUniqueError, post2.save) + with pytest.raises(NotUniqueError): + post2.save() def test_unique_embedded_document_in_sorted_list(self): """ @@ -711,35 +696,35 @@ class IndexesTest(unittest.TestCase): embedded documents, even when the embedded documents in a sorted list field. """ + class SubDocument(EmbeddedDocument): year = IntField() slug = StringField(unique=True) class BlogPost(Document): title = StringField() - subs = SortedListField(EmbeddedDocumentField(SubDocument), - ordering='year') + subs = SortedListField(EmbeddedDocumentField(SubDocument), ordering="year") BlogPost.drop_collection() post1 = BlogPost( - title='test1', subs=[ - SubDocument(year=2009, slug='conflict'), - SubDocument(year=2009, slug='conflict') - ] + title="test1", + subs=[ + SubDocument(year=2009, slug="conflict"), + SubDocument(year=2009, slug="conflict"), + ], ) post1.save() # confirm that the unique index is created indexes = BlogPost._get_collection().index_information() - self.assertIn('subs.slug_1', indexes) - self.assertTrue(indexes['subs.slug_1']['unique']) + assert "subs.slug_1" in indexes + assert indexes["subs.slug_1"]["unique"] - post2 = BlogPost( - title='test2', subs=[SubDocument(year=2014, slug='conflict')] - ) + post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, 
slug="conflict")]) - self.assertRaises(NotUniqueError, post2.save) + with pytest.raises(NotUniqueError): + post2.save() def test_unique_embedded_document_in_embedded_document_list(self): """ @@ -747,6 +732,7 @@ class IndexesTest(unittest.TestCase): embedded documents, even when the embedded documents in an embedded list field. """ + class SubDocument(EmbeddedDocument): year = IntField() slug = StringField(unique=True) @@ -758,111 +744,90 @@ class IndexesTest(unittest.TestCase): BlogPost.drop_collection() post1 = BlogPost( - title='test1', subs=[ - SubDocument(year=2009, slug='conflict'), - SubDocument(year=2009, slug='conflict') - ] + title="test1", + subs=[ + SubDocument(year=2009, slug="conflict"), + SubDocument(year=2009, slug="conflict"), + ], ) post1.save() # confirm that the unique index is created indexes = BlogPost._get_collection().index_information() - self.assertIn('subs.slug_1', indexes) - self.assertTrue(indexes['subs.slug_1']['unique']) + assert "subs.slug_1" in indexes + assert indexes["subs.slug_1"]["unique"] - post2 = BlogPost( - title='test2', subs=[SubDocument(year=2014, slug='conflict')] - ) + post2 = BlogPost(title="test2", subs=[SubDocument(year=2014, slug="conflict")]) - self.assertRaises(NotUniqueError, post2.save) + with pytest.raises(NotUniqueError): + post2.save() def test_unique_with_embedded_document_and_embedded_unique(self): """Ensure that uniqueness constraints are applied to fields on embedded documents. And work with unique_with as well. 
""" + class SubDocument(EmbeddedDocument): - year = IntField(db_field='yr') + year = IntField(db_field="yr") slug = StringField(unique=True) class BlogPost(Document): - title = StringField(unique_with='sub.year') + title = StringField(unique_with="sub.year") sub = EmbeddedDocumentField(SubDocument) BlogPost.drop_collection() - post1 = BlogPost(title='test1', - sub=SubDocument(year=2009, slug="test")) + post1 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test")) post1.save() # sub.slug is different so won't raise exception - post2 = BlogPost(title='test2', - sub=SubDocument(year=2010, slug='another-slug')) + post2 = BlogPost(title="test2", sub=SubDocument(year=2010, slug="another-slug")) post2.save() # Now there will be two docs with the same sub.slug - post3 = BlogPost(title='test3', - sub=SubDocument(year=2010, slug='test')) - self.assertRaises(NotUniqueError, post3.save) + post3 = BlogPost(title="test3", sub=SubDocument(year=2010, slug="test")) + with pytest.raises(NotUniqueError): + post3.save() # Now there will be two docs with the same title and year - post3 = BlogPost(title='test1', - sub=SubDocument(year=2009, slug='test-1')) - self.assertRaises(NotUniqueError, post3.save) + post3 = BlogPost(title="test1", sub=SubDocument(year=2009, slug="test-1")) + with pytest.raises(NotUniqueError): + post3.save() def test_ttl_indexes(self): - class Log(Document): created = DateTimeField(default=datetime.now) - meta = { - 'indexes': [ - {'fields': ['created'], 'expireAfterSeconds': 3600} - ] - } + meta = {"indexes": [{"fields": ["created"], "expireAfterSeconds": 3600}]} Log.drop_collection() # Indexes are lazy so use list() to perform query list(Log.objects) info = Log.objects._collection.index_information() - self.assertEqual(3600, - info['created_1']['expireAfterSeconds']) - - def test_index_drop_dups_silently_ignored(self): - class Customer(Document): - cust_id = IntField(unique=True, required=True) - meta = { - 'indexes': ['cust_id'], - 'index_drop_dups': 
True, - 'allow_inheritance': False, - } - - Customer.drop_collection() - Customer.objects.first() + assert 3600 == info["created_1"]["expireAfterSeconds"] def test_unique_and_indexes(self): """Ensure that 'unique' constraints aren't overridden by meta.indexes. """ + class Customer(Document): cust_id = IntField(unique=True, required=True) - meta = { - 'indexes': ['cust_id'], - 'allow_inheritance': False, - } + meta = {"indexes": ["cust_id"], "allow_inheritance": False} Customer.drop_collection() cust = Customer(cust_id=1) cust.save() cust_dupe = Customer(cust_id=1) - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): cust_dupe.save() cust = Customer(cust_id=2) cust.save() # duplicate key on update - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): cust.cust_id = 1 cust.save() @@ -870,37 +835,39 @@ class IndexesTest(unittest.TestCase): """If you set a field as primary, then unexpected behaviour can occur. You won't create a duplicate but you will update an existing document. 
""" + class User(Document): name = StringField(primary_key=True) password = StringField() User.drop_collection() - user = User(name='huangz', password='secret') + user = User(name="huangz", password="secret") user.save() - user = User(name='huangz', password='secret2') + user = User(name="huangz", password="secret2") user.save() - self.assertEqual(User.objects.count(), 1) - self.assertEqual(User.objects.get().password, 'secret2') + assert User.objects.count() == 1 + assert User.objects.get().password == "secret2" def test_unique_and_primary_create(self): """Create a new record with a duplicate primary key throws an exception """ + class User(Document): name = StringField(primary_key=True) password = StringField() User.drop_collection() - User.objects.create(name='huangz', password='secret') - with self.assertRaises(NotUniqueError): - User.objects.create(name='huangz', password='secret2') + User.objects.create(name="huangz", password="secret") + with pytest.raises(NotUniqueError): + User.objects.create(name="huangz", password="secret2") - self.assertEqual(User.objects.count(), 1) - self.assertEqual(User.objects.get().password, 'secret') + assert User.objects.count() == 1 + assert User.objects.get().password == "secret" def test_index_with_pk(self): """Ensure you can use `pk` as part of a query""" @@ -909,21 +876,24 @@ class IndexesTest(unittest.TestCase): comment_id = IntField(required=True) try: + class BlogPost(Document): comments = EmbeddedDocumentField(Comment) - meta = {'indexes': [ - {'fields': ['pk', 'comments.comment_id'], - 'unique': True}]} + meta = { + "indexes": [ + {"fields": ["pk", "comments.comment_id"], "unique": True} + ] + } + except UnboundLocalError: - self.fail('Unbound local error at index + pk definition') + self.fail("Unbound local error at index + pk definition") info = BlogPost.objects._collection.index_information() - info = [value['key'] for key, value in iteritems(info)] - index_item = [('_id', 1), ('comments.comment_id', 1)] - 
self.assertIn(index_item, info) + info = [value["key"] for key, value in info.items()] + index_item = [("_id", 1), ("comments.comment_id", 1)] + assert index_item in info def test_compound_key_embedded(self): - class CompoundKey(EmbeddedDocument): name = StringField(required=True) term = StringField(required=True) @@ -935,12 +905,10 @@ class IndexesTest(unittest.TestCase): my_key = CompoundKey(name="n", term="ok") report = ReportEmbedded(text="OK", key=my_key).save() - self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, - report.to_mongo()) - self.assertEqual(report, ReportEmbedded.objects.get(pk=my_key)) + assert {"text": "OK", "_id": {"term": "ok", "name": "n"}} == report.to_mongo() + assert report == ReportEmbedded.objects.get(pk=my_key) def test_compound_key_dictfield(self): - class ReportDictField(Document): key = DictField(primary_key=True) text = StringField() @@ -948,65 +916,57 @@ class IndexesTest(unittest.TestCase): my_key = {"name": "n", "term": "ok"} report = ReportDictField(text="OK", key=my_key).save() - self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}}, - report.to_mongo()) + assert {"text": "OK", "_id": {"term": "ok", "name": "n"}} == report.to_mongo() # We can't directly call ReportDictField.objects.get(pk=my_key), # because dicts are unordered, and if the order in MongoDB is # different than the one in `my_key`, this test will fail. 
- self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key['name'])) - self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key['term'])) + assert report == ReportDictField.objects.get(pk__name=my_key["name"]) + assert report == ReportDictField.objects.get(pk__term=my_key["term"]) def test_string_indexes(self): - class MyDoc(Document): provider_ids = DictField() - meta = { - "indexes": ["provider_ids.foo", "provider_ids.bar"], - } + meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]} info = MyDoc.objects._collection.index_information() - info = [value['key'] for key, value in iteritems(info)] - self.assertIn([('provider_ids.foo', 1)], info) - self.assertIn([('provider_ids.bar', 1)], info) + info = [value["key"] for key, value in info.items()] + assert [("provider_ids.foo", 1)] in info + assert [("provider_ids.bar", 1)] in info def test_sparse_compound_indexes(self): - class MyDoc(Document): provider_ids = DictField() meta = { - "indexes": [{'fields': ("provider_ids.foo", "provider_ids.bar"), - 'sparse': True}], + "indexes": [ + {"fields": ("provider_ids.foo", "provider_ids.bar"), "sparse": True} + ] } info = MyDoc.objects._collection.index_information() - self.assertEqual([('provider_ids.foo', 1), ('provider_ids.bar', 1)], - info['provider_ids.foo_1_provider_ids.bar_1']['key']) - self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) + assert [("provider_ids.foo", 1), ("provider_ids.bar", 1)] == info[ + "provider_ids.foo_1_provider_ids.bar_1" + ]["key"] + assert info["provider_ids.foo_1_provider_ids.bar_1"]["sparse"] def test_text_indexes(self): class Book(Document): title = DictField() - meta = { - "indexes": ["$title"], - } + meta = {"indexes": ["$title"]} indexes = Book.objects._collection.index_information() - self.assertIn("title_text", indexes) + assert "title_text" in indexes key = indexes["title_text"]["key"] - self.assertIn(('_fts', 'text'), key) + assert ("_fts", "text") in key def 
test_hashed_indexes(self): - class Book(Document): ref_id = StringField() - meta = { - "indexes": ["#ref_id"], - } + meta = {"indexes": ["#ref_id"]} indexes = Book.objects._collection.index_information() - self.assertIn("ref_id_hashed", indexes) - self.assertIn(('ref_id', 'hashed'), indexes["ref_id_hashed"]["key"]) + assert "ref_id_hashed" in indexes + assert ("ref_id", "hashed") in indexes["ref_id_hashed"]["key"] def test_indexes_after_database_drop(self): """ @@ -1017,35 +977,37 @@ class IndexesTest(unittest.TestCase): """ # Use a new connection and database since dropping the database could # cause concurrent tests to fail. - connection = connect(db='tempdatabase', - alias='test_indexes_after_database_drop') + connection = connect( + db="tempdatabase", alias="test_indexes_after_database_drop" + ) class BlogPost(Document): title = StringField() slug = StringField(unique=True) - meta = {'db_alias': 'test_indexes_after_database_drop'} + meta = {"db_alias": "test_indexes_after_database_drop"} try: BlogPost.drop_collection() # Create Post #1 - post1 = BlogPost(title='test1', slug='test') + post1 = BlogPost(title="test1", slug="test") post1.save() # Drop the Database - connection.drop_database('tempdatabase') + connection.drop_database("tempdatabase") # Re-create Post #1 - post1 = BlogPost(title='test1', slug='test') + post1 = BlogPost(title="test1", slug="test") post1.save() # Create Post #2 - post2 = BlogPost(title='test2', slug='test') - self.assertRaises(NotUniqueError, post2.save) + post2 = BlogPost(title="test2", slug="test") + with pytest.raises(NotUniqueError): + post2.save() finally: # Drop the temporary database at the end - connection.drop_database('tempdatabase') + connection.drop_database("tempdatabase") def test_index_dont_send_cls_option(self): """ @@ -1057,24 +1019,19 @@ class IndexesTest(unittest.TestCase): options that are passed to ensureIndex. 
For more details, see: https://jira.mongodb.org/browse/SERVER-769 """ + class TestDoc(Document): txt = StringField() meta = { - 'allow_inheritance': True, - 'indexes': [ - {'fields': ('txt',), 'cls': False} - ] + "allow_inheritance": True, + "indexes": [{"fields": ("txt",), "cls": False}], } class TestChildDoc(TestDoc): txt2 = StringField() - meta = { - 'indexes': [ - {'fields': ('txt2',), 'cls': False} - ] - } + meta = {"indexes": [{"fields": ("txt2",), "cls": False}]} TestDoc.drop_collection() TestDoc.ensure_indexes() @@ -1082,54 +1039,44 @@ class IndexesTest(unittest.TestCase): index_info = TestDoc._get_collection().index_information() for key in index_info: - del index_info[key]['v'] # drop the index version - we don't care about that here - if 'ns' in index_info[key]: - del index_info[key]['ns'] # drop the index namespace - we don't care about that here, MongoDB 3+ - if 'dropDups' in index_info[key]: - del index_info[key]['dropDups'] # drop the index dropDups - it is deprecated in MongoDB 3+ + del index_info[key][ + "v" + ] # drop the index version - we don't care about that here + if "ns" in index_info[key]: + del index_info[key][ + "ns" + ] # drop the index namespace - we don't care about that here, MongoDB 3+ - self.assertEqual(index_info, { - 'txt_1': { - 'key': [('txt', 1)], - 'background': False - }, - '_id_': { - 'key': [('_id', 1)], - }, - 'txt2_1': { - 'key': [('txt2', 1)], - 'background': False - }, - '_cls_1': { - 'key': [('_cls', 1)], - 'background': False, - } - }) + assert index_info == { + "txt_1": {"key": [("txt", 1)], "background": False}, + "_id_": {"key": [("_id", 1)]}, + "txt2_1": {"key": [("txt2", 1)], "background": False}, + "_cls_1": {"key": [("_cls", 1)], "background": False}, + } def test_compound_index_underscore_cls_not_overwritten(self): """ Test that the compound index doesn't get another _cls when it is specified """ + class TestDoc(Document): shard_1 = StringField() txt_1 = StringField() meta = { - 'collection': 'test', - 
'allow_inheritance': True, - 'sparse': True, - 'shard_key': 'shard_1', - 'indexes': [ - ('shard_1', '_cls', 'txt_1'), - ] + "collection": "test", + "allow_inheritance": True, + "sparse": True, + "shard_key": "shard_1", + "indexes": [("shard_1", "_cls", "txt_1")], } TestDoc.drop_collection() TestDoc.ensure_indexes() index_info = TestDoc._get_collection().index_information() - self.assertIn('shard_1_1__cls_1_txt_1_1', index_info) + assert "shard_1_1__cls_1_txt_1_1" in index_info -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py new file mode 100644 index 00000000..53a1489b --- /dev/null +++ b/tests/document/test_inheritance.py @@ -0,0 +1,631 @@ +# -*- coding: utf-8 -*- +import unittest +import warnings + +import pytest + +from mongoengine import ( + BooleanField, + Document, + EmbeddedDocument, + EmbeddedDocumentField, + GenericReferenceField, + IntField, + ReferenceField, + StringField, +) +from mongoengine.pymongo_support import list_collection_names +from tests.fixtures import Base +from tests.utils import MongoDBTestCase + + +class TestInheritance(MongoDBTestCase): + def tearDown(self): + for collection in list_collection_names(self.db): + self.db.drop_collection(collection) + + def test_constructor_cls(self): + # Ensures _cls is properly set during construction + # and when object gets reloaded (prevent regression of #1950) + class EmbedData(EmbeddedDocument): + data = StringField() + meta = {"allow_inheritance": True} + + class DataDoc(Document): + name = StringField() + embed = EmbeddedDocumentField(EmbedData) + meta = {"allow_inheritance": True} + + test_doc = DataDoc(name="test", embed=EmbedData(data="data")) + assert test_doc._cls == "DataDoc" + assert test_doc.embed._cls == "EmbedData" + test_doc.save() + saved_doc = DataDoc.objects.with_id(test_doc.id) + assert test_doc._cls == saved_doc._cls + assert test_doc.embed._cls == saved_doc.embed._cls + 
test_doc.delete() + + def test_superclasses(self): + """Ensure that the correct list of superclasses is assembled. + """ + + class Animal(Document): + meta = {"allow_inheritance": True} + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass + + assert Animal._superclasses == () + assert Fish._superclasses == ("Animal",) + assert Guppy._superclasses == ("Animal", "Animal.Fish") + assert Mammal._superclasses == ("Animal",) + assert Dog._superclasses == ("Animal", "Animal.Mammal") + assert Human._superclasses == ("Animal", "Animal.Mammal") + + def test_external_superclasses(self): + """Ensure that the correct list of super classes is assembled when + importing part of the model. + """ + + class Animal(Base): + pass + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass + + assert Animal._superclasses == ("Base",) + assert Fish._superclasses == ("Base", "Base.Animal") + assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish") + assert Mammal._superclasses == ("Base", "Base.Animal") + assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal") + assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal") + + def test_subclasses(self): + """Ensure that the correct list of _subclasses (subclasses) is + assembled. 
+ """ + + class Animal(Document): + meta = {"allow_inheritance": True} + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass + + assert Animal._subclasses == ( + "Animal", + "Animal.Fish", + "Animal.Fish.Guppy", + "Animal.Mammal", + "Animal.Mammal.Dog", + "Animal.Mammal.Human", + ) + assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy") + assert Guppy._subclasses == ("Animal.Fish.Guppy",) + assert Mammal._subclasses == ( + "Animal.Mammal", + "Animal.Mammal.Dog", + "Animal.Mammal.Human", + ) + assert Human._subclasses == ("Animal.Mammal.Human",) + + def test_external_subclasses(self): + """Ensure that the correct list of _subclasses (subclasses) is + assembled when importing part of the model. + """ + + class Animal(Base): + pass + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass + + assert Animal._subclasses == ( + "Base.Animal", + "Base.Animal.Fish", + "Base.Animal.Fish.Guppy", + "Base.Animal.Mammal", + "Base.Animal.Mammal.Dog", + "Base.Animal.Mammal.Human", + ) + assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy") + assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",) + assert Mammal._subclasses == ( + "Base.Animal.Mammal", + "Base.Animal.Mammal.Dog", + "Base.Animal.Mammal.Human", + ) + assert Human._subclasses == ("Base.Animal.Mammal.Human",) + + def test_dynamic_declarations(self): + """Test that declaring an extra class updates meta data""" + + class Animal(Document): + meta = {"allow_inheritance": True} + + assert Animal._superclasses == () + assert Animal._subclasses == ("Animal",) + + # Test dynamically adding a class changes the meta data + class Fish(Animal): + pass + + assert Animal._superclasses == () + assert Animal._subclasses == ("Animal", "Animal.Fish") + + assert Fish._superclasses == ("Animal",) + assert 
Fish._subclasses == ("Animal.Fish",) + + # Test dynamically adding an inherited class changes the meta data + class Pike(Fish): + pass + + assert Animal._superclasses == () + assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike") + + assert Fish._superclasses == ("Animal",) + assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike") + + assert Pike._superclasses == ("Animal", "Animal.Fish") + assert Pike._subclasses == ("Animal.Fish.Pike",) + + def test_inheritance_meta_data(self): + """Ensure that document may inherit fields from a superclass document. + """ + + class Person(Document): + name = StringField() + age = IntField() + + meta = {"allow_inheritance": True} + + class Employee(Person): + salary = IntField() + + assert ["_cls", "age", "id", "name", "salary"] == sorted( + Employee._fields.keys() + ) + assert Employee._get_collection_name() == Person._get_collection_name() + + def test_inheritance_to_mongo_keys(self): + """Ensure that document may inherit fields from a superclass document. + """ + + class Person(Document): + name = StringField() + age = IntField() + + meta = {"allow_inheritance": True} + + class Employee(Person): + salary = IntField() + + assert ["_cls", "age", "id", "name", "salary"] == sorted( + Employee._fields.keys() + ) + assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] + assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ + "_cls", + "name", + "age", + "salary", + ] + assert Employee._get_collection_name() == Person._get_collection_name() + + def test_indexes_and_multiple_inheritance(self): + """ Ensure that all of the indexes are created for a document with + multiple inheritance. 
+ """ + + class A(Document): + a = StringField() + + meta = {"allow_inheritance": True, "indexes": ["a"]} + + class B(Document): + b = StringField() + + meta = {"allow_inheritance": True, "indexes": ["b"]} + + class C(A, B): + pass + + A.drop_collection() + B.drop_collection() + C.drop_collection() + + C.ensure_indexes() + + assert sorted( + [idx["key"] for idx in C._get_collection().index_information().values()] + ) == sorted( + [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]] + ) + + def test_polymorphic_queries(self): + """Ensure that the correct subclasses are returned from a query + """ + + class Animal(Document): + meta = {"allow_inheritance": True} + + class Fish(Animal): + pass + + class Mammal(Animal): + pass + + class Dog(Mammal): + pass + + class Human(Mammal): + pass + + Animal.drop_collection() + + Animal().save() + Fish().save() + Mammal().save() + Dog().save() + Human().save() + + classes = [obj.__class__ for obj in Animal.objects] + assert classes == [Animal, Fish, Mammal, Dog, Human] + + classes = [obj.__class__ for obj in Mammal.objects] + assert classes == [Mammal, Dog, Human] + + classes = [obj.__class__ for obj in Human.objects] + assert classes == [Human] + + def test_allow_inheritance(self): + """Ensure that inheritance is disabled by default on simple + classes and that _cls will not be used. + """ + + class Animal(Document): + name = StringField() + + # can't inherit because Animal didn't explicitly allow inheritance + with pytest.raises(ValueError, match="Document Animal may not be subclassed"): + + class Dog(Animal): + pass + + # Check that _cls etc aren't present on simple documents + dog = Animal(name="dog").save() + assert dog.to_mongo().keys() == ["_id", "name"] + + collection = self.db[Animal._get_collection_name()] + obj = collection.find_one() + assert "_cls" not in obj + + def test_cant_turn_off_inheritance_on_subclass(self): + """Ensure if inheritance is on in a subclass you cant turn it off. 
+ """ + + class Animal(Document): + name = StringField() + meta = {"allow_inheritance": True} + + with pytest.raises(ValueError) as exc_info: + + class Mammal(Animal): + meta = {"allow_inheritance": False} + + assert ( + str(exc_info.value) + == 'Only direct subclasses of Document may set "allow_inheritance" to False' + ) + + def test_allow_inheritance_abstract_document(self): + """Ensure that abstract documents can set inheritance rules and that + _cls will not be used. + """ + + class FinalDocument(Document): + meta = {"abstract": True, "allow_inheritance": False} + + class Animal(FinalDocument): + name = StringField() + + with pytest.raises(ValueError): + + class Mammal(Animal): + pass + + # Check that _cls isn't present in simple documents + doc = Animal(name="dog") + assert "_cls" not in doc.to_mongo() + + def test_using_abstract_class_in_reference_field(self): + # Ensures no regression of #1920 + class AbstractHuman(Document): + meta = {"abstract": True} + + class Dad(AbstractHuman): + name = StringField() + + class Home(Document): + dad = ReferenceField(AbstractHuman) # Referencing the abstract class + address = StringField() + + dad = Dad(name="5").save() + Home(dad=dad, address="street").save() + + home = Home.objects.first() + home.address = "garbage" + home.save() # Was failing with ValidationError + + def test_abstract_class_referencing_self(self): + # Ensures no regression of #1920 + class Human(Document): + meta = {"abstract": True} + creator = ReferenceField("self", dbref=True) + + class User(Human): + name = StringField() + + user = User(name="John").save() + user2 = User(name="Foo", creator=user).save() + + user2 = User.objects.with_id(user2.id) + user2.name = "Bar" + user2.save() # Was failing with ValidationError + + def test_abstract_handle_ids_in_metaclass_properly(self): + class City(Document): + continent = StringField() + meta = {"abstract": True, "allow_inheritance": False} + + class EuropeanCity(City): + name = StringField() + + berlin = 
EuropeanCity(name="Berlin", continent="Europe") + assert len(berlin._db_field_map) == len(berlin._fields_ordered) + assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) + assert len(berlin._fields_ordered) == 3 + assert berlin._fields_ordered[0] == "id" + + def test_auto_id_not_set_if_specific_in_parent_class(self): + class City(Document): + continent = StringField() + city_id = IntField(primary_key=True) + meta = {"abstract": True, "allow_inheritance": False} + + class EuropeanCity(City): + name = StringField() + + berlin = EuropeanCity(name="Berlin", continent="Europe") + assert len(berlin._db_field_map) == len(berlin._fields_ordered) + assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) + assert len(berlin._fields_ordered) == 3 + assert berlin._fields_ordered[0] == "city_id" + + def test_auto_id_vs_non_pk_id_field(self): + class City(Document): + continent = StringField() + id = IntField() + meta = {"abstract": True, "allow_inheritance": False} + + class EuropeanCity(City): + name = StringField() + + berlin = EuropeanCity(name="Berlin", continent="Europe") + assert len(berlin._db_field_map) == len(berlin._fields_ordered) + assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) + assert len(berlin._fields_ordered) == 4 + assert berlin._fields_ordered[0] == "auto_id_0" + berlin.save() + assert berlin.pk == berlin.auto_id_0 + + def test_abstract_document_creation_does_not_fail(self): + class City(Document): + continent = StringField() + meta = {"abstract": True, "allow_inheritance": False} + + city = City(continent="asia") + assert city.pk is None + # TODO: expected error? Shouldn't we create a new error type? 
+ with pytest.raises(KeyError): + setattr(city, "pk", 1) + + def test_allow_inheritance_embedded_document(self): + """Ensure embedded documents respect inheritance.""" + + class Comment(EmbeddedDocument): + content = StringField() + + with pytest.raises(ValueError): + + class SpecialComment(Comment): + pass + + doc = Comment(content="test") + assert "_cls" not in doc.to_mongo() + + class Comment(EmbeddedDocument): + content = StringField() + meta = {"allow_inheritance": True} + + doc = Comment(content="test") + assert "_cls" in doc.to_mongo() + + def test_document_inheritance(self): + """Ensure mutliple inheritance of abstract documents + """ + + class DateCreatedDocument(Document): + meta = {"allow_inheritance": True, "abstract": True} + + class DateUpdatedDocument(Document): + meta = {"allow_inheritance": True, "abstract": True} + + try: + + class MyDocument(DateCreatedDocument, DateUpdatedDocument): + pass + + except Exception: + assert False, "Couldn't create MyDocument class" + + def test_abstract_documents(self): + """Ensure that a document superclass can be marked as abstract + thereby not using it as the name for the collection.""" + + defaults = { + "index_background": True, + "index_opts": {"hello": "world"}, + "allow_inheritance": True, + "queryset_class": "QuerySet", + "db_alias": "myDB", + "shard_key": ("hello", "world"), + } + + meta_settings = {"abstract": True} + meta_settings.update(defaults) + + class Animal(Document): + name = StringField() + meta = meta_settings + + class Fish(Animal): + pass + + class Guppy(Fish): + pass + + class Mammal(Animal): + meta = {"abstract": True} + + class Human(Mammal): + pass + + for k, v in defaults.items(): + for cls in [Animal, Fish, Guppy]: + assert cls._meta[k] == v + + assert "collection" not in Animal._meta + assert "collection" not in Mammal._meta + + assert Animal._get_collection_name() is None + assert Mammal._get_collection_name() is None + + assert Fish._get_collection_name() == "fish" + assert 
Guppy._get_collection_name() == "fish" + assert Human._get_collection_name() == "human" + + # ensure that a subclass of a non-abstract class can't be abstract + with pytest.raises(ValueError): + + class EvilHuman(Human): + evil = BooleanField(default=True) + meta = {"abstract": True} + + def test_abstract_embedded_documents(self): + # 789: EmbeddedDocument shouldn't inherit abstract + class A(EmbeddedDocument): + meta = {"abstract": True} + + class B(A): + pass + + assert not B._meta["abstract"] + + def test_inherited_collections(self): + """Ensure that subclassed documents don't override parents' + collections + """ + + class Drink(Document): + name = StringField() + meta = {"allow_inheritance": True} + + class Drinker(Document): + drink = GenericReferenceField() + + try: + warnings.simplefilter("error") + + class AcloholicDrink(Drink): + meta = {"collection": "booze"} + + except SyntaxWarning: + warnings.simplefilter("ignore") + + class AlcoholicDrink(Drink): + meta = {"collection": "booze"} + + else: + raise AssertionError("SyntaxWarning should be triggered") + + warnings.resetwarnings() + + Drink.drop_collection() + AlcoholicDrink.drop_collection() + Drinker.drop_collection() + + red_bull = Drink(name="Red Bull") + red_bull.save() + + programmer = Drinker(drink=red_bull) + programmer.save() + + beer = AlcoholicDrink(name="Beer") + beer.save() + real_person = Drinker(drink=beer) + real_person.save() + + assert Drinker.objects[0].drink.name == red_bull.name + assert Drinker.objects[1].drink.name == beer.name + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/document/instance.py b/tests/document/test_instance.py similarity index 58% rename from tests/document/instance.py rename to tests/document/test_instance.py index 02617b67..8d42d15b 100644 --- a/tests/document/instance.py +++ b/tests/document/test_instance.py @@ -9,31 +9,38 @@ from datetime import datetime import bson from bson import DBRef, ObjectId from pymongo.errors import 
DuplicateKeyError -from six import iteritems +import pytest from mongoengine import * from mongoengine import signals from mongoengine.base import _document_registry, get_document from mongoengine.connection import get_db from mongoengine.context_managers import query_counter, switch_db -from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, \ - InvalidQueryError, NotRegistered, NotUniqueError, SaveConditionError) +from mongoengine.errors import ( + FieldDoesNotExist, + InvalidDocumentError, + InvalidQueryError, + NotRegistered, + NotUniqueError, + SaveConditionError, +) from mongoengine.mongodb_support import MONGODB_34, MONGODB_36, get_mongodb_version from mongoengine.pymongo_support import list_collection_names from mongoengine.queryset import NULLIFY, Q from tests import fixtures -from tests.fixtures import (PickleDynamicEmbedded, PickleDynamicTest, \ - PickleEmbedded, PickleSignalsTest, PickleTest) +from tests.fixtures import ( + PickleDynamicEmbedded, + PickleDynamicTest, + PickleEmbedded, + PickleSignalsTest, + PickleTest, +) from tests.utils import MongoDBTestCase, get_as_pymongo -TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), - '../fields/mongoengine.png') - -__all__ = ("InstanceTest",) +TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "../fields/mongoengine.png") -class InstanceTest(MongoDBTestCase): - +class TestDocumentInstance(MongoDBTestCase): def setUp(self): class Job(EmbeddedDocument): name = StringField() @@ -56,26 +63,24 @@ class InstanceTest(MongoDBTestCase): self.db.drop_collection(collection) def assertDbEqual(self, docs): - self.assertEqual( - list(self.Person._get_collection().find().sort("id")), - sorted(docs, key=lambda doc: doc["_id"])) + assert list(self.Person._get_collection().find().sort("id")) == sorted( + docs, key=lambda doc: doc["_id"] + ) def assertHasInstance(self, field, instance): - self.assertTrue(hasattr(field, "_instance")) - self.assertTrue(field._instance is not None) + assert 
hasattr(field, "_instance") + assert field._instance is not None if isinstance(field._instance, weakref.ProxyType): - self.assertTrue(field._instance.__eq__(instance)) + assert field._instance.__eq__(instance) else: - self.assertEqual(field._instance, instance) + assert field._instance == instance def test_capped_collection(self): """Ensure that capped collections work properly.""" + class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 10, - 'max_size': 4096, - } + meta = {"max_documents": 10, "max_size": 4096} Log.drop_collection() @@ -83,35 +88,32 @@ class InstanceTest(MongoDBTestCase): for _ in range(10): Log().save() - self.assertEqual(Log.objects.count(), 10) + assert Log.objects.count() == 10 # Check that extra documents don't increase the size Log().save() - self.assertEqual(Log.objects.count(), 10) + assert Log.objects.count() == 10 options = Log.objects._collection.options() - self.assertEqual(options['capped'], True) - self.assertEqual(options['max'], 10) - self.assertEqual(options['size'], 4096) + assert options["capped"] is True + assert options["max"] == 10 + assert options["size"] == 4096 # Check that the document cannot be redefined with different options class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 11, - } + meta = {"max_documents": 11} # Accessing Document.objects creates the collection - with self.assertRaises(InvalidCollectionError): + with pytest.raises(InvalidCollectionError): Log.objects def test_capped_collection_default(self): """Ensure that capped collections defaults work properly.""" + class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 10, - } + meta = {"max_documents": 10} Log.drop_collection() @@ -119,16 +121,14 @@ class InstanceTest(MongoDBTestCase): Log().save() options = Log.objects._collection.options() - self.assertEqual(options['capped'], True) - self.assertEqual(options['max'], 10) - 
self.assertEqual(options['size'], 10 * 2**20) + assert options["capped"] is True + assert options["max"] == 10 + assert options["size"] == 10 * 2 ** 20 # Check that the document with default value can be recreated class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_documents': 10, - } + meta = {"max_documents": 10} # Create the collection by accessing Document.objects Log.objects @@ -138,11 +138,10 @@ class InstanceTest(MongoDBTestCase): MongoDB rounds up max_size to next multiple of 256, recreating a doc with the same spec failed in mongoengine <0.10 """ + class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_size': 10000, - } + meta = {"max_size": 10000} Log.drop_collection() @@ -150,15 +149,13 @@ class InstanceTest(MongoDBTestCase): Log().save() options = Log.objects._collection.options() - self.assertEqual(options['capped'], True) - self.assertTrue(options['size'] >= 10000) + assert options["capped"] is True + assert options["size"] >= 10000 # Check that the document with odd max_size value can be recreated class Log(Document): date = DateTimeField(default=datetime.now) - meta = { - 'max_size': 10000, - } + meta = {"max_size": 10000} # Create the collection by accessing Document.objects Log.objects @@ -166,30 +163,32 @@ class InstanceTest(MongoDBTestCase): def test_repr(self): """Ensure that unicode representation works """ + class Article(Document): title = StringField() def __unicode__(self): return self.title - doc = Article(title=u'привет мир') + doc = Article(title=u"привет мир") - self.assertEqual('', repr(doc)) + assert "" == repr(doc) def test_repr_none(self): """Ensure None values are handled correctly.""" + class Article(Document): title = StringField() def __str__(self): return None - doc = Article(title=u'привет мир') - self.assertEqual('', repr(doc)) + doc = Article(title=u"привет мир") + assert "" == repr(doc) def test_queryset_resurrects_dropped_collection(self): 
self.Person.drop_collection() - self.assertEqual([], list(self.Person.objects())) + assert list(self.Person.objects()) == [] # Ensure works correctly with inhertited classes class Actor(self.Person): @@ -197,14 +196,15 @@ class InstanceTest(MongoDBTestCase): Actor.objects() self.Person.drop_collection() - self.assertEqual([], list(Actor.objects())) + assert list(Actor.objects()) == [] def test_polymorphic_references(self): """Ensure that the correct subclasses are returned from a query when using references / generic references """ + class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Fish(Animal): pass @@ -236,7 +236,7 @@ class InstanceTest(MongoDBTestCase): zoo.reload() classes = [a.__class__ for a in Zoo.objects.first().animals] - self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) + assert classes == [Animal, Fish, Mammal, Dog, Human] Zoo.drop_collection() @@ -249,13 +249,13 @@ class InstanceTest(MongoDBTestCase): zoo.reload() classes = [a.__class__ for a in Zoo.objects.first().animals] - self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) + assert classes == [Animal, Fish, Mammal, Dog, Human] def test_reference_inheritance(self): class Stats(Document): created = DateTimeField(default=datetime.now) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class CompareStats(Document): generated = DateTimeField(default=datetime.now) @@ -274,10 +274,11 @@ class InstanceTest(MongoDBTestCase): cmp_stats = CompareStats(stats=list_stats) cmp_stats.save() - self.assertEqual(list_stats, CompareStats.objects.first().stats) + assert list_stats == CompareStats.objects.first().stats def test_db_field_load(self): """Ensure we load data correctly from the right db field.""" + class Person(Document): name = StringField(required=True) _rank = StringField(required=False, db_field="rank") @@ -292,19 +293,18 @@ class InstanceTest(MongoDBTestCase): Person(name="Fred").save() - 
self.assertEqual(Person.objects.get(name="Jack").rank, "Corporal") - self.assertEqual(Person.objects.get(name="Fred").rank, "Private") + assert Person.objects.get(name="Jack").rank == "Corporal" + assert Person.objects.get(name="Fred").rank == "Private" def test_db_embedded_doc_field_load(self): """Ensure we load embedded document data correctly.""" + class Rank(EmbeddedDocument): title = StringField(required=True) class Person(Document): name = StringField(required=True) - rank_ = EmbeddedDocumentField(Rank, - required=False, - db_field='rank') + rank_ = EmbeddedDocumentField(Rank, required=False, db_field="rank") @property def rank(self): @@ -317,58 +317,59 @@ class InstanceTest(MongoDBTestCase): Person(name="Jack", rank_=Rank(title="Corporal")).save() Person(name="Fred").save() - self.assertEqual(Person.objects.get(name="Jack").rank, "Corporal") - self.assertEqual(Person.objects.get(name="Fred").rank, "Private") + assert Person.objects.get(name="Jack").rank == "Corporal" + assert Person.objects.get(name="Fred").rank == "Private" def test_custom_id_field(self): """Ensure that documents may be created with custom primary keys.""" + class User(Document): username = StringField(primary_key=True) name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} User.drop_collection() - self.assertEqual(User._fields['username'].db_field, '_id') - self.assertEqual(User._meta['id_field'], 'username') + assert User._fields["username"].db_field == "_id" + assert User._meta["id_field"] == "username" - User.objects.create(username='test', name='test user') + User.objects.create(username="test", name="test user") user = User.objects.first() - self.assertEqual(user.id, 'test') - self.assertEqual(user.pk, 'test') + assert user.id == "test" + assert user.pk == "test" user_dict = User.objects._collection.find_one() - self.assertEqual(user_dict['_id'], 'test') + assert user_dict["_id"] == "test" def test_change_custom_id_field_in_subclass(self): 
"""Subclasses cannot override which field is the primary key.""" + class User(Document): username = StringField(primary_key=True) name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} + + with pytest.raises(ValueError, match="Cannot override primary key field"): - with self.assertRaises(ValueError) as e: class EmailUser(User): email = StringField(primary_key=True) - exc = e.exception - self.assertEqual(str(exc), 'Cannot override primary key field') def test_custom_id_field_is_required(self): """Ensure the custom primary key field is required.""" + class User(Document): username = StringField(primary_key=True) name = StringField() - with self.assertRaises(ValidationError) as e: - User(name='test').save() - exc = e.exception - self.assertTrue("Field is required: ['username']" in str(exc)) + with pytest.raises(ValidationError) as exc_info: + User(name="test").save() + assert "Field is required: ['username']" in str(exc_info.value) def test_document_not_registered(self): class Place(Document): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class NicePlace(Place): pass @@ -380,51 +381,53 @@ class InstanceTest(MongoDBTestCase): # Mimic Place and NicePlace definitions being in a different file # and the NicePlace model not being imported in at query time. 
- del(_document_registry['Place.NicePlace']) + del _document_registry["Place.NicePlace"] - with self.assertRaises(NotRegistered): + with pytest.raises(NotRegistered): list(Place.objects.all()) def test_document_registry_regressions(self): class Location(Document): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Area(Location): - location = ReferenceField('Location', dbref=True) + location = ReferenceField("Location", dbref=True) Location.drop_collection() - self.assertEqual(Area, get_document("Area")) - self.assertEqual(Area, get_document("Location.Area")) + assert Area == get_document("Area") + assert Area == get_document("Location.Area") def test_creation(self): """Ensure that document may be created using keyword arguments.""" person = self.Person(name="Test User", age=30) - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 30) + assert person.name == "Test User" + assert person.age == 30 def test_to_dbref(self): """Ensure that you can get a dbref of a document.""" person = self.Person(name="Test User", age=30) - self.assertRaises(OperationError, person.to_dbref) + with pytest.raises(OperationError): + person.to_dbref() person.save() person.to_dbref() def test_key_like_attribute_access(self): person = self.Person(age=30) - self.assertEqual(person['age'], 30) - with self.assertRaises(KeyError): - person['unknown_attr'] + assert person["age"] == 30 + with pytest.raises(KeyError): + person["unknown_attr"] def test_save_abstract_document(self): """Saving an abstract document should fail.""" + class Doc(Document): name = StringField() - meta = {'abstract': True} + meta = {"abstract": True} - with self.assertRaises(InvalidDocumentError): - Doc(name='aaa').save() + with pytest.raises(InvalidDocumentError): + Doc(name="aaa").save() def test_reload(self): """Ensure that attributes may be reloaded.""" @@ -436,39 +439,53 @@ class InstanceTest(MongoDBTestCase): person_obj.age = 21 person_obj.save() 
- self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 20) + assert person.name == "Test User" + assert person.age == 20 - person.reload('age') - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 21) + person.reload("age") + assert person.name == "Test User" + assert person.age == 21 person.reload() - self.assertEqual(person.name, "Mr Test User") - self.assertEqual(person.age, 21) + assert person.name == "Mr Test User" + assert person.age == 21 person.reload() - self.assertEqual(person.name, "Mr Test User") - self.assertEqual(person.age, 21) + assert person.name == "Mr Test User" + assert person.age == 21 def test_reload_sharded(self): class Animal(Document): superphylum = StringField() - meta = {'shard_key': ('superphylum',)} + meta = {"shard_key": ("superphylum",)} Animal.drop_collection() - doc = Animal(superphylum='Deuterostomia') - doc.save() + doc = Animal.objects.create(superphylum="Deuterostomia") mongo_db = get_mongodb_version() - CMD_QUERY_KEY = 'command' if mongo_db >= MONGODB_36 else 'query' - + CMD_QUERY_KEY = "command" if mongo_db >= MONGODB_36 else "query" with query_counter() as q: doc.reload() - query_op = q.db.system.profile.find({'ns': 'mongoenginetest.animal'})[0] - self.assertEqual(set(query_op[CMD_QUERY_KEY]['filter'].keys()), set(['_id', 'superphylum'])) + query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0] + assert set(query_op[CMD_QUERY_KEY]["filter"].keys()) == { + "_id", + "superphylum", + } - Animal.drop_collection() + def test_reload_sharded_with_db_field(self): + class Person(Document): + nationality = StringField(db_field="country") + meta = {"shard_key": ("nationality",)} + + Person.drop_collection() + doc = Person.objects.create(nationality="Poland") + + mongo_db = get_mongodb_version() + CMD_QUERY_KEY = "command" if mongo_db >= MONGODB_36 else "query" + with query_counter() as q: + doc.reload() + query_op = q.db.system.profile.find({"ns": 
"mongoenginetest.person"})[0] + assert set(query_op[CMD_QUERY_KEY]["filter"].keys()) == {"_id", "country"} def test_reload_sharded_nested(self): class SuperPhylum(EmbeddedDocument): @@ -476,10 +493,10 @@ class InstanceTest(MongoDBTestCase): class Animal(Document): superphylum = EmbeddedDocumentField(SuperPhylum) - meta = {'shard_key': ('superphylum.name',)} + meta = {"shard_key": ("superphylum.name",)} Animal.drop_collection() - doc = Animal(superphylum=SuperPhylum(name='Deuterostomia')) + doc = Animal(superphylum=SuperPhylum(name="Deuterostomia")) doc.save() doc.reload() Animal.drop_collection() @@ -488,49 +505,53 @@ class InstanceTest(MongoDBTestCase): """Ensures updating a doc with a specified shard_key includes it in the query. """ + class Animal(Document): is_mammal = BooleanField() name = StringField() - meta = {'shard_key': ('is_mammal', 'id')} + meta = {"shard_key": ("is_mammal", "id")} Animal.drop_collection() - doc = Animal(is_mammal=True, name='Dog') + doc = Animal(is_mammal=True, name="Dog") doc.save() mongo_db = get_mongodb_version() with query_counter() as q: - doc.name = 'Cat' + doc.name = "Cat" doc.save() - query_op = q.db.system.profile.find({'ns': 'mongoenginetest.animal'})[0] - self.assertEqual(query_op['op'], 'update') + query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0] + assert query_op["op"] == "update" if mongo_db <= MONGODB_34: - self.assertEqual(set(query_op['query'].keys()), set(['_id', 'is_mammal'])) + assert set(query_op["query"].keys()) == set(["_id", "is_mammal"]) else: - self.assertEqual(set(query_op['command']['q'].keys()), set(['_id', 'is_mammal'])) + assert set(query_op["command"]["q"].keys()) == set(["_id", "is_mammal"]) Animal.drop_collection() def test_reload_with_changed_fields(self): """Ensures reloading will not affect changed fields""" + class User(Document): name = StringField() number = IntField() + User.drop_collection() user = User(name="Bob", number=1).save() user.name = "John" user.number = 2 - 
self.assertEqual(user._get_changed_fields(), ['name', 'number']) - user.reload('number') - self.assertEqual(user._get_changed_fields(), ['name']) + assert user._get_changed_fields() == ["name", "number"] + user.reload("number") + assert user._get_changed_fields() == ["name"] user.save() user.reload() - self.assertEqual(user.name, "John") + assert user.name == "John" def test_reload_referencing(self): """Ensures reloading updates weakrefs correctly.""" + class Embedded(EmbeddedDocument): dict_field = DictField() list_field = ListField() @@ -542,97 +563,101 @@ class InstanceTest(MongoDBTestCase): Doc.drop_collection() doc = Doc() - doc.dict_field = {'hello': 'world'} - doc.list_field = ['1', 2, {'hello': 'world'}] + doc.dict_field = {"hello": "world"} + doc.list_field = ["1", 2, {"hello": "world"}] embedded_1 = Embedded() - embedded_1.dict_field = {'hello': 'world'} - embedded_1.list_field = ['1', 2, {'hello': 'world'}] + embedded_1.dict_field = {"hello": "world"} + embedded_1.list_field = ["1", 2, {"hello": "world"}] doc.embedded_field = embedded_1 doc.save() doc = doc.reload(10) doc.list_field.append(1) - doc.dict_field['woot'] = "woot" + doc.dict_field["woot"] = "woot" doc.embedded_field.list_field.append(1) - doc.embedded_field.dict_field['woot'] = "woot" + doc.embedded_field.dict_field["woot"] = "woot" - self.assertEqual(doc._get_changed_fields(), [ - 'list_field', 'dict_field.woot', 'embedded_field.list_field', - 'embedded_field.dict_field.woot']) + changed = doc._get_changed_fields() + assert changed == [ + "list_field", + "dict_field.woot", + "embedded_field.list_field", + "embedded_field.dict_field.woot", + ] doc.save() - self.assertEqual(len(doc.list_field), 4) + assert len(doc.list_field) == 4 doc = doc.reload(10) - self.assertEqual(doc._get_changed_fields(), []) - self.assertEqual(len(doc.list_field), 4) - self.assertEqual(len(doc.dict_field), 2) - self.assertEqual(len(doc.embedded_field.list_field), 4) - 
self.assertEqual(len(doc.embedded_field.dict_field), 2) + assert doc._get_changed_fields() == [] + assert len(doc.list_field) == 4 + assert len(doc.dict_field) == 2 + assert len(doc.embedded_field.list_field) == 4 + assert len(doc.embedded_field.dict_field) == 2 doc.list_field.append(1) doc.save() - doc.dict_field['extra'] = 1 - doc = doc.reload(10, 'list_field') - self.assertEqual(doc._get_changed_fields(), ['dict_field.extra']) - self.assertEqual(len(doc.list_field), 5) - self.assertEqual(len(doc.dict_field), 3) - self.assertEqual(len(doc.embedded_field.list_field), 4) - self.assertEqual(len(doc.embedded_field.dict_field), 2) + doc.dict_field["extra"] = 1 + doc = doc.reload(10, "list_field") + assert doc._get_changed_fields() == ["dict_field.extra"] + assert len(doc.list_field) == 5 + assert len(doc.dict_field) == 3 + assert len(doc.embedded_field.list_field) == 4 + assert len(doc.embedded_field.dict_field) == 2 def test_reload_doesnt_exist(self): class Foo(Document): pass f = Foo() - with self.assertRaises(Foo.DoesNotExist): + with pytest.raises(Foo.DoesNotExist): f.reload() f.save() f.delete() - with self.assertRaises(Foo.DoesNotExist): + with pytest.raises(Foo.DoesNotExist): f.reload() def test_reload_of_non_strict_with_special_field_name(self): - """Ensures reloading works for documents with meta strict == False.""" + """Ensures reloading works for documents with meta strict is False.""" + class Post(Document): - meta = { - 'strict': False - } + meta = {"strict": False} title = StringField() items = ListField() Post.drop_collection() - Post._get_collection().insert_one({ - "title": "Items eclipse", - "items": ["more lorem", "even more ipsum"] - }) + Post._get_collection().insert_one( + {"title": "Items eclipse", "items": ["more lorem", "even more ipsum"]} + ) post = Post.objects.first() post.reload() - self.assertEqual(post.title, "Items eclipse") - self.assertEqual(post.items, ["more lorem", "even more ipsum"]) + assert post.title == "Items eclipse" + assert 
post.items == ["more lorem", "even more ipsum"] def test_dictionary_access(self): """Ensure that dictionary-style field access works properly.""" - person = self.Person(name='Test User', age=30, job=self.Job()) - self.assertEqual(person['name'], 'Test User') + person = self.Person(name="Test User", age=30, job=self.Job()) + assert person["name"] == "Test User" - self.assertRaises(KeyError, person.__getitem__, 'salary') - self.assertRaises(KeyError, person.__setitem__, 'salary', 50) + with pytest.raises(KeyError): + person.__getitem__("salary") + with pytest.raises(KeyError): + person.__setitem__("salary", 50) - person['name'] = 'Another User' - self.assertEqual(person['name'], 'Another User') + person["name"] = "Another User" + assert person["name"] == "Another User" # Length = length(assigned fields + id) - self.assertEqual(len(person), 5) + assert len(person) == 5 - self.assertIn('age', person) + assert "age" in person person.age = None - self.assertNotIn('age', person) - self.assertNotIn('nationality', person) + assert "age" not in person + assert "nationality" not in person def test_embedded_document_to_mongo(self): class Person(EmbeddedDocument): @@ -644,29 +669,33 @@ class InstanceTest(MongoDBTestCase): class Employee(Person): salary = IntField() - self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), - ['_cls', 'name', 'age']) - self.assertEqual( - Employee(name="Bob", age=35, salary=0).to_mongo().keys(), - ['_cls', 'name', 'age', 'salary']) + assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] + assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ + "_cls", + "name", + "age", + "salary", + ] def test_embedded_document_to_mongo_id(self): class SubDoc(EmbeddedDocument): id = StringField(required=True) sub_doc = SubDoc(id="abc") - self.assertEqual(sub_doc.to_mongo().keys(), ['id']) + assert sub_doc.to_mongo().keys() == ["id"] def test_embedded_document(self): """Ensure that embedded documents are set up 
correctly.""" + class Comment(EmbeddedDocument): content = StringField() - self.assertIn('content', Comment._fields) - self.assertNotIn('id', Comment._fields) + assert "content" in Comment._fields + assert "id" not in Comment._fields def test_embedded_document_instance(self): """Ensure that embedded documents can reference parent instance.""" + class Embedded(EmbeddedDocument): string = StringField() @@ -686,6 +715,7 @@ class InstanceTest(MongoDBTestCase): """Ensure that embedded documents in complex fields can reference parent instance. """ + class Embedded(EmbeddedDocument): string = StringField() @@ -702,15 +732,19 @@ class InstanceTest(MongoDBTestCase): def test_embedded_document_complex_instance_no_use_db_field(self): """Ensure that use_db_field is propagated to list of Emb Docs.""" + class Embedded(EmbeddedDocument): - string = StringField(db_field='s') + string = StringField(db_field="s") class Doc(Document): embedded_field = ListField(EmbeddedDocumentField(Embedded)) - d = Doc(embedded_field=[Embedded(string="Hi")]).to_mongo( - use_db_field=False).to_dict() - self.assertEqual(d['embedded_field'], [{'string': 'Hi'}]) + d = ( + Doc(embedded_field=[Embedded(string="Hi")]) + .to_mongo(use_db_field=False) + .to_dict() + ) + assert d["embedded_field"] == [{"string": "Hi"}] def test_instance_is_set_on_setattr(self): class Email(EmbeddedDocument): @@ -722,7 +756,7 @@ class InstanceTest(MongoDBTestCase): Account.drop_collection() acc = Account() - acc.email = Email(email='test@example.com') + acc.email = Email(email="test@example.com") self.assertHasInstance(acc._data["email"], acc) acc.save() @@ -738,7 +772,7 @@ class InstanceTest(MongoDBTestCase): Account.drop_collection() acc = Account() - acc.emails = [Email(email='test@example.com')] + acc.emails = [Email(email="test@example.com")] self.assertHasInstance(acc._data["emails"][0], acc) acc.save() @@ -753,7 +787,7 @@ class InstanceTest(MongoDBTestCase): def clean(self): raise CustomError() - with 
self.assertRaises(CustomError): + with pytest.raises(CustomError): TestDocument().save() TestDocument().save(clean=False) @@ -764,22 +798,19 @@ class InstanceTest(MongoDBTestCase): @classmethod def pre_save_post_validation(cls, sender, document, **kwargs): - document.content = 'checked' + document.content = "checked" - signals.pre_save_post_validation.connect(BlogPost.pre_save_post_validation, sender=BlogPost) + signals.pre_save_post_validation.connect( + BlogPost.pre_save_post_validation, sender=BlogPost + ) BlogPost.drop_collection() - post = BlogPost(content='unchecked').save() - self.assertEqual(post.content, 'checked') + post = BlogPost(content="unchecked").save() + assert post.content == "checked" # Make sure pre_save_post_validation changes makes it to the db raw_doc = get_as_pymongo(post) - self.assertEqual( - raw_doc, - { - 'content': 'checked', - '_id': post.id - }) + assert raw_doc == {"content": "checked", "_id": post.id} # Important to disconnect as it could cause some assertions in test_signals # to fail (due to the garbage collection timing of this signal) @@ -800,23 +831,17 @@ class InstanceTest(MongoDBTestCase): # Ensure clean=False prevent call to clean t = TestDocument(status="published") t.save(clean=False) - self.assertEqual(t.status, "published") - self.assertEqual(t.cleaned, False) + assert t.status == "published" + assert t.cleaned is False t = TestDocument(status="published") - self.assertEqual(t.cleaned, False) + assert t.cleaned is False t.save(clean=True) - self.assertEqual(t.status, "published") - self.assertEqual(t.cleaned, True) + assert t.status == "published" + assert t.cleaned is True raw_doc = get_as_pymongo(t) # Make sure clean changes makes it to the db - self.assertEqual( - raw_doc, - { - 'status': 'published', - 'cleaned': True, - '_id': t.id - }) + assert raw_doc == {"status": "published", "cleaned": True, "_id": t.id} def test_document_embedded_clean(self): class TestEmbeddedDocument(EmbeddedDocument): @@ -824,12 +849,12 @@ 
class InstanceTest(MongoDBTestCase): y = IntField(required=True) z = IntField(required=True) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} def clean(self): if self.z: if self.z != self.x + self.y: - raise ValidationError('Value of z != x + y') + raise ValidationError("Value of z != x + y") else: self.z = self.x + self.y @@ -841,15 +866,15 @@ class InstanceTest(MongoDBTestCase): t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) - with self.assertRaises(ValidationError) as cm: + with pytest.raises(ValidationError) as exc_info: t.save() expected_msg = "Value of z != x + y" - self.assertIn(expected_msg, cm.exception.message) - self.assertEqual(cm.exception.to_dict(), {'doc': {'__all__': expected_msg}}) + assert expected_msg in str(exc_info.value) + assert exc_info.value.to_dict() == {"doc": {"__all__": expected_msg}} t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() - self.assertEqual(t.doc.z, 35) + assert t.doc.z == 35 # Asserts not raises t = TestDocument(doc=TestEmbeddedDocument(x=15, y=35, z=5)) @@ -858,7 +883,7 @@ class InstanceTest(MongoDBTestCase): def test_modify_empty(self): doc = self.Person(name="bob", age=10).save() - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): self.Person().modify(set__age=10) self.assertDbEqual([dict(doc.to_mongo())]) @@ -868,8 +893,8 @@ class InstanceTest(MongoDBTestCase): doc2 = self.Person(name="jim", age=20).save() docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] - with self.assertRaises(InvalidQueryError): - doc1.modify({'id': doc2.id}, set__value=20) + with pytest.raises(InvalidQueryError): + doc1.modify({"id": doc2.id}, set__value=20) self.assertDbEqual(docs) @@ -878,8 +903,8 @@ class InstanceTest(MongoDBTestCase): doc2 = self.Person(name="jim", age=20).save() docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] - n_modified = doc1.modify({'name': doc2.name}, set__age=100) - self.assertEqual(n_modified, 0) + n_modified = 
doc1.modify({"name": doc2.name}, set__age=100) + assert n_modified == 0 self.assertDbEqual(docs) @@ -888,15 +913,16 @@ class InstanceTest(MongoDBTestCase): doc2 = self.Person(id=ObjectId(), name="jim", age=20) docs = [dict(doc1.to_mongo())] - n_modified = doc2.modify({'name': doc2.name}, set__age=100) - self.assertEqual(n_modified, 0) + n_modified = doc2.modify({"name": doc2.name}, set__age=100) + assert n_modified == 0 self.assertDbEqual(docs) def test_modify_update(self): other_doc = self.Person(name="bob", age=10).save() doc = self.Person( - name="jim", age=20, job=self.Job(name="10gen", years=3)).save() + name="jim", age=20, job=self.Job(name="10gen", years=3) + ).save() doc_copy = doc._from_son(doc.to_mongo()) @@ -906,14 +932,15 @@ class InstanceTest(MongoDBTestCase): doc.job.years = 3 n_modified = doc.modify( - set__age=21, set__job__name="MongoDB", unset__job__years=True) - self.assertEqual(n_modified, 1) + set__age=21, set__job__name="MongoDB", unset__job__years=True + ) + assert n_modified == 1 doc_copy.age = 21 doc_copy.job.name = "MongoDB" del doc_copy.job.years - self.assertEqual(doc.to_json(), doc_copy.to_json()) - self.assertEqual(doc._get_changed_fields(), []) + assert doc.to_json() == doc_copy.to_json() + assert doc._get_changed_fields() == [] self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())]) @@ -926,68 +953,62 @@ class InstanceTest(MongoDBTestCase): content = EmbeddedDocumentField(Content) post = BlogPost.objects.create( - tags=['python'], content=Content(keywords=['ipsum'])) - - self.assertEqual(post.tags, ['python']) - post.modify(push__tags__0=['code', 'mongo']) - self.assertEqual(post.tags, ['code', 'mongo', 'python']) - - # Assert same order of the list items is maintained in the db - self.assertEqual( - BlogPost._get_collection().find_one({'_id': post.pk})['tags'], - ['code', 'mongo', 'python'] + tags=["python"], content=Content(keywords=["ipsum"]) ) - self.assertEqual(post.content.keywords, ['ipsum']) - 
post.modify(push__content__keywords__0=['lorem']) - self.assertEqual(post.content.keywords, ['lorem', 'ipsum']) + assert post.tags == ["python"] + post.modify(push__tags__0=["code", "mongo"]) + assert post.tags == ["code", "mongo", "python"] # Assert same order of the list items is maintained in the db - self.assertEqual( - BlogPost._get_collection().find_one({'_id': post.pk})['content']['keywords'], - ['lorem', 'ipsum'] - ) + assert BlogPost._get_collection().find_one({"_id": post.pk})["tags"] == [ + "code", + "mongo", + "python", + ] + + assert post.content.keywords == ["ipsum"] + post.modify(push__content__keywords__0=["lorem"]) + assert post.content.keywords == ["lorem", "ipsum"] + + # Assert same order of the list items is maintained in the db + assert BlogPost._get_collection().find_one({"_id": post.pk})["content"][ + "keywords" + ] == ["lorem", "ipsum"] def test_save(self): """Ensure that a document may be saved in the database.""" # Create person object and save it to the database - person = self.Person(name='Test User', age=30) + person = self.Person(name="Test User", age=30) person.save() # Ensure that the object is in the database raw_doc = get_as_pymongo(person) - self.assertEqual( - raw_doc, - { - '_cls': 'Person', - 'name': 'Test User', - 'age': 30, - '_id': person.id - }) + assert raw_doc == { + "_cls": "Person", + "name": "Test User", + "age": 30, + "_id": person.id, + } def test_save_skip_validation(self): class Recipient(Document): email = EmailField(required=True) - recipient = Recipient(email='not-an-email') - with self.assertRaises(ValidationError): + recipient = Recipient(email="not-an-email") + with pytest.raises(ValidationError): recipient.save() recipient.save(validate=False) raw_doc = get_as_pymongo(recipient) - self.assertEqual( - raw_doc, - { - 'email': 'not-an-email', - '_id': recipient.id - }) + assert raw_doc == {"email": "not-an-email", "_id": recipient.id} def test_save_with_bad_id(self): class Clown(Document): id = 
IntField(primary_key=True) - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): Clown(id="not_an_int").save() def test_save_to_a_value_that_equates_to_false(self): @@ -1007,13 +1028,13 @@ class InstanceTest(MongoDBTestCase): user.save() user.reload() - self.assertEqual(user.thing.count, 0) + assert user.thing.count == 0 def test_save_max_recursion_not_hit(self): class Person(Document): name = StringField() - parent = ReferenceField('self') - friend = ReferenceField('self') + parent = ReferenceField("self") + friend = ReferenceField("self") Person.drop_collection() @@ -1031,36 +1052,36 @@ class InstanceTest(MongoDBTestCase): # Confirm can save and it resets the changed fields without hitting # max recursion error p0 = Person.objects.first() - p0.name = 'wpjunior' + p0.name = "wpjunior" p0.save() def test_save_max_recursion_not_hit_with_file_field(self): class Foo(Document): name = StringField() picture = FileField() - bar = ReferenceField('self') + bar = ReferenceField("self") Foo.drop_collection() - a = Foo(name='hello').save() + a = Foo(name="hello").save() a.bar = a - with open(TEST_IMAGE_PATH, 'rb') as test_image: + with open(TEST_IMAGE_PATH, "rb") as test_image: a.picture = test_image a.save() # Confirm can save and it resets the changed fields without hitting # max recursion error b = Foo.objects.with_id(a.id) - b.name = 'world' + b.name = "world" b.save() - self.assertEqual(b.picture, b.bar.picture, b.bar.bar.picture) + assert b.picture == b.bar.picture, b.bar.bar.picture def test_save_cascades(self): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = ReferenceField("self") Person.drop_collection() @@ -1077,12 +1098,12 @@ class InstanceTest(MongoDBTestCase): p.save(cascade=True) p1.reload() - self.assertEqual(p1.name, p.parent.name) + assert p1.name == p.parent.name def test_save_cascade_kwargs(self): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = 
ReferenceField("self") Person.drop_collection() @@ -1097,14 +1118,14 @@ class InstanceTest(MongoDBTestCase): p1.reload() p2.reload() - self.assertEqual(p1.name, p2.parent.name) + assert p1.name == p2.parent.name def test_save_cascade_meta_false(self): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = ReferenceField("self") - meta = {'cascade': False} + meta = {"cascade": False} Person.drop_collection() @@ -1121,18 +1142,18 @@ class InstanceTest(MongoDBTestCase): p.save() p1.reload() - self.assertNotEqual(p1.name, p.parent.name) + assert p1.name != p.parent.name p.save(cascade=True) p1.reload() - self.assertEqual(p1.name, p.parent.name) + assert p1.name == p.parent.name def test_save_cascade_meta_true(self): class Person(Document): name = StringField() - parent = ReferenceField('self') + parent = ReferenceField("self") - meta = {'cascade': False} + meta = {"cascade": False} Person.drop_collection() @@ -1149,7 +1170,7 @@ class InstanceTest(MongoDBTestCase): p.save() p1.reload() - self.assertNotEqual(p1.name, p.parent.name) + assert p1.name != p.parent.name def test_save_cascades_generically(self): class Person(Document): @@ -1170,11 +1191,11 @@ class InstanceTest(MongoDBTestCase): p.save() p1.reload() - self.assertNotEqual(p1.name, p.parent.name) + assert p1.name != p.parent.name p.save(cascade=True) p1.reload() - self.assertEqual(p1.name, p.parent.name) + assert p1.name == p.parent.name def test_save_atomicity_condition(self): class Widget(Document): @@ -1194,63 +1215,63 @@ class InstanceTest(MongoDBTestCase): w1 = Widget(toggle=False, save_id=UUID(1)) # ignore save_condition on new record creation - w1.save(save_condition={'save_id': UUID(42)}) + w1.save(save_condition={"save_id": UUID(42)}) w1.reload() - self.assertFalse(w1.toggle) - self.assertEqual(w1.save_id, UUID(1)) - self.assertEqual(w1.count, 0) + assert not w1.toggle + assert w1.save_id == UUID(1) + assert w1.count == 0 # mismatch in save_condition prevents save and 
raise exception flip(w1) - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 1) - self.assertRaises(SaveConditionError, - w1.save, save_condition={'save_id': UUID(42)}) + assert w1.toggle + assert w1.count == 1 + with pytest.raises(SaveConditionError): + w1.save(save_condition={"save_id": UUID(42)}) w1.reload() - self.assertFalse(w1.toggle) - self.assertEqual(w1.count, 0) + assert not w1.toggle + assert w1.count == 0 # matched save_condition allows save flip(w1) - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 1) - w1.save(save_condition={'save_id': UUID(1)}) + assert w1.toggle + assert w1.count == 1 + w1.save(save_condition={"save_id": UUID(1)}) w1.reload() - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 1) + assert w1.toggle + assert w1.count == 1 # save_condition can be used to ensure atomic read & updates # i.e., prevent interleaved reads and writes from separate contexts w2 = Widget.objects.get() - self.assertEqual(w1, w2) + assert w1 == w2 old_id = w1.save_id flip(w1) w1.save_id = UUID(2) - w1.save(save_condition={'save_id': old_id}) + w1.save(save_condition={"save_id": old_id}) w1.reload() - self.assertFalse(w1.toggle) - self.assertEqual(w1.count, 2) + assert not w1.toggle + assert w1.count == 2 flip(w2) flip(w2) - self.assertRaises(SaveConditionError, - w2.save, save_condition={'save_id': old_id}) + with pytest.raises(SaveConditionError): + w2.save(save_condition={"save_id": old_id}) w2.reload() - self.assertFalse(w2.toggle) - self.assertEqual(w2.count, 2) + assert not w2.toggle + assert w2.count == 2 # save_condition uses mongoengine-style operator syntax flip(w1) - w1.save(save_condition={'count__lt': w1.count}) + w1.save(save_condition={"count__lt": w1.count}) w1.reload() - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 3) + assert w1.toggle + assert w1.count == 3 flip(w1) - self.assertRaises(SaveConditionError, - w1.save, save_condition={'count__gte': w1.count}) + with pytest.raises(SaveConditionError): + 
w1.save(save_condition={"count__gte": w1.count}) w1.reload() - self.assertTrue(w1.toggle) - self.assertEqual(w1.count, 3) + assert w1.toggle + assert w1.count == 3 def test_save_update_selectively(self): class WildBoy(Document): @@ -1259,19 +1280,19 @@ class InstanceTest(MongoDBTestCase): WildBoy.drop_collection() - WildBoy(age=12, name='John').save() + WildBoy(age=12, name="John").save() boy1 = WildBoy.objects().first() boy2 = WildBoy.objects().first() boy1.age = 99 boy1.save() - boy2.name = 'Bob' + boy2.name = "Bob" boy2.save() fresh_boy = WildBoy.objects().first() - self.assertEqual(fresh_boy.age, 99) - self.assertEqual(fresh_boy.name, 'Bob') + assert fresh_boy.age == 99 + assert fresh_boy.name == "Bob" def test_save_update_selectively_with_custom_pk(self): # Prevents regression of #2082 @@ -1282,95 +1303,95 @@ class InstanceTest(MongoDBTestCase): WildBoy.drop_collection() - WildBoy(pk_id='A', age=12, name='John').save() + WildBoy(pk_id="A", age=12, name="John").save() boy1 = WildBoy.objects().first() boy2 = WildBoy.objects().first() boy1.age = 99 boy1.save() - boy2.name = 'Bob' + boy2.name = "Bob" boy2.save() fresh_boy = WildBoy.objects().first() - self.assertEqual(fresh_boy.age, 99) - self.assertEqual(fresh_boy.name, 'Bob') + assert fresh_boy.age == 99 + assert fresh_boy.name == "Bob" def test_update(self): """Ensure that an existing document is updated instead of be overwritten. 
""" # Create person object and save it to the database - person = self.Person(name='Test User', age=30) + person = self.Person(name="Test User", age=30) person.save() # Create same person object, with same id, without age - same_person = self.Person(name='Test') + same_person = self.Person(name="Test") same_person.id = person.id same_person.save() # Confirm only one object - self.assertEqual(self.Person.objects.count(), 1) + assert self.Person.objects.count() == 1 # reload person.reload() same_person.reload() # Confirm the same - self.assertEqual(person, same_person) - self.assertEqual(person.name, same_person.name) - self.assertEqual(person.age, same_person.age) + assert person == same_person + assert person.name == same_person.name + assert person.age == same_person.age # Confirm the saved values - self.assertEqual(person.name, 'Test') - self.assertEqual(person.age, 30) + assert person.name == "Test" + assert person.age == 30 # Test only / exclude only updates included fields - person = self.Person.objects.only('name').get() - person.name = 'User' + person = self.Person.objects.only("name").get() + person.name = "User" person.save() person.reload() - self.assertEqual(person.name, 'User') - self.assertEqual(person.age, 30) + assert person.name == "User" + assert person.age == 30 # test exclude only updates set fields - person = self.Person.objects.exclude('name').get() + person = self.Person.objects.exclude("name").get() person.age = 21 person.save() person.reload() - self.assertEqual(person.name, 'User') - self.assertEqual(person.age, 21) + assert person.name == "User" + assert person.age == 21 # Test only / exclude can set non excluded / included fields - person = self.Person.objects.only('name').get() - person.name = 'Test' + person = self.Person.objects.only("name").get() + person.name = "Test" person.age = 30 person.save() person.reload() - self.assertEqual(person.name, 'Test') - self.assertEqual(person.age, 30) + assert person.name == "Test" + assert 
person.age == 30 # test exclude only updates set fields - person = self.Person.objects.exclude('name').get() - person.name = 'User' + person = self.Person.objects.exclude("name").get() + person.name = "User" person.age = 21 person.save() person.reload() - self.assertEqual(person.name, 'User') - self.assertEqual(person.age, 21) + assert person.name == "User" + assert person.age == 21 # Confirm does remove unrequired fields - person = self.Person.objects.exclude('name').get() + person = self.Person.objects.exclude("name").get() person.age = None person.save() person.reload() - self.assertEqual(person.name, 'User') - self.assertEqual(person.age, None) + assert person.name == "User" + assert person.age is None person = self.Person.objects.get() person.name = None @@ -1378,29 +1399,28 @@ class InstanceTest(MongoDBTestCase): person.save() person.reload() - self.assertEqual(person.name, None) - self.assertEqual(person.age, None) + assert person.name is None + assert person.age is None def test_update_rename_operator(self): """Test the $rename operator.""" coll = self.Person._get_collection() - doc = self.Person(name='John').save() - raw_doc = coll.find_one({'_id': doc.pk}) - self.assertEqual(set(raw_doc.keys()), set(['_id', '_cls', 'name'])) + doc = self.Person(name="John").save() + raw_doc = coll.find_one({"_id": doc.pk}) + assert set(raw_doc.keys()) == set(["_id", "_cls", "name"]) - doc.update(rename__name='first_name') - raw_doc = coll.find_one({'_id': doc.pk}) - self.assertEqual(set(raw_doc.keys()), - set(['_id', '_cls', 'first_name'])) - self.assertEqual(raw_doc['first_name'], 'John') + doc.update(rename__name="first_name") + raw_doc = coll.find_one({"_id": doc.pk}) + assert set(raw_doc.keys()) == set(["_id", "_cls", "first_name"]) + assert raw_doc["first_name"] == "John" def test_inserts_if_you_set_the_pk(self): - p1 = self.Person(name='p1', id=bson.ObjectId()).save() - p2 = self.Person(name='p2') + _ = self.Person(name="p1", id=bson.ObjectId()).save() + p2 = 
self.Person(name="p2") p2.id = bson.ObjectId() p2.save() - self.assertEqual(2, self.Person.objects.count()) + assert 2 == self.Person.objects.count() def test_can_save_if_not_included(self): class EmbeddedDoc(EmbeddedDocument): @@ -1410,33 +1430,34 @@ class InstanceTest(MongoDBTestCase): pass class Doc(Document): - string_field = StringField(default='1') + string_field = StringField(default="1") int_field = IntField(default=1) float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) datetime_field = DateTimeField(default=datetime.now) embedded_document_field = EmbeddedDocumentField( - EmbeddedDoc, default=lambda: EmbeddedDoc()) + EmbeddedDoc, default=lambda: EmbeddedDoc() + ) list_field = ListField(default=lambda: [1, 2, 3]) dict_field = DictField(default=lambda: {"hello": "world"}) objectid_field = ObjectIdField(default=bson.ObjectId) - reference_field = ReferenceField(Simple, default=lambda: - Simple().save()) + reference_field = ReferenceField(Simple, default=lambda: Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) complex_datetime_field = ComplexDateTimeField(default=datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) generic_reference_field = GenericReferenceField( - default=lambda: Simple().save()) - sorted_list_field = SortedListField(IntField(), - default=lambda: [1, 2, 3]) + default=lambda: Simple().save() + ) + sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) email_field = EmailField(default="ross@example.com") geo_point_field = GeoPointField(default=lambda: [1, 2]) sequence_field = SequenceField() uuid_field = UUIDField(default=uuid.uuid4) generic_embedded_document_field = GenericEmbeddedDocumentField( - default=lambda: EmbeddedDoc()) + default=lambda: EmbeddedDoc() + ) Simple.drop_collection() Doc.drop_collection() @@ -1447,27 +1468,27 @@ class InstanceTest(MongoDBTestCase): 
my_doc.save() my_doc = Doc.objects.get(string_field="string") - self.assertEqual(my_doc.string_field, "string") - self.assertEqual(my_doc.int_field, 1) + assert my_doc.string_field == "string" + assert my_doc.int_field == 1 def test_document_update(self): # try updating a non-saved document - with self.assertRaises(OperationError): - person = self.Person(name='dcrosta') - person.update(set__name='Dan Crosta') + with pytest.raises(OperationError): + person = self.Person(name="dcrosta") + person.update(set__name="Dan Crosta") - author = self.Person(name='dcrosta') + author = self.Person(name="dcrosta") author.save() - author.update(set__name='Dan Crosta') + author.update(set__name="Dan Crosta") author.reload() p1 = self.Person.objects.first() - self.assertEqual(p1.name, author.name) + assert p1.name == author.name # try sending an empty update - with self.assertRaises(OperationError): + with pytest.raises(OperationError): person = self.Person.objects.first() person.update() @@ -1476,7 +1497,7 @@ class InstanceTest(MongoDBTestCase): person = self.Person.objects.first() person.update(name="Dan") person.reload() - self.assertEqual("Dan", person.name) + assert "Dan" == person.name def test_update_unique_field(self): class Doc(Document): @@ -1485,14 +1506,14 @@ class InstanceTest(MongoDBTestCase): doc1 = Doc(name="first").save() doc2 = Doc(name="second").save() - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): doc2.update(set__name=doc1.name) def test_embedded_update(self): """Test update on `EmbeddedDocumentField` fields.""" + class Page(EmbeddedDocument): - log_message = StringField(verbose_name="Log message", - required=True) + log_message = StringField(verbose_name="Log message", required=True) class Site(Document): page = EmbeddedDocumentField(Page) @@ -1507,33 +1528,35 @@ class InstanceTest(MongoDBTestCase): site.save() site = Site.objects.first() - self.assertEqual(site.page.log_message, "Error: Dummy message") + assert 
site.page.log_message == "Error: Dummy message" def test_update_list_field(self): """Test update on `ListField` with $pull + $in. """ + class Doc(Document): foo = ListField(StringField()) Doc.drop_collection() - doc = Doc(foo=['a', 'b', 'c']) + doc = Doc(foo=["a", "b", "c"]) doc.save() # Update doc = Doc.objects.first() - doc.update(pull__foo__in=['a', 'c']) + doc.update(pull__foo__in=["a", "c"]) doc = Doc.objects.first() - self.assertEqual(doc.foo, ['b']) + assert doc.foo == ["b"] def test_embedded_update_db_field(self): """Test update on `EmbeddedDocumentField` fields when db_field is other than default. """ + class Page(EmbeddedDocument): - log_message = StringField(verbose_name="Log message", - db_field="page_log_message", - required=True) + log_message = StringField( + verbose_name="Log message", db_field="page_log_message", required=True + ) class Site(Document): page = EmbeddedDocumentField(Page) @@ -1549,17 +1572,18 @@ class InstanceTest(MongoDBTestCase): site.save() site = Site.objects.first() - self.assertEqual(site.page.log_message, "Error: Dummy message") + assert site.page.log_message == "Error: Dummy message" def test_save_only_changed_fields(self): """Ensure save only sets / unsets changed fields.""" + class User(self.Person): active = BooleanField(default=True) User.drop_collection() # Create person object and save it to the database - user = User(name='Test User', age=30, active=True) + user = User(name="Test User", age=30, active=True) user.save() user.reload() @@ -1570,28 +1594,31 @@ class InstanceTest(MongoDBTestCase): user.age = 21 user.save() - same_person.name = 'User' + same_person.name = "User" same_person.save() person = self.Person.objects.get() - self.assertEqual(person.name, 'User') - self.assertEqual(person.age, 21) - self.assertEqual(person.active, False) + assert person.name == "User" + assert person.age == 21 + assert person.active is False - def 
test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc(self): + def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc( + self, + ): # Refers to Issue #1685 class EmbeddedChildModel(EmbeddedDocument): id = DictField(primary_key=True) class ParentModel(Document): - child = EmbeddedDocumentField( - EmbeddedChildModel) + child = EmbeddedDocumentField(EmbeddedChildModel) - emb = EmbeddedChildModel(id={'1': [1]}) + emb = EmbeddedChildModel(id={"1": [1]}) changed_fields = ParentModel(child=emb)._get_changed_fields() - self.assertEqual(changed_fields, []) + assert changed_fields == [] - def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc(self): + def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc( + self, + ): # Refers to Issue #1685 class User(Document): id = IntField(primary_key=True) @@ -1604,12 +1631,12 @@ class InstanceTest(MongoDBTestCase): Message.drop_collection() # All objects share the same id, but each in a different collection - user = User(id=1, name='user-name').save() + user = User(id=1, name="user-name").save() message = Message(id=1, author=user).save() - message.author.name = 'tutu' - self.assertEqual(message._get_changed_fields(), []) - self.assertEqual(user._get_changed_fields(), ['name']) + message.author.name = "tutu" + assert message._get_changed_fields() == [] + assert user._get_changed_fields() == ["name"] def test__get_changed_fields_same_ids_embedded(self): # Refers to Issue #1768 @@ -1624,24 +1651,25 @@ class InstanceTest(MongoDBTestCase): Message.drop_collection() # All objects share the same id, but each in a different collection - user = User(id=1, name='user-name') # .save() + user = User(id=1, name="user-name") # .save() message = Message(id=1, author=user).save() - message.author.name = 'tutu' - self.assertEqual(message._get_changed_fields(), ['author.name']) + 
message.author.name = "tutu" + assert message._get_changed_fields() == ["author.name"] message.save() message_fetched = Message.objects.with_id(message.id) - self.assertEqual(message_fetched.author.name, 'tutu') + assert message_fetched.author.name == "tutu" def test_query_count_when_saving(self): """Ensure references don't cause extra fetches when saving""" + class Organization(Document): name = StringField() class User(Document): name = StringField() - orgs = ListField(ReferenceField('Organization')) + orgs = ListField(ReferenceField("Organization")) class Feed(Document): name = StringField() @@ -1667,70 +1695,71 @@ class InstanceTest(MongoDBTestCase): user = User.objects.first() # Even if stored as ObjectId's internally mongoengine uses DBRefs # As ObjectId's aren't automatically derefenced - self.assertIsInstance(user._data['orgs'][0], DBRef) - self.assertIsInstance(user.orgs[0], Organization) - self.assertIsInstance(user._data['orgs'][0], Organization) + assert isinstance(user._data["orgs"][0], DBRef) + assert isinstance(user.orgs[0], Organization) + assert isinstance(user._data["orgs"][0], Organization) # Changing a value with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 sub = UserSubscription.objects.first() - self.assertEqual(q, 1) + assert q == 1 sub.name = "Test Sub" sub.save() - self.assertEqual(q, 2) + assert q == 2 # Changing a value that will cascade with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 sub = UserSubscription.objects.first() - self.assertEqual(q, 1) + assert q == 1 sub.user.name = "Test" - self.assertEqual(q, 2) + assert q == 2 sub.save(cascade=True) - self.assertEqual(q, 3) + assert q == 3 # Changing a value and one that will cascade with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 sub = UserSubscription.objects.first() sub.name = "Test Sub 2" - self.assertEqual(q, 1) + assert q == 1 sub.user.name = "Test 2" - self.assertEqual(q, 2) + assert q == 2 sub.save(cascade=True) - 
self.assertEqual(q, 4) # One for the UserSub and one for the User + assert q == 4 # One for the UserSub and one for the User # Saving with just the refs with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 sub = UserSubscription(user=u1.pk, feed=f1.pk) - self.assertEqual(q, 0) + assert q == 0 sub.save() - self.assertEqual(q, 1) + assert q == 1 # Saving with just the refs on a ListField with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 User(name="Bob", orgs=[o1.pk, o2.pk]).save() - self.assertEqual(q, 1) + assert q == 1 # Saving new objects with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 user = User.objects.first() - self.assertEqual(q, 1) + assert q == 1 feed = Feed.objects.first() - self.assertEqual(q, 2) + assert q == 2 sub = UserSubscription(user=user, feed=feed) - self.assertEqual(q, 2) # Check no change + assert q == 2 # Check no change sub.save() - self.assertEqual(q, 3) + assert q == 3 def test_set_unset_one_operation(self): """Ensure that $set and $unset actions are performed in the same operation. 
""" + class FooBar(Document): foo = StringField(default=None) bar = StringField(default=None) @@ -1738,19 +1767,20 @@ class InstanceTest(MongoDBTestCase): FooBar.drop_collection() # write an entity with a single prop - foo = FooBar(foo='foo').save() + foo = FooBar(foo="foo").save() - self.assertEqual(foo.foo, 'foo') + assert foo.foo == "foo" del foo.foo - foo.bar = 'bar' + foo.bar = "bar" with query_counter() as q: - self.assertEqual(0, q) + assert 0 == q foo.save() - self.assertEqual(1, q) + assert 1 == q def test_save_only_changed_fields_recursive(self): """Ensure save only sets / unsets changed fields.""" + class Comment(EmbeddedDocument): published = BooleanField(default=True) @@ -1762,70 +1792,69 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() # Create person object and save it to the database - person = User(name='Test User', age=30, active=True) + person = User(name="Test User", age=30, active=True) person.comments.append(Comment()) person.save() person.reload() person = self.Person.objects.get() - self.assertTrue(person.comments[0].published) + assert person.comments[0].published person.comments[0].published = False person.save() person = self.Person.objects.get() - self.assertFalse(person.comments[0].published) + assert not person.comments[0].published # Simple dict w - person.comments_dict['first_post'] = Comment() + person.comments_dict["first_post"] = Comment() person.save() person = self.Person.objects.get() - self.assertTrue(person.comments_dict['first_post'].published) + assert person.comments_dict["first_post"].published - person.comments_dict['first_post'].published = False + person.comments_dict["first_post"].published = False person.save() person = self.Person.objects.get() - self.assertFalse(person.comments_dict['first_post'].published) + assert not person.comments_dict["first_post"].published def test_delete(self): """Ensure that document may be deleted using the delete method.""" person = self.Person(name="Test User", age=30) 
person.save() - self.assertEqual(self.Person.objects.count(), 1) + assert self.Person.objects.count() == 1 person.delete() - self.assertEqual(self.Person.objects.count(), 0) + assert self.Person.objects.count() == 0 def test_save_custom_id(self): """Ensure that a document may be saved with a custom _id.""" # Create person object and save it to the database - person = self.Person(name='Test User', age=30, - id='497ce96f395f2f052a494fd4') + person = self.Person(name="Test User", age=30, id="497ce96f395f2f052a494fd4") person.save() # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._get_collection_name()] - person_obj = collection.find_one({'name': 'Test User'}) - self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') + person_obj = collection.find_one({"name": "Test User"}) + assert str(person_obj["_id"]) == "497ce96f395f2f052a494fd4" def test_save_custom_pk(self): """Ensure that a document may be saved with a custom _id using pk alias. 
""" # Create person object and save it to the database - person = self.Person(name='Test User', age=30, - pk='497ce96f395f2f052a494fd4') + person = self.Person(name="Test User", age=30, pk="497ce96f395f2f052a494fd4") person.save() # Ensure that the object is in the database with the correct _id collection = self.db[self.Person._get_collection_name()] - person_obj = collection.find_one({'name': 'Test User'}) - self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') + person_obj = collection.find_one({"name": "Test User"}) + assert str(person_obj["_id"]) == "497ce96f395f2f052a494fd4" def test_save_list(self): """Ensure that a list field may be properly saved.""" + class Comment(EmbeddedDocument): content = StringField() @@ -1836,37 +1865,36 @@ class InstanceTest(MongoDBTestCase): BlogPost.drop_collection() - post = BlogPost(content='Went for a walk today...') - post.tags = tags = ['fun', 'leisure'] - comments = [Comment(content='Good for you'), Comment(content='Yay.')] + post = BlogPost(content="Went for a walk today...") + post.tags = tags = ["fun", "leisure"] + comments = [Comment(content="Good for you"), Comment(content="Yay.")] post.comments = comments post.save() collection = self.db[BlogPost._get_collection_name()] post_obj = collection.find_one() - self.assertEqual(post_obj['tags'], tags) - for comment_obj, comment in zip(post_obj['comments'], comments): - self.assertEqual(comment_obj['content'], comment['content']) + assert post_obj["tags"] == tags + for comment_obj, comment in zip(post_obj["comments"], comments): + assert comment_obj["content"] == comment["content"] def test_list_search_by_embedded(self): class User(Document): username = StringField(required=True) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Comment(EmbeddedDocument): comment = StringField() - user = ReferenceField(User, - required=True) + user = ReferenceField(User, required=True) - meta = {'allow_inheritance': False} + meta = 
{"allow_inheritance": False} class Page(Document): comments = ListField(EmbeddedDocumentField(Comment)) - meta = {'allow_inheritance': False, - 'indexes': [ - {'fields': ['comments.user']} - ]} + meta = { + "allow_inheritance": False, + "indexes": [{"fields": ["comments.user"]}], + } User.drop_collection() Page.drop_collection() @@ -1880,14 +1908,22 @@ class InstanceTest(MongoDBTestCase): u3 = User(username="hmarr") u3.save() - p1 = Page(comments=[Comment(user=u1, comment="Its very good"), - Comment(user=u2, comment="Hello world"), - Comment(user=u3, comment="Ping Pong"), - Comment(user=u1, comment="I like a beer")]) + p1 = Page( + comments=[ + Comment(user=u1, comment="Its very good"), + Comment(user=u2, comment="Hello world"), + Comment(user=u3, comment="Ping Pong"), + Comment(user=u1, comment="I like a beer"), + ] + ) p1.save() - p2 = Page(comments=[Comment(user=u1, comment="Its very good"), - Comment(user=u2, comment="Hello world")]) + p2 = Page( + comments=[ + Comment(user=u1, comment="Its very good"), + Comment(user=u2, comment="Hello world"), + ] + ) p2.save() p3 = Page(comments=[Comment(user=u3, comment="Its very good")]) @@ -1896,20 +1932,15 @@ class InstanceTest(MongoDBTestCase): p4 = Page(comments=[Comment(user=u2, comment="Heavy Metal song")]) p4.save() - self.assertEqual( - [p1, p2], - list(Page.objects.filter(comments__user=u1))) - self.assertEqual( - [p1, p2, p4], - list(Page.objects.filter(comments__user=u2))) - self.assertEqual( - [p1, p3], - list(Page.objects.filter(comments__user=u3))) + assert [p1, p2] == list(Page.objects.filter(comments__user=u1)) + assert [p1, p2, p4] == list(Page.objects.filter(comments__user=u2)) + assert [p1, p3] == list(Page.objects.filter(comments__user=u3)) def test_save_embedded_document(self): """Ensure that a document with an embedded document field may be saved in the database. 
""" + class EmployeeDetails(EmbeddedDocument): position = StringField() @@ -1918,26 +1949,26 @@ class InstanceTest(MongoDBTestCase): details = EmbeddedDocumentField(EmployeeDetails) # Create employee object and save it to the database - employee = Employee(name='Test Employee', age=50, salary=20000) - employee.details = EmployeeDetails(position='Developer') + employee = Employee(name="Test Employee", age=50, salary=20000) + employee.details = EmployeeDetails(position="Developer") employee.save() # Ensure that the object is in the database collection = self.db[self.Person._get_collection_name()] - employee_obj = collection.find_one({'name': 'Test Employee'}) - self.assertEqual(employee_obj['name'], 'Test Employee') - self.assertEqual(employee_obj['age'], 50) + employee_obj = collection.find_one({"name": "Test Employee"}) + assert employee_obj["name"] == "Test Employee" + assert employee_obj["age"] == 50 # Ensure that the 'details' embedded object saved correctly - self.assertEqual(employee_obj['details']['position'], 'Developer') + assert employee_obj["details"]["position"] == "Developer" def test_embedded_update_after_save(self): """Test update of `EmbeddedDocumentField` attached to a newly saved document. """ + class Page(EmbeddedDocument): - log_message = StringField(verbose_name="Log message", - required=True) + log_message = StringField(verbose_name="Log message", required=True) class Site(Document): page = EmbeddedDocumentField(Page) @@ -1951,12 +1982,13 @@ class InstanceTest(MongoDBTestCase): site.save() site = Site.objects.first() - self.assertEqual(site.page.log_message, "Error: Dummy message") + assert site.page.log_message == "Error: Dummy message" def test_updating_an_embedded_document(self): """Ensure that a document with an embedded document field may be saved in the database. 
""" + class EmployeeDetails(EmbeddedDocument): position = StringField() @@ -1965,29 +1997,28 @@ class InstanceTest(MongoDBTestCase): details = EmbeddedDocumentField(EmployeeDetails) # Create employee object and save it to the database - employee = Employee(name='Test Employee', age=50, salary=20000) - employee.details = EmployeeDetails(position='Developer') + employee = Employee(name="Test Employee", age=50, salary=20000) + employee.details = EmployeeDetails(position="Developer") employee.save() # Test updating an embedded document - promoted_employee = Employee.objects.get(name='Test Employee') - promoted_employee.details.position = 'Senior Developer' + promoted_employee = Employee.objects.get(name="Test Employee") + promoted_employee.details.position = "Senior Developer" promoted_employee.save() promoted_employee.reload() - self.assertEqual(promoted_employee.name, 'Test Employee') - self.assertEqual(promoted_employee.age, 50) + assert promoted_employee.name == "Test Employee" + assert promoted_employee.age == 50 # Ensure that the 'details' embedded object saved correctly - self.assertEqual( - promoted_employee.details.position, 'Senior Developer') + assert promoted_employee.details.position == "Senior Developer" # Test removal promoted_employee.details = None promoted_employee.save() promoted_employee.reload() - self.assertEqual(promoted_employee.details, None) + assert promoted_employee.details is None def test_object_mixins(self): class NameMixin(object): @@ -1996,12 +2027,12 @@ class InstanceTest(MongoDBTestCase): class Foo(EmbeddedDocument, NameMixin): quantity = IntField() - self.assertEqual(['name', 'quantity'], sorted(Foo._fields.keys())) + assert ["name", "quantity"] == sorted(Foo._fields.keys()) class Bar(Document, NameMixin): widgets = StringField() - self.assertEqual(['id', 'name', 'widgets'], sorted(Bar._fields.keys())) + assert ["id", "name", "widgets"] == sorted(Bar._fields.keys()) def test_mixin_inheritance(self): class BaseMixIn(object): @@ 
-2015,33 +2046,33 @@ class InstanceTest(MongoDBTestCase): age = IntField() TestDoc.drop_collection() - t = TestDoc(count=12, data="test", - comment="great!", age=19) + t = TestDoc(count=12, data="test", comment="great!", age=19) t.save() t = TestDoc.objects.first() - self.assertEqual(t.age, 19) - self.assertEqual(t.comment, "great!") - self.assertEqual(t.data, "test") - self.assertEqual(t.count, 12) + assert t.age == 19 + assert t.comment == "great!" + assert t.data == "test" + assert t.count == 12 def test_save_reference(self): """Ensure that a document reference field may be saved in the database. """ + class BlogPost(Document): - meta = {'collection': 'blogpost_1'} + meta = {"collection": "blogpost_1"} content = StringField() author = ReferenceField(self.Person) BlogPost.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - post = BlogPost(content='Watched some TV today... how exciting.') + post = BlogPost(content="Watched some TV today... how exciting.") # Should only reference author when saving post.author = author post.save() @@ -2049,28 +2080,30 @@ class InstanceTest(MongoDBTestCase): post_obj = BlogPost.objects.first() # Test laziness - self.assertIsInstance(post_obj._data['author'], bson.DBRef) - self.assertIsInstance(post_obj.author, self.Person) - self.assertEqual(post_obj.author.name, 'Test User') + assert isinstance(post_obj._data["author"], bson.DBRef) + assert isinstance(post_obj.author, self.Person) + assert post_obj.author.name == "Test User" # Ensure that the dereferenced object may be changed and saved post_obj.author.age = 25 post_obj.author.save() - author = list(self.Person.objects(name='Test User'))[-1] - self.assertEqual(author.age, 25) + author = list(self.Person.objects(name="Test User"))[-1] + assert author.age == 25 def test_duplicate_db_fields_raise_invalid_document_error(self): """Ensure a InvalidDocumentError is thrown if duplicate fields declare the same db_field. 
""" - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): + class Foo(Document): name = StringField() - name2 = StringField(db_field='name') + name2 = StringField(db_field="name") def test_invalid_son(self): """Raise an error if loading invalid data.""" + class Occurrence(EmbeddedDocument): number = IntField() @@ -2080,22 +2113,25 @@ class InstanceTest(MongoDBTestCase): forms = ListField(StringField(), default=list) occurs = ListField(EmbeddedDocumentField(Occurrence), default=list) - with self.assertRaises(InvalidDocumentError): - Word._from_son({ - 'stem': [1, 2, 3], - 'forms': 1, - 'count': 'one', - 'occurs': {"hello": None} - }) + with pytest.raises(InvalidDocumentError): + Word._from_son( + { + "stem": [1, 2, 3], + "forms": 1, + "count": "one", + "occurs": {"hello": None}, + } + ) # Tests for issue #1438: https://github.com/MongoEngine/mongoengine/issues/1438 - with self.assertRaises(ValueError): - Word._from_son('this is not a valid SON dict') + with pytest.raises(ValueError): + Word._from_son("this is not a valid SON dict") def test_reverse_delete_rule_cascade_and_nullify(self): """Ensure that a referenced document is also deleted upon deletion. 
""" + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) @@ -2104,48 +2140,50 @@ class InstanceTest(MongoDBTestCase): self.Person.drop_collection() BlogPost.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - reviewer = self.Person(name='Re Viewer') + reviewer = self.Person(name="Re Viewer") reviewer.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.author = author post.reviewer = reviewer post.save() reviewer.delete() # No effect on the BlogPost - self.assertEqual(BlogPost.objects.count(), 1) - self.assertEqual(BlogPost.objects.get().reviewer, None) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.get().reviewer is None # Delete the Person, which should lead to deletion of the BlogPost, too author.delete() - self.assertEqual(BlogPost.objects.count(), 0) + assert BlogPost.objects.count() == 0 def test_reverse_delete_rule_pull(self): """Ensure that a referenced document is also deleted with pull. """ + class Record(Document): name = StringField() - children = ListField(ReferenceField('self', reverse_delete_rule=PULL)) + children = ListField(ReferenceField("self", reverse_delete_rule=PULL)) Record.drop_collection() - parent_record = Record(name='parent').save() - child_record = Record(name='child').save() + parent_record = Record(name="parent").save() + child_record = Record(name="child").save() parent_record.children.append(child_record) parent_record.save() child_record.delete() - self.assertEqual(Record.objects(name='parent').get().children, []) + assert Record.objects(name="parent").get().children == [] def test_reverse_delete_rule_with_custom_id_field(self): """Ensure that a referenced document with custom primary key is also deleted upon deletion. 
""" + class User(Document): name = StringField(primary_key=True) @@ -2156,21 +2194,22 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() Book.drop_collection() - user = User(name='Mike').save() - reviewer = User(name='John').save() - book = Book(author=user, reviewer=reviewer).save() + user = User(name="Mike").save() + reviewer = User(name="John").save() + _ = Book(author=user, reviewer=reviewer).save() reviewer.delete() - self.assertEqual(Book.objects.count(), 1) - self.assertEqual(Book.objects.get().reviewer, None) + assert Book.objects.count() == 1 + assert Book.objects.get().reviewer is None user.delete() - self.assertEqual(Book.objects.count(), 0) + assert Book.objects.count() == 0 def test_reverse_delete_rule_with_shared_id_among_collections(self): """Ensure that cascade delete rule doesn't mix id among collections. """ + class User(Document): id = IntField(primary_key=True) @@ -2183,26 +2222,27 @@ class InstanceTest(MongoDBTestCase): user_1 = User(id=1).save() user_2 = User(id=2).save() - book_1 = Book(id=1, author=user_2).save() + _ = Book(id=1, author=user_2).save() book_2 = Book(id=2, author=user_1).save() user_2.delete() # Deleting user_2 should also delete book_1 but not book_2 - self.assertEqual(Book.objects.count(), 1) - self.assertEqual(Book.objects.get(), book_2) + assert Book.objects.count() == 1 + assert Book.objects.get() == book_2 user_3 = User(id=3).save() - book_3 = Book(id=3, author=user_3).save() + _ = Book(id=3, author=user_3).save() user_3.delete() # Deleting user_3 should also delete book_3 - self.assertEqual(Book.objects.count(), 1) - self.assertEqual(Book.objects.get(), book_2) + assert Book.objects.count() == 1 + assert Book.objects.get() == book_2 def test_reverse_delete_rule_with_document_inheritance(self): """Ensure that a referenced document is also deleted upon deletion of a child document. 
""" + class Writer(self.Person): pass @@ -2214,58 +2254,61 @@ class InstanceTest(MongoDBTestCase): self.Person.drop_collection() BlogPost.drop_collection() - author = Writer(name='Test User') + author = Writer(name="Test User") author.save() - reviewer = Writer(name='Re Viewer') + reviewer = Writer(name="Re Viewer") reviewer.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.author = author post.reviewer = reviewer post.save() reviewer.delete() - self.assertEqual(BlogPost.objects.count(), 1) - self.assertEqual(BlogPost.objects.get().reviewer, None) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.get().reviewer is None # Delete the Writer should lead to deletion of the BlogPost author.delete() - self.assertEqual(BlogPost.objects.count(), 0) + assert BlogPost.objects.count() == 0 def test_reverse_delete_rule_cascade_and_nullify_complex_field(self): """Ensure that a referenced document is also deleted upon deletion for complex fields. 
""" + class BlogPost(Document): content = StringField() - authors = ListField(ReferenceField( - self.Person, reverse_delete_rule=CASCADE)) - reviewers = ListField(ReferenceField( - self.Person, reverse_delete_rule=NULLIFY)) + authors = ListField( + ReferenceField(self.Person, reverse_delete_rule=CASCADE) + ) + reviewers = ListField( + ReferenceField(self.Person, reverse_delete_rule=NULLIFY) + ) self.Person.drop_collection() BlogPost.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - reviewer = self.Person(name='Re Viewer') + reviewer = self.Person(name="Re Viewer") reviewer.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.authors = [author] post.reviewers = [reviewer] post.save() # Deleting the reviewer should have no effect on the BlogPost reviewer.delete() - self.assertEqual(BlogPost.objects.count(), 1) - self.assertEqual(BlogPost.objects.get().reviewers, []) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.get().reviewers == [] # Delete the Person, which should lead to deletion of the BlogPost, too author.delete() - self.assertEqual(BlogPost.objects.count(), 0) + assert BlogPost.objects.count() == 0 def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self): """Ensure the pre_delete signal is triggered upon a cascading @@ -2273,6 +2316,7 @@ class InstanceTest(MongoDBTestCase): delete the author which triggers deletion of blogpost via cascade blog post's pre_delete signal alters an editor attribute. 
""" + class Editor(self.Person): review_queue = IntField(default=0) @@ -2292,32 +2336,32 @@ class InstanceTest(MongoDBTestCase): BlogPost.drop_collection() Editor.drop_collection() - author = self.Person(name='Will S.').save() - editor = Editor(name='Max P.', review_queue=1).save() - BlogPost(content='wrote some books', author=author, - editor=editor).save() + author = self.Person(name="Will S.").save() + editor = Editor(name="Max P.", review_queue=1).save() + BlogPost(content="wrote some books", author=author, editor=editor).save() # delete the author, the post is also deleted due to the CASCADE rule author.delete() # the pre-delete signal should have decremented the editor's queue - editor = Editor.objects(name='Max P.').get() - self.assertEqual(editor.review_queue, 0) + editor = Editor.objects(name="Max P.").get() + assert editor.review_queue == 0 def test_two_way_reverse_delete_rule(self): """Ensure that Bi-Directional relationships work with reverse_delete_rule """ + class Bar(Document): content = StringField() - foo = ReferenceField('Foo') + foo = ReferenceField("Foo") class Foo(Document): content = StringField() bar = ReferenceField(Bar) - Bar.register_delete_rule(Foo, 'bar', NULLIFY) - Foo.register_delete_rule(Bar, 'foo', NULLIFY) + Bar.register_delete_rule(Foo, "bar", NULLIFY) + Foo.register_delete_rule(Bar, "foo", NULLIFY) Bar.drop_collection() Foo.drop_collection() @@ -2333,29 +2377,32 @@ class InstanceTest(MongoDBTestCase): f.delete() - self.assertEqual(Bar.objects.count(), 1) # No effect on the BlogPost - self.assertEqual(Bar.objects.get().foo, None) + assert Bar.objects.count() == 1 # No effect on the BlogPost + assert Bar.objects.get().foo is None def test_invalid_reverse_delete_rule_raise_errors(self): - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): + class Blog(Document): content = StringField() - authors = MapField(ReferenceField( - self.Person, reverse_delete_rule=CASCADE)) + authors = MapField( + 
ReferenceField(self.Person, reverse_delete_rule=CASCADE) + ) reviewers = DictField( - field=ReferenceField( - self.Person, - reverse_delete_rule=NULLIFY)) + field=ReferenceField(self.Person, reverse_delete_rule=NULLIFY) + ) + + with pytest.raises(InvalidDocumentError): - with self.assertRaises(InvalidDocumentError): class Parents(EmbeddedDocument): - father = ReferenceField('Person', reverse_delete_rule=DENY) - mother = ReferenceField('Person', reverse_delete_rule=DENY) + father = ReferenceField("Person", reverse_delete_rule=DENY) + mother = ReferenceField("Person", reverse_delete_rule=DENY) def test_reverse_delete_rule_cascade_recurs(self): """Ensure that a chain of documents is also deleted upon cascaded deletion. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) @@ -2368,26 +2415,27 @@ class InstanceTest(MongoDBTestCase): BlogPost.drop_collection() Comment.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.author = author post.save() - comment = Comment(text='Kudos.') + comment = Comment(text="Kudos.") comment.post = post comment.save() # Delete the Person, which should lead to deletion of the BlogPost, # and, recursively to the Comment, too author.delete() - self.assertEqual(Comment.objects.count(), 0) + assert Comment.objects.count() == 0 def test_reverse_delete_rule_deny(self): """Ensure that a document cannot be referenced if there are still documents referring to it. 
""" + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=DENY) @@ -2395,25 +2443,26 @@ class InstanceTest(MongoDBTestCase): self.Person.drop_collection() BlogPost.drop_collection() - author = self.Person(name='Test User') + author = self.Person(name="Test User") author.save() - post = BlogPost(content='Watched some TV') + post = BlogPost(content="Watched some TV") post.author = author post.save() # Delete the Person should be denied - self.assertRaises(OperationError, author.delete) # Should raise denied error - self.assertEqual(BlogPost.objects.count(), 1) # No objects may have been deleted - self.assertEqual(self.Person.objects.count(), 1) + with pytest.raises(OperationError): + author.delete() # Should raise denied error + assert BlogPost.objects.count() == 1 # No objects may have been deleted + assert self.Person.objects.count() == 1 # Other users, that don't have BlogPosts must be removable, like normal - author = self.Person(name='Another User') + author = self.Person(name="Another User") author.save() - self.assertEqual(self.Person.objects.count(), 2) + assert self.Person.objects.count() == 2 author.delete() - self.assertEqual(self.Person.objects.count(), 1) + assert self.Person.objects.count() == 1 def subclasses_and_unique_keys_works(self): class A(Document): @@ -2429,11 +2478,12 @@ class InstanceTest(MongoDBTestCase): A().save() B(foo=True).save() - self.assertEqual(A.objects.count(), 2) - self.assertEqual(B.objects.count(), 1) + assert A.objects.count() == 2 + assert B.objects.count() == 1 def test_document_hash(self): """Test document in list, dict, set.""" + class User(Document): pass @@ -2455,12 +2505,12 @@ class InstanceTest(MongoDBTestCase): # Make sure docs are properly identified in a list (__eq__ is used # for the comparison). 
all_user_list = list(User.objects.all()) - self.assertIn(u1, all_user_list) - self.assertIn(u2, all_user_list) - self.assertIn(u3, all_user_list) - self.assertNotIn(u4, all_user_list) # New object - self.assertNotIn(b1, all_user_list) # Other object - self.assertNotIn(b2, all_user_list) # Other object + assert u1 in all_user_list + assert u2 in all_user_list + assert u3 in all_user_list + assert u4 not in all_user_list # New object + assert b1 not in all_user_list # Other object + assert b2 not in all_user_list # Other object # Make sure docs can be used as keys in a dict (__hash__ is used # for hashing the docs). @@ -2468,56 +2518,60 @@ class InstanceTest(MongoDBTestCase): for u in User.objects.all(): all_user_dic[u] = "OK" - self.assertEqual(all_user_dic.get(u1, False), "OK") - self.assertEqual(all_user_dic.get(u2, False), "OK") - self.assertEqual(all_user_dic.get(u3, False), "OK") - self.assertEqual(all_user_dic.get(u4, False), False) # New object - self.assertEqual(all_user_dic.get(b1, False), False) # Other object - self.assertEqual(all_user_dic.get(b2, False), False) # Other object + assert all_user_dic.get(u1, False) == "OK" + assert all_user_dic.get(u2, False) == "OK" + assert all_user_dic.get(u3, False) == "OK" + assert all_user_dic.get(u4, False) is False # New object + assert all_user_dic.get(b1, False) is False # Other object + assert all_user_dic.get(b2, False) is False # Other object # Make sure docs are properly identified in a set (__hash__ is used # for hashing the docs). 
all_user_set = set(User.objects.all()) - self.assertIn(u1, all_user_set) - self.assertNotIn(u4, all_user_set) - self.assertNotIn(b1, all_user_list) - self.assertNotIn(b2, all_user_list) + assert u1 in all_user_set + assert u4 not in all_user_set + assert b1 not in all_user_list + assert b2 not in all_user_list # Make sure duplicate docs aren't accepted in the set - self.assertEqual(len(all_user_set), 3) + assert len(all_user_set) == 3 all_user_set.add(u1) all_user_set.add(u2) all_user_set.add(u3) - self.assertEqual(len(all_user_set), 3) + assert len(all_user_set) == 3 def test_picklable(self): - pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) + pickle_doc = PickleTest(number=1, string="One", lists=["1", "2"]) pickle_doc.embedded = PickleEmbedded() - pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved + pickled_doc = pickle.dumps( + pickle_doc + ) # make sure pickling works even before the doc is saved pickle_doc.save() pickled_doc = pickle.dumps(pickle_doc) resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected, pickle_doc) + assert resurrected == pickle_doc # Test pickling changed data pickle_doc.lists.append("3") pickled_doc = pickle.dumps(pickle_doc) resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected, pickle_doc) + assert resurrected == pickle_doc resurrected.string = "Two" resurrected.save() pickle_doc = PickleTest.objects.first() - self.assertEqual(resurrected, pickle_doc) - self.assertEqual(pickle_doc.string, "Two") - self.assertEqual(pickle_doc.lists, ["1", "2", "3"]) + assert resurrected == pickle_doc + assert pickle_doc.string == "Two" + assert pickle_doc.lists == ["1", "2", "3"] def test_regular_document_pickle(self): - pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) - pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved + pickle_doc = PickleTest(number=1, string="One", lists=["1", "2"]) + pickled_doc 
= pickle.dumps( + pickle_doc + ) # make sure pickling works even before the doc is saved pickle_doc.save() pickled_doc = pickle.dumps(pickle_doc) @@ -2527,42 +2581,45 @@ class InstanceTest(MongoDBTestCase): fixtures.PickleTest = fixtures.NewDocumentPickleTest resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected.__class__, - fixtures.NewDocumentPickleTest) - self.assertEqual(resurrected._fields_ordered, - fixtures.NewDocumentPickleTest._fields_ordered) - self.assertNotEqual(resurrected._fields_ordered, - pickle_doc._fields_ordered) + assert resurrected.__class__ == fixtures.NewDocumentPickleTest + assert ( + resurrected._fields_ordered + == fixtures.NewDocumentPickleTest._fields_ordered + ) + assert resurrected._fields_ordered != pickle_doc._fields_ordered # The local PickleTest is still a ref to the original fixtures.PickleTest = PickleTest def test_dynamic_document_pickle(self): pickle_doc = PickleDynamicTest( - name="test", number=1, string="One", lists=['1', '2']) + name="test", number=1, string="One", lists=["1", "2"] + ) pickle_doc.embedded = PickleDynamicEmbedded(foo="Bar") - pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved + pickled_doc = pickle.dumps( + pickle_doc + ) # make sure pickling works even before the doc is saved pickle_doc.save() pickled_doc = pickle.dumps(pickle_doc) resurrected = pickle.loads(pickled_doc) - self.assertEqual(resurrected, pickle_doc) - self.assertEqual(resurrected._fields_ordered, - pickle_doc._fields_ordered) - self.assertEqual(resurrected._dynamic_fields.keys(), - pickle_doc._dynamic_fields.keys()) + assert resurrected == pickle_doc + assert resurrected._fields_ordered == pickle_doc._fields_ordered + assert resurrected._dynamic_fields.keys() == pickle_doc._dynamic_fields.keys() - self.assertEqual(resurrected.embedded, pickle_doc.embedded) - self.assertEqual(resurrected.embedded._fields_ordered, - pickle_doc.embedded._fields_ordered) - 
self.assertEqual(resurrected.embedded._dynamic_fields.keys(), - pickle_doc.embedded._dynamic_fields.keys()) + assert resurrected.embedded == pickle_doc.embedded + assert ( + resurrected.embedded._fields_ordered == pickle_doc.embedded._fields_ordered + ) + assert ( + resurrected.embedded._dynamic_fields.keys() + == pickle_doc.embedded._dynamic_fields.keys() + ) def test_picklable_on_signals(self): - pickle_doc = PickleSignalsTest( - number=1, string="One", lists=['1', '2']) + pickle_doc = PickleSignalsTest(number=1, string="One", lists=["1", "2"]) pickle_doc.embedded = PickleEmbedded() pickle_doc.save() pickle_doc.delete() @@ -2571,13 +2628,14 @@ class InstanceTest(MongoDBTestCase): """Test creating a field with a field name that would override the "validate" method. """ - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): + class Blog(Document): validate = DictField() def test_mutating_documents(self): class B(EmbeddedDocument): - field1 = StringField(default='field1') + field1 = StringField(default="field1") class A(Document): b = EmbeddedDocumentField(B, default=lambda: B()) @@ -2587,27 +2645,28 @@ class InstanceTest(MongoDBTestCase): a = A() a.save() a.reload() - self.assertEqual(a.b.field1, 'field1') + assert a.b.field1 == "field1" class C(EmbeddedDocument): - c_field = StringField(default='cfield') + c_field = StringField(default="cfield") class B(EmbeddedDocument): - field1 = StringField(default='field1') + field1 = StringField(default="field1") field2 = EmbeddedDocumentField(C, default=lambda: C()) class A(Document): b = EmbeddedDocumentField(B, default=lambda: B()) a = A.objects()[0] - a.b.field2.c_field = 'new value' + a.b.field2.c_field = "new value" a.save() a.reload() - self.assertEqual(a.b.field2.c_field, 'new value') + assert a.b.field2.c_field == "new value" def test_can_save_false_values(self): """Ensures you can save False values on save.""" + class Doc(Document): foo = StringField() archived = 
BooleanField(default=False, required=True) @@ -2619,10 +2678,11 @@ class InstanceTest(MongoDBTestCase): d.archived = False d.save() - self.assertEqual(Doc.objects(archived=False).count(), 1) + assert Doc.objects(archived=False).count() == 1 def test_can_save_false_values_dynamic(self): """Ensures you can save False values on dynamic docs.""" + class Doc(DynamicDocument): foo = StringField() @@ -2633,10 +2693,11 @@ class InstanceTest(MongoDBTestCase): d.archived = False d.save() - self.assertEqual(Doc.objects(archived=False).count(), 1) + assert Doc.objects(archived=False).count() == 1 def test_do_not_save_unchanged_references(self): """Ensures cascading saves dont auto update""" + class Job(Document): name = StringField() @@ -2655,8 +2716,10 @@ class InstanceTest(MongoDBTestCase): person = Person(name="name", age=10, job=job) from pymongo.collection import Collection + orig_update = Collection.update try: + def fake_update(*args, **kwargs): self.fail("Unexpected update for %s" % args[0].name) return orig_update(*args, **kwargs) @@ -2670,9 +2733,9 @@ class InstanceTest(MongoDBTestCase): """DB Alias tests.""" # mongoenginetest - Is default connection alias from setUp() # Register Aliases - register_connection('testdb-1', 'mongoenginetest2') - register_connection('testdb-2', 'mongoenginetest3') - register_connection('testdb-3', 'mongoenginetest4') + register_connection("testdb-1", "mongoenginetest2") + register_connection("testdb-2", "mongoenginetest3") + register_connection("testdb-3", "mongoenginetest4") class User(Document): name = StringField() @@ -2691,8 +2754,8 @@ class InstanceTest(MongoDBTestCase): hp = Book.objects.create(name="Harry Potter") # Selects - self.assertEqual(User.objects.first(), bob) - self.assertEqual(Book.objects.first(), hp) + assert User.objects.first() == bob + assert Book.objects.first() == hp # DeReference class AuthorBooks(Document): @@ -2706,55 +2769,52 @@ class InstanceTest(MongoDBTestCase): ab = AuthorBooks.objects.create(author=bob, 
book=hp) # select - self.assertEqual(AuthorBooks.objects.first(), ab) - self.assertEqual(AuthorBooks.objects.first().book, hp) - self.assertEqual(AuthorBooks.objects.first().author, bob) - self.assertEqual(AuthorBooks.objects.filter(author=bob).first(), ab) - self.assertEqual(AuthorBooks.objects.filter(book=hp).first(), ab) + assert AuthorBooks.objects.first() == ab + assert AuthorBooks.objects.first().book == hp + assert AuthorBooks.objects.first().author == bob + assert AuthorBooks.objects.filter(author=bob).first() == ab + assert AuthorBooks.objects.filter(book=hp).first() == ab # DB Alias - self.assertEqual(User._get_db(), get_db("testdb-1")) - self.assertEqual(Book._get_db(), get_db("testdb-2")) - self.assertEqual(AuthorBooks._get_db(), get_db("testdb-3")) + assert User._get_db() == get_db("testdb-1") + assert Book._get_db() == get_db("testdb-2") + assert AuthorBooks._get_db() == get_db("testdb-3") # Collections - self.assertEqual( - User._get_collection(), - get_db("testdb-1")[User._get_collection_name()]) - self.assertEqual( - Book._get_collection(), - get_db("testdb-2")[Book._get_collection_name()]) - self.assertEqual( - AuthorBooks._get_collection(), - get_db("testdb-3")[AuthorBooks._get_collection_name()]) + assert User._get_collection() == get_db("testdb-1")[User._get_collection_name()] + assert Book._get_collection() == get_db("testdb-2")[Book._get_collection_name()] + assert ( + AuthorBooks._get_collection() + == get_db("testdb-3")[AuthorBooks._get_collection_name()] + ) def test_db_alias_overrides(self): """Test db_alias can be overriden.""" # Register a connection with db_alias testdb-2 - register_connection('testdb-2', 'mongoenginetest2') + register_connection("testdb-2", "mongoenginetest2") class A(Document): """Uses default db_alias """ + name = StringField() meta = {"allow_inheritance": True} class B(A): """Uses testdb-2 db_alias """ + meta = {"db_alias": "testdb-2"} A.objects.all() - self.assertEqual('testdb-2', B._meta.get('db_alias')) - 
self.assertEqual('mongoenginetest', - A._get_collection().database.name) - self.assertEqual('mongoenginetest2', - B._get_collection().database.name) + assert "testdb-2" == B._meta.get("db_alias") + assert "mongoenginetest" == A._get_collection().database.name + assert "mongoenginetest2" == B._get_collection().database.name def test_db_alias_propagates(self): """db_alias propagates?""" - register_connection('testdb-1', 'mongoenginetest2') + register_connection("testdb-1", "mongoenginetest2") class A(Document): name = StringField() @@ -2763,10 +2823,11 @@ class InstanceTest(MongoDBTestCase): class B(A): pass - self.assertEqual('testdb-1', B._meta.get('db_alias')) + assert "testdb-1" == B._meta.get("db_alias") def test_db_ref_usage(self): """DB Ref usage in dict_fields.""" + class User(Document): name = StringField() @@ -2774,9 +2835,7 @@ class InstanceTest(MongoDBTestCase): name = StringField() author = ReferenceField(User) extra = DictField() - meta = { - 'ordering': ['+name'] - } + meta = {"ordering": ["+name"]} def __unicode__(self): return self.name @@ -2798,12 +2857,19 @@ class InstanceTest(MongoDBTestCase): peter = User.objects.create(name="Peter") # Bob - Book.objects.create(name="1", author=bob, extra={ - "a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]}) - Book.objects.create(name="2", author=bob, extra={ - "a": bob.to_dbref(), "b": karl.to_dbref()}) - Book.objects.create(name="3", author=bob, extra={ - "a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]}) + Book.objects.create( + name="1", + author=bob, + extra={"a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]}, + ) + Book.objects.create( + name="2", author=bob, extra={"a": bob.to_dbref(), "b": karl.to_dbref()} + ) + Book.objects.create( + name="3", + author=bob, + extra={"a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]}, + ) Book.objects.create(name="4", author=bob) # Jon @@ -2811,86 +2877,105 @@ class InstanceTest(MongoDBTestCase): 
Book.objects.create(name="6", author=peter) Book.objects.create(name="7", author=jon) Book.objects.create(name="8", author=jon) - Book.objects.create(name="9", author=jon, - extra={"a": peter.to_dbref()}) + Book.objects.create(name="9", author=jon, extra={"a": peter.to_dbref()}) # Checks - self.assertEqual(",".join([str(b) for b in Book.objects.all()]), - "1,2,3,4,5,6,7,8,9") + assert ",".join([str(b) for b in Book.objects.all()]) == "1,2,3,4,5,6,7,8,9" # bob related books - self.assertEqual(",".join([str(b) for b in Book.objects.filter( - Q(extra__a=bob) | - Q(author=bob) | - Q(extra__b=bob))]), - "1,2,3,4") + assert ( + ",".join( + [ + str(b) + for b in Book.objects.filter( + Q(extra__a=bob) | Q(author=bob) | Q(extra__b=bob) + ) + ] + ) + == "1,2,3,4" + ) # Susan & Karl related books - self.assertEqual(",".join([str(b) for b in Book.objects.filter( - Q(extra__a__all=[karl, susan]) | - Q(author__all=[karl, susan]) | - Q(extra__b__all=[ - karl.to_dbref(), susan.to_dbref()])) - ]), "1") + assert ( + ",".join( + [ + str(b) + for b in Book.objects.filter( + Q(extra__a__all=[karl, susan]) + | Q(author__all=[karl, susan]) + | Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()]) + ) + ] + ) + == "1" + ) # $Where - self.assertEqual(u",".join([str(b) for b in Book.objects.filter( - __raw__={ - "$where": """ + assert ( + u",".join( + [ + str(b) + for b in Book.objects.filter( + __raw__={ + "$where": """ function(){ return this.name == '1' || this.name == '2';}""" - })]), - "1,2") + } + ) + ] + ) + == "1,2" + ) def test_switch_db_instance(self): - register_connection('testdb-1', 'mongoenginetest2') + register_connection("testdb-1", "mongoenginetest2") class Group(Document): name = StringField() Group.drop_collection() - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: Group.drop_collection() Group(name="hello - default").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() group = 
Group.objects.first() - group.switch_db('testdb-1') + group.switch_db("testdb-1") group.name = "hello - testdb!" group.save() - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: group = Group.objects.first() - self.assertEqual("hello - testdb!", group.name) + assert "hello - testdb!" == group.name group = Group.objects.first() - self.assertEqual("hello - default", group.name) + assert "hello - default" == group.name # Slightly contrived now - perform an update # Only works as they have the same object_id - group.switch_db('testdb-1') + group.switch_db("testdb-1") group.update(set__name="hello - update") - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: group = Group.objects.first() - self.assertEqual("hello - update", group.name) + assert "hello - update" == group.name Group.drop_collection() - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() group = Group.objects.first() - self.assertEqual("hello - default", group.name) + assert "hello - default" == group.name # Totally contrived now - perform a delete # Only works as they have the same object_id - group.switch_db('testdb-1') + group.switch_db("testdb-1") group.delete() - with switch_db(Group, 'testdb-1') as Group: - self.assertEqual(0, Group.objects.count()) + with switch_db(Group, "testdb-1") as Group: + assert 0 == Group.objects.count() group = Group.objects.first() - self.assertEqual("hello - default", group.name) + assert "hello - default" == group.name def test_load_undefined_fields(self): class User(Document): @@ -2898,34 +2983,31 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'foo': 'Bar', - 'data': [1, 2, 3] - }) + User._get_collection().insert_one( + {"name": "John", "foo": "Bar", "data": [1, 2, 3]} + ) - self.assertRaises(FieldDoesNotExist, User.objects.first) + with pytest.raises(FieldDoesNotExist): + 
User.objects.first() def test_load_undefined_fields_with_strict_false(self): class User(Document): name = StringField() - meta = {'strict': False} + meta = {"strict": False} User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'foo': 'Bar', - 'data': [1, 2, 3] - }) + User._get_collection().insert_one( + {"name": "John", "foo": "Bar", "data": [1, 2, 3]} + ) user = User.objects.first() - self.assertEqual(user.name, 'John') - self.assertFalse(hasattr(user, 'foo')) - self.assertEqual(user._data['foo'], 'Bar') - self.assertFalse(hasattr(user, 'data')) - self.assertEqual(user._data['data'], [1, 2, 3]) + assert user.name == "John" + assert not hasattr(user, "foo") + assert user._data["foo"] == "Bar" + assert not hasattr(user, "data") + assert user._data["data"] == [1, 2, 3] def test_load_undefined_fields_on_embedded_document(self): class Thing(EmbeddedDocument): @@ -2937,16 +3019,15 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'thing': { - 'name': 'My thing', - 'foo': 'Bar', - 'data': [1, 2, 3] + User._get_collection().insert_one( + { + "name": "John", + "thing": {"name": "My thing", "foo": "Bar", "data": [1, 2, 3]}, } - }) + ) - self.assertRaises(FieldDoesNotExist, User.objects.first) + with pytest.raises(FieldDoesNotExist): + User.objects.first() def test_load_undefined_fields_on_embedded_document_with_strict_false_on_doc(self): class Thing(EmbeddedDocument): @@ -2956,26 +3037,25 @@ class InstanceTest(MongoDBTestCase): name = StringField() thing = EmbeddedDocumentField(Thing) - meta = {'strict': False} + meta = {"strict": False} User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'thing': { - 'name': 'My thing', - 'foo': 'Bar', - 'data': [1, 2, 3] + User._get_collection().insert_one( + { + "name": "John", + "thing": {"name": "My thing", "foo": "Bar", "data": [1, 2, 3]}, } - }) + ) - self.assertRaises(FieldDoesNotExist, User.objects.first) 
+ with pytest.raises(FieldDoesNotExist): + User.objects.first() def test_load_undefined_fields_on_embedded_document_with_strict_false(self): class Thing(EmbeddedDocument): name = StringField() - meta = {'strict': False} + meta = {"strict": False} class User(Document): name = StringField() @@ -2983,22 +3063,20 @@ class InstanceTest(MongoDBTestCase): User.drop_collection() - User._get_collection().insert_one({ - 'name': 'John', - 'thing': { - 'name': 'My thing', - 'foo': 'Bar', - 'data': [1, 2, 3] + User._get_collection().insert_one( + { + "name": "John", + "thing": {"name": "My thing", "foo": "Bar", "data": [1, 2, 3]}, } - }) + ) user = User.objects.first() - self.assertEqual(user.name, 'John') - self.assertEqual(user.thing.name, 'My thing') - self.assertFalse(hasattr(user.thing, 'foo')) - self.assertEqual(user.thing._data['foo'], 'Bar') - self.assertFalse(hasattr(user.thing, 'data')) - self.assertEqual(user.thing._data['data'], [1, 2, 3]) + assert user.name == "John" + assert user.thing.name == "My thing" + assert not hasattr(user.thing, "foo") + assert user.thing._data["foo"] == "Bar" + assert not hasattr(user.thing, "data") + assert user.thing._data["data"] == [1, 2, 3] def test_spaces_in_keys(self): class Embedded(DynamicEmbeddedDocument): @@ -3009,20 +3087,18 @@ class InstanceTest(MongoDBTestCase): Doc.drop_collection() doc = Doc() - setattr(doc, 'hello world', 1) + setattr(doc, "hello world", 1) doc.save() - one = Doc.objects.filter(**{'hello world': 1}).count() - self.assertEqual(1, one) + one = Doc.objects.filter(**{"hello world": 1}).count() + assert 1 == one def test_shard_key(self): class LogEntry(Document): machine = StringField() log = StringField() - meta = { - 'shard_key': ('machine',) - } + meta = {"shard_key": ("machine",)} LogEntry.drop_collection() @@ -3030,13 +3106,13 @@ class InstanceTest(MongoDBTestCase): log.machine = "Localhost" log.save() - self.assertTrue(log.id is not None) + assert log.id is not None log.log = "Saving" log.save() # try to 
change the shard key - with self.assertRaises(OperationError): + with pytest.raises(OperationError): log.machine = "127.0.0.1" def test_shard_key_in_embedded_document(self): @@ -3044,24 +3120,22 @@ class InstanceTest(MongoDBTestCase): foo = StringField() class Bar(Document): - meta = { - 'shard_key': ('foo.foo',) - } + meta = {"shard_key": ("foo.foo",)} foo = EmbeddedDocumentField(Foo) bar = StringField() - foo_doc = Foo(foo='hello') - bar_doc = Bar(foo=foo_doc, bar='world') + foo_doc = Foo(foo="hello") + bar_doc = Bar(foo=foo_doc, bar="world") bar_doc.save() - self.assertTrue(bar_doc.id is not None) + assert bar_doc.id is not None - bar_doc.bar = 'baz' + bar_doc.bar = "baz" bar_doc.save() # try to change the shard key - with self.assertRaises(OperationError): - bar_doc.foo.foo = 'something' + with pytest.raises(OperationError): + bar_doc.foo.foo = "something" bar_doc.save() def test_shard_key_primary(self): @@ -3069,9 +3143,7 @@ class InstanceTest(MongoDBTestCase): machine = StringField(primary_key=True) log = StringField() - meta = { - 'shard_key': ('machine',) - } + meta = {"shard_key": ("machine",)} LogEntry.drop_collection() @@ -3079,13 +3151,13 @@ class InstanceTest(MongoDBTestCase): log.machine = "Localhost" log.save() - self.assertTrue(log.id is not None) + assert log.id is not None log.log = "Saving" log.save() # try to change the shard key - with self.assertRaises(OperationError): + with pytest.raises(OperationError): log.machine = "127.0.0.1" def test_kwargs_simple(self): @@ -3097,15 +3169,13 @@ class InstanceTest(MongoDBTestCase): doc = EmbeddedDocumentField(Embedded) def __eq__(self, other): - return (self.doc_name == other.doc_name and - self.doc == other.doc) + return self.doc_name == other.doc_name and self.doc == other.doc classic_doc = Doc(doc_name="my doc", doc=Embedded(name="embedded doc")) - dict_doc = Doc(**{"doc_name": "my doc", - "doc": {"name": "embedded doc"}}) + dict_doc = Doc(**{"doc_name": "my doc", "doc": {"name": "embedded doc"}}) - 
self.assertEqual(classic_doc, dict_doc) - self.assertEqual(classic_doc._data, dict_doc._data) + assert classic_doc == dict_doc + assert classic_doc._data == dict_doc._data def test_kwargs_complex(self): class Embedded(EmbeddedDocument): @@ -3116,65 +3186,69 @@ class InstanceTest(MongoDBTestCase): docs = ListField(EmbeddedDocumentField(Embedded)) def __eq__(self, other): - return (self.doc_name == other.doc_name and - self.docs == other.docs) + return self.doc_name == other.doc_name and self.docs == other.docs - classic_doc = Doc(doc_name="my doc", docs=[ - Embedded(name="embedded doc1"), - Embedded(name="embedded doc2")]) - dict_doc = Doc(**{"doc_name": "my doc", - "docs": [{"name": "embedded doc1"}, - {"name": "embedded doc2"}]}) + classic_doc = Doc( + doc_name="my doc", + docs=[Embedded(name="embedded doc1"), Embedded(name="embedded doc2")], + ) + dict_doc = Doc( + **{ + "doc_name": "my doc", + "docs": [{"name": "embedded doc1"}, {"name": "embedded doc2"}], + } + ) - self.assertEqual(classic_doc, dict_doc) - self.assertEqual(classic_doc._data, dict_doc._data) + assert classic_doc == dict_doc + assert classic_doc._data == dict_doc._data def test_positional_creation(self): - """Ensure that document may be created using positional arguments.""" - person = self.Person("Test User", 42) - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 42) + """Document cannot be instantiated using positional arguments.""" + with pytest.raises(TypeError) as exc_info: + self.Person("Test User", 42) + + expected_msg = ( + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." 
+ ) + assert str(exc_info.value) == expected_msg def test_mixed_creation(self): - """Ensure that document may be created using mixed arguments.""" - person = self.Person("Test User", age=42) - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 42) + """Document cannot be instantiated using mixed arguments.""" + with pytest.raises(TypeError) as exc_info: + self.Person("Test User", age=42) + + expected_msg = ( + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." + ) + assert str(exc_info.value) == expected_msg def test_positional_creation_embedded(self): - """Ensure that embedded document may be created using positional - arguments. - """ - job = self.Job("Test Job", 4) - self.assertEqual(job.name, "Test Job") - self.assertEqual(job.years, 4) + """Embedded document cannot be created using positional arguments.""" + with pytest.raises(TypeError) as exc_info: + self.Job("Test Job", 4) + + expected_msg = ( + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." + ) + assert str(exc_info.value) == expected_msg def test_mixed_creation_embedded(self): - """Ensure that embedded document may be created using mixed - arguments. - """ - job = self.Job("Test Job", years=4) - self.assertEqual(job.name, "Test Job") - self.assertEqual(job.years, 4) + """Embedded document cannot be created using mixed arguments.""" + with pytest.raises(TypeError) as exc_info: + self.Job("Test Job", years=4) - def test_mixed_creation_dynamic(self): - """Ensure that document may be created using mixed arguments.""" - class Person(DynamicDocument): - name = StringField() - - person = Person("Test User", age=42) - self.assertEqual(person.name, "Test User") - self.assertEqual(person.age, 42) - - def test_bad_mixed_creation(self): - """Ensure that document gives correct error when duplicating - arguments. 
- """ - with self.assertRaises(TypeError): - return self.Person("Test User", 42, name="Bad User") + expected_msg = ( + "Instantiating a document with positional arguments is not " + "supported. Please use `field_name=value` keyword arguments." + ) + assert str(exc_info.value) == expected_msg def test_data_contains_id_field(self): """Ensure that asking for _data returns 'id'.""" + class Person(Document): name = StringField() @@ -3182,8 +3256,8 @@ class InstanceTest(MongoDBTestCase): Person(name="Harry Potter").save() person = Person.objects.first() - self.assertIn('id', person._data.keys()) - self.assertEqual(person._data.get('id'), person.id) + assert "id" in person._data.keys() + assert person._data.get("id") == person.id def test_complex_nesting_document_and_embedded_document(self): class Macro(EmbeddedDocument): @@ -3200,7 +3274,7 @@ class InstanceTest(MongoDBTestCase): def expand(self): self.flattened_parameter = {} - for parameter_name, parameter in iteritems(self.parameters): + for parameter_name, parameter in self.parameters.items(): parameter.expand() class NodesSystem(Document): @@ -3208,7 +3282,7 @@ class InstanceTest(MongoDBTestCase): nodes = MapField(ReferenceField(Node, dbref=False)) def save(self, *args, **kwargs): - for node_name, node in iteritems(self.nodes): + for node_name, node in self.nodes.items(): node.expand() node.save(*args, **kwargs) super(NodesSystem, self).save(*args, **kwargs) @@ -3223,9 +3297,9 @@ class InstanceTest(MongoDBTestCase): system.save() system = NodesSystem.objects.first() - self.assertEqual( - "UNDEFINED", - system.nodes["node"].parameters["param"].macros["test"].value) + assert ( + "UNDEFINED" == system.nodes["node"].parameters["param"].macros["test"].value + ) def test_embedded_document_equality(self): class Test(Document): @@ -3235,15 +3309,48 @@ class InstanceTest(MongoDBTestCase): ref = ReferenceField(Test) Test.drop_collection() - test = Test(field='123').save() # has id + test = Test(field="123").save() # has id e = 
Embedded(ref=test) f1 = Embedded._from_son(e.to_mongo()) f2 = Embedded._from_son(e.to_mongo()) - self.assertEqual(f1, f2) + assert f1 == f2 f1.ref # Dereferences lazily - self.assertEqual(f1, f2) + assert f1 == f2 + + def test_embedded_document_equality_with_lazy_ref(self): + class Job(EmbeddedDocument): + boss = LazyReferenceField("Person") + boss_dbref = LazyReferenceField("Person", dbref=True) + + class Person(Document): + job = EmbeddedDocumentField(Job) + + Person.drop_collection() + + boss = Person() + worker = Person(job=Job(boss=boss, boss_dbref=boss)) + boss.save() + worker.save() + + worker1 = Person.objects.get(id=worker.id) + + # worker1.job should be equal to the job used originally to create the + # document. + assert worker1.job == worker.job + + # worker1.job should be equal to a newly created Job EmbeddedDocument + # using either the Boss object or his ID. + assert worker1.job == Job(boss=boss, boss_dbref=boss) + assert worker1.job == Job(boss=boss.id, boss_dbref=boss.id) + + # The above equalities should also hold after worker1.job.boss has been + # fetch()ed. 
+ worker1.job.boss.fetch() + assert worker1.job == worker.job + assert worker1.job == Job(boss=boss, boss_dbref=boss) + assert worker1.job == Job(boss=boss.id, boss_dbref=boss.id) def test_dbref_equality(self): class Test2(Document): @@ -3254,71 +3361,71 @@ class InstanceTest(MongoDBTestCase): class Test(Document): name = StringField() - test2 = ReferenceField('Test2') - test3 = ReferenceField('Test3') + test2 = ReferenceField("Test2") + test3 = ReferenceField("Test3") Test.drop_collection() Test2.drop_collection() Test3.drop_collection() - t2 = Test2(name='a') + t2 = Test2(name="a") t2.save() - t3 = Test3(name='x') + t3 = Test3(name="x") t3.id = t2.id t3.save() - t = Test(name='b', test2=t2, test3=t3) + t = Test(name="b", test2=t2, test3=t3) f = Test._from_son(t.to_mongo()) - dbref2 = f._data['test2'] + dbref2 = f._data["test2"] obj2 = f.test2 - self.assertIsInstance(dbref2, DBRef) - self.assertIsInstance(obj2, Test2) - self.assertEqual(obj2.id, dbref2.id) - self.assertEqual(obj2, dbref2) - self.assertEqual(dbref2, obj2) + assert isinstance(dbref2, DBRef) + assert isinstance(obj2, Test2) + assert obj2.id == dbref2.id + assert obj2 == dbref2 + assert dbref2 == obj2 - dbref3 = f._data['test3'] + dbref3 = f._data["test3"] obj3 = f.test3 - self.assertIsInstance(dbref3, DBRef) - self.assertIsInstance(obj3, Test3) - self.assertEqual(obj3.id, dbref3.id) - self.assertEqual(obj3, dbref3) - self.assertEqual(dbref3, obj3) + assert isinstance(dbref3, DBRef) + assert isinstance(obj3, Test3) + assert obj3.id == dbref3.id + assert obj3 == dbref3 + assert dbref3 == obj3 - self.assertEqual(obj2.id, obj3.id) - self.assertEqual(dbref2.id, dbref3.id) - self.assertNotEqual(dbref2, dbref3) - self.assertNotEqual(dbref3, dbref2) - self.assertNotEqual(dbref2, dbref3) - self.assertNotEqual(dbref3, dbref2) + assert obj2.id == obj3.id + assert dbref2.id == dbref3.id + assert dbref2 != dbref3 + assert dbref3 != dbref2 + assert dbref2 != dbref3 + assert dbref3 != dbref2 - 
self.assertNotEqual(obj2, dbref3) - self.assertNotEqual(dbref3, obj2) - self.assertNotEqual(obj2, dbref3) - self.assertNotEqual(dbref3, obj2) + assert obj2 != dbref3 + assert dbref3 != obj2 + assert obj2 != dbref3 + assert dbref3 != obj2 - self.assertNotEqual(obj3, dbref2) - self.assertNotEqual(dbref2, obj3) - self.assertNotEqual(obj3, dbref2) - self.assertNotEqual(dbref2, obj3) + assert obj3 != dbref2 + assert dbref2 != obj3 + assert obj3 != dbref2 + assert dbref2 != obj3 def test_default_values(self): class Person(Document): created_on = DateTimeField(default=lambda: datetime.utcnow()) name = StringField() - p = Person(name='alon') + p = Person(name="alon") p.save() - orig_created_on = Person.objects().only('created_on')[0].created_on + orig_created_on = Person.objects().only("created_on")[0].created_on - p2 = Person.objects().only('name')[0] - p2.name = 'alon2' + p2 = Person.objects().only("name")[0] + p2.name = "alon2" p2.save() - p3 = Person.objects().only('created_on')[0] - self.assertEqual(orig_created_on, p3.created_on) + p3 = Person.objects().only("created_on")[0] + assert orig_created_on == p3.created_on class Person(Document): created_on = DateTimeField(default=lambda: datetime.utcnow()) @@ -3327,87 +3434,127 @@ class InstanceTest(MongoDBTestCase): p4 = Person.objects()[0] p4.save() - self.assertEqual(p4.height, 189) + assert p4.height == 189 # However the default will not be fixed in DB - self.assertEqual(Person.objects(height=189).count(), 0) + assert Person.objects(height=189).count() == 0 # alter DB for the new default coll = Person._get_collection() for person in Person.objects.as_pymongo(): - if 'height' not in person: - coll.update_one({'_id': person['_id']}, {'$set': {'height': 189}}) + if "height" not in person: + coll.update_one({"_id": person["_id"]}, {"$set": {"height": 189}}) - self.assertEqual(Person.objects(height=189).count(), 1) + assert Person.objects(height=189).count() == 1 - def test_from_son(self): - # 771 - class 
MyPerson(self.Person): - meta = dict(shard_key=["id"]) - p = MyPerson.from_json('{"name": "name", "age": 27}', created=True) - self.assertEqual(p.id, None) - p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here - p = MyPerson._from_son({"name": "name", "age": 27}, created=True) - self.assertEqual(p.id, None) - p.id = "12345" # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here + def test_shard_key_mutability_after_from_json(self): + """Ensure that a document ID can be modified after from_json. - def test_from_son_created_False_without_id(self): - class MyPerson(Document): + If you instantiate a document by using from_json/_from_son and you + indicate that this should be considered a new document (vs a doc that + already exists in the database), then you should be able to modify + fields that are part of its shard key (note that this is not permitted + on docs that are already persisted). + + See https://github.com/mongoengine/mongoengine/issues/771 for details. + """ + + class Person(Document): + name = StringField() + age = IntField() + meta = {"shard_key": ("id", "name")} + + p = Person.from_json('{"name": "name", "age": 27}', created=True) + assert p._created is True + p.name = "new name" + p.id = "12345" + assert p.name == "new name" + assert p.id == "12345" + + def test_shard_key_mutability_after_from_son(self): + """Ensure that a document ID can be modified after _from_son. + + See `test_shard_key_mutability_after_from_json` above for more details. 
+ """ + + class Person(Document): + name = StringField() + age = IntField() + meta = {"shard_key": ("id", "name")} + + p = Person._from_son({"name": "name", "age": 27}, created=True) + assert p._created is True + p.name = "new name" + p.id = "12345" + assert p.name == "new name" + assert p.id == "12345" + + def test_from_json_created_false_without_an_id(self): + class Person(Document): name = StringField() - MyPerson.objects.delete() + Person.objects.delete() - p = MyPerson.from_json('{"name": "a_fancy_name"}', created=False) - self.assertFalse(p._created) - self.assertIsNone(p.id) + p = Person.from_json('{"name": "name"}', created=False) + assert p._created is False + assert p.id is None + + # Make sure the document is subsequently persisted correctly. p.save() - self.assertIsNotNone(p.id) - saved_p = MyPerson.objects.get(id=p.id) - self.assertEqual(saved_p.name, 'a_fancy_name') + assert p.id is not None + saved_p = Person.objects.get(id=p.id) + assert saved_p.name == "name" - def test_from_son_created_False_with_id(self): - # 1854 - class MyPerson(Document): + def test_from_json_created_false_with_an_id(self): + """See https://github.com/mongoengine/mongoengine/issues/1854""" + + class Person(Document): name = StringField() - MyPerson.objects.delete() + Person.objects.delete() - p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False) - self.assertFalse(p._created) - self.assertEqual(p._changed_fields, []) - self.assertEqual(p.name, 'a_fancy_name') - self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) + p = Person.from_json( + '{"_id": "5b85a8b04ec5dc2da388296e", "name": "name"}', created=False + ) + assert p._created is False + assert p._changed_fields == [] + assert p.name == "name" + assert p.id == ObjectId("5b85a8b04ec5dc2da388296e") p.save() - with self.assertRaises(DoesNotExist): - # Since created=False and we gave an id in the json and _changed_fields is empty - # mongoengine assumes that the document 
exits with that structure already - # and calling .save() didn't save anything - MyPerson.objects.get(id=p.id) + with pytest.raises(DoesNotExist): + # Since the object is considered as already persisted (thanks to + # `created=False` and an existing ID), and we haven't changed any + # fields (i.e. `_changed_fields` is empty), the document is + # considered unchanged and hence the `save()` call above did + # nothing. + Person.objects.get(id=p.id) - self.assertFalse(p._created) - p.name = 'a new fancy name' - self.assertEqual(p._changed_fields, ['name']) + assert not p._created + p.name = "a new name" + assert p._changed_fields == ["name"] p.save() - saved_p = MyPerson.objects.get(id=p.id) - self.assertEqual(saved_p.name, p.name) + saved_p = Person.objects.get(id=p.id) + assert saved_p.name == p.name - def test_from_son_created_True_with_an_id(self): - class MyPerson(Document): + def test_from_json_created_true_with_an_id(self): + class Person(Document): name = StringField() - MyPerson.objects.delete() + Person.objects.delete() - p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True) - self.assertTrue(p._created) - self.assertEqual(p._changed_fields, []) - self.assertEqual(p.name, 'a_fancy_name') - self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) + p = Person.from_json( + '{"_id": "5b85a8b04ec5dc2da388296e", "name": "name"}', created=True + ) + assert p._created + assert p._changed_fields == [] + assert p.name == "name" + assert p.id == ObjectId("5b85a8b04ec5dc2da388296e") p.save() - saved_p = MyPerson.objects.get(id=p.id) - self.assertEqual(saved_p, p) - self.assertEqual(p.name, 'a_fancy_name') + saved_p = Person.objects.get(id=p.id) + assert saved_p == p + assert saved_p.name == "name" def test_null_field(self): # 734 @@ -3421,37 +3568,38 @@ class InstanceTest(MongoDBTestCase): cdt_fld = ComplexDateTimeField(null=True) User.objects.delete() - u = User(name='user') + u = User(name="user") u.save() - u_from_db 
= User.objects.get(name='user') + u_from_db = User.objects.get(name="user") u_from_db.height = None u_from_db.save() - self.assertEqual(u_from_db.height, None) + assert u_from_db.height is None # 864 - self.assertEqual(u_from_db.str_fld, None) - self.assertEqual(u_from_db.int_fld, None) - self.assertEqual(u_from_db.flt_fld, None) - self.assertEqual(u_from_db.dt_fld, None) - self.assertEqual(u_from_db.cdt_fld, None) + assert u_from_db.str_fld is None + assert u_from_db.int_fld is None + assert u_from_db.flt_fld is None + assert u_from_db.dt_fld is None + assert u_from_db.cdt_fld is None # 735 User.objects.delete() - u = User(name='user') + u = User(name="user") u.save() - User.objects(name='user').update_one(set__height=None, upsert=True) - u_from_db = User.objects.get(name='user') - self.assertEqual(u_from_db.height, None) + User.objects(name="user").update_one(set__height=None, upsert=True) + u_from_db = User.objects.get(name="user") + assert u_from_db.height is None def test_not_saved_eq(self): """Ensure we can compare documents not saved. 
""" + class Person(Document): pass p = Person() p1 = Person() - self.assertNotEqual(p, p1) - self.assertEqual(p, p) + assert p != p1 + assert p == p def test_list_iter(self): # 914 @@ -3459,18 +3607,64 @@ class InstanceTest(MongoDBTestCase): v = StringField() class A(Document): - l = ListField(EmbeddedDocumentField(B)) + array = ListField(EmbeddedDocumentField(B)) A.objects.delete() - A(l=[B(v='1'), B(v='2'), B(v='3')]).save() + A(array=[B(v="1"), B(v="2"), B(v="3")]).save() a = A.objects.get() - self.assertEqual(a.l._instance, a) - for idx, b in enumerate(a.l): - self.assertEqual(b._instance, a) - self.assertEqual(idx, 2) + assert a.array._instance == a + for idx, b in enumerate(a.array): + assert b._instance == a + assert idx == 2 + + def test_updating_listfield_manipulate_list(self): + class Company(Document): + name = StringField() + employees = ListField(field=DictField()) + + Company.drop_collection() + + comp = Company(name="BigBank", employees=[{"name": "John"}]) + comp.save() + comp.employees.append({"name": "Bill"}) + comp.save() + + stored_comp = get_as_pymongo(comp) + self.assertEqual( + stored_comp, + { + "_id": comp.id, + "employees": [{"name": "John"}, {"name": "Bill"}], + "name": "BigBank", + }, + ) + + comp = comp.reload() + comp.employees[0]["color"] = "red" + comp.employees[-1]["color"] = "blue" + comp.employees[-1].update({"size": "xl"}) + comp.save() + + assert len(comp.employees) == 2 + assert comp.employees[0] == {"name": "John", "color": "red"} + assert comp.employees[1] == {"name": "Bill", "size": "xl", "color": "blue"} + + stored_comp = get_as_pymongo(comp) + self.assertEqual( + stored_comp, + { + "_id": comp.id, + "employees": [ + {"name": "John", "color": "red"}, + {"size": "xl", "color": "blue", "name": "Bill"}, + ], + "name": "BigBank", + }, + ) def test_falsey_pk(self): """Ensure that we can create and update a document with Falsey PK.""" + class Person(Document): age = IntField(primary_key=True) height = FloatField() @@ -3484,6 
+3678,7 @@ class InstanceTest(MongoDBTestCase): def test_push_with_position(self): """Ensure that push with position works properly for an instance.""" + class BlogPost(Document): slug = StringField() tags = ListField(StringField()) @@ -3495,10 +3690,11 @@ class InstanceTest(MongoDBTestCase): blog.update(push__tags__0=["mongodb", "code"]) blog.reload() - self.assertEqual(blog.tags, ['mongodb', 'code', 'python']) + assert blog.tags == ["mongodb", "code", "python"] def test_push_nested_list(self): """Ensure that push update works in nested list""" + class BlogPost(Document): slug = StringField() tags = ListField() @@ -3506,13 +3702,14 @@ class InstanceTest(MongoDBTestCase): blog = BlogPost(slug="test").save() blog.update(push__tags=["value1", 123]) blog.reload() - self.assertEqual(blog.tags, [["value1", 123]]) + assert blog.tags == [["value1", 123]] def test_accessing_objects_with_indexes_error(self): - insert_result = self.db.company.insert_many([{'name': 'Foo'}, - {'name': 'Foo'}]) # Force 2 doc with same name + insert_result = self.db.company.insert_many( + [{"name": "Foo"}, {"name": "Foo"}] + ) # Force 2 doc with same name REF_OID = insert_result.inserted_ids[0] - self.db.user.insert_one({'company': REF_OID}) # Force 2 doc with same name + self.db.user.insert_one({"company": REF_OID}) # Force 2 doc with same name class Company(Document): name = StringField(unique=True) @@ -3521,9 +3718,87 @@ class InstanceTest(MongoDBTestCase): company = ReferenceField(Company) # Ensure index creation exception aren't swallowed (#1688) - with self.assertRaises(DuplicateKeyError): + with pytest.raises(DuplicateKeyError): User.objects().select_related() + def test_embedded_document_failed_while_loading_instance_when_it_is_not_a_dict( + self, + ): + class LightSaber(EmbeddedDocument): + color = StringField() -if __name__ == '__main__': + class Jedi(Document): + light_saber = EmbeddedDocumentField(LightSaber) + + coll = Jedi._get_collection() + 
Jedi(light_saber=LightSaber(color="red")).save() + _ = list(Jedi.objects) # Ensure a proper document loads without errors + + # Forces a document with a wrong shape (may occur in case of migration) + value = u"I_should_be_a_dict" + coll.insert_one({"light_saber": value}) + + with pytest.raises(InvalidDocumentError) as exc_info: + list(Jedi.objects) + + assert str( + exc_info.value + ) == "Invalid data to create a `Jedi` instance.\nField 'light_saber' - The source SON object needs to be of type 'dict' but a '%s' was found" % type( + value + ) + + +class ObjectKeyTestCase(MongoDBTestCase): + def test_object_key_simple_document(self): + class Book(Document): + title = StringField() + + book = Book(title="Whatever") + assert book._object_key == {"pk": None} + + book.pk = ObjectId() + assert book._object_key == {"pk": book.pk} + + def test_object_key_with_custom_primary_key(self): + class Book(Document): + isbn = StringField(primary_key=True) + title = StringField() + + book = Book(title="Sapiens") + assert book._object_key == {"pk": None} + + book = Book(pk="0062316117") + assert book._object_key == {"pk": "0062316117"} + + def test_object_key_in_a_sharded_collection(self): + class Book(Document): + title = StringField() + meta = {"shard_key": ("pk", "title")} + + book = Book() + assert book._object_key == {"pk": None, "title": None} + book = Book(pk=ObjectId(), title="Sapiens") + assert book._object_key == {"pk": book.pk, "title": "Sapiens"} + + def test_object_key_with_custom_db_field(self): + class Book(Document): + author = StringField(db_field="creator") + meta = {"shard_key": ("pk", "author")} + + book = Book(pk=ObjectId(), author="Author") + assert book._object_key == {"pk": book.pk, "author": "Author"} + + def test_object_key_with_nested_shard_key(self): + class Author(EmbeddedDocument): + name = StringField() + + class Book(Document): + author = EmbeddedDocumentField(Author) + meta = {"shard_key": ("pk", "author.name")} + + book = Book(pk=ObjectId(), 
author=Author(name="Author")) + assert book._object_key == {"pk": book.pk, "author__name": "Author"} + + +if __name__ == "__main__": unittest.main() diff --git a/tests/document/json_serialisation.py b/tests/document/test_json_serialisation.py similarity index 66% rename from tests/document/json_serialisation.py rename to tests/document/test_json_serialisation.py index 251b65a2..593d34f8 100644 --- a/tests/document/json_serialisation.py +++ b/tests/document/test_json_serialisation.py @@ -1,22 +1,14 @@ import unittest import uuid -from nose.plugins.skip import SkipTest from datetime import datetime from bson import ObjectId -import pymongo - from mongoengine import * - -__all__ = ("TestJson",) +from tests.utils import MongoDBTestCase -class TestJson(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - +class TestJson(MongoDBTestCase): def test_json_names(self): """ Going to test reported issue: @@ -25,22 +17,24 @@ class TestJson(unittest.TestCase): a to_json with the original class names and not the abreviated mongodb document keys """ + class Embedded(EmbeddedDocument): - string = StringField(db_field='s') + string = StringField(db_field="s") class Doc(Document): - string = StringField(db_field='s') - embedded = EmbeddedDocumentField(Embedded, db_field='e') + string = StringField(db_field="s") + embedded = EmbeddedDocumentField(Embedded, db_field="e") doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello")) - doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':')) + doc_json = doc.to_json( + sort_keys=True, use_db_field=False, separators=(",", ":") + ) expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" - self.assertEqual(doc_json, expected_json) + assert doc_json == expected_json def test_json_simple(self): - class Embedded(EmbeddedDocument): string = StringField() @@ -49,16 +43,18 @@ class TestJson(unittest.TestCase): embedded_field = EmbeddedDocumentField(Embedded) def __eq__(self, 
other): - return (self.string == other.string and - self.embedded_field == other.embedded_field) + return ( + self.string == other.string + and self.embedded_field == other.embedded_field + ) doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) - doc_json = doc.to_json(sort_keys=True, separators=(',', ':')) + doc_json = doc.to_json(sort_keys=True, separators=(",", ":")) expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}""" - self.assertEqual(doc_json, expected_json) + assert doc_json == expected_json - self.assertEqual(doc, Doc.from_json(doc.to_json())) + assert doc == Doc.from_json(doc.to_json()) def test_json_complex(self): class EmbeddedDoc(EmbeddedDocument): @@ -68,41 +64,43 @@ class TestJson(unittest.TestCase): pass class Doc(Document): - string_field = StringField(default='1') + string_field = StringField(default="1") int_field = IntField(default=1) float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) datetime_field = DateTimeField(default=datetime.now) - embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, - default=lambda: EmbeddedDoc()) + embedded_document_field = EmbeddedDocumentField( + EmbeddedDoc, default=lambda: EmbeddedDoc() + ) list_field = ListField(default=lambda: [1, 2, 3]) dict_field = DictField(default=lambda: {"hello": "world"}) objectid_field = ObjectIdField(default=ObjectId) - reference_field = ReferenceField(Simple, default=lambda: - Simple().save()) + reference_field = ReferenceField(Simple, default=lambda: Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) complex_datetime_field = ComplexDateTimeField(default=datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) generic_reference_field = GenericReferenceField( - default=lambda: Simple().save()) - sorted_list_field = SortedListField(IntField(), - default=lambda: [1, 2, 3]) + default=lambda: 
Simple().save() + ) + sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) email_field = EmailField(default="ross@example.com") geo_point_field = GeoPointField(default=lambda: [1, 2]) sequence_field = SequenceField() uuid_field = UUIDField(default=uuid.uuid4) generic_embedded_document_field = GenericEmbeddedDocumentField( - default=lambda: EmbeddedDoc()) + default=lambda: EmbeddedDoc() + ) def __eq__(self, other): import json + return json.loads(self.to_json()) == json.loads(other.to_json()) doc = Doc() - self.assertEqual(doc, Doc.from_json(doc.to_json())) + assert doc == Doc.from_json(doc.to_json()) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/document/validation.py b/tests/document/test_validation.py similarity index 53% rename from tests/document/validation.py rename to tests/document/test_validation.py index 30a285b2..2439f283 100644 --- a/tests/document/validation.py +++ b/tests/document/test_validation.py @@ -2,55 +2,60 @@ import unittest from datetime import datetime +import pytest + from mongoengine import * - -__all__ = ("ValidatorErrorTest",) +from tests.utils import MongoDBTestCase -class ValidatorErrorTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - +class TestValidatorError(MongoDBTestCase): def test_to_dict(self): """Ensure a ValidationError handles error to_dict correctly. 
""" - error = ValidationError('root') - self.assertEqual(error.to_dict(), {}) + error = ValidationError("root") + assert error.to_dict() == {} # 1st level error schema - error.errors = {'1st': ValidationError('bad 1st'), } - self.assertIn('1st', error.to_dict()) - self.assertEqual(error.to_dict()['1st'], 'bad 1st') + error.errors = {"1st": ValidationError("bad 1st")} + assert "1st" in error.to_dict() + assert error.to_dict()["1st"] == "bad 1st" # 2nd level error schema - error.errors = {'1st': ValidationError('bad 1st', errors={ - '2nd': ValidationError('bad 2nd'), - })} - self.assertIn('1st', error.to_dict()) - self.assertIsInstance(error.to_dict()['1st'], dict) - self.assertIn('2nd', error.to_dict()['1st']) - self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') + error.errors = { + "1st": ValidationError( + "bad 1st", errors={"2nd": ValidationError("bad 2nd")} + ) + } + assert "1st" in error.to_dict() + assert isinstance(error.to_dict()["1st"], dict) + assert "2nd" in error.to_dict()["1st"] + assert error.to_dict()["1st"]["2nd"] == "bad 2nd" # moar levels - error.errors = {'1st': ValidationError('bad 1st', errors={ - '2nd': ValidationError('bad 2nd', errors={ - '3rd': ValidationError('bad 3rd', errors={ - '4th': ValidationError('Inception'), - }), - }), - })} - self.assertIn('1st', error.to_dict()) - self.assertIn('2nd', error.to_dict()['1st']) - self.assertIn('3rd', error.to_dict()['1st']['2nd']) - self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd']) - self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], - 'Inception') + error.errors = { + "1st": ValidationError( + "bad 1st", + errors={ + "2nd": ValidationError( + "bad 2nd", + errors={ + "3rd": ValidationError( + "bad 3rd", errors={"4th": ValidationError("Inception")} + ) + }, + ) + }, + ) + } + assert "1st" in error.to_dict() + assert "2nd" in error.to_dict()["1st"] + assert "3rd" in error.to_dict()["1st"]["2nd"] + assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"] + assert 
error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception" - self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") + assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])" def test_model_validation(self): - class User(Document): username = StringField(primary_key=True) name = StringField(required=True) @@ -58,67 +63,69 @@ class ValidatorErrorTest(unittest.TestCase): try: User().validate() except ValidationError as e: - self.assertIn("User:None", e.message) - self.assertEqual(e.to_dict(), { - 'username': 'Field is required', - 'name': 'Field is required'}) + assert "User:None" in e.message + assert e.to_dict() == { + "username": "Field is required", + "name": "Field is required", + } user = User(username="RossC0", name="Ross").save() user.name = None try: user.save() except ValidationError as e: - self.assertIn("User:RossC0", e.message) - self.assertEqual(e.to_dict(), { - 'name': 'Field is required'}) + assert "User:RossC0" in e.message + assert e.to_dict() == {"name": "Field is required"} def test_fields_rewrite(self): class BasePerson(Document): name = StringField() age = IntField() - meta = {'abstract': True} + meta = {"abstract": True} class Person(BasePerson): name = StringField(required=True) p = Person(age=15) - self.assertRaises(ValidationError, p.validate) + with pytest.raises(ValidationError): + p.validate() def test_embedded_document_validation(self): """Ensure that embedded documents may be validated. 
""" + class Comment(EmbeddedDocument): date = DateTimeField() content = StringField(required=True) comment = Comment() - self.assertRaises(ValidationError, comment.validate) + with pytest.raises(ValidationError): + comment.validate() - comment.content = 'test' + comment.content = "test" comment.validate() comment.date = 4 - self.assertRaises(ValidationError, comment.validate) + with pytest.raises(ValidationError): + comment.validate() comment.date = datetime.now() comment.validate() - self.assertEqual(comment._instance, None) + assert comment._instance is None def test_embedded_db_field_validate(self): - class SubDoc(EmbeddedDocument): val = IntField(required=True) class Doc(Document): id = StringField(primary_key=True) - e = EmbeddedDocumentField(SubDoc, db_field='eb') + e = EmbeddedDocumentField(SubDoc, db_field="eb") try: Doc(id="bad").validate() except ValidationError as e: - self.assertIn("SubDoc:None", e.message) - self.assertEqual(e.to_dict(), { - "e": {'val': 'OK could not be converted to int'}}) + assert "SubDoc:None" in e.message + assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}} Doc.drop_collection() @@ -126,25 +133,23 @@ class ValidatorErrorTest(unittest.TestCase): doc = Doc.objects.first() keys = doc._data.keys() - self.assertEqual(2, len(keys)) - self.assertIn('e', keys) - self.assertIn('id', keys) + assert 2 == len(keys) + assert "e" in keys + assert "id" in keys doc.e.val = "OK" try: doc.save() except ValidationError as e: - self.assertIn("Doc:test", e.message) - self.assertEqual(e.to_dict(), { - "e": {'val': 'OK could not be converted to int'}}) + assert "Doc:test" in e.message + assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}} def test_embedded_weakref(self): - class SubDoc(EmbeddedDocument): val = IntField(required=True) class Doc(Document): - e = EmbeddedDocumentField(SubDoc, db_field='eb') + e = EmbeddedDocumentField(SubDoc, db_field="eb") Doc.drop_collection() @@ -153,23 +158,26 @@ class 
ValidatorErrorTest(unittest.TestCase): s = SubDoc() - self.assertRaises(ValidationError, s.validate) + with pytest.raises(ValidationError): + s.validate() d1.e = s d2.e = s del d1 - self.assertRaises(ValidationError, d2.validate) + with pytest.raises(ValidationError): + d2.validate() def test_parent_reference_in_child_document(self): """ Test to ensure a ReferenceField can store a reference to a parent class when inherited. Issue #954. """ + class Parent(Document): - meta = {'allow_inheritance': True} - reference = ReferenceField('self') + meta = {"allow_inheritance": True} + reference = ReferenceField("self") class Child(Parent): pass @@ -190,9 +198,10 @@ class ValidatorErrorTest(unittest.TestCase): Test to ensure a ReferenceField can store a reference to a parent class when inherited and when set via attribute. Issue #954. """ + class Parent(Document): - meta = {'allow_inheritance': True} - reference = ReferenceField('self') + meta = {"allow_inheritance": True} + reference = ReferenceField("self") class Child(Parent): pass @@ -210,5 +219,5 @@ class ValidatorErrorTest(unittest.TestCase): self.fail("ValidationError raised: %s" % e.message) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/fields/__init__.py b/tests/fields/__init__.py index 4994d0c6..e69de29b 100644 --- a/tests/fields/__init__.py +++ b/tests/fields/__init__.py @@ -1,3 +0,0 @@ -from .fields import * -from .file_tests import * -from .geo import * diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index 8af75d4e..a9c0c7e5 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -1,27 +1,28 @@ # -*- coding: utf-8 -*- import uuid -from nose.plugins.skip import SkipTest -import six - from bson import Binary +import pytest from mongoengine import * from tests.utils import MongoDBTestCase -BIN_VALUE = 
six.b('\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5') +BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode( + "latin-1" +) class TestBinaryField(MongoDBTestCase): def test_binary_fields(self): """Ensure that binary fields can be stored and retrieved. """ + class Attachment(Document): content_type = StringField() blob = BinaryField() - BLOB = six.b('\xe6\x00\xc4\xff\x07') - MIME_TYPE = 'application/octet-stream' + BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1") + MIME_TYPE = "application/octet-stream" Attachment.drop_collection() @@ -29,12 +30,13 @@ class TestBinaryField(MongoDBTestCase): attachment.save() attachment_1 = Attachment.objects().first() - self.assertEqual(MIME_TYPE, attachment_1.content_type) - self.assertEqual(BLOB, six.binary_type(attachment_1.blob)) + assert MIME_TYPE == attachment_1.content_type + assert BLOB == bytes(attachment_1.blob) def test_validation_succeeds(self): """Ensure that valid values can be assigned to binary fields. 
""" + class AttachmentRequired(Document): blob = BinaryField(required=True) @@ -42,13 +44,15 @@ class TestBinaryField(MongoDBTestCase): blob = BinaryField(max_bytes=4) attachment_required = AttachmentRequired() - self.assertRaises(ValidationError, attachment_required.validate) - attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07')) + with pytest.raises(ValidationError): + attachment_required.validate() + attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1")) attachment_required.validate() - _5_BYTES = six.b('\xe6\x00\xc4\xff\x07') - _4_BYTES = six.b('\xe6\x00\xc4\xff') - self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate) + _5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1") + _4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1") + with pytest.raises(ValidationError): + AttachmentSizeLimit(blob=_5_BYTES).validate() AttachmentSizeLimit(blob=_4_BYTES).validate() def test_validation_fails(self): @@ -57,8 +61,9 @@ class TestBinaryField(MongoDBTestCase): class Attachment(Document): blob = BinaryField() - for invalid_data in (2, u'Im_a_unicode', ['some_str']): - self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate) + for invalid_data in (2, u"Im_a_unicode", ["some_str"]): + with pytest.raises(ValidationError): + Attachment(blob=invalid_data).validate() def test__primary(self): class Attachment(Document): @@ -67,23 +72,21 @@ class TestBinaryField(MongoDBTestCase): Attachment.drop_collection() binary_id = uuid.uuid4().bytes att = Attachment(id=binary_id).save() - self.assertEqual(1, Attachment.objects.count()) - self.assertEqual(1, Attachment.objects.filter(id=att.id).count()) + assert 1 == Attachment.objects.count() + assert 1 == Attachment.objects.filter(id=att.id).count() att.delete() - self.assertEqual(0, Attachment.objects.count()) + assert 0 == Attachment.objects.count() def test_primary_filter_by_binary_pk_as_str(self): - raise SkipTest("Querying by id as string is not currently supported") - 
class Attachment(Document): id = BinaryField(primary_key=True) Attachment.drop_collection() binary_id = uuid.uuid4().bytes att = Attachment(id=binary_id).save() - self.assertEqual(1, Attachment.objects.filter(id=binary_id).count()) + assert 1 == Attachment.objects.filter(id=binary_id).count() att.delete() - self.assertEqual(0, Attachment.objects.count()) + assert 0 == Attachment.objects.count() def test_match_querying_with_bytes(self): class MyDocument(Document): @@ -93,7 +96,7 @@ class TestBinaryField(MongoDBTestCase): doc = MyDocument(bin_field=BIN_VALUE).save() matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first() - self.assertEqual(matched_doc.id, doc.id) + assert matched_doc.id == doc.id def test_match_querying_with_binary(self): class MyDocument(Document): @@ -104,40 +107,37 @@ class TestBinaryField(MongoDBTestCase): doc = MyDocument(bin_field=BIN_VALUE).save() matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first() - self.assertEqual(matched_doc.id, doc.id) + assert matched_doc.id == doc.id def test_modify_operation__set(self): """Ensures no regression of bug #1127""" + class MyDocument(Document): some_field = StringField() bin_field = BinaryField() MyDocument.drop_collection() - doc = MyDocument.objects(some_field='test').modify( - upsert=True, new=True, - set__bin_field=BIN_VALUE + doc = MyDocument.objects(some_field="test").modify( + upsert=True, new=True, set__bin_field=BIN_VALUE ) - self.assertEqual(doc.some_field, 'test') - if six.PY3: - self.assertEqual(doc.bin_field, BIN_VALUE) - else: - self.assertEqual(doc.bin_field, Binary(BIN_VALUE)) + assert doc.some_field == "test" + assert doc.bin_field == BIN_VALUE def test_update_one(self): """Ensures no regression of bug #1127""" + class MyDocument(Document): bin_field = BinaryField() MyDocument.drop_collection() - bin_data = six.b('\xe6\x00\xc4\xff\x07') + bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1") doc = MyDocument(bin_field=bin_data).save() - n_updated = 
MyDocument.objects(bin_field=bin_data).update_one(bin_field=BIN_VALUE) - self.assertEqual(n_updated, 1) + n_updated = MyDocument.objects(bin_field=bin_data).update_one( + bin_field=BIN_VALUE + ) + assert n_updated == 1 fetched = MyDocument.objects.with_id(doc.id) - if six.PY3: - self.assertEqual(fetched.bin_field, BIN_VALUE) - else: - self.assertEqual(fetched.bin_field, Binary(BIN_VALUE)) + assert fetched.bin_field == BIN_VALUE diff --git a/tests/fields/test_boolean_field.py b/tests/fields/test_boolean_field.py index 7a2a3db6..041f9f56 100644 --- a/tests/fields/test_boolean_field.py +++ b/tests/fields/test_boolean_field.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- -from mongoengine import * +import pytest +from mongoengine import * from tests.utils import MongoDBTestCase, get_as_pymongo @@ -11,15 +12,13 @@ class TestBooleanField(MongoDBTestCase): person = Person(admin=True) person.save() - self.assertEqual( - get_as_pymongo(person), - {'_id': person.id, - 'admin': True}) + assert get_as_pymongo(person) == {"_id": person.id, "admin": True} def test_validation(self): """Ensure that invalid values cannot be assigned to boolean fields. """ + class Person(Document): admin = BooleanField() @@ -28,22 +27,26 @@ class TestBooleanField(MongoDBTestCase): person.validate() person.admin = 2 - self.assertRaises(ValidationError, person.validate) - person.admin = 'Yes' - self.assertRaises(ValidationError, person.validate) - person.admin = 'False' - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() + person.admin = "Yes" + with pytest.raises(ValidationError): + person.validate() + person.admin = "False" + with pytest.raises(ValidationError): + person.validate() def test_weirdness_constructor(self): """When attribute is set in contructor, it gets cast into a bool which causes some weird behavior. 
We dont necessarily want to maintain this behavior but its a known issue """ + class Person(Document): admin = BooleanField() - new_person = Person(admin='False') - self.assertTrue(new_person.admin) + new_person = Person(admin="False") + assert new_person.admin - new_person = Person(admin='0') - self.assertTrue(new_person.admin) + new_person = Person(admin="0") + assert new_person.admin diff --git a/tests/fields/test_cached_reference_field.py b/tests/fields/test_cached_reference_field.py index 470ecc5d..bb4c57d2 100644 --- a/tests/fields/test_cached_reference_field.py +++ b/tests/fields/test_cached_reference_field.py @@ -1,18 +1,19 @@ # -*- coding: utf-8 -*- from decimal import Decimal -from mongoengine import * +import pytest +from mongoengine import * from tests.utils import MongoDBTestCase class TestCachedReferenceField(MongoDBTestCase): - def test_get_and_save(self): """ Tests #1047: CachedReferenceField creates DBRefs on to_python, but can't save them on to_mongo. """ + class Animal(Document): name = StringField() tag = StringField() @@ -24,10 +25,11 @@ class TestCachedReferenceField(MongoDBTestCase): Animal.drop_collection() Ocorrence.drop_collection() - Ocorrence(person="testte", - animal=Animal(name="Leopard", tag="heavy").save()).save() + Ocorrence( + person="testte", animal=Animal(name="Leopard", tag="heavy").save() + ).save() p = Ocorrence.objects.get() - p.person = 'new_testte' + p.person = "new_testte" p.save() def test_general_things(self): @@ -37,8 +39,7 @@ class TestCachedReferenceField(MongoDBTestCase): class Ocorrence(Document): person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag']) + animal = CachedReferenceField(Animal, fields=["tag"]) Animal.drop_collection() Ocorrence.drop_collection() @@ -46,30 +47,29 @@ class TestCachedReferenceField(MongoDBTestCase): a = Animal(name="Leopard", tag="heavy") a.save() - self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal]) + assert Animal._cached_reference_fields == 
[Ocorrence.animal] o = Ocorrence(person="teste", animal=a) o.save() p = Ocorrence(person="Wilson") p.save() - self.assertEqual(Ocorrence.objects(animal=None).count(), 1) + assert Ocorrence.objects(animal=None).count() == 1 - self.assertEqual( - a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk}) + assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk} - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') + assert o.to_mongo()["animal"]["tag"] == "heavy" # counts Ocorrence(person="teste 2").save() Ocorrence(person="teste 3").save() - count = Ocorrence.objects(animal__tag='heavy').count() - self.assertEqual(count, 1) + count = Ocorrence.objects(animal__tag="heavy").count() + assert count == 1 - ocorrence = Ocorrence.objects(animal__tag='heavy').first() - self.assertEqual(ocorrence.person, "teste") - self.assertIsInstance(ocorrence.animal, Animal) + ocorrence = Ocorrence.objects(animal__tag="heavy").first() + assert ocorrence.person == "teste" + assert isinstance(ocorrence.animal, Animal) def test_with_decimal(self): class PersonAuto(Document): @@ -78,28 +78,22 @@ class TestCachedReferenceField(MongoDBTestCase): class SocialTest(Document): group = StringField() - person = CachedReferenceField( - PersonAuto, - fields=('salary',)) + person = CachedReferenceField(PersonAuto, fields=("salary",)) PersonAuto.drop_collection() SocialTest.drop_collection() - p = PersonAuto(name="Alberto", salary=Decimal('7000.00')) + p = PersonAuto(name="Alberto", salary=Decimal("7000.00")) p.save() s = SocialTest(group="dev", person=p) s.save() - self.assertEqual( - SocialTest.objects._collection.find_one({'person.salary': 7000.00}), { - '_id': s.pk, - 'group': s.group, - 'person': { - '_id': p.pk, - 'salary': 7000.00 - } - }) + assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == { + "_id": s.pk, + "group": s.group, + "person": {"_id": p.pk, "salary": 7000.00}, + } def test_cached_reference_field_reference(self): class Group(Document): @@ 
-111,17 +105,14 @@ class TestCachedReferenceField(MongoDBTestCase): class SocialData(Document): obs = StringField() - tags = ListField( - StringField()) - person = CachedReferenceField( - Person, - fields=('group',)) + tags = ListField(StringField()) + person = CachedReferenceField(Person, fields=("group",)) Group.drop_collection() Person.drop_collection() SocialData.drop_collection() - g1 = Group(name='dev') + g1 = Group(name="dev") g1.save() g2 = Group(name="designers") @@ -136,25 +127,21 @@ class TestCachedReferenceField(MongoDBTestCase): p3 = Person(name="Afro design", group=g2) p3.save() - s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2']) + s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"]) s1.save() - s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4']) + s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"]) s2.save() - self.assertEqual(SocialData.objects._collection.find_one( - {'tags': 'tag2'}), { - '_id': s1.pk, - 'obs': 'testing 123', - 'tags': ['tag1', 'tag2'], - 'person': { - '_id': p1.pk, - 'group': g1.pk - } - }) + assert SocialData.objects._collection.find_one({"tags": "tag2"}) == { + "_id": s1.pk, + "obs": "testing 123", + "tags": ["tag1", "tag2"], + "person": {"_id": p1.pk, "group": g1.pk}, + } - self.assertEqual(SocialData.objects(person__group=g2).count(), 1) - self.assertEqual(SocialData.objects(person__group=g2).first(), s2) + assert SocialData.objects(person__group=g2).count() == 1 + assert SocialData.objects(person__group=g2).first() == s2 def test_cached_reference_field_push_with_fields(self): class Product(Document): @@ -163,185 +150,136 @@ class TestCachedReferenceField(MongoDBTestCase): Product.drop_collection() class Basket(Document): - products = ListField(CachedReferenceField(Product, fields=['name'])) + products = ListField(CachedReferenceField(Product, fields=["name"])) Basket.drop_collection() - product1 = Product(name='abc').save() - product2 = 
Product(name='def').save() + product1 = Product(name="abc").save() + product2 = Product(name="def").save() basket = Basket(products=[product1]).save() - self.assertEqual( - Basket.objects._collection.find_one(), - { - '_id': basket.pk, - 'products': [ - { - '_id': product1.pk, - 'name': product1.name - } - ] - } - ) + assert Basket.objects._collection.find_one() == { + "_id": basket.pk, + "products": [{"_id": product1.pk, "name": product1.name}], + } # push to list basket.update(push__products=product2) basket.reload() - self.assertEqual( - Basket.objects._collection.find_one(), - { - '_id': basket.pk, - 'products': [ - { - '_id': product1.pk, - 'name': product1.name - }, - { - '_id': product2.pk, - 'name': product2.name - } - ] - } - ) + assert Basket.objects._collection.find_one() == { + "_id": basket.pk, + "products": [ + {"_id": product1.pk, "name": product1.name}, + {"_id": product2.pk, "name": product2.name}, + ], + } def test_cached_reference_field_update_all(self): class Person(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) + TYPES = (("pf", "PF"), ("pj", "PJ")) name = StringField() tp = StringField(choices=TYPES) - father = CachedReferenceField('self', fields=('tp',)) + father = CachedReferenceField("self", fields=("tp",)) Person.drop_collection() a1 = Person(name="Wilson Father", tp="pj") a1.save() - a2 = Person(name='Wilson Junior', tp='pf', father=a1) + a2 = Person(name="Wilson Junior", tp="pf", father=a1) a2.save() a2 = Person.objects.with_id(a2.id) - self.assertEqual(a2.father.tp, a1.tp) + assert a2.father.tp == a1.tp - self.assertEqual(dict(a2.to_mongo()), { + assert dict(a2.to_mongo()) == { "_id": a2.pk, "name": u"Wilson Junior", "tp": u"pf", - "father": { - "_id": a1.pk, - "tp": u"pj" - } - }) + "father": {"_id": a1.pk, "tp": u"pj"}, + } - self.assertEqual(Person.objects(father=a1)._query, { - 'father._id': a1.pk - }) - self.assertEqual(Person.objects(father=a1).count(), 1) + assert Person.objects(father=a1)._query == {"father._id": 
a1.pk} + assert Person.objects(father=a1).count() == 1 Person.objects.update(set__tp="pf") Person.father.sync_all() a2.reload() - self.assertEqual(dict(a2.to_mongo()), { + assert dict(a2.to_mongo()) == { "_id": a2.pk, "name": u"Wilson Junior", "tp": u"pf", - "father": { - "_id": a1.pk, - "tp": u"pf" - } - }) + "father": {"_id": a1.pk, "tp": u"pf"}, + } def test_cached_reference_fields_on_embedded_documents(self): - with self.assertRaises(InvalidDocumentError): + with pytest.raises(InvalidDocumentError): + class Test(Document): name = StringField() - type('WrongEmbeddedDocument', ( - EmbeddedDocument,), { - 'test': CachedReferenceField(Test) - }) + type( + "WrongEmbeddedDocument", + (EmbeddedDocument,), + {"test": CachedReferenceField(Test)}, + ) def test_cached_reference_auto_sync(self): class Person(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) + TYPES = (("pf", "PF"), ("pj", "PJ")) name = StringField() - tp = StringField( - choices=TYPES - ) + tp = StringField(choices=TYPES) - father = CachedReferenceField('self', fields=('tp',)) + father = CachedReferenceField("self", fields=("tp",)) Person.drop_collection() a1 = Person(name="Wilson Father", tp="pj") a1.save() - a2 = Person(name='Wilson Junior', tp='pf', father=a1) + a2 = Person(name="Wilson Junior", tp="pf", father=a1) a2.save() - a1.tp = 'pf' + a1.tp = "pf" a1.save() a2.reload() - self.assertEqual(dict(a2.to_mongo()), { - '_id': a2.pk, - 'name': 'Wilson Junior', - 'tp': 'pf', - 'father': { - '_id': a1.pk, - 'tp': 'pf' - } - }) + assert dict(a2.to_mongo()) == { + "_id": a2.pk, + "name": "Wilson Junior", + "tp": "pf", + "father": {"_id": a1.pk, "tp": "pf"}, + } def test_cached_reference_auto_sync_disabled(self): class Persone(Document): - TYPES = ( - ('pf', "PF"), - ('pj', "PJ") - ) + TYPES = (("pf", "PF"), ("pj", "PJ")) name = StringField() - tp = StringField( - choices=TYPES - ) + tp = StringField(choices=TYPES) - father = CachedReferenceField( - 'self', fields=('tp',), auto_sync=False) + father = 
CachedReferenceField("self", fields=("tp",), auto_sync=False) Persone.drop_collection() a1 = Persone(name="Wilson Father", tp="pj") a1.save() - a2 = Persone(name='Wilson Junior', tp='pf', father=a1) + a2 = Persone(name="Wilson Junior", tp="pf", father=a1) a2.save() - a1.tp = 'pf' + a1.tp = "pf" a1.save() - self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), { - '_id': a2.pk, - 'name': 'Wilson Junior', - 'tp': 'pf', - 'father': { - '_id': a1.pk, - 'tp': 'pj' - } - }) + assert Persone.objects._collection.find_one({"_id": a2.pk}) == { + "_id": a2.pk, + "name": "Wilson Junior", + "tp": "pf", + "father": {"_id": a1.pk, "tp": "pj"}, + } def test_cached_reference_embedded_fields(self): class Owner(EmbeddedDocument): - TPS = ( - ('n', "Normal"), - ('u', "Urgent") - ) + TPS = (("n", "Normal"), ("u", "Urgent")) name = StringField() - tp = StringField( - verbose_name="Type", - db_field="t", - choices=TPS) + tp = StringField(verbose_name="Type", db_field="t", choices=TPS) class Animal(Document): name = StringField() @@ -351,45 +289,41 @@ class TestCachedReferenceField(MongoDBTestCase): class Ocorrence(Document): person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag', 'owner.tp']) + animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"]) Animal.drop_collection() Ocorrence.drop_collection() - a = Animal(name="Leopard", tag="heavy", - owner=Owner(tp='u', name="Wilson Júnior") - ) + a = Animal( + name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior") + ) a.save() o = Ocorrence(person="teste", animal=a) o.save() - self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), { - '_id': a.pk, - 'tag': 'heavy', - 'owner': { - 't': 'u' - } - }) - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') - self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u') + assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == { + "_id": a.pk, + "tag": "heavy", + "owner": {"t": "u"}, + } + assert 
o.to_mongo()["animal"]["tag"] == "heavy" + assert o.to_mongo()["animal"]["owner"]["t"] == "u" # Check to_mongo with fields - self.assertNotIn('animal', o.to_mongo(fields=['person'])) + assert "animal" not in o.to_mongo(fields=["person"]) # counts Ocorrence(person="teste 2").save() Ocorrence(person="teste 3").save() - count = Ocorrence.objects( - animal__tag='heavy', animal__owner__tp='u').count() - self.assertEqual(count, 1) + count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count() + assert count == 1 ocorrence = Ocorrence.objects( - animal__tag='heavy', - animal__owner__tp='u').first() - self.assertEqual(ocorrence.person, "teste") - self.assertIsInstance(ocorrence.animal, Animal) + animal__tag="heavy", animal__owner__tp="u" + ).first() + assert ocorrence.person == "teste" + assert isinstance(ocorrence.animal, Animal) def test_cached_reference_embedded_list_fields(self): class Owner(EmbeddedDocument): @@ -404,43 +338,40 @@ class TestCachedReferenceField(MongoDBTestCase): class Ocorrence(Document): person = StringField() - animal = CachedReferenceField( - Animal, fields=['tag', 'owner.tags']) + animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"]) Animal.drop_collection() Ocorrence.drop_collection() - a = Animal(name="Leopard", tag="heavy", - owner=Owner(tags=['cool', 'funny'], - name="Wilson Júnior") - ) + a = Animal( + name="Leopard", + tag="heavy", + owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"), + ) a.save() o = Ocorrence(person="teste 2", animal=a) o.save() - self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), { - '_id': a.pk, - 'tag': 'heavy', - 'owner': { - 'tags': ['cool', 'funny'] - } - }) + assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == { + "_id": a.pk, + "tag": "heavy", + "owner": {"tags": ["cool", "funny"]}, + } - self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') - self.assertEqual(o.to_mongo()['animal']['owner']['tags'], - ['cool', 'funny']) + assert 
o.to_mongo()["animal"]["tag"] == "heavy" + assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"] # counts Ocorrence(person="teste 2").save() Ocorrence(person="teste 3").save() query = Ocorrence.objects( - animal__tag='heavy', animal__owner__tags='cool')._query - self.assertEqual( - query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'}) + animal__tag="heavy", animal__owner__tags="cool" + )._query + assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"} ocorrence = Ocorrence.objects( - animal__tag='heavy', - animal__owner__tags='cool').first() - self.assertEqual(ocorrence.person, "teste 2") - self.assertIsInstance(ocorrence.animal, Animal) + animal__tag="heavy", animal__owner__tags="cool" + ).first() + assert ocorrence.person == "teste 2" + assert isinstance(ocorrence.animal, Animal) diff --git a/tests/fields/test_complex_datetime_field.py b/tests/fields/test_complex_datetime_field.py index 58dc4b43..d118ad23 100644 --- a/tests/fields/test_complex_datetime_field.py +++ b/tests/fields/test_complex_datetime_field.py @@ -1,9 +1,11 @@ # -*- coding: utf-8 -*- import datetime -import math import itertools +import math import re +import pytest + from mongoengine import * from tests.utils import MongoDBTestCase @@ -14,9 +16,10 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): """Tests for complex datetime fields - which can handle microseconds without rounding. 
""" + class LogEntry(Document): date = ComplexDateTimeField() - date_with_dots = ComplexDateTimeField(separator='.') + date_with_dots = ComplexDateTimeField(separator=".") LogEntry.drop_collection() @@ -27,7 +30,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1) + assert log.date == d1 # Post UTC - microseconds are rounded (down) nearest millisecond - with # default datetimefields @@ -35,7 +38,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1) + assert log.date == d1 # Pre UTC dates microseconds below 1000 are dropped - with default # datetimefields @@ -43,7 +46,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1) + assert log.date == d1 # Pre UTC microseconds above 1000 is wonky - with default datetimefields # log.date has an invalid microsecond value so I can't construct @@ -53,26 +56,34 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1) + assert log.date == d1 log1 = LogEntry.objects.get(date=d1) - self.assertEqual(log, log1) + assert log == log1 # Test string padding microsecond = map(int, [math.pow(10, x) for x in range(6)]) mm = dd = hh = ii = ss = [1, 10] for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): - stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date'] - self.assertTrue(re.match('^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored) is not None) + stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"] + assert ( + re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored) + is not None + ) # Test separator - stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots'] - self.assertTrue(re.match('^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$', stored) is not None) + stored = 
LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[ + "date_with_dots" + ] + assert ( + re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None + ) def test_complexdatetime_usage(self): """Tests for complex datetime fields - which can handle microseconds without rounding. """ + class LogEntry(Document): date = ComplexDateTimeField() @@ -84,62 +95,61 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): log.save() log1 = LogEntry.objects.get(date=d1) - self.assertEqual(log, log1) + assert log == log1 # create extra 59 log entries for a total of 60 for i in range(1951, 2010): d = datetime.datetime(i, 1, 1, 0, 0, 1, 999) LogEntry(date=d).save() - self.assertEqual(LogEntry.objects.count(), 60) + assert LogEntry.objects.count() == 60 # Test ordering logs = LogEntry.objects.order_by("date") i = 0 while i < 59: - self.assertTrue(logs[i].date <= logs[i + 1].date) + assert logs[i].date <= logs[i + 1].date i += 1 logs = LogEntry.objects.order_by("-date") i = 0 while i < 59: - self.assertTrue(logs[i].date >= logs[i + 1].date) + assert logs[i].date >= logs[i + 1].date i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 30) + assert logs.count() == 30 logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 30) + assert logs.count() == 30 logs = LogEntry.objects.filter( date__lte=datetime.datetime(2011, 1, 1), date__gte=datetime.datetime(2000, 1, 1), ) - self.assertEqual(logs.count(), 10) + assert logs.count() == 10 LogEntry.drop_collection() # Test microsecond-level ordering/filtering for microsecond in (99, 999, 9999, 10000): - LogEntry( - date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond) - ).save() + LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save() - logs = list(LogEntry.objects.order_by('date')) + logs = list(LogEntry.objects.order_by("date")) for next_idx, log in enumerate(logs[:-1], 
start=1): next_log = logs[next_idx] - self.assertTrue(log.date < next_log.date) + assert log.date < next_log.date - logs = list(LogEntry.objects.order_by('-date')) + logs = list(LogEntry.objects.order_by("-date")) for next_idx, log in enumerate(logs[:-1], start=1): next_log = logs[next_idx] - self.assertTrue(log.date > next_log.date) + assert log.date > next_log.date logs = LogEntry.objects.filter( - date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)) - self.assertEqual(logs.count(), 4) + date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000) + ) + assert logs.count() == 4 def test_no_default_value(self): class Log(Document): @@ -148,25 +158,26 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): Log.drop_collection() log = Log() - self.assertIsNone(log.timestamp) + assert log.timestamp is None log.save() fetched_log = Log.objects.with_id(log.id) - self.assertIsNone(fetched_log.timestamp) + assert fetched_log.timestamp is None def test_default_static_value(self): NOW = datetime.datetime.utcnow() + class Log(Document): timestamp = ComplexDateTimeField(default=NOW) Log.drop_collection() log = Log() - self.assertEqual(log.timestamp, NOW) + assert log.timestamp == NOW log.save() fetched_log = Log.objects.with_id(log.id) - self.assertEqual(fetched_log.timestamp, NOW) + assert fetched_log.timestamp == NOW def test_default_callable(self): NOW = datetime.datetime.utcnow() @@ -177,8 +188,23 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): Log.drop_collection() log = Log() - self.assertGreaterEqual(log.timestamp, NOW) + assert log.timestamp >= NOW log.save() fetched_log = Log.objects.with_id(log.id) - self.assertGreaterEqual(fetched_log.timestamp, NOW) + assert fetched_log.timestamp >= NOW + + def test_setting_bad_value_does_not_raise_unless_validate_is_called(self): + # test regression of #2253 + + class Log(Document): + timestamp = ComplexDateTimeField() + + Log.drop_collection() + + log = Log(timestamp="garbage") + with pytest.raises(ValidationError): + 
log.validate() + + with pytest.raises(ValidationError): + log.save() diff --git a/tests/fields/test_date_field.py b/tests/fields/test_date_field.py index 82adb514..42a4b7f1 100644 --- a/tests/fields/test_date_field.py +++ b/tests/fields/test_date_field.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import datetime -import six + +import pytest try: import dateutil @@ -8,7 +9,6 @@ except ImportError: dateutil = None from mongoengine import * - from tests.utils import MongoDBTestCase @@ -18,41 +18,47 @@ class TestDateField(MongoDBTestCase): Ensure an exception is raised when trying to cast an empty string to datetime. """ + class MyDoc(Document): dt = DateField() - md = MyDoc(dt='') - self.assertRaises(ValidationError, md.save) + md = MyDoc(dt="") + with pytest.raises(ValidationError): + md.save() def test_date_from_whitespace_string(self): """ Ensure an exception is raised when trying to cast a whitespace-only string to datetime. """ + class MyDoc(Document): dt = DateField() - md = MyDoc(dt=' ') - self.assertRaises(ValidationError, md.save) + md = MyDoc(dt=" ") + with pytest.raises(ValidationError): + md.save() def test_default_values_today(self): """Ensure that default field values are used when creating a document. 
""" + class Person(Document): day = DateField(default=datetime.date.today) person = Person() person.validate() - self.assertEqual(person.day, person.day) - self.assertEqual(person.day, datetime.date.today()) - self.assertEqual(person._data['day'], person.day) + assert person.day == person.day + assert person.day == datetime.date.today() + assert person._data["day"] == person.day def test_date(self): """Tests showing pymongo date fields See: http://api.mongodb.org/python/current/api/bson/son.html#dt """ + class LogEntry(Document): date = DateField() @@ -63,7 +69,7 @@ class TestDateField(MongoDBTestCase): log.date = datetime.date.today() log.save() log.reload() - self.assertEqual(log.date, datetime.date.today()) + assert log.date == datetime.date.today() d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) d2 = datetime.datetime(1970, 1, 1, 0, 0, 1) @@ -71,30 +77,20 @@ class TestDateField(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1.date()) - self.assertEqual(log.date, d2.date()) + assert log.date == d1.date() + assert log.date == d2.date() d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000) log.date = d1 log.save() log.reload() - self.assertEqual(log.date, d1.date()) - self.assertEqual(log.date, d2.date()) - - if not six.PY3: - # Pre UTC dates microseconds below 1000 are dropped - # This does not seem to be true in PY3 - d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) - d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) - log.date = d1 - log.save() - log.reload() - self.assertEqual(log.date, d1.date()) - self.assertEqual(log.date, d2.date()) + assert log.date == d1.date() + assert log.date == d2.date() def test_regular_usage(self): """Tests for regular datetime fields""" + class LogEntry(Document): date = DateField() @@ -106,42 +102,43 @@ class TestDateField(MongoDBTestCase): log.validate() log.save() - for query in (d1, d1.isoformat(' ')): + for query in (d1, d1.isoformat(" 
")): log1 = LogEntry.objects.get(date=query) - self.assertEqual(log, log1) + assert log == log1 if dateutil: - log1 = LogEntry.objects.get(date=d1.isoformat('T')) - self.assertEqual(log, log1) + log1 = LogEntry.objects.get(date=d1.isoformat("T")) + assert log == log1 # create additional 19 log entries for a total of 20 for i in range(1971, 1990): d = datetime.datetime(i, 1, 1, 0, 0, 1) LogEntry(date=d).save() - self.assertEqual(LogEntry.objects.count(), 20) + assert LogEntry.objects.count() == 20 # Test ordering logs = LogEntry.objects.order_by("date") i = 0 while i < 19: - self.assertTrue(logs[i].date <= logs[i + 1].date) + assert logs[i].date <= logs[i + 1].date i += 1 logs = LogEntry.objects.order_by("-date") i = 0 while i < 19: - self.assertTrue(logs[i].date >= logs[i + 1].date) + assert logs[i].date >= logs[i + 1].date i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 10) + assert logs.count() == 10 def test_validation(self): """Ensure that invalid values cannot be assigned to datetime fields. 
""" + class LogEntry(Document): time = DateField() @@ -152,14 +149,16 @@ class TestDateField(MongoDBTestCase): log.time = datetime.date.today() log.validate() - log.time = datetime.datetime.now().isoformat(' ') + log.time = datetime.datetime.now().isoformat(" ") log.validate() if dateutil: - log.time = datetime.datetime.now().isoformat('T') + log.time = datetime.datetime.now().isoformat("T") log.validate() log.time = -1 - self.assertRaises(ValidationError, log.validate) - log.time = 'ABC' - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() + log.time = "ABC" + with pytest.raises(ValidationError): + log.validate() diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index 92f0668a..48936af7 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import datetime as dt -import six + +import pytest try: import dateutil @@ -19,27 +20,32 @@ class TestDateTimeField(MongoDBTestCase): Ensure an exception is raised when trying to cast an empty string to datetime. """ + class MyDoc(Document): dt = DateTimeField() - md = MyDoc(dt='') - self.assertRaises(ValidationError, md.save) + md = MyDoc(dt="") + with pytest.raises(ValidationError): + md.save() def test_datetime_from_whitespace_string(self): """ Ensure an exception is raised when trying to cast a whitespace-only string to datetime. """ + class MyDoc(Document): dt = DateTimeField() - md = MyDoc(dt=' ') - self.assertRaises(ValidationError, md.save) + md = MyDoc(dt=" ") + with pytest.raises(ValidationError): + md.save() def test_default_value_utcnow(self): """Ensure that default field values are used when creating a document. 
""" + class Person(Document): created = DateTimeField(default=dt.datetime.utcnow) @@ -47,9 +53,9 @@ class TestDateTimeField(MongoDBTestCase): person = Person() person.validate() person_created_t0 = person.created - self.assertLess(person.created - utcnow, dt.timedelta(seconds=1)) - self.assertEqual(person_created_t0, person.created) # make sure it does not change - self.assertEqual(person._data['created'], person.created) + assert person.created - utcnow < dt.timedelta(seconds=1) + assert person_created_t0 == person.created # make sure it does not change + assert person._data["created"] == person.created def test_handling_microseconds(self): """Tests showing pymongo datetime fields handling of microseconds. @@ -58,6 +64,7 @@ class TestDateTimeField(MongoDBTestCase): See: http://api.mongodb.org/python/current/api/bson/son.html#dt """ + class LogEntry(Document): date = DateTimeField() @@ -68,7 +75,7 @@ class TestDateTimeField(MongoDBTestCase): log.date = dt.date.today() log.save() log.reload() - self.assertEqual(log.date.date(), dt.date.today()) + assert log.date.date() == dt.date.today() # Post UTC - microseconds are rounded (down) nearest millisecond and # dropped @@ -78,8 +85,8 @@ class TestDateTimeField(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertNotEqual(log.date, d1) - self.assertEqual(log.date, d2) + assert log.date != d1 + assert log.date == d2 # Post UTC - microseconds are rounded (down) nearest millisecond d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999) @@ -87,22 +94,12 @@ class TestDateTimeField(MongoDBTestCase): log.date = d1 log.save() log.reload() - self.assertNotEqual(log.date, d1) - self.assertEqual(log.date, d2) - - if not six.PY3: - # Pre UTC dates microseconds below 1000 are dropped - # This does not seem to be true in PY3 - d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999) - d2 = dt.datetime(1969, 12, 31, 23, 59, 59) - log.date = d1 - log.save() - log.reload() - self.assertNotEqual(log.date, d1) - self.assertEqual(log.date, d2) 
+ assert log.date != d1 + assert log.date == d2 def test_regular_usage(self): """Tests for regular datetime fields""" + class LogEntry(Document): date = DateTimeField() @@ -114,51 +111,51 @@ class TestDateTimeField(MongoDBTestCase): log.validate() log.save() - for query in (d1, d1.isoformat(' ')): + for query in (d1, d1.isoformat(" ")): log1 = LogEntry.objects.get(date=query) - self.assertEqual(log, log1) + assert log == log1 if dateutil: - log1 = LogEntry.objects.get(date=d1.isoformat('T')) - self.assertEqual(log, log1) + log1 = LogEntry.objects.get(date=d1.isoformat("T")) + assert log == log1 # create additional 19 log entries for a total of 20 for i in range(1971, 1990): d = dt.datetime(i, 1, 1, 0, 0, 1) LogEntry(date=d).save() - self.assertEqual(LogEntry.objects.count(), 20) + assert LogEntry.objects.count() == 20 # Test ordering logs = LogEntry.objects.order_by("date") i = 0 while i < 19: - self.assertTrue(logs[i].date <= logs[i + 1].date) + assert logs[i].date <= logs[i + 1].date i += 1 logs = LogEntry.objects.order_by("-date") i = 0 while i < 19: - self.assertTrue(logs[i].date >= logs[i + 1].date) + assert logs[i].date >= logs[i + 1].date i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 10) + assert logs.count() == 10 logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1)) - self.assertEqual(logs.count(), 10) + assert logs.count() == 10 logs = LogEntry.objects.filter( - date__lte=dt.datetime(1980, 1, 1), - date__gte=dt.datetime(1975, 1, 1), + date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1) ) - self.assertEqual(logs.count(), 5) + assert logs.count() == 5 def test_datetime_validation(self): """Ensure that invalid values cannot be assigned to datetime fields. 
""" + class LogEntry(Document): time = DateTimeField() @@ -169,45 +166,51 @@ class TestDateTimeField(MongoDBTestCase): log.time = dt.date.today() log.validate() - log.time = dt.datetime.now().isoformat(' ') + log.time = dt.datetime.now().isoformat(" ") log.validate() - log.time = '2019-05-16 21:42:57.897847' + log.time = "2019-05-16 21:42:57.897847" log.validate() if dateutil: - log.time = dt.datetime.now().isoformat('T') + log.time = dt.datetime.now().isoformat("T") log.validate() log.time = -1 - self.assertRaises(ValidationError, log.validate) - log.time = 'ABC' - self.assertRaises(ValidationError, log.validate) - log.time = '2019-05-16 21:GARBAGE:12' - self.assertRaises(ValidationError, log.validate) - log.time = '2019-05-16 21:42:57.GARBAGE' - self.assertRaises(ValidationError, log.validate) - log.time = '2019-05-16 21:42:57.123.456' - self.assertRaises(ValidationError, log.validate) + with pytest.raises(ValidationError): + log.validate() + log.time = "ABC" + with pytest.raises(ValidationError): + log.validate() + log.time = "2019-05-16 21:GARBAGE:12" + with pytest.raises(ValidationError): + log.validate() + log.time = "2019-05-16 21:42:57.GARBAGE" + with pytest.raises(ValidationError): + log.validate() + log.time = "2019-05-16 21:42:57.123.456" + with pytest.raises(ValidationError): + log.validate() def test_parse_datetime_as_str(self): class DTDoc(Document): date = DateTimeField() - date_str = '2019-03-02 22:26:01' + date_str = "2019-03-02 22:26:01" # make sure that passing a parsable datetime works dtd = DTDoc() dtd.date = date_str - self.assertIsInstance(dtd.date, six.string_types) + assert isinstance(dtd.date, str) dtd.save() dtd.reload() - self.assertIsInstance(dtd.date, dt.datetime) - self.assertEqual(str(dtd.date), date_str) + assert isinstance(dtd.date, dt.datetime) + assert str(dtd.date) == date_str - dtd.date = 'January 1st, 9999999999' - self.assertRaises(ValidationError, dtd.validate) + dtd.date = "January 1st, 9999999999" + with 
pytest.raises(ValidationError): + dtd.validate() class TestDateTimeTzAware(MongoDBTestCase): @@ -217,7 +220,7 @@ class TestDateTimeTzAware(MongoDBTestCase): connection._connections = {} connection._dbs = {} - connect(db='mongoenginetest', tz_aware=True) + connect(db="mongoenginetest", tz_aware=True) class LogEntry(Document): time = DateTimeField() @@ -228,4 +231,4 @@ class TestDateTimeTzAware(MongoDBTestCase): log = LogEntry.objects.first() log.time = dt.datetime(2013, 1, 1, 0, 0, 0) - self.assertEqual(['time'], log._changed_fields) + assert ["time"] == log._changed_fields diff --git a/tests/fields/test_decimal_field.py b/tests/fields/test_decimal_field.py index 0213b880..c531166f 100644 --- a/tests/fields/test_decimal_field.py +++ b/tests/fields/test_decimal_field.py @@ -1,39 +1,44 @@ # -*- coding: utf-8 -*- from decimal import Decimal -from mongoengine import * +import pytest +from mongoengine import * from tests.utils import MongoDBTestCase class TestDecimalField(MongoDBTestCase): - def test_validation(self): """Ensure that invalid values cannot be assigned to decimal fields. 
""" + class Person(Document): - height = DecimalField(min_value=Decimal('0.1'), - max_value=Decimal('3.5')) + height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5")) Person.drop_collection() - Person(height=Decimal('1.89')).save() + Person(height=Decimal("1.89")).save() person = Person.objects.first() - self.assertEqual(person.height, Decimal('1.89')) + assert person.height == Decimal("1.89") - person.height = '2.0' + person.height = "2.0" person.save() person.height = 0.01 - self.assertRaises(ValidationError, person.validate) - person.height = Decimal('0.01') - self.assertRaises(ValidationError, person.validate) - person.height = Decimal('4.0') - self.assertRaises(ValidationError, person.validate) - person.height = 'something invalid' - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() + person.height = Decimal("0.01") + with pytest.raises(ValidationError): + person.validate() + person.height = Decimal("4.0") + with pytest.raises(ValidationError): + person.validate() + person.height = "something invalid" + with pytest.raises(ValidationError): + person.validate() - person_2 = Person(height='something invalid') - self.assertRaises(ValidationError, person_2.validate) + person_2 = Person(height="something invalid") + with pytest.raises(ValidationError): + person_2.validate() def test_comparison(self): class Person(Document): @@ -46,11 +51,11 @@ class TestDecimalField(MongoDBTestCase): Person(money=8).save() Person(money=10).save() - self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count()) - self.assertEqual(2, Person.objects(money__gt=7).count()) - self.assertEqual(2, Person.objects(money__gt="7").count()) + assert 2 == Person.objects(money__gt=Decimal("7")).count() + assert 2 == Person.objects(money__gt=7).count() + assert 2 == Person.objects(money__gt="7").count() - self.assertEqual(3, Person.objects(money__gte="7").count()) + assert 3 == Person.objects(money__gte="7").count() 
def test_storage(self): class Person(Document): @@ -58,7 +63,14 @@ class TestDecimalField(MongoDBTestCase): string_value = DecimalField(precision=4, force_string=True) Person.drop_collection() - values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"), Decimal("10.11111")] + values_to_store = [ + 10, + 10.1, + 10.11, + "10.111", + Decimal("10.1111"), + Decimal("10.11111"), + ] for store_at_creation in [True, False]: for value in values_to_store: # to_python is called explicitly if values were sent in the kwargs of __init__ @@ -72,20 +84,27 @@ class TestDecimalField(MongoDBTestCase): # How its stored expected = [ - {'float_value': 10.0, 'string_value': '10.0000'}, - {'float_value': 10.1, 'string_value': '10.1000'}, - {'float_value': 10.11, 'string_value': '10.1100'}, - {'float_value': 10.111, 'string_value': '10.1110'}, - {'float_value': 10.1111, 'string_value': '10.1111'}, - {'float_value': 10.1111, 'string_value': '10.1111'}] + {"float_value": 10.0, "string_value": "10.0000"}, + {"float_value": 10.1, "string_value": "10.1000"}, + {"float_value": 10.11, "string_value": "10.1100"}, + {"float_value": 10.111, "string_value": "10.1110"}, + {"float_value": 10.1111, "string_value": "10.1111"}, + {"float_value": 10.1111, "string_value": "10.1111"}, + ] expected.extend(expected) - actual = list(Person.objects.exclude('id').as_pymongo()) - self.assertEqual(expected, actual) + actual = list(Person.objects.exclude("id").as_pymongo()) + assert expected == actual # How it comes out locally - expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'), - Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')] + expected = [ + Decimal("10.0000"), + Decimal("10.1000"), + Decimal("10.1100"), + Decimal("10.1110"), + Decimal("10.1111"), + Decimal("10.1111"), + ] expected.extend(expected) - for field_name in ['float_value', 'string_value']: + for field_name in ["float_value", "string_value"]: actual = list(Person.objects().scalar(field_name)) - 
self.assertEqual(expected, actual) + assert expected == actual diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index ade02ccf..12140916 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -1,100 +1,123 @@ # -*- coding: utf-8 -*- +from bson import InvalidDocument +import pytest + from mongoengine import * from mongoengine.base import BaseDict +from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version from tests.utils import MongoDBTestCase, get_as_pymongo class TestDictField(MongoDBTestCase): - def test_storage(self): class BlogPost(Document): info = DictField() BlogPost.drop_collection() - info = {'testkey': 'testvalue'} + info = {"testkey": "testvalue"} post = BlogPost(info=info).save() - self.assertEqual( - get_as_pymongo(post), - { - '_id': post.id, - 'info': info - } - ) + assert get_as_pymongo(post) == {"_id": post.id, "info": info} - def test_general_things(self): - """Ensure that dict types work as expected.""" + def test_validate_invalid_type(self): + class BlogPost(Document): + info = DictField() + + BlogPost.drop_collection() + + invalid_infos = ["my post", ["test", "test"], {1: "test"}] + for invalid_info in invalid_infos: + with pytest.raises(ValidationError): + BlogPost(info=invalid_info).validate() + + def test_keys_with_dots_or_dollars(self): class BlogPost(Document): info = DictField() BlogPost.drop_collection() post = BlogPost() - post.info = 'my post' - self.assertRaises(ValidationError, post.validate) - post.info = ['test', 'test'] - self.assertRaises(ValidationError, post.validate) + post.info = {"$title": "test"} + with pytest.raises(ValidationError): + post.validate() - post.info = {'$title': 'test'} - self.assertRaises(ValidationError, post.validate) + post.info = {"nested": {"$title": "test"}} + with pytest.raises(ValidationError): + post.validate() - post.info = {'nested': {'$title': 'test'}} - self.assertRaises(ValidationError, post.validate) + post.info = 
{"$title.test": "test"} + with pytest.raises(ValidationError): + post.validate() - post.info = {'the.title': 'test'} - self.assertRaises(ValidationError, post.validate) + post.info = {"nested": {"the.title": "test"}} + if get_mongodb_version() < MONGODB_36: + # MongoDB < 3.6 rejects dots + # To avoid checking the mongodb version from the DictField class + # we rely on MongoDB to reject the data during the save + post.validate() + with pytest.raises(InvalidDocument): + post.save() + else: + post.validate() - post.info = {'nested': {'the.title': 'test'}} - self.assertRaises(ValidationError, post.validate) + post.info = {"dollar_and_dot": {"te$st.test": "test"}} + if get_mongodb_version() < MONGODB_36: + post.validate() + with pytest.raises(InvalidDocument): + post.save() + else: + post.validate() - post.info = {1: 'test'} - self.assertRaises(ValidationError, post.validate) + def test_general_things(self): + """Ensure that dict types work as expected.""" - post.info = {'title': 'test'} + class BlogPost(Document): + info = DictField() + + BlogPost.drop_collection() + + post = BlogPost(info={"title": "test"}) post.save() post = BlogPost() - post.info = {'title': 'dollar_sign', 'details': {'te$t': 'test'}} + post.info = {"title": "dollar_sign", "details": {"te$t": "test"}} post.save() post = BlogPost() - post.info = {'details': {'test': 'test'}} + post.info = {"details": {"test": "test"}} post.save() post = BlogPost() - post.info = {'details': {'test': 3}} + post.info = {"details": {"test": 3}} post.save() - self.assertEqual(BlogPost.objects.count(), 4) - self.assertEqual( - BlogPost.objects.filter(info__title__exact='test').count(), 1) - self.assertEqual( - BlogPost.objects.filter(info__details__test__exact='test').count(), 1) + assert BlogPost.objects.count() == 4 + assert BlogPost.objects.filter(info__title__exact="test").count() == 1 + assert BlogPost.objects.filter(info__details__test__exact="test").count() == 1 - post = 
BlogPost.objects.filter(info__title__exact='dollar_sign').first() - self.assertIn('te$t', post['info']['details']) + post = BlogPost.objects.filter(info__title__exact="dollar_sign").first() + assert "te$t" in post["info"]["details"] # Confirm handles non strings or non existing keys - self.assertEqual( - BlogPost.objects.filter(info__details__test__exact=5).count(), 0) - self.assertEqual( - BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) + assert BlogPost.objects.filter(info__details__test__exact=5).count() == 0 + assert BlogPost.objects.filter(info__made_up__test__exact="test").count() == 0 - post = BlogPost.objects.create(info={'title': 'original'}) - post.info.update({'title': 'updated'}) + post = BlogPost.objects.create(info={"title": "original"}) + post.info.update({"title": "updated"}) post.save() post.reload() - self.assertEqual('updated', post.info['title']) + assert "updated" == post.info["title"] - post.info.setdefault('authors', []) + post.info.setdefault("authors", []) post.save() post.reload() - self.assertEqual([], post.info['authors']) + assert post.info["authors"] == [] def test_dictfield_dump_document(self): """Ensure a DictField can handle another document's dump.""" + class Doc(Document): field = DictField() @@ -106,51 +129,60 @@ class TestDictField(MongoDBTestCase): id = IntField(primary_key=True, default=1) recursive = DictField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class ToEmbedChild(ToEmbedParent): pass to_embed_recursive = ToEmbed(id=1).save() to_embed = ToEmbed( - id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save() + id=2, recursive=to_embed_recursive.to_mongo().to_dict() + ).save() doc = Doc(field=to_embed.to_mongo().to_dict()) doc.save() - self.assertIsInstance(doc.field, dict) - self.assertEqual(doc.field, {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}}) + assert isinstance(doc.field, dict) + assert doc.field == {"_id": 2, "recursive": {"_id": 1, "recursive": 
{}}} # Same thing with a Document with a _cls field to_embed_recursive = ToEmbedChild(id=1).save() to_embed_child = ToEmbedChild( - id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save() + id=2, recursive=to_embed_recursive.to_mongo().to_dict() + ).save() doc = Doc(field=to_embed_child.to_mongo().to_dict()) doc.save() - self.assertIsInstance(doc.field, dict) + assert isinstance(doc.field, dict) expected = { - '_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild', - 'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}} + "_id": 2, + "_cls": "ToEmbedParent.ToEmbedChild", + "recursive": { + "_id": 1, + "_cls": "ToEmbedParent.ToEmbedChild", + "recursive": {}, + }, } - self.assertEqual(doc.field, expected) + assert doc.field == expected def test_dictfield_strict(self): """Ensure that dict field handles validation if provided a strict field type.""" + class Simple(Document): mapping = DictField(field=IntField()) Simple.drop_collection() e = Simple() - e.mapping['someint'] = 1 + e.mapping["someint"] = 1 e.save() # try creating an invalid mapping - with self.assertRaises(ValidationError): - e.mapping['somestring'] = "abc" + with pytest.raises(ValidationError): + e.mapping["somestring"] = "abc" e.save() def test_dictfield_complex(self): """Ensure that the dict field can handle the complex types.""" + class SettingBase(EmbeddedDocument): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class StringSetting(SettingBase): value = StringField() @@ -164,73 +196,76 @@ class TestDictField(MongoDBTestCase): Simple.drop_collection() e = Simple() - e.mapping['somestring'] = StringSetting(value='foo') - e.mapping['someint'] = IntegerSetting(value=42) - e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!', - 'float': 1.001, - 'complex': IntegerSetting(value=42), - 'list': [IntegerSetting(value=42), - StringSetting(value='foo')]} + e.mapping["somestring"] = StringSetting(value="foo") + e.mapping["someint"] = IntegerSetting(value=42) 
+ e.mapping["nested_dict"] = { + "number": 1, + "string": "Hi!", + "float": 1.001, + "complex": IntegerSetting(value=42), + "list": [IntegerSetting(value=42), StringSetting(value="foo")], + } e.save() e2 = Simple.objects.get(id=e.id) - self.assertIsInstance(e2.mapping['somestring'], StringSetting) - self.assertIsInstance(e2.mapping['someint'], IntegerSetting) + assert isinstance(e2.mapping["somestring"], StringSetting) + assert isinstance(e2.mapping["someint"], IntegerSetting) # Test querying - self.assertEqual( - Simple.objects.filter(mapping__someint__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__number=1).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1) + assert Simple.objects.filter(mapping__someint__value=42).count() == 1 + assert Simple.objects.filter(mapping__nested_dict__number=1).count() == 1 + assert ( + Simple.objects.filter(mapping__nested_dict__complex__value=42).count() == 1 + ) + assert ( + Simple.objects.filter(mapping__nested_dict__list__0__value=42).count() == 1 + ) + assert ( + Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count() + == 1 + ) # Confirm can update + Simple.objects().update(set__mapping={"someint": IntegerSetting(value=10)}) Simple.objects().update( - set__mapping={"someint": IntegerSetting(value=10)}) - Simple.objects().update( - set__mapping__nested_dict__list__1=StringSetting(value='Boo')) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0) - self.assertEqual( - Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1) + set__mapping__nested_dict__list__1=StringSetting(value="Boo") + ) + assert ( + 
Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count() + == 0 + ) + assert ( + Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count() + == 1 + ) def test_push_dict(self): class MyModel(Document): events = ListField(DictField()) - doc = MyModel(events=[{'a': 1}]).save() + doc = MyModel(events=[{"a": 1}]).save() raw_doc = get_as_pymongo(doc) - expected_raw_doc = { - '_id': doc.id, - 'events': [{'a': 1}] - } - self.assertEqual(raw_doc, expected_raw_doc) + expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]} + assert raw_doc == expected_raw_doc MyModel.objects(id=doc.id).update(push__events={}) raw_doc = get_as_pymongo(doc) - expected_raw_doc = { - '_id': doc.id, - 'events': [{'a': 1}, {}] - } - self.assertEqual(raw_doc, expected_raw_doc) + expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]} + assert raw_doc == expected_raw_doc def test_ensure_unique_default_instances(self): """Ensure that every field has it's own unique default instance.""" + class D(Document): data = DictField() data2 = DictField(default=lambda: {}) d1 = D() - d1.data['foo'] = 'bar' - d1.data2['foo'] = 'bar' + d1.data["foo"] = "bar" + d1.data2["foo"] = "bar" d2 = D() - self.assertEqual(d2.data, {}) - self.assertEqual(d2.data2, {}) + assert d2.data == {} + assert d2.data2 == {} def test_dict_field_invalid_dict_value(self): class DictFieldTest(Document): @@ -240,11 +275,13 @@ class TestDictField(MongoDBTestCase): test = DictFieldTest(dictionary=None) test.dictionary # Just access to test getter - self.assertRaises(ValidationError, test.validate) + with pytest.raises(ValidationError): + test.validate() test = DictFieldTest(dictionary=False) test.dictionary # Just access to test getter - self.assertRaises(ValidationError, test.validate) + with pytest.raises(ValidationError): + test.validate() def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self): class DictFieldTest(Document): @@ -255,31 +292,34 @@ class 
TestDictField(MongoDBTestCase): class Embedded(EmbeddedDocument): name = StringField() - embed = Embedded(name='garbage') + embed = Embedded(name="garbage") doc = DictFieldTest(dictionary=embed) - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as exc_info: doc.validate() - self.assertIn("'dictionary'", str(ctx_err.exception)) - self.assertIn('Only dictionaries may be used in a DictField', str(ctx_err.exception)) + + error_msg = str(exc_info.value) + assert "'dictionary'" in error_msg + assert "Only dictionaries may be used in a DictField" in error_msg def test_atomic_update_dict_field(self): """Ensure that the entire DictField can be atomically updated.""" + class Simple(Document): mapping = DictField(field=ListField(IntField(required=True))) Simple.drop_collection() e = Simple() - e.mapping['someints'] = [1, 2] + e.mapping["someints"] = [1, 2] e.save() e.update(set__mapping={"ints": [3, 4]}) e.reload() - self.assertEqual(BaseDict, type(e.mapping)) - self.assertEqual({"ints": [3, 4]}, e.mapping) + assert isinstance(e.mapping, BaseDict) + assert {"ints": [3, 4]} == e.mapping # try creating an invalid mapping - with self.assertRaises(ValueError): - e.update(set__mapping={"somestrings": ["foo", "bar", ]}) + with pytest.raises(ValueError): + e.update(set__mapping={"somestrings": ["foo", "bar"]}) def test_dictfield_with_referencefield_complex_nesting_cases(self): """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)""" @@ -296,29 +336,33 @@ class TestDictField(MongoDBTestCase): mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False))) mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True)))) mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False)))) - mapping8 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=True))))) - mapping9 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=False))))) + 
mapping8 = DictField( + ListField(DictField(ListField(ReferenceField(Doc, dbref=True)))) + ) + mapping9 = DictField( + ListField(DictField(ListField(ReferenceField(Doc, dbref=False)))) + ) Doc.drop_collection() Simple.drop_collection() - d = Doc(s='aa').save() + d = Doc(s="aa").save() e = Simple() - e.mapping0['someint'] = e.mapping1['someint'] = d - e.mapping2['someint'] = e.mapping3['someint'] = [d] - e.mapping4['someint'] = e.mapping5['someint'] = {'d': d} - e.mapping6['someint'] = e.mapping7['someint'] = [{'d': d}] - e.mapping8['someint'] = e.mapping9['someint'] = [{'d': [d]}] + e.mapping0["someint"] = e.mapping1["someint"] = d + e.mapping2["someint"] = e.mapping3["someint"] = [d] + e.mapping4["someint"] = e.mapping5["someint"] = {"d": d} + e.mapping6["someint"] = e.mapping7["someint"] = [{"d": d}] + e.mapping8["someint"] = e.mapping9["someint"] = [{"d": [d]}] e.save() s = Simple.objects.first() - self.assertIsInstance(s.mapping0['someint'], Doc) - self.assertIsInstance(s.mapping1['someint'], Doc) - self.assertIsInstance(s.mapping2['someint'][0], Doc) - self.assertIsInstance(s.mapping3['someint'][0], Doc) - self.assertIsInstance(s.mapping4['someint']['d'], Doc) - self.assertIsInstance(s.mapping5['someint']['d'], Doc) - self.assertIsInstance(s.mapping6['someint'][0]['d'], Doc) - self.assertIsInstance(s.mapping7['someint'][0]['d'], Doc) - self.assertIsInstance(s.mapping8['someint'][0]['d'][0], Doc) - self.assertIsInstance(s.mapping9['someint'][0]['d'][0], Doc) + assert isinstance(s.mapping0["someint"], Doc) + assert isinstance(s.mapping1["someint"], Doc) + assert isinstance(s.mapping2["someint"][0], Doc) + assert isinstance(s.mapping3["someint"][0], Doc) + assert isinstance(s.mapping4["someint"]["d"], Doc) + assert isinstance(s.mapping5["someint"]["d"], Doc) + assert isinstance(s.mapping6["someint"][0]["d"], Doc) + assert isinstance(s.mapping7["someint"][0]["d"], Doc) + assert isinstance(s.mapping8["someint"][0]["d"][0], Doc) + assert 
isinstance(s.mapping9["someint"][0]["d"][0], Doc) diff --git a/tests/fields/test_email_field.py b/tests/fields/test_email_field.py index 3ce49d62..5a58ede4 100644 --- a/tests/fields/test_email_field.py +++ b/tests/fields/test_email_field.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- import sys -from unittest import SkipTest + +import pytest from mongoengine import * - from tests.utils import MongoDBTestCase @@ -12,52 +12,52 @@ class TestEmailField(MongoDBTestCase): class User(Document): email = EmailField() - user = User(email='ross@example.com') + user = User(email="ross@example.com") user.validate() - user = User(email='ross@example.co.uk') + user = User(email="ross@example.co.uk") user.validate() - user = User(email=('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S' - 'aJIazqqWkm7.net')) + user = User( + email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5SaJIazqqWkm7.net") + ) user.validate() - user = User(email='new-tld@example.technology') + user = User(email="new-tld@example.technology") user.validate() - user = User(email='ross@example.com.') - self.assertRaises(ValidationError, user.validate) + user = User(email="ross@example.com.") + with pytest.raises(ValidationError): + user.validate() # unicode domain - user = User(email=u'user@пример.рф') + user = User(email=u"user@пример.рф") user.validate() # invalid unicode domain - user = User(email=u'user@пример') - self.assertRaises(ValidationError, user.validate) + user = User(email=u"user@пример") + with pytest.raises(ValidationError): + user.validate() # invalid data type user = User(email=123) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() def test_email_field_unicode_user(self): - # Don't run this test on pypy3, which doesn't support unicode regex: - # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode - if sys.version_info[:2] == (3, 2): - raise SkipTest('unicode email addresses are not supported on PyPy 3') - 
class User(Document): email = EmailField() # unicode user shouldn't validate by default... - user = User(email=u'Dörte@Sörensen.example.com') - self.assertRaises(ValidationError, user.validate) + user = User(email=u"Dörte@Sörensen.example.com") + with pytest.raises(ValidationError): + user.validate() # ...but it should be fine with allow_utf8_user set to True class User(Document): email = EmailField(allow_utf8_user=True) - user = User(email=u'Dörte@Sörensen.example.com') + user = User(email=u"Dörte@Sörensen.example.com") user.validate() def test_email_field_domain_whitelist(self): @@ -65,43 +65,48 @@ class TestEmailField(MongoDBTestCase): email = EmailField() # localhost domain shouldn't validate by default... - user = User(email='me@localhost') - self.assertRaises(ValidationError, user.validate) + user = User(email="me@localhost") + with pytest.raises(ValidationError): + user.validate() # ...but it should be fine if it's whitelisted class User(Document): - email = EmailField(domain_whitelist=['localhost']) + email = EmailField(domain_whitelist=["localhost"]) - user = User(email='me@localhost') + user = User(email="me@localhost") user.validate() def test_email_domain_validation_fails_if_invalid_idn(self): class User(Document): email = EmailField() - invalid_idn = '.google.com' - user = User(email='me@%s' % invalid_idn) - with self.assertRaises(ValidationError) as ctx_err: + invalid_idn = ".google.com" + user = User(email="me@%s" % invalid_idn) + + with pytest.raises(ValidationError) as exc_info: user.validate() - self.assertIn("domain failed IDN encoding", str(ctx_err.exception)) + assert "domain failed IDN encoding" in str(exc_info.value) def test_email_field_ip_domain(self): class User(Document): email = EmailField() - valid_ipv4 = 'email@[127.0.0.1]' - valid_ipv6 = 'email@[2001:dB8::1]' - invalid_ip = 'email@[324.0.0.1]' + valid_ipv4 = "email@[127.0.0.1]" + valid_ipv6 = "email@[2001:dB8::1]" + invalid_ip = "email@[324.0.0.1]" # IP address as a domain shouldn't 
validate by default... user = User(email=valid_ipv4) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() user = User(email=valid_ipv6) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() user = User(email=invalid_ip) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() # ...but it should be fine with allow_ip_domain set to True class User(Document): @@ -115,16 +120,18 @@ class TestEmailField(MongoDBTestCase): # invalid IP should still fail validation user = User(email=invalid_ip) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() def test_email_field_honors_regex(self): class User(Document): - email = EmailField(regex=r'\w+@example.com') + email = EmailField(regex=r"\w+@example.com") # Fails regex validation - user = User(email='me@foo.com') - self.assertRaises(ValidationError, user.validate) + user = User(email="me@foo.com") + with pytest.raises(ValidationError): + user.validate() # Passes regex validation - user = User(email='me@example.com') - self.assertIsNone(user.validate()) + user = User(email="me@example.com") + assert user.validate() is None diff --git a/tests/fields/test_embedded_document_field.py b/tests/fields/test_embedded_document_field.py index a262d054..13ca9c0b 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -1,7 +1,18 @@ # -*- coding: utf-8 -*- -from mongoengine import Document, StringField, ValidationError, EmbeddedDocument, EmbeddedDocumentField, \ - InvalidQueryError, LookUpError, IntField, GenericEmbeddedDocumentField, ListField, EmbeddedDocumentListField, \ - ReferenceField +import pytest + +from mongoengine import ( + Document, + EmbeddedDocument, + EmbeddedDocumentField, + GenericEmbeddedDocumentField, + IntField, + InvalidQueryError, + ListField, + 
LookUpError, + StringField, + ValidationError, +) from tests.utils import MongoDBTestCase @@ -12,37 +23,41 @@ class TestEmbeddedDocumentField(MongoDBTestCase): name = StringField() field = EmbeddedDocumentField(MyDoc) - self.assertEqual(field.document_type_obj, MyDoc) + assert field.document_type_obj == MyDoc - field2 = EmbeddedDocumentField('MyDoc') - self.assertEqual(field2.document_type_obj, 'MyDoc') + field2 = EmbeddedDocumentField("MyDoc") + assert field2.document_type_obj == "MyDoc" def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self): - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): EmbeddedDocumentField(dict) def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self): - class MyDoc(Document): name = StringField() - emb = EmbeddedDocumentField('MyDoc') - with self.assertRaises(ValidationError) as ctx: + emb = EmbeddedDocumentField("MyDoc") + with pytest.raises(ValidationError) as exc_info: emb.document_type - self.assertIn('Invalid embedded document class provided to an EmbeddedDocumentField', str(ctx.exception)) + assert ( + "Invalid embedded document class provided to an EmbeddedDocumentField" + in str(exc_info.value) + ) def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self): # Relates to #1661 class MyDoc(Document): name = StringField() - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): + class MyFailingDoc(Document): emb = EmbeddedDocumentField(MyDoc) - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): + class MyFailingdoc2(Document): - emb = EmbeddedDocumentField('MyDoc') + emb = EmbeddedDocumentField("MyDoc") def test_query_embedded_document_attribute(self): class AdminSettings(EmbeddedDocument): @@ -55,34 +70,31 @@ class TestEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - p = Person( - settings=AdminSettings(foo1='bar1', foo2='bar2'), - name='John', - ).save() + p = 
Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save() # Test non exiting attribute - with self.assertRaises(InvalidQueryError) as ctx_err: - Person.objects(settings__notexist='bar').first() - self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') + with pytest.raises(InvalidQueryError) as exc_info: + Person.objects(settings__notexist="bar").first() + assert str(exc_info.value) == u'Cannot resolve field "notexist"' - with self.assertRaises(LookUpError): - Person.objects.only('settings.notexist') + with pytest.raises(LookUpError): + Person.objects.only("settings.notexist") # Test existing attribute - self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p.id) - only_p = Person.objects.only('settings.foo1').first() - self.assertEqual(only_p.settings.foo1, p.settings.foo1) - self.assertIsNone(only_p.settings.foo2) - self.assertIsNone(only_p.name) + assert Person.objects(settings__foo1="bar1").first().id == p.id + only_p = Person.objects.only("settings.foo1").first() + assert only_p.settings.foo1 == p.settings.foo1 + assert only_p.settings.foo2 is None + assert only_p.name is None - exclude_p = Person.objects.exclude('settings.foo1').first() - self.assertIsNone(exclude_p.settings.foo1) - self.assertEqual(exclude_p.settings.foo2, p.settings.foo2) - self.assertEqual(exclude_p.name, p.name) + exclude_p = Person.objects.exclude("settings.foo1").first() + assert exclude_p.settings.foo1 is None + assert exclude_p.settings.foo2 == p.settings.foo2 + assert exclude_p.name == p.name def test_query_embedded_document_attribute_with_inheritance(self): class BaseSettings(EmbeddedDocument): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} base_foo = StringField() class AdminSettings(BaseSettings): @@ -93,26 +105,26 @@ class TestEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo')) + p = 
Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo")) p.save() # Test non exiting attribute - with self.assertRaises(InvalidQueryError) as ctx_err: - self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id) - self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') + with pytest.raises(InvalidQueryError) as exc_info: + assert Person.objects(settings__notexist="bar").first().id == p.id + assert str(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute - self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id) - self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id) + assert Person.objects(settings__base_foo="basefoo").first().id == p.id + assert Person.objects(settings__sub_foo="subfoo").first().id == p.id - only_p = Person.objects.only('settings.base_foo', 'settings._cls').first() - self.assertEqual(only_p.settings.base_foo, 'basefoo') - self.assertIsNone(only_p.settings.sub_foo) + only_p = Person.objects.only("settings.base_foo", "settings._cls").first() + assert only_p.settings.base_foo == "basefoo" + assert only_p.settings.sub_foo is None def test_query_list_embedded_document_with_inheritance(self): class Post(EmbeddedDocument): title = StringField(max_length=120, required=True) - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class TextPost(Post): content = StringField() @@ -123,22 +135,21 @@ class TestEmbeddedDocumentField(MongoDBTestCase): class Record(Document): posts = ListField(EmbeddedDocumentField(Post)) - record_movie = Record(posts=[MoviePost(author='John', title='foo')]).save() - record_text = Record(posts=[TextPost(content='a', title='foo')]).save() + record_movie = Record(posts=[MoviePost(author="John", title="foo")]).save() + record_text = Record(posts=[TextPost(content="a", title="foo")]).save() records = list(Record.objects(posts__author=record_movie.posts[0].author)) - 
self.assertEqual(len(records), 1) - self.assertEqual(records[0].id, record_movie.id) + assert len(records) == 1 + assert records[0].id == record_movie.id records = list(Record.objects(posts__content=record_text.posts[0].content)) - self.assertEqual(len(records), 1) - self.assertEqual(records[0].id, record_text.id) + assert len(records) == 1 + assert records[0].id == record_text.id - self.assertEqual(Record.objects(posts__title='foo').count(), 2) + assert Record.objects(posts__title="foo").count() == 2 class TestGenericEmbeddedDocumentField(MongoDBTestCase): - def test_generic_embedded_document(self): class Car(EmbeddedDocument): name = StringField() @@ -153,21 +164,22 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - person = Person(name='Test User') - person.like = Car(name='Fiat') + person = Person(name="Test User") + person.like = Car(name="Fiat") person.save() person = Person.objects.first() - self.assertIsInstance(person.like, Car) + assert isinstance(person.like, Car) person.like = Dish(food="arroz", number=15) person.save() person = Person.objects.first() - self.assertIsInstance(person.like, Dish) + assert isinstance(person.like, Dish) def test_generic_embedded_document_choices(self): """Ensure you can limit GenericEmbeddedDocument choices.""" + class Car(EmbeddedDocument): name = StringField() @@ -181,20 +193,22 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - person = Person(name='Test User') - person.like = Car(name='Fiat') - self.assertRaises(ValidationError, person.validate) + person = Person(name="Test User") + person.like = Car(name="Fiat") + with pytest.raises(ValidationError): + person.validate() person.like = Dish(food="arroz", number=15) person.save() person = Person.objects.first() - self.assertIsInstance(person.like, Dish) + assert isinstance(person.like, Dish) def test_generic_list_embedded_document_choices(self): """Ensure you can limit GenericEmbeddedDocument choices 
inside a list field. """ + class Car(EmbeddedDocument): name = StringField() @@ -208,39 +222,38 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - person = Person(name='Test User') - person.likes = [Car(name='Fiat')] - self.assertRaises(ValidationError, person.validate) + person = Person(name="Test User") + person.likes = [Car(name="Fiat")] + with pytest.raises(ValidationError): + person.validate() person.likes = [Dish(food="arroz", number=15)] person.save() person = Person.objects.first() - self.assertIsInstance(person.likes[0], Dish) + assert isinstance(person.likes[0], Dish) def test_choices_validation_documents(self): """ Ensure fields with document choices validate given a valid choice. """ + class UserComments(EmbeddedDocument): author = StringField() message = StringField() class BlogPost(Document): - comments = ListField( - GenericEmbeddedDocumentField(choices=(UserComments,)) - ) + comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,))) # Ensure Validation Passes - BlogPost(comments=[ - UserComments(author='user2', message='message2'), - ]).save() + BlogPost(comments=[UserComments(author="user2", message="message2")]).save() def test_choices_validation_documents_invalid(self): """ Ensure fields with document choices validate given an invalid choice. This should throw a ValidationError exception. 
""" + class UserComments(EmbeddedDocument): author = StringField() message = StringField() @@ -250,31 +263,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): message = StringField() class BlogPost(Document): - comments = ListField( - GenericEmbeddedDocumentField(choices=(UserComments,)) - ) + comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,))) # Single Entry Failure - post = BlogPost(comments=[ - ModeratorComments(author='mod1', message='message1'), - ]) - self.assertRaises(ValidationError, post.save) + post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")]) + with pytest.raises(ValidationError): + post.save() # Mixed Entry Failure - post = BlogPost(comments=[ - ModeratorComments(author='mod1', message='message1'), - UserComments(author='user2', message='message2'), - ]) - self.assertRaises(ValidationError, post.save) + post = BlogPost( + comments=[ + ModeratorComments(author="mod1", message="message1"), + UserComments(author="user2", message="message2"), + ] + ) + with pytest.raises(ValidationError): + post.save() def test_choices_validation_documents_inheritance(self): """ Ensure fields with document choices validate given subclass of choice. 
""" + class Comments(EmbeddedDocument): - meta = { - 'abstract': True - } + meta = {"abstract": True} author = StringField() message = StringField() @@ -282,14 +294,10 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): pass class BlogPost(Document): - comments = ListField( - GenericEmbeddedDocumentField(choices=(Comments,)) - ) + comments = ListField(GenericEmbeddedDocumentField(choices=(Comments,))) # Save Valid EmbeddedDocument Type - BlogPost(comments=[ - UserComments(author='user2', message='message2'), - ]).save() + BlogPost(comments=[UserComments(author="user2", message="message2")]).save() def test_query_generic_embedded_document_attribute(self): class AdminSettings(EmbeddedDocument): @@ -299,28 +307,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): foo2 = StringField() class Person(Document): - settings = GenericEmbeddedDocumentField(choices=(AdminSettings, NonAdminSettings)) + settings = GenericEmbeddedDocumentField( + choices=(AdminSettings, NonAdminSettings) + ) Person.drop_collection() - p1 = Person(settings=AdminSettings(foo1='bar1')).save() - p2 = Person(settings=NonAdminSettings(foo2='bar2')).save() + p1 = Person(settings=AdminSettings(foo1="bar1")).save() + p2 = Person(settings=NonAdminSettings(foo2="bar2")).save() # Test non exiting attribute - with self.assertRaises(InvalidQueryError) as ctx_err: - Person.objects(settings__notexist='bar').first() - self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') + with pytest.raises(InvalidQueryError) as exc_info: + Person.objects(settings__notexist="bar").first() + assert str(exc_info.value) == u'Cannot resolve field "notexist"' - with self.assertRaises(LookUpError): - Person.objects.only('settings.notexist') + with pytest.raises(LookUpError): + Person.objects.only("settings.notexist") # Test existing attribute - self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p1.id) - self.assertEqual(Person.objects(settings__foo2='bar2').first().id, 
p2.id) + assert Person.objects(settings__foo1="bar1").first().id == p1.id + assert Person.objects(settings__foo2="bar2").first().id == p2.id def test_query_generic_embedded_document_attribute_with_inheritance(self): class BaseSettings(EmbeddedDocument): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} base_foo = StringField() class AdminSettings(BaseSettings): @@ -331,14 +341,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): Person.drop_collection() - p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo')) + p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo")) p.save() # Test non exiting attribute - with self.assertRaises(InvalidQueryError) as ctx_err: - self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id) - self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') + with pytest.raises(InvalidQueryError) as exc_info: + assert Person.objects(settings__notexist="bar").first().id == p.id + assert str(exc_info.value) == u'Cannot resolve field "notexist"' # Test existing attribute - self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id) - self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id) + assert Person.objects(settings__base_foo="basefoo").first().id == p.id + assert Person.objects(settings__sub_foo="subfoo").first().id == p.id diff --git a/tests/fields/fields.py b/tests/fields/test_fields.py similarity index 59% rename from tests/fields/fields.py rename to tests/fields/test_fields.py index 68baab46..fe349d1e 100644 --- a/tests/fields/fields.py +++ b/tests/fields/test_fields.py @@ -2,31 +2,55 @@ import datetime import unittest -from nose.plugins.skip import SkipTest - from bson import DBRef, ObjectId, SON +import pytest -from mongoengine import Document, StringField, IntField, DateTimeField, DateField, ValidationError, \ - ComplexDateTimeField, FloatField, ListField, ReferenceField, DictField, 
EmbeddedDocument, EmbeddedDocumentField, \ - GenericReferenceField, DoesNotExist, NotRegistered, OperationError, DynamicField, \ - FieldDoesNotExist, EmbeddedDocumentListField, MultipleObjectsReturned, NotUniqueError, BooleanField,\ - ObjectIdField, SortedListField, GenericLazyReferenceField, LazyReferenceField, DynamicDocument -from mongoengine.base import (BaseField, EmbeddedDocumentList, _document_registry) +from mongoengine import ( + BooleanField, + ComplexDateTimeField, + DateField, + DateTimeField, + DictField, + Document, + DoesNotExist, + DynamicDocument, + DynamicField, + EmbeddedDocument, + EmbeddedDocumentField, + EmbeddedDocumentListField, + FieldDoesNotExist, + FloatField, + GenericLazyReferenceField, + GenericReferenceField, + IntField, + LazyReferenceField, + ListField, + MultipleObjectsReturned, + NotRegistered, + NotUniqueError, + ObjectIdField, + OperationError, + ReferenceField, + SortedListField, + StringField, + ValidationError, +) +from mongoengine.base import BaseField, EmbeddedDocumentList, _document_registry from mongoengine.errors import DeprecatedError from tests.utils import MongoDBTestCase -class FieldTest(MongoDBTestCase): - +class TestField(MongoDBTestCase): def test_default_values_nothing_set(self): """Ensure that default field values are used when creating a document. 
""" + class Person(Document): name = StringField() age = IntField(default=30, required=False) - userid = StringField(default=lambda: 'test', required=True) + userid = StringField(default=lambda: "test", required=True) created = DateTimeField(default=datetime.datetime.utcnow) day = DateField(default=datetime.date.today) @@ -34,30 +58,29 @@ class FieldTest(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, - ['age', 'created', 'day', 'name', 'userid'] - ) + assert data_to_be_saved == ["age", "created", "day", "name", "userid"] - self.assertTrue(person.validate() is None) + assert person.validate() is None - self.assertEqual(person.name, person.name) - self.assertEqual(person.age, person.age) - self.assertEqual(person.userid, person.userid) - self.assertEqual(person.created, person.created) - self.assertEqual(person.day, person.day) + assert person.name == person.name + assert person.age == person.age + assert person.userid == person.userid + assert person.created == person.created + assert person.day == person.day - self.assertEqual(person._data['name'], person.name) - self.assertEqual(person._data['age'], person.age) - self.assertEqual(person._data['userid'], person.userid) - self.assertEqual(person._data['created'], person.created) - self.assertEqual(person._data['day'], person.day) + assert person._data["name"] == person.name + assert person._data["age"] == person.age + assert person._data["userid"] == person.userid + assert person._data["created"] == person.created + assert person._data["day"] == person.day # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual( - data_to_be_saved, ['age', 'created', 'day', 'name', 'userid']) + assert data_to_be_saved == ["age", "created", "day", "name", "userid"] - def test_custom_field_validation_raise_deprecated_error_when_validation_return_something(self): + def 
test_custom_field_validation_raise_deprecated_error_when_validation_return_something( + self, + ): # Covers introduction of a breaking change in the validation parameter (0.18) def _not_empty(z): return bool(z) @@ -67,34 +90,34 @@ class FieldTest(MongoDBTestCase): Person.drop_collection() - error = ("validation argument for `name` must not return anything, " - "it should raise a ValidationError if validation fails") + error = ( + "validation argument for `name` must not return anything, " + "it should raise a ValidationError if validation fails" + ) - with self.assertRaises(DeprecatedError) as ctx_err: + with pytest.raises(DeprecatedError) as exc_info: Person(name="").validate() - self.assertEqual(str(ctx_err.exception), error) + assert str(exc_info.value) == error - with self.assertRaises(DeprecatedError) as ctx_err: + with pytest.raises(DeprecatedError) as exc_info: Person(name="").save() - self.assertEqual(str(ctx_err.exception), error) + assert str(exc_info.value) == error def test_custom_field_validation_raise_validation_error(self): def _not_empty(z): if not z: - raise ValidationError('cantbeempty') + raise ValidationError("cantbeempty") class Person(Document): name = StringField(validation=_not_empty) Person.drop_collection() - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as exc_info: Person(name="").validate() - self.assertEqual("ValidationError (Person:None) (cantbeempty: ['name'])", str(ctx_err.exception)) - - with self.assertRaises(ValidationError): - Person(name="").save() - self.assertEqual("ValidationError (Person:None) (cantbeempty: ['name'])", str(ctx_err.exception)) + assert "ValidationError (Person:None) (cantbeempty: ['name'])" == str( + exc_info.value + ) Person(name="garbage").validate() Person(name="garbage").save() @@ -103,10 +126,11 @@ class FieldTest(MongoDBTestCase): """Ensure that default field values are used even when we explcitly initialize the doc with None values. 
""" + class Person(Document): name = StringField() age = IntField(default=30, required=False) - userid = StringField(default=lambda: 'test', required=True) + userid = StringField(default=lambda: "test", required=True) created = DateTimeField(default=datetime.datetime.utcnow) # Trying setting values to None @@ -114,32 +138,33 @@ class FieldTest(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + assert data_to_be_saved == ["age", "created", "userid"] - self.assertTrue(person.validate() is None) + assert person.validate() is None - self.assertEqual(person.name, person.name) - self.assertEqual(person.age, person.age) - self.assertEqual(person.userid, person.userid) - self.assertEqual(person.created, person.created) + assert person.name == person.name + assert person.age == person.age + assert person.userid == person.userid + assert person.created == person.created - self.assertEqual(person._data['name'], person.name) - self.assertEqual(person._data['age'], person.age) - self.assertEqual(person._data['userid'], person.userid) - self.assertEqual(person._data['created'], person.created) + assert person._data["name"] == person.name + assert person._data["age"] == person.age + assert person._data["userid"] == person.userid + assert person._data["created"] == person.created # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + assert data_to_be_saved == ["age", "created", "userid"] def test_default_values_when_setting_to_None(self): """Ensure that default field values are used when creating a document. 
""" + class Person(Document): name = StringField() age = IntField(default=30, required=False) - userid = StringField(default=lambda: 'test', required=True) + userid = StringField(default=lambda: "test", required=True) created = DateTimeField(default=datetime.datetime.utcnow) person = Person() @@ -150,25 +175,27 @@ class FieldTest(MongoDBTestCase): # Confirm saving now would store values data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + assert data_to_be_saved == ["age", "created", "userid"] - self.assertTrue(person.validate() is None) + assert person.validate() is None - self.assertEqual(person.name, None) - self.assertEqual(person.age, 30) - self.assertEqual(person.userid, 'test') - self.assertIsInstance(person.created, datetime.datetime) + assert person.name is None + assert person.age == 30 + assert person.userid == "test" + assert isinstance(person.created, datetime.datetime) - self.assertEqual(person._data['name'], person.name) - self.assertEqual(person._data['age'], person.age) - self.assertEqual(person._data['userid'], person.userid) - self.assertEqual(person._data['created'], person.created) + assert person._data["name"] == person.name + assert person._data["age"] == person.age + assert person._data["userid"] == person.userid + assert person._data["created"] == person.created # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + assert data_to_be_saved == ["age", "created", "userid"] - def test_default_value_is_not_used_when_changing_value_to_empty_list_for_strict_doc(self): + def test_default_value_is_not_used_when_changing_value_to_empty_list_for_strict_doc( + self, + ): """List field with default can be set to the empty list (strict)""" # Issue #1733 class Doc(Document): @@ -178,9 +205,11 @@ class FieldTest(MongoDBTestCase): doc.x = [] doc.save() reloaded = 
Doc.objects.get(id=doc.id) - self.assertEqual(reloaded.x, []) + assert reloaded.x == [] - def test_default_value_is_not_used_when_changing_value_to_empty_list_for_dyn_doc(self): + def test_default_value_is_not_used_when_changing_value_to_empty_list_for_dyn_doc( + self, + ): """List field with default can be set to the empty list (dynamic)""" # Issue #1733 class Doc(DynamicDocument): @@ -188,64 +217,73 @@ class FieldTest(MongoDBTestCase): doc = Doc(x=[1]).save() doc.x = [] - doc.y = 2 # Was triggering the bug + doc.y = 2 # Was triggering the bug doc.save() reloaded = Doc.objects.get(id=doc.id) - self.assertEqual(reloaded.x, []) + assert reloaded.x == [] def test_default_values_when_deleting_value(self): """Ensure that default field values are used after non-default values are explicitly deleted. """ + class Person(Document): name = StringField() age = IntField(default=30, required=False) - userid = StringField(default=lambda: 'test', required=True) + userid = StringField(default=lambda: "test", required=True) created = DateTimeField(default=datetime.datetime.utcnow) - person = Person(name="Ross", age=50, userid='different', - created=datetime.datetime(2014, 6, 12)) + person = Person( + name="Ross", + age=50, + userid="different", + created=datetime.datetime(2014, 6, 12), + ) del person.name del person.age del person.userid del person.created data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + assert data_to_be_saved == ["age", "created", "userid"] - self.assertTrue(person.validate() is None) + assert person.validate() is None - self.assertEqual(person.name, None) - self.assertEqual(person.age, 30) - self.assertEqual(person.userid, 'test') - self.assertIsInstance(person.created, datetime.datetime) - self.assertNotEqual(person.created, datetime.datetime(2014, 6, 12)) + assert person.name is None + assert person.age == 30 + assert person.userid == "test" + assert isinstance(person.created, 
datetime.datetime) + assert person.created != datetime.datetime(2014, 6, 12) - self.assertEqual(person._data['name'], person.name) - self.assertEqual(person._data['age'], person.age) - self.assertEqual(person._data['userid'], person.userid) - self.assertEqual(person._data['created'], person.created) + assert person._data["name"] == person.name + assert person._data["age"] == person.age + assert person._data["userid"] == person.userid + assert person._data["created"] == person.created # Confirm introspection changes nothing data_to_be_saved = sorted(person.to_mongo().keys()) - self.assertEqual(data_to_be_saved, ['age', 'created', 'userid']) + assert data_to_be_saved == ["age", "created", "userid"] def test_required_values(self): """Ensure that required field constraints are enforced.""" + class Person(Document): name = StringField(required=True) age = IntField(required=True) userid = StringField() person = Person(name="Test User") - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person = Person(age=30) - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() def test_not_required_handles_none_in_update(self): """Ensure that every fields should accept None if required is False. """ + class HandleNoneFields(Document): str_fld = StringField() int_fld = IntField() @@ -255,7 +293,7 @@ class FieldTest(MongoDBTestCase): HandleNoneFields.drop_collection() doc = HandleNoneFields() - doc.str_fld = u'spam ham egg' + doc.str_fld = u"spam ham egg" doc.int_fld = 42 doc.flt_fld = 4.2 doc.com_dt_fld = datetime.datetime.utcnow() @@ -267,20 +305,21 @@ class FieldTest(MongoDBTestCase): set__flt_fld=None, set__comp_dt_fld=None, ) - self.assertEqual(res, 1) + assert res == 1 # Retrive data from db and verify it. 
ret = HandleNoneFields.objects.all()[0] - self.assertIsNone(ret.str_fld) - self.assertIsNone(ret.int_fld) - self.assertIsNone(ret.flt_fld) + assert ret.str_fld is None + assert ret.int_fld is None + assert ret.flt_fld is None - self.assertIsNone(ret.comp_dt_fld) + assert ret.comp_dt_fld is None def test_not_required_handles_none_from_database(self): """Ensure that every field can handle null values from the database. """ + class HandleNoneFields(Document): str_fld = StringField(required=True) int_fld = IntField(required=True) @@ -290,100 +329,107 @@ class FieldTest(MongoDBTestCase): HandleNoneFields.drop_collection() doc = HandleNoneFields() - doc.str_fld = u'spam ham egg' + doc.str_fld = u"spam ham egg" doc.int_fld = 42 doc.flt_fld = 4.2 doc.comp_dt_fld = datetime.datetime.utcnow() doc.save() # Unset all the fields - obj = HandleNoneFields._get_collection().update({"_id": doc.id}, { - "$unset": { - "str_fld": 1, - "int_fld": 1, - "flt_fld": 1, - "comp_dt_fld": 1 - } - }) + HandleNoneFields._get_collection().update_one( + {"_id": doc.id}, + {"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}}, + ) # Retrive data from db and verify it. ret = HandleNoneFields.objects.first() - self.assertIsNone(ret.str_fld) - self.assertIsNone(ret.int_fld) - self.assertIsNone(ret.flt_fld) - self.assertIsNone(ret.comp_dt_fld) + assert ret.str_fld is None + assert ret.int_fld is None + assert ret.flt_fld is None + assert ret.comp_dt_fld is None # Retrieved object shouldn't pass validation when a re-save is # attempted. - self.assertRaises(ValidationError, ret.validate) + with pytest.raises(ValidationError): + ret.validate() def test_default_id_validation_as_objectid(self): """Ensure that invalid values cannot be assigned to an ObjectIdField. 
""" + class Person(Document): name = StringField() - person = Person(name='Test User') - self.assertEqual(person.id, None) + person = Person(name="Test User") + assert person.id is None person.id = 47 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() - person.id = 'abc' - self.assertRaises(ValidationError, person.validate) + person.id = "abc" + with pytest.raises(ValidationError): + person.validate() person.id = str(ObjectId()) person.validate() def test_string_validation(self): """Ensure that invalid values cannot be assigned to string fields.""" + class Person(Document): name = StringField(max_length=20) - userid = StringField(r'[0-9a-z_]+$') + userid = StringField(r"[0-9a-z_]+$") person = Person(name=34) - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() # Test regex validation on userid - person = Person(userid='test.User') - self.assertRaises(ValidationError, person.validate) + person = Person(userid="test.User") + with pytest.raises(ValidationError): + person.validate() - person.userid = 'test_user' - self.assertEqual(person.userid, 'test_user') + person.userid = "test_user" + assert person.userid == "test_user" person.validate() # Test max length validation on name - person = Person(name='Name that is more than twenty characters') - self.assertRaises(ValidationError, person.validate) + person = Person(name="Name that is more than twenty characters") + with pytest.raises(ValidationError): + person.validate() - person.name = 'Shorter name' + person.name = "Shorter name" person.validate() def test_db_field_validation(self): """Ensure that db_field doesn't accept invalid values.""" # dot in the name - with self.assertRaises(ValueError): + with pytest.raises(ValueError): + class User(Document): - name = StringField(db_field='user.name') + name = StringField(db_field="user.name") # name starting with $ - with self.assertRaises(ValueError): - 
class User(Document): - name = StringField(db_field='$name') + with pytest.raises(ValueError): + + class UserX1(Document): + name = StringField(db_field="$name") # name containing a null character - with self.assertRaises(ValueError): - class User(Document): - name = StringField(db_field='name\0') + with pytest.raises(ValueError): + + class UserX2(Document): + name = StringField(db_field="name\0") def test_list_validation(self): """Ensure that a list field only accepts lists with valid elements.""" access_level_choices = ( - ('a', u'Administration'), - ('b', u'Manager'), - ('c', u'Staff'), + ("a", u"Administration"), + ("b", u"Manager"), + ("c", u"Staff"), ) class User(Document): @@ -400,49 +446,57 @@ class FieldTest(MongoDBTestCase): authors_as_lazy = ListField(LazyReferenceField(User)) generic = ListField(GenericReferenceField()) generic_as_lazy = ListField(GenericLazyReferenceField()) - access_list = ListField(choices=access_level_choices, display_sep=', ') + access_list = ListField(choices=access_level_choices, display_sep=", ") User.drop_collection() BlogPost.drop_collection() - post = BlogPost(content='Went for a walk today...') + post = BlogPost(content="Went for a walk today...") post.validate() - post.tags = 'fun' - self.assertRaises(ValidationError, post.validate) + post.tags = "fun" + with pytest.raises(ValidationError): + post.validate() post.tags = [1, 2] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() - post.tags = ['fun', 'leisure'] + post.tags = ["fun", "leisure"] post.validate() - post.tags = ('fun', 'leisure') + post.tags = ("fun", "leisure") post.validate() - post.access_list = 'a,b' - self.assertRaises(ValidationError, post.validate) + post.access_list = "a,b" + with pytest.raises(ValidationError): + post.validate() - post.access_list = ['c', 'd'] - self.assertRaises(ValidationError, post.validate) + post.access_list = ["c", "d"] + with pytest.raises(ValidationError): + post.validate() 
- post.access_list = ['a', 'b'] + post.access_list = ["a", "b"] post.validate() - self.assertEqual(post.get_access_list_display(), u'Administration, Manager') + assert post.get_access_list_display() == u"Administration, Manager" - post.comments = ['a'] - self.assertRaises(ValidationError, post.validate) - post.comments = 'yay' - self.assertRaises(ValidationError, post.validate) + post.comments = ["a"] + with pytest.raises(ValidationError): + post.validate() + post.comments = "yay" + with pytest.raises(ValidationError): + post.validate() - comments = [Comment(content='Good for you'), Comment(content='Yay.')] + comments = [Comment(content="Good for you"), Comment(content="Yay.")] post.comments = comments post.validate() post.authors = [Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.authors = [User()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() user = User() user.save() @@ -450,34 +504,42 @@ class FieldTest(MongoDBTestCase): post.validate() post.authors_as_lazy = [Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.authors_as_lazy = [User()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.authors_as_lazy = [user] post.validate() post.generic = [1, 2] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic = [User(), Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic = [Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic = [user] post.validate() post.generic_as_lazy = [1, 2] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + 
post.validate() post.generic_as_lazy = [User(), Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic_as_lazy = [Comment()] - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() post.generic_as_lazy = [user] post.validate() @@ -485,41 +547,41 @@ class FieldTest(MongoDBTestCase): def test_sorted_list_sorting(self): """Ensure that a sorted list field properly sorts values. """ + class Comment(EmbeddedDocument): order = IntField() content = StringField() class BlogPost(Document): content = StringField() - comments = SortedListField(EmbeddedDocumentField(Comment), - ordering='order') + comments = SortedListField(EmbeddedDocumentField(Comment), ordering="order") tags = SortedListField(StringField()) BlogPost.drop_collection() - post = BlogPost(content='Went for a walk today...') + post = BlogPost(content="Went for a walk today...") post.save() - post.tags = ['leisure', 'fun'] + post.tags = ["leisure", "fun"] post.save() post.reload() - self.assertEqual(post.tags, ['fun', 'leisure']) + assert post.tags == ["fun", "leisure"] - comment1 = Comment(content='Good for you', order=1) - comment2 = Comment(content='Yay.', order=0) + comment1 = Comment(content="Good for you", order=1) + comment2 = Comment(content="Yay.", order=0) comments = [comment1, comment2] post.comments = comments post.save() post.reload() - self.assertEqual(post.comments[0].content, comment2.content) - self.assertEqual(post.comments[1].content, comment1.content) + assert post.comments[0].content == comment2.content + assert post.comments[1].content == comment1.content post.comments[0].order = 2 post.save() post.reload() - self.assertEqual(post.comments[0].content, comment1.content) - self.assertEqual(post.comments[1].content, comment2.content) + assert post.comments[0].content == comment1.content + assert post.comments[1].content == comment2.content def 
test_reverse_list_sorting(self): """Ensure that a reverse sorted list field properly sorts values""" @@ -529,77 +591,80 @@ class FieldTest(MongoDBTestCase): name = StringField() class CategoryList(Document): - categories = SortedListField(EmbeddedDocumentField(Category), - ordering='count', reverse=True) + categories = SortedListField( + EmbeddedDocumentField(Category), ordering="count", reverse=True + ) name = StringField() CategoryList.drop_collection() catlist = CategoryList(name="Top categories") - cat1 = Category(name='posts', count=10) - cat2 = Category(name='food', count=100) - cat3 = Category(name='drink', count=40) + cat1 = Category(name="posts", count=10) + cat2 = Category(name="food", count=100) + cat3 = Category(name="drink", count=40) catlist.categories = [cat1, cat2, cat3] catlist.save() catlist.reload() - self.assertEqual(catlist.categories[0].name, cat2.name) - self.assertEqual(catlist.categories[1].name, cat3.name) - self.assertEqual(catlist.categories[2].name, cat1.name) + assert catlist.categories[0].name == cat2.name + assert catlist.categories[1].name == cat3.name + assert catlist.categories[2].name == cat1.name def test_list_field(self): """Ensure that list types work as expected.""" + class BlogPost(Document): info = ListField() BlogPost.drop_collection() post = BlogPost() - post.info = 'my post' - self.assertRaises(ValidationError, post.validate) + post.info = "my post" + with pytest.raises(ValidationError): + post.validate() - post.info = {'title': 'test'} - self.assertRaises(ValidationError, post.validate) + post.info = {"title": "test"} + with pytest.raises(ValidationError): + post.validate() - post.info = ['test'] + post.info = ["test"] post.save() post = BlogPost() - post.info = [{'test': 'test'}] + post.info = [{"test": "test"}] post.save() post = BlogPost() - post.info = [{'test': 3}] + post.info = [{"test": 3}] post.save() - self.assertEqual(BlogPost.objects.count(), 3) - self.assertEqual( - 
BlogPost.objects.filter(info__exact='test').count(), 1) - self.assertEqual( - BlogPost.objects.filter(info__0__test='test').count(), 1) + assert BlogPost.objects.count() == 3 + assert BlogPost.objects.filter(info__exact="test").count() == 1 + assert BlogPost.objects.filter(info__0__test="test").count() == 1 # Confirm handles non strings or non existing keys - self.assertEqual( - BlogPost.objects.filter(info__0__test__exact='5').count(), 0) - self.assertEqual( - BlogPost.objects.filter(info__100__test__exact='test').count(), 0) + assert BlogPost.objects.filter(info__0__test__exact="5").count() == 0 + assert BlogPost.objects.filter(info__100__test__exact="test").count() == 0 # test queries by list post = BlogPost() - post.info = ['1', '2'] + post.info = ["1", "2"] post.save() - post = BlogPost.objects(info=['1', '2']).get() - post.info += ['3', '4'] + post = BlogPost.objects(info=["1", "2"]).get() + post.info += ["3", "4"] post.save() - self.assertEqual(BlogPost.objects(info=['1', '2', '3', '4']).count(), 1) - post = BlogPost.objects(info=['1', '2', '3', '4']).get() + assert BlogPost.objects(info=["1", "2", "3", "4"]).count() == 1 + post = BlogPost.objects(info=["1", "2", "3", "4"]).get() post.info *= 2 post.save() - self.assertEqual(BlogPost.objects(info=['1', '2', '3', '4', '1', '2', '3', '4']).count(), 1) + assert ( + BlogPost.objects(info=["1", "2", "3", "4", "1", "2", "3", "4"]).count() == 1 + ) def test_list_field_manipulative_operators(self): """Ensure that ListField works with standard list operators that manipulate the list. 
""" + class BlogPost(Document): ref = StringField() info = ListField(StringField()) @@ -608,162 +673,162 @@ class FieldTest(MongoDBTestCase): post = BlogPost() post.ref = "1234" - post.info = ['0', '1', '2', '3', '4', '5'] + post.info = ["0", "1", "2", "3", "4", "5"] post.save() def reset_post(): - post.info = ['0', '1', '2', '3', '4', '5'] + post.info = ["0", "1", "2", "3", "4", "5"] post.save() # '__add__(listB)' # listA+listB # operator.add(listA, listB) reset_post() - temp = ['a', 'b'] + temp = ["a", "b"] post.info = post.info + temp - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + assert post.info == ["0", "1", "2", "3", "4", "5", "a", "b"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + assert post.info == ["0", "1", "2", "3", "4", "5", "a", "b"] # '__delitem__(index)' # aka 'del list[index]' # aka 'operator.delitem(list, index)' reset_post() del post.info[2] # del from middle ('2') - self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + assert post.info == ["0", "1", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + assert post.info == ["0", "1", "3", "4", "5"] # '__delitem__(slice(i, j))' # aka 'del list[i:j]' # aka 'operator.delitem(list, slice(i,j))' reset_post() del post.info[1:3] # removes '1', '2' - self.assertEqual(post.info, ['0', '3', '4', '5']) + assert post.info == ["0", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', '3', '4', '5']) + assert post.info == ["0", "3", "4", "5"] # '__iadd__' # aka 'list += list' reset_post() - temp = ['a', 'b'] + temp = ["a", "b"] post.info += temp - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + assert post.info == ["0", "1", "2", "3", "4", "5", "a", "b"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'a', 'b']) + assert post.info == ["0", "1", "2", "3", "4", "5", "a", "b"] # '__imul__' # aka 
'list *= number' reset_post() post.info *= 2 - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] # '__mul__' # aka 'listA*listB' reset_post() post.info = post.info * 2 - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] # '__rmul__' # aka 'listB*listA' reset_post() post.info = 2 * post.info - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', '0', '1', '2', '3', '4', '5']) + assert post.info == ["0", "1", "2", "3", "4", "5", "0", "1", "2", "3", "4", "5"] # '__setitem__(index, value)' # aka 'list[index]=value' # aka 'setitem(list, value)' reset_post() - post.info[4] = 'a' - self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + post.info[4] = "a" + assert post.info == ["0", "1", "2", "3", "a", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + assert post.info == ["0", "1", "2", "3", "a", "5"] # __setitem__(index, value) with a negative index reset_post() - post.info[-2] = 'a' - self.assertEqual(post.info, ['0', '1', '2', '3', 'a', '5']) + post.info[-2] = "a" + assert post.info == ["0", "1", "2", "3", "a", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', 
'1', '2', '3', 'a', '5']) + assert post.info == ["0", "1", "2", "3", "a", "5"] # '__setitem__(slice(i, j), listB)' # aka 'listA[i:j] = listB' # aka 'setitem(listA, slice(i, j), listB)' reset_post() - post.info[1:3] = ['h', 'e', 'l', 'l', 'o'] - self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + post.info[1:3] = ["h", "e", "l", "l", "o"] + assert post.info == ["0", "h", "e", "l", "l", "o", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + assert post.info == ["0", "h", "e", "l", "l", "o", "3", "4", "5"] # '__setitem__(slice(i, j), listB)' with negative i and j reset_post() - post.info[-5:-3] = ['h', 'e', 'l', 'l', 'o'] - self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + post.info[-5:-3] = ["h", "e", "l", "l", "o"] + assert post.info == ["0", "h", "e", "l", "l", "o", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', 'h', 'e', 'l', 'l', 'o', '3', '4', '5']) + assert post.info == ["0", "h", "e", "l", "l", "o", "3", "4", "5"] # negative # 'append' reset_post() - post.info.append('h') - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h']) + post.info.append("h") + assert post.info == ["0", "1", "2", "3", "4", "5", "h"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h']) + assert post.info == ["0", "1", "2", "3", "4", "5", "h"] # 'extend' reset_post() - post.info.extend(['h', 'e', 'l', 'l', 'o']) - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o']) + post.info.extend(["h", "e", "l", "l", "o"]) + assert post.info == ["0", "1", "2", "3", "4", "5", "h", "e", "l", "l", "o"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '2', '3', '4', '5', 'h', 'e', 'l', 'l', 'o']) + assert post.info == ["0", "1", "2", "3", "4", "5", "h", "e", "l", "l", "o"] # 'insert' # 'pop' reset_post() x = post.info.pop(2) y = post.info.pop() - 
self.assertEqual(post.info, ['0', '1', '3', '4']) - self.assertEqual(x, '2') - self.assertEqual(y, '5') + assert post.info == ["0", "1", "3", "4"] + assert x == "2" + assert y == "5" post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '3', '4']) + assert post.info == ["0", "1", "3", "4"] # 'remove' reset_post() - post.info.remove('2') - self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + post.info.remove("2") + assert post.info == ["0", "1", "3", "4", "5"] post.save() post.reload() - self.assertEqual(post.info, ['0', '1', '3', '4', '5']) + assert post.info == ["0", "1", "3", "4", "5"] # 'reverse' reset_post() post.info.reverse() - self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0']) + assert post.info == ["5", "4", "3", "2", "1", "0"] post.save() post.reload() - self.assertEqual(post.info, ['5', '4', '3', '2', '1', '0']) + assert post.info == ["5", "4", "3", "2", "1", "0"] # 'sort': though this operator method does manipulate the list, it is # tested in the 'test_list_field_lexicograpic_operators' function @@ -775,16 +840,18 @@ class FieldTest(MongoDBTestCase): post = BlogPost() post.ref = "1234" - post.info = ['0', '1', '2', '3', '4', '5'] + post.info = ["0", "1", "2", "3", "4", "5"] # '__hash__' # aka 'hash(list)' - self.assertRaises(TypeError, lambda: hash(post.info)) + with pytest.raises(TypeError): + hash(post.info) def test_list_field_lexicographic_operators(self): """Ensure that ListField works with standard list operators that do lexigraphic ordering. 
""" + class BlogPost(Document): ref = StringField() text_info = ListField(StringField()) @@ -810,39 +877,39 @@ class FieldTest(MongoDBTestCase): blogLargeB.oid_info = [ "54495ad94c934721ede76f90", "54495ad94c934721ede76d23", - "54495ad94c934721ede76d00" + "54495ad94c934721ede76d00", ] blogLargeB.bool_info = [False, True] blogLargeB.save() blogLargeB.reload() # '__eq__' aka '==' - self.assertEqual(blogLargeA.text_info, blogLargeB.text_info) - self.assertEqual(blogLargeA.bool_info, blogLargeB.bool_info) + assert blogLargeA.text_info == blogLargeB.text_info + assert blogLargeA.bool_info == blogLargeB.bool_info # '__ge__' aka '>=' - self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info) - self.assertGreaterEqual(blogLargeA.text_info, blogLargeB.text_info) - self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info) - self.assertGreaterEqual(blogLargeA.bool_info, blogLargeB.bool_info) + assert blogLargeA.text_info >= blogSmall.text_info + assert blogLargeA.text_info >= blogLargeB.text_info + assert blogLargeA.bool_info >= blogSmall.bool_info + assert blogLargeA.bool_info >= blogLargeB.bool_info # '__gt__' aka '>' - self.assertGreaterEqual(blogLargeA.text_info, blogSmall.text_info) - self.assertGreaterEqual(blogLargeA.bool_info, blogSmall.bool_info) + assert blogLargeA.text_info >= blogSmall.text_info + assert blogLargeA.bool_info >= blogSmall.bool_info # '__le__' aka '<=' - self.assertLessEqual(blogSmall.text_info, blogLargeB.text_info) - self.assertLessEqual(blogLargeA.text_info, blogLargeB.text_info) - self.assertLessEqual(blogSmall.bool_info, blogLargeB.bool_info) - self.assertLessEqual(blogLargeA.bool_info, blogLargeB.bool_info) + assert blogSmall.text_info <= blogLargeB.text_info + assert blogLargeA.text_info <= blogLargeB.text_info + assert blogSmall.bool_info <= blogLargeB.bool_info + assert blogLargeA.bool_info <= blogLargeB.bool_info # '__lt__' aka '<' - self.assertLess(blogSmall.text_info, blogLargeB.text_info) - 
self.assertLess(blogSmall.bool_info, blogLargeB.bool_info) + assert blogSmall.text_info < blogLargeB.text_info + assert blogSmall.bool_info < blogLargeB.bool_info # '__ne__' aka '!=' - self.assertNotEqual(blogSmall.text_info, blogLargeB.text_info) - self.assertNotEqual(blogSmall.bool_info, blogLargeB.bool_info) + assert blogSmall.text_info != blogLargeB.text_info + assert blogSmall.bool_info != blogLargeB.bool_info # 'sort' blogLargeB.bool_info = [True, False, True, False] @@ -852,62 +919,63 @@ class FieldTest(MongoDBTestCase): sorted_target_list = [ ObjectId("54495ad94c934721ede76d00"), ObjectId("54495ad94c934721ede76d23"), - ObjectId("54495ad94c934721ede76f90") + ObjectId("54495ad94c934721ede76f90"), ] - self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) - self.assertEqual(blogLargeB.oid_info, sorted_target_list) - self.assertEqual(blogLargeB.bool_info, [False, False, True, True]) + assert blogLargeB.text_info == ["a", "j", "z"] + assert blogLargeB.oid_info == sorted_target_list + assert blogLargeB.bool_info == [False, False, True, True] blogLargeB.save() blogLargeB.reload() - self.assertEqual(blogLargeB.text_info, ["a", "j", "z"]) - self.assertEqual(blogLargeB.oid_info, sorted_target_list) - self.assertEqual(blogLargeB.bool_info, [False, False, True, True]) + assert blogLargeB.text_info == ["a", "j", "z"] + assert blogLargeB.oid_info == sorted_target_list + assert blogLargeB.bool_info == [False, False, True, True] def test_list_assignment(self): """Ensure that list field element assignment and slicing work.""" + class BlogPost(Document): info = ListField() BlogPost.drop_collection() post = BlogPost() - post.info = ['e1', 'e2', 3, '4', 5] + post.info = ["e1", "e2", 3, "4", 5] post.save() post.info[0] = 1 post.save() post.reload() - self.assertEqual(post.info[0], 1) + assert post.info[0] == 1 - post.info[1:3] = ['n2', 'n3'] + post.info[1:3] = ["n2", "n3"] post.save() post.reload() - self.assertEqual(post.info, [1, 'n2', 'n3', '4', 5]) + assert post.info == 
[1, "n2", "n3", "4", 5] - post.info[-1] = 'n5' + post.info[-1] = "n5" post.save() post.reload() - self.assertEqual(post.info, [1, 'n2', 'n3', '4', 'n5']) + assert post.info == [1, "n2", "n3", "4", "n5"] post.info[-2] = 4 post.save() post.reload() - self.assertEqual(post.info, [1, 'n2', 'n3', 4, 'n5']) + assert post.info == [1, "n2", "n3", 4, "n5"] post.info[1:-1] = [2] post.save() post.reload() - self.assertEqual(post.info, [1, 2, 'n5']) + assert post.info == [1, 2, "n5"] - post.info[:-1] = [1, 'n2', 'n3', 4] + post.info[:-1] = [1, "n2", "n3", 4] post.save() post.reload() - self.assertEqual(post.info, [1, 'n2', 'n3', 4, 'n5']) + assert post.info == [1, "n2", "n3", 4, "n5"] post.info[-4:3] = [2, 3] post.save() post.reload() - self.assertEqual(post.info, [1, 2, 3, 4, 'n5']) + assert post.info == [1, 2, 3, 4, "n5"] def test_list_field_passed_in_value(self): class Foo(Document): @@ -921,12 +989,13 @@ class FieldTest(MongoDBTestCase): foo = Foo(bars=[]) foo.bars.append(bar) - self.assertEqual(repr(foo.bars), '[]') + assert repr(foo.bars) == "[]" def test_list_field_strict(self): """Ensure that list field handles validation if provided a strict field type. 
""" + class Simple(Document): mapping = ListField(field=IntField()) @@ -937,23 +1006,53 @@ class FieldTest(MongoDBTestCase): e.save() # try creating an invalid mapping - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): e.mapping = ["abc"] e.save() + def test_list_field_max_length(self): + """Ensure ListField's max_length is respected.""" + + class Foo(Document): + items = ListField(IntField(), max_length=5) + + foo = Foo() + for i in range(1, 7): + foo.items.append(i) + if i < 6: + foo.save() + else: + with pytest.raises(ValidationError) as exc_info: + foo.save() + assert "List is too long" in str(exc_info.value) + + def test_list_field_max_length_set_operator(self): + """Ensure ListField's max_length is respected for a "set" operator.""" + + class Foo(Document): + items = ListField(IntField(), max_length=3) + + foo = Foo.objects.create(items=[1, 2, 3]) + with pytest.raises(ValidationError) as exc_info: + foo.modify(set__items=[1, 2, 3, 4]) + assert "List is too long" in str(exc_info.value) + def test_list_field_rejects_strings(self): """Strings aren't valid list field data types.""" + class Simple(Document): mapping = ListField() Simple.drop_collection() e = Simple() - e.mapping = 'hello world' - self.assertRaises(ValidationError, e.save) + e.mapping = "hello world" + with pytest.raises(ValidationError): + e.save() def test_complex_field_required(self): """Ensure required cant be None / Empty.""" + class Simple(Document): mapping = ListField(required=True) @@ -961,7 +1060,8 @@ class FieldTest(MongoDBTestCase): e = Simple() e.mapping = [] - self.assertRaises(ValidationError, e.save) + with pytest.raises(ValidationError): + e.save() class Simple(Document): mapping = DictField(required=True) @@ -969,12 +1069,14 @@ class FieldTest(MongoDBTestCase): Simple.drop_collection() e = Simple() e.mapping = {} - self.assertRaises(ValidationError, e.save) + with pytest.raises(ValidationError): + e.save() def 
test_complex_field_same_value_not_changed(self): """If a complex field is set to the same value, it should not be marked as changed. """ + class Simple(Document): mapping = ListField() @@ -982,7 +1084,7 @@ class FieldTest(MongoDBTestCase): e = Simple().save() e.mapping = [] - self.assertEqual([], e._changed_fields) + assert e._changed_fields == [] class Simple(Document): mapping = DictField() @@ -991,7 +1093,7 @@ class FieldTest(MongoDBTestCase): e = Simple().save() e.mapping = {} - self.assertEqual([], e._changed_fields) + assert e._changed_fields == [] def test_slice_marks_field_as_changed(self): class Simple(Document): @@ -999,11 +1101,11 @@ class FieldTest(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() simple.widgets[:3] = [] - self.assertEqual(['widgets'], simple._changed_fields) + assert ["widgets"] == simple._changed_fields simple.save() simple = simple.reload() - self.assertEqual(simple.widgets, [4]) + assert simple.widgets == [4] def test_del_slice_marks_field_as_changed(self): class Simple(Document): @@ -1011,11 +1113,11 @@ class FieldTest(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() del simple.widgets[:3] - self.assertEqual(['widgets'], simple._changed_fields) + assert ["widgets"] == simple._changed_fields simple.save() simple = simple.reload() - self.assertEqual(simple.widgets, [4]) + assert simple.widgets == [4] def test_list_field_with_negative_indices(self): class Simple(Document): @@ -1023,16 +1125,17 @@ class FieldTest(MongoDBTestCase): simple = Simple(widgets=[1, 2, 3, 4]).save() simple.widgets[-1] = 5 - self.assertEqual(['widgets.3'], simple._changed_fields) + assert ["widgets.3"] == simple._changed_fields simple.save() simple = simple.reload() - self.assertEqual(simple.widgets, [1, 2, 3, 5]) + assert simple.widgets == [1, 2, 3, 5] def test_list_field_complex(self): """Ensure that the list fields can handle the complex types.""" + class SettingBase(EmbeddedDocument): - meta = {'allow_inheritance': True} + meta = 
{"allow_inheritance": True} class StringSetting(SettingBase): value = StringField() @@ -1046,48 +1149,44 @@ class FieldTest(MongoDBTestCase): Simple.drop_collection() e = Simple() - e.mapping.append(StringSetting(value='foo')) + e.mapping.append(StringSetting(value="foo")) e.mapping.append(IntegerSetting(value=42)) - e.mapping.append({'number': 1, 'string': 'Hi!', 'float': 1.001, - 'complex': IntegerSetting(value=42), - 'list': [IntegerSetting(value=42), - StringSetting(value='foo')]}) + e.mapping.append( + { + "number": 1, + "string": "Hi!", + "float": 1.001, + "complex": IntegerSetting(value=42), + "list": [IntegerSetting(value=42), StringSetting(value="foo")], + } + ) e.save() e2 = Simple.objects.get(id=e.id) - self.assertIsInstance(e2.mapping[0], StringSetting) - self.assertIsInstance(e2.mapping[1], IntegerSetting) + assert isinstance(e2.mapping[0], StringSetting) + assert isinstance(e2.mapping[1], IntegerSetting) # Test querying - self.assertEqual( - Simple.objects.filter(mapping__1__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__2__number=1).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__2__complex__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__2__list__0__value=42).count(), 1) - self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1) + assert Simple.objects.filter(mapping__1__value=42).count() == 1 + assert Simple.objects.filter(mapping__2__number=1).count() == 1 + assert Simple.objects.filter(mapping__2__complex__value=42).count() == 1 + assert Simple.objects.filter(mapping__2__list__0__value=42).count() == 1 + assert Simple.objects.filter(mapping__2__list__1__value="foo").count() == 1 # Confirm can update Simple.objects().update(set__mapping__1=IntegerSetting(value=10)) - self.assertEqual( - Simple.objects.filter(mapping__1__value=10).count(), 1) + assert Simple.objects.filter(mapping__1__value=10).count() == 1 - Simple.objects().update( - 
set__mapping__2__list__1=StringSetting(value='Boo')) - self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0) - self.assertEqual( - Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1) + Simple.objects().update(set__mapping__2__list__1=StringSetting(value="Boo")) + assert Simple.objects.filter(mapping__2__list__1__value="foo").count() == 0 + assert Simple.objects.filter(mapping__2__list__1__value="Boo").count() == 1 def test_embedded_db_field(self): class Embedded(EmbeddedDocument): - number = IntField(default=0, db_field='i') + number = IntField(default=0, db_field="i") class Test(Document): - embedded = EmbeddedDocumentField(Embedded, db_field='x') + embedded = EmbeddedDocumentField(Embedded, db_field="x") Test.drop_collection() @@ -1098,68 +1197,65 @@ class FieldTest(MongoDBTestCase): Test.objects.update_one(inc__embedded__number=1) test = Test.objects.get() - self.assertEqual(test.embedded.number, 2) + assert test.embedded.number == 2 doc = self.db.test.find_one() - self.assertEqual(doc['x']['i'], 2) + assert doc["x"]["i"] == 2 def test_double_embedded_db_field(self): """Make sure multiple layers of embedded docs resolve db fields properly and can be initialized using dicts. """ + class C(EmbeddedDocument): txt = StringField() class B(EmbeddedDocument): - c = EmbeddedDocumentField(C, db_field='fc') + c = EmbeddedDocumentField(C, db_field="fc") class A(Document): - b = EmbeddedDocumentField(B, db_field='fb') + b = EmbeddedDocumentField(B, db_field="fb") - a = A( - b=B( - c=C(txt='hi') - ) - ) + a = A(b=B(c=C(txt="hi"))) a.validate() - a = A(b={'c': {'txt': 'hi'}}) + a = A(b={"c": {"txt": "hi"}}) a.validate() def test_double_embedded_db_field_from_son(self): """Make sure multiple layers of embedded docs resolve db fields from SON properly. 
""" + class C(EmbeddedDocument): txt = StringField() class B(EmbeddedDocument): - c = EmbeddedDocumentField(C, db_field='fc') + c = EmbeddedDocumentField(C, db_field="fc") class A(Document): - b = EmbeddedDocumentField(B, db_field='fb') + b = EmbeddedDocumentField(B, db_field="fb") - a = A._from_son(SON([ - ('fb', SON([ - ('fc', SON([ - ('txt', 'hi') - ])) - ])) - ])) - self.assertEqual(a.b.c.txt, 'hi') - - def test_embedded_document_field_cant_reference_using_a_str_if_it_does_not_exist_yet(self): - raise SkipTest("Using a string reference in an EmbeddedDocumentField does not work if the class isnt registerd yet") + a = A._from_son(SON([("fb", SON([("fc", SON([("txt", "hi")]))]))])) + assert a.b.c.txt == "hi" + @pytest.mark.xfail( + reason="Using a string reference in an EmbeddedDocumentField does not work if the class isnt registerd yet", + raises=NotRegistered, + ) + def test_embedded_document_field_cant_reference_using_a_str_if_it_does_not_exist_yet( + self, + ): class MyDoc2(Document): - emb = EmbeddedDocumentField('MyDoc') + emb = EmbeddedDocumentField("MyFunkyDoc123") - class MyDoc(EmbeddedDocument): + class MyFunkyDoc123(EmbeddedDocument): name = StringField() def test_embedded_document_validation(self): """Ensure that invalid embedded documents cannot be assigned to embedded document fields. 
""" + class Comment(EmbeddedDocument): content = StringField() @@ -1173,30 +1269,34 @@ class FieldTest(MongoDBTestCase): Person.drop_collection() - person = Person(name='Test User') - person.preferences = 'My Preferences' - self.assertRaises(ValidationError, person.validate) + person = Person(name="Test User") + person.preferences = "My Preferences" + with pytest.raises(ValidationError): + person.validate() # Check that only the right embedded doc works - person.preferences = Comment(content='Nice blog post...') - self.assertRaises(ValidationError, person.validate) + person.preferences = Comment(content="Nice blog post...") + with pytest.raises(ValidationError): + person.validate() # Check that the embedded doc is valid person.preferences = PersonPreferences() - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() - person.preferences = PersonPreferences(food='Cheese', number=47) - self.assertEqual(person.preferences.food, 'Cheese') + person.preferences = PersonPreferences(food="Cheese", number=47) + assert person.preferences.food == "Cheese" person.validate() def test_embedded_document_inheritance(self): """Ensure that subclasses of embedded documents may be provided to EmbeddedDocumentFields of the superclass' type. 
""" + class User(EmbeddedDocument): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class PowerUser(User): power = IntField() @@ -1207,40 +1307,42 @@ class FieldTest(MongoDBTestCase): BlogPost.drop_collection() - post = BlogPost(content='What I did today...') - post.author = PowerUser(name='Test User', power=47) + post = BlogPost(content="What I did today...") + post.author = PowerUser(name="Test User", power=47) post.save() - self.assertEqual(47, BlogPost.objects.first().author.power) + assert 47 == BlogPost.objects.first().author.power def test_embedded_document_inheritance_with_list(self): """Ensure that nested list of subclassed embedded documents is handled correctly. """ + class Group(EmbeddedDocument): name = StringField() content = ListField(StringField()) class Basedoc(Document): groups = ListField(EmbeddedDocumentField(Group)) - meta = {'abstract': True} + meta = {"abstract": True} class User(Basedoc): - doctype = StringField(require=True, default='userdata') + doctype = StringField(require=True, default="userdata") User.drop_collection() - content = ['la', 'le', 'lu'] - group = Group(name='foo', content=content) + content = ["la", "le", "lu"] + group = Group(name="foo", content=content) foobar = User(groups=[group]) foobar.save() - self.assertEqual(content, User.objects.first().groups[0].content) + assert content == User.objects.first().groups[0].content def test_reference_miss(self): """Ensure an exception is raised when dereferencing an unknown document. 
""" + class Foo(Document): pass @@ -1257,20 +1359,23 @@ class FieldTest(MongoDBTestCase): # Reference is no longer valid foo.delete() bar = Bar.objects.get() - self.assertRaises(DoesNotExist, getattr, bar, 'ref') - self.assertRaises(DoesNotExist, getattr, bar, 'generic_ref') + with pytest.raises(DoesNotExist): + getattr(bar, "ref") + with pytest.raises(DoesNotExist): + getattr(bar, "generic_ref") # When auto_dereference is disabled, there is no trouble returning DBRef bar = Bar.objects.get() expected = foo.to_dbref() - bar._fields['ref']._auto_dereference = False - self.assertEqual(bar.ref, expected) - bar._fields['generic_ref']._auto_dereference = False - self.assertEqual(bar.generic_ref, {'_ref': expected, '_cls': 'Foo'}) + bar._fields["ref"]._auto_dereference = False + assert bar.ref == expected + bar._fields["generic_ref"]._auto_dereference = False + assert bar.generic_ref == {"_ref": expected, "_cls": "Foo"} def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. """ + class User(Document): name = StringField() @@ -1280,9 +1385,9 @@ class FieldTest(MongoDBTestCase): User.drop_collection() Group.drop_collection() - user1 = User(name='user1') + user1 = User(name="user1") user1.save() - user2 = User(name='user2') + user2 = User(name="user2") user2.save() group = Group(members=[user1, user2]) @@ -1290,46 +1395,48 @@ class FieldTest(MongoDBTestCase): group_obj = Group.objects.first() - self.assertEqual(group_obj.members[0].name, user1.name) - self.assertEqual(group_obj.members[1].name, user2.name) + assert group_obj.members[0].name == user1.name + assert group_obj.members[1].name == user2.name def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. 
""" + class Employee(Document): name = StringField() - boss = ReferenceField('self') - friends = ListField(ReferenceField('self')) + boss = ReferenceField("self") + friends = ListField(ReferenceField("self")) Employee.drop_collection() - bill = Employee(name='Bill Lumbergh') + bill = Employee(name="Bill Lumbergh") bill.save() - michael = Employee(name='Michael Bolton') + michael = Employee(name="Michael Bolton") michael.save() - samir = Employee(name='Samir Nagheenanajar') + samir = Employee(name="Samir Nagheenanajar") samir.save() friends = [michael, samir] - peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) + peter = Employee(name="Peter Gibbons", boss=bill, friends=friends) peter.save() peter = Employee.objects.with_id(peter.id) - self.assertEqual(peter.boss, bill) - self.assertEqual(peter.friends, friends) + assert peter.boss == bill + assert peter.friends == friends def test_recursive_embedding(self): """Ensure that EmbeddedDocumentFields can contain their own documents. 
""" + class TreeNode(EmbeddedDocument): name = StringField() - children = ListField(EmbeddedDocumentField('self')) + children = ListField(EmbeddedDocumentField("self")) class Tree(Document): name = StringField() - children = ListField(EmbeddedDocumentField('TreeNode')) + children = ListField(EmbeddedDocumentField("TreeNode")) Tree.drop_collection() @@ -1342,61 +1449,64 @@ class FieldTest(MongoDBTestCase): tree.save() tree = Tree.objects.first() - self.assertEqual(len(tree.children), 1) + assert len(tree.children) == 1 - self.assertEqual(len(tree.children[0].children), 1) + assert len(tree.children[0].children) == 1 third_child = TreeNode(name="Child 3") tree.children[0].children.append(third_child) tree.save() - self.assertEqual(len(tree.children), 1) - self.assertEqual(tree.children[0].name, first_child.name) - self.assertEqual(tree.children[0].children[0].name, second_child.name) - self.assertEqual(tree.children[0].children[1].name, third_child.name) + assert len(tree.children) == 1 + assert tree.children[0].name == first_child.name + assert tree.children[0].children[0].name == second_child.name + assert tree.children[0].children[1].name == third_child.name # Test updating - tree.children[0].name = 'I am Child 1' - tree.children[0].children[0].name = 'I am Child 2' - tree.children[0].children[1].name = 'I am Child 3' + tree.children[0].name = "I am Child 1" + tree.children[0].children[0].name = "I am Child 2" + tree.children[0].children[1].name = "I am Child 3" tree.save() - self.assertEqual(tree.children[0].name, 'I am Child 1') - self.assertEqual(tree.children[0].children[0].name, 'I am Child 2') - self.assertEqual(tree.children[0].children[1].name, 'I am Child 3') + assert tree.children[0].name == "I am Child 1" + assert tree.children[0].children[0].name == "I am Child 2" + assert tree.children[0].children[1].name == "I am Child 3" # Test removal - self.assertEqual(len(tree.children[0].children), 2) - del(tree.children[0].children[1]) + assert 
len(tree.children[0].children) == 2 + del tree.children[0].children[1] tree.save() - self.assertEqual(len(tree.children[0].children), 1) + assert len(tree.children[0].children) == 1 tree.children[0].children.pop(0) tree.save() - self.assertEqual(len(tree.children[0].children), 0) - self.assertEqual(tree.children[0].children, []) + assert len(tree.children[0].children) == 0 + assert tree.children[0].children == [] tree.children[0].children.insert(0, third_child) tree.children[0].children.insert(0, second_child) tree.save() - self.assertEqual(len(tree.children[0].children), 2) - self.assertEqual(tree.children[0].children[0].name, second_child.name) - self.assertEqual(tree.children[0].children[1].name, third_child.name) + assert len(tree.children[0].children) == 2 + assert tree.children[0].children[0].name == second_child.name + assert tree.children[0].children[1].name == third_child.name def test_drop_abstract_document(self): """Ensure that an abstract document cannot be dropped given it has no underlying collection. """ + class AbstractDoc(Document): name = StringField() meta = {"abstract": True} - self.assertRaises(OperationError, AbstractDoc.drop_collection) + with pytest.raises(OperationError): + AbstractDoc.drop_collection() def test_reference_class_with_abstract_parent(self): """Ensure that a class with an abstract parent can be referenced. """ + class Sibling(Document): name = StringField() meta = {"abstract": True} @@ -1415,12 +1525,13 @@ class FieldTest(MongoDBTestCase): brother = Brother(name="Bob", sibling=sister) brother.save() - self.assertEqual(Brother.objects[0].sibling.name, sister.name) + assert Brother.objects[0].sibling.name == sister.name def test_reference_abstract_class(self): """Ensure that an abstract class instance cannot be used in the reference of that abstract class. 
""" + class Sibling(Document): name = StringField() meta = {"abstract": True} @@ -1436,12 +1547,14 @@ class FieldTest(MongoDBTestCase): sister = Sibling(name="Alice") brother = Brother(name="Bob", sibling=sister) - self.assertRaises(ValidationError, brother.save) + with pytest.raises(ValidationError): + brother.save() def test_abstract_reference_base_type(self): """Ensure that an an abstract reference fails validation when given a Document that does not inherit from the abstract type. """ + class Sibling(Document): name = StringField() meta = {"abstract": True} @@ -1458,14 +1571,16 @@ class FieldTest(MongoDBTestCase): mother = Mother(name="Carol") mother.save() brother = Brother(name="Bob", sibling=mother) - self.assertRaises(ValidationError, brother.save) + with pytest.raises(ValidationError): + brother.save() def test_generic_reference(self): """Ensure that a GenericReferenceField properly dereferences items. """ + class Link(Document): title = StringField() - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Post(Document): title = StringField() @@ -1488,20 +1603,21 @@ class FieldTest(MongoDBTestCase): bm = Bookmark.objects(bookmark_object=post_1).first() - self.assertEqual(bm.bookmark_object, post_1) - self.assertIsInstance(bm.bookmark_object, Post) + assert bm.bookmark_object == post_1 + assert isinstance(bm.bookmark_object, Post) bm.bookmark_object = link_1 bm.save() bm = Bookmark.objects(bookmark_object=link_1).first() - self.assertEqual(bm.bookmark_object, link_1) - self.assertIsInstance(bm.bookmark_object, Link) + assert bm.bookmark_object == link_1 + assert isinstance(bm.bookmark_object, Link) def test_generic_reference_list(self): """Ensure that a ListField properly dereferences generic references. 
""" + class Link(Document): title = StringField() @@ -1526,13 +1642,14 @@ class FieldTest(MongoDBTestCase): user = User.objects(bookmarks__all=[post_1, link_1]).first() - self.assertEqual(user.bookmarks[0], post_1) - self.assertEqual(user.bookmarks[1], link_1) + assert user.bookmarks[0] == post_1 + assert user.bookmarks[1] == link_1 def test_generic_reference_document_not_registered(self): """Ensure dereferencing out of the document registry throws a `NotRegistered` error. """ + class Link(Document): title = StringField() @@ -1550,7 +1667,7 @@ class FieldTest(MongoDBTestCase): # Mimic User and Link definitions being in a different file # and the Link model not being imported in the User file. - del(_document_registry["Link"]) + del _document_registry["Link"] user = User.objects.first() try: @@ -1560,7 +1677,6 @@ class FieldTest(MongoDBTestCase): pass def test_generic_reference_is_none(self): - class Person(Document): name = StringField() city = GenericReferenceField() @@ -1568,11 +1684,11 @@ class FieldTest(MongoDBTestCase): Person.drop_collection() Person(name="Wilson Jr").save() - self.assertEqual(repr(Person.objects(city=None)), - "[]") + assert repr(Person.objects(city=None)) == "[]" def test_generic_reference_choices(self): """Ensure that a GenericReferenceField can handle choices.""" + class Link(Document): title = StringField() @@ -1593,17 +1709,19 @@ class FieldTest(MongoDBTestCase): post_1.save() bm = Bookmark(bookmark_object=link_1) - self.assertRaises(ValidationError, bm.validate) + with pytest.raises(ValidationError): + bm.validate() bm = Bookmark(bookmark_object=post_1) bm.save() bm = Bookmark.objects.first() - self.assertEqual(bm.bookmark_object, post_1) + assert bm.bookmark_object == post_1 def test_generic_reference_string_choices(self): """Ensure that a GenericReferenceField can handle choices as strings """ + class Link(Document): title = StringField() @@ -1611,7 +1729,7 @@ class FieldTest(MongoDBTestCase): title = StringField() class 
Bookmark(Document): - bookmark_object = GenericReferenceField(choices=('Post', Link)) + bookmark_object = GenericReferenceField(choices=("Post", Link)) Link.drop_collection() Post.drop_collection() @@ -1630,17 +1748,19 @@ class FieldTest(MongoDBTestCase): bm.save() bm = Bookmark(bookmark_object=bm) - self.assertRaises(ValidationError, bm.validate) + with pytest.raises(ValidationError): + bm.validate() def test_generic_reference_choices_no_dereference(self): """Ensure that a GenericReferenceField can handle choices on non-derefenreced (i.e. DBRef) elements """ + class Post(Document): title = StringField() class Bookmark(Document): - bookmark_object = GenericReferenceField(choices=(Post, )) + bookmark_object = GenericReferenceField(choices=(Post,)) other_field = StringField() Post.drop_collection() @@ -1654,13 +1774,14 @@ class FieldTest(MongoDBTestCase): bm = Bookmark.objects.get(id=bm.id) # bookmark_object is now a DBRef - bm.other_field = 'dummy_change' + bm.other_field = "dummy_change" bm.save() def test_generic_reference_list_choices(self): """Ensure that a ListField properly dereferences generic references and respects choices. 
""" + class Link(Document): title = StringField() @@ -1681,17 +1802,19 @@ class FieldTest(MongoDBTestCase): post_1.save() user = User(bookmarks=[link_1]) - self.assertRaises(ValidationError, user.validate) + with pytest.raises(ValidationError): + user.validate() user = User(bookmarks=[post_1]) user.save() user = User.objects.first() - self.assertEqual(user.bookmarks, [post_1]) + assert user.bookmarks == [post_1] def test_generic_reference_list_item_modification(self): """Ensure that modifications of related documents (through generic reference) don't influence on querying """ + class Post(Document): title = StringField() @@ -1714,13 +1837,14 @@ class FieldTest(MongoDBTestCase): user = User.objects(bookmarks__all=[post_1]).first() - self.assertNotEqual(user, None) - self.assertEqual(user.bookmarks[0], post_1) + assert user is not None + assert user.bookmarks[0] == post_1 def test_generic_reference_filter_by_dbref(self): """Ensure we can search for a specific generic reference by providing its ObjectId. 
""" + class Doc(Document): ref = GenericReferenceField() @@ -1729,13 +1853,14 @@ class FieldTest(MongoDBTestCase): doc1 = Doc.objects.create() doc2 = Doc.objects.create(ref=doc1) - doc = Doc.objects.get(ref=DBRef('doc', doc1.pk)) - self.assertEqual(doc, doc2) + doc = Doc.objects.get(ref=DBRef("doc", doc1.pk)) + assert doc == doc2 def test_generic_reference_is_not_tracked_in_parent_doc(self): """Ensure that modifications of related documents (through generic reference) don't influence the owner changed fields (#1934) """ + class Doc1(Document): name = StringField() @@ -1746,21 +1871,22 @@ class FieldTest(MongoDBTestCase): Doc1.drop_collection() Doc2.drop_collection() - doc1 = Doc1(name='garbage1').save() - doc11 = Doc1(name='garbage11').save() + doc1 = Doc1(name="garbage1").save() + doc11 = Doc1(name="garbage11").save() doc2 = Doc2(ref=doc1, refs=[doc11]).save() - doc2.ref.name = 'garbage2' - self.assertEqual(doc2._get_changed_fields(), []) + doc2.ref.name = "garbage2" + assert doc2._get_changed_fields() == [] - doc2.refs[0].name = 'garbage3' - self.assertEqual(doc2._get_changed_fields(), []) - self.assertEqual(doc2._delta(), ({}, {})) + doc2.refs[0].name = "garbage3" + assert doc2._get_changed_fields() == [] + assert doc2._delta() == ({}, {}) def test_generic_reference_field(self): """Ensure we can search for a specific generic reference by providing its DBRef. 
""" + class Doc(Document): ref = GenericReferenceField() @@ -1769,25 +1895,27 @@ class FieldTest(MongoDBTestCase): doc1 = Doc.objects.create() doc2 = Doc.objects.create(ref=doc1) - self.assertIsInstance(doc1.pk, ObjectId) + assert isinstance(doc1.pk, ObjectId) doc = Doc.objects.get(ref=doc1.pk) - self.assertEqual(doc, doc2) + assert doc == doc2 def test_choices_allow_using_sets_as_choices(self): """Ensure that sets can be used when setting choices """ - class Shirt(Document): - size = StringField(choices={'M', 'L'}) - Shirt(size='M').validate() + class Shirt(Document): + size = StringField(choices={"M", "L"}) + + Shirt(size="M").validate() def test_choices_validation_allow_no_value(self): """Ensure that .validate passes and no value was provided for a field setup with choices """ + class Shirt(Document): - size = StringField(choices=('S', 'M')) + size = StringField(choices=("S", "M")) shirt = Shirt() shirt.validate() @@ -1795,32 +1923,45 @@ class FieldTest(MongoDBTestCase): def test_choices_validation_accept_possible_value(self): """Ensure that value is in a container of allowed values. """ - class Shirt(Document): - size = StringField(choices=('S', 'M')) - shirt = Shirt(size='S') + class Shirt(Document): + size = StringField(choices=("S", "M")) + + shirt = Shirt(size="S") shirt.validate() def test_choices_validation_reject_unknown_value(self): """Ensure that unallowed value are rejected upon validation """ + class Shirt(Document): - size = StringField(choices=('S', 'M')) + size = StringField(choices=("S", "M")) shirt = Shirt(size="XS") - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): shirt.validate() def test_choices_get_field_display(self): """Test dynamic helper for returning the display value of a choices field. 
""" + class Shirt(Document): - size = StringField(max_length=3, choices=( - ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), - ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) - style = StringField(max_length=3, choices=( - ('S', 'Small'), ('B', 'Baggy'), ('W', 'Wide')), default='W') + size = StringField( + max_length=3, + choices=( + ("S", "Small"), + ("M", "Medium"), + ("L", "Large"), + ("XL", "Extra Large"), + ("XXL", "Extra Extra Large"), + ), + ) + style = StringField( + max_length=3, + choices=(("S", "Small"), ("B", "Baggy"), ("W", "Wide")), + default="W", + ) Shirt.drop_collection() @@ -1828,31 +1969,32 @@ class FieldTest(MongoDBTestCase): shirt2 = Shirt() # Make sure get__display returns the default value (or None) - self.assertEqual(shirt1.get_size_display(), None) - self.assertEqual(shirt1.get_style_display(), 'Wide') + assert shirt1.get_size_display() is None + assert shirt1.get_style_display() == "Wide" - shirt1.size = 'XXL' - shirt1.style = 'B' - shirt2.size = 'M' - shirt2.style = 'S' - self.assertEqual(shirt1.get_size_display(), 'Extra Extra Large') - self.assertEqual(shirt1.get_style_display(), 'Baggy') - self.assertEqual(shirt2.get_size_display(), 'Medium') - self.assertEqual(shirt2.get_style_display(), 'Small') + shirt1.size = "XXL" + shirt1.style = "B" + shirt2.size = "M" + shirt2.style = "S" + assert shirt1.get_size_display() == "Extra Extra Large" + assert shirt1.get_style_display() == "Baggy" + assert shirt2.get_size_display() == "Medium" + assert shirt2.get_style_display() == "Small" # Set as Z - an invalid choice - shirt1.size = 'Z' - shirt1.style = 'Z' - self.assertEqual(shirt1.get_size_display(), 'Z') - self.assertEqual(shirt1.get_style_display(), 'Z') - self.assertRaises(ValidationError, shirt1.validate) + shirt1.size = "Z" + shirt1.style = "Z" + assert shirt1.get_size_display() == "Z" + assert shirt1.get_style_display() == "Z" + with pytest.raises(ValidationError): + shirt1.validate() def test_simple_choices_validation(self): 
"""Ensure that value is in a container of allowed values. """ + class Shirt(Document): - size = StringField(max_length=3, - choices=('S', 'M', 'L', 'XL', 'XXL')) + size = StringField(max_length=3, choices=("S", "M", "L", "XL", "XXL")) Shirt.drop_collection() @@ -1863,43 +2005,45 @@ class FieldTest(MongoDBTestCase): shirt.validate() shirt.size = "XS" - self.assertRaises(ValidationError, shirt.validate) + with pytest.raises(ValidationError): + shirt.validate() def test_simple_choices_get_field_display(self): """Test dynamic helper for returning the display value of a choices field. """ + class Shirt(Document): - size = StringField(max_length=3, - choices=('S', 'M', 'L', 'XL', 'XXL')) - style = StringField(max_length=3, - choices=('Small', 'Baggy', 'wide'), - default='Small') + size = StringField(max_length=3, choices=("S", "M", "L", "XL", "XXL")) + style = StringField( + max_length=3, choices=("Small", "Baggy", "wide"), default="Small" + ) Shirt.drop_collection() shirt = Shirt() - self.assertEqual(shirt.get_size_display(), None) - self.assertEqual(shirt.get_style_display(), 'Small') + assert shirt.get_size_display() is None + assert shirt.get_style_display() == "Small" shirt.size = "XXL" shirt.style = "Baggy" - self.assertEqual(shirt.get_size_display(), 'XXL') - self.assertEqual(shirt.get_style_display(), 'Baggy') + assert shirt.get_size_display() == "XXL" + assert shirt.get_style_display() == "Baggy" # Set as Z - an invalid choice shirt.size = "Z" shirt.style = "Z" - self.assertEqual(shirt.get_size_display(), 'Z') - self.assertEqual(shirt.get_style_display(), 'Z') - self.assertRaises(ValidationError, shirt.validate) + assert shirt.get_size_display() == "Z" + assert shirt.get_style_display() == "Z" + with pytest.raises(ValidationError): + shirt.validate() def test_simple_choices_validation_invalid_value(self): """Ensure that error messages are correct. 
""" - SIZES = ('S', 'M', 'L', 'XL', 'XXL') - COLORS = (('R', 'Red'), ('B', 'Blue')) + SIZES = ("S", "M", "L", "XL", "XXL") + COLORS = (("R", "Red"), ("B", "Blue")) SIZE_MESSAGE = u"Value must be one of ('S', 'M', 'L', 'XL', 'XXL')" COLOR_MESSAGE = u"Value must be one of ['R', 'B']" @@ -1924,11 +2068,12 @@ class FieldTest(MongoDBTestCase): except ValidationError as error: # get the validation rules error_dict = error.to_dict() - self.assertEqual(error_dict['size'], SIZE_MESSAGE) - self.assertEqual(error_dict['color'], COLOR_MESSAGE) + assert error_dict["size"] == SIZE_MESSAGE + assert error_dict["color"] == COLOR_MESSAGE def test_recursive_validation(self): """Ensure that a validation result to_dict is available.""" + class Author(EmbeddedDocument): name = StringField(required=True) @@ -1940,40 +2085,40 @@ class FieldTest(MongoDBTestCase): title = StringField(required=True) comments = ListField(EmbeddedDocumentField(Comment)) - bob = Author(name='Bob') - post = Post(title='hello world') - post.comments.append(Comment(content='hello', author=bob)) + bob = Author(name="Bob") + post = Post(title="hello world") + post.comments.append(Comment(content="hello", author=bob)) post.comments.append(Comment(author=bob)) - self.assertRaises(ValidationError, post.validate) + with pytest.raises(ValidationError): + post.validate() try: post.validate() except ValidationError as error: # ValidationError.errors property - self.assertTrue(hasattr(error, 'errors')) - self.assertIsInstance(error.errors, dict) - self.assertIn('comments', error.errors) - self.assertIn(1, error.errors['comments']) - self.assertIsInstance(error.errors['comments'][1]['content'], ValidationError) + assert hasattr(error, "errors") + assert isinstance(error.errors, dict) + assert "comments" in error.errors + assert 1 in error.errors["comments"] + assert isinstance(error.errors["comments"][1]["content"], ValidationError) # ValidationError.schema property error_dict = error.to_dict() - 
self.assertIsInstance(error_dict, dict) - self.assertIn('comments', error_dict) - self.assertIn(1, error_dict['comments']) - self.assertIn('content', error_dict['comments'][1]) - self.assertEqual(error_dict['comments'][1]['content'], - u'Field is required') + assert isinstance(error_dict, dict) + assert "comments" in error_dict + assert 1 in error_dict["comments"] + assert "content" in error_dict["comments"][1] + assert error_dict["comments"][1]["content"] == u"Field is required" - post.comments[1].content = 'here we go' + post.comments[1].content = "here we go" post.validate() def test_tuples_as_tuples(self): """Ensure that tuples remain tuples when they are inside a ComplexBaseField. """ - class EnumField(BaseField): + class EnumField(BaseField): def __init__(self, **kwargs): super(EnumField, self).__init__(**kwargs) @@ -1988,56 +2133,58 @@ class FieldTest(MongoDBTestCase): TestDoc.drop_collection() - tuples = [(100, 'Testing')] + tuples = [(100, "Testing")] doc = TestDoc() doc.items = tuples doc.save() x = TestDoc.objects().get() - self.assertIsNotNone(x) - self.assertEqual(len(x.items), 1) - self.assertIn(tuple(x.items[0]), tuples) - self.assertIn(x.items[0], tuples) + assert x is not None + assert len(x.items) == 1 + assert tuple(x.items[0]) in tuples + assert x.items[0] in tuples def test_dynamic_fields_class(self): class Doc2(Document): - field_1 = StringField(db_field='f') + field_1 = StringField(db_field="f") class Doc(Document): my_id = IntField(primary_key=True) - embed_me = DynamicField(db_field='e') - field_x = StringField(db_field='x') + embed_me = DynamicField(db_field="e") + field_x = StringField(db_field="x") Doc.drop_collection() Doc2.drop_collection() doc2 = Doc2(field_1="hello") doc = Doc(my_id=1, embed_me=doc2, field_x="x") - self.assertRaises(OperationError, doc.save) + with pytest.raises(OperationError): + doc.save() doc2.save() doc.save() doc = Doc.objects.get() - self.assertEqual(doc.embed_me.field_1, "hello") + assert doc.embed_me.field_1 
== "hello" def test_dynamic_fields_embedded_class(self): class Embed(EmbeddedDocument): - field_1 = StringField(db_field='f') + field_1 = StringField(db_field="f") class Doc(Document): my_id = IntField(primary_key=True) - embed_me = DynamicField(db_field='e') - field_x = StringField(db_field='x') + embed_me = DynamicField(db_field="e") + field_x = StringField(db_field="x") Doc.drop_collection() Doc(my_id=1, embed_me=Embed(field_1="hello"), field_x="x").save() doc = Doc.objects.get() - self.assertEqual(doc.embed_me.field_1, "hello") + assert doc.embed_me.field_1 == "hello" def test_dynamicfield_dump_document(self): """Ensure a DynamicField can handle another document's dump.""" + class Doc(Document): field = DynamicField() @@ -2049,7 +2196,7 @@ class FieldTest(MongoDBTestCase): id = IntField(primary_key=True, default=1) recursive = DynamicField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class ToEmbedChild(ToEmbedParent): pass @@ -2058,19 +2205,19 @@ class FieldTest(MongoDBTestCase): to_embed = ToEmbed(id=2, recursive=to_embed_recursive).save() doc = Doc(field=to_embed) doc.save() - self.assertIsInstance(doc.field, ToEmbed) - self.assertEqual(doc.field, to_embed) + assert isinstance(doc.field, ToEmbed) + assert doc.field == to_embed # Same thing with a Document with a _cls field to_embed_recursive = ToEmbedChild(id=1).save() to_embed_child = ToEmbedChild(id=2, recursive=to_embed_recursive).save() doc = Doc(field=to_embed_child) doc.save() - self.assertIsInstance(doc.field, ToEmbedChild) - self.assertEqual(doc.field, to_embed_child) + assert isinstance(doc.field, ToEmbedChild) + assert doc.field == to_embed_child def test_cls_field(self): class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Fish(Animal): pass @@ -2088,8 +2235,10 @@ class FieldTest(MongoDBTestCase): Dog().save() Fish().save() Human().save() - self.assertEqual(Animal.objects(_cls__in=["Animal.Mammal.Dog", 
"Animal.Fish"]).count(), 2) - self.assertEqual(Animal.objects(_cls__in=["Animal.Fish.Guppy"]).count(), 0) + assert ( + Animal.objects(_cls__in=["Animal.Mammal.Dog", "Animal.Fish"]).count() == 2 + ) + assert Animal.objects(_cls__in=["Animal.Fish.Guppy"]).count() == 0 def test_sparse_field(self): class Doc(Document): @@ -2104,32 +2253,34 @@ class FieldTest(MongoDBTestCase): trying to instantiate a document with a field that's not defined. """ + class Doc(Document): foo = StringField() - with self.assertRaises(FieldDoesNotExist): - Doc(bar='test') + with pytest.raises(FieldDoesNotExist): + Doc(bar="test") def test_undefined_field_exception_with_strict(self): """Tests if a `FieldDoesNotExist` exception is raised when trying to instantiate a document with a field that's not defined, even when strict is set to False. """ + class Doc(Document): foo = StringField() - meta = {'strict': False} + meta = {"strict": False} - with self.assertRaises(FieldDoesNotExist): - Doc(bar='test') + with pytest.raises(FieldDoesNotExist): + Doc(bar="test") -class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): - +class TestEmbeddedDocumentListField(MongoDBTestCase): def setUp(self): """ Create two BlogPost entries in the database, each with several EmbeddedDocuments. 
""" + class Comments(EmbeddedDocument): author = StringField() message = StringField() @@ -2142,20 +2293,24 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): self.Comments = Comments self.BlogPost = BlogPost - self.post1 = self.BlogPost(comments=[ - self.Comments(author='user1', message='message1'), - self.Comments(author='user2', message='message1') - ]).save() + self.post1 = self.BlogPost( + comments=[ + self.Comments(author="user1", message="message1"), + self.Comments(author="user2", message="message1"), + ] + ).save() - self.post2 = self.BlogPost(comments=[ - self.Comments(author='user2', message='message2'), - self.Comments(author='user2', message='message3'), - self.Comments(author='user3', message='message1') - ]).save() + self.post2 = self.BlogPost( + comments=[ + self.Comments(author="user2", message="message2"), + self.Comments(author="user2", message="message3"), + self.Comments(author="user3", message="message1"), + ] + ).save() def test_fails_upon_validate_if_provide_a_doc_instead_of_a_list_of_doc(self): # Relates to Issue #1464 - comment = self.Comments(author='John') + comment = self.Comments(author="John") class Title(Document): content = StringField() @@ -2163,17 +2318,21 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): # Test with an embeddedDocument instead of a list(embeddedDocument) # It's an edge case but it used to fail with a vague error, making it difficult to troubleshoot it post = self.BlogPost(comments=comment) - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as exc_info: post.validate() - self.assertIn("'comments'", str(ctx_err.exception)) - self.assertIn('Only lists and tuples may be used in a list field', str(ctx_err.exception)) + + error_msg = str(exc_info.value) + assert "'comments'" in error_msg + assert "Only lists and tuples may be used in a list field" in error_msg # Test with a Document - post = self.BlogPost(comments=Title(content='garbage')) - with 
self.assertRaises(ValidationError) as e: + post = self.BlogPost(comments=Title(content="garbage")) + with pytest.raises(ValidationError) as exc_info: post.validate() - self.assertIn("'comments'", str(ctx_err.exception)) - self.assertIn('Only lists and tuples may be used in a list field', str(ctx_err.exception)) + + error_msg = str(exc_info.value) + assert "'comments'" in error_msg + assert "Only lists and tuples may be used in a list field" in error_msg def test_no_keyword_filter(self): """ @@ -2183,58 +2342,54 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): filtered = self.post1.comments.filter() # Ensure nothing was changed - self.assertListEqual(filtered, self.post1.comments) + assert filtered == self.post1.comments def test_single_keyword_filter(self): """ Tests the filter method of a List of Embedded Documents with a single keyword. """ - filtered = self.post1.comments.filter(author='user1') + filtered = self.post1.comments.filter(author="user1") # Ensure only 1 entry was returned. - self.assertEqual(len(filtered), 1) + assert len(filtered) == 1 # Ensure the entry returned is the correct entry. - self.assertEqual(filtered[0].author, 'user1') + assert filtered[0].author == "user1" def test_multi_keyword_filter(self): """ Tests the filter method of a List of Embedded Documents with multiple keywords. """ - filtered = self.post2.comments.filter( - author='user2', message='message2' - ) + filtered = self.post2.comments.filter(author="user2", message="message2") # Ensure only 1 entry was returned. - self.assertEqual(len(filtered), 1) + assert len(filtered) == 1 # Ensure the entry returned is the correct entry. 
- self.assertEqual(filtered[0].author, 'user2') - self.assertEqual(filtered[0].message, 'message2') + assert filtered[0].author == "user2" + assert filtered[0].message == "message2" def test_chained_filter(self): """ Tests chained filter methods of a List of Embedded Documents """ - filtered = self.post2.comments.filter(author='user2').filter( - message='message2' - ) + filtered = self.post2.comments.filter(author="user2").filter(message="message2") # Ensure only 1 entry was returned. - self.assertEqual(len(filtered), 1) + assert len(filtered) == 1 # Ensure the entry returned is the correct entry. - self.assertEqual(filtered[0].author, 'user2') - self.assertEqual(filtered[0].message, 'message2') + assert filtered[0].author == "user2" + assert filtered[0].message == "message2" def test_unknown_keyword_filter(self): """ Tests the filter method of a List of Embedded Documents when the keyword is not a known keyword. """ - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.post2.comments.filter(year=2) def test_no_keyword_exclude(self): @@ -2245,53 +2400,51 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): filtered = self.post1.comments.exclude() # Ensure everything was removed - self.assertListEqual(filtered, []) + assert filtered == [] def test_single_keyword_exclude(self): """ Tests the exclude method of a List of Embedded Documents with a single keyword. """ - excluded = self.post1.comments.exclude(author='user1') + excluded = self.post1.comments.exclude(author="user1") # Ensure only 1 entry was returned. - self.assertEqual(len(excluded), 1) + assert len(excluded) == 1 # Ensure the entry returned is the correct entry. - self.assertEqual(excluded[0].author, 'user2') + assert excluded[0].author == "user2" def test_multi_keyword_exclude(self): """ Tests the exclude method of a List of Embedded Documents with multiple keywords. 
""" - excluded = self.post2.comments.exclude( - author='user3', message='message1' - ) + excluded = self.post2.comments.exclude(author="user3", message="message1") # Ensure only 2 entries were returned. - self.assertEqual(len(excluded), 2) + assert len(excluded) == 2 # Ensure the entries returned are the correct entries. - self.assertEqual(excluded[0].author, 'user2') - self.assertEqual(excluded[1].author, 'user2') + assert excluded[0].author == "user2" + assert excluded[1].author == "user2" def test_non_matching_exclude(self): """ Tests the exclude method of a List of Embedded Documents when the keyword does not match any entries. """ - excluded = self.post2.comments.exclude(author='user4') + excluded = self.post2.comments.exclude(author="user4") # Ensure the 3 entries still exist. - self.assertEqual(len(excluded), 3) + assert len(excluded) == 3 def test_unknown_keyword_exclude(self): """ Tests the exclude method of a List of Embedded Documents when the keyword is not a known keyword. """ - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.post2.comments.exclude(year=2) def test_chained_filter_exclude(self): @@ -2299,56 +2452,56 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests the exclude method after a filter method of a List of Embedded Documents. """ - excluded = self.post2.comments.filter(author='user2').exclude( - message='message2' + excluded = self.post2.comments.filter(author="user2").exclude( + message="message2" ) # Ensure only 1 entry was returned. - self.assertEqual(len(excluded), 1) + assert len(excluded) == 1 # Ensure the entry returned is the correct entry. - self.assertEqual(excluded[0].author, 'user2') - self.assertEqual(excluded[0].message, 'message3') + assert excluded[0].author == "user2" + assert excluded[0].message == "message3" def test_count(self): """ Tests the count method of a List of Embedded Documents. 
""" - self.assertEqual(self.post1.comments.count(), 2) - self.assertEqual(self.post1.comments.count(), len(self.post1.comments)) + assert self.post1.comments.count() == 2 + assert self.post1.comments.count() == len(self.post1.comments) def test_filtered_count(self): """ Tests the filter + count method of a List of Embedded Documents. """ - count = self.post1.comments.filter(author='user1').count() - self.assertEqual(count, 1) + count = self.post1.comments.filter(author="user1").count() + assert count == 1 def test_single_keyword_get(self): """ Tests the get method of a List of Embedded Documents using a single keyword. """ - comment = self.post1.comments.get(author='user1') - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, 'user1') + comment = self.post1.comments.get(author="user1") + assert isinstance(comment, self.Comments) + assert comment.author == "user1" def test_multi_keyword_get(self): """ Tests the get method of a List of Embedded Documents using multiple keywords. """ - comment = self.post2.comments.get(author='user2', message='message2') - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, 'user2') - self.assertEqual(comment.message, 'message2') + comment = self.post2.comments.get(author="user2", message="message2") + assert isinstance(comment, self.Comments) + assert comment.author == "user2" + assert comment.message == "message2" def test_no_keyword_multiple_return_get(self): """ Tests the get method of a List of Embedded Documents without a keyword to return multiple documents. """ - with self.assertRaises(MultipleObjectsReturned): + with pytest.raises(MultipleObjectsReturned): self.post1.comments.get() def test_keyword_multiple_return_get(self): @@ -2356,15 +2509,15 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests the get method of a List of Embedded Documents with a keyword to return multiple documents. 
""" - with self.assertRaises(MultipleObjectsReturned): - self.post2.comments.get(author='user2') + with pytest.raises(MultipleObjectsReturned): + self.post2.comments.get(author="user2") def test_unknown_keyword_get(self): """ Tests the get method of a List of Embedded Documents with an unknown keyword. """ - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.post2.comments.get(year=2020) def test_no_result_get(self): @@ -2372,8 +2525,8 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): Tests the get method of a List of Embedded Documents where get returns no results. """ - with self.assertRaises(DoesNotExist): - self.post1.comments.get(author='user3') + with pytest.raises(DoesNotExist): + self.post1.comments.get(author="user3") def test_first(self): """ @@ -2383,28 +2536,23 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): comment = self.post1.comments.first() # Ensure a Comment object was returned. - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment, self.post1.comments[0]) + assert isinstance(comment, self.Comments) + assert comment == self.post1.comments[0] def test_create(self): """ Test the create method of a List of Embedded Documents. """ - comment = self.post1.comments.create( - author='user4', message='message1' - ) + comment = self.post1.comments.create(author="user4", message="message1") self.post1.save() # Ensure the returned value is the comment object. - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, 'user4') - self.assertEqual(comment.message, 'message1') + assert isinstance(comment, self.Comments) + assert comment.author == "user4" + assert comment.message == "message1" # Ensure the new comment was actually saved to the database. 
- self.assertIn( - comment, - self.BlogPost.objects(comments__author='user4')[0].comments - ) + assert comment in self.BlogPost.objects(comments__author="user4")[0].comments def test_filtered_create(self): """ @@ -2412,21 +2560,18 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): to a call to the filter method. Filtering should have no effect on creation. """ - comment = self.post1.comments.filter(author='user1').create( - author='user4', message='message1' + comment = self.post1.comments.filter(author="user1").create( + author="user4", message="message1" ) self.post1.save() # Ensure the returned value is the comment object. - self.assertIsInstance(comment, self.Comments) - self.assertEqual(comment.author, 'user4') - self.assertEqual(comment.message, 'message1') + assert isinstance(comment, self.Comments) + assert comment.author == "user4" + assert comment.message == "message1" # Ensure the new comment was actually saved to the database. - self.assertIn( - comment, - self.BlogPost.objects(comments__author='user4')[0].comments - ) + assert comment in self.BlogPost.objects(comments__author="user4")[0].comments def test_no_keyword_update(self): """ @@ -2438,63 +2583,55 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): self.post1.save() # Ensure that nothing was altered. - self.assertIn( - original[0], - self.BlogPost.objects(id=self.post1.id)[0].comments - ) + assert original[0] in self.BlogPost.objects(id=self.post1.id)[0].comments - self.assertIn( - original[1], - self.BlogPost.objects(id=self.post1.id)[0].comments - ) + assert original[1] in self.BlogPost.objects(id=self.post1.id)[0].comments # Ensure the method returned 0 as the number of entries # modified - self.assertEqual(number, 0) + assert number == 0 def test_single_keyword_update(self): """ Tests the update method of a List of Embedded Documents with a single keyword. 
""" - number = self.post1.comments.update(author='user4') + number = self.post1.comments.update(author="user4") self.post1.save() comments = self.BlogPost.objects(id=self.post1.id)[0].comments # Ensure that the database was updated properly. - self.assertEqual(comments[0].author, 'user4') - self.assertEqual(comments[1].author, 'user4') + assert comments[0].author == "user4" + assert comments[1].author == "user4" # Ensure the method returned 2 as the number of entries # modified - self.assertEqual(number, 2) + assert number == 2 def test_unicode(self): """ Tests that unicode strings handled correctly """ - post = self.BlogPost(comments=[ - self.Comments(author='user1', message=u'сообщение'), - self.Comments(author='user2', message=u'хабарлама') - ]).save() - self.assertEqual(post.comments.get(message=u'сообщение').author, - 'user1') + post = self.BlogPost( + comments=[ + self.Comments(author="user1", message=u"сообщение"), + self.Comments(author="user2", message=u"хабарлама"), + ] + ).save() + assert post.comments.get(message=u"сообщение").author == "user1" def test_save(self): """ Tests the save method of a List of Embedded Documents. """ comments = self.post1.comments - new_comment = self.Comments(author='user4') + new_comment = self.Comments(author="user4") comments.append(new_comment) comments.save() # Ensure that the new comment has been added to the database. - self.assertIn( - new_comment, - self.BlogPost.objects(id=self.post1.id)[0].comments - ) + assert new_comment in self.BlogPost.objects(id=self.post1.id)[0].comments def test_delete(self): """ @@ -2505,19 +2642,17 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): # Ensure that all the comments under post1 were deleted in the # database. - self.assertListEqual( - self.BlogPost.objects(id=self.post1.id)[0].comments, [] - ) + assert self.BlogPost.objects(id=self.post1.id)[0].comments == [] # Ensure that post1 comments were deleted from the list. 
- self.assertListEqual(self.post1.comments, []) + assert self.post1.comments == [] # Ensure that comments still returned a EmbeddedDocumentList object. - self.assertIsInstance(self.post1.comments, EmbeddedDocumentList) + assert isinstance(self.post1.comments, EmbeddedDocumentList) # Ensure that the delete method returned 2 as the number of entries # deleted from the database - self.assertEqual(number, 2) + assert number == 2 def test_empty_list_embedded_documents_with_unique_field(self): """ @@ -2525,6 +2660,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): that have a unique field can be saved, but if the unique field is also sparse than multiple documents with an empty list can be saved. """ + class EmbeddedWithUnique(EmbeddedDocument): number = IntField(unique=True) @@ -2532,7 +2668,7 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique)) A(my_list=[]).save() - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): A(my_list=[]).save() class EmbeddedWithSparseUnique(EmbeddedDocument): @@ -2553,31 +2689,27 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): after the filter method has been called. """ comment = self.post1.comments[1] - number = self.post1.comments.filter(author='user2').delete() + number = self.post1.comments.filter(author="user2").delete() self.post1.save() # Ensure that only the user2 comment was deleted. - self.assertNotIn( - comment, self.BlogPost.objects(id=self.post1.id)[0].comments - ) - self.assertEqual( - len(self.BlogPost.objects(id=self.post1.id)[0].comments), 1 - ) + assert comment not in self.BlogPost.objects(id=self.post1.id)[0].comments + assert len(self.BlogPost.objects(id=self.post1.id)[0].comments) == 1 # Ensure that the user2 comment no longer exists in the list. 
- self.assertNotIn(comment, self.post1.comments) - self.assertEqual(len(self.post1.comments), 1) + assert comment not in self.post1.comments + assert len(self.post1.comments) == 1 # Ensure that the delete method returned 1 as the number of entries # deleted from the database - self.assertEqual(number, 1) + assert number == 1 def test_custom_data(self): """ Tests that custom data is saved in the field object and doesn't interfere with the rest of field functionalities. """ - custom_data = {'a': 'a_value', 'b': [1, 2]} + custom_data = {"a": "a_value", "b": [1, 2]} class CustomData(Document): a_field = IntField() @@ -2586,11 +2718,11 @@ class EmbeddedDocumentListFieldTestCase(MongoDBTestCase): CustomData.drop_collection() a1 = CustomData(a_field=1, c_field=2).save() - self.assertEqual(2, a1.c_field) - self.assertFalse(hasattr(a1.c_field, 'custom_data')) - self.assertTrue(hasattr(CustomData.c_field, 'custom_data')) - self.assertEqual(custom_data['a'], CustomData.c_field.custom_data['a']) + assert 2 == a1.c_field + assert not hasattr(a1.c_field, "custom_data") + assert hasattr(CustomData.c_field, "custom_data") + assert custom_data["a"] == CustomData.c_field.custom_data["a"] -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/fields/file_tests.py b/tests/fields/test_file_field.py similarity index 59% rename from tests/fields/file_tests.py rename to tests/fields/test_file_field.py index a7722458..cbac9b69 100644 --- a/tests/fields/file_tests.py +++ b/tests/fields/test_file_field.py @@ -1,49 +1,51 @@ # -*- coding: utf-8 -*- import copy import os -import unittest import tempfile +import unittest +from io import BytesIO import gridfs -import six +import pytest -from nose.plugins.skip import SkipTest from mongoengine import * from mongoengine.connection import get_db -from mongoengine.python_support import StringIO try: from PIL import Image + HAS_PIL = True except ImportError: HAS_PIL = False from tests.utils import MongoDBTestCase 
-TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') -TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') +require_pil = pytest.mark.skipif(not HAS_PIL, reason="PIL not installed") + +TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png") +TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png") def get_file(path): """Use a BytesIO instead of a file to allow to have a one-liner and avoid that the file remains opened""" - bytes_io = StringIO() - with open(path, 'rb') as f: + bytes_io = BytesIO() + with open(path, "rb") as f: bytes_io.write(f.read()) bytes_io.seek(0) return bytes_io -class FileTest(MongoDBTestCase): - +class TestFileField(MongoDBTestCase): def tearDown(self): - self.db.drop_collection('fs.files') - self.db.drop_collection('fs.chunks') + self.db.drop_collection("fs.files") + self.db.drop_collection("fs.chunks") def test_file_field_optional(self): # Make sure FileField is optional and not required class DemoFile(Document): the_file = FileField() + DemoFile.objects.create() def test_file_fields(self): @@ -55,18 +57,21 @@ class FileTest(MongoDBTestCase): PutFile.drop_collection() - text = six.b('Hello, World!') - content_type = 'text/plain' + text = "Hello, World!".encode("latin-1") + content_type = "text/plain" putfile = PutFile() putfile.the_file.put(text, content_type=content_type, filename="hello") putfile.save() result = PutFile.objects.first() - self.assertEqual(putfile, result) - self.assertEqual("%s" % result.the_file, "" % result.the_file.grid_id) - self.assertEqual(result.the_file.read(), text) - self.assertEqual(result.the_file.content_type, content_type) + assert putfile == result + assert ( + "%s" % result.the_file + == "" % result.the_file.grid_id + ) + assert result.the_file.read() == text + assert result.the_file.content_type == content_type result.the_file.delete() # Remove file from GridFS PutFile.objects.delete() @@ -74,29 +79,30 @@ class 
FileTest(MongoDBTestCase): PutFile.drop_collection() putfile = PutFile() - putstring = StringIO() + putstring = BytesIO() putstring.write(text) putstring.seek(0) putfile.the_file.put(putstring, content_type=content_type) putfile.save() result = PutFile.objects.first() - self.assertEqual(putfile, result) - self.assertEqual(result.the_file.read(), text) - self.assertEqual(result.the_file.content_type, content_type) + assert putfile == result + assert result.the_file.read() == text + assert result.the_file.content_type == content_type result.the_file.delete() def test_file_fields_stream(self): """Ensure that file fields can be written to and their data retrieved """ + class StreamFile(Document): the_file = FileField() StreamFile.drop_collection() - text = six.b('Hello, World!') - more_text = six.b('Foo Bar') - content_type = 'text/plain' + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") + content_type = "text/plain" streamfile = StreamFile() streamfile.the_file.new_file(content_type=content_type) @@ -106,32 +112,32 @@ class FileTest(MongoDBTestCase): streamfile.save() result = StreamFile.objects.first() - self.assertEqual(streamfile, result) - self.assertEqual(result.the_file.read(), text + more_text) - self.assertEqual(result.the_file.content_type, content_type) + assert streamfile == result + assert result.the_file.read() == text + more_text + assert result.the_file.content_type == content_type result.the_file.seek(0) - self.assertEqual(result.the_file.tell(), 0) - self.assertEqual(result.the_file.read(len(text)), text) - self.assertEqual(result.the_file.tell(), len(text)) - self.assertEqual(result.the_file.read(len(more_text)), more_text) - self.assertEqual(result.the_file.tell(), len(text + more_text)) + assert result.the_file.tell() == 0 + assert result.the_file.read(len(text)) == text + assert result.the_file.tell() == len(text) + assert result.the_file.read(len(more_text)) == more_text + assert result.the_file.tell() == 
len(text + more_text) result.the_file.delete() # Ensure deleted file returns None - self.assertTrue(result.the_file.read() is None) + assert result.the_file.read() is None def test_file_fields_stream_after_none(self): """Ensure that a file field can be written to after it has been saved as None """ + class StreamFile(Document): the_file = FileField() StreamFile.drop_collection() - text = six.b('Hello, World!') - more_text = six.b('Foo Bar') - content_type = 'text/plain' + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") streamfile = StreamFile() streamfile.save() @@ -142,27 +148,26 @@ class FileTest(MongoDBTestCase): streamfile.save() result = StreamFile.objects.first() - self.assertEqual(streamfile, result) - self.assertEqual(result.the_file.read(), text + more_text) - # self.assertEqual(result.the_file.content_type, content_type) + assert streamfile == result + assert result.the_file.read() == text + more_text + # assert result.the_file.content_type == content_type result.the_file.seek(0) - self.assertEqual(result.the_file.tell(), 0) - self.assertEqual(result.the_file.read(len(text)), text) - self.assertEqual(result.the_file.tell(), len(text)) - self.assertEqual(result.the_file.read(len(more_text)), more_text) - self.assertEqual(result.the_file.tell(), len(text + more_text)) + assert result.the_file.tell() == 0 + assert result.the_file.read(len(text)) == text + assert result.the_file.tell() == len(text) + assert result.the_file.read(len(more_text)) == more_text + assert result.the_file.tell() == len(text + more_text) result.the_file.delete() # Ensure deleted file returns None - self.assertTrue(result.the_file.read() is None) + assert result.the_file.read() is None def test_file_fields_set(self): - class SetFile(Document): the_file = FileField() - text = six.b('Hello, World!') - more_text = six.b('Foo Bar') + text = "Hello, World!".encode("latin-1") + more_text = "Foo Bar".encode("latin-1") SetFile.drop_collection() @@ -171,27 
+176,26 @@ class FileTest(MongoDBTestCase): setfile.save() result = SetFile.objects.first() - self.assertEqual(setfile, result) - self.assertEqual(result.the_file.read(), text) + assert setfile == result + assert result.the_file.read() == text # Try replacing file with new one result.the_file.replace(more_text) result.save() result = SetFile.objects.first() - self.assertEqual(setfile, result) - self.assertEqual(result.the_file.read(), more_text) + assert setfile == result + assert result.the_file.read() == more_text result.the_file.delete() def test_file_field_no_default(self): - class GridDocument(Document): the_file = FileField() GridDocument.drop_collection() with tempfile.TemporaryFile() as f: - f.write(six.b("Hello World!")) + f.write("Hello World!".encode("latin-1")) f.flush() # Test without default @@ -199,34 +203,35 @@ class FileTest(MongoDBTestCase): doc_a.save() doc_b = GridDocument.objects.with_id(doc_a.id) - doc_b.the_file.replace(f, filename='doc_b') + doc_b.the_file.replace(f, filename="doc_b") doc_b.save() - self.assertNotEqual(doc_b.the_file.grid_id, None) + assert doc_b.the_file.grid_id is not None # Test it matches doc_c = GridDocument.objects.with_id(doc_b.id) - self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) + assert doc_b.the_file.grid_id == doc_c.the_file.grid_id # Test with default - doc_d = GridDocument(the_file=six.b('')) + doc_d = GridDocument(the_file="".encode("latin-1")) doc_d.save() doc_e = GridDocument.objects.with_id(doc_d.id) - self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) + assert doc_d.the_file.grid_id == doc_e.the_file.grid_id - doc_e.the_file.replace(f, filename='doc_e') + doc_e.the_file.replace(f, filename="doc_e") doc_e.save() doc_f = GridDocument.objects.with_id(doc_e.id) - self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id) + assert doc_e.the_file.grid_id == doc_f.the_file.grid_id db = GridDocument._get_db() grid_fs = gridfs.GridFS(db) - self.assertEqual(['doc_b', 'doc_e'], 
grid_fs.list()) + assert ["doc_b", "doc_e"] == grid_fs.list() def test_file_uniqueness(self): """Ensure that each instance of a FileField is unique """ + class TestFile(Document): name = StringField() the_file = FileField() @@ -234,15 +239,15 @@ class FileTest(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" - test_file.the_file.put(six.b('Hello, World!')) + test_file.the_file.put("Hello, World!".encode("latin-1")) test_file.save() # Second instance test_file_dupe = TestFile() data = test_file_dupe.the_file.read() # Should be None - self.assertNotEqual(test_file.name, test_file_dupe.name) - self.assertNotEqual(test_file.the_file.read(), data) + assert test_file.name != test_file_dupe.name + assert test_file.the_file.read() != data TestFile.drop_collection() @@ -255,61 +260,68 @@ class FileTest(MongoDBTestCase): photo = FileField() Animal.drop_collection() - marmot = Animal(genus='Marmota', family='Sciuridae') + marmot = Animal(genus="Marmota", family="Sciuridae") marmot_photo_content = get_file(TEST_IMAGE_PATH) # Retrieve a photo from disk - marmot.photo.put(marmot_photo_content, content_type='image/jpeg', foo='bar') + marmot.photo.put(marmot_photo_content, content_type="image/jpeg", foo="bar") marmot.photo.close() marmot.save() marmot = Animal.objects.get() - self.assertEqual(marmot.photo.content_type, 'image/jpeg') - self.assertEqual(marmot.photo.foo, 'bar') + assert marmot.photo.content_type == "image/jpeg" + assert marmot.photo.foo == "bar" def test_file_reassigning(self): class TestFile(Document): the_file = FileField() + TestFile.drop_collection() test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() - self.assertEqual(test_file.the_file.get().length, 8313) + assert test_file.the_file.get().length == 8313 test_file = TestFile.objects.first() test_file.the_file = get_file(TEST_IMAGE2_PATH) test_file.save() - self.assertEqual(test_file.the_file.get().length, 4971) + assert test_file.the_file.get().length == 
4971 def test_file_boolean(self): """Ensure that a boolean test of a FileField indicates its presence """ + class TestFile(Document): the_file = FileField() + TestFile.drop_collection() test_file = TestFile() - self.assertFalse(bool(test_file.the_file)) - test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain') + assert not bool(test_file.the_file) + test_file.the_file.put( + "Hello, World!".encode("latin-1"), content_type="text/plain" + ) test_file.save() - self.assertTrue(bool(test_file.the_file)) + assert bool(test_file.the_file) test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.content_type, "text/plain") + assert test_file.the_file.content_type == "text/plain" def test_file_cmp(self): """Test comparing against other types""" + class TestFile(Document): the_file = FileField() test_file = TestFile() - self.assertNotIn(test_file.the_file, [{"test": 1}]) + assert test_file.the_file not in [{"test": 1}] def test_file_disk_space(self): """ Test disk space usage when we delete/replace a file """ + class TestFile(Document): the_file = FileField() - text = six.b('Hello, World!') - content_type = 'text/plain' + text = "Hello, World!".encode("latin-1") + content_type = "text/plain" testfile = TestFile() testfile.the_file.put(text, content_type=content_type, filename="hello") @@ -320,16 +332,16 @@ class FileTest(MongoDBTestCase): files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 1) - self.assertEqual(len(list(chunks)), 1) + assert len(list(files)) == 1 + assert len(list(chunks)) == 1 # Deleting the docoument should delete the files testfile.delete() files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 0) - self.assertEqual(len(list(chunks)), 0) + assert len(list(files)) == 0 + assert len(list(chunks)) == 0 # Test case where we don't store a file in the first place testfile = TestFile() @@ -337,48 +349,46 @@ class FileTest(MongoDBTestCase): files = 
db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 0) - self.assertEqual(len(list(chunks)), 0) + assert len(list(files)) == 0 + assert len(list(chunks)) == 0 testfile.delete() files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 0) - self.assertEqual(len(list(chunks)), 0) + assert len(list(files)) == 0 + assert len(list(chunks)) == 0 # Test case where we overwrite the file testfile = TestFile() testfile.the_file.put(text, content_type=content_type, filename="hello") testfile.save() - text = six.b('Bonjour, World!') + text = "Bonjour, World!".encode("latin-1") testfile.the_file.replace(text, content_type=content_type, filename="hello") testfile.save() files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 1) - self.assertEqual(len(list(chunks)), 1) + assert len(list(files)) == 1 + assert len(list(chunks)) == 1 testfile.delete() files = db.fs.files.find() chunks = db.fs.chunks.find() - self.assertEqual(len(list(files)), 0) - self.assertEqual(len(list(chunks)), 0) + assert len(list(files)) == 0 + assert len(list(chunks)) == 0 + @require_pil def test_image_field(self): - if not HAS_PIL: - raise SkipTest('PIL not installed') - class TestImage(Document): image = ImageField() TestImage.drop_collection() with tempfile.TemporaryFile() as f: - f.write(six.b("Hello World!")) + f.write("Hello World!".encode("latin-1")) f.flush() t = TestImage() @@ -386,7 +396,7 @@ class FileTest(MongoDBTestCase): t.image.put(f) self.fail("Should have raised an invalidation error") except ValidationError as e: - self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) + assert "%s" % e == "Invalid image: cannot identify image file %s" % f t = TestImage() t.image.put(get_file(TEST_IMAGE_PATH)) @@ -394,34 +404,31 @@ class FileTest(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, 'PNG') + assert t.image.format == "PNG" w, h = 
t.image.size - self.assertEqual(w, 371) - self.assertEqual(h, 76) + assert w == 371 + assert h == 76 t.image.delete() + @require_pil def test_image_field_reassigning(self): - if not HAS_PIL: - raise SkipTest('PIL not installed') - class TestFile(Document): the_file = ImageField() + TestFile.drop_collection() test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() - self.assertEqual(test_file.the_file.size, (371, 76)) + assert test_file.the_file.size == (371, 76) test_file = TestFile.objects.first() test_file.the_file = get_file(TEST_IMAGE2_PATH) test_file.save() - self.assertEqual(test_file.the_file.size, (45, 101)) + assert test_file.the_file.size == (45, 101) + @require_pil def test_image_field_resize(self): - if not HAS_PIL: - raise SkipTest('PIL not installed') - class TestImage(Document): image = ImageField(size=(185, 37)) @@ -433,18 +440,16 @@ class FileTest(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, 'PNG') + assert t.image.format == "PNG" w, h = t.image.size - self.assertEqual(w, 185) - self.assertEqual(h, 37) + assert w == 185 + assert h == 37 t.image.delete() + @require_pil def test_image_field_resize_force(self): - if not HAS_PIL: - raise SkipTest('PIL not installed') - class TestImage(Document): image = ImageField(size=(185, 37, True)) @@ -456,18 +461,16 @@ class FileTest(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.format, 'PNG') + assert t.image.format == "PNG" w, h = t.image.size - self.assertEqual(w, 185) - self.assertEqual(h, 37) + assert w == 185 + assert h == 37 t.image.delete() + @require_pil def test_image_field_thumbnail(self): - if not HAS_PIL: - raise SkipTest('PIL not installed') - class TestImage(Document): image = ImageField(thumbnail_size=(92, 18)) @@ -479,19 +482,18 @@ class FileTest(MongoDBTestCase): t = TestImage.objects.first() - self.assertEqual(t.image.thumbnail.format, 'PNG') - self.assertEqual(t.image.thumbnail.width, 92) - 
self.assertEqual(t.image.thumbnail.height, 18) + assert t.image.thumbnail.format == "PNG" + assert t.image.thumbnail.width == 92 + assert t.image.thumbnail.height == 18 t.image.delete() def test_file_multidb(self): - register_connection('test_files', 'test_files') + register_connection("test_files", "test_files") class TestFile(Document): name = StringField() - the_file = FileField(db_alias="test_files", - collection_name="macumba") + the_file = FileField(db_alias="test_files", collection_name="macumba") TestFile.drop_collection() @@ -502,23 +504,21 @@ class FileTest(MongoDBTestCase): # First instance test_file = TestFile() test_file.name = "Hello, World!" - test_file.the_file.put(six.b('Hello, World!'), - name="hello.txt") + test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt") test_file.save() data = get_db("test_files").macumba.files.find_one() - self.assertEqual(data.get('name'), 'hello.txt') + assert data.get("name") == "hello.txt" test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), six.b('Hello, World!')) + assert test_file.the_file.read() == "Hello, World!".encode("latin-1") test_file = TestFile.objects.first() - test_file.the_file = six.b('HELLO, WORLD!') + test_file.the_file = "Hello, World!".encode("latin-1") test_file.save() test_file = TestFile.objects.first() - self.assertEqual(test_file.the_file.read(), - six.b('HELLO, WORLD!')) + assert test_file.the_file.read() == "Hello, World!".encode("latin-1") def test_copyable(self): class PutFile(Document): @@ -526,8 +526,8 @@ class FileTest(MongoDBTestCase): PutFile.drop_collection() - text = six.b('Hello, World!') - content_type = 'text/plain' + text = "Hello, World!".encode("latin-1") + content_type = "text/plain" putfile = PutFile() putfile.the_file.put(text, content_type=content_type) @@ -536,14 +536,11 @@ class FileTest(MongoDBTestCase): class TestFile(Document): name = StringField() - self.assertEqual(putfile, copy.copy(putfile)) - 
self.assertEqual(putfile, copy.deepcopy(putfile)) + assert putfile == copy.copy(putfile) + assert putfile == copy.deepcopy(putfile) + @require_pil def test_get_image_by_grid_id(self): - - if not HAS_PIL: - raise SkipTest('PIL not installed') - class TestImage(Document): image1 = ImageField() @@ -559,8 +556,7 @@ class FileTest(MongoDBTestCase): test = TestImage.objects.first() grid_id = test.image1.grid_id - self.assertEqual(1, TestImage.objects(Q(image1=grid_id) - or Q(image2=grid_id)).count()) + assert 1 == TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count() def test_complex_field_filefield(self): """Ensure you can add meta data to file""" @@ -571,21 +567,21 @@ class FileTest(MongoDBTestCase): photos = ListField(FileField()) Animal.drop_collection() - marmot = Animal(genus='Marmota', family='Sciuridae') + marmot = Animal(genus="Marmota", family="Sciuridae") - with open(TEST_IMAGE_PATH, 'rb') as marmot_photo: # Retrieve a photo from disk - photos_field = marmot._fields['photos'].field - new_proxy = photos_field.get_proxy_obj('photos', marmot) - new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') + with open(TEST_IMAGE_PATH, "rb") as marmot_photo: # Retrieve a photo from disk + photos_field = marmot._fields["photos"].field + new_proxy = photos_field.get_proxy_obj("photos", marmot) + new_proxy.put(marmot_photo, content_type="image/jpeg", foo="bar") marmot.photos.append(new_proxy) marmot.save() marmot = Animal.objects.get() - self.assertEqual(marmot.photos[0].content_type, 'image/jpeg') - self.assertEqual(marmot.photos[0].foo, 'bar') - self.assertEqual(marmot.photos[0].get().length, 8313) + assert marmot.photos[0].content_type == "image/jpeg" + assert marmot.photos[0].foo == "bar" + assert marmot.photos[0].get().length == 8313 -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/fields/test_float_field.py b/tests/fields/test_float_field.py index fa92cf20..839494a9 100644 --- 
a/tests/fields/test_float_field.py +++ b/tests/fields/test_float_field.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -import six +import pytest from mongoengine import * @@ -7,7 +7,6 @@ from tests.utils import MongoDBTestCase class TestFloatField(MongoDBTestCase): - def test_float_ne_operator(self): class TestDocument(Document): float_fld = FloatField() @@ -17,12 +16,13 @@ class TestFloatField(MongoDBTestCase): TestDocument(float_fld=None).save() TestDocument(float_fld=1).save() - self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count()) - self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count()) + assert 1 == TestDocument.objects(float_fld__ne=None).count() + assert 1 == TestDocument.objects(float_fld__ne=1).count() def test_validation(self): """Ensure that invalid values cannot be assigned to float fields. """ + class Person(Document): height = FloatField(min_value=0.1, max_value=3.5) @@ -33,26 +33,30 @@ class TestFloatField(MongoDBTestCase): person.height = 1.89 person.validate() - person.height = '2.0' - self.assertRaises(ValidationError, person.validate) + person.height = "2.0" + with pytest.raises(ValidationError): + person.validate() person.height = 0.01 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.height = 4.0 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() - person_2 = Person(height='something invalid') - self.assertRaises(ValidationError, person_2.validate) + person_2 = Person(height="something invalid") + with pytest.raises(ValidationError): + person_2.validate() big_person = BigPerson() - for value, value_type in enumerate(six.integer_types): - big_person.height = value_type(value) - big_person.validate() + big_person.height = int(0) + big_person.validate() big_person.height = 2 ** 500 big_person.validate() big_person.height = 2 ** 100000 # Too big for a float value - 
self.assertRaises(ValidationError, big_person.validate) + with pytest.raises(ValidationError): + big_person.validate() diff --git a/tests/fields/geo.py b/tests/fields/test_geo_fields.py similarity index 75% rename from tests/fields/geo.py rename to tests/fields/test_geo_fields.py index 37ed97f5..7618b3a0 100644 --- a/tests/fields/geo.py +++ b/tests/fields/test_geo_fields.py @@ -2,30 +2,23 @@ import unittest from mongoengine import * -from mongoengine.connection import get_db - -__all__ = ("GeoFieldTest", ) +from tests.utils import MongoDBTestCase -class GeoFieldTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - self.db = get_db() - +class TestGeoField(MongoDBTestCase): def _test_for_expected_error(self, Cls, loc, expected): try: Cls(loc=loc).validate() - self.fail('Should not validate the location {0}'.format(loc)) + self.fail("Should not validate the location {0}".format(loc)) except ValidationError as e: - self.assertEqual(expected, e.to_dict()['loc']) + assert expected == e.to_dict()["loc"] def test_geopoint_validation(self): class Location(Document): loc = GeoPointField() invalid_coords = [{"x": 1, "y": 2}, 5, "a"] - expected = 'GeoPointField can only accept tuples or lists of (x, y)' + expected = "GeoPointField can only accept tuples or lists of (x, y)" for coord in invalid_coords: self._test_for_expected_error(Location, coord, expected) @@ -40,7 +33,7 @@ class GeoFieldTest(unittest.TestCase): expected = "Both values (%s) in point must be float or int" % repr(coord) self._test_for_expected_error(Location, coord, expected) - invalid_coords = [21, 4, 'a'] + invalid_coords = [21, 4, "a"] for coord in invalid_coords: expected = "GeoPointField can only accept tuples or lists of (x, y)" self._test_for_expected_error(Location, coord, expected) @@ -50,7 +43,9 @@ class GeoFieldTest(unittest.TestCase): loc = PointField() invalid_coords = {"x": 1, "y": 2} - expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)' 
+ expected = ( + "PointField can only accept a valid GeoJson dictionary or lists of (x, y)" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": []} @@ -77,19 +72,16 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, coord, expected) Location(loc=[1, 2]).validate() - Location(loc={ - "type": "Point", - "coordinates": [ - 81.4471435546875, - 23.61432859499169 - ]}).validate() + Location( + loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]} + ).validate() def test_linestring_validation(self): class Location(Document): loc = LineStringField() invalid_coords = {"x": 1, "y": 2} - expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = "LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)" self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -97,7 +89,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]} - expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" + expected = ( + "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [5, "a"] @@ -105,16 +99,25 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[1]] - expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) + expected = ( + "Invalid LineString:\nValue (%s) must be a two-dimensional point" + % repr(invalid_coords[0]) + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[1, 2, 3]] - expected = "Invalid LineString:\nValue (%s) must be a 
two-dimensional point" % repr(invalid_coords[0]) + expected = ( + "Invalid LineString:\nValue (%s) must be a two-dimensional point" + % repr(invalid_coords[0]) + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[{}, {}]], [("a", "b")]] for coord in invalid_coords: - expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) + expected = ( + "Invalid LineString:\nBoth values (%s) in point must be float or int" + % repr(coord[0]) + ) self._test_for_expected_error(Location, coord, expected) Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate() @@ -124,7 +127,9 @@ class GeoFieldTest(unittest.TestCase): loc = PolygonField() invalid_coords = {"x": 1, "y": 2} - expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = ( + "PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -136,7 +141,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[5, "a"]]] - expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" + expected = ( + "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[]]] @@ -162,7 +169,7 @@ class GeoFieldTest(unittest.TestCase): loc = MultiPointField() invalid_coords = {"x": 1, "y": 2} - expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = "MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)" self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -188,19 +195,19 @@ class GeoFieldTest(unittest.TestCase): 
self._test_for_expected_error(Location, coord, expected) Location(loc=[[1, 2]]).validate() - Location(loc={ - "type": "MultiPoint", - "coordinates": [ - [1, 2], - [81.4471435546875, 23.61432859499169] - ]}).validate() + Location( + loc={ + "type": "MultiPoint", + "coordinates": [[1, 2], [81.4471435546875, 23.61432859499169]], + } + ).validate() def test_multilinestring_validation(self): class Location(Document): loc = MultiLineStringField() invalid_coords = {"x": 1, "y": 2} - expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = "MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)" self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -216,16 +223,25 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[1]]] - expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) + expected = ( + "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" + % repr(invalid_coords[0][0]) + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[1, 2, 3]]] - expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) + expected = ( + "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" + % repr(invalid_coords[0][0]) + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] for coord in invalid_coords: - expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0]) + expected = ( + "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" + % repr(coord[0][0]) + ) self._test_for_expected_error(Location, coord, expected) Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 
2]]]).validate() @@ -235,7 +251,7 @@ class GeoFieldTest(unittest.TestCase): loc = MultiPolygonField() invalid_coords = {"x": 1, "y": 2} - expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' + expected = "MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)" self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MadeUp", "coordinates": [[]]} @@ -243,7 +259,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]} - expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + expected = ( + "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[5, "a"]]]] @@ -255,7 +273,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[1, 2, 3]]]] - expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + expected = ( + "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" + ) self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] @@ -263,7 +283,9 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, invalid_coords, expected) invalid_coords = [[[[1, 2], [3, 4]]]] - expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point" + expected = ( + "Invalid MultiPolygon:\nLineStrings must start and end at the same point" + ) self._test_for_expected_error(Location, invalid_coords, expected) Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate() @@ -271,17 +293,19 @@ class GeoFieldTest(unittest.TestCase): def test_indexes_geopoint(self): """Ensure that indexes are created 
automatically for GeoPointFields. """ + class Event(Document): title = StringField() location = GeoPointField() geo_indicies = Event._geo_indices() - self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}]) + assert geo_indicies == [{"fields": [("location", "2d")]}] def test_geopoint_embedded_indexes(self): """Ensure that indexes are created automatically for GeoPointFields on embedded documents. """ + class Venue(EmbeddedDocument): location = GeoPointField() name = StringField() @@ -291,11 +315,12 @@ class GeoFieldTest(unittest.TestCase): venue = EmbeddedDocumentField(Venue) geo_indicies = Event._geo_indices() - self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}]) + assert geo_indicies == [{"fields": [("venue.location", "2d")]}] def test_indexes_2dsphere(self): """Ensure that indexes are created automatically for GeoPointFields. """ + class Event(Document): title = StringField() point = PointField() @@ -303,13 +328,14 @@ class GeoFieldTest(unittest.TestCase): polygon = PolygonField() geo_indicies = Event._geo_indices() - self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies) - self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies) - self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies) + assert {"fields": [("line", "2dsphere")]} in geo_indicies + assert {"fields": [("polygon", "2dsphere")]} in geo_indicies + assert {"fields": [("point", "2dsphere")]} in geo_indicies def test_indexes_2dsphere_embedded(self): """Ensure that indexes are created automatically for GeoPointFields. 
""" + class Venue(EmbeddedDocument): name = StringField() point = PointField() @@ -321,12 +347,11 @@ class GeoFieldTest(unittest.TestCase): venue = EmbeddedDocumentField(Venue) geo_indicies = Event._geo_indices() - self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies) - self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies) - self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies) + assert {"fields": [("venue.line", "2dsphere")]} in geo_indicies + assert {"fields": [("venue.polygon", "2dsphere")]} in geo_indicies + assert {"fields": [("venue.point", "2dsphere")]} in geo_indicies def test_geo_indexes_recursion(self): - class Location(Document): name = StringField() location = GeoPointField() @@ -338,14 +363,14 @@ class GeoFieldTest(unittest.TestCase): Location.drop_collection() Parent.drop_collection() - Parent(name='Berlin').save() + Parent(name="Berlin").save() info = Parent._get_collection().index_information() - self.assertNotIn('location_2d', info) + assert "location_2d" not in info info = Location._get_collection().index_information() - self.assertIn('location_2d', info) + assert "location_2d" in info - self.assertEqual(len(Parent._geo_indices()), 0) - self.assertEqual(len(Location._geo_indices()), 1) + assert len(Parent._geo_indices()) == 0 + assert len(Location._geo_indices()) == 1 def test_geo_indexes_auto_index(self): @@ -354,18 +379,18 @@ class GeoFieldTest(unittest.TestCase): location = PointField(auto_index=False) datetime = DateTimeField() - meta = { - 'indexes': [[("location", "2dsphere"), ("datetime", 1)]] - } + meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]} - self.assertEqual([], Log._geo_indices()) + assert Log._geo_indices() == [] Log.drop_collection() Log.ensure_indexes() info = Log._get_collection().index_information() - self.assertEqual(info["location_2dsphere_datetime_1"]["key"], - [('location', '2dsphere'), ('datetime', 1)]) + assert 
info["location_2dsphere_datetime_1"]["key"] == [ + ("location", "2dsphere"), + ("datetime", 1), + ] # Test listing explicitly class Log(Document): @@ -373,20 +398,20 @@ class GeoFieldTest(unittest.TestCase): datetime = DateTimeField() meta = { - 'indexes': [ - {'fields': [("location", "2dsphere"), ("datetime", 1)]} - ] + "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}] } - self.assertEqual([], Log._geo_indices()) + assert Log._geo_indices() == [] Log.drop_collection() Log.ensure_indexes() info = Log._get_collection().index_information() - self.assertEqual(info["location_2dsphere_datetime_1"]["key"], - [('location', '2dsphere'), ('datetime', 1)]) + assert info["location_2dsphere_datetime_1"]["key"] == [ + ("location", "2dsphere"), + ("datetime", 1), + ] -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/fields/test_int_field.py b/tests/fields/test_int_field.py index 1b1f7ad9..1f9c5a77 100644 --- a/tests/fields/test_int_field.py +++ b/tests/fields/test_int_field.py @@ -1,14 +1,16 @@ # -*- coding: utf-8 -*- +import pytest + from mongoengine import * from tests.utils import MongoDBTestCase class TestIntField(MongoDBTestCase): - def test_int_validation(self): """Ensure that invalid values cannot be assigned to int fields. 
""" + class Person(Document): age = IntField(min_value=0, max_value=110) @@ -23,11 +25,14 @@ class TestIntField(MongoDBTestCase): person.validate() person.age = -1 - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() person.age = 120 - self.assertRaises(ValidationError, person.validate) - person.age = 'ten' - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() + person.age = "ten" + with pytest.raises(ValidationError): + person.validate() def test_ne_operator(self): class TestDocument(Document): @@ -38,5 +43,5 @@ class TestIntField(MongoDBTestCase): TestDocument(int_fld=None).save() TestDocument(int_fld=1).save() - self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count()) - self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count()) + assert 1 == TestDocument.objects(int_fld__ne=None).count() + assert 1 == TestDocument.objects(int_fld__ne=1).count() diff --git a/tests/fields/test_lazy_reference_field.py b/tests/fields/test_lazy_reference_field.py index b10506e7..50e60262 100644 --- a/tests/fields/test_lazy_reference_field.py +++ b/tests/fields/test_lazy_reference_field.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- from bson import DBRef, ObjectId +import pytest from mongoengine import * from mongoengine.base import LazyReference @@ -11,7 +12,8 @@ class TestLazyReferenceField(MongoDBTestCase): def test_lazy_reference_config(self): # Make sure ReferenceField only accepts a document class or a string # with a document class name. 
- self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument) + with pytest.raises(ValidationError): + LazyReferenceField(EmbeddedDocument) def test___repr__(self): class Animal(Document): @@ -25,7 +27,7 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal() oc = Ocurrence(animal=animal) - self.assertIn('LazyReference', repr(oc.animal)) + assert "LazyReference" in repr(oc.animal) def test___getattr___unknown_attr_raises_attribute_error(self): class Animal(Document): @@ -39,7 +41,7 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal().save() oc = Ocurrence(animal=animal) - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): oc.animal.not_exist def test_lazy_reference_simple(self): @@ -57,19 +59,19 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() Ocurrence(person="test", animal=animal).save() p = Ocurrence.objects.get() - self.assertIsInstance(p.animal, LazyReference) + assert isinstance(p.animal, LazyReference) fetched_animal = p.animal.fetch() - self.assertEqual(fetched_animal, animal) + assert fetched_animal == animal # `fetch` keep cache on referenced document by default... 
animal.tag = "not so heavy" animal.save() double_fetch = p.animal.fetch() - self.assertIs(fetched_animal, double_fetch) - self.assertEqual(double_fetch.tag, "heavy") + assert fetched_animal is double_fetch + assert double_fetch.tag == "heavy" # ...unless specified otherwise fetch_force = p.animal.fetch(force=True) - self.assertIsNot(fetch_force, fetched_animal) - self.assertEqual(fetch_force.tag, "not so heavy") + assert fetch_force is not fetched_animal + assert fetch_force.tag == "not so heavy" def test_lazy_reference_fetch_invalid_ref(self): class Animal(Document): @@ -87,13 +89,13 @@ class TestLazyReferenceField(MongoDBTestCase): Ocurrence(person="test", animal=animal).save() animal.delete() p = Ocurrence.objects.get() - self.assertIsInstance(p.animal, LazyReference) - with self.assertRaises(DoesNotExist): + assert isinstance(p.animal, LazyReference) + with pytest.raises(DoesNotExist): p.animal.fetch() def test_lazy_reference_set(self): class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} name = StringField() tag = StringField() @@ -109,21 +111,20 @@ class TestLazyReferenceField(MongoDBTestCase): nick = StringField() animal = Animal(name="Leopard", tag="heavy").save() - sub_animal = SubAnimal(nick='doggo', name='dog').save() + sub_animal = SubAnimal(nick="doggo", name="dog").save() for ref in ( - animal, - animal.pk, - DBRef(animal._get_collection_name(), animal.pk), - LazyReference(Animal, animal.pk), - - sub_animal, - sub_animal.pk, - DBRef(sub_animal._get_collection_name(), sub_animal.pk), - LazyReference(SubAnimal, sub_animal.pk), - ): + animal, + animal.pk, + DBRef(animal._get_collection_name(), animal.pk), + LazyReference(Animal, animal.pk), + sub_animal, + sub_animal.pk, + DBRef(sub_animal._get_collection_name(), sub_animal.pk), + LazyReference(SubAnimal, sub_animal.pk), + ): p = Ocurrence(person="test", animal=ref).save() p.reload() - self.assertIsInstance(p.animal, LazyReference) + assert 
isinstance(p.animal, LazyReference) p.animal.fetch() def test_lazy_reference_bad_set(self): @@ -144,19 +145,20 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() baddoc = BadDoc().save() for bad in ( - 42, - 'foo', - baddoc, - DBRef(baddoc._get_collection_name(), animal.pk), - LazyReference(BadDoc, animal.pk) - ): - with self.assertRaises(ValidationError): - p = Ocurrence(person="test", animal=bad).save() + 42, + "foo", + baddoc, + DBRef(baddoc._get_collection_name(), animal.pk), + LazyReference(BadDoc, animal.pk), + ): + with pytest.raises(ValidationError): + Ocurrence(person="test", animal=bad).save() def test_lazy_reference_query_conversion(self): """Ensure that LazyReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. """ + class Member(Document): user_num = IntField(primary_key=True) @@ -172,26 +174,27 @@ class TestLazyReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id # Same thing by passing a LazyReference instance post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id def test_lazy_reference_query_conversion_dbref(self): """Ensure that LazyReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. 
""" + class Member(Document): user_num = IntField(primary_key=True) @@ -207,21 +210,21 @@ class TestLazyReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id # Same thing by passing a LazyReference instance post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id def test_lazy_reference_passthrough(self): class Animal(Document): @@ -238,21 +241,21 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() Ocurrence(animal=animal, animal_passthrough=animal).save() p = Ocurrence.objects.get() - self.assertIsInstance(p.animal, LazyReference) - with self.assertRaises(KeyError): - p.animal['name'] - with self.assertRaises(AttributeError): + assert isinstance(p.animal, LazyReference) + with pytest.raises(KeyError): + p.animal["name"] + with pytest.raises(AttributeError): p.animal.name - self.assertEqual(p.animal.pk, animal.pk) + assert p.animal.pk == animal.pk - self.assertEqual(p.animal_passthrough.name, "Leopard") - self.assertEqual(p.animal_passthrough['name'], "Leopard") + assert p.animal_passthrough.name == "Leopard" + assert p.animal_passthrough["name"] == "Leopard" # Should not be able to access referenced document's methods - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): p.animal.save - with self.assertRaises(KeyError): - p.animal['save'] + with pytest.raises(KeyError): + p.animal["save"] def test_lazy_reference_not_set(self): class Animal(Document): @@ -266,9 +269,9 @@ class 
TestLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - Ocurrence(person='foo').save() + Ocurrence(person="foo").save() p = Ocurrence.objects.get() - self.assertIs(p.animal, None) + assert p.animal is None def test_lazy_reference_equality(self): class Animal(Document): @@ -279,12 +282,12 @@ class TestLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() animalref = LazyReference(Animal, animal.pk) - self.assertEqual(animal, animalref) - self.assertEqual(animalref, animal) + assert animal == animalref + assert animalref == animal other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90")) - self.assertNotEqual(animal, other_animalref) - self.assertNotEqual(other_animalref, animal) + assert animal != other_animalref + assert other_animalref != animal def test_lazy_reference_embedded(self): class Animal(Document): @@ -303,21 +306,21 @@ class TestLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - animal1 = Animal('doggo').save() - animal2 = Animal('cheeta').save() + animal1 = Animal(name="doggo").save() + animal2 = Animal(name="cheeta").save() def check_fields_type(occ): - self.assertIsInstance(occ.direct, LazyReference) + assert isinstance(occ.direct, LazyReference) for elem in occ.in_list: - self.assertIsInstance(elem, LazyReference) - self.assertIsInstance(occ.in_embedded.direct, LazyReference) + assert isinstance(elem, LazyReference) + assert isinstance(occ.in_embedded.direct, LazyReference) for elem in occ.in_embedded.in_list: - self.assertIsInstance(elem, LazyReference) + assert isinstance(elem, LazyReference) occ = Ocurrence( in_list=[animal1, animal2], - in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, - direct=animal1 + in_embedded={"in_list": [animal1, animal2], "direct": animal1}, + direct=animal1, ).save() check_fields_type(occ) occ.reload() @@ -345,19 +348,19 @@ class 
TestGenericLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() Ocurrence(person="test", animal=animal).save() p = Ocurrence.objects.get() - self.assertIsInstance(p.animal, LazyReference) + assert isinstance(p.animal, LazyReference) fetched_animal = p.animal.fetch() - self.assertEqual(fetched_animal, animal) + assert fetched_animal == animal # `fetch` keep cache on referenced document by default... animal.tag = "not so heavy" animal.save() double_fetch = p.animal.fetch() - self.assertIs(fetched_animal, double_fetch) - self.assertEqual(double_fetch.tag, "heavy") + assert fetched_animal is double_fetch + assert double_fetch.tag == "heavy" # ...unless specified otherwise fetch_force = p.animal.fetch(force=True) - self.assertIsNot(fetch_force, fetched_animal) - self.assertEqual(fetch_force.tag, "not so heavy") + assert fetch_force is not fetched_animal + assert fetch_force.tag == "not so heavy" def test_generic_lazy_reference_choices(self): class Animal(Document): @@ -383,14 +386,14 @@ class TestGenericLazyReferenceField(MongoDBTestCase): mineral = Mineral(name="Granite").save() occ_animal = Ocurrence(living_thing=animal, thing=animal).save() - occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save() - with self.assertRaises(ValidationError): + _ = Ocurrence(living_thing=vegetal, thing=vegetal).save() + with pytest.raises(ValidationError): Ocurrence(living_thing=mineral).save() occ = Ocurrence.objects.get(living_thing=animal) - self.assertEqual(occ, occ_animal) - self.assertIsInstance(occ.thing, LazyReference) - self.assertIsInstance(occ.living_thing, LazyReference) + assert occ == occ_animal + assert isinstance(occ.thing, LazyReference) + assert isinstance(occ.living_thing, LazyReference) occ.thing = vegetal occ.living_thing = vegetal @@ -398,12 +401,12 @@ class TestGenericLazyReferenceField(MongoDBTestCase): occ.thing = mineral occ.living_thing = mineral - with self.assertRaises(ValidationError): + with 
pytest.raises(ValidationError): occ.save() def test_generic_lazy_reference_set(self): class Animal(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} name = StringField() tag = StringField() @@ -419,19 +422,21 @@ class TestGenericLazyReferenceField(MongoDBTestCase): nick = StringField() animal = Animal(name="Leopard", tag="heavy").save() - sub_animal = SubAnimal(nick='doggo', name='dog').save() + sub_animal = SubAnimal(nick="doggo", name="dog").save() for ref in ( - animal, - LazyReference(Animal, animal.pk), - {'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)}, - - sub_animal, - LazyReference(SubAnimal, sub_animal.pk), - {'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)}, - ): + animal, + LazyReference(Animal, animal.pk), + {"_cls": "Animal", "_ref": DBRef(animal._get_collection_name(), animal.pk)}, + sub_animal, + LazyReference(SubAnimal, sub_animal.pk), + { + "_cls": "SubAnimal", + "_ref": DBRef(sub_animal._get_collection_name(), sub_animal.pk), + }, + ): p = Ocurrence(person="test", animal=ref).save() p.reload() - self.assertIsInstance(p.animal, (LazyReference, Document)) + assert isinstance(p.animal, (LazyReference, Document)) p.animal.fetch() def test_generic_lazy_reference_bad_set(self): @@ -441,7 +446,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): class Ocurrence(Document): person = StringField() - animal = GenericLazyReferenceField(choices=['Animal']) + animal = GenericLazyReferenceField(choices=["Animal"]) Animal.drop_collection() Ocurrence.drop_collection() @@ -451,14 +456,9 @@ class TestGenericLazyReferenceField(MongoDBTestCase): animal = Animal(name="Leopard", tag="heavy").save() baddoc = BadDoc().save() - for bad in ( - 42, - 'foo', - baddoc, - LazyReference(BadDoc, animal.pk) - ): - with self.assertRaises(ValidationError): - p = Ocurrence(person="test", animal=bad).save() + for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)): + with 
pytest.raises(ValidationError): + Ocurrence(person="test", animal=bad).save() def test_generic_lazy_reference_query_conversion(self): class Member(Document): @@ -476,21 +476,21 @@ class TestGenericLazyReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id # Same thing by passing a LazyReference instance post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id def test_generic_lazy_reference_not_set(self): class Animal(Document): @@ -504,9 +504,9 @@ class TestGenericLazyReferenceField(MongoDBTestCase): Animal.drop_collection() Ocurrence.drop_collection() - Ocurrence(person='foo').save() + Ocurrence(person="foo").save() p = Ocurrence.objects.get() - self.assertIs(p.animal, None) + assert p.animal is None def test_generic_lazy_reference_accepts_string_instead_of_class(self): class Animal(Document): @@ -515,7 +515,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): class Ocurrence(Document): person = StringField() - animal = GenericLazyReferenceField('Animal') + animal = GenericLazyReferenceField("Animal") Animal.drop_collection() Ocurrence.drop_collection() @@ -523,7 +523,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): animal = Animal().save() Ocurrence(animal=animal).save() p = Ocurrence.objects.get() - self.assertEqual(p.animal, animal) + assert p.animal == animal def test_generic_lazy_reference_embedded(self): class Animal(Document): @@ -542,27 +542,33 @@ class TestGenericLazyReferenceField(MongoDBTestCase): Animal.drop_collection() 
Ocurrence.drop_collection() - animal1 = Animal('doggo').save() - animal2 = Animal('cheeta').save() + animal1 = Animal(name="doggo").save() + animal2 = Animal(name="cheeta").save() def check_fields_type(occ): - self.assertIsInstance(occ.direct, LazyReference) + assert isinstance(occ.direct, LazyReference) for elem in occ.in_list: - self.assertIsInstance(elem, LazyReference) - self.assertIsInstance(occ.in_embedded.direct, LazyReference) + assert isinstance(elem, LazyReference) + assert isinstance(occ.in_embedded.direct, LazyReference) for elem in occ.in_embedded.in_list: - self.assertIsInstance(elem, LazyReference) + assert isinstance(elem, LazyReference) occ = Ocurrence( in_list=[animal1, animal2], - in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, - direct=animal1 + in_embedded={"in_list": [animal1, animal2], "direct": animal1}, + direct=animal1, ).save() check_fields_type(occ) occ.reload() check_fields_type(occ) - animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)} - animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)} + animal1_ref = { + "_cls": "Animal", + "_ref": DBRef(animal1._get_collection_name(), animal1.pk), + } + animal2_ref = { + "_cls": "Animal", + "_ref": DBRef(animal2._get_collection_name(), animal2.pk), + } occ.direct = animal1_ref occ.in_list = [animal1_ref, animal2_ref] occ.in_embedded.direct = animal1_ref diff --git a/tests/fields/test_long_field.py b/tests/fields/test_long_field.py index 3f307809..330051c3 100644 --- a/tests/fields/test_long_field.py +++ b/tests/fields/test_long_field.py @@ -1,10 +1,5 @@ -# -*- coding: utf-8 -*- -import six - -try: - from bson.int64 import Int64 -except ImportError: - Int64 = long +from bson.int64 import Int64 +import pytest from mongoengine import * from mongoengine.connection import get_db @@ -13,23 +8,26 @@ from tests.utils import MongoDBTestCase class TestLongField(MongoDBTestCase): - def 
test_long_field_is_considered_as_int64(self): """ Tests that long fields are stored as long in mongo, even if long value is small enough to be an int. """ + class TestLongFieldConsideredAsInt64(Document): some_long = LongField() doc = TestLongFieldConsideredAsInt64(some_long=42).save() db = get_db() - self.assertIsInstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64) - self.assertIsInstance(doc.some_long, six.integer_types) + assert isinstance( + db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64 + ) + assert isinstance(doc.some_long, int) def test_long_validation(self): """Ensure that invalid values cannot be assigned to long fields. """ + class TestDocument(Document): value = LongField(min_value=0, max_value=110) @@ -38,11 +36,14 @@ class TestLongField(MongoDBTestCase): doc.validate() doc.value = -1 - self.assertRaises(ValidationError, doc.validate) + with pytest.raises(ValidationError): + doc.validate() doc.value = 120 - self.assertRaises(ValidationError, doc.validate) - doc.value = 'ten' - self.assertRaises(ValidationError, doc.validate) + with pytest.raises(ValidationError): + doc.validate() + doc.value = "ten" + with pytest.raises(ValidationError): + doc.validate() def test_long_ne_operator(self): class TestDocument(Document): @@ -53,4 +54,4 @@ class TestLongField(MongoDBTestCase): TestDocument(long_fld=None).save() TestDocument(long_fld=1).save() - self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count()) + assert 1 == TestDocument.objects(long_fld__ne=None).count() diff --git a/tests/fields/test_map_field.py b/tests/fields/test_map_field.py index cb27cfff..8b8b1c46 100644 --- a/tests/fields/test_map_field.py +++ b/tests/fields/test_map_field.py @@ -1,29 +1,31 @@ # -*- coding: utf-8 -*- import datetime -from mongoengine import * +import pytest +from mongoengine import * from tests.utils import MongoDBTestCase class TestMapField(MongoDBTestCase): - def test_mapfield(self): """Ensure that the MapField 
handles the declared type.""" + class Simple(Document): mapping = MapField(IntField()) Simple.drop_collection() e = Simple() - e.mapping['someint'] = 1 + e.mapping["someint"] = 1 e.save() - with self.assertRaises(ValidationError): - e.mapping['somestring'] = "abc" + with pytest.raises(ValidationError): + e.mapping["somestring"] = "abc" e.save() - with self.assertRaises(ValidationError): + with pytest.raises(ValidationError): + class NoDeclaredType(Document): mapping = MapField() @@ -45,38 +47,37 @@ class TestMapField(MongoDBTestCase): Extensible.drop_collection() e = Extensible() - e.mapping['somestring'] = StringSetting(value='foo') - e.mapping['someint'] = IntegerSetting(value=42) + e.mapping["somestring"] = StringSetting(value="foo") + e.mapping["someint"] = IntegerSetting(value=42) e.save() e2 = Extensible.objects.get(id=e.id) - self.assertIsInstance(e2.mapping['somestring'], StringSetting) - self.assertIsInstance(e2.mapping['someint'], IntegerSetting) + assert isinstance(e2.mapping["somestring"], StringSetting) + assert isinstance(e2.mapping["someint"], IntegerSetting) - with self.assertRaises(ValidationError): - e.mapping['someint'] = 123 + with pytest.raises(ValidationError): + e.mapping["someint"] = 123 e.save() def test_embedded_mapfield_db_field(self): class Embedded(EmbeddedDocument): - number = IntField(default=0, db_field='i') + number = IntField(default=0, db_field="i") class Test(Document): - my_map = MapField(field=EmbeddedDocumentField(Embedded), - db_field='x') + my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field="x") Test.drop_collection() test = Test() - test.my_map['DICTIONARY_KEY'] = Embedded(number=1) + test.my_map["DICTIONARY_KEY"] = Embedded(number=1) test.save() Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1) test = Test.objects.get() - self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2) + assert test.my_map["DICTIONARY_KEY"].number == 2 doc = self.db.test.find_one() - 
self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) + assert doc["x"]["DICTIONARY_KEY"]["i"] == 2 def test_mapfield_numerical_index(self): """Ensure that MapField accept numeric strings as indexes.""" @@ -90,9 +91,9 @@ class TestMapField(MongoDBTestCase): Test.drop_collection() test = Test() - test.my_map['1'] = Embedded(name='test') + test.my_map["1"] = Embedded(name="test") test.save() - test.my_map['1'].name = 'test updated' + test.my_map["1"].name = "test updated" test.save() def test_map_field_lookup(self): @@ -110,15 +111,20 @@ class TestMapField(MongoDBTestCase): actions = MapField(EmbeddedDocumentField(Action)) Log.drop_collection() - Log(name="wilson", visited={'friends': datetime.datetime.now()}, - actions={'friends': Action(operation='drink', object='beer')}).save() + Log( + name="wilson", + visited={"friends": datetime.datetime.now()}, + actions={"friends": Action(operation="drink", object="beer")}, + ).save() - self.assertEqual(1, Log.objects( - visited__friends__exists=True).count()) + assert 1 == Log.objects(visited__friends__exists=True).count() - self.assertEqual(1, Log.objects( - actions__friends__operation='drink', - actions__friends__object='beer').count()) + assert ( + 1 + == Log.objects( + actions__friends__operation="drink", actions__friends__object="beer" + ).count() + ) def test_map_field_unicode(self): class Info(EmbeddedDocument): @@ -130,15 +136,11 @@ class TestMapField(MongoDBTestCase): BlogPost.drop_collection() - tree = BlogPost(info_dict={ - u"éééé": { - 'description': u"VALUE: éééé" - } - }) + tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}}) tree.save() - self.assertEqual( - BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description, - u"VALUE: éééé" + assert ( + BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description + == u"VALUE: éééé" ) diff --git a/tests/fields/test_reference_field.py b/tests/fields/test_reference_field.py index 5e1fc605..949eac67 100644 --- 
a/tests/fields/test_reference_field.py +++ b/tests/fields/test_reference_field.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- -from bson import SON, DBRef +from bson import DBRef, SON +import pytest from mongoengine import * - from tests.utils import MongoDBTestCase @@ -24,19 +24,22 @@ class TestReferenceField(MongoDBTestCase): # Make sure ReferenceField only accepts a document class or a string # with a document class name. - self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument) + with pytest.raises(ValidationError): + ReferenceField(EmbeddedDocument) - user = User(name='Test User') + user = User(name="Test User") # Ensure that the referenced object must have been saved - post1 = BlogPost(content='Chips and gravy taste good.') + post1 = BlogPost(content="Chips and gravy taste good.") post1.author = user - self.assertRaises(ValidationError, post1.save) + with pytest.raises(ValidationError): + post1.save() # Check that an invalid object type cannot be used - post2 = BlogPost(content='Chips and chilli taste good.') + post2 = BlogPost(content="Chips and chilli taste good.") post1.author = post2 - self.assertRaises(ValidationError, post1.validate) + with pytest.raises(ValidationError): + post1.validate() # Ensure ObjectID's are accepted as references user_object_id = user.pk @@ -52,42 +55,27 @@ class TestReferenceField(MongoDBTestCase): # Make sure referencing a saved document of the *wrong* type fails post2.save() post1.author = post2 - self.assertRaises(ValidationError, post1.validate) - - def test_objectid_reference_fields(self): - """Make sure storing Object ID references works.""" - - class Person(Document): - name = StringField() - parent = ReferenceField('self') - - Person.drop_collection() - - p1 = Person(name="John").save() - Person(name="Ross", parent=p1.pk).save() - - p = Person.objects.get(name="Ross") - self.assertEqual(p.parent, p1) + with pytest.raises(ValidationError): + post1.validate() def test_dbref_reference_fields(self): """Make sure 
storing references as bson.dbref.DBRef works.""" class Person(Document): name = StringField() - parent = ReferenceField('self', dbref=True) + parent = ReferenceField("self", dbref=True) Person.drop_collection() p1 = Person(name="John").save() Person(name="Ross", parent=p1).save() - self.assertEqual( - Person._get_collection().find_one({'name': 'Ross'})['parent'], - DBRef('person', p1.pk) + assert Person._get_collection().find_one({"name": "Ross"})["parent"] == DBRef( + "person", p1.pk ) p = Person.objects.get(name="Ross") - self.assertEqual(p.parent, p1) + assert p.parent == p1 def test_dbref_to_mongo(self): """Make sure that calling to_mongo on a ReferenceField which @@ -97,21 +85,15 @@ class TestReferenceField(MongoDBTestCase): class Person(Document): name = StringField() - parent = ReferenceField('self', dbref=False) + parent = ReferenceField("self", dbref=False) - p = Person( - name='Steve', - parent=DBRef('person', 'abcdefghijklmnop') - ) - self.assertEqual(p.to_mongo(), SON([ - ('name', u'Steve'), - ('parent', 'abcdefghijklmnop') - ])) + p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop")) + assert p.to_mongo() == SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")]) def test_objectid_reference_fields(self): class Person(Document): name = StringField() - parent = ReferenceField('self', dbref=False) + parent = ReferenceField("self", dbref=False) Person.drop_collection() @@ -119,18 +101,19 @@ class TestReferenceField(MongoDBTestCase): Person(name="Ross", parent=p1).save() col = Person._get_collection() - data = col.find_one({'name': 'Ross'}) - self.assertEqual(data['parent'], p1.pk) + data = col.find_one({"name": "Ross"}) + assert data["parent"] == p1.pk p = Person.objects.get(name="Ross") - self.assertEqual(p.parent, p1) + assert p.parent == p1 def test_undefined_reference(self): """Ensure that ReferenceFields may reference undefined Documents. 
""" + class Product(Document): name = StringField() - company = ReferenceField('Company') + company = ReferenceField("Company") class Company(Document): name = StringField() @@ -138,28 +121,29 @@ class TestReferenceField(MongoDBTestCase): Product.drop_collection() Company.drop_collection() - ten_gen = Company(name='10gen') + ten_gen = Company(name="10gen") ten_gen.save() - mongodb = Product(name='MongoDB', company=ten_gen) + mongodb = Product(name="MongoDB", company=ten_gen) mongodb.save() - me = Product(name='MongoEngine') + me = Product(name="MongoEngine") me.save() obj = Product.objects(company=ten_gen).first() - self.assertEqual(obj, mongodb) - self.assertEqual(obj.company, ten_gen) + assert obj == mongodb + assert obj.company == ten_gen obj = Product.objects(company=None).first() - self.assertEqual(obj, me) + assert obj == me obj = Product.objects.get(company=None) - self.assertEqual(obj, me) + assert obj == me def test_reference_query_conversion(self): """Ensure that ReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. """ + class Member(Document): user_num = IntField(primary_key=True) @@ -175,22 +159,23 @@ class TestReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id def test_reference_query_conversion_dbref(self): """Ensure that ReferenceFields can be queried using objects and values of the type of the primary key of the referenced object. 
""" + class Member(Document): user_num = IntField(primary_key=True) @@ -206,14 +191,14 @@ class TestReferenceField(MongoDBTestCase): m2 = Member(user_num=2) m2.save() - post1 = BlogPost(title='post 1', author=m1) + post1 = BlogPost(title="post 1", author=m1) post1.save() - post2 = BlogPost(title='post 2', author=m2) + post2 = BlogPost(title="post 2", author=m2) post2.save() post = BlogPost.objects(author=m1).first() - self.assertEqual(post.id, post1.id) + assert post.id == post1.id post = BlogPost.objects(author=m2).first() - self.assertEqual(post.id, post2.id) + assert post.id == post2.id diff --git a/tests/fields/test_sequence_field.py b/tests/fields/test_sequence_field.py index 6124c65e..81d648fd 100644 --- a/tests/fields/test_sequence_field.py +++ b/tests/fields/test_sequence_field.py @@ -11,79 +11,79 @@ class TestSequenceField(MongoDBTestCase): id = SequenceField(primary_key=True) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + assert ids == list(range(1, 11)) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 10 Person.id.set_next_value(1000) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 1000) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 1000 def test_sequence_field_get_next_value(self): class Person(Document): id = SequenceField(primary_key=True) name = StringField() - self.db['mongoengine.counters'].drop() + 
self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - self.assertEqual(Person.id.get_next_value(), 11) - self.db['mongoengine.counters'].drop() + assert Person.id.get_next_value() == 11 + self.db["mongoengine.counters"].drop() - self.assertEqual(Person.id.get_next_value(), 1) + assert Person.id.get_next_value() == 1 class Person(Document): id = SequenceField(primary_key=True, value_decorator=str) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - self.assertEqual(Person.id.get_next_value(), '11') - self.db['mongoengine.counters'].drop() + assert Person.id.get_next_value() == "11" + self.db["mongoengine.counters"].drop() - self.assertEqual(Person.id.get_next_value(), '1') + assert Person.id.get_next_value() == "1" def test_sequence_field_sequence_name(self): class Person(Document): - id = SequenceField(primary_key=True, sequence_name='jelly') + id = SequenceField(primary_key=True, sequence_name="jelly") name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + assert ids == list(range(1, 11)) - c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) + assert c["next"] == 10 Person.id.set_next_value(1000) - c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) - self.assertEqual(c['next'], 1000) + c = self.db["mongoengine.counters"].find_one({"_id": 
"jelly.id"}) + assert c["next"] == 1000 def test_multiple_sequence_fields(self): class Person(Document): @@ -91,56 +91,56 @@ class TestSequenceField(MongoDBTestCase): counter = SequenceField() name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): Person(name="Person %s" % x).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + assert ids == list(range(1, 11)) counters = [i.counter for i in Person.objects] - self.assertEqual(counters, range(1, 11)) + assert counters == list(range(1, 11)) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 10 Person.id.set_next_value(1000) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 1000) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 1000 Person.counter.set_next_value(999) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'}) - self.assertEqual(c['next'], 999) + c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"}) + assert c["next"] == 999 def test_sequence_fields_reload(self): class Animal(Document): counter = SequenceField() name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Animal.drop_collection() a = Animal(name="Boi").save() - self.assertEqual(a.counter, 1) + assert a.counter == 1 a.reload() - self.assertEqual(a.counter, 1) + assert a.counter == 1 a.counter = None - self.assertEqual(a.counter, 2) + assert a.counter == 2 a.save() - self.assertEqual(a.counter, 2) + assert a.counter == 2 
a = Animal.objects.first() - self.assertEqual(a.counter, 2) + assert a.counter == 2 a.reload() - self.assertEqual(a.counter, 2) + assert a.counter == 2 def test_multiple_sequence_fields_on_docs(self): class Animal(Document): @@ -151,7 +151,7 @@ class TestSequenceField(MongoDBTestCase): id = SequenceField(primary_key=True) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Animal.drop_collection() Person.drop_collection() @@ -159,44 +159,44 @@ class TestSequenceField(MongoDBTestCase): Animal(name="Animal %s" % x).save() Person(name="Person %s" % x).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 10 - c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"}) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, range(1, 11)) + assert ids == list(range(1, 11)) id = [i.id for i in Animal.objects] - self.assertEqual(id, range(1, 11)) + assert id == list(range(1, 11)) - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 10 - c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"}) + assert c["next"] == 10 def test_sequence_field_value_decorator(self): class Person(Document): id = SequenceField(primary_key=True, value_decorator=str) name = StringField() - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Person.drop_collection() for x in range(10): p = Person(name="Person %s" % x) p.save() - c = self.db['mongoengine.counters'].find_one({'_id': 
'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 10 ids = [i.id for i in Person.objects] - self.assertEqual(ids, map(str, range(1, 11))) + assert ids == [str(i) for i in range(1, 11)] - c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) - self.assertEqual(c['next'], 10) + c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) + assert c["next"] == 10 def test_embedded_sequence_field(self): class Comment(EmbeddedDocument): @@ -207,23 +207,27 @@ class TestSequenceField(MongoDBTestCase): title = StringField(required=True) comments = ListField(EmbeddedDocumentField(Comment)) - self.db['mongoengine.counters'].drop() + self.db["mongoengine.counters"].drop() Post.drop_collection() - Post(title="MongoEngine", - comments=[Comment(content="NoSQL Rocks"), - Comment(content="MongoEngine Rocks")]).save() - c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'}) - self.assertEqual(c['next'], 2) + Post( + title="MongoEngine", + comments=[ + Comment(content="NoSQL Rocks"), + Comment(content="MongoEngine Rocks"), + ], + ).save() + c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"}) + assert c["next"] == 2 post = Post.objects.first() - self.assertEqual(1, post.comments[0].id) - self.assertEqual(2, post.comments[1].id) + assert 1 == post.comments[0].id + assert 2 == post.comments[1].id def test_inherited_sequencefield(self): class Base(Document): name = StringField() counter = SequenceField() - meta = {'abstract': True} + meta = {"abstract": True} class Foo(Base): pass @@ -231,24 +235,25 @@ class TestSequenceField(MongoDBTestCase): class Bar(Base): pass - bar = Bar(name='Bar') + bar = Bar(name="Bar") bar.save() - foo = Foo(name='Foo') + foo = Foo(name="Foo") foo.save() - self.assertTrue('base.counter' in - self.db['mongoengine.counters'].find().distinct('_id')) - self.assertFalse(('foo.counter' or 'bar.counter') in - 
self.db['mongoengine.counters'].find().distinct('_id')) - self.assertNotEqual(foo.counter, bar.counter) - self.assertEqual(foo._fields['counter'].owner_document, Base) - self.assertEqual(bar._fields['counter'].owner_document, Base) + assert "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") + assert not ( + ("foo.counter" or "bar.counter") + in self.db["mongoengine.counters"].find().distinct("_id") + ) + assert foo.counter != bar.counter + assert foo._fields["counter"].owner_document == Base + assert bar._fields["counter"].owner_document == Base def test_no_inherited_sequencefield(self): class Base(Document): name = StringField() - meta = {'abstract': True} + meta = {"abstract": True} class Foo(Base): counter = SequenceField() @@ -256,16 +261,18 @@ class TestSequenceField(MongoDBTestCase): class Bar(Base): counter = SequenceField() - bar = Bar(name='Bar') + bar = Bar(name="Bar") bar.save() - foo = Foo(name='Foo') + foo = Foo(name="Foo") foo.save() - self.assertFalse('base.counter' in - self.db['mongoengine.counters'].find().distinct('_id')) - self.assertTrue(('foo.counter' and 'bar.counter') in - self.db['mongoengine.counters'].find().distinct('_id')) - self.assertEqual(foo.counter, bar.counter) - self.assertEqual(foo._fields['counter'].owner_document, Foo) - self.assertEqual(bar._fields['counter'].owner_document, Bar) + assert "base.counter" not in self.db["mongoengine.counters"].find().distinct( + "_id" + ) + existing_counters = self.db["mongoengine.counters"].find().distinct("_id") + assert "foo.counter" in existing_counters + assert "bar.counter" in existing_counters + assert foo.counter == bar.counter + assert foo._fields["counter"].owner_document == Foo + assert bar._fields["counter"].owner_document == Bar diff --git a/tests/fields/test_url_field.py b/tests/fields/test_url_field.py index ddbf707e..c449e467 100644 --- a/tests/fields/test_url_field.py +++ b/tests/fields/test_url_field.py @@ -1,53 +1,60 @@ # -*- coding: utf-8 -*- -from 
mongoengine import * +import pytest +from mongoengine import * from tests.utils import MongoDBTestCase class TestURLField(MongoDBTestCase): - def test_validation(self): """Ensure that URLFields validate urls properly.""" + class Link(Document): url = URLField() link = Link() - link.url = 'google' - self.assertRaises(ValidationError, link.validate) + link.url = "google" + with pytest.raises(ValidationError): + link.validate() - link.url = 'http://www.google.com:8080' + link.url = "http://www.google.com:8080" link.validate() def test_unicode_url_validation(self): """Ensure unicode URLs are validated properly.""" + class Link(Document): url = URLField() link = Link() - link.url = u'http://привет.com' + link.url = u"http://привет.com" # TODO fix URL validation - this *IS* a valid URL # For now we just want to make sure that the error message is correct - with self.assertRaises(ValidationError) as ctx_err: + with pytest.raises(ValidationError) as exc_info: link.validate() - self.assertEqual(unicode(ctx_err.exception), - u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])") + assert ( + str(exc_info.value) + == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" + ) def test_url_scheme_validation(self): """Ensure that URLFields validate urls with specific schemes properly. 
""" + class Link(Document): url = URLField() class SchemeLink(Document): - url = URLField(schemes=['ws', 'irc']) + url = URLField(schemes=["ws", "irc"]) link = Link() - link.url = 'ws://google.com' - self.assertRaises(ValidationError, link.validate) + link.url = "ws://google.com" + with pytest.raises(ValidationError): + link.validate() scheme_link = SchemeLink() - scheme_link.url = 'ws://google.com' + scheme_link.url = "ws://google.com" scheme_link.validate() def test_underscore_allowed_in_domains_names(self): @@ -55,5 +62,5 @@ class TestURLField(MongoDBTestCase): url = URLField() link = Link() - link.url = 'https://san_leandro-ca.geebo.com' + link.url = "https://san_leandro-ca.geebo.com" link.validate() diff --git a/tests/fields/test_uuid_field.py b/tests/fields/test_uuid_field.py index 7b7faaf2..21b7a090 100644 --- a/tests/fields/test_uuid_field.py +++ b/tests/fields/test_uuid_field.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- import uuid -from mongoengine import * +import pytest +from mongoengine import * from tests.utils import MongoDBTestCase, get_as_pymongo @@ -14,12 +15,7 @@ class TestUUIDField(MongoDBTestCase): def test_storage(self): uid = uuid.uuid4() person = Person(api_key=uid).save() - self.assertEqual( - get_as_pymongo(person), - {'_id': person.id, - 'api_key': str(uid) - } - ) + assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)} def test_field_string(self): """Test UUID fields storing as String @@ -28,8 +24,8 @@ class TestUUIDField(MongoDBTestCase): uu = uuid.uuid4() Person(api_key=uu).save() - self.assertEqual(1, Person.objects(api_key=uu).count()) - self.assertEqual(uu, Person.objects.first().api_key) + assert 1 == Person.objects(api_key=uu).count() + assert uu == Person.objects.first().api_key person = Person() valid = (uuid.uuid4(), uuid.uuid1()) @@ -37,11 +33,14 @@ class TestUUIDField(MongoDBTestCase): person.api_key = api_key person.validate() - invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', - 
'9d159858-549b-4975-9f98-dd2f987c113') + invalid = ( + "9d159858-549b-4975-9f98-dd2f987c113g", + "9d159858-549b-4975-9f98-dd2f987c113", + ) for api_key in invalid: person.api_key = api_key - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() def test_field_binary(self): """Test UUID fields storing as Binary object.""" @@ -49,8 +48,8 @@ class TestUUIDField(MongoDBTestCase): uu = uuid.uuid4() Person(api_key=uu).save() - self.assertEqual(1, Person.objects(api_key=uu).count()) - self.assertEqual(uu, Person.objects.first().api_key) + assert 1 == Person.objects(api_key=uu).count() + assert uu == Person.objects.first().api_key person = Person() valid = (uuid.uuid4(), uuid.uuid1()) @@ -58,8 +57,11 @@ class TestUUIDField(MongoDBTestCase): person.api_key = api_key person.validate() - invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', - '9d159858-549b-4975-9f98-dd2f987c113') + invalid = ( + "9d159858-549b-4975-9f98-dd2f987c113g", + "9d159858-549b-4975-9f98-dd2f987c113", + ) for api_key in invalid: person.api_key = api_key - self.assertRaises(ValidationError, person.validate) + with pytest.raises(ValidationError): + person.validate() diff --git a/tests/fixtures.py b/tests/fixtures.py index b8303b99..59fc3bf3 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -11,7 +11,7 @@ class PickleEmbedded(EmbeddedDocument): class PickleTest(Document): number = IntField() - string = StringField(choices=(('One', '1'), ('Two', '2'))) + string = StringField(choices=(("One", "1"), ("Two", "2"))) embedded = EmbeddedDocumentField(PickleEmbedded) lists = ListField(StringField()) photo = FileField() @@ -19,7 +19,7 @@ class PickleTest(Document): class NewDocumentPickleTest(Document): number = IntField() - string = StringField(choices=(('One', '1'), ('Two', '2'))) + string = StringField(choices=(("One", "1"), ("Two", "2"))) embedded = EmbeddedDocumentField(PickleEmbedded) lists = ListField(StringField()) photo = FileField() @@ -36,17 
+36,17 @@ class PickleDynamicTest(DynamicDocument): class PickleSignalsTest(Document): number = IntField() - string = StringField(choices=(('One', '1'), ('Two', '2'))) + string = StringField(choices=(("One", "1"), ("Two", "2"))) embedded = EmbeddedDocumentField(PickleEmbedded) lists = ListField(StringField()) @classmethod def post_save(self, sender, document, created, **kwargs): - pickled = pickle.dumps(document) + pickle.dumps(document) @classmethod def post_delete(self, sender, document, **kwargs): - pickled = pickle.dumps(document) + pickle.dumps(document) signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) @@ -58,4 +58,4 @@ class Mixin(object): class Base(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} diff --git a/tests/queryset/__init__.py b/tests/queryset/__init__.py index 31016966..e69de29b 100644 --- a/tests/queryset/__init__.py +++ b/tests/queryset/__init__.py @@ -1,6 +0,0 @@ -from .transform import * -from .field_list import * -from .queryset import * -from .visitor import * -from .geo import * -from .modify import * diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py deleted file mode 100644 index 250e2601..00000000 --- a/tests/queryset/field_list.py +++ /dev/null @@ -1,440 +0,0 @@ -import unittest - -from mongoengine import * -from mongoengine.queryset import QueryFieldList - -__all__ = ("QueryFieldListTest", "OnlyExcludeAllTest") - - -class QueryFieldListTest(unittest.TestCase): - - def test_empty(self): - q = QueryFieldList() - self.assertFalse(q) - - q = QueryFieldList(always_include=['_cls']) - self.assertFalse(q) - - def test_include_include(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True) - self.assertEqual(q.as_dict(), {'a': 1, 'b': 1}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1}) - - def 
test_include_exclude(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'a': 1, 'b': 1}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': 1}) - - def test_exclude_exclude(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': 0, 'b': 0}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0}) - - def test_exclude_include(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) - self.assertEqual(q.as_dict(), {'a': 0, 'b': 0}) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'c': 1}) - - def test_always_include(self): - q = QueryFieldList(always_include=['x', 'y']) - q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1}) - - def test_reset(self): - q = QueryFieldList(always_include=['x', 'y']) - q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1}) - q.reset() - self.assertFalse(q) - q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) - self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1}) - - def test_using_a_slice(self): - q = QueryFieldList() - q += QueryFieldList(fields=['a'], value={"$slice": 5}) - self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) - - -class OnlyExcludeAllTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - - class Person(Document): - name = StringField() - age = IntField() - meta = {'allow_inheritance': True} - - 
Person.drop_collection() - self.Person = Person - - def test_mixing_only_exclude(self): - - class MyDoc(Document): - a = StringField() - b = StringField() - c = StringField() - d = StringField() - e = StringField() - f = StringField() - - include = ['a', 'b', 'c', 'd', 'e'] - exclude = ['d', 'e'] - only = ['b', 'c'] - - qs = MyDoc.objects.fields(**{i: 1 for i in include}) - self.assertEqual(qs._loaded_fields.as_dict(), - {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) - qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) - qs = qs.exclude(*exclude) - self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) - - qs = MyDoc.objects.fields(**{i: 1 for i in include}) - qs = qs.exclude(*exclude) - self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) - qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) - - qs = MyDoc.objects.exclude(*exclude) - qs = qs.fields(**{i: 1 for i in include}) - self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) - qs = qs.only(*only) - self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) - - def test_slicing(self): - - class MyDoc(Document): - a = ListField() - b = ListField() - c = ListField() - d = ListField() - e = ListField() - f = ListField() - - include = ['a', 'b', 'c', 'd', 'e'] - exclude = ['d', 'e'] - only = ['b', 'c'] - - qs = MyDoc.objects.fields(**{i: 1 for i in include}) - qs = qs.exclude(*exclude) - qs = qs.only(*only) - qs = qs.fields(slice__b=5) - self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}, 'c': 1}) - - qs = qs.fields(slice__c=[5, 1]) - self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}}) - - qs = qs.exclude('c') - self.assertEqual(qs._loaded_fields.as_dict(), - {'b': {'$slice': 5}}) - - def test_mix_slice_with_other_fields(self): - class MyDoc(Document): - a = ListField() - b = ListField() - c = ListField() - - qs = MyDoc.objects.fields(a=1, 
b=0, slice__c=2) - self.assertEqual(qs._loaded_fields.as_dict(), - {'c': {'$slice': 2}, 'a': 1}) - - def test_only(self): - """Ensure that QuerySet.only only returns the requested fields. - """ - person = self.Person(name='test', age=25) - person.save() - - obj = self.Person.objects.only('name').get() - self.assertEqual(obj.name, person.name) - self.assertEqual(obj.age, None) - - obj = self.Person.objects.only('age').get() - self.assertEqual(obj.name, None) - self.assertEqual(obj.age, person.age) - - obj = self.Person.objects.only('name', 'age').get() - self.assertEqual(obj.name, person.name) - self.assertEqual(obj.age, person.age) - - obj = self.Person.objects.only(*('id', 'name',)).get() - self.assertEqual(obj.name, person.name) - self.assertEqual(obj.age, None) - - # Check polymorphism still works - class Employee(self.Person): - salary = IntField(db_field='wage') - - employee = Employee(name='test employee', age=40, salary=30000) - employee.save() - - obj = self.Person.objects(id=employee.id).only('age').get() - self.assertIsInstance(obj, Employee) - - # Check field names are looked up properly - obj = Employee.objects(id=employee.id).only('salary').get() - self.assertEqual(obj.salary, employee.salary) - self.assertEqual(obj.name, None) - - def test_only_with_subfields(self): - class User(EmbeddedDocument): - name = StringField() - email = StringField() - - class Comment(EmbeddedDocument): - title = StringField() - text = StringField() - - class VariousData(EmbeddedDocument): - some = BooleanField() - - class BlogPost(Document): - content = StringField() - author = EmbeddedDocumentField(User) - comments = ListField(EmbeddedDocumentField(Comment)) - various = MapField(field=EmbeddedDocumentField(VariousData)) - - BlogPost.drop_collection() - - post = BlogPost(content='Had a good coffee today...', various={'test_dynamic': {'some': True}}) - post.author = User(name='Test User') - post.comments = [Comment(title='I aggree', text='Great post!'), 
Comment(title='Coffee', text='I hate coffee')] - post.save() - - obj = BlogPost.objects.only('author.name',).get() - self.assertEqual(obj.content, None) - self.assertEqual(obj.author.email, None) - self.assertEqual(obj.author.name, 'Test User') - self.assertEqual(obj.comments, []) - - obj = BlogPost.objects.only('various.test_dynamic.some').get() - self.assertEqual(obj.various["test_dynamic"].some, True) - - obj = BlogPost.objects.only('content', 'comments.title',).get() - self.assertEqual(obj.content, 'Had a good coffee today...') - self.assertEqual(obj.author, None) - self.assertEqual(obj.comments[0].title, 'I aggree') - self.assertEqual(obj.comments[1].title, 'Coffee') - self.assertEqual(obj.comments[0].text, None) - self.assertEqual(obj.comments[1].text, None) - - obj = BlogPost.objects.only('comments',).get() - self.assertEqual(obj.content, None) - self.assertEqual(obj.author, None) - self.assertEqual(obj.comments[0].title, 'I aggree') - self.assertEqual(obj.comments[1].title, 'Coffee') - self.assertEqual(obj.comments[0].text, 'Great post!') - self.assertEqual(obj.comments[1].text, 'I hate coffee') - - BlogPost.drop_collection() - - def test_exclude(self): - class User(EmbeddedDocument): - name = StringField() - email = StringField() - - class Comment(EmbeddedDocument): - title = StringField() - text = StringField() - - class BlogPost(Document): - content = StringField() - author = EmbeddedDocumentField(User) - comments = ListField(EmbeddedDocumentField(Comment)) - - BlogPost.drop_collection() - - post = BlogPost(content='Had a good coffee today...') - post.author = User(name='Test User') - post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] - post.save() - - obj = BlogPost.objects.exclude('author', 'comments.text').get() - self.assertEqual(obj.author, None) - self.assertEqual(obj.content, 'Had a good coffee today...') - self.assertEqual(obj.comments[0].title, 'I aggree') - 
self.assertEqual(obj.comments[0].text, None) - - BlogPost.drop_collection() - - def test_exclude_only_combining(self): - class Attachment(EmbeddedDocument): - name = StringField() - content = StringField() - - class Email(Document): - sender = StringField() - to = StringField() - subject = StringField() - body = StringField() - content_type = StringField() - attachments = ListField(EmbeddedDocumentField(Attachment)) - - Email.drop_collection() - email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') - email.attachments = [ - Attachment(name='file1.doc', content='ABC'), - Attachment(name='file2.doc', content='XYZ'), - ] - email.save() - - obj = Email.objects.exclude('content_type').exclude('body').get() - self.assertEqual(obj.sender, 'me') - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, 'From Russia with Love') - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) - - obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get() - self.assertEqual(obj.sender, None) - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, None) - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) - - obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get() - self.assertEqual(obj.attachments[0].name, 'file1.doc') - self.assertEqual(obj.attachments[0].content, None) - self.assertEqual(obj.sender, None) - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, None) - self.assertEqual(obj.body, None) - self.assertEqual(obj.content_type, None) - - Email.drop_collection() - - def test_all_fields(self): - - class Email(Document): - sender = StringField() - to = StringField() - subject = StringField() - body = StringField() - content_type = StringField() - - Email.drop_collection() - - email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') 
- email.save() - - obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get() - self.assertEqual(obj.sender, 'me') - self.assertEqual(obj.to, 'you') - self.assertEqual(obj.subject, 'From Russia with Love') - self.assertEqual(obj.body, 'Hello!') - self.assertEqual(obj.content_type, 'text/plain') - - Email.drop_collection() - - def test_slicing_fields(self): - """Ensure that query slicing an array works. - """ - class Numbers(Document): - n = ListField(IntField()) - - Numbers.drop_collection() - - numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1]) - numbers.save() - - # first three - numbers = Numbers.objects.fields(slice__n=3).get() - self.assertEqual(numbers.n, [0, 1, 2]) - - # last three - numbers = Numbers.objects.fields(slice__n=-3).get() - self.assertEqual(numbers.n, [-3, -2, -1]) - - # skip 2, limit 3 - numbers = Numbers.objects.fields(slice__n=[2, 3]).get() - self.assertEqual(numbers.n, [2, 3, 4]) - - # skip to fifth from last, limit 4 - numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2]) - - # skip to fifth from last, limit 10 - numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) - - # skip to fifth from last, limit 10 dict method - numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() - self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) - - def test_slicing_nested_fields(self): - """Ensure that query slicing an embedded array works. 
- """ - - class EmbeddedNumber(EmbeddedDocument): - n = ListField(IntField()) - - class Numbers(Document): - embedded = EmbeddedDocumentField(EmbeddedNumber) - - Numbers.drop_collection() - - numbers = Numbers() - numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1]) - numbers.save() - - # first three - numbers = Numbers.objects.fields(slice__embedded__n=3).get() - self.assertEqual(numbers.embedded.n, [0, 1, 2]) - - # last three - numbers = Numbers.objects.fields(slice__embedded__n=-3).get() - self.assertEqual(numbers.embedded.n, [-3, -2, -1]) - - # skip 2, limit 3 - numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() - self.assertEqual(numbers.embedded.n, [2, 3, 4]) - - # skip to fifth from last, limit 4 - numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2]) - - # skip to fifth from last, limit 10 - numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) - - # skip to fifth from last, limit 10 dict method - numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() - self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) - - def test_exclude_from_subclasses_docs(self): - - class Base(Document): - username = StringField() - - meta = {'allow_inheritance': True} - - class Anon(Base): - anon = BooleanField() - - class User(Base): - password = StringField() - wibble = StringField() - - Base.drop_collection() - User(username="mongodb", password="secret").save() - - user = Base.objects().exclude("password", "wibble").first() - self.assertEqual(user.password, None) - - self.assertRaises(LookUpError, Base.objects.exclude, "made_up") - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/queryset/test_field_list.py b/tests/queryset/test_field_list.py new file mode 100644 index 00000000..be7903fd --- /dev/null +++ b/tests/queryset/test_field_list.py @@ -0,0 +1,467 @@ 
+import unittest + +import pytest + +from mongoengine import * +from mongoengine.queryset import QueryFieldList + + +class TestQueryFieldList: + def test_empty(self): + q = QueryFieldList() + assert not q + + q = QueryFieldList(always_include=["_cls"]) + assert not q + + def test_include_include(self): + q = QueryFieldList() + q += QueryFieldList( + fields=["a", "b"], value=QueryFieldList.ONLY, _only_called=True + ) + assert q.as_dict() == {"a": 1, "b": 1} + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + assert q.as_dict() == {"a": 1, "b": 1, "c": 1} + + def test_include_exclude(self): + q = QueryFieldList() + q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.ONLY) + assert q.as_dict() == {"a": 1, "b": 1} + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE) + assert q.as_dict() == {"a": 1} + + def test_exclude_exclude(self): + q = QueryFieldList() + q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE) + assert q.as_dict() == {"a": 0, "b": 0} + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE) + assert q.as_dict() == {"a": 0, "b": 0, "c": 0} + + def test_exclude_include(self): + q = QueryFieldList() + q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE) + assert q.as_dict() == {"a": 0, "b": 0} + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + assert q.as_dict() == {"c": 1} + + def test_always_include(self): + q = QueryFieldList(always_include=["x", "y"]) + q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + assert q.as_dict() == {"x": 1, "y": 1, "c": 1} + + def test_reset(self): + q = QueryFieldList(always_include=["x", "y"]) + q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE) + q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + assert q.as_dict() == {"x": 1, "y": 1, "c": 1} + q.reset() + assert not q + q += 
QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY) + assert q.as_dict() == {"x": 1, "y": 1, "b": 1, "c": 1} + + def test_using_a_slice(self): + q = QueryFieldList() + q += QueryFieldList(fields=["a"], value={"$slice": 5}) + assert q.as_dict() == {"a": {"$slice": 5}} + + +class TestOnlyExcludeAll(unittest.TestCase): + def setUp(self): + connect(db="mongoenginetest") + + class Person(Document): + name = StringField() + age = IntField() + meta = {"allow_inheritance": True} + + Person.drop_collection() + self.Person = Person + + def test_mixing_only_exclude(self): + class MyDoc(Document): + a = StringField() + b = StringField() + c = StringField() + d = StringField() + e = StringField() + f = StringField() + + include = ["a", "b", "c", "d", "e"] + exclude = ["d", "e"] + only = ["b", "c"] + + qs = MyDoc.objects.fields(**{i: 1 for i in include}) + assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1, "d": 1, "e": 1} + qs = qs.only(*only) + assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1} + qs = qs.exclude(*exclude) + assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1} + + qs = MyDoc.objects.fields(**{i: 1 for i in include}) + qs = qs.exclude(*exclude) + assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1} + qs = qs.only(*only) + assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1} + + qs = MyDoc.objects.exclude(*exclude) + qs = qs.fields(**{i: 1 for i in include}) + assert qs._loaded_fields.as_dict() == {"a": 1, "b": 1, "c": 1} + qs = qs.only(*only) + assert qs._loaded_fields.as_dict() == {"b": 1, "c": 1} + + def test_slicing(self): + class MyDoc(Document): + a = ListField() + b = ListField() + c = ListField() + d = ListField() + e = ListField() + f = ListField() + + include = ["a", "b", "c", "d", "e"] + exclude = ["d", "e"] + only = ["b", "c"] + + qs = MyDoc.objects.fields(**{i: 1 for i in include}) + qs = qs.exclude(*exclude) + qs = qs.only(*only) + qs = qs.fields(slice__b=5) + assert qs._loaded_fields.as_dict() == {"b": 
{"$slice": 5}, "c": 1} + + qs = qs.fields(slice__c=[5, 1]) + assert qs._loaded_fields.as_dict() == { + "b": {"$slice": 5}, + "c": {"$slice": [5, 1]}, + } + + qs = qs.exclude("c") + assert qs._loaded_fields.as_dict() == {"b": {"$slice": 5}} + + def test_mix_slice_with_other_fields(self): + class MyDoc(Document): + a = ListField() + b = ListField() + c = ListField() + + qs = MyDoc.objects.fields(a=1, b=0, slice__c=2) + assert qs._loaded_fields.as_dict() == {"c": {"$slice": 2}, "a": 1} + + def test_only(self): + """Ensure that QuerySet.only only returns the requested fields. + """ + person = self.Person(name="test", age=25) + person.save() + + obj = self.Person.objects.only("name").get() + assert obj.name == person.name + assert obj.age is None + + obj = self.Person.objects.only("age").get() + assert obj.name is None + assert obj.age == person.age + + obj = self.Person.objects.only("name", "age").get() + assert obj.name == person.name + assert obj.age == person.age + + obj = self.Person.objects.only(*("id", "name")).get() + assert obj.name == person.name + assert obj.age is None + + # Check polymorphism still works + class Employee(self.Person): + salary = IntField(db_field="wage") + + employee = Employee(name="test employee", age=40, salary=30000) + employee.save() + + obj = self.Person.objects(id=employee.id).only("age").get() + assert isinstance(obj, Employee) + + # Check field names are looked up properly + obj = Employee.objects(id=employee.id).only("salary").get() + assert obj.salary == employee.salary + assert obj.name is None + + def test_only_with_subfields(self): + class User(EmbeddedDocument): + name = StringField() + email = StringField() + + class Comment(EmbeddedDocument): + title = StringField() + text = StringField() + + class VariousData(EmbeddedDocument): + some = BooleanField() + + class BlogPost(Document): + content = StringField() + author = EmbeddedDocumentField(User) + comments = ListField(EmbeddedDocumentField(Comment)) + various = 
MapField(field=EmbeddedDocumentField(VariousData)) + + BlogPost.drop_collection() + + post = BlogPost( + content="Had a good coffee today...", + various={"test_dynamic": {"some": True}}, + ) + post.author = User(name="Test User") + post.comments = [ + Comment(title="I aggree", text="Great post!"), + Comment(title="Coffee", text="I hate coffee"), + ] + post.save() + + obj = BlogPost.objects.only("author.name").get() + assert obj.content is None + assert obj.author.email is None + assert obj.author.name == "Test User" + assert obj.comments == [] + + obj = BlogPost.objects.only("various.test_dynamic.some").get() + assert obj.various["test_dynamic"].some is True + + obj = BlogPost.objects.only("content", "comments.title").get() + assert obj.content == "Had a good coffee today..." + assert obj.author is None + assert obj.comments[0].title == "I aggree" + assert obj.comments[1].title == "Coffee" + assert obj.comments[0].text is None + assert obj.comments[1].text is None + + obj = BlogPost.objects.only("comments").get() + assert obj.content is None + assert obj.author is None + assert obj.comments[0].title == "I aggree" + assert obj.comments[1].title == "Coffee" + assert obj.comments[0].text == "Great post!" 
+ assert obj.comments[1].text == "I hate coffee" + + BlogPost.drop_collection() + + def test_exclude(self): + class User(EmbeddedDocument): + name = StringField() + email = StringField() + + class Comment(EmbeddedDocument): + title = StringField() + text = StringField() + + class BlogPost(Document): + content = StringField() + author = EmbeddedDocumentField(User) + comments = ListField(EmbeddedDocumentField(Comment)) + + BlogPost.drop_collection() + + post = BlogPost(content="Had a good coffee today...") + post.author = User(name="Test User") + post.comments = [ + Comment(title="I aggree", text="Great post!"), + Comment(title="Coffee", text="I hate coffee"), + ] + post.save() + + obj = BlogPost.objects.exclude("author", "comments.text").get() + assert obj.author is None + assert obj.content == "Had a good coffee today..." + assert obj.comments[0].title == "I aggree" + assert obj.comments[0].text is None + + BlogPost.drop_collection() + + def test_exclude_only_combining(self): + class Attachment(EmbeddedDocument): + name = StringField() + content = StringField() + + class Email(Document): + sender = StringField() + to = StringField() + subject = StringField() + body = StringField() + content_type = StringField() + attachments = ListField(EmbeddedDocumentField(Attachment)) + + Email.drop_collection() + email = Email( + sender="me", + to="you", + subject="From Russia with Love", + body="Hello!", + content_type="text/plain", + ) + email.attachments = [ + Attachment(name="file1.doc", content="ABC"), + Attachment(name="file2.doc", content="XYZ"), + ] + email.save() + + obj = Email.objects.exclude("content_type").exclude("body").get() + assert obj.sender == "me" + assert obj.to == "you" + assert obj.subject == "From Russia with Love" + assert obj.body is None + assert obj.content_type is None + + obj = Email.objects.only("sender", "to").exclude("body", "sender").get() + assert obj.sender is None + assert obj.to == "you" + assert obj.subject is None + assert obj.body is 
None + assert obj.content_type is None + + obj = ( + Email.objects.exclude("attachments.content") + .exclude("body") + .only("to", "attachments.name") + .get() + ) + assert obj.attachments[0].name == "file1.doc" + assert obj.attachments[0].content is None + assert obj.sender is None + assert obj.to == "you" + assert obj.subject is None + assert obj.body is None + assert obj.content_type is None + + Email.drop_collection() + + def test_all_fields(self): + class Email(Document): + sender = StringField() + to = StringField() + subject = StringField() + body = StringField() + content_type = StringField() + + Email.drop_collection() + + email = Email( + sender="me", + to="you", + subject="From Russia with Love", + body="Hello!", + content_type="text/plain", + ) + email.save() + + obj = ( + Email.objects.exclude("content_type", "body") + .only("to", "body") + .all_fields() + .get() + ) + assert obj.sender == "me" + assert obj.to == "you" + assert obj.subject == "From Russia with Love" + assert obj.body == "Hello!" + assert obj.content_type == "text/plain" + + Email.drop_collection() + + def test_slicing_fields(self): + """Ensure that query slicing an array works. 
+ """ + + class Numbers(Document): + n = ListField(IntField()) + + Numbers.drop_collection() + + numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1]) + numbers.save() + + # first three + numbers = Numbers.objects.fields(slice__n=3).get() + assert numbers.n == [0, 1, 2] + + # last three + numbers = Numbers.objects.fields(slice__n=-3).get() + assert numbers.n == [-3, -2, -1] + + # skip 2, limit 3 + numbers = Numbers.objects.fields(slice__n=[2, 3]).get() + assert numbers.n == [2, 3, 4] + + # skip to fifth from last, limit 4 + numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() + assert numbers.n == [-5, -4, -3, -2] + + # skip to fifth from last, limit 10 + numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() + assert numbers.n == [-5, -4, -3, -2, -1] + + # skip to fifth from last, limit 10 dict method + numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() + assert numbers.n == [-5, -4, -3, -2, -1] + + def test_slicing_nested_fields(self): + """Ensure that query slicing an embedded array works. 
+ """ + + class EmbeddedNumber(EmbeddedDocument): + n = ListField(IntField()) + + class Numbers(Document): + embedded = EmbeddedDocumentField(EmbeddedNumber) + + Numbers.drop_collection() + + numbers = Numbers() + numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1]) + numbers.save() + + # first three + numbers = Numbers.objects.fields(slice__embedded__n=3).get() + assert numbers.embedded.n == [0, 1, 2] + + # last three + numbers = Numbers.objects.fields(slice__embedded__n=-3).get() + assert numbers.embedded.n == [-3, -2, -1] + + # skip 2, limit 3 + numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() + assert numbers.embedded.n == [2, 3, 4] + + # skip to fifth from last, limit 4 + numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() + assert numbers.embedded.n == [-5, -4, -3, -2] + + # skip to fifth from last, limit 10 + numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() + assert numbers.embedded.n == [-5, -4, -3, -2, -1] + + # skip to fifth from last, limit 10 dict method + numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() + assert numbers.embedded.n == [-5, -4, -3, -2, -1] + + def test_exclude_from_subclasses_docs(self): + class Base(Document): + username = StringField() + + meta = {"allow_inheritance": True} + + class Anon(Base): + anon = BooleanField() + + class User(Base): + password = StringField() + wibble = StringField() + + Base.drop_collection() + User(username="mongodb", password="secret").save() + + user = Base.objects().exclude("password", "wibble").first() + assert user.password is None + + with pytest.raises(LookUpError): + Base.objects.exclude("made_up") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/queryset/geo.py b/tests/queryset/test_geo.py similarity index 62% rename from tests/queryset/geo.py rename to tests/queryset/test_geo.py index 45e6a089..a546fdb6 100644 --- a/tests/queryset/geo.py +++ b/tests/queryset/test_geo.py @@ -6,13 
+6,10 @@ from mongoengine import * from tests.utils import MongoDBTestCase -__all__ = ("GeoQueriesTest",) - - -class GeoQueriesTest(MongoDBTestCase): - +class TestGeoQueries(MongoDBTestCase): def _create_event_data(self, point_field_class=GeoPointField): """Create some sample data re-used in many of the tests below.""" + class Event(Document): title = StringField() date = DateTimeField() @@ -28,15 +25,18 @@ class GeoQueriesTest(MongoDBTestCase): event1 = Event.objects.create( title="Coltrane Motion @ Double Door", date=datetime.datetime.now() - datetime.timedelta(days=1), - location=[-87.677137, 41.909889]) + location=[-87.677137, 41.909889], + ) event2 = Event.objects.create( title="Coltrane Motion @ Bottom of the Hill", date=datetime.datetime.now() - datetime.timedelta(days=10), - location=[-122.4194155, 37.7749295]) + location=[-122.4194155, 37.7749295], + ) event3 = Event.objects.create( title="Coltrane Motion @ Empty Bottle", date=datetime.datetime.now(), - location=[-87.686638, 41.900474]) + location=[-87.686638, 41.900474], + ) return event1, event2, event3 @@ -48,14 +48,14 @@ class GeoQueriesTest(MongoDBTestCase): # note that "near" will show the san francisco event, too, # although it sorts to last. 
events = self.Event.objects(location__near=[-87.67892, 41.9120459]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) + assert events.count() == 3 + assert list(events) == [event1, event3, event2] # ensure ordering is respected by "near" events = self.Event.objects(location__near=[-87.67892, 41.9120459]) events = events.order_by("-date") - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event3, event1, event2]) + assert events.count() == 3 + assert list(events) == [event3, event1, event2] def test_near_and_max_distance(self): """Ensure the "max_distance" operator works alongside the "near" @@ -65,10 +65,9 @@ class GeoQueriesTest(MongoDBTestCase): # find events within 10 degrees of san francisco point = [-122.415579, 37.7566023] - events = self.Event.objects(location__near=point, - location__max_distance=10) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) + events = self.Event.objects(location__near=point, location__max_distance=10) + assert events.count() == 1 + assert events[0] == event2 def test_near_and_min_distance(self): """Ensure the "min_distance" operator works alongside the "near" @@ -78,9 +77,8 @@ class GeoQueriesTest(MongoDBTestCase): # find events at least 10 degrees away of san francisco point = [-122.415579, 37.7566023] - events = self.Event.objects(location__near=point, - location__min_distance=10) - self.assertEqual(events.count(), 2) + events = self.Event.objects(location__near=point, location__min_distance=10) + assert events.count() == 2 def test_within_distance(self): """Make sure the "within_distance" operator works.""" @@ -88,34 +86,30 @@ class GeoQueriesTest(MongoDBTestCase): # find events within 5 degrees of pitchfork office, chicago point_and_distance = [[-87.67892, 41.9120459], 5] - events = self.Event.objects( - location__within_distance=point_and_distance) - self.assertEqual(events.count(), 2) + events = 
self.Event.objects(location__within_distance=point_and_distance) + assert events.count() == 2 events = list(events) - self.assertNotIn(event2, events) - self.assertIn(event1, events) - self.assertIn(event3, events) + assert event2 not in events + assert event1 in events + assert event3 in events # find events within 10 degrees of san francisco point_and_distance = [[-122.415579, 37.7566023], 10] - events = self.Event.objects( - location__within_distance=point_and_distance) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) + events = self.Event.objects(location__within_distance=point_and_distance) + assert events.count() == 1 + assert events[0] == event2 # find events within 1 degree of greenpoint, broolyn, nyc, ny point_and_distance = [[-73.9509714, 40.7237134], 1] - events = self.Event.objects( - location__within_distance=point_and_distance) - self.assertEqual(events.count(), 0) + events = self.Event.objects(location__within_distance=point_and_distance) + assert events.count() == 0 # ensure ordering is respected by "within_distance" point_and_distance = [[-87.67892, 41.9120459], 10] - events = self.Event.objects( - location__within_distance=point_and_distance) + events = self.Event.objects(location__within_distance=point_and_distance) events = events.order_by("-date") - self.assertEqual(events.count(), 2) - self.assertEqual(events[0], event3) + assert events.count() == 2 + assert events[0] == event3 def test_within_box(self): """Ensure the "within_box" operator works.""" @@ -124,8 +118,8 @@ class GeoQueriesTest(MongoDBTestCase): # check that within_box works box = [(-125.0, 35.0), (-100.0, 40.0)] events = self.Event.objects(location__within_box=box) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event2.id) + assert events.count() == 1 + assert events[0].id == event2.id def test_within_polygon(self): """Ensure the "within_polygon" operator works.""" @@ -139,87 +133,78 @@ class GeoQueriesTest(MongoDBTestCase): 
(-87.656164, 41.898061), ] events = self.Event.objects(location__within_polygon=polygon) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event1.id) + assert events.count() == 1 + assert events[0].id == event1.id polygon2 = [ (-1.742249, 54.033586), (-1.225891, 52.792797), - (-4.40094, 53.389881) + (-4.40094, 53.389881), ] events = self.Event.objects(location__within_polygon=polygon2) - self.assertEqual(events.count(), 0) + assert events.count() == 0 def test_2dsphere_near(self): """Make sure the "near" operator works with a PointField, which corresponds to a 2dsphere index. """ - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # find all events "near" pitchfork office, chicago. # note that "near" will show the san francisco event, too, # although it sorts to last. events = self.Event.objects(location__near=[-87.67892, 41.9120459]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) + assert events.count() == 3 + assert list(events) == [event1, event3, event2] # ensure ordering is respected by "near" events = self.Event.objects(location__near=[-87.67892, 41.9120459]) events = events.order_by("-date") - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event3, event1, event2]) + assert events.count() == 3 + assert list(events) == [event3, event1, event2] def test_2dsphere_near_and_max_distance(self): """Ensure the "max_distance" operator works alongside the "near" operator with a 2dsphere index. 
""" - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # find events within 10km of san francisco point = [-122.415579, 37.7566023] - events = self.Event.objects(location__near=point, - location__max_distance=10000) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) + events = self.Event.objects(location__near=point, location__max_distance=10000) + assert events.count() == 1 + assert events[0] == event2 # find events within 1km of greenpoint, broolyn, nyc, ny - events = self.Event.objects(location__near=[-73.9509714, 40.7237134], - location__max_distance=1000) - self.assertEqual(events.count(), 0) + events = self.Event.objects( + location__near=[-73.9509714, 40.7237134], location__max_distance=1000 + ) + assert events.count() == 0 # ensure ordering is respected by "near" events = self.Event.objects( - location__near=[-87.67892, 41.9120459], - location__max_distance=10000 + location__near=[-87.67892, 41.9120459], location__max_distance=10000 ).order_by("-date") - self.assertEqual(events.count(), 2) - self.assertEqual(events[0], event3) + assert events.count() == 2 + assert events[0] == event3 def test_2dsphere_geo_within_box(self): """Ensure the "geo_within_box" operator works with a 2dsphere index. """ - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # check that within_box works box = [(-125.0, 35.0), (-100.0, 40.0)] events = self.Event.objects(location__geo_within_box=box) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event2.id) + assert events.count() == 1 + assert events[0].id == event2.id def test_2dsphere_geo_within_polygon(self): """Ensure the "geo_within_polygon" operator works with a 2dsphere index. 
""" - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) polygon = [ (-87.694445, 41.912114), @@ -229,64 +214,59 @@ class GeoQueriesTest(MongoDBTestCase): (-87.656164, 41.898061), ] events = self.Event.objects(location__geo_within_polygon=polygon) - self.assertEqual(events.count(), 1) - self.assertEqual(events[0].id, event1.id) + assert events.count() == 1 + assert events[0].id == event1.id polygon2 = [ (-1.742249, 54.033586), (-1.225891, 52.792797), - (-4.40094, 53.389881) + (-4.40094, 53.389881), ] events = self.Event.objects(location__geo_within_polygon=polygon2) - self.assertEqual(events.count(), 0) + assert events.count() == 0 def test_2dsphere_near_and_min_max_distance(self): """Ensure "min_distace" and "max_distance" operators work well together with the "near" operator in a 2dsphere index. """ - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # ensure min_distance and max_distance combine well events = self.Event.objects( location__near=[-87.67892, 41.9120459], location__min_distance=1000, - location__max_distance=10000 + location__max_distance=10000, ).order_by("-date") - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event3) + assert events.count() == 1 + assert events[0] == event3 # ensure ordering is respected by "near" with "min_distance" events = self.Event.objects( - location__near=[-87.67892, 41.9120459], - location__min_distance=10000 + location__near=[-87.67892, 41.9120459], location__min_distance=10000 ).order_by("-date") - self.assertEqual(events.count(), 1) - self.assertEqual(events[0], event2) + assert events.count() == 1 + assert events[0] == event2 def test_2dsphere_geo_within_center(self): """Make sure the "geo_within_center" operator works with a 2dsphere index. 
""" - event1, event2, event3 = self._create_event_data( - point_field_class=PointField - ) + event1, event2, event3 = self._create_event_data(point_field_class=PointField) # find events within 5 degrees of pitchfork office, chicago point_and_distance = [[-87.67892, 41.9120459], 2] - events = self.Event.objects( - location__geo_within_center=point_and_distance) - self.assertEqual(events.count(), 2) + events = self.Event.objects(location__geo_within_center=point_and_distance) + assert events.count() == 2 events = list(events) - self.assertNotIn(event2, events) - self.assertIn(event1, events) - self.assertIn(event3, events) + assert event2 not in events + assert event1 in events + assert event3 in events def _test_embedded(self, point_field_class): """Helper test method ensuring given point field class works well in an embedded document. """ + class Venue(EmbeddedDocument): location = point_field_class() name = StringField() @@ -300,19 +280,18 @@ class GeoQueriesTest(MongoDBTestCase): venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889]) venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295]) - event1 = Event(title="Coltrane Motion @ Double Door", - venue=venue1).save() - event2 = Event(title="Coltrane Motion @ Bottom of the Hill", - venue=venue2).save() - event3 = Event(title="Coltrane Motion @ Empty Bottle", - venue=venue1).save() + event1 = Event(title="Coltrane Motion @ Double Door", venue=venue1).save() + event2 = Event( + title="Coltrane Motion @ Bottom of the Hill", venue=venue2 + ).save() + event3 = Event(title="Coltrane Motion @ Empty Bottle", venue=venue1).save() # find all events "near" pitchfork office, chicago. # note that "near" will show the san francisco event, too, # although it sorts to last. 
events = Event.objects(venue__location__near=[-87.67892, 41.9120459]) - self.assertEqual(events.count(), 3) - self.assertEqual(list(events), [event1, event3, event2]) + assert events.count() == 3 + assert list(events) == [event1, event3, event2] def test_geo_spatial_embedded(self): """Make sure GeoPointField works properly in an embedded document.""" @@ -324,6 +303,7 @@ class GeoQueriesTest(MongoDBTestCase): def test_spherical_geospatial_operators(self): """Ensure that spherical geospatial queries are working.""" + class Point(Document): location = GeoPointField() @@ -339,58 +319,55 @@ class GeoQueriesTest(MongoDBTestCase): # Finds both points because they are within 60 km of the reference # point equidistant between them. points = Point.objects(location__near_sphere=[-122, 37.5]) - self.assertEqual(points.count(), 2) + assert points.count() == 2 # Same behavior for _within_spherical_distance points = Point.objects( - location__within_spherical_distance=[ - [-122, 37.5], - 60 / earth_radius - ] + location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius] ) - self.assertEqual(points.count(), 2) + assert points.count() == 2 - points = Point.objects(location__near_sphere=[-122, 37.5], - location__max_distance=60 / earth_radius) - self.assertEqual(points.count(), 2) + points = Point.objects( + location__near_sphere=[-122, 37.5], location__max_distance=60 / earth_radius + ) + assert points.count() == 2 # Test query works with max_distance, being farer from one point - points = Point.objects(location__near_sphere=[-122, 37.8], - location__max_distance=60 / earth_radius) + points = Point.objects( + location__near_sphere=[-122, 37.8], location__max_distance=60 / earth_radius + ) close_point = points.first() - self.assertEqual(points.count(), 1) + assert points.count() == 1 # Test query works with min_distance, being farer from one point - points = Point.objects(location__near_sphere=[-122, 37.8], - location__min_distance=60 / earth_radius) - 
self.assertEqual(points.count(), 1) + points = Point.objects( + location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius + ) + assert points.count() == 1 far_point = points.first() - self.assertNotEqual(close_point, far_point) + assert close_point != far_point # Finds both points, but orders the north point first because it's # closer to the reference point to the north. points = Point.objects(location__near_sphere=[-122, 38.5]) - self.assertEqual(points.count(), 2) - self.assertEqual(points[0].id, north_point.id) - self.assertEqual(points[1].id, south_point.id) + assert points.count() == 2 + assert points[0].id == north_point.id + assert points[1].id == south_point.id # Finds both points, but orders the south point first because it's # closer to the reference point to the south. points = Point.objects(location__near_sphere=[-122, 36.5]) - self.assertEqual(points.count(), 2) - self.assertEqual(points[0].id, south_point.id) - self.assertEqual(points[1].id, north_point.id) + assert points.count() == 2 + assert points[0].id == south_point.id + assert points[1].id == north_point.id # Finds only one point because only the first point is within 60km of # the reference point to the south. 
points = Point.objects( - location__within_spherical_distance=[ - [-122, 36.5], - 60 / earth_radius - ] + location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius] ) - self.assertEqual(points.count(), 1) - self.assertEqual(points[0].id, south_point.id) + assert points.count() == 1 + assert points[0].id == south_point.id def test_linestring(self): class Road(Document): @@ -404,48 +381,51 @@ class GeoQueriesTest(MongoDBTestCase): # near point = {"type": "Point", "coordinates": [40, 5]} roads = Road.objects.filter(line__near=point["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__near=point).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__near={"$geometry": point}).count() - self.assertEqual(1, roads) + assert 1 == roads # Within - polygon = {"type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + polygon = { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], + } roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_within=polygon).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count() - self.assertEqual(1, roads) + assert 1 == roads # Intersects - line = {"type": "LineString", - "coordinates": [[40, 5], [40, 6]]} + line = {"type": "LineString", "coordinates": [[40, 5], [40, 6]]} roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_intersects=line).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count() - self.assertEqual(1, roads) + assert 1 == roads - polygon = {"type": "Polygon", - "coordinates": [[[40, 5], [40, 6], 
[41, 6], [41, 5], [40, 5]]]} + polygon = { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], + } roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_intersects=polygon).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count() - self.assertEqual(1, roads) + assert 1 == roads def test_polygon(self): class Road(Document): @@ -459,66 +439,66 @@ class GeoQueriesTest(MongoDBTestCase): # near point = {"type": "Point", "coordinates": [40, 5]} roads = Road.objects.filter(poly__near=point["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__near=point).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__near={"$geometry": point}).count() - self.assertEqual(1, roads) + assert 1 == roads # Within - polygon = {"type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + polygon = { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], + } roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_within=polygon).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count() - self.assertEqual(1, roads) + assert 1 == roads # Intersects - line = {"type": "LineString", - "coordinates": [[40, 5], [41, 6]]} + line = {"type": "LineString", "coordinates": [[40, 5], [41, 6]]} roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_intersects=line).count() - self.assertEqual(1, roads) + assert 1 == roads roads = 
Road.objects.filter(poly__geo_intersects={"$geometry": line}).count() - self.assertEqual(1, roads) + assert 1 == roads - polygon = {"type": "Polygon", - "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} + polygon = { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], + } roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_intersects=polygon).count() - self.assertEqual(1, roads) + assert 1 == roads roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() - self.assertEqual(1, roads) + assert 1 == roads def test_aspymongo_with_only(self): """Ensure as_pymongo works with only""" + class Place(Document): location = PointField() Place.drop_collection() p = Place(location=[24.946861267089844, 60.16311983618494]) p.save() - qs = Place.objects().only('location') - self.assertDictEqual( - qs.as_pymongo()[0]['location'], - {u'type': u'Point', - u'coordinates': [ - 24.946861267089844, - 60.16311983618494] - } - ) + qs = Place.objects().only("location") + assert qs.as_pymongo()[0]["location"] == { + u"type": u"Point", + u"coordinates": [24.946861267089844, 60.16311983618494], + } def test_2dsphere_point_sets_correctly(self): class Location(Document): @@ -528,11 +508,11 @@ class GeoQueriesTest(MongoDBTestCase): Location(loc=[1, 2]).save() loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]}) + assert loc["loc"] == {"type": "Point", "coordinates": [1, 2]} Location.objects.update(set__loc=[2, 1]) loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]}) + assert loc["loc"] == {"type": "Point", "coordinates": [2, 1]} def test_2dsphere_linestring_sets_correctly(self): class Location(Document): @@ -542,11 +522,11 @@ class GeoQueriesTest(MongoDBTestCase): Location(line=[[1, 2], [2, 
2]]).save() loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}) + assert loc["line"] == {"type": "LineString", "coordinates": [[1, 2], [2, 2]]} Location.objects.update(set__line=[[2, 1], [1, 2]]) loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]}) + assert loc["line"] == {"type": "LineString", "coordinates": [[2, 1], [1, 2]]} def test_geojson_PolygonField(self): class Location(Document): @@ -556,12 +536,18 @@ class GeoQueriesTest(MongoDBTestCase): Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save() loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) + assert loc["poly"] == { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], + } Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]]) loc = Location.objects.as_pymongo()[0] - self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}) + assert loc["poly"] == { + "type": "Polygon", + "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]], + } -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/modify.py b/tests/queryset/test_modify.py similarity index 65% rename from tests/queryset/modify.py rename to tests/queryset/test_modify.py index 3c5879ba..556e6d9e 100644 --- a/tests/queryset/modify.py +++ b/tests/queryset/test_modify.py @@ -1,8 +1,6 @@ import unittest -from mongoengine import connect, Document, IntField, StringField, ListField - -__all__ = ("FindAndModifyTest",) +from mongoengine import Document, IntField, ListField, StringField, connect class Doc(Document): @@ -10,21 +8,20 @@ class Doc(Document): value = IntField() -class FindAndModifyTest(unittest.TestCase): - +class TestFindAndModify(unittest.TestCase): def setUp(self): 
connect(db="mongoenginetest") Doc.drop_collection() def assertDbEqual(self, docs): - self.assertEqual(list(Doc._collection.find().sort("id")), docs) + assert list(Doc._collection.find().sort("id")) == docs def test_modify(self): Doc(id=0, value=0).save() doc = Doc(id=1, value=1).save() old_doc = Doc.objects(id=1).modify(set__value=-1) - self.assertEqual(old_doc.to_json(), doc.to_json()) + assert old_doc.to_json() == doc.to_json() self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_with_new(self): @@ -33,18 +30,18 @@ class FindAndModifyTest(unittest.TestCase): new_doc = Doc.objects(id=1).modify(set__value=-1, new=True) doc.value = -1 - self.assertEqual(new_doc.to_json(), doc.to_json()) + assert new_doc.to_json() == doc.to_json() self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_not_existing(self): Doc(id=0, value=0).save() - self.assertEqual(Doc.objects(id=1).modify(set__value=-1), None) + assert Doc.objects(id=1).modify(set__value=-1) is None self.assertDbEqual([{"_id": 0, "value": 0}]) def test_modify_with_upsert(self): Doc(id=0, value=0).save() old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True) - self.assertEqual(old_doc, None) + assert old_doc is None self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) def test_modify_with_upsert_existing(self): @@ -52,13 +49,13 @@ class FindAndModifyTest(unittest.TestCase): doc = Doc(id=1, value=1).save() old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True) - self.assertEqual(old_doc.to_json(), doc.to_json()) + assert old_doc.to_json() == doc.to_json() self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_with_upsert_with_new(self): Doc(id=0, value=0).save() new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1) - self.assertEqual(new_doc.to_mongo(), {"_id": 1, "value": 1}) + assert new_doc.to_mongo() == {"_id": 1, "value": 1} self.assertDbEqual([{"_id": 0, "value": 0}, 
{"_id": 1, "value": 1}]) def test_modify_with_remove(self): @@ -66,12 +63,12 @@ class FindAndModifyTest(unittest.TestCase): doc = Doc(id=1, value=1).save() old_doc = Doc.objects(id=1).modify(remove=True) - self.assertEqual(old_doc.to_json(), doc.to_json()) + assert old_doc.to_json() == doc.to_json() self.assertDbEqual([{"_id": 0, "value": 0}]) def test_find_and_modify_with_remove_not_existing(self): Doc(id=0, value=0).save() - self.assertEqual(Doc.objects(id=1).modify(remove=True), None) + assert Doc.objects(id=1).modify(remove=True) is None self.assertDbEqual([{"_id": 0, "value": 0}]) def test_modify_with_order_by(self): @@ -81,17 +78,22 @@ class FindAndModifyTest(unittest.TestCase): doc = Doc(id=3, value=0).save() old_doc = Doc.objects().order_by("-id").modify(set__value=-1) - self.assertEqual(old_doc.to_json(), doc.to_json()) - self.assertDbEqual([ - {"_id": 0, "value": 3}, {"_id": 1, "value": 2}, - {"_id": 2, "value": 1}, {"_id": 3, "value": -1}]) + assert old_doc.to_json() == doc.to_json() + self.assertDbEqual( + [ + {"_id": 0, "value": 3}, + {"_id": 1, "value": 2}, + {"_id": 2, "value": 1}, + {"_id": 3, "value": -1}, + ] + ) def test_modify_with_fields(self): Doc(id=0, value=0).save() Doc(id=1, value=1).save() old_doc = Doc.objects(id=1).only("id").modify(set__value=-1) - self.assertEqual(old_doc.to_mongo(), {"_id": 1}) + assert old_doc.to_mongo() == {"_id": 1} self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_with_push(self): @@ -103,27 +105,25 @@ class FindAndModifyTest(unittest.TestCase): blog = BlogPost.objects.create() # Push a new tag via modify with new=False (default). - BlogPost(id=blog.id).modify(push__tags='code') - self.assertEqual(blog.tags, []) + BlogPost(id=blog.id).modify(push__tags="code") + assert blog.tags == [] blog.reload() - self.assertEqual(blog.tags, ['code']) + assert blog.tags == ["code"] # Push a new tag via modify with new=True. 
- blog = BlogPost.objects(id=blog.id).modify(push__tags='java', new=True) - self.assertEqual(blog.tags, ['code', 'java']) + blog = BlogPost.objects(id=blog.id).modify(push__tags="java", new=True) + assert blog.tags == ["code", "java"] # Push a new tag with a positional argument. - blog = BlogPost.objects(id=blog.id).modify( - push__tags__0='python', - new=True) - self.assertEqual(blog.tags, ['python', 'code', 'java']) + blog = BlogPost.objects(id=blog.id).modify(push__tags__0="python", new=True) + assert blog.tags == ["python", "code", "java"] # Push multiple new tags with a positional argument. blog = BlogPost.objects(id=blog.id).modify( - push__tags__1=['go', 'rust'], - new=True) - self.assertEqual(blog.tags, ['python', 'go', 'rust', 'code', 'java']) + push__tags__1=["go", "rust"], new=True + ) + assert blog.tags == ["python", "go", "rust", "code", "java"] -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/pickable.py b/tests/queryset/test_pickable.py similarity index 51% rename from tests/queryset/pickable.py rename to tests/queryset/test_pickable.py index bf7bb31c..d41f56df 100644 --- a/tests/queryset/pickable.py +++ b/tests/queryset/test_pickable.py @@ -1,10 +1,9 @@ import pickle import unittest -from pymongo.mongo_client import MongoClient -from mongoengine import Document, StringField, IntField -from mongoengine.connection import connect -__author__ = 'stas' +from mongoengine import Document, IntField, StringField +from mongoengine.connection import connect +from tests.utils import MongoDBTestCase class Person(Document): @@ -12,22 +11,15 @@ class Person(Document): age = IntField() -class TestQuerysetPickable(unittest.TestCase): +class TestQuerysetPickable(MongoDBTestCase): """ Test for adding pickling support for QuerySet instances See issue https://github.com/MongoEngine/mongoengine/issues/442 """ + def setUp(self): super(TestQuerysetPickable, self).setUp() - - connection = connect(db="test") # type: 
pymongo.mongo_client.MongoClient - - connection.drop_database("test") - - self.john = Person.objects.create( - name="John", - age=21 - ) + self.john = Person.objects.create(name="John", age=21) def test_picke_simple_qs(self): @@ -45,35 +37,23 @@ class TestQuerysetPickable(unittest.TestCase): loadedQs = self._get_loaded(qs) - self.assertEqual(qs.count(), loadedQs.count()) + assert qs.count() == loadedQs.count() # can update loadedQs loadedQs.update(age=23) # check - self.assertEqual(Person.objects.first().age, 23) + assert Person.objects.first().age == 23 def test_pickle_support_filtration(self): - Person.objects.create( - name="Alice", - age=22 - ) + Person.objects.create(name="Alice", age=22) - Person.objects.create( - name="Bob", - age=23 - ) + Person.objects.create(name="Bob", age=23) qs = Person.objects.filter(age__gte=22) - self.assertEqual(qs.count(), 2) + assert qs.count() == 2 loaded = self._get_loaded(qs) - self.assertEqual(loaded.count(), 2) - self.assertEqual(loaded.filter(name="Bob").first().age, 23) - - - - - - + assert loaded.count() == 2 + assert loaded.filter(name="Bob").first().age == 23 diff --git a/tests/queryset/queryset.py b/tests/queryset/test_queryset.py similarity index 54% rename from tests/queryset/queryset.py rename to tests/queryset/test_queryset.py index 55f256d9..73c419b3 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/test_queryset.py @@ -7,39 +7,44 @@ from decimal import Decimal from bson import DBRef, ObjectId import pymongo +from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo.results import UpdateResult -import six -from six import iteritems +import pytest from mongoengine import * -from mongoengine.connection import get_connection, get_db +from mongoengine.connection import get_db from mongoengine.context_managers import query_counter, switch_db from mongoengine.errors import InvalidQueryError from mongoengine.mongodb_support import MONGODB_36, 
get_mongodb_version -from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned, - QuerySet, QuerySetManager, queryset_manager) +from mongoengine.queryset import ( + DoesNotExist, + MultipleObjectsReturned, + QuerySet, + QuerySetManager, + queryset_manager, +) class db_ops_tracker(query_counter): - def get_ops(self): ignore_query = dict(self._ignored_query) - ignore_query['command.count'] = {'$ne': 'system.profile'} # Ignore the query issued by query_counter + ignore_query["command.count"] = { + "$ne": "system.profile" + } # Ignore the query issued by query_counter return list(self.db.system.profile.find(ignore_query)) def get_key_compat(mongo_ver): - ORDER_BY_KEY = 'sort' - CMD_QUERY_KEY = 'command' if mongo_ver >= MONGODB_36 else 'query' + ORDER_BY_KEY = "sort" + CMD_QUERY_KEY = "command" if mongo_ver >= MONGODB_36 else "query" return ORDER_BY_KEY, CMD_QUERY_KEY -class QuerySetTest(unittest.TestCase): - +class TestQueryset(unittest.TestCase): def setUp(self): - connect(db='mongoenginetest') - connect(db='mongoenginetest2', alias='test2') + connect(db="mongoenginetest") + connect(db="mongoenginetest2", alias="test2") class PersonMeta(EmbeddedDocument): weight = IntField() @@ -48,7 +53,7 @@ class QuerySetTest(unittest.TestCase): name = StringField() age = IntField() person_meta = EmbeddedDocumentField(PersonMeta) - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} Person.drop_collection() self.PersonMeta = PersonMeta @@ -59,141 +64,180 @@ class QuerySetTest(unittest.TestCase): def test_initialisation(self): """Ensure that a QuerySet is correctly initialised by QuerySetManager. 
""" - self.assertIsInstance(self.Person.objects, QuerySet) - self.assertEqual(self.Person.objects._collection.name, - self.Person._get_collection_name()) - self.assertIsInstance(self.Person.objects._collection, pymongo.collection.Collection) + assert isinstance(self.Person.objects, QuerySet) + assert ( + self.Person.objects._collection.name == self.Person._get_collection_name() + ) + assert isinstance( + self.Person.objects._collection, pymongo.collection.Collection + ) def test_cannot_perform_joins_references(self): - class BlogPost(Document): author = ReferenceField(self.Person) author2 = GenericReferenceField() # test addressing a field from a reference - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): list(BlogPost.objects(author__name="test")) # should fail for a generic reference as well - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): list(BlogPost.objects(author2__name="test")) def test_find(self): """Ensure that a query returns a valid set of results.""" - user_a = self.Person.objects.create(name='User A', age=20) - user_b = self.Person.objects.create(name='User B', age=30) + user_a = self.Person.objects.create(name="User A", age=20) + user_b = self.Person.objects.create(name="User B", age=30) # Find all people in the collection people = self.Person.objects - self.assertEqual(people.count(), 2) + assert people.count() == 2 results = list(people) - self.assertIsInstance(results[0], self.Person) - self.assertIsInstance(results[0].id, ObjectId) + assert isinstance(results[0], self.Person) + assert isinstance(results[0].id, ObjectId) - self.assertEqual(results[0], user_a) - self.assertEqual(results[0].name, 'User A') - self.assertEqual(results[0].age, 20) + assert results[0] == user_a + assert results[0].name == "User A" + assert results[0].age == 20 - self.assertEqual(results[1], user_b) - self.assertEqual(results[1].name, 'User B') - self.assertEqual(results[1].age, 30) + assert 
results[1] == user_b + assert results[1].name == "User B" + assert results[1].age == 30 # Filter people by age people = self.Person.objects(age=20) - self.assertEqual(people.count(), 1) - person = people.next() - self.assertEqual(person, user_a) - self.assertEqual(person.name, "User A") - self.assertEqual(person.age, 20) + assert people.count() == 1 + person = next(people) + assert person == user_a + assert person.name == "User A" + assert person.age == 20 + + def test_slicing_sets_empty_limit_skip(self): + self.Person.objects.insert( + [self.Person(name="User {}".format(i), age=i) for i in range(5)], + load_bulk=False, + ) + + self.Person.objects.create(name="User B", age=30) + self.Person.objects.create(name="User C", age=40) + + qs = self.Person.objects()[1:2] + assert (qs._empty, qs._skip, qs._limit) == (False, 1, 1) + assert len(list(qs)) == 1 + + # Test edge case of [1:1] which should return nothing + # and require a hack so that it doesn't clash with limit(0) + qs = self.Person.objects()[1:1] + assert (qs._empty, qs._skip, qs._limit) == (True, 1, 0) + assert len(list(qs)) == 0 + + qs2 = qs[1:5] # Make sure that further slicing resets _empty + assert (qs2._empty, qs2._skip, qs2._limit) == (False, 1, 4) + assert len(list(qs2)) == 4 + + def test_limit_0_returns_all_documents(self): + self.Person.objects.create(name="User A", age=20) + self.Person.objects.create(name="User B", age=30) + + n_docs = self.Person.objects().count() + + persons = list(self.Person.objects().limit(0)) + assert len(persons) == 2 == n_docs def test_limit(self): """Ensure that QuerySet.limit works as expected.""" - user_a = self.Person.objects.create(name='User A', age=20) - user_b = self.Person.objects.create(name='User B', age=30) + user_a = self.Person.objects.create(name="User A", age=20) + _ = self.Person.objects.create(name="User B", age=30) # Test limit on a new queryset people = list(self.Person.objects.limit(1)) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], 
user_a) + assert len(people) == 1 + assert people[0] == user_a # Test limit on an existing queryset people = self.Person.objects - self.assertEqual(len(people), 2) + assert len(people) == 2 people2 = people.limit(1) - self.assertEqual(len(people), 2) - self.assertEqual(len(people2), 1) - self.assertEqual(people2[0], user_a) + assert len(people) == 2 + assert len(people2) == 1 + assert people2[0] == user_a # Test limit with 0 as parameter people = self.Person.objects.limit(0) - self.assertEqual(people.count(with_limit_and_skip=True), 2) - self.assertEqual(len(people), 2) + assert people.count(with_limit_and_skip=True) == 2 + assert len(people) == 2 # Test chaining of only after limit - person = self.Person.objects().limit(1).only('name').first() - self.assertEqual(person, user_a) - self.assertEqual(person.name, 'User A') - self.assertEqual(person.age, None) + person = self.Person.objects().limit(1).only("name").first() + assert person == user_a + assert person.name == "User A" + assert person.age is None def test_skip(self): """Ensure that QuerySet.skip works as expected.""" - user_a = self.Person.objects.create(name='User A', age=20) - user_b = self.Person.objects.create(name='User B', age=30) + user_a = self.Person.objects.create(name="User A", age=20) + user_b = self.Person.objects.create(name="User B", age=30) # Test skip on a new queryset + people = list(self.Person.objects.skip(0)) + assert len(people) == 2 + assert people[0] == user_a + assert people[1] == user_b + people = list(self.Person.objects.skip(1)) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], user_b) + assert len(people) == 1 + assert people[0] == user_b # Test skip on an existing queryset people = self.Person.objects - self.assertEqual(len(people), 2) + assert len(people) == 2 people2 = people.skip(1) - self.assertEqual(len(people), 2) - self.assertEqual(len(people2), 1) - self.assertEqual(people2[0], user_b) + assert len(people) == 2 + assert len(people2) == 1 + assert 
people2[0] == user_b # Test chaining of only after skip - person = self.Person.objects().skip(1).only('name').first() - self.assertEqual(person, user_b) - self.assertEqual(person.name, 'User B') - self.assertEqual(person.age, None) + person = self.Person.objects().skip(1).only("name").first() + assert person == user_b + assert person.name == "User B" + assert person.age is None def test___getitem___invalid_index(self): """Ensure slicing a queryset works as expected.""" - with self.assertRaises(TypeError): - self.Person.objects()['a'] + with pytest.raises(TypeError): + self.Person.objects()["a"] def test_slice(self): """Ensure slicing a queryset works as expected.""" - user_a = self.Person.objects.create(name='User A', age=20) - user_b = self.Person.objects.create(name='User B', age=30) + user_a = self.Person.objects.create(name="User A", age=20) + user_b = self.Person.objects.create(name="User B", age=30) user_c = self.Person.objects.create(name="User C", age=40) # Test slice limit people = list(self.Person.objects[:2]) - self.assertEqual(len(people), 2) - self.assertEqual(people[0], user_a) - self.assertEqual(people[1], user_b) + assert len(people) == 2 + assert people[0] == user_a + assert people[1] == user_b # Test slice skip people = list(self.Person.objects[1:]) - self.assertEqual(len(people), 2) - self.assertEqual(people[0], user_b) - self.assertEqual(people[1], user_c) + assert len(people) == 2 + assert people[0] == user_b + assert people[1] == user_c # Test slice limit and skip people = list(self.Person.objects[1:2]) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], user_b) + assert len(people) == 1 + assert people[0] == user_b # Test slice limit and skip on an existing queryset people = self.Person.objects - self.assertEqual(len(people), 3) + assert len(people) == 3 people2 = people[1:2] - self.assertEqual(len(people2), 1) - self.assertEqual(people2[0], user_b) + assert len(people2) == 1 + assert people2[0] == user_b # Test slice limit and 
skip cursor reset qs = self.Person.objects[1:2] @@ -201,28 +245,32 @@ class QuerySetTest(unittest.TestCase): qs._cursor qs._cursor_obj = None people = list(qs) - self.assertEqual(len(people), 1) - self.assertEqual(people[0].name, 'User B') + assert len(people) == 1 + assert people[0].name == "User B" # Test empty slice people = list(self.Person.objects[1:1]) - self.assertEqual(len(people), 0) + assert len(people) == 0 # Test slice out of range people = list(self.Person.objects[80000:80001]) - self.assertEqual(len(people), 0) + assert len(people) == 0 # Test larger slice __repr__ self.Person.objects.delete() for i in range(55): - self.Person(name='A%s' % i, age=i).save() + self.Person(name="A%s" % i, age=i).save() - self.assertEqual(self.Person.objects.count(), 55) - self.assertEqual("Person object", "%s" % self.Person.objects[0]) - self.assertEqual("[, ]", - "%s" % self.Person.objects[1:3]) - self.assertEqual("[, ]", - "%s" % self.Person.objects[51:53]) + assert self.Person.objects.count() == 55 + assert "Person object" == "%s" % self.Person.objects[0] + assert ( + "[, ]" + == "%s" % self.Person.objects[1:3] + ) + assert ( + "[, ]" + == "%s" % self.Person.objects[51:53] + ) def test_find_one(self): """Ensure that a query using find_one returns a valid result. 
@@ -234,61 +282,80 @@ class QuerySetTest(unittest.TestCase): # Retrieve the first person from the database person = self.Person.objects.first() - self.assertIsInstance(person, self.Person) - self.assertEqual(person.name, "User A") - self.assertEqual(person.age, 20) + assert isinstance(person, self.Person) + assert person.name == "User A" + assert person.age == 20 # Use a query to filter the people found to just person2 person = self.Person.objects(age=30).first() - self.assertEqual(person.name, "User B") + assert person.name == "User B" person = self.Person.objects(age__lt=30).first() - self.assertEqual(person.name, "User A") + assert person.name == "User A" # Use array syntax person = self.Person.objects[0] - self.assertEqual(person.name, "User A") + assert person.name == "User A" person = self.Person.objects[1] - self.assertEqual(person.name, "User B") + assert person.name == "User B" - with self.assertRaises(IndexError): + with pytest.raises(IndexError): self.Person.objects[2] # Find a document using just the object id person = self.Person.objects.with_id(person1.id) - self.assertEqual(person.name, "User A") + assert person.name == "User A" - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): self.Person.objects(name="User A").with_id(person1.id) - def test_find_only_one(self): + def test_get_no_document_exists_raises_doesnotexist(self): + assert self.Person.objects.count() == 0 + # Try retrieving when no objects exists + with pytest.raises(DoesNotExist): + self.Person.objects.get() + with pytest.raises(self.Person.DoesNotExist): + self.Person.objects.get() + + def test_get_multiple_match_raises_multipleobjectsreturned(self): """Ensure that a query using ``get`` returns at most one result. 
""" - # Try retrieving when no objects exists - self.assertRaises(DoesNotExist, self.Person.objects.get) - self.assertRaises(self.Person.DoesNotExist, self.Person.objects.get) + assert self.Person.objects().count() == 0 person1 = self.Person(name="User A", age=20) person1.save() - person2 = self.Person(name="User B", age=30) + + p = self.Person.objects.get() + assert p == person1 + + person2 = self.Person(name="User B", age=20) person2.save() - # Retrieve the first person from the database - self.assertRaises(MultipleObjectsReturned, self.Person.objects.get) - self.assertRaises(self.Person.MultipleObjectsReturned, - self.Person.objects.get) + person3 = self.Person(name="User C", age=30) + person3.save() + + # .get called without argument + with pytest.raises(MultipleObjectsReturned): + self.Person.objects.get() + with pytest.raises(self.Person.MultipleObjectsReturned): + self.Person.objects.get() + + # check filtering + with pytest.raises(MultipleObjectsReturned): + self.Person.objects.get(age__lt=30) + with pytest.raises(MultipleObjectsReturned) as exc_info: + self.Person.objects(age__lt=30).get() + assert "2 or more items returned, instead of 1" == str(exc_info.value) # Use a query to filter the people found to just person2 person = self.Person.objects.get(age=30) - self.assertEqual(person.name, "User B") - - person = self.Person.objects.get(age__lt=30) - self.assertEqual(person.name, "User A") + assert person == person3 def test_find_array_position(self): """Ensure that query by array position works. 
""" + class Comment(EmbeddedDocument): name = StringField() @@ -301,35 +368,35 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() - Blog.objects.create(tags=['a', 'b']) - self.assertEqual(Blog.objects(tags__0='a').count(), 1) - self.assertEqual(Blog.objects(tags__0='b').count(), 0) - self.assertEqual(Blog.objects(tags__1='a').count(), 0) - self.assertEqual(Blog.objects(tags__1='b').count(), 1) + Blog.objects.create(tags=["a", "b"]) + assert Blog.objects(tags__0="a").count() == 1 + assert Blog.objects(tags__0="b").count() == 0 + assert Blog.objects(tags__1="a").count() == 0 + assert Blog.objects(tags__1="b").count() == 1 Blog.drop_collection() - comment1 = Comment(name='testa') - comment2 = Comment(name='testb') + comment1 = Comment(name="testa") + comment2 = Comment(name="testb") post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) blog1 = Blog.objects.create(posts=[post1, post2]) blog2 = Blog.objects.create(posts=[post2, post1]) - blog = Blog.objects(posts__0__comments__0__name='testa').get() - self.assertEqual(blog, blog1) + blog = Blog.objects(posts__0__comments__0__name="testa").get() + assert blog == blog1 - blog = Blog.objects(posts__0__comments__0__name='testb').get() - self.assertEqual(blog, blog2) + blog = Blog.objects(posts__0__comments__0__name="testb").get() + assert blog == blog2 - query = Blog.objects(posts__1__comments__1__name='testb') - self.assertEqual(query.count(), 2) + query = Blog.objects(posts__1__comments__1__name="testb") + assert query.count() == 2 - query = Blog.objects(posts__1__comments__1__name='testa') - self.assertEqual(query.count(), 0) + query = Blog.objects(posts__1__comments__1__name="testa") + assert query.count() == 0 - query = Blog.objects(posts__0__comments__1__name='testa') - self.assertEqual(query.count(), 0) + query = Blog.objects(posts__0__comments__1__name="testa") + assert query.count() == 0 Blog.drop_collection() @@ -340,8 +407,11 @@ class 
QuerySetTest(unittest.TestCase): A.drop_collection() A().save() - self.assertEqual(list(A.objects.none()), []) - self.assertEqual(list(A.objects.none().all()), []) + assert list(A.objects.none()) == [] + assert list(A.objects.none().all()) == [] + assert list(A.objects.none().limit(1)) == [] + assert list(A.objects.none().skip(1)) == [] + assert list(A.objects.none()[:5]) == [] def test_chaining(self): class A(Document): @@ -365,15 +435,16 @@ class QuerySetTest(unittest.TestCase): # Doesn't work q2 = B.objects.filter(ref__in=[a1, a2]) q2 = q2.filter(ref=a1)._query - self.assertEqual(q1, q2) + assert q1 == q2 - a_objects = A.objects(s='test1') + a_objects = A.objects(s="test1") query = B.objects(ref__in=a_objects) query = query.filter(boolfield=True) - self.assertEqual(query.count(), 1) + assert query.count() == 1 def test_batch_size(self): """Ensure that batch_size works.""" + class A(Document): s = StringField() @@ -386,7 +457,7 @@ class QuerySetTest(unittest.TestCase): cnt = 0 for a in A.objects.batch_size(10): cnt += 1 - self.assertEqual(cnt, 100) + assert cnt == 100 # test chaining qs = A.objects.all() @@ -394,11 +465,11 @@ class QuerySetTest(unittest.TestCase): cnt = 0 for a in qs: cnt += 1 - self.assertEqual(cnt, 9) + assert cnt == 9 # test invalid batch size qs = A.objects.batch_size(-1) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): list(qs) def test_batch_size_cloned(self): @@ -407,47 +478,47 @@ class QuerySetTest(unittest.TestCase): # test that batch size gets cloned qs = A.objects.batch_size(5) - self.assertEqual(qs._batch_size, 5) + assert qs._batch_size == 5 qs_clone = qs.clone() - self.assertEqual(qs_clone._batch_size, 5) + assert qs_clone._batch_size == 5 def test_update_write_concern(self): """Test that passing write_concern works""" self.Person.drop_collection() write_concern = {"fsync": True} - author = self.Person.objects.create(name='Test User') + author = self.Person.objects.create(name="Test User") 
author.save(write_concern=write_concern) # Ensure no regression of #1958 - author = self.Person(name='Test User2') + author = self.Person(name="Test User2") author.save(write_concern=None) # will default to {w: 1} - result = self.Person.objects.update( - set__name='Ross', write_concern={"w": 1}) + result = self.Person.objects.update(set__name="Ross", write_concern={"w": 1}) - self.assertEqual(result, 2) - result = self.Person.objects.update( - set__name='Ross', write_concern={"w": 0}) - self.assertEqual(result, None) + assert result == 2 + result = self.Person.objects.update(set__name="Ross", write_concern={"w": 0}) + assert result is None result = self.Person.objects.update_one( - set__name='Test User', write_concern={"w": 1}) - self.assertEqual(result, 1) + set__name="Test User", write_concern={"w": 1} + ) + assert result == 1 result = self.Person.objects.update_one( - set__name='Test User', write_concern={"w": 0}) - self.assertEqual(result, None) + set__name="Test User", write_concern={"w": 0} + ) + assert result is None def test_update_update_has_a_value(self): """Test to ensure that update is passed a value to update to""" self.Person.drop_collection() - author = self.Person.objects.create(name='Test User') + author = self.Person.objects.create(name="Test User") - with self.assertRaises(OperationError): + with pytest.raises(OperationError): self.Person.objects(pk=author.pk).update({}) - with self.assertRaises(OperationError): + with pytest.raises(OperationError): self.Person.objects(pk=author.pk).update_one({}) def test_update_array_position(self): @@ -457,6 +528,7 @@ class QuerySetTest(unittest.TestCase): set__posts__1__comments__1__name="testc" Check that it only works for ListFields. 
""" + class Comment(EmbeddedDocument): name = StringField() @@ -469,31 +541,30 @@ class QuerySetTest(unittest.TestCase): Blog.drop_collection() - comment1 = Comment(name='testa') - comment2 = Comment(name='testb') + comment1 = Comment(name="testa") + comment2 = Comment(name="testb") post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) Blog.objects.create(posts=[post1, post2]) Blog.objects.create(posts=[post2, post1]) # Update all of the first comments of second posts of all blogs - Blog.objects().update(set__posts__1__comments__0__name='testc') - testc_blogs = Blog.objects(posts__1__comments__0__name='testc') - self.assertEqual(testc_blogs.count(), 2) + Blog.objects().update(set__posts__1__comments__0__name="testc") + testc_blogs = Blog.objects(posts__1__comments__0__name="testc") + assert testc_blogs.count() == 2 Blog.drop_collection() Blog.objects.create(posts=[post1, post2]) Blog.objects.create(posts=[post2, post1]) # Update only the first blog returned by the query - Blog.objects().update_one( - set__posts__1__comments__1__name='testc') - testc_blogs = Blog.objects(posts__1__comments__1__name='testc') - self.assertEqual(testc_blogs.count(), 1) + Blog.objects().update_one(set__posts__1__comments__1__name="testc") + testc_blogs = Blog.objects(posts__1__comments__1__name="testc") + assert testc_blogs.count() == 1 # Check that using this indexing syntax on a non-list fails - with self.assertRaises(InvalidQueryError): - Blog.objects().update(set__posts__1__comments__0__name__1='asdf') + with pytest.raises(InvalidQueryError): + Blog.objects().update(set__posts__1__comments__0__name__1="asdf") Blog.drop_collection() @@ -519,8 +590,8 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1) post = BlogPost.objects.first() - self.assertEqual(post.comments[1].by, 'jane') - self.assertEqual(post.comments[1].votes, 8) + assert post.comments[1].by == "jane" + assert 
post.comments[1].votes == 8 def test_update_using_positional_operator_matches_first(self): @@ -535,7 +606,7 @@ class QuerySetTest(unittest.TestCase): Simple.objects(x=2).update(inc__x__S=1) simple = Simple.objects.first() - self.assertEqual(simple.x, [1, 3, 3, 2]) + assert simple.x == [1, 3, 3, 2] Simple.drop_collection() # You can set multiples @@ -547,10 +618,10 @@ class QuerySetTest(unittest.TestCase): Simple.objects(x=3).update(set__x__S=0) s = Simple.objects() - self.assertEqual(s[0].x, [1, 2, 0, 4]) - self.assertEqual(s[1].x, [2, 0, 4, 5]) - self.assertEqual(s[2].x, [0, 4, 5, 6]) - self.assertEqual(s[3].x, [4, 5, 6, 7]) + assert s[0].x == [1, 2, 0, 4] + assert s[1].x == [2, 0, 4, 5] + assert s[2].x == [0, 4, 5, 6] + assert s[3].x == [4, 5, 6, 7] # Using "$unset" with an expression like this "array.$" will result in # the array item becoming None, not being removed. @@ -558,14 +629,14 @@ class QuerySetTest(unittest.TestCase): Simple(x=[1, 2, 3, 4, 3, 2, 3, 4]).save() Simple.objects(x=3).update(unset__x__S=1) simple = Simple.objects.first() - self.assertEqual(simple.x, [1, 2, None, 4, 3, 2, 3, 4]) + assert simple.x == [1, 2, None, 4, 3, 2, 3, 4] # Nested updates arent supported yet.. 
- with self.assertRaises(OperationError): + with pytest.raises(OperationError): Simple.drop_collection() - Simple(x=[{'test': [1, 2, 3, 4]}]).save() + Simple(x=[{"test": [1, 2, 3, 4]}]).save() Simple.objects(x__test=2).update(set__x__S__test__S=3) - self.assertEqual(simple.x, [1, 2, 3, 4]) + assert simple.x == [1, 2, 3, 4] def test_update_using_positional_operator_embedded_document(self): """Ensure that the embedded documents can be updated using the positional @@ -590,11 +661,12 @@ class QuerySetTest(unittest.TestCase): BlogPost(title="ABC", comments=[c1, c2]).save() BlogPost.objects(comments__by="joe").update( - set__comments__S__votes=Vote(score=4)) + set__comments__S__votes=Vote(score=4) + ) post = BlogPost.objects.first() - self.assertEqual(post.comments[0].by, 'joe') - self.assertEqual(post.comments[0].votes.score, 4) + assert post.comments[0].by == "joe" + assert post.comments[0].votes.score == 4 def test_update_min_max(self): class Scores(Document): @@ -604,30 +676,29 @@ class QuerySetTest(unittest.TestCase): scores = Scores.objects.create(high_score=800, low_score=200) Scores.objects(id=scores.id).update(min__low_score=150) - self.assertEqual(Scores.objects.get(id=scores.id).low_score, 150) + assert Scores.objects.get(id=scores.id).low_score == 150 Scores.objects(id=scores.id).update(min__low_score=250) - self.assertEqual(Scores.objects.get(id=scores.id).low_score, 150) + assert Scores.objects.get(id=scores.id).low_score == 150 Scores.objects(id=scores.id).update(max__high_score=1000) - self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000) + assert Scores.objects.get(id=scores.id).high_score == 1000 Scores.objects(id=scores.id).update(max__high_score=500) - self.assertEqual(Scores.objects.get(id=scores.id).high_score, 1000) + assert Scores.objects.get(id=scores.id).high_score == 1000 def test_update_multiple(self): class Product(Document): item = StringField() price = FloatField() - product = Product.objects.create(item='ABC', price=10.99) - 
product = Product.objects.create(item='ABC', price=10.99) + product = Product.objects.create(item="ABC", price=10.99) + product = Product.objects.create(item="ABC", price=10.99) Product.objects(id=product.id).update(mul__price=1.25) - self.assertEqual(Product.objects.get(id=product.id).price, 13.7375) - unknown_product = Product.objects.create(item='Unknown') + assert Product.objects.get(id=product.id).price == 13.7375 + unknown_product = Product.objects.create(item="Unknown") Product.objects(id=unknown_product.id).update(mul__price=100) - self.assertEqual(Product.objects.get(id=unknown_product.id).price, 0) + assert Product.objects.get(id=unknown_product.id).price == 0 def test_updates_can_have_match_operators(self): - class Comment(EmbeddedDocument): content = StringField() name = StringField(max_length=120) @@ -643,15 +714,19 @@ class QuerySetTest(unittest.TestCase): comm1 = Comment(content="very funny indeed", name="John S", vote=1) comm2 = Comment(content="kind of funny", name="Mark P", vote=0) - Post(title='Fun with MongoEngine', tags=['mongodb', 'mongoengine'], - comments=[comm1, comm2]).save() + Post( + title="Fun with MongoEngine", + tags=["mongodb", "mongoengine"], + comments=[comm1, comm2], + ).save() Post.objects().update_one(pull__comments__vote__lt=1) - self.assertEqual(1, len(Post.objects.first().comments)) + assert 1 == len(Post.objects.first().comments) def test_mapfield_update(self): """Ensure that the MapField can be updated.""" + class Member(EmbeddedDocument): gender = StringField() age = IntField() @@ -662,50 +737,47 @@ class QuerySetTest(unittest.TestCase): Club.drop_collection() club = Club() - club.members['John'] = Member(gender="M", age=13) + club.members["John"] = Member(gender="M", age=13) club.save() - Club.objects().update( - set__members={"John": Member(gender="F", age=14)}) + Club.objects().update(set__members={"John": Member(gender="F", age=14)}) club = Club.objects().first() - self.assertEqual(club.members['John'].gender, "F") - 
self.assertEqual(club.members['John'].age, 14) + assert club.members["John"].gender == "F" + assert club.members["John"].age == 14 def test_dictfield_update(self): """Ensure that the DictField can be updated.""" + class Club(Document): members = DictField() club = Club() - club.members['John'] = {'gender': 'M', 'age': 13} + club.members["John"] = {"gender": "M", "age": 13} club.save() - Club.objects().update( - set__members={"John": {'gender': 'F', 'age': 14}}) + Club.objects().update(set__members={"John": {"gender": "F", "age": 14}}) club = Club.objects().first() - self.assertEqual(club.members['John']['gender'], "F") - self.assertEqual(club.members['John']['age'], 14) + assert club.members["John"]["gender"] == "F" + assert club.members["John"]["age"] == 14 def test_update_results(self): self.Person.drop_collection() - result = self.Person(name="Bob", age=25).update( - upsert=True, full_result=True) - self.assertIsInstance(result, UpdateResult) - self.assertIn("upserted", result.raw_result) - self.assertFalse(result.raw_result["updatedExisting"]) + result = self.Person(name="Bob", age=25).update(upsert=True, full_result=True) + assert isinstance(result, UpdateResult) + assert "upserted" in result.raw_result + assert not result.raw_result["updatedExisting"] bob = self.Person.objects.first() result = bob.update(set__age=30, full_result=True) - self.assertIsInstance(result, UpdateResult) - self.assertTrue(result.raw_result["updatedExisting"]) + assert isinstance(result, UpdateResult) + assert result.raw_result["updatedExisting"] self.Person(name="Bob", age=20).save() - result = self.Person.objects(name="Bob").update( - set__name="bobby", multi=True) - self.assertEqual(result, 2) + result = self.Person.objects(name="Bob").update(set__name="bobby", multi=True) + assert result == 2 def test_update_validate(self): class EmDoc(EmbeddedDocument): @@ -717,9 +789,12 @@ class QuerySetTest(unittest.TestCase): cdt_f = ComplexDateTimeField() ed_f = EmbeddedDocumentField(EmDoc) - 
self.assertRaises(ValidationError, Doc.objects().update, str_f=1, upsert=True) - self.assertRaises(ValidationError, Doc.objects().update, dt_f="datetime", upsert=True) - self.assertRaises(ValidationError, Doc.objects().update, ed_f__str_f=1, upsert=True) + with pytest.raises(ValidationError): + Doc.objects().update(str_f=1, upsert=True) + with pytest.raises(ValidationError): + Doc.objects().update(dt_f="datetime", upsert=True) + with pytest.raises(ValidationError): + Doc.objects().update(ed_f__str_f=1, upsert=True) def test_update_related_models(self): class TestPerson(Document): @@ -732,72 +807,72 @@ class QuerySetTest(unittest.TestCase): TestPerson.drop_collection() TestOrganization.drop_collection() - p = TestPerson(name='p1') + p = TestPerson(name="p1") p.save() - o = TestOrganization(name='o1') + o = TestOrganization(name="o1") o.save() o.owner = p - p.name = 'p2' + p.name = "p2" - self.assertEqual(o._get_changed_fields(), ['owner']) - self.assertEqual(p._get_changed_fields(), ['name']) + assert o._get_changed_fields() == ["owner"] + assert p._get_changed_fields() == ["name"] o.save() - self.assertEqual(o._get_changed_fields(), []) - self.assertEqual(p._get_changed_fields(), ['name']) # Fails; it's empty + assert o._get_changed_fields() == [] + assert p._get_changed_fields() == ["name"] # Fails; it's empty # This will do NOTHING at all, even though we changed the name p.save() p.reload() - self.assertEqual(p.name, 'p2') # Fails; it's still `p1` + assert p.name == "p2" # Fails; it's still `p1` def test_upsert(self): self.Person.drop_collection() - self.Person.objects( - pk=ObjectId(), name="Bob", age=30).update(upsert=True) + self.Person.objects(pk=ObjectId(), name="Bob", age=30).update(upsert=True) bob = self.Person.objects.first() - self.assertEqual("Bob", bob.name) - self.assertEqual(30, bob.age) + assert "Bob" == bob.name + assert 30 == bob.age def test_upsert_one(self): self.Person.drop_collection() bob = self.Person.objects(name="Bob", 
age=30).upsert_one() - self.assertEqual("Bob", bob.name) - self.assertEqual(30, bob.age) + assert "Bob" == bob.name + assert 30 == bob.age bob.name = "Bobby" bob.save() bobby = self.Person.objects(name="Bobby", age=30).upsert_one() - self.assertEqual("Bobby", bobby.name) - self.assertEqual(30, bobby.age) - self.assertEqual(bob.id, bobby.id) + assert "Bobby" == bobby.name + assert 30 == bobby.age + assert bob.id == bobby.id def test_set_on_insert(self): self.Person.drop_collection() self.Person.objects(pk=ObjectId()).update( - set__name='Bob', set_on_insert__age=30, upsert=True) + set__name="Bob", set_on_insert__age=30, upsert=True + ) bob = self.Person.objects.first() - self.assertEqual("Bob", bob.name) - self.assertEqual(30, bob.age) + assert "Bob" == bob.name + assert 30 == bob.age def test_save_and_only_on_fields_with_default(self): class Embed(EmbeddedDocument): field = IntField() class B(Document): - meta = {'collection': 'b'} + meta = {"collection": "b"} field = IntField(default=1) embed = EmbeddedDocumentField(Embed, default=Embed) @@ -815,22 +890,23 @@ class QuerySetTest(unittest.TestCase): # Checking it was saved correctly record.reload() - self.assertEqual(record.field, 2) - self.assertEqual(record.embed_no_default.field, 2) - self.assertEqual(record.embed.field, 2) + assert record.field == 2 + assert record.embed_no_default.field == 2 + assert record.embed.field == 2 # Request only the _id field and save - clone = B.objects().only('id').first() + clone = B.objects().only("id").first() clone.save() # Reload the record and see that the embed data is not lost record.reload() - self.assertEqual(record.field, 2) - self.assertEqual(record.embed_no_default.field, 2) - self.assertEqual(record.embed.field, 2) + assert record.field == 2 + assert record.embed_no_default.field == 2 + assert record.embed.field == 2 def test_bulk_insert(self): """Ensure that bulk insert works""" + class Comment(EmbeddedDocument): name = StringField() @@ -845,64 +921,62 @@ class 
QuerySetTest(unittest.TestCase): Blog.drop_collection() # Recreates the collection - self.assertEqual(0, Blog.objects.count()) + assert 0 == Blog.objects.count() - comment1 = Comment(name='testa') - comment2 = Comment(name='testb') + comment1 = Comment(name="testa") + comment2 = Comment(name="testb") post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) # Check bulk insert using load_bulk=False - blogs = [Blog(title="%s" % i, posts=[post1, post2]) - for i in range(99)] + blogs = [Blog(title="%s" % i, posts=[post1, post2]) for i in range(99)] with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 Blog.objects.insert(blogs, load_bulk=False) - self.assertEqual(q, 1) # 1 entry containing the list of inserts + assert q == 1 # 1 entry containing the list of inserts - self.assertEqual(Blog.objects.count(), len(blogs)) + assert Blog.objects.count() == len(blogs) Blog.drop_collection() Blog.ensure_indexes() # Check bulk insert using load_bulk=True - blogs = [Blog(title="%s" % i, posts=[post1, post2]) - for i in range(99)] + blogs = [Blog(title="%s" % i, posts=[post1, post2]) for i in range(99)] with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 Blog.objects.insert(blogs) - self.assertEqual(q, 2) # 1 for insert 1 for fetch + assert q == 2 # 1 for insert 1 for fetch Blog.drop_collection() - comment1 = Comment(name='testa') - comment2 = Comment(name='testb') + comment1 = Comment(name="testa") + comment2 = Comment(name="testb") post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) blog1 = Blog(title="code", posts=[post1, post2]) blog2 = Blog(title="mongodb", posts=[post2, post1]) blog1, blog2 = Blog.objects.insert([blog1, blog2]) - self.assertEqual(blog1.title, "code") - self.assertEqual(blog2.title, "mongodb") + assert blog1.title == "code" + assert blog2.title == "mongodb" - self.assertEqual(Blog.objects.count(), 2) + assert Blog.objects.count() == 2 # test inserting an existing 
document (shouldn't be allowed) - with self.assertRaises(OperationError) as cm: + with pytest.raises(OperationError) as exc_info: blog = Blog.objects.first() Blog.objects.insert(blog) - self.assertEqual( - str(cm.exception), - 'Some documents have ObjectIds, use doc.update() instead' + assert ( + str(exc_info.value) + == "Some documents have ObjectIds, use doc.update() instead" ) # test inserting a query set - with self.assertRaises(OperationError) as cm: + with pytest.raises(OperationError) as exc_info: blogs_qs = Blog.objects Blog.objects.insert(blogs_qs) - self.assertEqual( - str(cm.exception), - 'Some documents have ObjectIds, use doc.update() instead' + assert ( + str(exc_info.value) + == "Some documents have ObjectIds, use doc.update() instead" ) # insert 1 new doc @@ -913,13 +987,13 @@ class QuerySetTest(unittest.TestCase): blog1 = Blog(title="code", posts=[post1, post2]) blog1 = Blog.objects.insert(blog1) - self.assertEqual(blog1.title, "code") - self.assertEqual(Blog.objects.count(), 1) + assert blog1.title == "code" + assert Blog.objects.count() == 1 Blog.drop_collection() blog1 = Blog(title="code", posts=[post1, post2]) obj_id = Blog.objects.insert(blog1, load_bulk=False) - self.assertIsInstance(obj_id, ObjectId) + assert isinstance(obj_id, ObjectId) Blog.drop_collection() post3 = Post(comments=[comment1, comment1]) @@ -927,10 +1001,10 @@ class QuerySetTest(unittest.TestCase): blog2 = Blog(title="bar", posts=[post2, post3]) Blog.objects.insert([blog1, blog2]) - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): Blog.objects.insert(Blog(title=blog2.title)) - self.assertEqual(Blog.objects.count(), 2) + assert Blog.objects.count() == 2 def test_bulk_insert_different_class_fails(self): class Blog(Document): @@ -940,7 +1014,7 @@ class QuerySetTest(unittest.TestCase): pass # try inserting a different document class - with self.assertRaises(OperationError): + with pytest.raises(OperationError): Blog.objects.insert(Author()) def 
test_bulk_insert_with_wrong_type(self): @@ -948,13 +1022,13 @@ class QuerySetTest(unittest.TestCase): name = StringField() Blog.drop_collection() - Blog(name='test').save() + Blog(name="test").save() - with self.assertRaises(OperationError): + with pytest.raises(OperationError): Blog.objects.insert("HELLO WORLD") - with self.assertRaises(OperationError): - Blog.objects.insert({'name': 'garbage'}) + with pytest.raises(OperationError): + Blog.objects.insert({"name": "garbage"}) def test_bulk_insert_update_input_document_ids(self): class Comment(Document): @@ -965,23 +1039,23 @@ class QuerySetTest(unittest.TestCase): # Test with bulk comments = [Comment(idx=idx) for idx in range(20)] for com in comments: - self.assertIsNone(com.id) + assert com.id is None returned_comments = Comment.objects.insert(comments, load_bulk=True) for com in comments: - self.assertIsInstance(com.id, ObjectId) + assert isinstance(com.id, ObjectId) input_mapping = {com.id: com.idx for com in comments} saved_mapping = {com.id: com.idx for com in returned_comments} - self.assertEqual(input_mapping, saved_mapping) + assert input_mapping == saved_mapping Comment.drop_collection() # Test with just one comment = Comment(idx=0) inserted_comment_id = Comment.objects.insert(comment, load_bulk=False) - self.assertEqual(comment.id, inserted_comment_id) + assert comment.id == inserted_comment_id def test_bulk_insert_accepts_doc_with_ids(self): class Comment(Document): @@ -1003,17 +1077,18 @@ class QuerySetTest(unittest.TestCase): Comment.objects.insert(com1) - with self.assertRaises(NotUniqueError): + with pytest.raises(NotUniqueError): Comment.objects.insert(com1) def test_get_changed_fields_query_count(self): """Make sure we don't perform unnecessary db operations when none of document's fields were updated. 
""" + class Person(Document): name = StringField() - owns = ListField(ReferenceField('Organization')) - projects = ListField(ReferenceField('Project')) + owns = ListField(ReferenceField("Organization")) + projects = ListField(ReferenceField("Project")) class Organization(Document): name = StringField() @@ -1035,28 +1110,28 @@ class QuerySetTest(unittest.TestCase): o1 = Organization(name="o1", employees=[p1]).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 # Fetching a document should result in a query. org = Organization.objects.get(id=o1.id) - self.assertEqual(q, 1) + assert q == 1 # Checking changed fields of a newly fetched document should not # result in a query. org._get_changed_fields() - self.assertEqual(q, 1) + assert q == 1 # Saving a doc without changing any of its fields should not result # in a query (with or without cascade=False). org = Organization.objects.get(id=o1.id) with query_counter() as q: org.save() - self.assertEqual(q, 0) + assert q == 0 org = Organization.objects.get(id=o1.id) with query_counter() as q: org.save(cascade=False) - self.assertEqual(q, 0) + assert q == 0 # Saving a doc after you append a reference to it should result in # two db operations (a query for the reference and an update). @@ -1065,13 +1140,13 @@ class QuerySetTest(unittest.TestCase): with query_counter() as q: org.employees.append(p2) # dereferences p2 org.save() # saves the org - self.assertEqual(q, 2) + assert q == 2 def test_repeated_iteration(self): """Ensure that QuerySet rewinds itself one iteration finishes. 
""" - self.Person(name='Person 1').save() - self.Person(name='Person 2').save() + self.Person(name="Person 1").save() + self.Person(name="Person 2").save() queryset = self.Person.objects people1 = [person for person in queryset] @@ -1082,8 +1157,8 @@ class QuerySetTest(unittest.TestCase): break people3 = [person for person in queryset] - self.assertEqual(people1, people2) - self.assertEqual(people1, people3) + assert people1 == people2 + assert people1 == people3 def test_repr(self): """Test repr behavior isnt destructive""" @@ -1099,96 +1174,97 @@ class QuerySetTest(unittest.TestCase): for i in range(1000): Doc(number=i).save() - docs = Doc.objects.order_by('number') + docs = Doc.objects.order_by("number") - self.assertEqual(docs.count(), 1000) + assert docs.count() == 1000 docs_string = "%s" % docs - self.assertIn("Doc: 0", docs_string) + assert "Doc: 0" in docs_string - self.assertEqual(docs.count(), 1000) - self.assertIn('(remaining elements truncated)', "%s" % docs) + assert docs.count() == 1000 + assert "(remaining elements truncated)" in "%s" % docs # Limit and skip docs = docs[1:4] - self.assertEqual('[, , ]', "%s" % docs) + assert "[, , ]" == "%s" % docs - self.assertEqual(docs.count(with_limit_and_skip=True), 3) + assert docs.count(with_limit_and_skip=True) == 3 for doc in docs: - self.assertEqual('.. queryset mid-iteration ..', repr(docs)) + assert ".. queryset mid-iteration .." == repr(docs) def test_regex_query_shortcuts(self): """Ensure that contains, startswith, endswith, etc work. 
""" - person = self.Person(name='Guido van Rossum') + person = self.Person(name="Guido van Rossum") person.save() # Test contains - obj = self.Person.objects(name__contains='van').first() - self.assertEqual(obj, person) - obj = self.Person.objects(name__contains='Van').first() - self.assertEqual(obj, None) + obj = self.Person.objects(name__contains="van").first() + assert obj == person + obj = self.Person.objects(name__contains="Van").first() + assert obj is None # Test icontains - obj = self.Person.objects(name__icontains='Van').first() - self.assertEqual(obj, person) + obj = self.Person.objects(name__icontains="Van").first() + assert obj == person # Test startswith - obj = self.Person.objects(name__startswith='Guido').first() - self.assertEqual(obj, person) - obj = self.Person.objects(name__startswith='guido').first() - self.assertEqual(obj, None) + obj = self.Person.objects(name__startswith="Guido").first() + assert obj == person + obj = self.Person.objects(name__startswith="guido").first() + assert obj is None # Test istartswith - obj = self.Person.objects(name__istartswith='guido').first() - self.assertEqual(obj, person) + obj = self.Person.objects(name__istartswith="guido").first() + assert obj == person # Test endswith - obj = self.Person.objects(name__endswith='Rossum').first() - self.assertEqual(obj, person) - obj = self.Person.objects(name__endswith='rossuM').first() - self.assertEqual(obj, None) + obj = self.Person.objects(name__endswith="Rossum").first() + assert obj == person + obj = self.Person.objects(name__endswith="rossuM").first() + assert obj is None # Test iendswith - obj = self.Person.objects(name__iendswith='rossuM').first() - self.assertEqual(obj, person) + obj = self.Person.objects(name__iendswith="rossuM").first() + assert obj == person # Test exact - obj = self.Person.objects(name__exact='Guido van Rossum').first() - self.assertEqual(obj, person) - obj = self.Person.objects(name__exact='Guido van rossum').first() - self.assertEqual(obj, 
None) - obj = self.Person.objects(name__exact='Guido van Rossu').first() - self.assertEqual(obj, None) + obj = self.Person.objects(name__exact="Guido van Rossum").first() + assert obj == person + obj = self.Person.objects(name__exact="Guido van rossum").first() + assert obj is None + obj = self.Person.objects(name__exact="Guido van Rossu").first() + assert obj is None # Test iexact - obj = self.Person.objects(name__iexact='gUIDO VAN rOSSUM').first() - self.assertEqual(obj, person) - obj = self.Person.objects(name__iexact='gUIDO VAN rOSSU').first() - self.assertEqual(obj, None) + obj = self.Person.objects(name__iexact="gUIDO VAN rOSSUM").first() + assert obj == person + obj = self.Person.objects(name__iexact="gUIDO VAN rOSSU").first() + assert obj is None # Test unsafe expressions - person = self.Person(name='Guido van Rossum [.\'Geek\']') + person = self.Person(name="Guido van Rossum [.'Geek']") person.save() - obj = self.Person.objects(name__icontains='[.\'Geek').first() - self.assertEqual(obj, person) + obj = self.Person.objects(name__icontains="[.'Geek").first() + assert obj == person def test_not(self): """Ensure that the __not operator works as expected. """ - alice = self.Person(name='Alice', age=25) + alice = self.Person(name="Alice", age=25) alice.save() - obj = self.Person.objects(name__iexact='alice').first() - self.assertEqual(obj, alice) + obj = self.Person.objects(name__iexact="alice").first() + assert obj == alice - obj = self.Person.objects(name__not__iexact='alice').first() - self.assertEqual(obj, None) + obj = self.Person.objects(name__not__iexact="alice").first() + assert obj is None def test_filter_chaining(self): """Ensure filters can be chained together. 
""" + class Blog(Document): id = StringField(primary_key=True) @@ -1217,74 +1293,71 @@ class QuerySetTest(unittest.TestCase): blog=blog_1, title="Blog Post #1", is_published=True, - published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + published_date=datetime.datetime(2010, 1, 5, 0, 0, 0), ) BlogPost.objects.create( blog=blog_2, title="Blog Post #2", is_published=True, - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + published_date=datetime.datetime(2010, 1, 6, 0, 0, 0), ) BlogPost.objects.create( blog=blog_3, title="Blog Post #3", is_published=True, - published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + published_date=datetime.datetime(2010, 1, 7, 0, 0, 0), ) # find all published blog posts before 2010-01-07 published_posts = BlogPost.published() published_posts = published_posts.filter( - published_date__lt=datetime.datetime(2010, 1, 7, 0, 0, 0)) - self.assertEqual(published_posts.count(), 2) + published_date__lt=datetime.datetime(2010, 1, 7, 0, 0, 0) + ) + assert published_posts.count() == 2 blog_posts = BlogPost.objects blog_posts = blog_posts.filter(blog__in=[blog_1, blog_2]) blog_posts = blog_posts.filter(blog=blog_3) - self.assertEqual(blog_posts.count(), 0) + assert blog_posts.count() == 0 BlogPost.drop_collection() Blog.drop_collection() def test_filter_chaining_with_regex(self): - person = self.Person(name='Guido van Rossum') + person = self.Person(name="Guido van Rossum") person.save() people = self.Person.objects - people = people.filter(name__startswith='Gui').filter(name__not__endswith='tum') - self.assertEqual(people.count(), 1) + people = people.filter(name__startswith="Gui").filter(name__not__endswith="tum") + assert people.count() == 1 def assertSequence(self, qs, expected): qs = list(qs) expected = list(expected) - self.assertEqual(len(qs), len(expected)) + assert len(qs) == len(expected) for i in range(len(qs)): - self.assertEqual(qs[i], expected[i]) + assert qs[i] == expected[i] def test_ordering(self): """Ensure default ordering 
is applied and can be overridden. """ + class BlogPost(Document): title = StringField() published_date = DateTimeField() - meta = { - 'ordering': ['-published_date'] - } + meta = {"ordering": ["-published_date"]} BlogPost.drop_collection() blog_post_1 = BlogPost.objects.create( - title="Blog Post #1", - published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + title="Blog Post #1", published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) ) blog_post_2 = BlogPost.objects.create( - title="Blog Post #2", - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + title="Blog Post #2", published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) ) blog_post_3 = BlogPost.objects.create( - title="Blog Post #3", - published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + title="Blog Post #3", published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) ) # get the "first" BlogPost using default ordering @@ -1307,42 +1380,34 @@ class QuerySetTest(unittest.TestCase): title = StringField() published_date = DateTimeField() - meta = { - 'ordering': ['-published_date'] - } + meta = {"ordering": ["-published_date"]} BlogPost.drop_collection() # default ordering should be used by default with db_ops_tracker() as q: - BlogPost.objects.filter(title='whatever').first() - self.assertEqual(len(q.get_ops()), 1) - self.assertEqual( - q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY], - {'published_date': -1} - ) + BlogPost.objects.filter(title="whatever").first() + assert len(q.get_ops()) == 1 + assert q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY] == {"published_date": -1} # calling order_by() should clear the default ordering with db_ops_tracker() as q: - BlogPost.objects.filter(title='whatever').order_by().first() - self.assertEqual(len(q.get_ops()), 1) - self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) + BlogPost.objects.filter(title="whatever").order_by().first() + assert len(q.get_ops()) == 1 + assert ORDER_BY_KEY not in q.get_ops()[0][CMD_QUERY_KEY] # calling an explicit order_by should use a 
specified sort with db_ops_tracker() as q: - BlogPost.objects.filter(title='whatever').order_by('published_date').first() - self.assertEqual(len(q.get_ops()), 1) - self.assertEqual( - q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY], - {'published_date': 1} - ) + BlogPost.objects.filter(title="whatever").order_by("published_date").first() + assert len(q.get_ops()) == 1 + assert q.get_ops()[0][CMD_QUERY_KEY][ORDER_BY_KEY] == {"published_date": 1} # calling order_by() after an explicit sort should clear it with db_ops_tracker() as q: - qs = BlogPost.objects.filter(title='whatever').order_by('published_date') + qs = BlogPost.objects.filter(title="whatever").order_by("published_date") qs.order_by().first() - self.assertEqual(len(q.get_ops()), 1) - self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) + assert len(q.get_ops()) == 1 + assert ORDER_BY_KEY not in q.get_ops()[0][CMD_QUERY_KEY] def test_no_ordering_for_get(self): """ Ensure that Doc.objects.get doesn't use any ordering. @@ -1353,28 +1418,28 @@ class QuerySetTest(unittest.TestCase): title = StringField() published_date = DateTimeField() - meta = { - 'ordering': ['-published_date'] - } + meta = {"ordering": ["-published_date"]} BlogPost.objects.create( - title='whatever', published_date=datetime.datetime.utcnow()) + title="whatever", published_date=datetime.datetime.utcnow() + ) with db_ops_tracker() as q: - BlogPost.objects.get(title='whatever') - self.assertEqual(len(q.get_ops()), 1) - self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) + BlogPost.objects.get(title="whatever") + assert len(q.get_ops()) == 1 + assert ORDER_BY_KEY not in q.get_ops()[0][CMD_QUERY_KEY] # Ordering should be ignored for .get even if we set it explicitly with db_ops_tracker() as q: - BlogPost.objects.order_by('-title').get(title='whatever') - self.assertEqual(len(q.get_ops()), 1) - self.assertNotIn(ORDER_BY_KEY, q.get_ops()[0][CMD_QUERY_KEY]) + BlogPost.objects.order_by("-title").get(title="whatever") + assert 
len(q.get_ops()) == 1 + assert ORDER_BY_KEY not in q.get_ops()[0][CMD_QUERY_KEY] def test_find_embedded(self): """Ensure that an embedded document is properly returned from different manners of querying. """ + class User(EmbeddedDocument): name = StringField() @@ -1384,31 +1449,29 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - user = User(name='Test User') - BlogPost.objects.create( - author=user, - content='Had a good coffee today...' - ) + user = User(name="Test User") + BlogPost.objects.create(author=user, content="Had a good coffee today...") result = BlogPost.objects.first() - self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, 'Test User') + assert isinstance(result.author, User) + assert result.author.name == "Test User" result = BlogPost.objects.get(author__name=user.name) - self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, 'Test User') + assert isinstance(result.author, User) + assert result.author.name == "Test User" - result = BlogPost.objects.get(author={'name': user.name}) - self.assertIsInstance(result.author, User) - self.assertEqual(result.author.name, 'Test User') + result = BlogPost.objects.get(author={"name": user.name}) + assert isinstance(result.author, User) + assert result.author.name == "Test User" # Fails, since the string is not a type that is able to represent the # author's document structure (should be dict) - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): BlogPost.objects.get(author=user.name) def test_find_empty_embedded(self): """Ensure that you can save and find an empty embedded document.""" + class User(EmbeddedDocument): name = StringField() @@ -1418,30 +1481,32 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - BlogPost.objects.create(content='Anonymous post...') + BlogPost.objects.create(content="Anonymous post...") result = BlogPost.objects.get(author=None) - 
self.assertEqual(result.author, None) + assert result.author is None def test_find_dict_item(self): """Ensure that DictField items may be found. """ + class BlogPost(Document): info = DictField() BlogPost.drop_collection() - post = BlogPost(info={'title': 'test'}) + post = BlogPost(info={"title": "test"}) post.save() - post_obj = BlogPost.objects(info__title='test').first() - self.assertEqual(post_obj.id, post.id) + post_obj = BlogPost.objects(info__title="test").first() + assert post_obj.id == post.id BlogPost.drop_collection() def test_exec_js_query(self): """Ensure that queries are properly formed for use in exec_js. """ + class BlogPost(Document): hits = IntField() published = BooleanField() @@ -1468,33 +1533,33 @@ class QuerySetTest(unittest.TestCase): """ # Ensure that normal queries work - c = BlogPost.objects(published=True).exec_js(js_func, 'hits') - self.assertEqual(c, 2) + c = BlogPost.objects(published=True).exec_js(js_func, "hits") + assert c == 2 - c = BlogPost.objects(published=False).exec_js(js_func, 'hits') - self.assertEqual(c, 1) + c = BlogPost.objects(published=False).exec_js(js_func, "hits") + assert c == 1 BlogPost.drop_collection() def test_exec_js_field_sub(self): """Ensure that field substitutions occur properly in exec_js functions. 
""" + class Comment(EmbeddedDocument): - content = StringField(db_field='body') + content = StringField(db_field="body") class BlogPost(Document): - name = StringField(db_field='doc-name') - comments = ListField(EmbeddedDocumentField(Comment), - db_field='cmnts') + name = StringField(db_field="doc-name") + comments = ListField(EmbeddedDocumentField(Comment), db_field="cmnts") BlogPost.drop_collection() - comments1 = [Comment(content='cool'), Comment(content='yay')] - post1 = BlogPost(name='post1', comments=comments1) + comments1 = [Comment(content="cool"), Comment(content="yay")] + post1 = BlogPost(name="post1", comments=comments1) post1.save() - comments2 = [Comment(content='nice stuff')] - post2 = BlogPost(name='post2', comments=comments2) + comments2 = [Comment(content="nice stuff")] + post2 = BlogPost(name="post2", comments=comments2) post2.save() code = """ @@ -1514,23 +1579,22 @@ class QuerySetTest(unittest.TestCase): """ sub_code = BlogPost.objects._sub_js_fields(code) - code_chunks = ['doc["cmnts"];', 'doc["doc-name"],', - 'doc["cmnts"][i]["body"]'] + code_chunks = ['doc["cmnts"];', 'doc["doc-name"],', 'doc["cmnts"][i]["body"]'] for chunk in code_chunks: - self.assertIn(chunk, sub_code) + assert chunk in sub_code results = BlogPost.objects.exec_js(code) expected_results = [ - {u'comment': u'cool', u'document': u'post1'}, - {u'comment': u'yay', u'document': u'post1'}, - {u'comment': u'nice stuff', u'document': u'post2'}, + {u"comment": u"cool", u"document": u"post1"}, + {u"comment": u"yay", u"document": u"post1"}, + {u"comment": u"nice stuff", u"document": u"post2"}, ] - self.assertEqual(results, expected_results) + assert results == expected_results # Test template style code = "{{~comments.content}}" sub_code = BlogPost.objects._sub_js_fields(code) - self.assertEqual("cmnts.body", sub_code) + assert "cmnts.body" == sub_code BlogPost.drop_collection() @@ -1541,66 +1605,71 @@ class QuerySetTest(unittest.TestCase): self.Person(name="User B", age=30).save() 
self.Person(name="User C", age=40).save() - self.assertEqual(self.Person.objects.count(), 3) + assert self.Person.objects.count() == 3 self.Person.objects(age__lt=30).delete() - self.assertEqual(self.Person.objects.count(), 2) + assert self.Person.objects.count() == 2 self.Person.objects.delete() - self.assertEqual(self.Person.objects.count(), 0) + assert self.Person.objects.count() == 0 def test_reverse_delete_rule_cascade(self): """Ensure cascading deletion of referring documents from the database. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - BlogPost(content='Watching TV', author=me).save() - BlogPost(content='Chilling out', author=me).save() - BlogPost(content='Pro Testing', author=someoneelse).save() + BlogPost(content="Watching TV", author=me).save() + BlogPost(content="Chilling out", author=me).save() + BlogPost(content="Pro Testing", author=someoneelse).save() - self.assertEqual(3, BlogPost.objects.count()) - self.Person.objects(name='Test User').delete() - self.assertEqual(1, BlogPost.objects.count()) + assert 3 == BlogPost.objects.count() + self.Person.objects(name="Test User").delete() + assert 1 == BlogPost.objects.count() def test_reverse_delete_rule_cascade_on_abstract_document(self): """Ensure cascading deletion of referring documents from the database does not fail on abstract document. 
""" + class AbstractBlogPost(Document): - meta = {'abstract': True} + meta = {"abstract": True} author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) class BlogPost(AbstractBlogPost): content = StringField() + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - BlogPost(content='Watching TV', author=me).save() - BlogPost(content='Chilling out', author=me).save() - BlogPost(content='Pro Testing', author=someoneelse).save() + BlogPost(content="Watching TV", author=me).save() + BlogPost(content="Chilling out", author=me).save() + BlogPost(content="Pro Testing", author=someoneelse).save() - self.assertEqual(3, BlogPost.objects.count()) - self.Person.objects(name='Test User').delete() - self.assertEqual(1, BlogPost.objects.count()) + assert 3 == BlogPost.objects.count() + self.Person.objects(name="Test User").delete() + assert 1 == BlogPost.objects.count() def test_reverse_delete_rule_cascade_cycle(self): """Ensure reference cascading doesn't loop if reference graph isn't a tree """ + class Dummy(Document): - reference = ReferenceField('self', reverse_delete_rule=CASCADE) + reference = ReferenceField("self", reverse_delete_rule=CASCADE) base = Dummy().save() other = Dummy(reference=base).save() @@ -1609,21 +1678,24 @@ class QuerySetTest(unittest.TestCase): base.delete() - self.assertRaises(DoesNotExist, base.reload) - self.assertRaises(DoesNotExist, other.reload) + with pytest.raises(DoesNotExist): + base.reload() + with pytest.raises(DoesNotExist): + other.reload() def test_reverse_delete_rule_cascade_complex_cycle(self): """Ensure reference cascading doesn't loop if reference graph isn't a tree """ + class Category(Document): name = StringField() class Dummy(Document): - reference = ReferenceField('self', reverse_delete_rule=CASCADE) + reference = ReferenceField("self", 
reverse_delete_rule=CASCADE) cat = ReferenceField(Category, reverse_delete_rule=CASCADE) - cat = Category(name='cat').save() + cat = Category(name="cat").save() base = Dummy(cat=cat).save() other = Dummy(reference=base).save() other2 = Dummy(reference=other).save() @@ -1632,47 +1704,52 @@ class QuerySetTest(unittest.TestCase): cat.delete() - self.assertRaises(DoesNotExist, base.reload) - self.assertRaises(DoesNotExist, other.reload) - self.assertRaises(DoesNotExist, other2.reload) + with pytest.raises(DoesNotExist): + base.reload() + with pytest.raises(DoesNotExist): + other.reload() + with pytest.raises(DoesNotExist): + other2.reload() def test_reverse_delete_rule_cascade_self_referencing(self): """Ensure self-referencing CASCADE deletes do not result in infinite loop """ + class Category(Document): name = StringField() - parent = ReferenceField('self', reverse_delete_rule=CASCADE) + parent = ReferenceField("self", reverse_delete_rule=CASCADE) Category.drop_collection() num_children = 3 - base = Category(name='Root') + base = Category(name="Root") base.save() # Create a simple parent-child tree for i in range(num_children): - child_name = 'Child-%i' % i + child_name = "Child-%i" % i child = Category(name=child_name, parent=base) child.save() for i in range(num_children): - child_child_name = 'Child-Child-%i' % i + child_child_name = "Child-Child-%i" % i child_child = Category(name=child_child_name, parent=child) child_child.save() tree_size = 1 + num_children + (num_children * num_children) - self.assertEqual(tree_size, Category.objects.count()) - self.assertEqual(num_children, Category.objects(parent=base).count()) + assert tree_size == Category.objects.count() + assert num_children == Category.objects(parent=base).count() # The delete should effectively wipe out the Category collection # without resulting in infinite parent-child cascade recursion base.delete() - self.assertEqual(0, Category.objects.count()) + assert 0 == Category.objects.count() def 
test_reverse_delete_rule_nullify(self): """Ensure nullification of references to deleted documents. """ + class Category(Document): name = StringField() @@ -1683,47 +1760,50 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() Category.drop_collection() - lameness = Category(name='Lameness') + lameness = Category(name="Lameness") lameness.save() - post = BlogPost(content='Watching TV', category=lameness) + post = BlogPost(content="Watching TV", category=lameness) post.save() - self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual('Lameness', BlogPost.objects.first().category.name) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.first().category.name == "Lameness" Category.objects.delete() - self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual(None, BlogPost.objects.first().category) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.first().category is None def test_reverse_delete_rule_nullify_on_abstract_document(self): """Ensure nullification of references to deleted documents when reference is on an abstract document. 
""" + class AbstractBlogPost(Document): - meta = {'abstract': True} + meta = {"abstract": True} author = ReferenceField(self.Person, reverse_delete_rule=NULLIFY) class BlogPost(AbstractBlogPost): content = StringField() + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - BlogPost(content='Watching TV', author=me).save() + BlogPost(content="Watching TV", author=me).save() - self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual(me, BlogPost.objects.first().author) - self.Person.objects(name='Test User').delete() - self.assertEqual(1, BlogPost.objects.count()) - self.assertEqual(None, BlogPost.objects.first().author) + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.first().author == me + self.Person.objects(name="Test User").delete() + assert BlogPost.objects.count() == 1 + assert BlogPost.objects.first().author is None def test_reverse_delete_rule_deny(self): """Ensure deletion gets denied on documents that still have references to them. """ + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=DENY) @@ -1731,72 +1811,76 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - post = BlogPost(content='Watching TV', author=me) + post = BlogPost(content="Watching TV", author=me) post.save() - self.assertRaises(OperationError, self.Person.objects.delete) + with pytest.raises(OperationError): + self.Person.objects.delete() def test_reverse_delete_rule_deny_on_abstract_document(self): """Ensure deletion gets denied on documents that still have references to them, when reference is on an abstract document. 
""" + class AbstractBlogPost(Document): - meta = {'abstract': True} + meta = {"abstract": True} author = ReferenceField(self.Person, reverse_delete_rule=DENY) class BlogPost(AbstractBlogPost): content = StringField() + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - BlogPost(content='Watching TV', author=me).save() + BlogPost(content="Watching TV", author=me).save() - self.assertEqual(1, BlogPost.objects.count()) - self.assertRaises(OperationError, self.Person.objects.delete) + assert 1 == BlogPost.objects.count() + with pytest.raises(OperationError): + self.Person.objects.delete() def test_reverse_delete_rule_pull(self): """Ensure pulling of references to deleted documents. """ + class BlogPost(Document): content = StringField() - authors = ListField(ReferenceField(self.Person, - reverse_delete_rule=PULL)) + authors = ListField(ReferenceField(self.Person, reverse_delete_rule=PULL)) BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - post = BlogPost(content='Watching TV', authors=[me, someoneelse]) + post = BlogPost(content="Watching TV", authors=[me, someoneelse]) post.save() - another = BlogPost(content='Chilling Out', authors=[someoneelse]) + another = BlogPost(content="Chilling Out", authors=[someoneelse]) another.save() someoneelse.delete() post.reload() another.reload() - self.assertEqual(post.authors, [me]) - self.assertEqual(another.authors, []) + assert post.authors == [me] + assert another.authors == [] def test_reverse_delete_rule_pull_on_abstract_documents(self): """Ensure pulling of references to deleted documents when reference is defined on an abstract document.. 
""" + class AbstractBlogPost(Document): - meta = {'abstract': True} - authors = ListField(ReferenceField(self.Person, - reverse_delete_rule=PULL)) + meta = {"abstract": True} + authors = ListField(ReferenceField(self.Person, reverse_delete_rule=PULL)) class BlogPost(AbstractBlogPost): content = StringField() @@ -1804,27 +1888,26 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - post = BlogPost(content='Watching TV', authors=[me, someoneelse]) + post = BlogPost(content="Watching TV", authors=[me, someoneelse]) post.save() - another = BlogPost(content='Chilling Out', authors=[someoneelse]) + another = BlogPost(content="Chilling Out", authors=[someoneelse]) another.save() someoneelse.delete() post.reload() another.reload() - self.assertEqual(post.authors, [me]) - self.assertEqual(another.authors, []) + assert post.authors == [me] + assert another.authors == [] def test_delete_with_limits(self): - class Log(Document): pass @@ -1834,38 +1917,45 @@ class QuerySetTest(unittest.TestCase): Log().save() Log.objects()[3:5].delete() - self.assertEqual(8, Log.objects.count()) + assert 8 == Log.objects.count() def test_delete_with_limit_handles_delete_rules(self): """Ensure cascading deletion of referring documents from the database. 
""" + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) + BlogPost.drop_collection() - me = self.Person(name='Test User') + me = self.Person(name="Test User") me.save() - someoneelse = self.Person(name='Some-one Else') + someoneelse = self.Person(name="Some-one Else") someoneelse.save() - BlogPost(content='Watching TV', author=me).save() - BlogPost(content='Chilling out', author=me).save() - BlogPost(content='Pro Testing', author=someoneelse).save() + BlogPost(content="Watching TV", author=me).save() + BlogPost(content="Chilling out", author=me).save() + BlogPost(content="Pro Testing", author=someoneelse).save() - self.assertEqual(3, BlogPost.objects.count()) + assert 3 == BlogPost.objects.count() self.Person.objects()[:1].delete() - self.assertEqual(1, BlogPost.objects.count()) + assert 1 == BlogPost.objects.count() - def test_limit_with_write_concern_0(self): + def test_delete_edge_case_with_write_concern_0_return_None(self): + """Return None if the delete operation is unacknowledged. + If we use an unack'd write concern, we don't really know how many + documents have been deleted. + """ p1 = self.Person(name="User Z", age=20).save() del_result = p1.delete(w=0) - self.assertEqual(None, del_result) + assert del_result is None def test_reference_field_find(self): """Ensure cascading deletion of referring documents from the database. 
""" + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person) @@ -1873,21 +1963,21 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User').save() + me = self.Person(name="Test User").save() BlogPost(content="test 123", author=me).save() - self.assertEqual(1, BlogPost.objects(author=me).count()) - self.assertEqual(1, BlogPost.objects(author=me.pk).count()) - self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count()) + assert 1 == BlogPost.objects(author=me).count() + assert 1 == BlogPost.objects(author=me.pk).count() + assert 1 == BlogPost.objects(author="%s" % me.pk).count() - self.assertEqual(1, BlogPost.objects(author__in=[me]).count()) - self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count()) - self.assertEqual( - 1, BlogPost.objects(author__in=["%s" % me.pk]).count()) + assert 1 == BlogPost.objects(author__in=[me]).count() + assert 1 == BlogPost.objects(author__in=[me.pk]).count() + assert 1 == BlogPost.objects(author__in=["%s" % me.pk]).count() def test_reference_field_find_dbref(self): """Ensure cascading deletion of referring documents from the database. 
""" + class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, dbref=True) @@ -1895,17 +1985,16 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() self.Person.drop_collection() - me = self.Person(name='Test User').save() + me = self.Person(name="Test User").save() BlogPost(content="test 123", author=me).save() - self.assertEqual(1, BlogPost.objects(author=me).count()) - self.assertEqual(1, BlogPost.objects(author=me.pk).count()) - self.assertEqual(1, BlogPost.objects(author="%s" % me.pk).count()) + assert 1 == BlogPost.objects(author=me).count() + assert 1 == BlogPost.objects(author=me.pk).count() + assert 1 == BlogPost.objects(author="%s" % me.pk).count() - self.assertEqual(1, BlogPost.objects(author__in=[me]).count()) - self.assertEqual(1, BlogPost.objects(author__in=[me.pk]).count()) - self.assertEqual( - 1, BlogPost.objects(author__in=["%s" % me.pk]).count()) + assert 1 == BlogPost.objects(author__in=[me]).count() + assert 1 == BlogPost.objects(author__in=[me.pk]).count() + assert 1 == BlogPost.objects(author__in=["%s" % me.pk]).count() def test_update_intfield_operator(self): class BlogPost(Document): @@ -1918,20 +2007,20 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects.update_one(set__hits=10) post.reload() - self.assertEqual(post.hits, 10) + assert post.hits == 10 BlogPost.objects.update_one(inc__hits=1) post.reload() - self.assertEqual(post.hits, 11) + assert post.hits == 11 BlogPost.objects.update_one(dec__hits=1) post.reload() - self.assertEqual(post.hits, 10) + assert post.hits == 10 # Negative dec operator is equal to a positive inc operator BlogPost.objects.update_one(dec__hits=-1) post.reload() - self.assertEqual(post.hits, 11) + assert post.hits == 11 def test_update_decimalfield_operator(self): class BlogPost(Document): @@ -1942,21 +2031,21 @@ class QuerySetTest(unittest.TestCase): post = BlogPost(review=3.5) post.save() - BlogPost.objects.update_one(inc__review=0.1) # test with floats + 
BlogPost.objects.update_one(inc__review=0.1) # test with floats post.reload() - self.assertEqual(float(post.review), 3.6) + assert float(post.review) == 3.6 BlogPost.objects.update_one(dec__review=0.1) post.reload() - self.assertEqual(float(post.review), 3.5) + assert float(post.review) == 3.5 - BlogPost.objects.update_one(inc__review=Decimal(0.12)) # test with Decimal + BlogPost.objects.update_one(inc__review=Decimal(0.12)) # test with Decimal post.reload() - self.assertEqual(float(post.review), 3.62) + assert float(post.review) == 3.62 BlogPost.objects.update_one(dec__review=Decimal(0.12)) post.reload() - self.assertEqual(float(post.review), 3.5) + assert float(post.review) == 3.5 def test_update_decimalfield_operator_not_working_with_force_string(self): class BlogPost(Document): @@ -1967,39 +2056,40 @@ class QuerySetTest(unittest.TestCase): post = BlogPost(review=3.5) post.save() - with self.assertRaises(OperationError): - BlogPost.objects.update_one(inc__review=0.1) # test with floats + with pytest.raises(OperationError): + BlogPost.objects.update_one(inc__review=0.1) # test with floats def test_update_listfield_operator(self): """Ensure that atomic updates work properly. 
""" + class BlogPost(Document): tags = ListField(StringField()) BlogPost.drop_collection() - post = BlogPost(tags=['test']) + post = BlogPost(tags=["test"]) post.save() # ListField operator - BlogPost.objects.update(push__tags='mongo') + BlogPost.objects.update(push__tags="mongo") post.reload() - self.assertIn('mongo', post.tags) + assert "mongo" in post.tags - BlogPost.objects.update_one(push_all__tags=['db', 'nosql']) + BlogPost.objects.update_one(push_all__tags=["db", "nosql"]) post.reload() - self.assertIn('db', post.tags) - self.assertIn('nosql', post.tags) + assert "db" in post.tags + assert "nosql" in post.tags tags = post.tags[:-1] BlogPost.objects.update(pop__tags=1) post.reload() - self.assertEqual(post.tags, tags) + assert post.tags == tags - BlogPost.objects.update_one(add_to_set__tags='unique') - BlogPost.objects.update_one(add_to_set__tags='unique') + BlogPost.objects.update_one(add_to_set__tags="unique") + BlogPost.objects.update_one(add_to_set__tags="unique") post.reload() - self.assertEqual(post.tags.count('unique'), 1) + assert post.tags.count("unique") == 1 BlogPost.drop_collection() @@ -2009,18 +2099,19 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - post = BlogPost(title='garbage').save() + post = BlogPost(title="garbage").save() - self.assertNotEqual(post.title, None) + assert post.title is not None BlogPost.objects.update_one(unset__title=1) post.reload() - self.assertEqual(post.title, None) + assert post.title is None pymongo_doc = BlogPost.objects.as_pymongo().first() - self.assertNotIn('title', pymongo_doc) + assert "title" not in pymongo_doc def test_update_push_with_position(self): """Ensure that the 'push' update with position works properly. 
""" + class BlogPost(Document): slug = StringField() tags = ListField(StringField()) @@ -2032,20 +2123,21 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects.filter(id=post.id).update(push__tags="code") BlogPost.objects.filter(id=post.id).update(push__tags__0=["mongodb", "python"]) post.reload() - self.assertEqual(post.tags, ['mongodb', 'python', 'code']) + assert post.tags == ["mongodb", "python", "code"] BlogPost.objects.filter(id=post.id).update(set__tags__2="java") post.reload() - self.assertEqual(post.tags, ['mongodb', 'python', 'java']) + assert post.tags == ["mongodb", "python", "java"] # test push with singular value - BlogPost.objects.filter(id=post.id).update(push__tags__0='scala') + BlogPost.objects.filter(id=post.id).update(push__tags__0="scala") post.reload() - self.assertEqual(post.tags, ['scala', 'mongodb', 'python', 'java']) + assert post.tags == ["scala", "mongodb", "python", "java"] def test_update_push_list_of_list(self): """Ensure that the 'push' update operation works in the list of list """ + class BlogPost(Document): slug = StringField() tags = ListField() @@ -2056,11 +2148,12 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects.filter(slug="test").update(push__tags=["value1", 123]) post.reload() - self.assertEqual(post.tags, [["value1", 123]]) + assert post.tags == [["value1", 123]] def test_update_push_and_pull_add_to_set(self): """Ensure that the 'pull' update operation works correctly. 
""" + class BlogPost(Document): slug = StringField() tags = ListField(StringField()) @@ -2072,51 +2165,49 @@ class QuerySetTest(unittest.TestCase): BlogPost.objects.filter(id=post.id).update(push__tags="code") post.reload() - self.assertEqual(post.tags, ["code"]) + assert post.tags == ["code"] - BlogPost.objects.filter(id=post.id).update( - push_all__tags=["mongodb", "code"]) + BlogPost.objects.filter(id=post.id).update(push_all__tags=["mongodb", "code"]) post.reload() - self.assertEqual(post.tags, ["code", "mongodb", "code"]) + assert post.tags == ["code", "mongodb", "code"] BlogPost.objects(slug="test").update(pull__tags="code") post.reload() - self.assertEqual(post.tags, ["mongodb"]) + assert post.tags == ["mongodb"] + + BlogPost.objects(slug="test").update(pull_all__tags=["mongodb", "code"]) + post.reload() + assert post.tags == [] BlogPost.objects(slug="test").update( - pull_all__tags=["mongodb", "code"]) + __raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}} + ) post.reload() - self.assertEqual(post.tags, []) - - BlogPost.objects(slug="test").update( - __raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}}) - post.reload() - self.assertEqual(post.tags, ["code", "mongodb"]) + assert post.tags == ["code", "mongodb"] def test_add_to_set_each(self): class Item(Document): name = StringField(required=True) description = StringField(max_length=50) - parents = ListField(ReferenceField('self')) + parents = ListField(ReferenceField("self")) Item.drop_collection() - item = Item(name='test item').save() - parent_1 = Item(name='parent 1').save() - parent_2 = Item(name='parent 2').save() + item = Item(name="test item").save() + parent_1 = Item(name="parent 1").save() + parent_2 = Item(name="parent 2").save() item.update(add_to_set__parents=[parent_1, parent_2, parent_1]) item.reload() - self.assertEqual([parent_1, parent_2], item.parents) + assert [parent_1, parent_2] == item.parents def test_pull_nested(self): - class 
Collaborator(EmbeddedDocument): user = StringField() def __unicode__(self): - return '%s' % self.user + return "%s" % self.user class Site(Document): name = StringField(max_length=75, unique=True, required=True) @@ -2124,23 +2215,21 @@ class QuerySetTest(unittest.TestCase): Site.drop_collection() - c = Collaborator(user='Esteban') + c = Collaborator(user="Esteban") s = Site(name="test", collaborators=[c]).save() - Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') - self.assertEqual(Site.objects.first().collaborators, []) + Site.objects(id=s.id).update_one(pull__collaborators__user="Esteban") + assert Site.objects.first().collaborators == [] - with self.assertRaises(InvalidQueryError): - Site.objects(id=s.id).update_one( - pull_all__collaborators__user=['Ross']) + with pytest.raises(InvalidQueryError): + Site.objects(id=s.id).update_one(pull_all__collaborators__user=["Ross"]) def test_pull_from_nested_embedded(self): - class User(EmbeddedDocument): name = StringField() def __unicode__(self): - return '%s' % self.name + return "%s" % self.name class Collaborator(EmbeddedDocument): helpful = ListField(EmbeddedDocumentField(User)) @@ -2152,21 +2241,24 @@ class QuerySetTest(unittest.TestCase): Site.drop_collection() - c = User(name='Esteban') - f = User(name='Frank') - s = Site(name="test", collaborators=Collaborator( - helpful=[c], unhelpful=[f])).save() + c = User(name="Esteban") + f = User(name="Frank") + s = Site( + name="test", collaborators=Collaborator(helpful=[c], unhelpful=[f]) + ).save() Site.objects(id=s.id).update_one(pull__collaborators__helpful=c) - self.assertEqual(Site.objects.first().collaborators['helpful'], []) + assert Site.objects.first().collaborators["helpful"] == [] Site.objects(id=s.id).update_one( - pull__collaborators__unhelpful={'name': 'Frank'}) - self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) + pull__collaborators__unhelpful={"name": "Frank"} + ) + assert 
Site.objects.first().collaborators["unhelpful"] == [] - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): Site.objects(id=s.id).update_one( - pull_all__collaborators__helpful__name=['Ross']) + pull_all__collaborators__helpful__name=["Ross"] + ) def test_pull_from_nested_embedded_using_in_nin(self): """Ensure that the 'pull' update operation works on embedded documents using 'in' and 'nin' operators. @@ -2176,7 +2268,7 @@ class QuerySetTest(unittest.TestCase): name = StringField() def __unicode__(self): - return '%s' % self.name + return "%s" % self.name class Collaborator(EmbeddedDocument): helpful = ListField(EmbeddedDocumentField(User)) @@ -2188,60 +2280,62 @@ class QuerySetTest(unittest.TestCase): Site.drop_collection() - a = User(name='Esteban') - b = User(name='Frank') - x = User(name='Harry') - y = User(name='John') + a = User(name="Esteban") + b = User(name="Frank") + x = User(name="Harry") + y = User(name="John") - s = Site(name="test", collaborators=Collaborator( - helpful=[a, b], unhelpful=[x, y])).save() + s = Site( + name="test", collaborators=Collaborator(helpful=[a, b], unhelpful=[x, y]) + ).save() - Site.objects(id=s.id).update_one(pull__collaborators__helpful__name__in=['Esteban']) # Pull a - self.assertEqual(Site.objects.first().collaborators['helpful'], [b]) + Site.objects(id=s.id).update_one( + pull__collaborators__helpful__name__in=["Esteban"] + ) # Pull a + assert Site.objects.first().collaborators["helpful"] == [b] - Site.objects(id=s.id).update_one(pull__collaborators__unhelpful__name__nin=['John']) # Pull x - self.assertEqual(Site.objects.first().collaborators['unhelpful'], [y]) + Site.objects(id=s.id).update_one( + pull__collaborators__unhelpful__name__nin=["John"] + ) # Pull x + assert Site.objects.first().collaborators["unhelpful"] == [y] def test_pull_from_nested_mapfield(self): - class Collaborator(EmbeddedDocument): user = StringField() def __unicode__(self): - return '%s' % self.user + return "%s" % 
self.user class Site(Document): name = StringField(max_length=75, unique=True, required=True) - collaborators = MapField( - ListField(EmbeddedDocumentField(Collaborator))) + collaborators = MapField(ListField(EmbeddedDocumentField(Collaborator))) Site.drop_collection() - c = Collaborator(user='Esteban') - f = Collaborator(user='Frank') - s = Site(name="test", collaborators={'helpful': [c], 'unhelpful': [f]}) + c = Collaborator(user="Esteban") + f = Collaborator(user="Frank") + s = Site(name="test", collaborators={"helpful": [c], "unhelpful": [f]}) s.save() - Site.objects(id=s.id).update_one( - pull__collaborators__helpful__user='Esteban') - self.assertEqual(Site.objects.first().collaborators['helpful'], []) + Site.objects(id=s.id).update_one(pull__collaborators__helpful__user="Esteban") + assert Site.objects.first().collaborators["helpful"] == [] Site.objects(id=s.id).update_one( - pull__collaborators__unhelpful={'user': 'Frank'}) - self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) + pull__collaborators__unhelpful={"user": "Frank"} + ) + assert Site.objects.first().collaborators["unhelpful"] == [] - with self.assertRaises(InvalidQueryError): + with pytest.raises(InvalidQueryError): Site.objects(id=s.id).update_one( - pull_all__collaborators__helpful__user=['Ross']) + pull_all__collaborators__helpful__user=["Ross"] + ) def test_pull_in_genericembedded_field(self): - class Foo(EmbeddedDocument): name = StringField() class Bar(Document): - foos = ListField(GenericEmbeddedDocumentField( - choices=[Foo, ])) + foos = ListField(GenericEmbeddedDocumentField(choices=[Foo])) Bar.drop_collection() @@ -2249,7 +2343,7 @@ class QuerySetTest(unittest.TestCase): bar = Bar(foos=[foo]).save() Bar.objects(id=bar.id).update(pull__foos=foo) bar.reload() - self.assertEqual(len(bar.foos), 0) + assert len(bar.foos) == 0 def test_update_one_check_return_with_full_result(self): class BlogTag(Document): @@ -2257,15 +2351,14 @@ class QuerySetTest(unittest.TestCase): 
BlogTag.drop_collection() - BlogTag(name='garbage').save() - default_update = BlogTag.objects.update_one(name='new') - self.assertEqual(default_update, 1) + BlogTag(name="garbage").save() + default_update = BlogTag.objects.update_one(name="new") + assert default_update == 1 - full_result_update = BlogTag.objects.update_one(name='new', full_result=True) - self.assertIsInstance(full_result_update, UpdateResult) + full_result_update = BlogTag.objects.update_one(name="new", full_result=True) + assert isinstance(full_result_update, UpdateResult) def test_update_one_pop_generic_reference(self): - class BlogTag(Document): name = StringField(required=True) @@ -2276,9 +2369,9 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() BlogTag.drop_collection() - tag_1 = BlogTag(name='code') + tag_1 = BlogTag(name="code") tag_1.save() - tag_2 = BlogTag(name='mongodb') + tag_2 = BlogTag(name="mongodb") tag_2.save() post = BlogPost(slug="test", tags=[tag_1]) @@ -2286,18 +2379,17 @@ class QuerySetTest(unittest.TestCase): post = BlogPost(slug="test-2", tags=[tag_1, tag_2]) post.save() - self.assertEqual(len(post.tags), 2) + assert len(post.tags) == 2 BlogPost.objects(slug="test-2").update_one(pop__tags=-1) post.reload() - self.assertEqual(len(post.tags), 1) + assert len(post.tags) == 1 BlogPost.drop_collection() BlogTag.drop_collection() def test_editting_embedded_objects(self): - class BlogTag(EmbeddedDocument): name = StringField(required=True) @@ -2307,34 +2399,33 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() - tag_1 = BlogTag(name='code') - tag_2 = BlogTag(name='mongodb') + tag_1 = BlogTag(name="code") + tag_2 = BlogTag(name="mongodb") post = BlogPost(slug="test", tags=[tag_1]) post.save() post = BlogPost(slug="test-2", tags=[tag_1, tag_2]) post.save() - self.assertEqual(len(post.tags), 2) + assert len(post.tags) == 2 BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python") post.reload() - self.assertEqual(post.tags[0].name, 
'python') + assert post.tags[0].name == "python" BlogPost.objects(slug="test-2").update_one(pop__tags=-1) post.reload() - self.assertEqual(len(post.tags), 1) + assert len(post.tags) == 1 BlogPost.drop_collection() def test_set_list_embedded_documents(self): - class Author(EmbeddedDocument): name = StringField() class Message(Document): title = StringField() - authors = ListField(EmbeddedDocumentField('Author')) + authors = ListField(EmbeddedDocumentField("Author")) Message.drop_collection() @@ -2342,23 +2433,26 @@ class QuerySetTest(unittest.TestCase): message.save() Message.objects(authors__name="Harry").update_one( - set__authors__S=Author(name="Ross")) + set__authors__S=Author(name="Ross") + ) message = message.reload() - self.assertEqual(message.authors[0].name, "Ross") + assert message.authors[0].name == "Ross" Message.objects(authors__name="Ross").update_one( - set__authors=[Author(name="Harry"), - Author(name="Ross"), - Author(name="Adam")]) + set__authors=[ + Author(name="Harry"), + Author(name="Ross"), + Author(name="Adam"), + ] + ) message = message.reload() - self.assertEqual(message.authors[0].name, "Harry") - self.assertEqual(message.authors[1].name, "Ross") - self.assertEqual(message.authors[2].name, "Adam") + assert message.authors[0].name == "Harry" + assert message.authors[1].name == "Ross" + assert message.authors[2].name == "Adam" def test_set_generic_embedded_documents(self): - class Bar(EmbeddedDocument): name = StringField() @@ -2368,15 +2462,13 @@ class QuerySetTest(unittest.TestCase): User.drop_collection() - User(username='abc').save() - User.objects(username='abc').update( - set__bar=Bar(name='test'), upsert=True) + User(username="abc").save() + User.objects(username="abc").update(set__bar=Bar(name="test"), upsert=True) - user = User.objects(username='abc').first() - self.assertEqual(user.bar.name, "test") + user = User.objects(username="abc").first() + assert user.bar.name == "test" def test_reload_embedded_docs_instance(self): - class 
SubDoc(EmbeddedDocument): val = IntField() @@ -2386,10 +2478,9 @@ class QuerySetTest(unittest.TestCase): doc = Doc(embedded=SubDoc(val=0)).save() doc.reload() - self.assertEqual(doc.pk, doc.embedded._instance.pk) + assert doc.pk == doc.embedded._instance.pk def test_reload_list_embedded_docs_instance(self): - class SubDoc(EmbeddedDocument): val = IntField() @@ -2399,7 +2490,7 @@ class QuerySetTest(unittest.TestCase): doc = Doc(embedded=[SubDoc(val=0)]).save() doc.reload() - self.assertEqual(doc.pk, doc.embedded[0]._instance.pk) + assert doc.pk == doc.embedded[0]._instance.pk def test_order_by(self): """Ensure that QuerySets may be ordered. @@ -2408,17 +2499,17 @@ class QuerySetTest(unittest.TestCase): self.Person(name="User A", age=20).save() self.Person(name="User C", age=30).save() - names = [p.name for p in self.Person.objects.order_by('-age')] - self.assertEqual(names, ['User B', 'User C', 'User A']) + names = [p.name for p in self.Person.objects.order_by("-age")] + assert names == ["User B", "User C", "User A"] - names = [p.name for p in self.Person.objects.order_by('+age')] - self.assertEqual(names, ['User A', 'User C', 'User B']) + names = [p.name for p in self.Person.objects.order_by("+age")] + assert names == ["User A", "User C", "User B"] - names = [p.name for p in self.Person.objects.order_by('age')] - self.assertEqual(names, ['User A', 'User C', 'User B']) + names = [p.name for p in self.Person.objects.order_by("age")] + assert names == ["User A", "User C", "User B"] - ages = [p.age for p in self.Person.objects.order_by('-name')] - self.assertEqual(ages, [30, 40, 20]) + ages = [p.age for p in self.Person.objects.order_by("-name")] + assert ages == [30, 40, 20] def test_order_by_optional(self): class BlogPost(Document): @@ -2428,31 +2519,22 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() blog_post_3 = BlogPost.objects.create( - title="Blog Post #3", - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + title="Blog Post #3", 
published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) ) blog_post_2 = BlogPost.objects.create( - title="Blog Post #2", - published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) + title="Blog Post #2", published_date=datetime.datetime(2010, 1, 5, 0, 0, 0) ) blog_post_4 = BlogPost.objects.create( - title="Blog Post #4", - published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) - ) - blog_post_1 = BlogPost.objects.create( - title="Blog Post #1", - published_date=None + title="Blog Post #4", published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) ) + blog_post_1 = BlogPost.objects.create(title="Blog Post #1", published_date=None) expected = [blog_post_1, blog_post_2, blog_post_3, blog_post_4] - self.assertSequence(BlogPost.objects.order_by('published_date'), - expected) - self.assertSequence(BlogPost.objects.order_by('+published_date'), - expected) + self.assertSequence(BlogPost.objects.order_by("published_date"), expected) + self.assertSequence(BlogPost.objects.order_by("+published_date"), expected) expected.reverse() - self.assertSequence(BlogPost.objects.order_by('-published_date'), - expected) + self.assertSequence(BlogPost.objects.order_by("-published_date"), expected) def test_order_by_list(self): class BlogPost(Document): @@ -2462,23 +2544,20 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() blog_post_1 = BlogPost.objects.create( - title="A", - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + title="A", published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) ) blog_post_2 = BlogPost.objects.create( - title="B", - published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) + title="B", published_date=datetime.datetime(2010, 1, 6, 0, 0, 0) ) blog_post_3 = BlogPost.objects.create( - title="C", - published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) + title="C", published_date=datetime.datetime(2010, 1, 7, 0, 0, 0) ) - qs = BlogPost.objects.order_by('published_date', 'title') + qs = BlogPost.objects.order_by("published_date", "title") expected = 
[blog_post_1, blog_post_2, blog_post_3] self.assertSequence(qs, expected) - qs = BlogPost.objects.order_by('-published_date', '-title') + qs = BlogPost.objects.order_by("-published_date", "-title") expected.reverse() self.assertSequence(qs, expected) @@ -2489,30 +2568,30 @@ class QuerySetTest(unittest.TestCase): self.Person(name="User A", age=20).save() self.Person(name="User C", age=30).save() - only_age = self.Person.objects.order_by('-age').only('age') + only_age = self.Person.objects.order_by("-age").only("age") names = [p.name for p in only_age] ages = [p.age for p in only_age] # The .only('age') clause should mean that all names are None - self.assertEqual(names, [None, None, None]) - self.assertEqual(ages, [40, 30, 20]) + assert names == [None, None, None] + assert ages == [40, 30, 20] - qs = self.Person.objects.all().order_by('-age') + qs = self.Person.objects.all().order_by("-age") qs = qs.limit(10) ages = [p.age for p in qs] - self.assertEqual(ages, [40, 30, 20]) + assert ages == [40, 30, 20] qs = self.Person.objects.all().limit(10) - qs = qs.order_by('-age') + qs = qs.order_by("-age") ages = [p.age for p in qs] - self.assertEqual(ages, [40, 30, 20]) + assert ages == [40, 30, 20] qs = self.Person.objects.all().skip(0) - qs = qs.order_by('-age') + qs = qs.order_by("-age") ages = [p.age for p in qs] - self.assertEqual(ages, [40, 30, 20]) + assert ages == [40, 30, 20] def test_confirm_order_by_reference_wont_work(self): """Ordering by reference is not possible. Use map / reduce.. 
or @@ -2534,47 +2613,42 @@ class QuerySetTest(unittest.TestCase): Author(author=person_b).save() Author(author=person_c).save() - names = [ - a.author.name for a in Author.objects.order_by('-author__age')] - self.assertEqual(names, ['User A', 'User B', 'User C']) + names = [a.author.name for a in Author.objects.order_by("-author__age")] + assert names == ["User A", "User B", "User C"] def test_comment(self): """Make sure adding a comment to the query gets added to the query""" MONGO_VER = self.mongodb_version _, CMD_QUERY_KEY = get_key_compat(MONGO_VER) - QUERY_KEY = 'filter' - COMMENT_KEY = 'comment' + QUERY_KEY = "filter" + COMMENT_KEY = "comment" class User(Document): age = IntField() with db_ops_tracker() as q: - adult1 = (User.objects.filter(age__gte=18) - .comment('looking for an adult') - .first()) - - adult2 = (User.objects.comment('looking for an adult') - .filter(age__gte=18) - .first()) + User.objects.filter(age__gte=18).comment("looking for an adult").first() + User.objects.comment("looking for an adult").filter(age__gte=18).first() ops = q.get_ops() - self.assertEqual(len(ops), 2) + assert len(ops) == 2 for op in ops: - self.assertEqual(op[CMD_QUERY_KEY][QUERY_KEY], {'age': {'$gte': 18}}) - self.assertEqual(op[CMD_QUERY_KEY][COMMENT_KEY], 'looking for an adult') + assert op[CMD_QUERY_KEY][QUERY_KEY] == {"age": {"$gte": 18}} + assert op[CMD_QUERY_KEY][COMMENT_KEY] == "looking for an adult" def test_map_reduce(self): """Ensure map/reduce is both mapping and reducing. 
""" + class BlogPost(Document): title = StringField() - tags = ListField(StringField(), db_field='post-tag-list') + tags = ListField(StringField(), db_field="post-tag-list") BlogPost.drop_collection() - BlogPost(title="Post #1", tags=['music', 'film', 'print']).save() - BlogPost(title="Post #2", tags=['music', 'film']).save() - BlogPost(title="Post #3", tags=['film', 'photography']).save() + BlogPost(title="Post #1", tags=["music", "film", "print"]).save() + BlogPost(title="Post #2", tags=["music", "film"]).save() + BlogPost(title="Post #3", tags=["film", "photography"]).save() map_f = """ function() { @@ -2597,13 +2671,13 @@ class QuerySetTest(unittest.TestCase): # run a map/reduce operation spanning all posts results = BlogPost.objects.map_reduce(map_f, reduce_f, "myresults") results = list(results) - self.assertEqual(len(results), 4) + assert len(results) == 4 music = list(filter(lambda r: r.key == "music", results))[0] - self.assertEqual(music.value, 2) + assert music.value == 2 film = list(filter(lambda r: r.key == "film", results))[0] - self.assertEqual(film.value, 3) + assert film.value == 3 BlogPost.drop_collection() @@ -2624,8 +2698,8 @@ class QuerySetTest(unittest.TestCase): post2.save() post3.save() - self.assertEqual(BlogPost._fields['title'].db_field, '_id') - self.assertEqual(BlogPost._meta['id_field'], 'title') + assert BlogPost._fields["title"].db_field == "_id" + assert BlogPost._meta["id_field"] == "title" map_f = """ function() { @@ -2647,9 +2721,9 @@ class QuerySetTest(unittest.TestCase): results = BlogPost.objects.map_reduce(map_f, reduce_f, "myresults") results = list(results) - self.assertEqual(results[0].object, post1) - self.assertEqual(results[1].object, post2) - self.assertEqual(results[2].object, post3) + assert results[0].object == post1 + assert results[1].object == post2 + assert results[2].object == post3 BlogPost.drop_collection() @@ -2657,16 +2731,14 @@ class QuerySetTest(unittest.TestCase): """ Test map/reduce custom output """ - 
register_connection('test2', 'mongoenginetest2') + register_connection("test2", "mongoenginetest2") class Family(Document): - id = IntField( - primary_key=True) + id = IntField(primary_key=True) log = StringField() class Person(Document): - id = IntField( - primary_key=True) + id = IntField(primary_key=True) name = StringField() age = IntField() family = ReferenceField(Family) @@ -2741,51 +2813,56 @@ class QuerySetTest(unittest.TestCase): cursor = Family.objects.map_reduce( map_f=map_family, reduce_f=reduce_f, - output={'replace': 'family_map', 'db_alias': 'test2'}) + output={"replace": "family_map", "db_alias": "test2"}, + ) # start a map/reduce - cursor.next() + next(cursor) results = Person.objects.map_reduce( map_f=map_person, reduce_f=reduce_f, - output={'reduce': 'family_map', 'db_alias': 'test2'}) + output={"reduce": "family_map", "db_alias": "test2"}, + ) results = list(results) - collection = get_db('test2').family_map + collection = get_db("test2").family_map - self.assertEqual( - collection.find_one({'_id': 1}), { - '_id': 1, - 'value': { - 'persons': [ - {'age': 21, 'name': u'Wilson Jr'}, - {'age': 45, 'name': u'Wilson Father'}, - {'age': 40, 'name': u'Eliana Costa'}, - {'age': 17, 'name': u'Tayza Mariana'}], - 'totalAge': 123} - }) + assert collection.find_one({"_id": 1}) == { + "_id": 1, + "value": { + "persons": [ + {"age": 21, "name": u"Wilson Jr"}, + {"age": 45, "name": u"Wilson Father"}, + {"age": 40, "name": u"Eliana Costa"}, + {"age": 17, "name": u"Tayza Mariana"}, + ], + "totalAge": 123, + }, + } - self.assertEqual( - collection.find_one({'_id': 2}), { - '_id': 2, - 'value': { - 'persons': [ - {'age': 16, 'name': u'Isabella Luanna'}, - {'age': 36, 'name': u'Sandra Mara'}, - {'age': 10, 'name': u'Igor Gabriel'}], - 'totalAge': 62} - }) + assert collection.find_one({"_id": 2}) == { + "_id": 2, + "value": { + "persons": [ + {"age": 16, "name": u"Isabella Luanna"}, + {"age": 36, "name": u"Sandra Mara"}, + {"age": 10, "name": u"Igor Gabriel"}, + ], 
+ "totalAge": 62, + }, + } - self.assertEqual( - collection.find_one({'_id': 3}), { - '_id': 3, - 'value': { - 'persons': [ - {'age': 30, 'name': u'Arthur WA'}, - {'age': 25, 'name': u'Paula Leonel'}], - 'totalAge': 55} - }) + assert collection.find_one({"_id": 3}) == { + "_id": 3, + "value": { + "persons": [ + {"age": 30, "name": u"Arthur WA"}, + {"age": 25, "name": u"Paula Leonel"}, + ], + "totalAge": 55, + }, + } def test_map_reduce_finalize(self): """Ensure that map, reduce, and finalize run and introduce "scope" @@ -2794,10 +2871,10 @@ class QuerySetTest(unittest.TestCase): from time import mktime class Link(Document): - title = StringField(db_field='bpTitle') + title = StringField(db_field="bpTitle") up_votes = IntField() down_votes = IntField() - submitted = DateTimeField(db_field='sTime') + submitted = DateTimeField(db_field="sTime") Link.drop_collection() @@ -2807,30 +2884,42 @@ class QuerySetTest(unittest.TestCase): # Fri, 12 Feb 2010 14:36:00 -0600. Link ordering should # reflect order of insertion below, but is not influenced # by insertion order. - Link(title="Google Buzz auto-followed a woman's abusive ex ...", - up_votes=1079, - down_votes=553, - submitted=now - datetime.timedelta(hours=4)).save() - Link(title="We did it! 
Barbie is a computer engineer.", - up_votes=481, - down_votes=124, - submitted=now - datetime.timedelta(hours=2)).save() - Link(title="This Is A Mosquito Getting Killed By A Laser", - up_votes=1446, - down_votes=530, - submitted=now - datetime.timedelta(hours=13)).save() - Link(title="Arabic flashcards land physics student in jail.", - up_votes=215, - down_votes=105, - submitted=now - datetime.timedelta(hours=6)).save() - Link(title="The Burger Lab: Presenting, the Flood Burger", - up_votes=48, - down_votes=17, - submitted=now - datetime.timedelta(hours=5)).save() - Link(title="How to see polarization with the naked eye", - up_votes=74, - down_votes=13, - submitted=now - datetime.timedelta(hours=10)).save() + Link( + title="Google Buzz auto-followed a woman's abusive ex ...", + up_votes=1079, + down_votes=553, + submitted=now - datetime.timedelta(hours=4), + ).save() + Link( + title="We did it! Barbie is a computer engineer.", + up_votes=481, + down_votes=124, + submitted=now - datetime.timedelta(hours=2), + ).save() + Link( + title="This Is A Mosquito Getting Killed By A Laser", + up_votes=1446, + down_votes=530, + submitted=now - datetime.timedelta(hours=13), + ).save() + Link( + title="Arabic flashcards land physics student in jail.", + up_votes=215, + down_votes=105, + submitted=now - datetime.timedelta(hours=6), + ).save() + Link( + title="The Burger Lab: Presenting, the Flood Burger", + up_votes=48, + down_votes=17, + submitted=now - datetime.timedelta(hours=5), + ).save() + Link( + title="How to see polarization with the naked eye", + up_votes=74, + down_votes=13, + submitted=now - datetime.timedelta(hours=10), + ).save() map_f = """ function() { @@ -2881,89 +2970,89 @@ class QuerySetTest(unittest.TestCase): # provide the reddit epoch (used for ranking) as a variable available # to all phases of the map/reduce operation: map, reduce, and finalize. 
reddit_epoch = mktime(datetime.datetime(2005, 12, 8, 7, 46, 43).timetuple()) - scope = {'reddit_epoch': reddit_epoch} + scope = {"reddit_epoch": reddit_epoch} # run a map/reduce operation across all links. ordering is set # to "-value", which orders the "weight" value returned from # "finalize_f" in descending order. results = Link.objects.order_by("-value") - results = results.map_reduce(map_f, - reduce_f, - "myresults", - finalize_f=finalize_f, - scope=scope) + results = results.map_reduce( + map_f, reduce_f, "myresults", finalize_f=finalize_f, scope=scope + ) results = list(results) # assert troublesome Buzz article is ranked 1st - self.assertTrue(results[0].object.title.startswith("Google Buzz")) + assert results[0].object.title.startswith("Google Buzz") # assert laser vision is ranked last - self.assertTrue(results[-1].object.title.startswith("How to see")) + assert results[-1].object.title.startswith("How to see") Link.drop_collection() def test_item_frequencies(self): """Ensure that item frequencies are properly generated from lists. 
""" + class BlogPost(Document): hits = IntField() - tags = ListField(StringField(), db_field='blogTags') + tags = ListField(StringField(), db_field="blogTags") BlogPost.drop_collection() - BlogPost(hits=1, tags=['music', 'film', 'actors', 'watch']).save() - BlogPost(hits=2, tags=['music', 'watch']).save() - BlogPost(hits=2, tags=['music', 'actors']).save() + BlogPost(hits=1, tags=["music", "film", "actors", "watch"]).save() + BlogPost(hits=2, tags=["music", "watch"]).save() + BlogPost(hits=2, tags=["music", "actors"]).save() def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual( - set(['music', 'film', 'actors', 'watch']), set(f.keys())) - self.assertEqual(f['music'], 3) - self.assertEqual(f['actors'], 2) - self.assertEqual(f['watch'], 2) - self.assertEqual(f['film'], 1) + assert set(["music", "film", "actors", "watch"]) == set(f.keys()) + assert f["music"] == 3 + assert f["actors"] == 2 + assert f["watch"] == 2 + assert f["film"] == 1 - exec_js = BlogPost.objects.item_frequencies('tags') - map_reduce = BlogPost.objects.item_frequencies('tags', map_reduce=True) + exec_js = BlogPost.objects.item_frequencies("tags") + map_reduce = BlogPost.objects.item_frequencies("tags", map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Ensure query is taken into account def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual(set(['music', 'actors', 'watch']), set(f.keys())) - self.assertEqual(f['music'], 2) - self.assertEqual(f['actors'], 1) - self.assertEqual(f['watch'], 1) + assert set(["music", "actors", "watch"]) == set(f.keys()) + assert f["music"] == 2 + assert f["actors"] == 1 + assert f["watch"] == 1 - exec_js = BlogPost.objects(hits__gt=1).item_frequencies('tags') - map_reduce = BlogPost.objects( - hits__gt=1).item_frequencies('tags', map_reduce=True) + exec_js = BlogPost.objects(hits__gt=1).item_frequencies("tags") + map_reduce = BlogPost.objects(hits__gt=1).item_frequencies( + 
"tags", map_reduce=True + ) test_assertions(exec_js) test_assertions(map_reduce) # Check that normalization works def test_assertions(f): - self.assertAlmostEqual(f['music'], 3.0 / 8.0) - self.assertAlmostEqual(f['actors'], 2.0 / 8.0) - self.assertAlmostEqual(f['watch'], 2.0 / 8.0) - self.assertAlmostEqual(f['film'], 1.0 / 8.0) + assert round(abs(f["music"] - 3.0 / 8.0), 7) == 0 + assert round(abs(f["actors"] - 2.0 / 8.0), 7) == 0 + assert round(abs(f["watch"] - 2.0 / 8.0), 7) == 0 + assert round(abs(f["film"] - 1.0 / 8.0), 7) == 0 - exec_js = BlogPost.objects.item_frequencies('tags', normalize=True) + exec_js = BlogPost.objects.item_frequencies("tags", normalize=True) map_reduce = BlogPost.objects.item_frequencies( - 'tags', normalize=True, map_reduce=True) + "tags", normalize=True, map_reduce=True + ) test_assertions(exec_js) test_assertions(map_reduce) # Check item_frequencies works for non-list fields def test_assertions(f): - self.assertEqual(set([1, 2]), set(f.keys())) - self.assertEqual(f[1], 1) - self.assertEqual(f[2], 2) + assert set([1, 2]) == set(f.keys()) + assert f[1] == 1 + assert f[2] == 2 - exec_js = BlogPost.objects.item_frequencies('hits') - map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True) + exec_js = BlogPost.objects.item_frequencies("hits") + map_reduce = BlogPost.objects.item_frequencies("hits", map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) @@ -2983,57 +3072,56 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() doc = Person(name="Guido") - doc.phone = Phone(number='62-3331-1656') + doc.phone = Phone(number="62-3331-1656") doc.save() doc = Person(name="Marr") - doc.phone = Phone(number='62-3331-1656') + doc.phone = Phone(number="62-3331-1656") doc.save() doc = Person(name="WP Junior") - doc.phone = Phone(number='62-3332-1656') + doc.phone = Phone(number="62-3332-1656") doc.save() def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual( - 
set(['62-3331-1656', '62-3332-1656']), set(f.keys())) - self.assertEqual(f['62-3331-1656'], 2) - self.assertEqual(f['62-3332-1656'], 1) + assert set(["62-3331-1656", "62-3332-1656"]) == set(f.keys()) + assert f["62-3331-1656"] == 2 + assert f["62-3332-1656"] == 1 - exec_js = Person.objects.item_frequencies('phone.number') - map_reduce = Person.objects.item_frequencies( - 'phone.number', map_reduce=True) + exec_js = Person.objects.item_frequencies("phone.number") + map_reduce = Person.objects.item_frequencies("phone.number", map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Ensure query is taken into account def test_assertions(f): f = {key: int(val) for key, val in f.items()} - self.assertEqual(set(['62-3331-1656']), set(f.keys())) - self.assertEqual(f['62-3331-1656'], 2) + assert set(["62-3331-1656"]) == set(f.keys()) + assert f["62-3331-1656"] == 2 - exec_js = Person.objects( - phone__number='62-3331-1656').item_frequencies('phone.number') - map_reduce = Person.objects( - phone__number='62-3331-1656').item_frequencies('phone.number', map_reduce=True) + exec_js = Person.objects(phone__number="62-3331-1656").item_frequencies( + "phone.number" + ) + map_reduce = Person.objects(phone__number="62-3331-1656").item_frequencies( + "phone.number", map_reduce=True + ) test_assertions(exec_js) test_assertions(map_reduce) # Check that normalization works def test_assertions(f): - self.assertEqual(f['62-3331-1656'], 2.0 / 3.0) - self.assertEqual(f['62-3332-1656'], 1.0 / 3.0) + assert f["62-3331-1656"] == 2.0 / 3.0 + assert f["62-3332-1656"] == 1.0 / 3.0 - exec_js = Person.objects.item_frequencies( - 'phone.number', normalize=True) + exec_js = Person.objects.item_frequencies("phone.number", normalize=True) map_reduce = Person.objects.item_frequencies( - 'phone.number', normalize=True, map_reduce=True) + "phone.number", normalize=True, map_reduce=True + ) test_assertions(exec_js) test_assertions(map_reduce) def test_item_frequencies_null_values(self): - 
class Person(Document): name = StringField() city = StringField() @@ -3043,16 +3131,15 @@ class QuerySetTest(unittest.TestCase): Person(name="Wilson Snr", city="CRB").save() Person(name="Wilson Jr").save() - freq = Person.objects.item_frequencies('city') - self.assertEqual(freq, {'CRB': 1.0, None: 1.0}) - freq = Person.objects.item_frequencies('city', normalize=True) - self.assertEqual(freq, {'CRB': 0.5, None: 0.5}) + freq = Person.objects.item_frequencies("city") + assert freq == {"CRB": 1.0, None: 1.0} + freq = Person.objects.item_frequencies("city", normalize=True) + assert freq == {"CRB": 0.5, None: 0.5} - freq = Person.objects.item_frequencies('city', map_reduce=True) - self.assertEqual(freq, {'CRB': 1.0, None: 1.0}) - freq = Person.objects.item_frequencies( - 'city', normalize=True, map_reduce=True) - self.assertEqual(freq, {'CRB': 0.5, None: 0.5}) + freq = Person.objects.item_frequencies("city", map_reduce=True) + assert freq == {"CRB": 1.0, None: 1.0} + freq = Person.objects.item_frequencies("city", normalize=True, map_reduce=True) + assert freq == {"CRB": 0.5, None: 0.5} def test_item_frequencies_with_null_embedded(self): class Data(EmbeddedDocument): @@ -3076,11 +3163,11 @@ class QuerySetTest(unittest.TestCase): p.extra = Extra(tag="friend") p.save() - ot = Person.objects.item_frequencies('extra.tag', map_reduce=False) - self.assertEqual(ot, {None: 1.0, u'friend': 1.0}) + ot = Person.objects.item_frequencies("extra.tag", map_reduce=False) + assert ot == {None: 1.0, u"friend": 1.0} - ot = Person.objects.item_frequencies('extra.tag', map_reduce=True) - self.assertEqual(ot, {None: 1.0, u'friend': 1.0}) + ot = Person.objects.item_frequencies("extra.tag", map_reduce=True) + assert ot == {None: 1.0, u"friend": 1.0} def test_item_frequencies_with_0_values(self): class Test(Document): @@ -3091,10 +3178,10 @@ class QuerySetTest(unittest.TestCase): t.val = 0 t.save() - ot = Test.objects.item_frequencies('val', map_reduce=True) - self.assertEqual(ot, {0: 1}) - ot = 
Test.objects.item_frequencies('val', map_reduce=False) - self.assertEqual(ot, {0: 1}) + ot = Test.objects.item_frequencies("val", map_reduce=True) + assert ot == {0: 1} + ot = Test.objects.item_frequencies("val", map_reduce=False) + assert ot == {0: 1} def test_item_frequencies_with_False_values(self): class Test(Document): @@ -3105,10 +3192,10 @@ class QuerySetTest(unittest.TestCase): t.val = False t.save() - ot = Test.objects.item_frequencies('val', map_reduce=True) - self.assertEqual(ot, {False: 1}) - ot = Test.objects.item_frequencies('val', map_reduce=False) - self.assertEqual(ot, {False: 1}) + ot = Test.objects.item_frequencies("val", map_reduce=True) + assert ot == {False: 1} + ot = Test.objects.item_frequencies("val", map_reduce=False) + assert ot == {False: 1} def test_item_frequencies_normalize(self): class Test(Document): @@ -3122,113 +3209,109 @@ class QuerySetTest(unittest.TestCase): for i in range(20): Test(val=2).save() - freqs = Test.objects.item_frequencies( - 'val', map_reduce=False, normalize=True) - self.assertEqual(freqs, {1: 50.0 / 70, 2: 20.0 / 70}) + freqs = Test.objects.item_frequencies("val", map_reduce=False, normalize=True) + assert freqs == {1: 50.0 / 70, 2: 20.0 / 70} - freqs = Test.objects.item_frequencies( - 'val', map_reduce=True, normalize=True) - self.assertEqual(freqs, {1: 50.0 / 70, 2: 20.0 / 70}) + freqs = Test.objects.item_frequencies("val", map_reduce=True, normalize=True) + assert freqs == {1: 50.0 / 70, 2: 20.0 / 70} def test_average(self): """Ensure that field can be averaged correctly. 
""" - self.Person(name='person', age=0).save() - self.assertEqual(int(self.Person.objects.average('age')), 0) + self.Person(name="person", age=0).save() + assert int(self.Person.objects.average("age")) == 0 ages = [23, 54, 12, 94, 27] for i, age in enumerate(ages): - self.Person(name='test%s' % i, age=age).save() + self.Person(name="test%s" % i, age=age).save() avg = float(sum(ages)) / (len(ages) + 1) # take into account the 0 - self.assertAlmostEqual(int(self.Person.objects.average('age')), avg) + assert round(abs(int(self.Person.objects.average("age")) - avg), 7) == 0 - self.Person(name='ageless person').save() - self.assertEqual(int(self.Person.objects.average('age')), avg) + self.Person(name="ageless person").save() + assert int(self.Person.objects.average("age")) == avg # dot notation - self.Person( - name='person meta', person_meta=self.PersonMeta(weight=0)).save() - self.assertAlmostEqual( - int(self.Person.objects.average('person_meta.weight')), 0) + self.Person(name="person meta", person_meta=self.PersonMeta(weight=0)).save() + assert ( + round(abs(int(self.Person.objects.average("person_meta.weight")) - 0), 7) + == 0 + ) for i, weight in enumerate(ages): self.Person( - name='test meta%i', person_meta=self.PersonMeta(weight=weight)).save() + name="test meta%i", person_meta=self.PersonMeta(weight=weight) + ).save() - self.assertAlmostEqual( - int(self.Person.objects.average('person_meta.weight')), avg + assert ( + round(abs(int(self.Person.objects.average("person_meta.weight")) - avg), 7) + == 0 ) - self.Person(name='test meta none').save() - self.assertEqual( - int(self.Person.objects.average('person_meta.weight')), avg - ) + self.Person(name="test meta none").save() + assert int(self.Person.objects.average("person_meta.weight")) == avg # test summing over a filtered queryset over_50 = [a for a in ages if a >= 50] avg = float(sum(over_50)) / len(over_50) - self.assertEqual( - self.Person.objects.filter(age__gte=50).average('age'), - avg - ) + assert 
self.Person.objects.filter(age__gte=50).average("age") == avg def test_sum(self): """Ensure that field can be summed over correctly. """ ages = [23, 54, 12, 94, 27] for i, age in enumerate(ages): - self.Person(name='test%s' % i, age=age).save() + self.Person(name="test%s" % i, age=age).save() - self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + assert self.Person.objects.sum("age") == sum(ages) - self.Person(name='ageless person').save() - self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + self.Person(name="ageless person").save() + assert self.Person.objects.sum("age") == sum(ages) for i, age in enumerate(ages): - self.Person(name='test meta%s' % - i, person_meta=self.PersonMeta(weight=age)).save() + self.Person( + name="test meta%s" % i, person_meta=self.PersonMeta(weight=age) + ).save() - self.assertEqual( - self.Person.objects.sum('person_meta.weight'), sum(ages) - ) + assert self.Person.objects.sum("person_meta.weight") == sum(ages) - self.Person(name='weightless person').save() - self.assertEqual(self.Person.objects.sum('age'), sum(ages)) + self.Person(name="weightless person").save() + assert self.Person.objects.sum("age") == sum(ages) # test summing over a filtered queryset - self.assertEqual( - self.Person.objects.filter(age__gte=50).sum('age'), - sum([a for a in ages if a >= 50]) + assert self.Person.objects.filter(age__gte=50).sum("age") == sum( + [a for a in ages if a >= 50] ) def test_sum_over_db_field(self): """Ensure that a field mapped to a db field with a different name can be summed over correctly. 
""" + class UserVisit(Document): - num_visits = IntField(db_field='visits') + num_visits = IntField(db_field="visits") UserVisit.drop_collection() UserVisit.objects.create(num_visits=10) UserVisit.objects.create(num_visits=5) - self.assertEqual(UserVisit.objects.sum('num_visits'), 15) + assert UserVisit.objects.sum("num_visits") == 15 def test_average_over_db_field(self): """Ensure that a field mapped to a db field with a different name can have its average computed correctly. """ + class UserVisit(Document): - num_visits = IntField(db_field='visits') + num_visits = IntField(db_field="visits") UserVisit.drop_collection() UserVisit.objects.create(num_visits=20) UserVisit.objects.create(num_visits=10) - self.assertEqual(UserVisit.objects.average('num_visits'), 15) + assert UserVisit.objects.average("num_visits") == 15 def test_embedded_average(self): class Pay(EmbeddedDocument): @@ -3236,17 +3319,16 @@ class QuerySetTest(unittest.TestCase): class Doc(Document): name = StringField() - pay = EmbeddedDocumentField( - Pay) + pay = EmbeddedDocumentField(Pay) Doc.drop_collection() - Doc(name='Wilson Junior', pay=Pay(value=150)).save() - Doc(name='Isabella Luanna', pay=Pay(value=530)).save() - Doc(name='Tayza mariana', pay=Pay(value=165)).save() - Doc(name='Eliana Costa', pay=Pay(value=115)).save() + Doc(name="Wilson Junior", pay=Pay(value=150)).save() + Doc(name="Isabella Luanna", pay=Pay(value=530)).save() + Doc(name="Tayza mariana", pay=Pay(value=165)).save() + Doc(name="Eliana Costa", pay=Pay(value=115)).save() - self.assertEqual(Doc.objects.average('pay.value'), 240) + assert Doc.objects.average("pay.value") == 240 def test_embedded_array_average(self): class Pay(EmbeddedDocument): @@ -3258,12 +3340,12 @@ class QuerySetTest(unittest.TestCase): Doc.drop_collection() - Doc(name='Wilson Junior', pay=Pay(values=[150, 100])).save() - Doc(name='Isabella Luanna', pay=Pay(values=[530, 100])).save() - Doc(name='Tayza mariana', pay=Pay(values=[165, 100])).save() - 
Doc(name='Eliana Costa', pay=Pay(values=[115, 100])).save() + Doc(name="Wilson Junior", pay=Pay(values=[150, 100])).save() + Doc(name="Isabella Luanna", pay=Pay(values=[530, 100])).save() + Doc(name="Tayza mariana", pay=Pay(values=[165, 100])).save() + Doc(name="Eliana Costa", pay=Pay(values=[115, 100])).save() - self.assertEqual(Doc.objects.average('pay.values'), 170) + assert Doc.objects.average("pay.values") == 170 def test_array_average(self): class Doc(Document): @@ -3276,7 +3358,7 @@ class QuerySetTest(unittest.TestCase): Doc(values=[165, 100]).save() Doc(values=[115, 100]).save() - self.assertEqual(Doc.objects.average('values'), 170) + assert Doc.objects.average("values") == 170 def test_embedded_sum(self): class Pay(EmbeddedDocument): @@ -3288,12 +3370,12 @@ class QuerySetTest(unittest.TestCase): Doc.drop_collection() - Doc(name='Wilson Junior', pay=Pay(value=150)).save() - Doc(name='Isabella Luanna', pay=Pay(value=530)).save() - Doc(name='Tayza mariana', pay=Pay(value=165)).save() - Doc(name='Eliana Costa', pay=Pay(value=115)).save() + Doc(name="Wilson Junior", pay=Pay(value=150)).save() + Doc(name="Isabella Luanna", pay=Pay(value=530)).save() + Doc(name="Tayza mariana", pay=Pay(value=165)).save() + Doc(name="Eliana Costa", pay=Pay(value=115)).save() - self.assertEqual(Doc.objects.sum('pay.value'), 960) + assert Doc.objects.sum("pay.value") == 960 def test_embedded_array_sum(self): class Pay(EmbeddedDocument): @@ -3305,12 +3387,12 @@ class QuerySetTest(unittest.TestCase): Doc.drop_collection() - Doc(name='Wilson Junior', pay=Pay(values=[150, 100])).save() - Doc(name='Isabella Luanna', pay=Pay(values=[530, 100])).save() - Doc(name='Tayza mariana', pay=Pay(values=[165, 100])).save() - Doc(name='Eliana Costa', pay=Pay(values=[115, 100])).save() + Doc(name="Wilson Junior", pay=Pay(values=[150, 100])).save() + Doc(name="Isabella Luanna", pay=Pay(values=[530, 100])).save() + Doc(name="Tayza mariana", pay=Pay(values=[165, 100])).save() + Doc(name="Eliana Costa", 
pay=Pay(values=[115, 100])).save() - self.assertEqual(Doc.objects.sum('pay.values'), 1360) + assert Doc.objects.sum("pay.values") == 1360 def test_array_sum(self): class Doc(Document): @@ -3323,21 +3405,22 @@ class QuerySetTest(unittest.TestCase): Doc(values=[165, 100]).save() Doc(values=[115, 100]).save() - self.assertEqual(Doc.objects.sum('values'), 1360) + assert Doc.objects.sum("values") == 1360 def test_distinct(self): """Ensure that the QuerySet.distinct method works. """ - self.Person(name='Mr Orange', age=20).save() - self.Person(name='Mr White', age=20).save() - self.Person(name='Mr Orange', age=30).save() - self.Person(name='Mr Pink', age=30).save() - self.assertEqual(set(self.Person.objects.distinct('name')), - set(['Mr Orange', 'Mr White', 'Mr Pink'])) - self.assertEqual(set(self.Person.objects.distinct('age')), - set([20, 30])) - self.assertEqual(set(self.Person.objects(age=30).distinct('name')), - set(['Mr Orange', 'Mr Pink'])) + self.Person(name="Mr Orange", age=20).save() + self.Person(name="Mr White", age=20).save() + self.Person(name="Mr Orange", age=30).save() + self.Person(name="Mr Pink", age=30).save() + assert set(self.Person.objects.distinct("name")) == set( + ["Mr Orange", "Mr White", "Mr Pink"] + ) + assert set(self.Person.objects.distinct("age")) == set([20, 30]) + assert set(self.Person.objects(age=30).distinct("name")) == set( + ["Mr Orange", "Mr Pink"] + ) def test_distinct_handles_references(self): class Foo(Document): @@ -3355,7 +3438,7 @@ class QuerySetTest(unittest.TestCase): foo = Foo(bar=bar) foo.save() - self.assertEqual(Foo.objects.distinct("bar"), [bar]) + assert Foo.objects.distinct("bar") == [bar] def test_text_indexes(self): class News(Document): @@ -3363,93 +3446,96 @@ class QuerySetTest(unittest.TestCase): content = StringField() is_active = BooleanField(default=True) - meta = {'indexes': [ - {'fields': ['$title', "$content"], - 'default_language': 'portuguese', - 'weights': {'title': 10, 'content': 2} - } - ]} + meta = { 
+ "indexes": [ + { + "fields": ["$title", "$content"], + "default_language": "portuguese", + "weights": {"title": 10, "content": 2}, + } + ] + } News.drop_collection() info = News.objects._collection.index_information() - self.assertIn('title_text_content_text', info) - self.assertIn('textIndexVersion', info['title_text_content_text']) + assert "title_text_content_text" in info + assert "textIndexVersion" in info["title_text_content_text"] - News(title="Neymar quebrou a vertebra", - content="O Brasil sofre com a perda de Neymar").save() + News( + title="Neymar quebrou a vertebra", + content="O Brasil sofre com a perda de Neymar", + ).save() - News(title="Brasil passa para as quartas de finais", - content="Com o brasil nas quartas de finais teremos um " - "jogo complicado com a alemanha").save() + News( + title="Brasil passa para as quartas de finais", + content="Com o brasil nas quartas de finais teremos um " + "jogo complicado com a alemanha", + ).save() - count = News.objects.search_text( - "neymar", language="portuguese").count() + count = News.objects.search_text("neymar", language="portuguese").count() - self.assertEqual(count, 1) + assert count == 1 - count = News.objects.search_text( - "brasil -neymar").count() + count = News.objects.search_text("brasil -neymar").count() - self.assertEqual(count, 1) + assert count == 1 - News(title=u"As eleições no Brasil já estão em planejamento", - content=u"A candidata dilma roussef já começa o teu planejamento", - is_active=False).save() + News( + title=u"As eleições no Brasil já estão em planejamento", + content=u"A candidata dilma roussef já começa o teu planejamento", + is_active=False, + ).save() - new = News.objects(is_active=False).search_text( - "dilma", language="pt").first() + new = News.objects(is_active=False).search_text("dilma", language="pt").first() - query = News.objects(is_active=False).search_text( - "dilma", language="pt")._query + query = News.objects(is_active=False).search_text("dilma", 
language="pt")._query - self.assertEqual( - query, {'$text': { - '$search': 'dilma', '$language': 'pt'}, - 'is_active': False}) + assert query == { + "$text": {"$search": "dilma", "$language": "pt"}, + "is_active": False, + } - self.assertFalse(new.is_active) - self.assertIn('dilma', new.content) - self.assertIn('planejamento', new.title) + assert not new.is_active + assert "dilma" in new.content + assert "planejamento" in new.title query = News.objects.search_text("candidata") - self.assertEqual(query._search_text, "candidata") + assert query._search_text == "candidata" new = query.first() - self.assertIsInstance(new.get_text_score(), float) + assert isinstance(new.get_text_score(), float) # count - query = News.objects.search_text('brasil').order_by('$text_score') - self.assertEqual(query._search_text, "brasil") + query = News.objects.search_text("brasil").order_by("$text_score") + assert query._search_text == "brasil" - self.assertEqual(query.count(), 3) - self.assertEqual(query._query, {'$text': {'$search': 'brasil'}}) + assert query.count() == 3 + assert query._query == {"$text": {"$search": "brasil"}} cursor_args = query._cursor_args - cursor_args_fields = cursor_args['projection'] - self.assertEqual( - cursor_args_fields, {'_text_score': {'$meta': 'textScore'}}) + cursor_args_fields = cursor_args["projection"] + assert cursor_args_fields == {"_text_score": {"$meta": "textScore"}} text_scores = [i.get_text_score() for i in query] - self.assertEqual(len(text_scores), 3) + assert len(text_scores) == 3 - self.assertTrue(text_scores[0] > text_scores[1]) - self.assertTrue(text_scores[1] > text_scores[2]) + assert text_scores[0] > text_scores[1] + assert text_scores[1] > text_scores[2] max_text_score = text_scores[0] # get item - item = News.objects.search_text( - 'brasil').order_by('$text_score').first() - self.assertEqual(item.get_text_score(), max_text_score) + item = News.objects.search_text("brasil").order_by("$text_score").first() + assert 
item.get_text_score() == max_text_score def test_distinct_handles_references_to_alias(self): - register_connection('testdb', 'mongoenginetest2') + register_connection("testdb", "mongoenginetest2") class Foo(Document): bar = ReferenceField("Bar") - meta = {'db_alias': 'testdb'} + meta = {"db_alias": "testdb"} class Bar(Document): text = StringField() - meta = {'db_alias': 'testdb'} + meta = {"db_alias": "testdb"} Bar.drop_collection() Foo.drop_collection() @@ -3460,13 +3546,14 @@ class QuerySetTest(unittest.TestCase): foo = Foo(bar=bar) foo.save() - self.assertEqual(Foo.objects.distinct("bar"), [bar]) + assert Foo.objects.distinct("bar") == [bar] def test_distinct_handles_db_field(self): """Ensure that distinct resolves field name to db_field as expected. """ + class Product(Document): - product_id = IntField(db_field='pid') + product_id = IntField(db_field="pid") Product.drop_collection() @@ -3474,15 +3561,12 @@ class QuerySetTest(unittest.TestCase): Product(product_id=2).save() Product(product_id=1).save() - self.assertEqual(set(Product.objects.distinct('product_id')), - set([1, 2])) - self.assertEqual(set(Product.objects.distinct('pid')), - set([1, 2])) + assert set(Product.objects.distinct("product_id")) == set([1, 2]) + assert set(Product.objects.distinct("pid")) == set([1, 2]) Product.drop_collection() def test_distinct_ListField_EmbeddedDocumentField(self): - class Author(EmbeddedDocument): name = StringField() @@ -3500,7 +3584,7 @@ class QuerySetTest(unittest.TestCase): Book.objects.create(title="The Stories", authors=[mark_twain, john_tolkien]) authors = Book.objects.distinct("authors") - self.assertEqual(authors, [mark_twain, john_tolkien]) + assert authors == [mark_twain, john_tolkien] def test_distinct_ListField_EmbeddedDocumentField_EmbeddedDocumentField(self): class Continent(EmbeddedDocument): @@ -3520,8 +3604,8 @@ class QuerySetTest(unittest.TestCase): Book.drop_collection() - europe = Continent(continent_name='europe') - asia = 
Continent(continent_name='asia') + europe = Continent(continent_name="europe") + asia = Continent(continent_name="asia") scotland = Country(country_name="Scotland", continent=europe) tibet = Country(country_name="Tibet", continent=asia) @@ -3534,19 +3618,18 @@ class QuerySetTest(unittest.TestCase): Book.objects.create(title="The Stories", authors=[mark_twain, john_tolkien]) country_list = Book.objects.distinct("authors.country") - self.assertEqual(country_list, [scotland, tibet]) + assert country_list == [scotland, tibet] continent_list = Book.objects.distinct("authors.country.continent") - self.assertEqual(continent_list, [europe, asia]) + assert continent_list == [europe, asia] def test_distinct_ListField_ReferenceField(self): - class Bar(Document): text = StringField() class Foo(Document): - bar = ReferenceField('Bar') - bar_lst = ListField(ReferenceField('Bar')) + bar = ReferenceField("Bar") + bar_lst = ListField(ReferenceField("Bar")) Bar.drop_collection() Foo.drop_collection() @@ -3560,11 +3643,12 @@ class QuerySetTest(unittest.TestCase): foo = Foo(bar=bar_1, bar_lst=[bar_1, bar_2]) foo.save() - self.assertEqual(Foo.objects.distinct("bar_lst"), [bar_1, bar_2]) + assert Foo.objects.distinct("bar_lst") == [bar_1, bar_2] def test_custom_manager(self): """Ensure that custom QuerySetManager instances work as expected. 
""" + class BlogPost(Document): tags = ListField(StringField()) deleted = BooleanField(default=False) @@ -3582,32 +3666,30 @@ class QuerySetTest(unittest.TestCase): @queryset_manager def music_posts(doc_cls, queryset, deleted=False): - return queryset(tags='music', - deleted=deleted).order_by('date') + return queryset(tags="music", deleted=deleted).order_by("date") BlogPost.drop_collection() - post1 = BlogPost(tags=['music', 'film']).save() - post2 = BlogPost(tags=['music']).save() - post3 = BlogPost(tags=['film', 'actors']).save() - post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save() + post1 = BlogPost(tags=["music", "film"]).save() + post2 = BlogPost(tags=["music"]).save() + post3 = BlogPost(tags=["film", "actors"]).save() + post4 = BlogPost(tags=["film", "actors", "music"], deleted=True).save() - self.assertEqual([p.id for p in BlogPost.objects()], - [post1.id, post2.id, post3.id]) - self.assertEqual([p.id for p in BlogPost.objects_1_arg()], - [post1.id, post2.id, post3.id]) - self.assertEqual([p.id for p in BlogPost.music_posts()], - [post1.id, post2.id]) + assert [p.id for p in BlogPost.objects()] == [post1.id, post2.id, post3.id] + assert [p.id for p in BlogPost.objects_1_arg()] == [ + post1.id, + post2.id, + post3.id, + ] + assert [p.id for p in BlogPost.music_posts()] == [post1.id, post2.id] - self.assertEqual([p.id for p in BlogPost.music_posts(True)], - [post4.id]) + assert [p.id for p in BlogPost.music_posts(True)] == [post4.id] BlogPost.drop_collection() def test_custom_manager_overriding_objects_works(self): - class Foo(Document): - bar = StringField(default='bar') + bar = StringField(default="bar") active = BooleanField(default=False) @queryset_manager @@ -3623,17 +3705,16 @@ class QuerySetTest(unittest.TestCase): Foo(active=True).save() Foo(active=False).save() - self.assertEqual(1, Foo.objects.count()) - self.assertEqual(1, Foo.with_inactive.count()) + assert 1 == Foo.objects.count() + assert 1 == Foo.with_inactive.count() 
Foo.with_inactive.first().delete() - self.assertEqual(0, Foo.with_inactive.count()) - self.assertEqual(1, Foo.objects.count()) + assert 0 == Foo.with_inactive.count() + assert 1 == Foo.objects.count() def test_inherit_objects(self): - class Foo(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} active = BooleanField(default=True) @queryset_manager @@ -3645,12 +3726,11 @@ class QuerySetTest(unittest.TestCase): Bar.drop_collection() Bar.objects.create(active=False) - self.assertEqual(0, Bar.objects.count()) + assert 0 == Bar.objects.count() def test_inherit_objects_override(self): - class Foo(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} active = BooleanField(default=True) @queryset_manager @@ -3658,25 +3738,25 @@ class QuerySetTest(unittest.TestCase): return queryset(active=True) class Bar(Foo): - @queryset_manager def objects(klass, queryset): return queryset(active=False) Bar.drop_collection() Bar.objects.create(active=False) - self.assertEqual(0, Foo.objects.count()) - self.assertEqual(1, Bar.objects.count()) + assert 0 == Foo.objects.count() + assert 1 == Bar.objects.count() def test_query_value_conversion(self): """Ensure that query values are properly converted when necessary. 
""" + class BlogPost(Document): author = ReferenceField(self.Person) BlogPost.drop_collection() - person = self.Person(name='test', age=30) + person = self.Person(name="test", age=30) person.save() post = BlogPost(author=person) @@ -3686,25 +3766,26 @@ class QuerySetTest(unittest.TestCase): # while using a ReferenceField's name - the document should be # converted to an DBRef, which is legal, unlike a Document object post_obj = BlogPost.objects(author=person).first() - self.assertEqual(post.id, post_obj.id) + assert post.id == post_obj.id # Test that lists of values work when using the 'in', 'nin' and 'all' post_obj = BlogPost.objects(author__in=[person]).first() - self.assertEqual(post.id, post_obj.id) + assert post.id == post_obj.id BlogPost.drop_collection() def test_update_value_conversion(self): """Ensure that values used in updates are converted before use. """ + class Group(Document): members = ListField(ReferenceField(self.Person)) Group.drop_collection() - user1 = self.Person(name='user1') + user1 = self.Person(name="user1") user1.save() - user2 = self.Person(name='user2') + user2 = self.Person(name="user2") user2.save() group = Group() @@ -3713,15 +3794,16 @@ class QuerySetTest(unittest.TestCase): Group.objects(id=group.id).update(set__members=[user1, user2]) group.reload() - self.assertEqual(len(group.members), 2) - self.assertEqual(group.members[0].name, user1.name) - self.assertEqual(group.members[1].name, user2.name) + assert len(group.members) == 2 + assert group.members[0].name == user1.name + assert group.members[1].name == user2.name Group.drop_collection() def test_bulk(self): """Ensure bulk querying by object id returns a proper dict. 
""" + class BlogPost(Document): title = StringField() @@ -3742,15 +3824,15 @@ class QuerySetTest(unittest.TestCase): ids = [post_1.id, post_2.id, post_5.id] objects = BlogPost.objects.in_bulk(ids) - self.assertEqual(len(objects), 3) + assert len(objects) == 3 - self.assertIn(post_1.id, objects) - self.assertIn(post_2.id, objects) - self.assertIn(post_5.id, objects) + assert post_1.id in objects + assert post_2.id in objects + assert post_5.id in objects - self.assertEqual(objects[post_1.id].title, post_1.title) - self.assertEqual(objects[post_2.id].title, post_2.title) - self.assertEqual(objects[post_5.id].title, post_5.title) + assert objects[post_1.id].title == post_1.title + assert objects[post_2.id].title == post_2.title + assert objects[post_5.id].title == post_5.title BlogPost.drop_collection() @@ -3760,21 +3842,21 @@ class QuerySetTest(unittest.TestCase): def test_custom_querysets(self): """Ensure that custom QuerySet classes may be used. """ - class CustomQuerySet(QuerySet): + class CustomQuerySet(QuerySet): def not_empty(self): return self.count() > 0 class Post(Document): - meta = {'queryset_class': CustomQuerySet} + meta = {"queryset_class": CustomQuerySet} Post.drop_collection() - self.assertIsInstance(Post.objects, CustomQuerySet) - self.assertFalse(Post.objects.not_empty()) + assert isinstance(Post.objects, CustomQuerySet) + assert not Post.objects.not_empty() Post().save() - self.assertTrue(Post.objects.not_empty()) + assert Post.objects.not_empty() Post.drop_collection() @@ -3783,7 +3865,6 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySet(QuerySet): - def not_empty(self): return self.count() > 0 @@ -3795,11 +3876,11 @@ class QuerySetTest(unittest.TestCase): Post.drop_collection() - self.assertIsInstance(Post.objects, CustomQuerySet) - self.assertFalse(Post.objects.not_empty()) + assert isinstance(Post.objects, CustomQuerySet) + assert not Post.objects.not_empty() Post().save() - self.assertTrue(Post.objects.not_empty()) + assert 
Post.objects.not_empty() Post.drop_collection() @@ -3808,7 +3889,6 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySetManager(QuerySetManager): - @staticmethod def get_queryset(doc_cls, queryset): return queryset(is_published=True) @@ -3821,8 +3901,8 @@ class QuerySetTest(unittest.TestCase): Post().save() Post(is_published=True).save() - self.assertEqual(Post.objects.count(), 2) - self.assertEqual(Post.published.count(), 1) + assert Post.objects.count() == 2 + assert Post.published.count() == 1 Post.drop_collection() @@ -3831,22 +3911,21 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySet(QuerySet): - def not_empty(self): return self.count() > 0 class Base(Document): - meta = {'abstract': True, 'queryset_class': CustomQuerySet} + meta = {"abstract": True, "queryset_class": CustomQuerySet} class Post(Base): pass Post.drop_collection() - self.assertIsInstance(Post.objects, CustomQuerySet) - self.assertFalse(Post.objects.not_empty()) + assert isinstance(Post.objects, CustomQuerySet) + assert not Post.objects.not_empty() Post().save() - self.assertTrue(Post.objects.not_empty()) + assert Post.objects.not_empty() Post.drop_collection() @@ -3855,7 +3934,6 @@ class QuerySetTest(unittest.TestCase): """ class CustomQuerySet(QuerySet): - def not_empty(self): return self.count() > 0 @@ -3863,18 +3941,18 @@ class QuerySetTest(unittest.TestCase): queryset_class = CustomQuerySet class Base(Document): - meta = {'abstract': True} + meta = {"abstract": True} objects = CustomQuerySetManager() class Post(Base): pass Post.drop_collection() - self.assertIsInstance(Post.objects, CustomQuerySet) - self.assertFalse(Post.objects.not_empty()) + assert isinstance(Post.objects, CustomQuerySet) + assert not Post.objects.not_empty() Post().save() - self.assertTrue(Post.objects.not_empty()) + assert Post.objects.not_empty() Post.drop_collection() @@ -3887,10 +3965,9 @@ class QuerySetTest(unittest.TestCase): for i in range(10): Post(title="Post %s" % i).save() - 
self.assertEqual(5, Post.objects.limit(5).skip(5).count(with_limit_and_skip=True)) + assert 5 == Post.objects.limit(5).skip(5).count(with_limit_and_skip=True) - self.assertEqual( - 10, Post.objects.limit(5).skip(5).count(with_limit_and_skip=False)) + assert 10 == Post.objects.limit(5).skip(5).count(with_limit_and_skip=False) def test_count_and_none(self): """Test count works with None()""" @@ -3902,8 +3979,8 @@ class QuerySetTest(unittest.TestCase): for i in range(0, 10): MyDoc().save() - self.assertEqual(MyDoc.objects.count(), 10) - self.assertEqual(MyDoc.objects.none().count(), 0) + assert MyDoc.objects.count() == 10 + assert MyDoc.objects.none().count() == 0 def test_count_list_embedded(self): class B(EmbeddedDocument): @@ -3912,11 +3989,12 @@ class QuerySetTest(unittest.TestCase): class A(Document): b = ListField(EmbeddedDocumentField(B)) - self.assertEqual(A.objects(b=[{'c': 'c'}]).count(), 0) + assert A.objects(b=[{"c": "c"}]).count() == 0 def test_call_after_limits_set(self): """Ensure that re-filtering after slicing works """ + class Post(Document): title = StringField() @@ -3926,13 +4004,14 @@ class QuerySetTest(unittest.TestCase): Post(title="Post 2").save() posts = Post.objects.all()[0:1] - self.assertEqual(len(list(posts())), 1) + assert len(list(posts())) == 1 Post.drop_collection() def test_order_then_filter(self): """Ensure that ordering still works after filtering. 
""" + class Number(Document): n = IntField() @@ -3941,15 +4020,16 @@ class QuerySetTest(unittest.TestCase): n2 = Number.objects.create(n=2) n1 = Number.objects.create(n=1) - self.assertEqual(list(Number.objects), [n2, n1]) - self.assertEqual(list(Number.objects.order_by('n')), [n1, n2]) - self.assertEqual(list(Number.objects.order_by('n').filter()), [n1, n2]) + assert list(Number.objects) == [n2, n1] + assert list(Number.objects.order_by("n")) == [n1, n2] + assert list(Number.objects.order_by("n").filter()) == [n1, n2] Number.drop_collection() def test_clone(self): """Ensure that cloning clones complex querysets """ + class Number(Document): n = IntField() @@ -3961,37 +4041,64 @@ class QuerySetTest(unittest.TestCase): test = Number.objects test2 = test.clone() - self.assertNotEqual(test, test2) - self.assertEqual(test.count(), test2.count()) + assert test != test2 + assert test.count() == test2.count() test = test.filter(n__gt=11) test2 = test.clone() - self.assertNotEqual(test, test2) - self.assertEqual(test.count(), test2.count()) + assert test != test2 + assert test.count() == test2.count() test = test.limit(10) test2 = test.clone() - self.assertNotEqual(test, test2) - self.assertEqual(test.count(), test2.count()) + assert test != test2 + assert test.count() == test2.count() + + Number.drop_collection() + + def test_clone_retains_settings(self): + """Ensure that cloning retains the read_preference and read_concern + """ + + class Number(Document): + n = IntField() + + Number.drop_collection() + + qs = Number.objects + qs_clone = qs.clone() + assert qs._read_preference == qs_clone._read_preference + assert qs._read_concern == qs_clone._read_concern + + qs = Number.objects.read_preference(ReadPreference.PRIMARY_PREFERRED) + qs_clone = qs.clone() + assert qs._read_preference == ReadPreference.PRIMARY_PREFERRED + assert qs._read_preference == qs_clone._read_preference + + qs = Number.objects.read_concern({"level": "majority"}) + qs_clone = qs.clone() + assert 
qs._read_concern.document == {"level": "majority"} + assert qs._read_concern == qs_clone._read_concern Number.drop_collection() def test_using(self): """Ensure that switching databases for a queryset is possible """ + class Number2(Document): n = IntField() Number2.drop_collection() - with switch_db(Number2, 'test2') as Number2: + with switch_db(Number2, "test2") as Number2: Number2.drop_collection() for i in range(1, 10): t = Number2(n=i) - t.switch_db('test2') + t.switch_db("test2") t.save() - self.assertEqual(len(Number2.objects.using('test2')), 9) + assert len(Number2.objects.using("test2")) == 9 def test_unset_reference(self): class Comment(Document): @@ -4003,34 +4110,35 @@ class QuerySetTest(unittest.TestCase): Comment.drop_collection() Post.drop_collection() - comment = Comment.objects.create(text='test') + comment = Comment.objects.create(text="test") post = Post.objects.create(comment=comment) - self.assertEqual(post.comment, comment) + assert post.comment == comment Post.objects.update(unset__comment=1) post.reload() - self.assertEqual(post.comment, None) + assert post.comment is None Comment.drop_collection() Post.drop_collection() def test_order_works_with_custom_db_field_names(self): class Number(Document): - n = IntField(db_field='number') + n = IntField(db_field="number") Number.drop_collection() n2 = Number.objects.create(n=2) n1 = Number.objects.create(n=1) - self.assertEqual(list(Number.objects), [n2, n1]) - self.assertEqual(list(Number.objects.order_by('n')), [n1, n2]) + assert list(Number.objects) == [n2, n1] + assert list(Number.objects.order_by("n")) == [n1, n2] Number.drop_collection() def test_order_works_with_primary(self): """Ensure that order_by and primary work. 
""" + class Number(Document): n = IntField(primary_key=True) @@ -4040,28 +4148,29 @@ class QuerySetTest(unittest.TestCase): Number(n=2).save() Number(n=3).save() - numbers = [n.n for n in Number.objects.order_by('-n')] - self.assertEqual([3, 2, 1], numbers) + numbers = [n.n for n in Number.objects.order_by("-n")] + assert [3, 2, 1] == numbers - numbers = [n.n for n in Number.objects.order_by('+n')] - self.assertEqual([1, 2, 3], numbers) + numbers = [n.n for n in Number.objects.order_by("+n")] + assert [1, 2, 3] == numbers Number.drop_collection() def test_ensure_index(self): """Ensure that manual creation of indexes works. """ + class Comment(Document): message = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} - Comment.ensure_index('message') + Comment.ensure_index("message") info = Comment.objects._collection.index_information() - info = [(value['key'], - value.get('unique', False), - value.get('sparse', False)) - for key, value in iteritems(info)] - self.assertIn(([('_cls', 1), ('message', 1)], False, False), info) + info = [ + (value["key"], value.get("unique", False), value.get("sparse", False)) + for key, value in info.items() + ] + assert ([("_cls", 1), ("message", 1)], False, False) in info def test_where(self): """Ensure that where clauses work. 
@@ -4080,33 +4189,34 @@ class QuerySetTest(unittest.TestCase): b.save() c.save() - query = IntPair.objects.where('this[~fielda] >= this[~fieldb]') - self.assertEqual( - 'this["fielda"] >= this["fieldb"]', query._where_clause) + query = IntPair.objects.where("this[~fielda] >= this[~fieldb]") + assert 'this["fielda"] >= this["fieldb"]' == query._where_clause results = list(query) - self.assertEqual(2, len(results)) - self.assertIn(a, results) - self.assertIn(c, results) + assert 2 == len(results) + assert a in results + assert c in results - query = IntPair.objects.where('this[~fielda] == this[~fieldb]') + query = IntPair.objects.where("this[~fielda] == this[~fieldb]") results = list(query) - self.assertEqual(1, len(results)) - self.assertIn(a, results) + assert 1 == len(results) + assert a in results query = IntPair.objects.where( - 'function() { return this[~fielda] >= this[~fieldb] }') - self.assertEqual( - 'function() { return this["fielda"] >= this["fieldb"] }', query._where_clause) + "function() { return this[~fielda] >= this[~fieldb] }" + ) + assert ( + 'function() { return this["fielda"] >= this["fieldb"] }' + == query._where_clause + ) results = list(query) - self.assertEqual(2, len(results)) - self.assertIn(a, results) - self.assertIn(c, results) + assert 2 == len(results) + assert a in results + assert c in results - with self.assertRaises(TypeError): + with pytest.raises(TypeError): list(IntPair.objects.where(fielda__gte=3)) def test_scalar(self): - class Organization(Document): name = StringField() @@ -4123,15 +4233,15 @@ class QuerySetTest(unittest.TestCase): # Efficient way to get all unique organization names for a given # set of users (Pretend this has additional filtering.) 
- user_orgs = set(User.objects.scalar('organization')) - orgs = Organization.objects(id__in=user_orgs).scalar('name') - self.assertEqual(list(orgs), ['White House']) + user_orgs = set(User.objects.scalar("organization")) + orgs = Organization.objects(id__in=user_orgs).scalar("name") + assert list(orgs) == ["White House"] # Efficient for generating listings, too. - orgs = Organization.objects.scalar('name').in_bulk(list(user_orgs)) - user_map = User.objects.scalar('name', 'organization') + orgs = Organization.objects.scalar("name").in_bulk(list(user_orgs)) + user_map = User.objects.scalar("name", "organization") user_listing = [(user, orgs[org]) for user, org in user_map] - self.assertEqual([("Bob Dole", "White House")], user_listing) + assert [("Bob Dole", "White House")] == user_listing def test_scalar_simple(self): class TestDoc(Document): @@ -4144,12 +4254,12 @@ class QuerySetTest(unittest.TestCase): TestDoc(x=20, y=False).save() TestDoc(x=30, y=True).save() - plist = list(TestDoc.objects.scalar('x', 'y')) + plist = list(TestDoc.objects.scalar("x", "y")) - self.assertEqual(len(plist), 3) - self.assertEqual(plist[0], (10, True)) - self.assertEqual(plist[1], (20, False)) - self.assertEqual(plist[2], (30, True)) + assert len(plist) == 3 + assert plist[0] == (10, True) + assert plist[1] == (20, False) + assert plist[2] == (30, True) class UserDoc(Document): name = StringField() @@ -4162,21 +4272,18 @@ class QuerySetTest(unittest.TestCase): UserDoc(name="Eliana", age=37).save() UserDoc(name="Tayza", age=15).save() - ulist = list(UserDoc.objects.scalar('name', 'age')) + ulist = list(UserDoc.objects.scalar("name", "age")) - self.assertEqual(ulist, [ - (u'Wilson Jr', 19), - (u'Wilson', 43), - (u'Eliana', 37), - (u'Tayza', 15)]) + assert ulist == [ + (u"Wilson Jr", 19), + (u"Wilson", 43), + (u"Eliana", 37), + (u"Tayza", 15), + ] - ulist = list(UserDoc.objects.scalar('name').order_by('age')) + ulist = list(UserDoc.objects.scalar("name").order_by("age")) - 
self.assertEqual(ulist, [ - (u'Tayza'), - (u'Wilson Jr'), - (u'Eliana'), - (u'Wilson')]) + assert ulist == [(u"Tayza"), (u"Wilson Jr"), (u"Eliana"), (u"Wilson")] def test_scalar_embedded(self): class Profile(EmbeddedDocument): @@ -4193,30 +4300,41 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() - Person(profile=Profile(name="Wilson Jr", age=19), - locale=Locale(city="Corumba-GO", country="Brazil")).save() + Person( + profile=Profile(name="Wilson Jr", age=19), + locale=Locale(city="Corumba-GO", country="Brazil"), + ).save() - Person(profile=Profile(name="Gabriel Falcao", age=23), - locale=Locale(city="New York", country="USA")).save() + Person( + profile=Profile(name="Gabriel Falcao", age=23), + locale=Locale(city="New York", country="USA"), + ).save() - Person(profile=Profile(name="Lincoln de souza", age=28), - locale=Locale(city="Belo Horizonte", country="Brazil")).save() + Person( + profile=Profile(name="Lincoln de souza", age=28), + locale=Locale(city="Belo Horizonte", country="Brazil"), + ).save() - Person(profile=Profile(name="Walter cruz", age=30), - locale=Locale(city="Brasilia", country="Brazil")).save() + Person( + profile=Profile(name="Walter cruz", age=30), + locale=Locale(city="Brasilia", country="Brazil"), + ).save() - self.assertEqual( - list(Person.objects.order_by( - 'profile__age').scalar('profile__name')), - [u'Wilson Jr', u'Gabriel Falcao', u'Lincoln de souza', u'Walter cruz']) + assert list( + Person.objects.order_by("profile__age").scalar("profile__name") + ) == [u"Wilson Jr", u"Gabriel Falcao", u"Lincoln de souza", u"Walter cruz"] - ulist = list(Person.objects.order_by('locale.city') - .scalar('profile__name', 'profile__age', 'locale__city')) - self.assertEqual(ulist, - [(u'Lincoln de souza', 28, u'Belo Horizonte'), - (u'Walter cruz', 30, u'Brasilia'), - (u'Wilson Jr', 19, u'Corumba-GO'), - (u'Gabriel Falcao', 23, u'New York')]) + ulist = list( + Person.objects.order_by("locale.city").scalar( + "profile__name", 
"profile__age", "locale__city" + ) + ) + assert ulist == [ + (u"Lincoln de souza", 28, u"Belo Horizonte"), + (u"Walter cruz", 30, u"Brasilia"), + (u"Wilson Jr", 19, u"Corumba-GO"), + (u"Gabriel Falcao", 23, u"New York"), + ] def test_scalar_decimal(self): from decimal import Decimal @@ -4226,10 +4344,10 @@ class QuerySetTest(unittest.TestCase): rating = DecimalField() Person.drop_collection() - Person(name="Wilson Jr", rating=Decimal('1.0')).save() + Person(name="Wilson Jr", rating=Decimal("1.0")).save() - ulist = list(Person.objects.scalar('name', 'rating')) - self.assertEqual(ulist, [(u'Wilson Jr', Decimal('1.0'))]) + ulist = list(Person.objects.scalar("name", "rating")) + assert ulist == [(u"Wilson Jr", Decimal("1.0"))] def test_scalar_reference_field(self): class State(Document): @@ -4247,8 +4365,8 @@ class QuerySetTest(unittest.TestCase): Person(name="Wilson JR", state=s1).save() - plist = list(Person.objects.scalar('name', 'state')) - self.assertEqual(plist, [(u'Wilson JR', s1)]) + plist = list(Person.objects.scalar("name", "state")) + assert plist == [(u"Wilson JR", s1)] def test_scalar_generic_reference_field(self): class State(Document): @@ -4266,8 +4384,8 @@ class QuerySetTest(unittest.TestCase): Person(name="Wilson JR", state=s1).save() - plist = list(Person.objects.scalar('name', 'state')) - self.assertEqual(plist, [(u'Wilson JR', s1)]) + plist = list(Person.objects.scalar("name", "state")) + assert plist == [(u"Wilson JR", s1)] def test_generic_reference_field_with_only_and_as_pymongo(self): class TestPerson(Document): @@ -4280,26 +4398,32 @@ class QuerySetTest(unittest.TestCase): TestPerson.drop_collection() TestActivity.drop_collection() - person = TestPerson(name='owner') + person = TestPerson(name="owner") person.save() - a1 = TestActivity(name='a1', owner=person) + a1 = TestActivity(name="a1", owner=person) a1.save() - activity = TestActivity.objects(owner=person).scalar('id', 'owner').no_dereference().first() - self.assertEqual(activity[0], 
a1.pk) - self.assertEqual(activity[1]['_ref'], DBRef('test_person', person.pk)) + activity = ( + TestActivity.objects(owner=person) + .scalar("id", "owner") + .no_dereference() + .first() + ) + assert activity[0] == a1.pk + assert activity[1]["_ref"] == DBRef("test_person", person.pk) - activity = TestActivity.objects(owner=person).only('id', 'owner')[0] - self.assertEqual(activity.pk, a1.pk) - self.assertEqual(activity.owner, person) + activity = TestActivity.objects(owner=person).only("id", "owner")[0] + assert activity.pk == a1.pk + assert activity.owner == person - activity = TestActivity.objects(owner=person).only('id', 'owner').as_pymongo().first() - self.assertEqual(activity['_id'], a1.pk) - self.assertTrue(activity['owner']['_ref'], DBRef('test_person', person.pk)) + activity = ( + TestActivity.objects(owner=person).only("id", "owner").as_pymongo().first() + ) + assert activity["_id"] == a1.pk + assert activity["owner"]["_ref"], DBRef("test_person", person.pk) def test_scalar_db_field(self): - class TestDoc(Document): x = IntField() y = BooleanField() @@ -4310,14 +4434,13 @@ class QuerySetTest(unittest.TestCase): TestDoc(x=20, y=False).save() TestDoc(x=30, y=True).save() - plist = list(TestDoc.objects.scalar('x', 'y')) - self.assertEqual(len(plist), 3) - self.assertEqual(plist[0], (10, True)) - self.assertEqual(plist[1], (20, False)) - self.assertEqual(plist[2], (30, True)) + plist = list(TestDoc.objects.scalar("x", "y")) + assert len(plist) == 3 + assert plist[0] == (10, True) + assert plist[1] == (20, False) + assert plist[2] == (30, True) def test_scalar_primary_key(self): - class SettingValue(Document): key = StringField(primary_key=True) value = StringField() @@ -4326,8 +4449,8 @@ class QuerySetTest(unittest.TestCase): s = SettingValue(key="test", value="test value") s.save() - val = SettingValue.objects.scalar('key', 'value') - self.assertEqual(list(val), [('test', 'test value')]) + val = SettingValue.objects.scalar("key", "value") + assert list(val) 
== [("test", "test value")] def test_scalar_cursor_behaviour(self): """Ensure that a query returns a valid set of results. @@ -4338,140 +4461,214 @@ class QuerySetTest(unittest.TestCase): person2.save() # Find all people in the collection - people = self.Person.objects.scalar('name') - self.assertEqual(people.count(), 2) + people = self.Person.objects.scalar("name") + assert people.count() == 2 results = list(people) - self.assertEqual(results[0], "User A") - self.assertEqual(results[1], "User B") + assert results[0] == "User A" + assert results[1] == "User B" # Use a query to filter the people found to just person1 - people = self.Person.objects(age=20).scalar('name') - self.assertEqual(people.count(), 1) - person = people.next() - self.assertEqual(person, "User A") + people = self.Person.objects(age=20).scalar("name") + assert people.count() == 1 + person = next(people) + assert person == "User A" # Test limit - people = list(self.Person.objects.limit(1).scalar('name')) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], 'User A') + people = list(self.Person.objects.limit(1).scalar("name")) + assert len(people) == 1 + assert people[0] == "User A" # Test skip - people = list(self.Person.objects.skip(1).scalar('name')) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], 'User B') + people = list(self.Person.objects.skip(1).scalar("name")) + assert len(people) == 1 + assert people[0] == "User B" person3 = self.Person(name="User C", age=40) person3.save() # Test slice limit - people = list(self.Person.objects[:2].scalar('name')) - self.assertEqual(len(people), 2) - self.assertEqual(people[0], 'User A') - self.assertEqual(people[1], 'User B') + people = list(self.Person.objects[:2].scalar("name")) + assert len(people) == 2 + assert people[0] == "User A" + assert people[1] == "User B" # Test slice skip - people = list(self.Person.objects[1:].scalar('name')) - self.assertEqual(len(people), 2) - self.assertEqual(people[0], 'User B') - 
self.assertEqual(people[1], 'User C') + people = list(self.Person.objects[1:].scalar("name")) + assert len(people) == 2 + assert people[0] == "User B" + assert people[1] == "User C" # Test slice limit and skip - people = list(self.Person.objects[1:2].scalar('name')) - self.assertEqual(len(people), 1) - self.assertEqual(people[0], 'User B') + people = list(self.Person.objects[1:2].scalar("name")) + assert len(people) == 1 + assert people[0] == "User B" - people = list(self.Person.objects[1:1].scalar('name')) - self.assertEqual(len(people), 0) + # people = list(self.Person.objects[1:1].scalar("name")) + people = self.Person.objects[1:1] + people = people.scalar("name") + assert len(people) == 0 # Test slice out of range - people = list(self.Person.objects.scalar('name')[80000:80001]) - self.assertEqual(len(people), 0) + people = list(self.Person.objects.scalar("name")[80000:80001]) + assert len(people) == 0 # Test larger slice __repr__ self.Person.objects.delete() for i in range(55): - self.Person(name='A%s' % i, age=i).save() + self.Person(name="A%s" % i, age=i).save() - self.assertEqual(self.Person.objects.scalar('name').count(), 55) - self.assertEqual( - "A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) - self.assertEqual( - "A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) - if six.PY3: - self.assertEqual("['A1', 'A2']", "%s" % self.Person.objects.order_by( - 'age').scalar('name')[1:3]) - self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by( - 'age').scalar('name')[51:53]) - else: - self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by( - 'age').scalar('name')[1:3]) - self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by( - 'age').scalar('name')[51:53]) + assert self.Person.objects.scalar("name").count() == 55 + assert ( + "A0" == "%s" % self.Person.objects.order_by("name").scalar("name").first() + ) + assert "A0" == "%s" % 
self.Person.objects.scalar("name").order_by("name")[0] + assert ( + "['A1', 'A2']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[1:3] + ) + assert ( + "['A51', 'A52']" + == "%s" % self.Person.objects.order_by("age").scalar("name")[51:53] + ) # with_id and in_bulk - person = self.Person.objects.order_by('name').first() - self.assertEqual("A0", "%s" % - self.Person.objects.scalar('name').with_id(person.id)) + person = self.Person.objects.order_by("name").first() + assert "A0" == "%s" % self.Person.objects.scalar("name").with_id(person.id) - pks = self.Person.objects.order_by('age').scalar('pk')[1:3] - names = self.Person.objects.scalar('name').in_bulk(list(pks)).values() - if six.PY3: - expected = "['A1', 'A2']" - else: - expected = "[u'A1', u'A2']" - self.assertEqual(expected, "%s" % sorted(names)) + pks = self.Person.objects.order_by("age").scalar("pk")[1:3] + names = self.Person.objects.scalar("name").in_bulk(list(pks)).values() + expected = "['A1', 'A2']" + assert expected == "%s" % sorted(names) + + def test_fields(self): + class Bar(EmbeddedDocument): + v = StringField() + z = StringField() + + class Foo(Document): + x = StringField() + y = IntField() + items = EmbeddedDocumentListField(Bar) + + Foo.drop_collection() + + Foo(x="foo1", y=1).save() + Foo(x="foo2", y=2, items=[]).save() + Foo(x="foo3", y=3, items=[Bar(z="a", v="V")]).save() + Foo( + x="foo4", + y=4, + items=[ + Bar(z="a", v="V"), + Bar(z="b", v="W"), + Bar(z="b", v="X"), + Bar(z="c", v="V"), + ], + ).save() + Foo( + x="foo5", + y=5, + items=[ + Bar(z="b", v="X"), + Bar(z="c", v="V"), + Bar(z="d", v="V"), + Bar(z="e", v="V"), + ], + ).save() + + foos_with_x = list(Foo.objects.order_by("y").fields(x=1)) + + assert all(o.x is not None for o in foos_with_x) + + foos_without_y = list(Foo.objects.order_by("y").fields(y=0)) + + assert all(o.y is None for o in foos_without_y) + + foos_with_sliced_items = list(Foo.objects.order_by("y").fields(slice__items=1)) + + assert 
foos_with_sliced_items[0].items == [] + assert foos_with_sliced_items[1].items == [] + assert len(foos_with_sliced_items[2].items) == 1 + assert foos_with_sliced_items[2].items[0].z == "a" + assert len(foos_with_sliced_items[3].items) == 1 + assert foos_with_sliced_items[3].items[0].z == "a" + assert len(foos_with_sliced_items[4].items) == 1 + assert foos_with_sliced_items[4].items[0].z == "b" + + foos_with_elem_match_items = list( + Foo.objects.order_by("y").fields(elemMatch__items={"z": "b"}) + ) + + assert foos_with_elem_match_items[0].items == [] + assert foos_with_elem_match_items[1].items == [] + assert foos_with_elem_match_items[2].items == [] + assert len(foos_with_elem_match_items[3].items) == 1 + assert foos_with_elem_match_items[3].items[0].z == "b" + assert foos_with_elem_match_items[3].items[0].v == "W" + assert len(foos_with_elem_match_items[4].items) == 1 + assert foos_with_elem_match_items[4].items[0].z == "b" def test_elem_match(self): class Foo(EmbeddedDocument): shape = StringField() color = StringField() thick = BooleanField() - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Bar(Document): foo = ListField(EmbeddedDocumentField(Foo)) - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} Bar.drop_collection() - b1 = Bar(foo=[Foo(shape="square", color="purple", thick=False), - Foo(shape="circle", color="red", thick=True)]) + b1 = Bar( + foo=[ + Foo(shape="square", color="purple", thick=False), + Foo(shape="circle", color="red", thick=True), + ] + ) b1.save() - b2 = Bar(foo=[Foo(shape="square", color="red", thick=True), - Foo(shape="circle", color="purple", thick=False)]) + b2 = Bar( + foo=[ + Foo(shape="square", color="red", thick=True), + Foo(shape="circle", color="purple", thick=False), + ] + ) b2.save() - b3 = Bar(foo=[Foo(shape="square", thick=True), - Foo(shape="circle", color="purple", thick=False)]) + b3 = Bar( + foo=[ + Foo(shape="square", thick=True), + Foo(shape="circle", 
color="purple", thick=False), + ] + ) b3.save() - ak = list( - Bar.objects(foo__match={'shape': "square", "color": "purple"})) - self.assertEqual([b1], ak) + ak = list(Bar.objects(foo__match={"shape": "square", "color": "purple"})) + assert [b1] == ak - ak = list( - Bar.objects(foo__elemMatch={'shape': "square", "color": "purple"})) - self.assertEqual([b1], ak) + ak = list(Bar.objects(foo__elemMatch={"shape": "square", "color": "purple"})) + assert [b1] == ak ak = list(Bar.objects(foo__match=Foo(shape="square", color="purple"))) - self.assertEqual([b1], ak) + assert [b1] == ak ak = list( - Bar.objects(foo__elemMatch={'shape': "square", "color__exists": True})) - self.assertEqual([b1, b2], ak) + Bar.objects(foo__elemMatch={"shape": "square", "color__exists": True}) + ) + assert [b1, b2] == ak + + ak = list(Bar.objects(foo__match={"shape": "square", "color__exists": True})) + assert [b1, b2] == ak ak = list( - Bar.objects(foo__match={'shape': "square", "color__exists": True})) - self.assertEqual([b1, b2], ak) + Bar.objects(foo__elemMatch={"shape": "square", "color__exists": False}) + ) + assert [b3] == ak - ak = list( - Bar.objects(foo__elemMatch={'shape': "square", "color__exists": False})) - self.assertEqual([b3], ak) - - ak = list( - Bar.objects(foo__match={'shape': "square", "color__exists": False})) - self.assertEqual([b3], ak) + ak = list(Bar.objects(foo__match={"shape": "square", "color__exists": False})) + assert [b3] == ak def test_upsert_includes_cls(self): """Upserts should include _cls information for inheritable classes @@ -4481,24 +4678,25 @@ class QuerySetTest(unittest.TestCase): test = StringField() Test.drop_collection() - Test.objects(test='foo').update_one(upsert=True, set__test='foo') - self.assertNotIn('_cls', Test._collection.find_one()) + Test.objects(test="foo").update_one(upsert=True, set__test="foo") + assert "_cls" not in Test._collection.find_one() class Test(Document): - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": 
True} test = StringField() Test.drop_collection() - Test.objects(test='foo').update_one(upsert=True, set__test='foo') - self.assertIn('_cls', Test._collection.find_one()) + Test.objects(test="foo").update_one(upsert=True, set__test="foo") + assert "_cls" in Test._collection.find_one() def test_update_upsert_looks_like_a_digit(self): class MyDoc(DynamicDocument): pass + MyDoc.drop_collection() - self.assertEqual(1, MyDoc.objects.update_one(upsert=True, inc__47=1)) - self.assertEqual(MyDoc.objects.get()['47'], 1) + assert 1 == MyDoc.objects.update_one(upsert=True, inc__47=1) + assert MyDoc.objects.get()["47"] == 1 def test_dictfield_key_looks_like_a_digit(self): """Only should work with DictField even if they have numeric keys.""" @@ -4507,86 +4705,132 @@ class QuerySetTest(unittest.TestCase): test = DictField() MyDoc.drop_collection() - doc = MyDoc(test={'47': 1}) + doc = MyDoc(test={"47": 1}) doc.save() - self.assertEqual(MyDoc.objects.only('test__47').get().test['47'], 1) + assert MyDoc.objects.only("test__47").get().test["47"] == 1 + + def test_clear_cls_query(self): + class Parent(Document): + name = StringField() + meta = {"allow_inheritance": True} + + class Child(Parent): + age = IntField() + + Parent.drop_collection() + + # Default query includes the "_cls" check. + assert Parent.objects._query == {"_cls": {"$in": ("Parent", "Parent.Child")}} + + # Clearing the "_cls" query should work. + assert Parent.objects.clear_cls_query()._query == {} + + # Clearing the "_cls" query should not persist across queryset instances. + assert Parent.objects._query == {"_cls": {"$in": ("Parent", "Parent.Child")}} + + # The rest of the query should not be cleared. 
+ assert Parent.objects.filter(name="xyz").clear_cls_query()._query == { + "name": "xyz" + } + + Parent.objects.create(name="foo") + Child.objects.create(name="bar", age=1) + assert Parent.objects.clear_cls_query().count() == 2 + assert Parent.objects.count() == 2 + assert Child.objects().count() == 1 + + # XXX This isn't really how you'd want to use `clear_cls_query()`, but + # it's a decent test to validate its behavior nonetheless. + assert Child.objects.clear_cls_query().count() == 2 def test_read_preference(self): class Bar(Document): txt = StringField() - meta = { - 'indexes': ['txt'] - } + meta = {"indexes": ["txt"]} Bar.drop_collection() - bars = list(Bar.objects(read_preference=ReadPreference.PRIMARY)) - self.assertEqual([], bars) + bar = Bar.objects.create(txt="xyz") - self.assertRaises(TypeError, Bar.objects, read_preference='Primary') + bars = list(Bar.objects.read_preference(ReadPreference.PRIMARY)) + assert bars == [bar] - # read_preference as a kwarg - bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) - - # read_preference as a query set method bars = Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + assert bars._read_preference == ReadPreference.SECONDARY_PREFERRED + assert ( + bars._cursor.collection.read_preference + == ReadPreference.SECONDARY_PREFERRED + ) - # read_preference after skip - bars = Bar.objects.skip(1) \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + # Make sure that 
`.read_preference(...)` does accept string values. + with pytest.raises(TypeError): + Bar.objects.read_preference("Primary") - # read_preference after limit - bars = Bar.objects.limit(1) \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + def assert_read_pref(qs, expected_read_pref): + assert qs._read_preference == expected_read_pref + assert qs._cursor.collection.read_preference == expected_read_pref - # read_preference after order_by - bars = Bar.objects.order_by('txt') \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + # Make sure read preference is respected after a `.skip(...)`. + bars = Bar.objects.skip(1).read_preference(ReadPreference.SECONDARY_PREFERRED) + assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) - # read_preference after hint - bars = Bar.objects.hint([('txt', 1)]) \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._read_preference, - ReadPreference.SECONDARY_PREFERRED) - self.assertEqual(bars._cursor._Cursor__read_preference, - ReadPreference.SECONDARY_PREFERRED) + # Make sure read preference is respected after a `.limit(...)`. + bars = Bar.objects.limit(1).read_preference(ReadPreference.SECONDARY_PREFERRED) + assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) - def test_read_preference_aggregation_framework(self): + # Make sure read preference is respected after an `.order_by(...)`. + bars = Bar.objects.order_by("txt").read_preference( + ReadPreference.SECONDARY_PREFERRED + ) + assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) + + # Make sure read preference is respected after a `.hint(...)`. 
+ bars = Bar.objects.hint([("txt", 1)]).read_preference( + ReadPreference.SECONDARY_PREFERRED + ) + assert_read_pref(bars, ReadPreference.SECONDARY_PREFERRED) + + def test_read_concern(self): class Bar(Document): txt = StringField() - meta = { - 'indexes': ['txt'] - } - # Aggregates with read_preference - bars = Bar.objects \ - .read_preference(ReadPreference.SECONDARY_PREFERRED) \ - .aggregate() - self.assertEqual(bars._CommandCursor__collection.read_preference, - ReadPreference.SECONDARY_PREFERRED) + meta = {"indexes": ["txt"]} + + Bar.drop_collection() + bar = Bar.objects.create(txt="xyz") + + bars = list(Bar.objects.read_concern(None)) + assert bars == [bar] + + bars = Bar.objects.read_concern({"level": "local"}) + assert bars._read_concern.document == {"level": "local"} + assert bars._cursor.collection.read_concern.document == {"level": "local"} + + # Make sure that `.read_concern(...)` does not accept string values. + with pytest.raises(TypeError): + Bar.objects.read_concern("local") + + def assert_read_concern(qs, expected_read_concern): + assert qs._read_concern.document == expected_read_concern + assert qs._cursor.collection.read_concern.document == expected_read_concern + + # Make sure read concern is respected after a `.skip(...)`. + bars = Bar.objects.skip(1).read_concern({"level": "local"}) + assert_read_concern(bars, {"level": "local"}) + + # Make sure read concern is respected after a `.limit(...)`. + bars = Bar.objects.limit(1).read_concern({"level": "local"}) + assert_read_concern(bars, {"level": "local"}) + + # Make sure read concern is respected after an `.order_by(...)`. + bars = Bar.objects.order_by("txt").read_concern({"level": "local"}) + assert_read_concern(bars, {"level": "local"}) + + # Make sure read concern is respected after a `.hint(...)`. 
+ bars = Bar.objects.hint([("txt", 1)]).read_concern({"level": "majority"}) + assert_read_concern(bars, {"level": "majority"}) def test_json_simple(self): - class Embedded(EmbeddedDocument): string = StringField() @@ -4599,10 +4843,10 @@ class QuerySetTest(unittest.TestCase): Doc(string="Bye", embedded_field=Embedded(string="Bye")).save() Doc().save() - json_data = Doc.objects.to_json(sort_keys=True, separators=(',', ':')) + json_data = Doc.objects.to_json(sort_keys=True, separators=(",", ":")) doc_objects = list(Doc.objects) - self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) + assert doc_objects == Doc.objects.from_json(json_data) def test_json_complex(self): class EmbeddedDoc(EmbeddedDocument): @@ -4612,33 +4856,34 @@ class QuerySetTest(unittest.TestCase): pass class Doc(Document): - string_field = StringField(default='1') + string_field = StringField(default="1") int_field = IntField(default=1) float_field = FloatField(default=1.1) boolean_field = BooleanField(default=True) datetime_field = DateTimeField(default=datetime.datetime.now) embedded_document_field = EmbeddedDocumentField( - EmbeddedDoc, default=lambda: EmbeddedDoc()) + EmbeddedDoc, default=lambda: EmbeddedDoc() + ) list_field = ListField(default=lambda: [1, 2, 3]) dict_field = DictField(default=lambda: {"hello": "world"}) objectid_field = ObjectIdField(default=ObjectId) - reference_field = ReferenceField( - Simple, default=lambda: Simple().save()) + reference_field = ReferenceField(Simple, default=lambda: Simple().save()) map_field = MapField(IntField(), default=lambda: {"simple": 1}) decimal_field = DecimalField(default=1.0) complex_datetime_field = ComplexDateTimeField(default=datetime.datetime.now) url_field = URLField(default="http://mongoengine.org") dynamic_field = DynamicField(default=1) generic_reference_field = GenericReferenceField( - default=lambda: Simple().save()) - sorted_list_field = SortedListField(IntField(), - default=lambda: [1, 2, 3]) + default=lambda: 
Simple().save() + ) + sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) email_field = EmailField(default="ross@example.com") geo_point_field = GeoPointField(default=lambda: [1, 2]) sequence_field = SequenceField() uuid_field = UUIDField(default=uuid.uuid4) generic_embedded_document_field = GenericEmbeddedDocumentField( - default=lambda: EmbeddedDoc()) + default=lambda: EmbeddedDoc() + ) Simple.drop_collection() Doc.drop_collection() @@ -4647,7 +4892,7 @@ class QuerySetTest(unittest.TestCase): json_data = Doc.objects.to_json() doc_objects = list(Doc.objects) - self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) + assert doc_objects == Doc.objects.from_json(json_data) def test_as_pymongo(self): class LastLogin(EmbeddedDocument): @@ -4663,111 +4908,93 @@ class QuerySetTest(unittest.TestCase): User.drop_collection() - User.objects.create(id='Bob', name="Bob Dole", age=89, price=Decimal('1.11')) + User.objects.create(id="Bob", name="Bob Dole", age=89, price=Decimal("1.11")) User.objects.create( - id='Barak', + id="Barak", name="Barak Obama", age=51, - price=Decimal('2.22'), - last_login=LastLogin( - location='White House', - ip='104.107.108.116' - ) + price=Decimal("2.22"), + last_login=LastLogin(location="White House", ip="104.107.108.116"), ) results = User.objects.as_pymongo() - self.assertEqual( - set(results[0].keys()), - set(['_id', 'name', 'age', 'price']) - ) - self.assertEqual( - set(results[1].keys()), - set(['_id', 'name', 'age', 'price', 'last_login']) + assert set(results[0].keys()) == set(["_id", "name", "age", "price"]) + assert set(results[1].keys()) == set( + ["_id", "name", "age", "price", "last_login"] ) - results = User.objects.only('id', 'name').as_pymongo() - self.assertEqual(set(results[0].keys()), set(['_id', 'name'])) + results = User.objects.only("id", "name").as_pymongo() + assert set(results[0].keys()) == set(["_id", "name"]) - users = User.objects.only('name', 'price').as_pymongo() + users = 
User.objects.only("name", "price").as_pymongo() results = list(users) - self.assertIsInstance(results[0], dict) - self.assertIsInstance(results[1], dict) - self.assertEqual(results[0]['name'], 'Bob Dole') - self.assertEqual(results[0]['price'], 1.11) - self.assertEqual(results[1]['name'], 'Barak Obama') - self.assertEqual(results[1]['price'], 2.22) + assert isinstance(results[0], dict) + assert isinstance(results[1], dict) + assert results[0]["name"] == "Bob Dole" + assert results[0]["price"] == 1.11 + assert results[1]["name"] == "Barak Obama" + assert results[1]["price"] == 2.22 - users = User.objects.only('name', 'last_login').as_pymongo() + users = User.objects.only("name", "last_login").as_pymongo() results = list(users) - self.assertIsInstance(results[0], dict) - self.assertIsInstance(results[1], dict) - self.assertEqual(results[0], { - '_id': 'Bob', - 'name': 'Bob Dole' - }) - self.assertEqual(results[1], { - '_id': 'Barak', - 'name': 'Barak Obama', - 'last_login': { - 'location': 'White House', - 'ip': '104.107.108.116' - } - }) + assert isinstance(results[0], dict) + assert isinstance(results[1], dict) + assert results[0] == {"_id": "Bob", "name": "Bob Dole"} + assert results[1] == { + "_id": "Barak", + "name": "Barak Obama", + "last_login": {"location": "White House", "ip": "104.107.108.116"}, + } def test_as_pymongo_returns_cls_attribute_when_using_inheritance(self): class User(Document): name = StringField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} User.drop_collection() user = User(name="Bob Dole").save() result = User.objects.as_pymongo().first() - self.assertEqual( - result, - { - '_cls': 'User', - '_id': user.id, - 'name': 'Bob Dole' - } - ) + assert result == {"_cls": "User", "_id": user.id, "name": "Bob Dole"} def test_as_pymongo_json_limit_fields(self): - class User(Document): email = EmailField(unique=True, required=True) - password_hash = StringField( - db_field='password_hash', required=True) - password_salt = 
StringField( - db_field='password_salt', required=True) + password_hash = StringField(db_field="password_hash", required=True) + password_salt = StringField(db_field="password_salt", required=True) User.drop_collection() - User(email="ross@example.com", password_salt="SomeSalt", - password_hash="SomeHash").save() + User( + email="ross@example.com", password_salt="SomeSalt", password_hash="SomeHash" + ).save() serialized_user = User.objects.exclude( - 'password_salt', 'password_hash').as_pymongo()[0] - self.assertEqual({'_id', 'email'}, set(serialized_user.keys())) + "password_salt", "password_hash" + ).as_pymongo()[0] + assert {"_id", "email"} == set(serialized_user.keys()) serialized_user = User.objects.exclude( - 'id', 'password_salt', 'password_hash').to_json() - self.assertEqual('[{"email": "ross@example.com"}]', serialized_user) + "id", "password_salt", "password_hash" + ).to_json() + assert '[{"email": "ross@example.com"}]' == serialized_user - serialized_user = User.objects.only('email').as_pymongo()[0] - self.assertEqual({'_id', 'email'}, set(serialized_user.keys())) + serialized_user = User.objects.only("email").as_pymongo()[0] + assert {"_id", "email"} == set(serialized_user.keys()) - serialized_user = User.objects.exclude( - 'password_salt').only('email').as_pymongo()[0] - self.assertEqual({'_id', 'email'}, set(serialized_user.keys())) + serialized_user = ( + User.objects.exclude("password_salt").only("email").as_pymongo()[0] + ) + assert {"_id", "email"} == set(serialized_user.keys()) - serialized_user = User.objects.exclude( - 'password_salt', 'id').only('email').as_pymongo()[0] - self.assertEqual({'email'}, set(serialized_user.keys())) + serialized_user = ( + User.objects.exclude("password_salt", "id").only("email").as_pymongo()[0] + ) + assert {"email"} == set(serialized_user.keys()) - serialized_user = User.objects.exclude( - 'password_salt', 'id').only('email').to_json() - self.assertEqual('[{"email": "ross@example.com"}]', - serialized_user) + 
serialized_user = ( + User.objects.exclude("password_salt", "id").only("email").to_json() + ) + assert '[{"email": "ross@example.com"}]' == serialized_user def test_only_after_count(self): """Test that only() works after count()""" @@ -4776,23 +5003,22 @@ class QuerySetTest(unittest.TestCase): name = StringField() age = IntField() address = StringField() + User.drop_collection() - user = User(name="User", age=50, - address="Moscow, Russia").save() + user = User(name="User", age=50, address="Moscow, Russia").save() user_queryset = User.objects(age=50) result = user_queryset.only("name", "age").as_pymongo().first() - self.assertEqual(result, {"_id": user.id, "name": "User", "age": 50}) + assert result == {"_id": user.id, "name": "User", "age": 50} result = user_queryset.count() - self.assertEqual(result, 1) + assert result == 1 result = user_queryset.only("name", "age").as_pymongo().first() - self.assertEqual(result, {"_id": user.id, "name": "User", "age": 50}) + assert result == {"_id": user.id, "name": "User", "age": 50} def test_no_dereference(self): - class Organization(Document): name = StringField() @@ -4809,12 +5035,12 @@ class QuerySetTest(unittest.TestCase): qs = User.objects() qs_user = qs.first() - self.assertIsInstance(qs.first().organization, Organization) + assert isinstance(qs.first().organization, Organization) - self.assertIsInstance(qs.no_dereference().first().organization, DBRef) + assert isinstance(qs.no_dereference().first().organization, DBRef) - self.assertIsInstance(qs_user.organization, Organization) - self.assertIsInstance(qs.first().organization, Organization) + assert isinstance(qs_user.organization, Organization) + assert isinstance(qs.first().organization, Organization) def test_no_dereference_internals(self): # Test the internals on which queryset.no_dereference relies on @@ -4828,22 +5054,24 @@ class QuerySetTest(unittest.TestCase): Organization.drop_collection() cls_organization_field = User.organization - 
self.assertTrue(cls_organization_field._auto_dereference, True) # default + assert cls_organization_field._auto_dereference, True # default org = Organization(name="whatever").save() User(organization=org).save() qs_no_deref = User.objects().no_dereference() user_no_deref = qs_no_deref.first() - self.assertFalse(qs_no_deref._auto_dereference) + assert not qs_no_deref._auto_dereference # Make sure the instance field is different from the class field - instance_org_field = user_no_deref._fields['organization'] - self.assertIsNot(instance_org_field, cls_organization_field) - self.assertFalse(instance_org_field._auto_dereference) + instance_org_field = user_no_deref._fields["organization"] + assert instance_org_field is not cls_organization_field + assert not instance_org_field._auto_dereference - self.assertIsInstance(user_no_deref.organization, DBRef) - self.assertTrue(cls_organization_field._auto_dereference, True) # Make sure the class Field wasn't altered + assert isinstance(user_no_deref.organization, DBRef) + assert ( + cls_organization_field._auto_dereference + ), True # Make sure the class Field wasn't altered def test_no_dereference_no_side_effect_on_existing_instance(self): # Relates to issue #1677 - ensures no regression of the bug @@ -4859,8 +5087,7 @@ class QuerySetTest(unittest.TestCase): Organization.drop_collection() org = Organization(name="whatever").save() - User(organization=org, - organization_gen=org).save() + User(organization=org, organization_gen=org).save() qs = User.objects() user = qs.first() @@ -4869,17 +5096,16 @@ class QuerySetTest(unittest.TestCase): user_no_deref = qs_no_deref.first() # ReferenceField - no_derf_org = user_no_deref.organization # was triggering the bug - self.assertIsInstance(no_derf_org, DBRef) - self.assertIsInstance(user.organization, Organization) + no_derf_org = user_no_deref.organization # was triggering the bug + assert isinstance(no_derf_org, DBRef) + assert isinstance(user.organization, Organization) # 
GenericReferenceField no_derf_org_gen = user_no_deref.organization_gen - self.assertIsInstance(no_derf_org_gen, dict) - self.assertIsInstance(user.organization_gen, Organization) + assert isinstance(no_derf_org_gen, dict) + assert isinstance(user.organization_gen, Organization) def test_no_dereference_embedded_doc(self): - class User(Document): name = StringField() @@ -4902,22 +5128,20 @@ class QuerySetTest(unittest.TestCase): member = Member(name="Flash", user=user) - company = Organization(name="Mongo Inc", - ceo=user, - member=member, - admins=[user], - members=[member]) + company = Organization( + name="Mongo Inc", ceo=user, member=member, admins=[user], members=[member] + ) company.save() org = Organization.objects().no_dereference().first() - self.assertNotEqual(id(org._fields['admins']), id(Organization.admins)) - self.assertFalse(org._fields['admins']._auto_dereference) + assert id(org._fields["admins"]) != id(Organization.admins) + assert not org._fields["admins"]._auto_dereference admin = org.admins[0] - self.assertIsInstance(admin, DBRef) - self.assertIsInstance(org.member.user, DBRef) - self.assertIsInstance(org.members[0].user, DBRef) + assert isinstance(admin, DBRef) + assert isinstance(org.member.user, DBRef) + assert isinstance(org.members[0].user, DBRef) def test_cached_queryset(self): class Person(Document): @@ -4928,11 +5152,11 @@ class QuerySetTest(unittest.TestCase): Person(name="No: %s" % i).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 people = Person.objects [x for x in people] - self.assertEqual(100, len(people._result_cache)) + assert 100 == len(people._result_cache) import platform @@ -4940,15 +5164,15 @@ class QuerySetTest(unittest.TestCase): # PyPy evaluates __len__ when iterating with list comprehensions while CPython does not. # This may be a bug in PyPy (PyPy/#1802) but it does not affect # the behavior of MongoEngine. 
- self.assertEqual(None, people._len) - self.assertEqual(q, 1) + assert people._len is None + assert q == 1 list(people) - self.assertEqual(100, people._len) # Caused by list calling len - self.assertEqual(q, 1) + assert 100 == people._len # Caused by list calling len + assert q == 1 people.count(with_limit_and_skip=True) # count is cached - self.assertEqual(q, 1) + assert q == 1 def test_no_cached_queryset(self): class Person(Document): @@ -4959,17 +5183,17 @@ class QuerySetTest(unittest.TestCase): Person(name="No: %s" % i).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 people = Person.objects.no_cache() [x for x in people] - self.assertEqual(q, 1) + assert q == 1 list(people) - self.assertEqual(q, 2) + assert q == 2 people.count() - self.assertEqual(q, 3) + assert q == 3 def test_no_cached_queryset__repr__(self): class Person(Document): @@ -4977,19 +5201,18 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() qs = Person.objects.no_cache() - self.assertEqual(repr(qs), '[]') + assert repr(qs) == "[]" def test_no_cached_on_a_cached_queryset_raise_error(self): class Person(Document): name = StringField() Person.drop_collection() - Person(name='a').save() + Person(name="a").save() qs = Person.objects() _ = list(qs) - with self.assertRaises(OperationError) as ctx_err: + with pytest.raises(OperationError, match="QuerySet already cached"): qs.no_cache() - self.assertEqual("QuerySet already cached", str(ctx_err.exception)) def test_no_cached_queryset_no_cache_back_to_cache(self): class Person(Document): @@ -4997,14 +5220,13 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() qs = Person.objects() - self.assertIsInstance(qs, QuerySet) + assert isinstance(qs, QuerySet) qs = qs.no_cache() - self.assertIsInstance(qs, QuerySetNoCache) + assert isinstance(qs, QuerySetNoCache) qs = qs.cache() - self.assertIsInstance(qs, QuerySet) + assert isinstance(qs, QuerySet) def test_cache_not_cloned(self): - class User(Document): 
name = StringField() @@ -5016,16 +5238,17 @@ class QuerySetTest(unittest.TestCase): User(name="Alice").save() User(name="Bob").save() - users = User.objects.all().order_by('name') - self.assertEqual("%s" % users, "[, ]") - self.assertEqual(2, len(users._result_cache)) + users = User.objects.all().order_by("name") + assert "%s" % users == "[, ]" + assert 2 == len(users._result_cache) users = users.filter(name="Bob") - self.assertEqual("%s" % users, "[]") - self.assertEqual(1, len(users._result_cache)) + assert "%s" % users == "[]" + assert 1 == len(users._result_cache) def test_no_cache(self): """Ensure you can add meta data to file""" + class Noddy(Document): fields = DictField() @@ -5039,27 +5262,27 @@ class QuerySetTest(unittest.TestCase): docs = Noddy.objects.no_cache() counter = len([1 for i in docs]) - self.assertEqual(counter, 100) + assert counter == 100 - self.assertEqual(len(list(docs)), 100) + assert len(list(docs)) == 100 # Can't directly get a length of a no-cache queryset. - with self.assertRaises(TypeError): + with pytest.raises(TypeError): len(docs) # Another iteration over the queryset should result in another db op. with query_counter() as q: list(docs) - self.assertEqual(q, 1) + assert q == 1 # ... and another one to double-check. with query_counter() as q: list(docs) - self.assertEqual(q, 1) + assert q == 1 def test_nested_queryset_iterator(self): # Try iterating the same queryset twice, nested. 
- names = ['Alice', 'Bob', 'Chuck', 'David', 'Eric', 'Francis', 'George'] + names = ["Alice", "Bob", "Chuck", "David", "Eric", "Francis", "George"] class User(Document): name = StringField() @@ -5072,45 +5295,45 @@ class QuerySetTest(unittest.TestCase): for name in names: User(name=name).save() - users = User.objects.all().order_by('name') + users = User.objects.all().order_by("name") outer_count = 0 inner_count = 0 inner_total_count = 0 with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 - self.assertEqual(users.count(with_limit_and_skip=True), 7) + assert users.count(with_limit_and_skip=True) == 7 for i, outer_user in enumerate(users): - self.assertEqual(outer_user.name, names[i]) + assert outer_user.name == names[i] outer_count += 1 inner_count = 0 # Calling len might disrupt the inner loop if there are bugs - self.assertEqual(users.count(with_limit_and_skip=True), 7) + assert users.count(with_limit_and_skip=True) == 7 for j, inner_user in enumerate(users): - self.assertEqual(inner_user.name, names[j]) + assert inner_user.name == names[j] inner_count += 1 inner_total_count += 1 # inner loop should always be executed seven times - self.assertEqual(inner_count, 7) + assert inner_count == 7 # outer loop should be executed seven times total - self.assertEqual(outer_count, 7) + assert outer_count == 7 # inner loop should be executed fourtynine times total - self.assertEqual(inner_total_count, 7 * 7) + assert inner_total_count == 7 * 7 - self.assertEqual(q, 2) + assert q == 2 def test_no_sub_classes(self): class A(Document): x = IntField() y = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class B(A): z = IntField() @@ -5126,27 +5349,28 @@ class QuerySetTest(unittest.TestCase): B(x=30, y=50).save() C(x=40, y=60).save() - self.assertEqual(A.objects.no_sub_classes().count(), 2) - self.assertEqual(A.objects.count(), 5) + assert A.objects.no_sub_classes().count() == 2 + assert A.objects.count() == 5 - 
self.assertEqual(B.objects.no_sub_classes().count(), 2) - self.assertEqual(B.objects.count(), 3) + assert B.objects.no_sub_classes().count() == 2 + assert B.objects.count() == 3 - self.assertEqual(C.objects.no_sub_classes().count(), 1) - self.assertEqual(C.objects.count(), 1) + assert C.objects.no_sub_classes().count() == 1 + assert C.objects.count() == 1 for obj in A.objects.no_sub_classes(): - self.assertEqual(obj.__class__, A) + assert obj.__class__ == A for obj in B.objects.no_sub_classes(): - self.assertEqual(obj.__class__, B) + assert obj.__class__ == B for obj in C.objects.no_sub_classes(): - self.assertEqual(obj.__class__, C) + assert obj.__class__ == C def test_query_generic_embedded_document(self): """Ensure that querying sub field on generic_embedded_field works """ + class A(EmbeddedDocument): a_name = StringField() @@ -5157,19 +5381,16 @@ class QuerySetTest(unittest.TestCase): document = GenericEmbeddedDocumentField(choices=(A, B)) Doc.drop_collection() - Doc(document=A(a_name='A doc')).save() - Doc(document=B(b_name='B doc')).save() + Doc(document=A(a_name="A doc")).save() + Doc(document=B(b_name="B doc")).save() # Using raw in filter working fine - self.assertEqual(Doc.objects( - __raw__={'document.a_name': 'A doc'}).count(), 1) - self.assertEqual(Doc.objects( - __raw__={'document.b_name': 'B doc'}).count(), 1) - self.assertEqual(Doc.objects(document__a_name='A doc').count(), 1) - self.assertEqual(Doc.objects(document__b_name='B doc').count(), 1) + assert Doc.objects(__raw__={"document.a_name": "A doc"}).count() == 1 + assert Doc.objects(__raw__={"document.b_name": "B doc"}).count() == 1 + assert Doc.objects(document__a_name="A doc").count() == 1 + assert Doc.objects(document__b_name="B doc").count() == 1 def test_query_reference_to_custom_pk_doc(self): - class A(Document): id = StringField(primary_key=True) @@ -5179,21 +5400,18 @@ class QuerySetTest(unittest.TestCase): A.drop_collection() B.drop_collection() - a = A.objects.create(id='custom_id') + 
a = A.objects.create(id="custom_id") B.objects.create(a=a) - self.assertEqual(B.objects.count(), 1) - self.assertEqual(B.objects.get(a=a).a, a) - self.assertEqual(B.objects.get(a=a.id).a, a) + assert B.objects.count() == 1 + assert B.objects.get(a=a).a == a + assert B.objects.get(a=a.id).a == a def test_cls_query_in_subclassed_docs(self): - class Animal(Document): name = StringField() - meta = { - 'allow_inheritance': True - } + meta = {"allow_inheritance": True} class Dog(Animal): pass @@ -5201,21 +5419,20 @@ class QuerySetTest(unittest.TestCase): class Cat(Animal): pass - self.assertEqual(Animal.objects(name='Charlie')._query, { - 'name': 'Charlie', - '_cls': {'$in': ('Animal', 'Animal.Dog', 'Animal.Cat')} - }) - self.assertEqual(Dog.objects(name='Charlie')._query, { - 'name': 'Charlie', - '_cls': 'Animal.Dog' - }) - self.assertEqual(Cat.objects(name='Charlie')._query, { - 'name': 'Charlie', - '_cls': 'Animal.Cat' - }) + assert Animal.objects(name="Charlie")._query == { + "name": "Charlie", + "_cls": {"$in": ("Animal", "Animal.Dog", "Animal.Cat")}, + } + assert Dog.objects(name="Charlie")._query == { + "name": "Charlie", + "_cls": "Animal.Dog", + } + assert Cat.objects(name="Charlie")._query == { + "name": "Charlie", + "_cls": "Animal.Cat", + } def test_can_have_field_same_name_as_query_operator(self): - class Size(Document): name = StringField() @@ -5228,11 +5445,10 @@ class QuerySetTest(unittest.TestCase): instance_size = Size(name="Large").save() Example(size=instance_size).save() - self.assertEqual(Example.objects(size=instance_size).count(), 1) - self.assertEqual(Example.objects(size__in=[instance_size]).count(), 1) + assert Example.objects(size=instance_size).count() == 1 + assert Example.objects(size__in=[instance_size]).count() == 1 def test_cursor_in_an_if_stmt(self): - class Test(Document): test_field = StringField() @@ -5240,23 +5456,23 @@ class QuerySetTest(unittest.TestCase): queryset = Test.objects if queryset: - raise AssertionError('Empty cursor 
returns True') + raise AssertionError("Empty cursor returns True") test = Test() - test.test_field = 'test' + test.test_field = "test" test.save() queryset = Test.objects if not test: - raise AssertionError('Cursor has data and returned False') + raise AssertionError("Cursor has data and returned False") - queryset.next() + next(queryset) if not queryset: - raise AssertionError('Cursor has data and it must returns True,' - ' even in the last item.') + raise AssertionError( + "Cursor has data and it must returns True, even in the last item." + ) def test_bool_performance(self): - class Person(Document): name = StringField() @@ -5268,11 +5484,12 @@ class QuerySetTest(unittest.TestCase): if Person.objects: pass - self.assertEqual(q, 1) - op = q.db.system.profile.find({"ns": - {"$ne": "%s.system.indexes" % q.db.name}})[0] + assert q == 1 + op = q.db.system.profile.find( + {"ns": {"$ne": "%s.system.indexes" % q.db.name}} + )[0] - self.assertEqual(op['nreturned'], 1) + assert op["nreturned"] == 1 def test_bool_with_ordering(self): ORDER_BY_KEY, CMD_QUERY_KEY = get_key_compat(self.mongodb_version) @@ -5285,37 +5502,37 @@ class QuerySetTest(unittest.TestCase): Person(name="Test").save() # Check that bool(queryset) does not uses the orderby - qs = Person.objects.order_by('name') + qs = Person.objects.order_by("name") with query_counter() as q: if bool(qs): pass - op = q.db.system.profile.find({"ns": - {"$ne": "%s.system.indexes" % q.db.name}})[0] + op = q.db.system.profile.find( + {"ns": {"$ne": "%s.system.indexes" % q.db.name}} + )[0] - self.assertNotIn(ORDER_BY_KEY, op[CMD_QUERY_KEY]) + assert ORDER_BY_KEY not in op[CMD_QUERY_KEY] # Check that normal query uses orderby - qs2 = Person.objects.order_by('name') + qs2 = Person.objects.order_by("name") with query_counter() as q: for x in qs2: pass - op = q.db.system.profile.find({"ns": - {"$ne": "%s.system.indexes" % q.db.name}})[0] + op = q.db.system.profile.find( + {"ns": {"$ne": "%s.system.indexes" % q.db.name}} + )[0] - 
self.assertIn(ORDER_BY_KEY, op[CMD_QUERY_KEY]) + assert ORDER_BY_KEY in op[CMD_QUERY_KEY] def test_bool_with_ordering_from_meta_dict(self): ORDER_BY_KEY, CMD_QUERY_KEY = get_key_compat(self.mongodb_version) class Person(Document): name = StringField() - meta = { - 'ordering': ['name'] - } + meta = {"ordering": ["name"]} Person.drop_collection() @@ -5328,258 +5545,43 @@ class QuerySetTest(unittest.TestCase): if Person.objects: pass - op = q.db.system.profile.find({"ns": - {"$ne": "%s.system.indexes" % q.db.name}})[0] + op = q.db.system.profile.find( + {"ns": {"$ne": "%s.system.indexes" % q.db.name}} + )[0] - self.assertNotIn('$orderby', op[CMD_QUERY_KEY], - 'BaseQuerySet must remove orderby from meta in boolen test') + assert ( + "$orderby" not in op[CMD_QUERY_KEY] + ), "BaseQuerySet must remove orderby from meta in boolen test" - self.assertEqual(Person.objects.first().name, 'A') - self.assertTrue(Person.objects._has_data(), - 'Cursor has data and returned False') - - def test_queryset_aggregation_framework(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects(age__lte=22).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"}, - {'_id': p2.pk, 'name': "WILSON JUNIOR"} - ]) - - data = Person.objects(age__lte=22).order_by('-name').aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(list(data), [ - {'_id': p2.pk, 'name': "WILSON JUNIOR"}, - {'_id': p1.pk, 'name': "ISABELLA LUANNA"} - ]) - - data = Person.objects(age__gte=17, age__lte=40).order_by('-age').aggregate({ - '$group': { - '_id': None, - 'total': {'$sum': 1}, - 'avg': {'$avg': '$age'} - } - }) - self.assertEqual(list(data), [ - {'_id': None, 'avg': 
29, 'total': 2} - ]) - - data = Person.objects().aggregate({'$match': {'name': 'Isabella Luanna'}}) - self.assertEqual(list(data), [ - {u'_id': p1.pk, - u'age': 16, - u'name': u'Isabella Luanna'}] - ) - - def test_queryset_aggregation_with_skip(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects.skip(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(list(data), [ - {'_id': p2.pk, 'name': "WILSON JUNIOR"}, - {'_id': p3.pk, 'name': "SANDRA MARA"} - ]) - - def test_queryset_aggregation_with_limit(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects.limit(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"} - ]) - - def test_queryset_aggregation_with_sort(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects.order_by('name').aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"}, - {'_id': p3.pk, 'name': "SANDRA MARA"}, - {'_id': p2.pk, 'name': "WILSON JUNIOR"} - ]) - - def test_queryset_aggregation_with_skip_with_limit(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - 
- p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = list( - Person.objects.skip(1).limit(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - ) - - self.assertEqual(list(data), [ - {'_id': p2.pk, 'name': "WILSON JUNIOR"}, - ]) - - # Make sure limit/skip chaining order has no impact - data2 = Person.objects.limit(1).skip(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(data, list(data2)) - - def test_queryset_aggregation_with_sort_with_limit(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects.order_by('name').limit(2).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"}, - {'_id': p3.pk, 'name': "SANDRA MARA"} - ]) - - # Verify adding limit/skip steps works as expected - data = Person.objects.order_by('name').limit(2).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}}, - {'$limit': 1}, - ) - - self.assertEqual(list(data), [ - {'_id': p1.pk, 'name': "ISABELLA LUANNA"}, - ]) - - data = Person.objects.order_by('name').limit(2).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}}, - {'$skip': 1}, - {'$limit': 1}, - ) - - self.assertEqual(list(data), [ - {'_id': p3.pk, 'name': "SANDRA MARA"}, - ]) - - def test_queryset_aggregation_with_sort_with_skip(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = 
Person.objects.order_by('name').skip(2).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(list(data), [ - {'_id': p2.pk, 'name': "WILSON JUNIOR"} - ]) - - def test_queryset_aggregation_with_sort_with_skip_with_limit(self): - class Person(Document): - name = StringField() - age = IntField() - - Person.drop_collection() - - p1 = Person(name="Isabella Luanna", age=16) - p2 = Person(name="Wilson Junior", age=21) - p3 = Person(name="Sandra Mara", age=37) - Person.objects.insert([p1, p2, p3]) - - data = Person.objects.order_by('name').skip(1).limit(1).aggregate( - {'$project': {'name': {'$toUpper': '$name'}}} - ) - - self.assertEqual(list(data), [ - {'_id': p3.pk, 'name': "SANDRA MARA"} - ]) + assert Person.objects.first().name == "A" + assert Person.objects._has_data(), "Cursor has data and returned False" def test_delete_count(self): [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] - self.assertEqual(self.Person.objects().delete(), 3) # test ordinary QuerySey delete count + assert ( + self.Person.objects().delete() == 3 + ) # test ordinary QuerySey delete count [self.Person(name="User {0}".format(i), age=i * 10).save() for i in range(1, 4)] - self.assertEqual(self.Person.objects().skip(1).delete(), 2) # test Document delete with existing documents + assert ( + self.Person.objects().skip(1).delete() == 2 + ) # test Document delete with existing documents self.Person.objects().delete() - self.assertEqual(self.Person.objects().skip(1).delete(), 0) # test Document delete without existing documents + assert ( + self.Person.objects().skip(1).delete() == 0 + ) # test Document delete without existing documents def test_max_time_ms(self): # 778: max_time_ms can get only int or None as input - self.assertRaises(TypeError, - self.Person.objects(name="name").max_time_ms, - 'not a number') + with pytest.raises(TypeError): + self.Person.objects(name="name").max_time_ms("not a number") def 
test_subclass_field_query(self): class Animal(Document): is_mamal = BooleanField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class Cat(Animal): whiskers_length = FloatField() @@ -5592,8 +5594,8 @@ class QuerySetTest(unittest.TestCase): Animal(is_mamal=False).save() Cat(is_mamal=True, whiskers_length=5.1).save() ScottishCat(is_mamal=True, folded_ears=True).save() - self.assertEqual(Animal.objects(folded_ears=True).count(), 1) - self.assertEqual(Animal.objects(whiskers_length=5.1).count(), 1) + assert Animal.objects(folded_ears=True).count() == 1 + assert Animal.objects(whiskers_length=5.1).count() == 1 def test_loop_over_invalid_id_does_not_crash(self): class Person(Document): @@ -5601,14 +5603,15 @@ class QuerySetTest(unittest.TestCase): Person.drop_collection() - Person._get_collection().insert_one({'name': 'a', 'id': ''}) + Person._get_collection().insert_one({"name": "a", "id": ""}) for p in Person.objects(): - self.assertEqual(p.name, 'a') + assert p.name == "a" def test_len_during_iteration(self): """Tests that calling len on a queyset during iteration doesn't stop paging. """ + class Data(Document): pass @@ -5626,7 +5629,7 @@ class QuerySetTest(unittest.TestCase): for i, r in enumerate(records): if i == 58: len(records) - self.assertEqual(i, 249) + assert i == 249 # Assert the same behavior is true even if we didn't pre-populate the # result cache. @@ -5634,13 +5637,14 @@ class QuerySetTest(unittest.TestCase): for i, r in enumerate(records): if i == 58: len(records) - self.assertEqual(i, 249) + assert i == 249 def test_iteration_within_iteration(self): """You should be able to reliably iterate over all the documents in a given queryset even if there are multiple iterations of it happening at the same time. 
""" + class Data(Document): pass @@ -5652,13 +5656,14 @@ class QuerySetTest(unittest.TestCase): for j, doc2 in enumerate(qs): pass - self.assertEqual(i, 249) - self.assertEqual(j, 249) + assert i == 249 + assert j == 249 def test_in_operator_on_non_iterable(self): """Ensure that using the `__in` operator on a non-iterable raises an error. """ + class User(Document): name = StringField() @@ -5669,31 +5674,44 @@ class QuerySetTest(unittest.TestCase): User.drop_collection() BlogPost.drop_collection() - author = User.objects.create(name='Test User') - post = BlogPost.objects.create(content='Had a good coffee today...', - authors=[author]) + author = User.objects.create(name="Test User") + post = BlogPost.objects.create( + content="Had a good coffee today...", authors=[author] + ) # Make sure using `__in` with a list works blog_posts = BlogPost.objects(authors__in=[author]) - self.assertEqual(list(blog_posts), [post]) + assert list(blog_posts) == [post] # Using `__in` with a non-iterable should raise a TypeError - self.assertRaises(TypeError, BlogPost.objects(authors__in=author.pk).count) + with pytest.raises(TypeError): + BlogPost.objects(authors__in=author.pk).count() # Using `__in` with a `Document` (which is seemingly iterable but not # in a way we'd expect) should raise a TypeError, too - self.assertRaises(TypeError, BlogPost.objects(authors__in=author).count) + with pytest.raises(TypeError): + BlogPost.objects(authors__in=author).count() def test_create_count(self): self.Person.drop_collection() self.Person.objects.create(name="Foo") self.Person.objects.create(name="Bar") self.Person.objects.create(name="Baz") - self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 3) + assert self.Person.objects.count(with_limit_and_skip=True) == 3 - newPerson = self.Person.objects.create(name="Foo_1") - self.assertEqual(self.Person.objects.count(with_limit_and_skip=True), 4) + self.Person.objects.create(name="Foo_1") + assert 
self.Person.objects.count(with_limit_and_skip=True) == 4 + + def test_no_cursor_timeout(self): + qs = self.Person.objects() + assert qs._cursor_args == {} # ensure no regression of #2148 + + qs = self.Person.objects().timeout(True) + assert qs._cursor_args == {} + + qs = self.Person.objects().timeout(False) + assert qs._cursor_args == {"no_cursor_timeout": True} -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/test_queryset_aggregation.py b/tests/queryset/test_queryset_aggregation.py new file mode 100644 index 00000000..00e04a36 --- /dev/null +++ b/tests/queryset/test_queryset_aggregation.py @@ -0,0 +1,255 @@ +# -*- coding: utf-8 -*- + +import unittest +import warnings + +from pymongo.read_preferences import ReadPreference + +from mongoengine import * +from tests.utils import MongoDBTestCase + + +class TestQuerysetAggregate(MongoDBTestCase): + def test_read_preference_aggregation_framework(self): + class Bar(Document): + txt = StringField() + + meta = {"indexes": ["txt"]} + + # Aggregates with read_preference + pipeline = [] + bars = Bar.objects.read_preference( + ReadPreference.SECONDARY_PREFERRED + ).aggregate(pipeline) + assert ( + bars._CommandCursor__collection.read_preference + == ReadPreference.SECONDARY_PREFERRED + ) + + def test_queryset_aggregation_framework(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects(age__lte=22).aggregate(pipeline) + + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + ] + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects(age__lte=22).order_by("-name").aggregate(pipeline) + + 
assert list(data) == [ + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + ] + + pipeline = [ + {"$group": {"_id": None, "total": {"$sum": 1}, "avg": {"$avg": "$age"}}} + ] + data = ( + Person.objects(age__gte=17, age__lte=40) + .order_by("-age") + .aggregate(pipeline) + ) + assert list(data) == [{"_id": None, "avg": 29, "total": 2}] + + pipeline = [{"$match": {"name": "Isabella Luanna"}}] + data = Person.objects().aggregate(pipeline) + assert list(data) == [{u"_id": p1.pk, u"age": 16, u"name": u"Isabella Luanna"}] + + def test_queryset_aggregation_with_skip(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.skip(1).aggregate(pipeline) + + assert list(data) == [ + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ] + + def test_queryset_aggregation_with_limit(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.limit(1).aggregate(pipeline) + + assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] + + def test_queryset_aggregation_with_sort(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + 
data = Person.objects.order_by("name").aggregate(pipeline) + + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + {"_id": p2.pk, "name": "WILSON JUNIOR"}, + ] + + def test_queryset_aggregation_with_skip_with_limit(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = list(Person.objects.skip(1).limit(1).aggregate(pipeline)) + + assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}] + + # Make sure limit/skip chaining order has no impact + data2 = Person.objects.limit(1).skip(1).aggregate(pipeline) + + assert data == list(data2) + + def test_queryset_aggregation_with_sort_with_limit(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.order_by("name").limit(2).aggregate(pipeline) + + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ] + + # Verify adding limit/skip steps works as expected + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}] + data = Person.objects.order_by("name").limit(2).aggregate(pipeline) + + assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] + + pipeline = [ + {"$project": {"name": {"$toUpper": "$name"}}}, + {"$skip": 1}, + {"$limit": 1}, + ] + data = Person.objects.order_by("name").limit(2).aggregate(pipeline) + + assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}] + + def 
test_queryset_aggregation_with_sort_with_skip(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.order_by("name").skip(2).aggregate(pipeline) + + assert list(data) == [{"_id": p2.pk, "name": "WILSON JUNIOR"}] + + def test_queryset_aggregation_with_sort_with_skip_with_limit(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects.order_by("name").skip(1).limit(1).aggregate(pipeline) + + assert list(data) == [{"_id": p3.pk, "name": "SANDRA MARA"}] + + def test_queryset_aggregation_deprecated_interface(self): + class Person(Document): + name = StringField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna") + p2 = Person(name="Wilson Junior") + p3 = Person(name="Sandra Mara") + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + + # Make sure a warning is emitted + with warnings.catch_warnings(): + warnings.simplefilter("error", DeprecationWarning) + with self.assertRaises(DeprecationWarning): + Person.objects.order_by("name").limit(2).aggregate(*pipeline) + + # Make sure old interface works as expected with a 1-step pipeline + data = Person.objects.order_by("name").limit(2).aggregate(*pipeline) + + assert list(data) == [ + {"_id": p1.pk, "name": "ISABELLA LUANNA"}, + {"_id": p3.pk, "name": "SANDRA MARA"}, + ] + + # Make sure old interface works as expected with a 2-steps pipeline + pipeline = 
[{"$project": {"name": {"$toUpper": "$name"}}}, {"$limit": 1}] + data = Person.objects.order_by("name").limit(2).aggregate(*pipeline) + + assert list(data) == [{"_id": p1.pk, "name": "ISABELLA LUANNA"}] + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/queryset/test_transform.py b/tests/queryset/test_transform.py new file mode 100644 index 00000000..f5d248af --- /dev/null +++ b/tests/queryset/test_transform.py @@ -0,0 +1,374 @@ +import unittest + +from bson.son import SON +import pytest + +from mongoengine import * +from mongoengine.queryset import Q, transform + + +class TestTransform(unittest.TestCase): + def setUp(self): + connect(db="mongoenginetest") + + def test_transform_query(self): + """Ensure that the _transform_query function operates correctly. + """ + assert transform.query(name="test", age=30) == {"name": "test", "age": 30} + assert transform.query(age__lt=30) == {"age": {"$lt": 30}} + assert transform.query(age__gt=20, age__lt=50) == { + "age": {"$gt": 20, "$lt": 50} + } + assert transform.query(age=20, age__gt=50) == { + "$and": [{"age": {"$gt": 50}}, {"age": 20}] + } + assert transform.query(friend__age__gte=30) == {"friend.age": {"$gte": 30}} + assert transform.query(name__exists=True) == {"name": {"$exists": True}} + assert transform.query(name=["Mark"], __raw__={"name": {"$in": "Tom"}}) == { + "$and": [{"name": ["Mark"]}, {"name": {"$in": "Tom"}}] + } + assert transform.query(name__in=["Tom"], __raw__={"name": "Mark"}) == { + "$and": [{"name": {"$in": ["Tom"]}}, {"name": "Mark"}] + } + + def test_transform_update(self): + class LisDoc(Document): + foo = ListField(StringField()) + + class DicDoc(Document): + dictField = DictField() + + class Doc(Document): + pass + + LisDoc.drop_collection() + DicDoc.drop_collection() + Doc.drop_collection() + + DicDoc().save() + doc = Doc().save() + + for k, v in ( + ("set", "$set"), + ("set_on_insert", "$setOnInsert"), + ("push", "$push"), + ): + update = transform.update(DicDoc, 
**{"%s__dictField__test" % k: doc}) + assert isinstance(update[v]["dictField.test"], dict) + + # Update special cases + update = transform.update(DicDoc, unset__dictField__test=doc) + assert update["$unset"]["dictField.test"] == 1 + + update = transform.update(DicDoc, pull__dictField__test=doc) + assert isinstance(update["$pull"]["dictField"]["test"], dict) + + update = transform.update(LisDoc, pull__foo__in=["a"]) + assert update == {"$pull": {"foo": {"$in": ["a"]}}} + + def test_transform_update_push(self): + """Ensure the differences in behvaior between 'push' and 'push_all'""" + + class BlogPost(Document): + tags = ListField(StringField()) + + update = transform.update(BlogPost, push__tags=["mongo", "db"]) + assert update == {"$push": {"tags": ["mongo", "db"]}} + + update = transform.update(BlogPost, push_all__tags=["mongo", "db"]) + assert update == {"$push": {"tags": {"$each": ["mongo", "db"]}}} + + def test_transform_update_no_operator_default_to_set(self): + """Ensure the differences in behvaior between 'push' and 'push_all'""" + + class BlogPost(Document): + tags = ListField(StringField()) + + update = transform.update(BlogPost, tags=["mongo", "db"]) + assert update == {"$set": {"tags": ["mongo", "db"]}} + + def test_query_field_name(self): + """Ensure that the correct field name is used when querying. 
+ """ + + class Comment(EmbeddedDocument): + content = StringField(db_field="commentContent") + + class BlogPost(Document): + title = StringField(db_field="postTitle") + comments = ListField( + EmbeddedDocumentField(Comment), db_field="postComments" + ) + + BlogPost.drop_collection() + + data = {"title": "Post 1", "comments": [Comment(content="test")]} + post = BlogPost(**data) + post.save() + + assert "postTitle" in BlogPost.objects(title=data["title"])._query + assert not ("title" in BlogPost.objects(title=data["title"])._query) + assert BlogPost.objects(title=data["title"]).count() == 1 + + assert "_id" in BlogPost.objects(pk=post.id)._query + assert BlogPost.objects(pk=post.id).count() == 1 + + assert ( + "postComments.commentContent" + in BlogPost.objects(comments__content="test")._query + ) + assert BlogPost.objects(comments__content="test").count() == 1 + + BlogPost.drop_collection() + + def test_query_pk_field_name(self): + """Ensure that the correct "primary key" field name is used when + querying + """ + + class BlogPost(Document): + title = StringField(primary_key=True, db_field="postTitle") + + BlogPost.drop_collection() + + data = {"title": "Post 1"} + post = BlogPost(**data) + post.save() + + assert "_id" in BlogPost.objects(pk=data["title"])._query + assert "_id" in BlogPost.objects(title=data["title"])._query + assert BlogPost.objects(pk=data["title"]).count() == 1 + + BlogPost.drop_collection() + + def test_chaining(self): + class A(Document): + pass + + class B(Document): + a = ReferenceField(A) + + A.drop_collection() + B.drop_collection() + + a1 = A().save() + a2 = A().save() + + B(a=a1).save() + + # Works + q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query + + # Doesn't work + q2 = B.objects.filter(a__in=[a1, a2]) + q2 = q2.filter(a=a1)._query + + assert q1 == q2 + + def test_raw_query_and_Q_objects(self): + """ + Test raw plays nicely + """ + + class Foo(Document): + name = StringField() + a = StringField() + b = StringField() + c = 
StringField() + + meta = {"allow_inheritance": False} + + query = Foo.objects(__raw__={"$nor": [{"name": "bar"}]})._query + assert query == {"$nor": [{"name": "bar"}]} + + q1 = {"$or": [{"a": 1}, {"b": 1}]} + query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query + assert query == {"$or": [{"a": 1}, {"b": 1}], "c": 1} + + def test_raw_and_merging(self): + class Doc(Document): + meta = {"allow_inheritance": False} + + raw_query = Doc.objects( + __raw__={ + "deleted": False, + "scraped": "yes", + "$nor": [ + {"views.extracted": "no"}, + {"attachments.views.extracted": "no"}, + ], + } + )._query + + assert raw_query == { + "deleted": False, + "scraped": "yes", + "$nor": [{"views.extracted": "no"}, {"attachments.views.extracted": "no"}], + } + + def test_geojson_PointField(self): + class Location(Document): + loc = PointField() + + update = transform.update(Location, set__loc=[1, 2]) + assert update == {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}} + + update = transform.update( + Location, set__loc={"type": "Point", "coordinates": [1, 2]} + ) + assert update == {"$set": {"loc": {"type": "Point", "coordinates": [1, 2]}}} + + def test_geojson_LineStringField(self): + class Location(Document): + line = LineStringField() + + update = transform.update(Location, set__line=[[1, 2], [2, 2]]) + assert update == { + "$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}} + } + + update = transform.update( + Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]} + ) + assert update == { + "$set": {"line": {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}} + } + + def test_geojson_PolygonField(self): + class Location(Document): + poly = PolygonField() + + update = transform.update( + Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]] + ) + assert update == { + "$set": { + "poly": { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], + } + } + } + + update = transform.update( + Location, + 
set__poly={ + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], + }, + ) + assert update == { + "$set": { + "poly": { + "type": "Polygon", + "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], + } + } + } + + def test_type(self): + class Doc(Document): + df = DynamicField() + + Doc(df=True).save() + Doc(df=7).save() + Doc(df="df").save() + assert Doc.objects(df__type=1).count() == 0 # double + assert Doc.objects(df__type=8).count() == 1 # bool + assert Doc.objects(df__type=2).count() == 1 # str + assert Doc.objects(df__type=16).count() == 1 # int + + def test_last_field_name_like_operator(self): + class EmbeddedItem(EmbeddedDocument): + type = StringField() + name = StringField() + + class Doc(Document): + item = EmbeddedDocumentField(EmbeddedItem) + + Doc.drop_collection() + + doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe")) + doc.save() + + assert 1 == Doc.objects(item__type__="axe").count() + assert 1 == Doc.objects(item__name__="Heroic axe").count() + + Doc.objects(id=doc.id).update(set__item__type__="sword") + assert 1 == Doc.objects(item__type__="sword").count() + assert 0 == Doc.objects(item__type__="axe").count() + + def test_understandable_error_raised(self): + class Event(Document): + title = StringField() + location = GeoPointField() + + box = [(35.0, -125.0), (40.0, -100.0)] + # I *meant* to execute location__within_box=box + events = Event.objects(location__within=box) + with pytest.raises(InvalidQueryError): + events.count() + + def test_update_pull_for_list_fields(self): + """ + Test added to check pull operation in update for + EmbeddedDocumentListField which is inside a EmbeddedDocumentField + """ + + class Word(EmbeddedDocument): + word = StringField() + index = IntField() + + class SubDoc(EmbeddedDocument): + heading = ListField(StringField()) + text = EmbeddedDocumentListField(Word) + + class MainDoc(Document): + title = StringField() + content = EmbeddedDocumentField(SubDoc) + + word = Word(word="abc", 
index=1) + update = transform.update(MainDoc, pull__content__text=word) + assert update == { + "$pull": {"content.text": SON([("word", u"abc"), ("index", 1)])} + } + + update = transform.update(MainDoc, pull__content__heading="xyz") + assert update == {"$pull": {"content.heading": "xyz"}} + + update = transform.update(MainDoc, pull__content__text__word__in=["foo", "bar"]) + assert update == {"$pull": {"content.text": {"word": {"$in": ["foo", "bar"]}}}} + + update = transform.update( + MainDoc, pull__content__text__word__nin=["foo", "bar"] + ) + assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}} + + def test_transform_embedded_document_list_fields(self): + """ + Test added to check filtering + EmbeddedDocumentListField which is inside a EmbeddedDocumentField + """ + + class Drink(EmbeddedDocument): + id = StringField() + meta = {"strict": False} + + class Shop(Document): + drinks = EmbeddedDocumentListField(Drink) + + Shop.drop_collection() + drinks = [Drink(id="drink_1"), Drink(id="drink_2")] + Shop.objects.create(drinks=drinks) + q_obj = transform.query( + Shop, drinks__all=[{"$elemMatch": {"_id": x.id}} for x in drinks] + ) + assert q_obj == { + "drinks": {"$all": [{"$elemMatch": {"_id": x.id}} for x in drinks]} + } + + Shop.drop_collection() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/queryset/test_visitor.py b/tests/queryset/test_visitor.py new file mode 100644 index 00000000..81e0f253 --- /dev/null +++ b/tests/queryset/test_visitor.py @@ -0,0 +1,423 @@ +import datetime +import re +import unittest + +from bson import ObjectId +import pytest + +from mongoengine import * +from mongoengine.errors import InvalidQueryError +from mongoengine.queryset import Q + + +class TestQ(unittest.TestCase): + def setUp(self): + connect(db="mongoenginetest") + + class Person(Document): + name = StringField() + age = IntField() + meta = {"allow_inheritance": True} + + Person.drop_collection() + self.Person = Person + + def 
test_empty_q(self): + """Ensure that empty Q objects won't hurt. + """ + q1 = Q() + q2 = Q(age__gte=18) + q3 = Q() + q4 = Q(name="test") + q5 = Q() + + class Person(Document): + name = StringField() + age = IntField() + + query = {"$or": [{"age": {"$gte": 18}}, {"name": "test"}]} + assert (q1 | q2 | q3 | q4 | q5).to_query(Person) == query + + query = {"age": {"$gte": 18}, "name": "test"} + assert (q1 & q2 & q3 & q4 & q5).to_query(Person) == query + + def test_q_with_dbref(self): + """Ensure Q objects handle DBRefs correctly""" + connect(db="mongoenginetest") + + class User(Document): + pass + + class Post(Document): + created_user = ReferenceField(User) + + user = User.objects.create() + Post.objects.create(created_user=user) + + assert Post.objects.filter(created_user=user).count() == 1 + assert Post.objects.filter(Q(created_user=user)).count() == 1 + + def test_and_combination(self): + """Ensure that Q-objects correctly AND together. + """ + + class TestDoc(Document): + x = IntField() + y = StringField() + + query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) + assert query == {"$and": [{"x": {"$lt": 7}}, {"x": {"$lt": 3}}]} + + query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) + assert query == {"$and": [{"y": "a"}, {"x": {"$lt": 7}}, {"x": {"$lt": 3}}]} + + # Check normal cases work without an error + query = Q(x__lt=7) & Q(x__gt=3) + + q1 = Q(x__lt=7) + q2 = Q(x__gt=3) + query = (q1 & q2).to_query(TestDoc) + assert query == {"x": {"$lt": 7, "$gt": 3}} + + # More complex nested example + query = Q(x__lt=100) & Q(y__ne="NotMyString") + query &= Q(y__in=["a", "b", "c"]) & Q(x__gt=-100) + mongo_query = { + "x": {"$lt": 100, "$gt": -100}, + "y": {"$ne": "NotMyString", "$in": ["a", "b", "c"]}, + } + assert query.to_query(TestDoc) == mongo_query + + def test_or_combination(self): + """Ensure that Q-objects correctly OR together. 
+ """ + + class TestDoc(Document): + x = IntField() + + q1 = Q(x__lt=3) + q2 = Q(x__gt=7) + query = (q1 | q2).to_query(TestDoc) + assert query == {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]} + + def test_and_or_combination(self): + """Ensure that Q-objects handle ANDing ORed components. + """ + + class TestDoc(Document): + x = IntField() + y = BooleanField() + + TestDoc.drop_collection() + + query = Q(x__gt=0) | Q(x__exists=False) + query &= Q(x__lt=100) + assert query.to_query(TestDoc) == { + "$and": [ + {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]}, + {"x": {"$lt": 100}}, + ] + } + + q1 = Q(x__gt=0) | Q(x__exists=False) + q2 = Q(x__lt=100) | Q(y=True) + query = (q1 & q2).to_query(TestDoc) + + TestDoc(x=101).save() + TestDoc(x=10).save() + TestDoc(y=True).save() + + assert query == { + "$and": [ + {"$or": [{"x": {"$gt": 0}}, {"x": {"$exists": False}}]}, + {"$or": [{"x": {"$lt": 100}}, {"y": True}]}, + ] + } + assert 2 == TestDoc.objects(q1 & q2).count() + + def test_or_and_or_combination(self): + """Ensure that Q-objects handle ORing ANDed ORed components. 
:) + """ + + class TestDoc(Document): + x = IntField() + y = BooleanField() + + TestDoc.drop_collection() + TestDoc(x=-1, y=True).save() + TestDoc(x=101, y=True).save() + TestDoc(x=99, y=False).save() + TestDoc(x=101, y=False).save() + + q1 = Q(x__gt=0) & (Q(y=True) | Q(y__exists=False)) + q2 = Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)) + query = (q1 | q2).to_query(TestDoc) + + assert query == { + "$or": [ + { + "$and": [ + {"x": {"$gt": 0}}, + {"$or": [{"y": True}, {"y": {"$exists": False}}]}, + ] + }, + { + "$and": [ + {"x": {"$lt": 100}}, + {"$or": [{"y": False}, {"y": {"$exists": False}}]}, + ] + }, + ] + } + assert 2 == TestDoc.objects(q1 | q2).count() + + def test_multiple_occurence_in_field(self): + class Test(Document): + name = StringField(max_length=40) + title = StringField(max_length=40) + + q1 = Q(name__contains="te") | Q(title__contains="te") + q2 = Q(name__contains="12") | Q(title__contains="12") + + q3 = q1 & q2 + + query = q3.to_query(Test) + assert query["$and"][0] == q1.to_query(Test) + assert query["$and"][1] == q2.to_query(Test) + + def test_q_clone(self): + class TestDoc(Document): + x = IntField() + + TestDoc.drop_collection() + for i in range(1, 101): + t = TestDoc(x=i) + t.save() + + # Check normal cases work without an error + test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3)) + + assert test.count() == 3 + + test2 = test.clone() + assert test2.count() == 3 + assert test2 != test + + test3 = test2.filter(x=6) + assert test3.count() == 1 + assert test.count() == 3 + + def test_q(self): + """Ensure that Q objects may be used to query for documents. 
+ """ + + class BlogPost(Document): + title = StringField() + publish_date = DateTimeField() + published = BooleanField() + + BlogPost.drop_collection() + + post1 = BlogPost( + title="Test 1", publish_date=datetime.datetime(2010, 1, 8), published=False + ) + post1.save() + + post2 = BlogPost( + title="Test 2", publish_date=datetime.datetime(2010, 1, 15), published=True + ) + post2.save() + + post3 = BlogPost(title="Test 3", published=True) + post3.save() + + post4 = BlogPost(title="Test 4", publish_date=datetime.datetime(2010, 1, 8)) + post4.save() + + post5 = BlogPost(title="Test 1", publish_date=datetime.datetime(2010, 1, 15)) + post5.save() + + post6 = BlogPost(title="Test 1", published=False) + post6.save() + + # Check ObjectId lookup works + obj = BlogPost.objects(id=post1.id).first() + assert obj == post1 + + # Check Q object combination with one does not exist + q = BlogPost.objects(Q(title="Test 5") | Q(published=True)) + posts = [post.id for post in q] + + published_posts = (post2, post3) + assert all(obj.id in posts for obj in published_posts) + + q = BlogPost.objects(Q(title="Test 1") | Q(published=True)) + posts = [post.id for post in q] + published_posts = (post1, post2, post3, post5, post6) + assert all(obj.id in posts for obj in published_posts) + + # Check Q object combination + date = datetime.datetime(2010, 1, 10) + q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) + posts = [post.id for post in q] + + published_posts = (post1, post2, post3, post4) + assert all(obj.id in posts for obj in published_posts) + + assert not any(obj.id in posts for obj in [post5, post6]) + + BlogPost.drop_collection() + + # Check the 'in' operator + self.Person(name="user1", age=20).save() + self.Person(name="user2", age=20).save() + self.Person(name="user3", age=30).save() + self.Person(name="user4", age=40).save() + + assert self.Person.objects(Q(age__in=[20])).count() == 2 + assert self.Person.objects(Q(age__in=[20, 30])).count() == 3 + + # Test 
invalid query objs + with pytest.raises(InvalidQueryError): + self.Person.objects("user1") + + # filter should fail, too + with pytest.raises(InvalidQueryError): + self.Person.objects.filter("user1") + + def test_q_regex(self): + """Ensure that Q objects can be queried using regexes. + """ + person = self.Person(name="Guido van Rossum") + person.save() + + obj = self.Person.objects(Q(name=re.compile("^Gui"))).first() + assert obj == person + obj = self.Person.objects(Q(name=re.compile("^gui"))).first() + assert obj is None + + obj = self.Person.objects(Q(name=re.compile("^gui", re.I))).first() + assert obj == person + + obj = self.Person.objects(Q(name__not=re.compile("^bob"))).first() + assert obj == person + + obj = self.Person.objects(Q(name__not=re.compile("^Gui"))).first() + assert obj is None + + def test_q_repr(self): + assert repr(Q()) == "Q(**{})" + assert repr(Q(name="test")) == "Q(**{'name': 'test'})" + + assert ( + repr(Q(name="test") & Q(age__gte=18)) + == "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))" + ) + + assert ( + repr(Q(name="test") | Q(age__gte=18)) + == "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))" + ) + + def test_q_lists(self): + """Ensure that Q objects query ListFields correctly. 
+ """ + + class BlogPost(Document): + tags = ListField(StringField()) + + BlogPost.drop_collection() + + BlogPost(tags=["python", "mongo"]).save() + BlogPost(tags=["python"]).save() + + assert BlogPost.objects(Q(tags="mongo")).count() == 1 + assert BlogPost.objects(Q(tags="python")).count() == 2 + + BlogPost.drop_collection() + + def test_q_merge_queries_edge_case(self): + class User(Document): + email = EmailField(required=False) + name = StringField() + + User.drop_collection() + pk = ObjectId() + User(email="example@example.com", pk=pk).save() + + assert ( + 1 + == User.objects.filter(Q(email="example@example.com") | Q(name="John Doe")) + .limit(2) + .filter(pk=pk) + .count() + ) + + def test_chained_q_or_filtering(self): + class Post(EmbeddedDocument): + name = StringField(required=True) + + class Item(Document): + postables = ListField(EmbeddedDocumentField(Post)) + + Item.drop_collection() + + Item(postables=[Post(name="a"), Post(name="b")]).save() + Item(postables=[Post(name="a"), Post(name="c")]).save() + Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save() + + assert ( + Item.objects(Q(postables__name="a") & Q(postables__name="b")).count() == 2 + ) + assert ( + Item.objects.filter(postables__name="a").filter(postables__name="b").count() + == 2 + ) + + def test_equality(self): + assert Q(name="John") == Q(name="John") + assert Q() == Q() + + def test_inequality(self): + assert Q(name="John") != Q(name="Ralph") + + def test_operation_equality(self): + q1 = Q(name="John") | Q(title="Sir") & Q(surname="Paul") + q2 = Q(name="John") | Q(title="Sir") & Q(surname="Paul") + assert q1 == q2 + + def test_operation_inequality(self): + q1 = Q(name="John") | Q(title="Sir") + q2 = Q(title="Sir") | Q(name="John") + assert q1 != q2 + + def test_combine_and_empty(self): + q = Q(x=1) + assert q & Q() == q + assert Q() & q == q + + def test_combine_and_both_empty(self): + assert Q() & Q() == Q() + + def test_combine_or_empty(self): + q = Q(x=1) + assert q | 
Q() == q + assert Q() | q == q + + def test_combine_or_both_empty(self): + assert Q() | Q() == Q() + + def test_q_bool(self): + assert Q(name="John") + assert not Q() + + def test_combine_bool(self): + assert not Q() & Q() + assert Q() & Q(name="John") + assert Q(name="John") & Q() + assert Q() | Q(name="John") + assert Q(name="John") | Q() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py deleted file mode 100644 index 2c2d018c..00000000 --- a/tests/queryset/transform.py +++ /dev/null @@ -1,301 +0,0 @@ -import unittest - -from bson.son import SON - -from mongoengine import * -from mongoengine.queryset import Q, transform - -__all__ = ("TransformTest",) - - -class TransformTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - - def test_transform_query(self): - """Ensure that the _transform_query function operates correctly. - """ - self.assertEqual(transform.query(name='test', age=30), - {'name': 'test', 'age': 30}) - self.assertEqual(transform.query(age__lt=30), - {'age': {'$lt': 30}}) - self.assertEqual(transform.query(age__gt=20, age__lt=50), - {'age': {'$gt': 20, '$lt': 50}}) - self.assertEqual(transform.query(age=20, age__gt=50), - {'$and': [{'age': {'$gt': 50}}, {'age': 20}]}) - self.assertEqual(transform.query(friend__age__gte=30), - {'friend.age': {'$gte': 30}}) - self.assertEqual(transform.query(name__exists=True), - {'name': {'$exists': True}}) - - def test_transform_update(self): - class LisDoc(Document): - foo = ListField(StringField()) - - class DicDoc(Document): - dictField = DictField() - - class Doc(Document): - pass - - LisDoc.drop_collection() - DicDoc.drop_collection() - Doc.drop_collection() - - DicDoc().save() - doc = Doc().save() - - for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): - update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) - self.assertIsInstance(update[v]["dictField.test"], dict) - 
- # Update special cases - update = transform.update(DicDoc, unset__dictField__test=doc) - self.assertEqual(update["$unset"]["dictField.test"], 1) - - update = transform.update(DicDoc, pull__dictField__test=doc) - self.assertIsInstance(update["$pull"]["dictField"]["test"], dict) - - update = transform.update(LisDoc, pull__foo__in=['a']) - self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}}) - - def test_transform_update_push(self): - """Ensure the differences in behvaior between 'push' and 'push_all'""" - class BlogPost(Document): - tags = ListField(StringField()) - - update = transform.update(BlogPost, push__tags=['mongo', 'db']) - self.assertEqual(update, {'$push': {'tags': ['mongo', 'db']}}) - - update = transform.update(BlogPost, push_all__tags=['mongo', 'db']) - self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}}) - - def test_transform_update_no_operator_default_to_set(self): - """Ensure the differences in behvaior between 'push' and 'push_all'""" - class BlogPost(Document): - tags = ListField(StringField()) - - update = transform.update(BlogPost, tags=['mongo', 'db']) - self.assertEqual(update, {'$set': {'tags': ['mongo', 'db']}}) - - def test_query_field_name(self): - """Ensure that the correct field name is used when querying. 
- """ - class Comment(EmbeddedDocument): - content = StringField(db_field='commentContent') - - class BlogPost(Document): - title = StringField(db_field='postTitle') - comments = ListField(EmbeddedDocumentField(Comment), - db_field='postComments') - - BlogPost.drop_collection() - - data = {'title': 'Post 1', 'comments': [Comment(content='test')]} - post = BlogPost(**data) - post.save() - - self.assertIn('postTitle', BlogPost.objects(title=data['title'])._query) - self.assertFalse('title' in - BlogPost.objects(title=data['title'])._query) - self.assertEqual(BlogPost.objects(title=data['title']).count(), 1) - - self.assertIn('_id', BlogPost.objects(pk=post.id)._query) - self.assertEqual(BlogPost.objects(pk=post.id).count(), 1) - - self.assertIn('postComments.commentContent', BlogPost.objects(comments__content='test')._query) - self.assertEqual(BlogPost.objects(comments__content='test').count(), 1) - - BlogPost.drop_collection() - - def test_query_pk_field_name(self): - """Ensure that the correct "primary key" field name is used when - querying - """ - class BlogPost(Document): - title = StringField(primary_key=True, db_field='postTitle') - - BlogPost.drop_collection() - - data = {'title': 'Post 1'} - post = BlogPost(**data) - post.save() - - self.assertIn('_id', BlogPost.objects(pk=data['title'])._query) - self.assertIn('_id', BlogPost.objects(title=data['title'])._query) - self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1) - - BlogPost.drop_collection() - - def test_chaining(self): - class A(Document): - pass - - class B(Document): - a = ReferenceField(A) - - A.drop_collection() - B.drop_collection() - - a1 = A().save() - a2 = A().save() - - B(a=a1).save() - - # Works - q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query - - # Doesn't work - q2 = B.objects.filter(a__in=[a1, a2]) - q2 = q2.filter(a=a1)._query - - self.assertEqual(q1, q2) - - def test_raw_query_and_Q_objects(self): - """ - Test raw plays nicely - """ - class Foo(Document): - name = 
StringField() - a = StringField() - b = StringField() - c = StringField() - - meta = { - 'allow_inheritance': False - } - - query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query - self.assertEqual(query, {'$nor': [{'name': 'bar'}]}) - - q1 = {'$or': [{'a': 1}, {'b': 1}]} - query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query - self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1}) - - def test_raw_and_merging(self): - class Doc(Document): - meta = {'allow_inheritance': False} - - raw_query = Doc.objects(__raw__={ - 'deleted': False, - 'scraped': 'yes', - '$nor': [ - {'views.extracted': 'no'}, - {'attachments.views.extracted': 'no'} - ] - })._query - - self.assertEqual(raw_query, { - 'deleted': False, - 'scraped': 'yes', - '$nor': [ - {'views.extracted': 'no'}, - {'attachments.views.extracted': 'no'} - ] - }) - - def test_geojson_PointField(self): - class Location(Document): - loc = PointField() - - update = transform.update(Location, set__loc=[1, 2]) - self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) - - update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]}) - self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}}) - - def test_geojson_LineStringField(self): - class Location(Document): - line = LineStringField() - - update = transform.update(Location, set__line=[[1, 2], [2, 2]]) - self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}) - - update = transform.update(Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]}) - self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}) - - def test_geojson_PolygonField(self): - class Location(Document): - poly = PolygonField() - - update = transform.update(Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) - self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 
5], [40, 6], [41, 6], [40, 5]]]}}}) - - update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) - self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}}) - - def test_type(self): - class Doc(Document): - df = DynamicField() - Doc(df=True).save() - Doc(df=7).save() - Doc(df="df").save() - self.assertEqual(Doc.objects(df__type=1).count(), 0) # double - self.assertEqual(Doc.objects(df__type=8).count(), 1) # bool - self.assertEqual(Doc.objects(df__type=2).count(), 1) # str - self.assertEqual(Doc.objects(df__type=16).count(), 1) # int - - def test_last_field_name_like_operator(self): - class EmbeddedItem(EmbeddedDocument): - type = StringField() - name = StringField() - - class Doc(Document): - item = EmbeddedDocumentField(EmbeddedItem) - - Doc.drop_collection() - - doc = Doc(item=EmbeddedItem(type="axe", name="Heroic axe")) - doc.save() - - self.assertEqual(1, Doc.objects(item__type__="axe").count()) - self.assertEqual(1, Doc.objects(item__name__="Heroic axe").count()) - - Doc.objects(id=doc.id).update(set__item__type__='sword') - self.assertEqual(1, Doc.objects(item__type__="sword").count()) - self.assertEqual(0, Doc.objects(item__type__="axe").count()) - - def test_understandable_error_raised(self): - class Event(Document): - title = StringField() - location = GeoPointField() - - box = [(35.0, -125.0), (40.0, -100.0)] - # I *meant* to execute location__within_box=box - events = Event.objects(location__within=box) - with self.assertRaises(InvalidQueryError): - events.count() - - def test_update_pull_for_list_fields(self): - """ - Test added to check pull operation in update for - EmbeddedDocumentListField which is inside a EmbeddedDocumentField - """ - class Word(EmbeddedDocument): - word = StringField() - index = IntField() - - class SubDoc(EmbeddedDocument): - heading = ListField(StringField()) - text = EmbeddedDocumentListField(Word) - 
- class MainDoc(Document): - title = StringField() - content = EmbeddedDocumentField(SubDoc) - - word = Word(word='abc', index=1) - update = transform.update(MainDoc, pull__content__text=word) - self.assertEqual(update, {'$pull': {'content.text': SON([('word', u'abc'), ('index', 1)])}}) - - update = transform.update(MainDoc, pull__content__heading='xyz') - self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}}) - - update = transform.update(MainDoc, pull__content__text__word__in=['foo', 'bar']) - self.assertEqual(update, {'$pull': {'content.text': {'word': {'$in': ['foo', 'bar']}}}}) - - update = transform.update(MainDoc, pull__content__text__word__nin=['foo', 'bar']) - self.assertEqual(update, {'$pull': {'content.text': {'word': {'$nin': ['foo', 'bar']}}}}) - -if __name__ == '__main__': - unittest.main() diff --git a/tests/queryset/visitor.py b/tests/queryset/visitor.py deleted file mode 100644 index 22d274a8..00000000 --- a/tests/queryset/visitor.py +++ /dev/null @@ -1,358 +0,0 @@ -import datetime -import re -import unittest - -from bson import ObjectId - -from mongoengine import * -from mongoengine.errors import InvalidQueryError -from mongoengine.queryset import Q - -__all__ = ("QTest",) - - -class QTest(unittest.TestCase): - - def setUp(self): - connect(db='mongoenginetest') - - class Person(Document): - name = StringField() - age = IntField() - meta = {'allow_inheritance': True} - - Person.drop_collection() - self.Person = Person - - def test_empty_q(self): - """Ensure that empty Q objects won't hurt. 
- """ - q1 = Q() - q2 = Q(age__gte=18) - q3 = Q() - q4 = Q(name='test') - q5 = Q() - - class Person(Document): - name = StringField() - age = IntField() - - query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]} - self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query) - - query = {'age': {'$gte': 18}, 'name': 'test'} - self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query) - - def test_q_with_dbref(self): - """Ensure Q objects handle DBRefs correctly""" - connect(db='mongoenginetest') - - class User(Document): - pass - - class Post(Document): - created_user = ReferenceField(User) - - user = User.objects.create() - Post.objects.create(created_user=user) - - self.assertEqual(Post.objects.filter(created_user=user).count(), 1) - self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1) - - def test_and_combination(self): - """Ensure that Q-objects correctly AND together. - """ - class TestDoc(Document): - x = IntField() - y = StringField() - - query = (Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) - self.assertEqual(query, {'$and': [{'x': {'$lt': 7}}, {'x': {'$lt': 3}}]}) - - query = (Q(y="a") & Q(x__lt=7) & Q(x__lt=3)).to_query(TestDoc) - self.assertEqual(query, {'$and': [{'y': "a"}, {'x': {'$lt': 7}}, {'x': {'$lt': 3}}]}) - - # Check normal cases work without an error - query = Q(x__lt=7) & Q(x__gt=3) - - q1 = Q(x__lt=7) - q2 = Q(x__gt=3) - query = (q1 & q2).to_query(TestDoc) - self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}}) - - # More complex nested example - query = Q(x__lt=100) & Q(y__ne='NotMyString') - query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100) - mongo_query = { - 'x': {'$lt': 100, '$gt': -100}, - 'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']}, - } - self.assertEqual(query.to_query(TestDoc), mongo_query) - - def test_or_combination(self): - """Ensure that Q-objects correctly OR together. 
- """ - class TestDoc(Document): - x = IntField() - - q1 = Q(x__lt=3) - q2 = Q(x__gt=7) - query = (q1 | q2).to_query(TestDoc) - self.assertEqual(query, { - '$or': [ - {'x': {'$lt': 3}}, - {'x': {'$gt': 7}}, - ] - }) - - def test_and_or_combination(self): - """Ensure that Q-objects handle ANDing ORed components. - """ - class TestDoc(Document): - x = IntField() - y = BooleanField() - - TestDoc.drop_collection() - - query = (Q(x__gt=0) | Q(x__exists=False)) - query &= Q(x__lt=100) - self.assertEqual(query.to_query(TestDoc), {'$and': [ - {'$or': [{'x': {'$gt': 0}}, - {'x': {'$exists': False}}]}, - {'x': {'$lt': 100}}] - }) - - q1 = (Q(x__gt=0) | Q(x__exists=False)) - q2 = (Q(x__lt=100) | Q(y=True)) - query = (q1 & q2).to_query(TestDoc) - - TestDoc(x=101).save() - TestDoc(x=10).save() - TestDoc(y=True).save() - - self.assertEqual(query, { - '$and': [ - {'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]}, - {'$or': [{'x': {'$lt': 100}}, {'y': True}]} - ] - }) - self.assertEqual(2, TestDoc.objects(q1 & q2).count()) - - def test_or_and_or_combination(self): - """Ensure that Q-objects handle ORing ANDed ORed components. 
:) - """ - class TestDoc(Document): - x = IntField() - y = BooleanField() - - TestDoc.drop_collection() - TestDoc(x=-1, y=True).save() - TestDoc(x=101, y=True).save() - TestDoc(x=99, y=False).save() - TestDoc(x=101, y=False).save() - - q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False))) - q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) - query = (q1 | q2).to_query(TestDoc) - - self.assertEqual(query, { - '$or': [ - {'$and': [{'x': {'$gt': 0}}, - {'$or': [{'y': True}, {'y': {'$exists': False}}]}]}, - {'$and': [{'x': {'$lt': 100}}, - {'$or': [{'y': False}, {'y': {'$exists': False}}]}]} - ] - }) - self.assertEqual(2, TestDoc.objects(q1 | q2).count()) - - def test_multiple_occurence_in_field(self): - class Test(Document): - name = StringField(max_length=40) - title = StringField(max_length=40) - - q1 = Q(name__contains='te') | Q(title__contains='te') - q2 = Q(name__contains='12') | Q(title__contains='12') - - q3 = q1 & q2 - - query = q3.to_query(Test) - self.assertEqual(query["$and"][0], q1.to_query(Test)) - self.assertEqual(query["$and"][1], q2.to_query(Test)) - - def test_q_clone(self): - - class TestDoc(Document): - x = IntField() - - TestDoc.drop_collection() - for i in range(1, 101): - t = TestDoc(x=i) - t.save() - - # Check normal cases work without an error - test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3)) - - self.assertEqual(test.count(), 3) - - test2 = test.clone() - self.assertEqual(test2.count(), 3) - self.assertNotEqual(test2, test) - - test3 = test2.filter(x=6) - self.assertEqual(test3.count(), 1) - self.assertEqual(test.count(), 3) - - def test_q(self): - """Ensure that Q objects may be used to query for documents. 
- """ - class BlogPost(Document): - title = StringField() - publish_date = DateTimeField() - published = BooleanField() - - BlogPost.drop_collection() - - post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False) - post1.save() - - post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True) - post2.save() - - post3 = BlogPost(title='Test 3', published=True) - post3.save() - - post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8)) - post4.save() - - post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15)) - post5.save() - - post6 = BlogPost(title='Test 1', published=False) - post6.save() - - # Check ObjectId lookup works - obj = BlogPost.objects(id=post1.id).first() - self.assertEqual(obj, post1) - - # Check Q object combination with one does not exist - q = BlogPost.objects(Q(title='Test 5') | Q(published=True)) - posts = [post.id for post in q] - - published_posts = (post2, post3) - self.assertTrue(all(obj.id in posts for obj in published_posts)) - - q = BlogPost.objects(Q(title='Test 1') | Q(published=True)) - posts = [post.id for post in q] - published_posts = (post1, post2, post3, post5, post6) - self.assertTrue(all(obj.id in posts for obj in published_posts)) - - # Check Q object combination - date = datetime.datetime(2010, 1, 10) - q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) - posts = [post.id for post in q] - - published_posts = (post1, post2, post3, post4) - self.assertTrue(all(obj.id in posts for obj in published_posts)) - - self.assertFalse(any(obj.id in posts for obj in [post5, post6])) - - BlogPost.drop_collection() - - # Check the 'in' operator - self.Person(name='user1', age=20).save() - self.Person(name='user2', age=20).save() - self.Person(name='user3', age=30).save() - self.Person(name='user4', age=40).save() - - self.assertEqual(self.Person.objects(Q(age__in=[20])).count(), 2) - 
self.assertEqual(self.Person.objects(Q(age__in=[20, 30])).count(), 3) - - # Test invalid query objs - with self.assertRaises(InvalidQueryError): - self.Person.objects('user1') - - # filter should fail, too - with self.assertRaises(InvalidQueryError): - self.Person.objects.filter('user1') - - def test_q_regex(self): - """Ensure that Q objects can be queried using regexes. - """ - person = self.Person(name='Guido van Rossum') - person.save() - - obj = self.Person.objects(Q(name=re.compile('^Gui'))).first() - self.assertEqual(obj, person) - obj = self.Person.objects(Q(name=re.compile('^gui'))).first() - self.assertEqual(obj, None) - - obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first() - self.assertEqual(obj, person) - - obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first() - self.assertEqual(obj, person) - - obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first() - self.assertEqual(obj, None) - - def test_q_repr(self): - self.assertEqual(repr(Q()), 'Q(**{})') - self.assertEqual(repr(Q(name='test')), "Q(**{'name': 'test'})") - - self.assertEqual( - repr(Q(name='test') & Q(age__gte=18)), - "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))") - - self.assertEqual( - repr(Q(name='test') | Q(age__gte=18)), - "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))") - - def test_q_lists(self): - """Ensure that Q objects query ListFields correctly. 
- """ - class BlogPost(Document): - tags = ListField(StringField()) - - BlogPost.drop_collection() - - BlogPost(tags=['python', 'mongo']).save() - BlogPost(tags=['python']).save() - - self.assertEqual(BlogPost.objects(Q(tags='mongo')).count(), 1) - self.assertEqual(BlogPost.objects(Q(tags='python')).count(), 2) - - BlogPost.drop_collection() - - def test_q_merge_queries_edge_case(self): - - class User(Document): - email = EmailField(required=False) - name = StringField() - - User.drop_collection() - pk = ObjectId() - User(email='example@example.com', pk=pk).save() - - self.assertEqual(1, User.objects.filter(Q(email='example@example.com') | - Q(name='John Doe')).limit(2).filter(pk=pk).count()) - - def test_chained_q_or_filtering(self): - - class Post(EmbeddedDocument): - name = StringField(required=True) - - class Item(Document): - postables = ListField(EmbeddedDocumentField(Post)) - - Item.drop_collection() - - Item(postables=[Post(name="a"), Post(name="b")]).save() - Item(postables=[Post(name="a"), Post(name="c")]).save() - Item(postables=[Post(name="a"), Post(name="b"), Post(name="c")]).save() - - self.assertEqual(Item.objects(Q(postables__name="a") & Q(postables__name="b")).count(), 2) - self.assertEqual(Item.objects.filter(postables__name="a").filter(postables__name="b").count(), 2) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_ci.py b/tests/test_ci.py new file mode 100644 index 00000000..04a800eb --- /dev/null +++ b/tests/test_ci.py @@ -0,0 +1,9 @@ +def test_ci_placeholder(): + # This empty test is used within the CI to + # setup the tox venv without running the test suite + # if we simply skip all test with pytest -k=wrong_pattern + # pytest command would return with exit_code=5 (i.e "no tests run") + # making travis fail + # this empty test is the recommended way to handle this + # as described in https://github.com/pytest-dev/pytest/issues/2393 + pass diff --git a/tests/test_common.py b/tests/test_common.py index 
04ad5b34..1779a91b 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,15 +1,16 @@ import unittest -from mongoengine.common import _import_class +import pytest + from mongoengine import Document +from mongoengine.common import _import_class -class TestCommon(unittest.TestCase): - +class TestCommon: def test__import_class(self): doc_cls = _import_class("Document") - self.assertIs(doc_cls, Document) + assert doc_cls is Document def test__import_class_raise_if_not_known(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _import_class("UnknownClass") diff --git a/tests/test_connection.py b/tests/test_connection.py index d3fcc395..b57d4597 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,33 +1,52 @@ import datetime -from pymongo import MongoClient -from pymongo.errors import OperationFailure, InvalidName -from pymongo import ReadPreference +from bson.tz_util import utc +import pymongo + +from pymongo import MongoClient, ReadPreference +from pymongo.errors import InvalidName, OperationFailure +import pytest try: import unittest2 as unittest except ImportError: import unittest -from nose.plugins.skip import SkipTest -import pymongo -from bson.tz_util import utc - -from mongoengine import ( - connect, register_connection, - Document, DateTimeField, - disconnect_all, StringField) import mongoengine.connection -from mongoengine.connection import (MongoEngineConnectionError, get_db, - get_connection, disconnect, DEFAULT_DATABASE_NAME) +from mongoengine import ( + DateTimeField, + Document, + StringField, + connect, + disconnect_all, + register_connection, +) +from mongoengine.connection import ( + ConnectionFailure, + DEFAULT_DATABASE_NAME, + disconnect, + get_connection, + get_db, +) def get_tz_awareness(connection): return connection.codec_options.tz_aware -class ConnectionTest(unittest.TestCase): +try: + import mongomock + MONGOMOCK_INSTALLED = True +except ImportError: + MONGOMOCK_INSTALLED = False + 
+require_mongomock = pytest.mark.skipif( + not MONGOMOCK_INSTALLED, reason="you need mongomock installed to run this testcase" +) + + +class ConnectionTest(unittest.TestCase): @classmethod def setUpClass(cls): disconnect_all() @@ -43,58 +62,58 @@ class ConnectionTest(unittest.TestCase): def test_connect(self): """Ensure that the connect() method works properly.""" - connect('mongoenginetest') + connect("mongoenginetest") conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest') + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest" - connect('mongoenginetest2', alias='testdb') - conn = get_connection('testdb') - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + connect("mongoenginetest2", alias="testdb") + conn = get_connection("testdb") + assert isinstance(conn, pymongo.mongo_client.MongoClient) def test_connect_disconnect_works_properly(self): class History1(Document): name = StringField() - meta = {'db_alias': 'db1'} + meta = {"db_alias": "db1"} class History2(Document): name = StringField() - meta = {'db_alias': 'db2'} + meta = {"db_alias": "db2"} - connect('db1', alias='db1') - connect('db2', alias='db2') + connect("db1", alias="db1") + connect("db2", alias="db2") History1.drop_collection() History2.drop_collection() - h = History1(name='default').save() - h1 = History2(name='db1').save() + h = History1(name="default").save() + h1 = History2(name="db1").save() - self.assertEqual(list(History1.objects().as_pymongo()), - [{'_id': h.id, 'name': 'default'}]) - self.assertEqual(list(History2.objects().as_pymongo()), - [{'_id': h1.id, 'name': 'db1'}]) + assert list(History1.objects().as_pymongo()) == [ + {"_id": h.id, "name": "default"} + ] + assert list(History2.objects().as_pymongo()) == [{"_id": h1.id, "name": 
"db1"}] - disconnect('db1') - disconnect('db2') + disconnect("db1") + disconnect("db2") - with self.assertRaises(MongoEngineConnectionError): + with pytest.raises(ConnectionFailure): list(History1.objects().as_pymongo()) - with self.assertRaises(MongoEngineConnectionError): + with pytest.raises(ConnectionFailure): list(History2.objects().as_pymongo()) - connect('db1', alias='db1') - connect('db2', alias='db2') + connect("db1", alias="db1") + connect("db2", alias="db2") - self.assertEqual(list(History1.objects().as_pymongo()), - [{'_id': h.id, 'name': 'default'}]) - self.assertEqual(list(History2.objects().as_pymongo()), - [{'_id': h1.id, 'name': 'db1'}]) + assert list(History1.objects().as_pymongo()) == [ + {"_id": h.id, "name": "default"} + ] + assert list(History2.objects().as_pymongo()) == [{"_id": h1.id, "name": "db1"}] def test_connect_different_documents_to_different_database(self): class History(Document): @@ -102,170 +121,207 @@ class ConnectionTest(unittest.TestCase): class History1(Document): name = StringField() - meta = {'db_alias': 'db1'} + meta = {"db_alias": "db1"} class History2(Document): name = StringField() - meta = {'db_alias': 'db2'} + meta = {"db_alias": "db2"} connect() - connect('db1', alias='db1') - connect('db2', alias='db2') + connect("db1", alias="db1") + connect("db2", alias="db2") History.drop_collection() History1.drop_collection() History2.drop_collection() - h = History(name='default').save() - h1 = History1(name='db1').save() - h2 = History2(name='db2').save() + h = History(name="default").save() + h1 = History1(name="db1").save() + h2 = History2(name="db2").save() - self.assertEqual(History._collection.database.name, DEFAULT_DATABASE_NAME) - self.assertEqual(History1._collection.database.name, 'db1') - self.assertEqual(History2._collection.database.name, 'db2') + assert History._collection.database.name == DEFAULT_DATABASE_NAME + assert History1._collection.database.name == "db1" + assert History2._collection.database.name == 
"db2" - self.assertEqual(list(History.objects().as_pymongo()), - [{'_id': h.id, 'name': 'default'}]) - self.assertEqual(list(History1.objects().as_pymongo()), - [{'_id': h1.id, 'name': 'db1'}]) - self.assertEqual(list(History2.objects().as_pymongo()), - [{'_id': h2.id, 'name': 'db2'}]) + assert list(History.objects().as_pymongo()) == [ + {"_id": h.id, "name": "default"} + ] + assert list(History1.objects().as_pymongo()) == [{"_id": h1.id, "name": "db1"}] + assert list(History2.objects().as_pymongo()) == [{"_id": h2.id, "name": "db2"}] def test_connect_fails_if_connect_2_times_with_default_alias(self): - connect('mongoenginetest') + connect("mongoenginetest") - with self.assertRaises(MongoEngineConnectionError) as ctx_err: - connect('mongoenginetest2') - self.assertEqual("A different connection with alias `default` was already registered. Use disconnect() first", str(ctx_err.exception)) + with pytest.raises(ConnectionFailure) as exc_info: + connect("mongoenginetest2") + assert ( + "A different connection with alias `default` was already registered. Use disconnect() first" + == str(exc_info.value) + ) def test_connect_fails_if_connect_2_times_with_custom_alias(self): - connect('mongoenginetest', alias='alias1') + connect("mongoenginetest", alias="alias1") - with self.assertRaises(MongoEngineConnectionError) as ctx_err: - connect('mongoenginetest2', alias='alias1') + with pytest.raises(ConnectionFailure) as exc_info: + connect("mongoenginetest2", alias="alias1") - self.assertEqual("A different connection with alias `alias1` was already registered. Use disconnect() first", str(ctx_err.exception)) + assert ( + "A different connection with alias `alias1` was already registered. 
Use disconnect() first" + == str(exc_info.value) + ) - def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way(self): + def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way( + self, + ): """Intended to keep the detecton function simple but robust""" - db_name = 'mongoenginetest' - db_alias = 'alias1' - connect(db=db_name, alias=db_alias, host='localhost', port=27017) + db_name = "mongoenginetest" + db_alias = "alias1" + connect(db=db_name, alias=db_alias, host="localhost", port=27017) - with self.assertRaises(MongoEngineConnectionError): - connect(host='mongodb://localhost:27017/%s' % db_name, alias=db_alias) + with pytest.raises(ConnectionFailure): + connect(host="mongodb://localhost:27017/%s" % db_name, alias=db_alias) def test_connect_passes_silently_connect_multiple_times_with_same_config(self): # test default connection to `test` connect() connect() - self.assertEqual(len(mongoengine.connection._connections), 1) - connect('test01', alias='test01') - connect('test01', alias='test01') - self.assertEqual(len(mongoengine.connection._connections), 2) - connect(host='mongodb://localhost:27017/mongoenginetest02', alias='test02') - connect(host='mongodb://localhost:27017/mongoenginetest02', alias='test02') - self.assertEqual(len(mongoengine.connection._connections), 3) + assert len(mongoengine.connection._connections) == 1 + connect("test01", alias="test01") + connect("test01", alias="test01") + assert len(mongoengine.connection._connections) == 2 + connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02") + connect(host="mongodb://localhost:27017/mongoenginetest02", alias="test02") + assert len(mongoengine.connection._connections) == 3 def test_connect_with_invalid_db_name(self): """Ensure that connect() method fails fast if db name is invalid """ - with self.assertRaises(InvalidName): - connect('mongomock://localhost') + with pytest.raises(InvalidName): + connect("mongomock://localhost") def 
test_connect_with_db_name_external(self): """Ensure that connect() works if db name is $external """ """Ensure that the connect() method works properly.""" - connect('$external') + connect("$external") conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, '$external') + assert isinstance(db, pymongo.database.Database) + assert db.name == "$external" - connect('$external', alias='testdb') - conn = get_connection('testdb') - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + connect("$external", alias="testdb") + conn = get_connection("testdb") + assert isinstance(conn, pymongo.mongo_client.MongoClient) def test_connect_with_invalid_db_name_type(self): """Ensure that connect() method fails fast if db name has invalid type """ - with self.assertRaises(TypeError): - non_string_db_name = ['e. g. list instead of a string'] + with pytest.raises(TypeError): + non_string_db_name = ["e. g. list instead of a string"] connect(non_string_db_name) + @require_mongomock def test_connect_in_mocking(self): """Ensure that the connect() method works properly in mocking. 
""" - try: - import mongomock - except ImportError: - raise SkipTest('you need mongomock installed to run this testcase') - - connect('mongoenginetest', host='mongomock://localhost') + connect("mongoenginetest", host="mongomock://localhost") conn = get_connection() - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) - connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2') - conn = get_connection('testdb2') - self.assertIsInstance(conn, mongomock.MongoClient) + connect("mongoenginetest2", host="mongomock://localhost", alias="testdb2") + conn = get_connection("testdb2") + assert isinstance(conn, mongomock.MongoClient) - connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3') - conn = get_connection('testdb3') - self.assertIsInstance(conn, mongomock.MongoClient) + connect( + "mongoenginetest3", + host="mongodb://localhost", + is_mock=True, + alias="testdb3", + ) + conn = get_connection("testdb3") + assert isinstance(conn, mongomock.MongoClient) - connect('mongoenginetest4', is_mock=True, alias='testdb4') - conn = get_connection('testdb4') - self.assertIsInstance(conn, mongomock.MongoClient) + connect("mongoenginetest4", is_mock=True, alias="testdb4") + conn = get_connection("testdb4") + assert isinstance(conn, mongomock.MongoClient) - connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5') - conn = get_connection('testdb5') - self.assertIsInstance(conn, mongomock.MongoClient) + connect( + host="mongodb://localhost:27017/mongoenginetest5", + is_mock=True, + alias="testdb5", + ) + conn = get_connection("testdb5") + assert isinstance(conn, mongomock.MongoClient) - connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6') - conn = get_connection('testdb6') - self.assertIsInstance(conn, mongomock.MongoClient) + connect(host="mongomock://localhost:27017/mongoenginetest6", alias="testdb6") + conn = 
get_connection("testdb6") + assert isinstance(conn, mongomock.MongoClient) - connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7') - conn = get_connection('testdb7') - self.assertIsInstance(conn, mongomock.MongoClient) + connect( + host="mongomock://localhost:27017/mongoenginetest7", + is_mock=True, + alias="testdb7", + ) + conn = get_connection("testdb7") + assert isinstance(conn, mongomock.MongoClient) + @require_mongomock + def test_default_database_with_mocking(self): + """Ensure that the default database is correctly set when using mongomock. + """ + disconnect_all() + + class SomeDocument(Document): + pass + + conn = connect(host="mongomock://localhost:27017/mongoenginetest") + some_document = SomeDocument() + # database won't exist until we save a document + some_document.save() + assert conn.get_default_database().name == "mongoenginetest" + assert conn.list_database_names()[0] == "mongoenginetest" + + @require_mongomock def test_connect_with_host_list(self): """Ensure that the connect() method works when host is a list Uses mongomock to test w/o needing multiple mongod/mongos processes """ - try: - import mongomock - except ImportError: - raise SkipTest('you need mongomock installed to run this testcase') - - connect(host=['mongomock://localhost']) + connect(host=["mongomock://localhost"]) conn = get_connection() - self.assertIsInstance(conn, mongomock.MongoClient) + assert isinstance(conn, mongomock.MongoClient) - connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2') - conn = get_connection('testdb2') - self.assertIsInstance(conn, mongomock.MongoClient) + connect(host=["mongodb://localhost"], is_mock=True, alias="testdb2") + conn = get_connection("testdb2") + assert isinstance(conn, mongomock.MongoClient) - connect(host=['localhost'], is_mock=True, alias='testdb3') - conn = get_connection('testdb3') - self.assertIsInstance(conn, mongomock.MongoClient) + connect(host=["localhost"], is_mock=True, 
alias="testdb3") + conn = get_connection("testdb3") + assert isinstance(conn, mongomock.MongoClient) - connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4') - conn = get_connection('testdb4') - self.assertIsInstance(conn, mongomock.MongoClient) + connect( + host=["mongomock://localhost:27017", "mongomock://localhost:27018"], + alias="testdb4", + ) + conn = get_connection("testdb4") + assert isinstance(conn, mongomock.MongoClient) - connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5') - conn = get_connection('testdb5') - self.assertIsInstance(conn, mongomock.MongoClient) + connect( + host=["mongodb://localhost:27017", "mongodb://localhost:27018"], + is_mock=True, + alias="testdb5", + ) + conn = get_connection("testdb5") + assert isinstance(conn, mongomock.MongoClient) - connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6') - conn = get_connection('testdb6') - self.assertIsInstance(conn, mongomock.MongoClient) + connect( + host=["localhost:27017", "localhost:27018"], is_mock=True, alias="testdb6" + ) + conn = get_connection("testdb6") + assert isinstance(conn, mongomock.MongoClient) def test_disconnect_cleans_globals(self): """Ensure that the disconnect() method cleans the globals objects""" @@ -273,52 +329,52 @@ class ConnectionTest(unittest.TestCase): dbs = mongoengine.connection._dbs connection_settings = mongoengine.connection._connection_settings - connect('mongoenginetest') + connect("mongoenginetest") - self.assertEqual(len(connections), 1) - self.assertEqual(len(dbs), 0) - self.assertEqual(len(connection_settings), 1) + assert len(connections) == 1 + assert len(dbs) == 0 + assert len(connection_settings) == 1 class TestDoc(Document): pass TestDoc.drop_collection() # triggers the db - self.assertEqual(len(dbs), 1) + assert len(dbs) == 1 disconnect() - self.assertEqual(len(connections), 0) - self.assertEqual(len(dbs), 0) - 
self.assertEqual(len(connection_settings), 0) + assert len(connections) == 0 + assert len(dbs) == 0 + assert len(connection_settings) == 0 def test_disconnect_cleans_cached_collection_attribute_in_document(self): """Ensure that the disconnect() method works properly""" - conn1 = connect('mongoenginetest') + connect("mongoenginetest") class History(Document): pass - self.assertIsNone(History._collection) + assert History._collection is None History.drop_collection() - History.objects.first() # will trigger the caching of _collection attribute - self.assertIsNotNone(History._collection) + History.objects.first() # will trigger the caching of _collection attribute + assert History._collection is not None disconnect() - self.assertIsNone(History._collection) + assert History._collection is None - with self.assertRaises(MongoEngineConnectionError) as ctx_err: + with pytest.raises(ConnectionFailure) as exc_info: History.objects.first() - self.assertEqual("You have not defined a default connection", str(ctx_err.exception)) + assert "You have not defined a default connection" == str(exc_info.value) def test_connect_disconnect_works_on_same_document(self): """Ensure that the connect/disconnect works properly with a single Document""" - db1 = 'db1' - db2 = 'db2' + db1 = "db1" + db2 = "db2" # Ensure freshness of the 2 databases through pymongo - client = MongoClient('localhost', 27017) + client = MongoClient("localhost", 27017) client.drop_database(db1) client.drop_database(db2) @@ -328,68 +384,68 @@ class ConnectionTest(unittest.TestCase): class User(Document): name = StringField(required=True) - user1 = User(name='John is in db1').save() + user1 = User(name="John is in db1").save() disconnect() # Make sure save doesnt work at this stage - with self.assertRaises(MongoEngineConnectionError): - User(name='Wont work').save() + with pytest.raises(ConnectionFailure): + User(name="Wont work").save() # Save in db2 connect(db2) - user2 = User(name='Bob is in db2').save() + user2 = 
User(name="Bob is in db2").save() disconnect() db1_users = list(client[db1].user.find()) - self.assertEqual(db1_users, [{'_id': user1.id, 'name': 'John is in db1'}]) + assert db1_users == [{"_id": user1.id, "name": "John is in db1"}] db2_users = list(client[db2].user.find()) - self.assertEqual(db2_users, [{'_id': user2.id, 'name': 'Bob is in db2'}]) + assert db2_users == [{"_id": user2.id, "name": "Bob is in db2"}] def test_disconnect_silently_pass_if_alias_does_not_exist(self): connections = mongoengine.connection._connections - self.assertEqual(len(connections), 0) - disconnect(alias='not_exist') + assert len(connections) == 0 + disconnect(alias="not_exist") def test_disconnect_all(self): connections = mongoengine.connection._connections dbs = mongoengine.connection._dbs connection_settings = mongoengine.connection._connection_settings - connect('mongoenginetest') - connect('mongoenginetest2', alias='db1') + connect("mongoenginetest") + connect("mongoenginetest2", alias="db1") class History(Document): pass class History1(Document): name = StringField() - meta = {'db_alias': 'db1'} + meta = {"db_alias": "db1"} - History.drop_collection() # will trigger the caching of _collection attribute + History.drop_collection() # will trigger the caching of _collection attribute History.objects.first() History1.drop_collection() History1.objects.first() - self.assertIsNotNone(History._collection) - self.assertIsNotNone(History1._collection) + assert History._collection is not None + assert History1._collection is not None - self.assertEqual(len(connections), 2) - self.assertEqual(len(dbs), 2) - self.assertEqual(len(connection_settings), 2) + assert len(connections) == 2 + assert len(dbs) == 2 + assert len(connection_settings) == 2 disconnect_all() - self.assertIsNone(History._collection) - self.assertIsNone(History1._collection) + assert History._collection is None + assert History1._collection is None - self.assertEqual(len(connections), 0) - self.assertEqual(len(dbs), 0) - 
self.assertEqual(len(connection_settings), 0) + assert len(connections) == 0 + assert len(dbs) == 0 + assert len(connection_settings) == 0 - with self.assertRaises(MongoEngineConnectionError): + with pytest.raises(ConnectionFailure): History.objects.first() - with self.assertRaises(MongoEngineConnectionError): + with pytest.raises(ConnectionFailure): History1.objects.first() def test_disconnect_all_silently_pass_if_no_connection_exist(self): @@ -398,19 +454,19 @@ class ConnectionTest(unittest.TestCase): def test_sharing_connections(self): """Ensure that connections are shared when the connection settings are exactly the same """ - connect('mongoenginetests', alias='testdb1') - expected_connection = get_connection('testdb1') + connect("mongoenginetests", alias="testdb1") + expected_connection = get_connection("testdb1") - connect('mongoenginetests', alias='testdb2') - actual_connection = get_connection('testdb2') + connect("mongoenginetests", alias="testdb2") + actual_connection = get_connection("testdb2") expected_connection.server_info() - self.assertEqual(expected_connection, actual_connection) + assert expected_connection == actual_connection def test_connect_uri(self): """Ensure that the connect() method works properly with URIs.""" - c = connect(db='mongoenginetest', alias='admin') + c = connect(db="mongoenginetest", alias="admin") c.admin.system.users.delete_many({}) c.mongoenginetest.system.users.delete_many({}) @@ -418,14 +474,16 @@ class ConnectionTest(unittest.TestCase): c.admin.authenticate("admin", "password") c.admin.command("createUser", "username", pwd="password", roles=["dbOwner"]) - connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') + connect( + "testdb_uri", host="mongodb://username:password@localhost/mongoenginetest" + ) conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, 
pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest') + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest" c.admin.system.users.delete_many({}) c.mongoenginetest.system.users.delete_many({}) @@ -434,67 +492,73 @@ class ConnectionTest(unittest.TestCase): """Ensure connect() method works properly if the URI doesn't include a database name. """ - connect("mongoenginetest", host='mongodb://localhost/') + connect("mongoenginetest", host="mongodb://localhost/") conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest') + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest" def test_connect_uri_default_db(self): """Ensure connect() defaults to the right database name if the URI and the database_name don't explicitly specify it. """ - connect(host='mongodb://localhost/') + connect(host="mongodb://localhost/") conn = get_connection() - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.mongo_client.MongoClient) db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'test') + assert isinstance(db, pymongo.database.Database) + assert db.name == "test" def test_uri_without_credentials_doesnt_override_conn_settings(self): """Ensure connect() uses the username & password params if the URI doesn't explicitly specify them. """ - c = connect(host='mongodb://localhost/mongoenginetest', - username='user', - password='pass') + connect( + host="mongodb://localhost/mongoenginetest", username="user", password="pass" + ) # OperationFailure means that mongoengine attempted authentication # w/ the provided username/password and failed - that's the desired # behavior. 
If the MongoDB URI would override the credentials - self.assertRaises(OperationFailure, get_db) + with pytest.raises(OperationFailure): + get_db() def test_connect_uri_with_authsource(self): """Ensure that the connect() method works well with `authSource` option in the URI. """ # Create users - c = connect('mongoenginetest') + c = connect("mongoenginetest") c.admin.system.users.delete_many({}) c.admin.command("createUser", "username2", pwd="password", roles=["dbOwner"]) # Authentication fails without "authSource" test_conn = connect( - 'mongoenginetest', alias='test1', - host='mongodb://username2:password@localhost/mongoenginetest' + "mongoenginetest", + alias="test1", + host="mongodb://username2:password@localhost/mongoenginetest", ) - self.assertRaises(OperationFailure, test_conn.server_info) + with pytest.raises(OperationFailure): + test_conn.server_info() # Authentication succeeds with "authSource" authd_conn = connect( - 'mongoenginetest', alias='test2', - host=('mongodb://username2:password@localhost/' - 'mongoenginetest?authSource=admin') + "mongoenginetest", + alias="test2", + host=( + "mongodb://username2:password@localhost/" + "mongoenginetest?authSource=admin" + ), ) - db = get_db('test2') - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest') + db = get_db("test2") + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest" # Clear all users authd_conn.admin.system.users.delete_many({}) @@ -502,82 +566,89 @@ class ConnectionTest(unittest.TestCase): def test_register_connection(self): """Ensure that connections with different aliases may be registered. 
""" - register_connection('testdb', 'mongoenginetest2') + register_connection("testdb", "mongoenginetest2") - self.assertRaises(MongoEngineConnectionError, get_connection) - conn = get_connection('testdb') - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + with pytest.raises(ConnectionFailure): + get_connection() + conn = get_connection("testdb") + assert isinstance(conn, pymongo.mongo_client.MongoClient) - db = get_db('testdb') - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'mongoenginetest2') + db = get_db("testdb") + assert isinstance(db, pymongo.database.Database) + assert db.name == "mongoenginetest2" def test_register_connection_defaults(self): """Ensure that defaults are used when the host and port are None. """ - register_connection('testdb', 'mongoenginetest', host=None, port=None) + register_connection("testdb", "mongoenginetest", host=None, port=None) - conn = get_connection('testdb') - self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) + conn = get_connection("testdb") + assert isinstance(conn, pymongo.mongo_client.MongoClient) def test_connection_kwargs(self): """Ensure that connection kwargs get passed to pymongo.""" - connect('mongoenginetest', alias='t1', tz_aware=True) - conn = get_connection('t1') + connect("mongoenginetest", alias="t1", tz_aware=True) + conn = get_connection("t1") - self.assertTrue(get_tz_awareness(conn)) + assert get_tz_awareness(conn) - connect('mongoenginetest2', alias='t2') - conn = get_connection('t2') - self.assertFalse(get_tz_awareness(conn)) + connect("mongoenginetest2", alias="t2") + conn = get_connection("t2") + assert not get_tz_awareness(conn) def test_connection_pool_via_kwarg(self): """Ensure we can specify a max connection pool size using a connection kwarg. 
""" - pool_size_kwargs = {'maxpoolsize': 100} + pool_size_kwargs = {"maxpoolsize": 100} - conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs) - self.assertEqual(conn.max_pool_size, 100) + conn = connect( + "mongoenginetest", alias="max_pool_size_via_kwarg", **pool_size_kwargs + ) + assert conn.max_pool_size == 100 def test_connection_pool_via_uri(self): """Ensure we can specify a max connection pool size using an option in a connection URI. """ - conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri') - self.assertEqual(conn.max_pool_size, 100) + conn = connect( + host="mongodb://localhost/test?maxpoolsize=100", + alias="max_pool_size_via_uri", + ) + assert conn.max_pool_size == 100 def test_write_concern(self): """Ensure write concern can be specified in connect() via a kwarg or as part of the connection URI. """ - conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true') - conn2 = connect('testing', alias='conn2', w=1, j=True) - self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True}) - self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True}) + conn1 = connect( + alias="conn1", host="mongodb://localhost/testing?w=1&journal=true" + ) + conn2 = connect("testing", alias="conn2", w=1, journal=True) + assert conn1.write_concern.document == {"w": 1, "j": True} + assert conn2.write_concern.document == {"w": 1, "j": True} def test_connect_with_replicaset_via_uri(self): """Ensure connect() works when specifying a replicaSet via the MongoDB URI. 
""" - c = connect(host='mongodb://localhost/test?replicaSet=local-rs') + connect(host="mongodb://localhost/test?replicaSet=local-rs") db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'test') + assert isinstance(db, pymongo.database.Database) + assert db.name == "test" def test_connect_with_replicaset_via_kwargs(self): """Ensure connect() works when specifying a replicaSet via the connection kwargs """ - c = connect(replicaset='local-rs') - self.assertEqual(c._MongoClient__options.replica_set_name, - 'local-rs') + c = connect(replicaset="local-rs") + assert c._MongoClient__options.replica_set_name == "local-rs" db = get_db() - self.assertIsInstance(db, pymongo.database.Database) - self.assertEqual(db.name, 'test') + assert isinstance(db, pymongo.database.Database) + assert db.name == "test" def test_connect_tz_aware(self): - connect('mongoenginetest', tz_aware=True) + connect("mongoenginetest", tz_aware=True) d = datetime.datetime(2010, 5, 5, tzinfo=utc) class DateDoc(Document): @@ -587,40 +658,42 @@ class ConnectionTest(unittest.TestCase): DateDoc(the_date=d).save() date_doc = DateDoc.objects.first() - self.assertEqual(d, date_doc.the_date) + assert d == date_doc.the_date def test_read_preference_from_parse(self): - conn = connect(host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred") - self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED) + conn = connect( + host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred" + ) + assert conn.read_preference == ReadPreference.SECONDARY_PREFERRED def test_multiple_connection_settings(self): - connect('mongoenginetest', alias='t1', host="localhost") + connect("mongoenginetest", alias="t1", host="localhost") - connect('mongoenginetest2', alias='t2', host="127.0.0.1") + connect("mongoenginetest2", alias="t2", host="127.0.0.1") mongo_connections = mongoengine.connection._connections - 
self.assertEqual(len(mongo_connections.items()), 2) - self.assertIn('t1', mongo_connections.keys()) - self.assertIn('t2', mongo_connections.keys()) + assert len(mongo_connections.items()) == 2 + assert "t1" in mongo_connections.keys() + assert "t2" in mongo_connections.keys() # Handle PyMongo 3+ Async Connection # Ensure we are connected, throws ServerSelectionTimeoutError otherwise. # Purposely not catching exception to fail test if thrown. - mongo_connections['t1'].server_info() - mongo_connections['t2'].server_info() - self.assertEqual(mongo_connections['t1'].address[0], 'localhost') - self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1') + mongo_connections["t1"].server_info() + mongo_connections["t2"].server_info() + assert mongo_connections["t1"].address[0] == "localhost" + assert mongo_connections["t2"].address[0] == "127.0.0.1" def test_connect_2_databases_uses_same_client_if_only_dbname_differs(self): - c1 = connect(alias='testdb1', db='testdb1') - c2 = connect(alias='testdb2', db='testdb2') - self.assertIs(c1, c2) + c1 = connect(alias="testdb1", db="testdb1") + c2 = connect(alias="testdb2", db="testdb2") + assert c1 is c2 def test_connect_2_databases_uses_different_client_if_different_parameters(self): - c1 = connect(alias='testdb1', db='testdb1', username='u1') - c2 = connect(alias='testdb2', db='testdb2', username='u2') - self.assertIsNot(c1, c2) + c1 = connect(alias="testdb1", db="testdb1", username="u1") + c2 = connect(alias="testdb2", db="testdb2", username="u2") + assert c1 is not c2 -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index 529032fe..a4864c40 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -1,18 +1,71 @@ import unittest +import pytest + from mongoengine import * from mongoengine.connection import get_db -from mongoengine.context_managers import (switch_db, switch_collection, - 
no_sub_classes, no_dereference, - query_counter) +from mongoengine.context_managers import ( + no_dereference, + no_sub_classes, + query_counter, + set_read_write_concern, + set_write_concern, + switch_collection, + switch_db, +) from mongoengine.pymongo_support import count_documents -class ContextManagersTest(unittest.TestCase): +class TestContextManagers: + def test_set_write_concern(self): + connect("mongoenginetest") + + class User(Document): + name = StringField() + + collection = User._get_collection() + original_write_concern = collection.write_concern + + with set_write_concern( + collection, {"w": "majority", "j": True, "wtimeout": 1234} + ) as updated_collection: + assert updated_collection.write_concern.document == { + "w": "majority", + "j": True, + "wtimeout": 1234, + } + + assert original_write_concern.document == collection.write_concern.document + + def test_set_read_write_concern(self): + connect("mongoenginetest") + + class User(Document): + name = StringField() + + collection = User._get_collection() + + original_read_concern = collection.read_concern + original_write_concern = collection.write_concern + + with set_read_write_concern( + collection, + {"w": "majority", "j": True, "wtimeout": 1234}, + {"level": "local"}, + ) as update_collection: + assert update_collection.read_concern.document == {"level": "local"} + assert update_collection.write_concern.document == { + "w": "majority", + "j": True, + "wtimeout": 1234, + } + + assert original_read_concern.document == collection.read_concern.document + assert original_write_concern.document == collection.write_concern.document def test_switch_db_context_manager(self): - connect('mongoenginetest') - register_connection('testdb-1', 'mongoenginetest2') + connect("mongoenginetest") + register_connection("testdb-1", "mongoenginetest2") class Group(Document): name = StringField() @@ -20,53 +73,53 @@ class ContextManagersTest(unittest.TestCase): Group.drop_collection() Group(name="hello - 
default").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() - with switch_db(Group, 'testdb-1') as Group: + with switch_db(Group, "testdb-1") as Group: - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() Group(name="hello").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() Group.drop_collection() - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() def test_switch_collection_context_manager(self): - connect('mongoenginetest') - register_connection(alias='testdb-1', db='mongoenginetest2') + connect("mongoenginetest") + register_connection(alias="testdb-1", db="mongoenginetest2") class Group(Document): name = StringField() - Group.drop_collection() # drops in default + Group.drop_collection() # drops in default - with switch_collection(Group, 'group1') as Group: - Group.drop_collection() # drops in group1 + with switch_collection(Group, "group1") as Group: + Group.drop_collection() # drops in group1 Group(name="hello - group").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() - with switch_collection(Group, 'group1') as Group: + with switch_collection(Group, "group1") as Group: - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() Group(name="hello - group1").save() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() Group.drop_collection() - self.assertEqual(0, Group.objects.count()) + assert 0 == Group.objects.count() - self.assertEqual(1, Group.objects.count()) + assert 1 == Group.objects.count() def test_no_dereference_context_manager_object_id(self): """Ensure that DBRef items in ListFields aren't dereferenced. 
""" - connect('mongoenginetest') + connect("mongoenginetest") class User(Document): name = StringField() @@ -80,31 +133,31 @@ class ContextManagersTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - User(name='user %s' % i).save() + User(name="user %s" % i).save() user = User.objects.first() Group(ref=user, members=User.objects, generic=user).save() with no_dereference(Group) as NoDeRefGroup: - self.assertTrue(Group._fields['members']._auto_dereference) - self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) + assert Group._fields["members"]._auto_dereference + assert not NoDeRefGroup._fields["members"]._auto_dereference with no_dereference(Group) as Group: group = Group.objects.first() for m in group.members: - self.assertNotIsInstance(m, User) - self.assertNotIsInstance(group.ref, User) - self.assertNotIsInstance(group.generic, User) + assert not isinstance(m, User) + assert not isinstance(group.ref, User) + assert not isinstance(group.generic, User) for m in group.members: - self.assertIsInstance(m, User) - self.assertIsInstance(group.ref, User) - self.assertIsInstance(group.generic, User) + assert isinstance(m, User) + assert isinstance(group.ref, User) + assert isinstance(group.generic, User) def test_no_dereference_context_manager_dbref(self): """Ensure that DBRef items in ListFields aren't dereferenced. 
""" - connect('mongoenginetest') + connect("mongoenginetest") class User(Document): name = StringField() @@ -118,31 +171,29 @@ class ContextManagersTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - User(name='user %s' % i).save() + User(name="user %s" % i).save() user = User.objects.first() Group(ref=user, members=User.objects, generic=user).save() with no_dereference(Group) as NoDeRefGroup: - self.assertTrue(Group._fields['members']._auto_dereference) - self.assertFalse(NoDeRefGroup._fields['members']._auto_dereference) + assert Group._fields["members"]._auto_dereference + assert not NoDeRefGroup._fields["members"]._auto_dereference with no_dereference(Group) as Group: group = Group.objects.first() - self.assertTrue(all([not isinstance(m, User) - for m in group.members])) - self.assertNotIsInstance(group.ref, User) - self.assertNotIsInstance(group.generic, User) + assert all([not isinstance(m, User) for m in group.members]) + assert not isinstance(group.ref, User) + assert not isinstance(group.generic, User) - self.assertTrue(all([isinstance(m, User) - for m in group.members])) - self.assertIsInstance(group.ref, User) - self.assertIsInstance(group.generic, User) + assert all([isinstance(m, User) for m in group.members]) + assert isinstance(group.ref, User) + assert isinstance(group.generic, User) def test_no_sub_classes(self): class A(Document): x = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class B(A): z = IntField() @@ -158,84 +209,85 @@ class ContextManagersTest(unittest.TestCase): B(x=30).save() C(x=40).save() - self.assertEqual(A.objects.count(), 5) - self.assertEqual(B.objects.count(), 3) - self.assertEqual(C.objects.count(), 1) + assert A.objects.count() == 5 + assert B.objects.count() == 3 + assert C.objects.count() == 1 with no_sub_classes(A): - self.assertEqual(A.objects.count(), 2) + assert A.objects.count() == 2 for obj in A.objects: - self.assertEqual(obj.__class__, A) + assert 
obj.__class__ == A with no_sub_classes(B): - self.assertEqual(B.objects.count(), 2) + assert B.objects.count() == 2 for obj in B.objects: - self.assertEqual(obj.__class__, B) + assert obj.__class__ == B with no_sub_classes(C): - self.assertEqual(C.objects.count(), 1) + assert C.objects.count() == 1 for obj in C.objects: - self.assertEqual(obj.__class__, C) + assert obj.__class__ == C # Confirm context manager exit correctly - self.assertEqual(A.objects.count(), 5) - self.assertEqual(B.objects.count(), 3) - self.assertEqual(C.objects.count(), 1) + assert A.objects.count() == 5 + assert B.objects.count() == 3 + assert C.objects.count() == 1 def test_no_sub_classes_modification_to_document_class_are_temporary(self): class A(Document): x = IntField() - meta = {'allow_inheritance': True} + meta = {"allow_inheritance": True} class B(A): z = IntField() - self.assertEqual(A._subclasses, ('A', 'A.B')) + assert A._subclasses == ("A", "A.B") with no_sub_classes(A): - self.assertEqual(A._subclasses, ('A',)) - self.assertEqual(A._subclasses, ('A', 'A.B')) + assert A._subclasses == ("A",) + assert A._subclasses == ("A", "A.B") - self.assertEqual(B._subclasses, ('A.B',)) + assert B._subclasses == ("A.B",) with no_sub_classes(B): - self.assertEqual(B._subclasses, ('A.B',)) - self.assertEqual(B._subclasses, ('A.B',)) + assert B._subclasses == ("A.B",) + assert B._subclasses == ("A.B",) def test_no_subclass_context_manager_does_not_swallow_exception(self): class User(Document): name = StringField() - with self.assertRaises(TypeError): + with pytest.raises(TypeError): with no_sub_classes(User): raise TypeError() def test_query_counter_does_not_swallow_exception(self): - - with self.assertRaises(TypeError): - with query_counter() as q: + with pytest.raises(TypeError): + with query_counter(): raise TypeError() def test_query_counter_temporarily_modifies_profiling_level(self): - connect('mongoenginetest') + connect("mongoenginetest") db = get_db() initial_profiling_level = 
db.profiling_level() try: - NEW_LEVEL = 1 - db.set_profiling_level(NEW_LEVEL) - self.assertEqual(db.profiling_level(), NEW_LEVEL) - with query_counter() as q: - self.assertEqual(db.profiling_level(), 2) - self.assertEqual(db.profiling_level(), NEW_LEVEL) + new_level = 1 + db.set_profiling_level(new_level) + assert db.profiling_level() == new_level + with query_counter(): + assert db.profiling_level() == 2 + assert db.profiling_level() == new_level except Exception: - db.set_profiling_level(initial_profiling_level) # Ensures it gets reseted no matter the outcome of the test + db.set_profiling_level( + initial_profiling_level + ) # Ensures it gets reseted no matter the outcome of the test raise def test_query_counter(self): - connect('mongoenginetest') + connect("mongoenginetest") db = get_db() collection = db.query_counter @@ -245,73 +297,123 @@ class ContextManagersTest(unittest.TestCase): count_documents(collection, {}) def issue_1_insert_query(): - collection.insert_one({'test': 'garbage'}) + collection.insert_one({"test": "garbage"}) def issue_1_find_query(): collection.find_one() counter = 0 with query_counter() as q: - self.assertEqual(q, counter) - self.assertEqual(q, counter) # Ensures previous count query did not get counted + assert q == counter + assert q == counter # Ensures previous count query did not get counted for _ in range(10): issue_1_insert_query() counter += 1 - self.assertEqual(q, counter) + assert q == counter for _ in range(4): issue_1_find_query() counter += 1 - self.assertEqual(q, counter) + assert q == counter for _ in range(3): issue_1_count_query() counter += 1 - self.assertEqual(q, counter) + assert q == counter - self.assertEqual(int(q), counter) # test __int__ - self.assertEqual(repr(q), str(int(q))) # test __repr__ - self.assertGreater(q, -1) # test __gt__ - self.assertGreaterEqual(q, int(q)) # test __gte__ - self.assertNotEqual(q, -1) - self.assertLess(q, 1000) - self.assertLessEqual(q, int(q)) + assert int(q) == counter # test 
__int__ + assert repr(q) == str(int(q)) # test __repr__ + assert q > -1 # test __gt__ + assert q >= int(q) # test __gte__ + assert q != -1 + assert q < 1000 + assert q <= int(q) + + def test_query_counter_alias(self): + """query_counter works properly with db aliases?""" + # Register a connection with db_alias testdb-1 + register_connection("testdb-1", "mongoenginetest2") + + class A(Document): + """Uses default db_alias""" + + name = StringField() + + class B(Document): + """Uses testdb-1 db_alias""" + + name = StringField() + meta = {"db_alias": "testdb-1"} + + A.drop_collection() + B.drop_collection() + + with query_counter() as q: + assert q == 0 + A.objects.create(name="A") + assert q == 1 + a = A.objects.first() + assert q == 2 + a.name = "Test A" + a.save() + assert q == 3 + # querying the other db shouldn't alter the counter + B.objects().first() + assert q == 3 + + with query_counter(alias="testdb-1") as q: + assert q == 0 + B.objects.create(name="B") + assert q == 1 + b = B.objects.first() + assert q == 2 + b.name = "Test B" + b.save() + assert b.name == "Test B" + assert q == 3 + # querying the other db shouldn't alter the counter + A.objects().first() + assert q == 3 def test_query_counter_counts_getmore_queries(self): - connect('mongoenginetest') + connect("mongoenginetest") db = get_db() collection = db.query_counter collection.drop() - many_docs = [{'test': 'garbage %s' % i} for i in range(150)] - collection.insert_many(many_docs) # first batch of documents contains 101 documents + many_docs = [{"test": "garbage %s" % i} for i in range(150)] + collection.insert_many( + many_docs + ) # first batch of documents contains 101 documents with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 list(collection.find()) - self.assertEqual(q, 2) # 1st select + 1 getmore + assert q == 2 # 1st select + 1 getmore def test_query_counter_ignores_particular_queries(self): - connect('mongoenginetest') + connect("mongoenginetest") db = get_db() collection = 
db.query_counter - collection.insert_many([{'test': 'garbage %s' % i} for i in range(10)]) + collection.insert_many([{"test": "garbage %s" % i} for i in range(10)]) with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 cursor = collection.find() - self.assertEqual(q, 0) # cursor wasn't opened yet - _ = next(cursor) # opens the cursor and fires the find query - self.assertEqual(q, 1) + assert q == 0 # cursor wasn't opened yet + _ = next(cursor) # opens the cursor and fires the find query + assert q == 1 - cursor.close() # issues a `killcursors` query that is ignored by the context - self.assertEqual(q, 1) - _ = db.system.indexes.find_one() # queries on db.system.indexes are ignored as well - self.assertEqual(q, 1) + cursor.close() # issues a `killcursors` query that is ignored by the context + assert q == 1 + _ = ( + db.system.indexes.find_one() + ) # queries on db.system.indexes are ignored as well + assert q == 1 -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py index a9ef98e7..f4b63f05 100644 --- a/tests/test_datastructures.py +++ b/tests/test_datastructures.py @@ -1,161 +1,168 @@ import unittest -from six import iterkeys + +import pytest from mongoengine import Document -from mongoengine.base.datastructures import StrictDict, BaseList, BaseDict +from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict class DocumentStub(object): def __init__(self): self._changed_fields = [] + self._unset_fields = [] def _mark_as_changed(self, key): self._changed_fields.append(key) + def _mark_as_unset(self, key): + self._unset_fields.append(key) -class TestBaseDict(unittest.TestCase): +class TestBaseDict: @staticmethod def _get_basedict(dict_items): """Get a BaseList bound to a fake document instance""" fake_doc = DocumentStub() - base_list = BaseDict(dict_items, instance=None, name='my_name') - base_list._instance = fake_doc # hack to inject the mock, 
it does not work in the constructor + base_list = BaseDict(dict_items, instance=None, name="my_name") + base_list._instance = ( + fake_doc # hack to inject the mock, it does not work in the constructor + ) return base_list def test___init___(self): class MyDoc(Document): pass - dict_items = {'k': 'v'} + dict_items = {"k": "v"} doc = MyDoc() - base_dict = BaseDict(dict_items, instance=doc, name='my_name') - self.assertIsInstance(base_dict._instance, Document) - self.assertEqual(base_dict._name, 'my_name') - self.assertEqual(base_dict, dict_items) + base_dict = BaseDict(dict_items, instance=doc, name="my_name") + assert isinstance(base_dict._instance, Document) + assert base_dict._name == "my_name" + assert base_dict == dict_items def test_setdefault_calls_mark_as_changed(self): base_dict = self._get_basedict({}) - base_dict.setdefault('k', 'v') - self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) + base_dict.setdefault("k", "v") + assert base_dict._instance._changed_fields == [base_dict._name] def test_popitems_calls_mark_as_changed(self): - base_dict = self._get_basedict({'k': 'v'}) - self.assertEqual(base_dict.popitem(), ('k', 'v')) - self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) - self.assertFalse(base_dict) + base_dict = self._get_basedict({"k": "v"}) + assert base_dict.popitem() == ("k", "v") + assert base_dict._instance._changed_fields == [base_dict._name] + assert not base_dict def test_pop_calls_mark_as_changed(self): - base_dict = self._get_basedict({'k': 'v'}) - self.assertEqual(base_dict.pop('k'), 'v') - self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) - self.assertFalse(base_dict) + base_dict = self._get_basedict({"k": "v"}) + assert base_dict.pop("k") == "v" + assert base_dict._instance._changed_fields == [base_dict._name] + assert not base_dict def test_pop_calls_does_not_mark_as_changed_when_it_fails(self): - base_dict = self._get_basedict({'k': 'v'}) - with 
self.assertRaises(KeyError): - base_dict.pop('X') - self.assertFalse(base_dict._instance._changed_fields) + base_dict = self._get_basedict({"k": "v"}) + with pytest.raises(KeyError): + base_dict.pop("X") + assert not base_dict._instance._changed_fields def test_clear_calls_mark_as_changed(self): - base_dict = self._get_basedict({'k': 'v'}) + base_dict = self._get_basedict({"k": "v"}) base_dict.clear() - self.assertEqual(base_dict._instance._changed_fields, ['my_name']) - self.assertEqual(base_dict, {}) + assert base_dict._instance._changed_fields == ["my_name"] + assert base_dict == {} def test___delitem___calls_mark_as_changed(self): - base_dict = self._get_basedict({'k': 'v'}) - del base_dict['k'] - self.assertEqual(base_dict._instance._changed_fields, ['my_name.k']) - self.assertEqual(base_dict, {}) + base_dict = self._get_basedict({"k": "v"}) + del base_dict["k"] + assert base_dict._instance._changed_fields == ["my_name.k"] + assert base_dict == {} def test___getitem____KeyError(self): base_dict = self._get_basedict({}) - with self.assertRaises(KeyError): - base_dict['new'] + with pytest.raises(KeyError): + base_dict["new"] def test___getitem____simple_value(self): - base_dict = self._get_basedict({'k': 'v'}) - base_dict['k'] = 'v' + base_dict = self._get_basedict({"k": "v"}) + base_dict["k"] = "v" def test___getitem____sublist_gets_converted_to_BaseList(self): - base_dict = self._get_basedict({'k': [0, 1, 2]}) - sub_list = base_dict['k'] - self.assertEqual(sub_list, [0, 1, 2]) - self.assertIsInstance(sub_list, BaseList) - self.assertIs(sub_list._instance, base_dict._instance) - self.assertEqual(sub_list._name, 'my_name.k') - self.assertEqual(base_dict._instance._changed_fields, []) + base_dict = self._get_basedict({"k": [0, 1, 2]}) + sub_list = base_dict["k"] + assert sub_list == [0, 1, 2] + assert isinstance(sub_list, BaseList) + assert sub_list._instance is base_dict._instance + assert sub_list._name == "my_name.k" + assert 
base_dict._instance._changed_fields == [] # Challenge mark_as_changed from sublist sub_list[1] = None - self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.1']) + assert base_dict._instance._changed_fields == ["my_name.k.1"] def test___getitem____subdict_gets_converted_to_BaseDict(self): - base_dict = self._get_basedict({'k': {'subk': 'subv'}}) - sub_dict = base_dict['k'] - self.assertEqual(sub_dict, {'subk': 'subv'}) - self.assertIsInstance(sub_dict, BaseDict) - self.assertIs(sub_dict._instance, base_dict._instance) - self.assertEqual(sub_dict._name, 'my_name.k') - self.assertEqual(base_dict._instance._changed_fields, []) + base_dict = self._get_basedict({"k": {"subk": "subv"}}) + sub_dict = base_dict["k"] + assert sub_dict == {"subk": "subv"} + assert isinstance(sub_dict, BaseDict) + assert sub_dict._instance is base_dict._instance + assert sub_dict._name == "my_name.k" + assert base_dict._instance._changed_fields == [] # Challenge mark_as_changed from subdict - sub_dict['subk'] = None - self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.subk']) + sub_dict["subk"] = None + assert base_dict._instance._changed_fields == ["my_name.k.subk"] def test_get_sublist_gets_converted_to_BaseList_just_like__getitem__(self): - base_dict = self._get_basedict({'k': [0, 1, 2]}) - sub_list = base_dict.get('k') - self.assertEqual(sub_list, [0, 1, 2]) - self.assertIsInstance(sub_list, BaseList) + base_dict = self._get_basedict({"k": [0, 1, 2]}) + sub_list = base_dict.get("k") + assert sub_list == [0, 1, 2] + assert isinstance(sub_list, BaseList) def test_get_returns_the_same_as___getitem__(self): - base_dict = self._get_basedict({'k': [0, 1, 2]}) - get_ = base_dict.get('k') - getitem_ = base_dict['k'] - self.assertEqual(get_, getitem_) + base_dict = self._get_basedict({"k": [0, 1, 2]}) + get_ = base_dict.get("k") + getitem_ = base_dict["k"] + assert get_ == getitem_ def test_get_default(self): base_dict = self._get_basedict({}) sentinel = object() - 
self.assertEqual(base_dict.get('new'), None) - self.assertIs(base_dict.get('new', sentinel), sentinel) + assert base_dict.get("new") is None + assert base_dict.get("new", sentinel) is sentinel def test___setitem___calls_mark_as_changed(self): base_dict = self._get_basedict({}) - base_dict['k'] = 'v' - self.assertEqual(base_dict._instance._changed_fields, ['my_name.k']) - self.assertEqual(base_dict, {'k': 'v'}) + base_dict["k"] = "v" + assert base_dict._instance._changed_fields == ["my_name.k"] + assert base_dict == {"k": "v"} def test_update_calls_mark_as_changed(self): base_dict = self._get_basedict({}) - base_dict.update({'k': 'v'}) - self.assertEqual(base_dict._instance._changed_fields, ['my_name']) + base_dict.update({"k": "v"}) + assert base_dict._instance._changed_fields == ["my_name"] def test___setattr____not_tracked_by_changes(self): base_dict = self._get_basedict({}) - base_dict.a_new_attr = 'test' - self.assertEqual(base_dict._instance._changed_fields, []) + base_dict.a_new_attr = "test" + assert base_dict._instance._changed_fields == [] def test___delattr____tracked_by_changes(self): # This is probably a bug as __setattr__ is not tracked # This is even bad because it could be that there is an attribute # with the same name as a key base_dict = self._get_basedict({}) - base_dict.a_new_attr = 'test' + base_dict.a_new_attr = "test" del base_dict.a_new_attr - self.assertEqual(base_dict._instance._changed_fields, ['my_name.a_new_attr']) + assert base_dict._instance._changed_fields == ["my_name.a_new_attr"] -class TestBaseList(unittest.TestCase): - +class TestBaseList: @staticmethod def _get_baselist(list_items): """Get a BaseList bound to a fake document instance""" fake_doc = DocumentStub() - base_list = BaseList(list_items, instance=None, name='my_name') - base_list._instance = fake_doc # hack to inject the mock, it does not work in the constructor + base_list = BaseList(list_items, instance=None, name="my_name") + base_list._instance = ( + fake_doc # hack 
to inject the mock, it does not work in the constructor + ) return base_list def test___init___(self): @@ -164,28 +171,28 @@ class TestBaseList(unittest.TestCase): list_items = [True] doc = MyDoc() - base_list = BaseList(list_items, instance=doc, name='my_name') - self.assertIsInstance(base_list._instance, Document) - self.assertEqual(base_list._name, 'my_name') - self.assertEqual(base_list, list_items) + base_list = BaseList(list_items, instance=doc, name="my_name") + assert isinstance(base_list._instance, Document) + assert base_list._name == "my_name" + assert base_list == list_items def test___iter__(self): values = [True, False, True, False] - base_list = BaseList(values, instance=None, name='my_name') - self.assertEqual(values, list(base_list)) + base_list = BaseList(values, instance=None, name="my_name") + assert values == list(base_list) def test___iter___allow_modification_while_iterating_withou_error(self): # regular list allows for this, thus this subclass must comply to that - base_list = BaseList([True, False, True, False], instance=None, name='my_name') + base_list = BaseList([True, False, True, False], instance=None, name="my_name") for idx, val in enumerate(base_list): if val: base_list.pop(idx) def test_append_calls_mark_as_changed(self): base_list = self._get_baselist([]) - self.assertFalse(base_list._instance._changed_fields) + assert not base_list._instance._changed_fields base_list.append(True) - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test_subclass_append(self): # Due to the way mark_as_changed_wrapper is implemented @@ -193,206 +200,203 @@ class TestBaseList(unittest.TestCase): class SubBaseList(BaseList): pass - base_list = SubBaseList([], instance=None, name='my_name') + base_list = SubBaseList([], instance=None, name="my_name") base_list.append(True) def test___getitem__using_simple_index(self): base_list = self._get_baselist([0, 1, 2]) - 
self.assertEqual(base_list[0], 0) - self.assertEqual(base_list[1], 1) - self.assertEqual(base_list[-1], 2) + assert base_list[0] == 0 + assert base_list[1] == 1 + assert base_list[-1] == 2 def test___getitem__using_slice(self): base_list = self._get_baselist([0, 1, 2]) - self.assertEqual(base_list[1:3], [1, 2]) - self.assertEqual(base_list[0:3:2], [0, 2]) + assert base_list[1:3] == [1, 2] + assert base_list[0:3:2] == [0, 2] def test___getitem___using_slice_returns_list(self): # Bug: using slice does not properly handles the instance # and mark_as_changed behaviour. base_list = self._get_baselist([0, 1, 2]) sliced = base_list[1:3] - self.assertEqual(sliced, [1, 2]) - self.assertIsInstance(sliced, list) - self.assertEqual(base_list._instance._changed_fields, []) + assert sliced == [1, 2] + assert isinstance(sliced, list) + assert base_list._instance._changed_fields == [] def test___getitem__sublist_returns_BaseList_bound_to_instance(self): - base_list = self._get_baselist( - [ - [1, 2], - [3, 4] - ] - ) + base_list = self._get_baselist([[1, 2], [3, 4]]) sub_list = base_list[0] - self.assertEqual(sub_list, [1, 2]) - self.assertIsInstance(sub_list, BaseList) - self.assertIs(sub_list._instance, base_list._instance) - self.assertEqual(sub_list._name, 'my_name.0') - self.assertEqual(base_list._instance._changed_fields, []) + assert sub_list == [1, 2] + assert isinstance(sub_list, BaseList) + assert sub_list._instance is base_list._instance + assert sub_list._name == "my_name.0" + assert base_list._instance._changed_fields == [] # Challenge mark_as_changed from sublist sub_list[1] = None - self.assertEqual(base_list._instance._changed_fields, ['my_name.0.1']) + assert base_list._instance._changed_fields == ["my_name.0.1"] def test___getitem__subdict_returns_BaseList_bound_to_instance(self): - base_list = self._get_baselist( - [ - {'subk': 'subv'} - ] - ) + base_list = self._get_baselist([{"subk": "subv"}]) sub_dict = base_list[0] - self.assertEqual(sub_dict, {'subk': 
'subv'}) - self.assertIsInstance(sub_dict, BaseDict) - self.assertIs(sub_dict._instance, base_list._instance) - self.assertEqual(sub_dict._name, 'my_name.0') - self.assertEqual(base_list._instance._changed_fields, []) + assert sub_dict == {"subk": "subv"} + assert isinstance(sub_dict, BaseDict) + assert sub_dict._instance is base_list._instance + assert sub_dict._name == "my_name.0" + assert base_list._instance._changed_fields == [] # Challenge mark_as_changed from subdict - sub_dict['subk'] = None - self.assertEqual(base_list._instance._changed_fields, ['my_name.0.subk']) + sub_dict["subk"] = None + assert base_list._instance._changed_fields == ["my_name.0.subk"] def test_extend_calls_mark_as_changed(self): base_list = self._get_baselist([]) base_list.extend([True]) - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test_insert_calls_mark_as_changed(self): base_list = self._get_baselist([]) base_list.insert(0, True) - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test_remove_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list.remove(True) - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test_remove_not_mark_as_changed_when_it_fails(self): base_list = self._get_baselist([True]) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): base_list.remove(False) - self.assertFalse(base_list._instance._changed_fields) + assert not base_list._instance._changed_fields def test_pop_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list.pop() - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test_reverse_calls_mark_as_changed(self): base_list = self._get_baselist([True, False]) 
base_list.reverse() - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test___delitem___calls_mark_as_changed(self): base_list = self._get_baselist([True]) del base_list[0] - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test___setitem___calls_with_full_slice_mark_as_changed(self): base_list = self._get_baselist([]) - base_list[:] = [0, 1] # Will use __setslice__ under py2 and __setitem__ under py3 - self.assertEqual(base_list._instance._changed_fields, ['my_name']) - self.assertEqual(base_list, [0, 1]) + base_list[:] = [ + 0, + 1, + ] + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [0, 1] def test___setitem___calls_with_partial_slice_mark_as_changed(self): base_list = self._get_baselist([0, 1, 2]) - base_list[0:2] = [1, 0] # Will use __setslice__ under py2 and __setitem__ under py3 - self.assertEqual(base_list._instance._changed_fields, ['my_name']) - self.assertEqual(base_list, [1, 0, 2]) + base_list[0:2] = [ + 1, + 0, + ] + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [1, 0, 2] def test___setitem___calls_with_step_slice_mark_as_changed(self): base_list = self._get_baselist([0, 1, 2]) - base_list[0:3:2] = [-1, -2] # uses __setitem__ in both py2 & 3 - self.assertEqual(base_list._instance._changed_fields, ['my_name']) - self.assertEqual(base_list, [-1, 1, -2]) + base_list[0:3:2] = [-1, -2] # uses __setitem__ + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [-1, 1, -2] def test___setitem___with_slice(self): base_list = self._get_baselist([0, 1, 2, 3, 4, 5]) base_list[0:6:2] = [None, None, None] - self.assertEqual(base_list._instance._changed_fields, ['my_name']) - self.assertEqual(base_list, [None, 1, None, 3, None, 5]) + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == 
[None, 1, None, 3, None, 5] def test___setitem___item_0_calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list[0] = False - self.assertEqual(base_list._instance._changed_fields, ['my_name']) - self.assertEqual(base_list, [False]) + assert base_list._instance._changed_fields == ["my_name.0"] + assert base_list == [False] def test___setitem___item_1_calls_mark_as_changed(self): base_list = self._get_baselist([True, True]) base_list[1] = False - self.assertEqual(base_list._instance._changed_fields, ['my_name.1']) - self.assertEqual(base_list, [True, False]) + assert base_list._instance._changed_fields == ["my_name.1"] + assert base_list == [True, False] def test___delslice___calls_mark_as_changed(self): base_list = self._get_baselist([0, 1]) del base_list[0:1] - self.assertEqual(base_list._instance._changed_fields, ['my_name']) - self.assertEqual(base_list, [1]) + assert base_list._instance._changed_fields == ["my_name"] + assert base_list == [1] def test___iadd___calls_mark_as_changed(self): base_list = self._get_baselist([True]) base_list += [False] - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test___imul___calls_mark_as_changed(self): base_list = self._get_baselist([True]) - self.assertEqual(base_list._instance._changed_fields, []) + assert base_list._instance._changed_fields == [] base_list *= 2 - self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test_sort_calls_not_marked_as_changed_when_it_fails(self): base_list = self._get_baselist([True]) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): base_list.sort(key=1) - self.assertEqual(base_list._instance._changed_fields, []) + assert base_list._instance._changed_fields == [] def test_sort_calls_mark_as_changed(self): base_list = self._get_baselist([True, False]) base_list.sort() - 
self.assertEqual(base_list._instance._changed_fields, ['my_name']) + assert base_list._instance._changed_fields == ["my_name"] def test_sort_calls_with_key(self): base_list = self._get_baselist([1, 2, 11]) base_list.sort(key=lambda i: str(i)) - self.assertEqual(base_list, [1, 11, 2]) + assert base_list == [1, 11, 2] class TestStrictDict(unittest.TestCase): - def strict_dict_class(self, *args, **kwargs): - return StrictDict.create(*args, **kwargs) - def setUp(self): self.dtype = self.strict_dict_class(("a", "b", "c")) + def strict_dict_class(self, *args, **kwargs): + return StrictDict.create(*args, **kwargs) + def test_init(self): d = self.dtype(a=1, b=1, c=1) - self.assertEqual((d.a, d.b, d.c), (1, 1, 1)) + assert (d.a, d.b, d.c) == (1, 1, 1) def test_iterkeys(self): d = self.dtype(a=1) - self.assertEqual(list(iterkeys(d)), ['a']) + assert list(d.keys()) == ["a"] def test_len(self): d = self.dtype(a=1) - self.assertEqual(len(d), 1) + assert len(d) == 1 def test_pop(self): d = self.dtype(a=1) - self.assertIn('a', d) - d.pop('a') - self.assertNotIn('a', d) + assert "a" in d + d.pop("a") + assert "a" not in d def test_repr(self): d = self.dtype(a=1, b=2, c=3) - self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}') + assert repr(d) == '{"a": 1, "b": 2, "c": 3}' # make sure quotes are escaped properly d = self.dtype(a='"', b="'", c="") - self.assertEqual(repr(d), '{"a": \'"\', "b": "\'", "c": \'\'}') + assert repr(d) == '{"a": \'"\', "b": "\'", "c": \'\'}' def test_init_fails_on_nonexisting_attrs(self): - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): self.dtype(a=1, b=2, d=3) def test_eq(self): @@ -404,46 +408,47 @@ class TestStrictDict(unittest.TestCase): h = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1) i = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2) - self.assertEqual(d, dd) - self.assertNotEqual(d, e) - self.assertNotEqual(d, f) - self.assertNotEqual(d, g) - self.assertNotEqual(f, d) - self.assertEqual(d, h) - 
self.assertNotEqual(d, i) + assert d == dd + assert d != e + assert d != f + assert d != g + assert f != d + assert d == h + assert d != i def test_setattr_getattr(self): d = self.dtype() d.a = 1 - self.assertEqual(d.a, 1) - self.assertRaises(AttributeError, getattr, d, 'b') + assert d.a == 1 + with pytest.raises(AttributeError): + getattr(d, "b") def test_setattr_raises_on_nonexisting_attr(self): d = self.dtype() - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): d.x = 1 def test_setattr_getattr_special(self): d = self.strict_dict_class(["items"]) d.items = 1 - self.assertEqual(d.items, 1) + assert d.items == 1 def test_get(self): d = self.dtype(a=1) - self.assertEqual(d.get('a'), 1) - self.assertEqual(d.get('b', 'bla'), 'bla') + assert d.get("a") == 1 + assert d.get("b", "bla") == "bla" def test_items(self): d = self.dtype(a=1) - self.assertEqual(d.items(), [('a', 1)]) + assert d.items() == [("a", 1)] d = self.dtype(a=1, b=2) - self.assertEqual(d.items(), [('a', 1), ('b', 2)]) + assert d.items() == [("a", 1), ("b", 2)] def test_mappings_protocol(self): d = self.dtype(a=1, b=2) - self.assertEqual(dict(d), {'a': 1, 'b': 2}) - self.assertEqual(dict(**d), {'a': 1, 'b': 2}) + assert dict(d) == {"a": 1, "b": 2} + assert dict(**d) == {"a": 1, "b": 2} -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_dereference.py b/tests/test_dereference.py index 9c565810..0f9f412c 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -2,26 +2,24 @@ import unittest from bson import DBRef, ObjectId -from six import iteritems from mongoengine import * -from mongoengine.connection import get_db from mongoengine.context_managers import query_counter class FieldTest(unittest.TestCase): - @classmethod def setUpClass(cls): - cls.db = connect(db='mongoenginetest') + cls.db = connect(db="mongoenginetest") @classmethod def tearDownClass(cls): - cls.db.drop_database('mongoenginetest') + 
cls.db.drop_database("mongoenginetest") def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. """ + class User(Document): name = StringField() @@ -32,7 +30,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() group = Group(members=User.objects) @@ -42,37 +40,37 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 - len(group_obj._data['members']) - self.assertEqual(q, 1) + len(group_obj._data["members"]) + assert q == 1 len(group_obj.members) - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 2) + assert q == 2 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 User.drop_collection() Group.drop_collection() @@ -80,6 +78,7 @@ class FieldTest(unittest.TestCase): def test_list_item_dereference_dref_false(self): """Ensure that DBRef items in ListFields are dereferenced. 
""" + class User(Document): name = StringField() @@ -90,7 +89,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() group = Group(members=User.objects) @@ -98,44 +97,45 @@ class FieldTest(unittest.TestCase): group.reload() # Confirm reload works with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 2) - self.assertTrue(group_obj._data['members']._dereferenced) + assert q == 2 + assert group_obj._data["members"]._dereferenced # verifies that no additional queries gets executed # if we re-iterate over the ListField once it is # dereferenced [m for m in group_obj.members] - self.assertEqual(q, 2) - self.assertTrue(group_obj._data['members']._dereferenced) + assert q == 2 + assert group_obj._data["members"]._dereferenced # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 2) + assert q == 2 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 def test_list_item_dereference_orphan_dbref(self): """Ensure that orphan DBRef items in ListFields are dereferenced. 
""" + class User(Document): name = StringField() @@ -146,7 +146,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() group = Group(members=User.objects) @@ -157,21 +157,21 @@ class FieldTest(unittest.TestCase): # Group.members list is an orphan DBRef User.objects[0].delete() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 2) - self.assertTrue(group_obj._data['members']._dereferenced) + assert q == 2 + assert group_obj._data["members"]._dereferenced # verifies that no additional queries gets executed # if we re-iterate over the ListField once it is # dereferenced [m for m in group_obj.members] - self.assertEqual(q, 2) - self.assertTrue(group_obj._data['members']._dereferenced) + assert q == 2 + assert group_obj._data["members"]._dereferenced User.drop_collection() Group.drop_collection() @@ -179,6 +179,7 @@ class FieldTest(unittest.TestCase): def test_list_item_dereference_dref_false_stores_as_type(self): """Ensure that DBRef items are stored as their type """ + class User(Document): my_id = IntField(primary_key=True) name = StringField() @@ -189,17 +190,18 @@ class FieldTest(unittest.TestCase): User.drop_collection() Group.drop_collection() - user = User(my_id=1, name='user 1').save() + user = User(my_id=1, name="user 1").save() Group(members=User.objects).save() group = Group.objects.first() - self.assertEqual(Group._get_collection().find_one()['members'], [1]) - self.assertEqual(group.members, [user]) + assert Group._get_collection().find_one()["members"] == [1] + assert group.members == [user] def test_handle_old_style_references(self): """Ensure that DBRef items in ListFields are dereferenced. 
""" + class User(Document): name = StringField() @@ -210,7 +212,7 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 26): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() group = Group(members=User.objects) @@ -227,8 +229,8 @@ class FieldTest(unittest.TestCase): group.save() group = Group.objects.first() - self.assertEqual(group.members[0].name, 'user 1') - self.assertEqual(group.members[-1].name, 'String!') + assert group.members[0].name == "user 1" + assert group.members[-1].name == "String!" def test_migrate_references(self): """Example of migrating ReferenceField storage @@ -249,12 +251,12 @@ class FieldTest(unittest.TestCase): group = Group(author=user, members=[user]).save() raw_data = Group._get_collection().find_one() - self.assertIsInstance(raw_data['author'], DBRef) - self.assertIsInstance(raw_data['members'][0], DBRef) + assert isinstance(raw_data["author"], DBRef) + assert isinstance(raw_data["members"][0], DBRef) group = Group.objects.first() - self.assertEqual(group.author, user) - self.assertEqual(group.members, [user]) + assert group.author == user + assert group.members == [user] # Migrate the model definition class Group(Document): @@ -264,86 +266,86 @@ class FieldTest(unittest.TestCase): # Migrate the data for g in Group.objects(): # Explicitly mark as changed so resets - g._mark_as_changed('author') - g._mark_as_changed('members') + g._mark_as_changed("author") + g._mark_as_changed("members") g.save() group = Group.objects.first() - self.assertEqual(group.author, user) - self.assertEqual(group.members, [user]) + assert group.author == user + assert group.members == [user] raw_data = Group._get_collection().find_one() - self.assertIsInstance(raw_data['author'], ObjectId) - self.assertIsInstance(raw_data['members'][0], ObjectId) + assert isinstance(raw_data["author"], ObjectId) + assert isinstance(raw_data["members"][0], ObjectId) def test_recursive_reference(self): """Ensure that 
ReferenceFields can reference their own documents. """ + class Employee(Document): name = StringField() - boss = ReferenceField('self') - friends = ListField(ReferenceField('self')) + boss = ReferenceField("self") + friends = ListField(ReferenceField("self")) Employee.drop_collection() - bill = Employee(name='Bill Lumbergh') + bill = Employee(name="Bill Lumbergh") bill.save() - michael = Employee(name='Michael Bolton') + michael = Employee(name="Michael Bolton") michael.save() - samir = Employee(name='Samir Nagheenanajar') + samir = Employee(name="Samir Nagheenanajar") samir.save() friends = [michael, samir] - peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) + peter = Employee(name="Peter Gibbons", boss=bill, friends=friends) peter.save() - Employee(name='Funky Gibbon', boss=bill, friends=friends).save() - Employee(name='Funky Gibbon', boss=bill, friends=friends).save() - Employee(name='Funky Gibbon', boss=bill, friends=friends).save() + Employee(name="Funky Gibbon", boss=bill, friends=friends).save() + Employee(name="Funky Gibbon", boss=bill, friends=friends).save() + Employee(name="Funky Gibbon", boss=bill, friends=friends).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 peter = Employee.objects.with_id(peter.id) - self.assertEqual(q, 1) + assert q == 1 peter.boss - self.assertEqual(q, 2) + assert q == 2 peter.friends - self.assertEqual(q, 3) + assert q == 3 # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 peter = Employee.objects.with_id(peter.id).select_related() - self.assertEqual(q, 2) + assert q == 2 - self.assertEqual(peter.boss, bill) - self.assertEqual(q, 2) + assert peter.boss == bill + assert q == 2 - self.assertEqual(peter.friends, friends) - self.assertEqual(q, 2) + assert peter.friends == friends + assert q == 2 # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 employees = Employee.objects(boss=bill).select_related() - 
self.assertEqual(q, 2) + assert q == 2 for employee in employees: - self.assertEqual(employee.boss, bill) - self.assertEqual(q, 2) + assert employee.boss == bill + assert q == 2 - self.assertEqual(employee.friends, friends) - self.assertEqual(q, 2) + assert employee.friends == friends + assert q == 2 def test_list_of_lists_of_references(self): - class User(Document): name = StringField() @@ -357,26 +359,27 @@ class FieldTest(unittest.TestCase): Post.drop_collection() SimpleList.drop_collection() - u1 = User.objects.create(name='u1') - u2 = User.objects.create(name='u2') - u3 = User.objects.create(name='u3') + u1 = User.objects.create(name="u1") + u2 = User.objects.create(name="u2") + u3 = User.objects.create(name="u3") SimpleList.objects.create(users=[u1, u2, u3]) - self.assertEqual(SimpleList.objects.all()[0].users, [u1, u2, u3]) + assert SimpleList.objects.all()[0].users == [u1, u2, u3] Post.objects.create(user_lists=[[u1, u2], [u3]]) - self.assertEqual(Post.objects.all()[0].user_lists, [[u1, u2], [u3]]) + assert Post.objects.all()[0].user_lists == [[u1, u2], [u3]] def test_circular_reference(self): """Ensure you can handle circular references """ + class Relation(EmbeddedDocument): name = StringField() - person = ReferenceField('Person') + person = ReferenceField("Person") class Person(Document): name = StringField() - relations = ListField(EmbeddedDocumentField('Relation')) + relations = ListField(EmbeddedDocumentField("Relation")) def __repr__(self): return "" % self.name @@ -398,14 +401,15 @@ class FieldTest(unittest.TestCase): daughter.relations.append(self_rel) daughter.save() - self.assertEqual("[, ]", "%s" % Person.objects()) + assert "[, ]" == "%s" % Person.objects() def test_circular_reference_on_self(self): """Ensure you can handle circular references """ + class Person(Document): name = StringField() - relations = ListField(ReferenceField('self')) + relations = ListField(ReferenceField("self")) def __repr__(self): return "" % self.name @@ -424,14 
+428,15 @@ class FieldTest(unittest.TestCase): daughter.relations.append(daughter) daughter.save() - self.assertEqual("[, ]", "%s" % Person.objects()) + assert "[, ]" == "%s" % Person.objects() def test_circular_tree_reference(self): """Ensure you can handle circular references with more than one level """ + class Other(EmbeddedDocument): name = StringField() - friends = ListField(ReferenceField('Person')) + friends = ListField(ReferenceField("Person")) class Person(Document): name = StringField() @@ -443,8 +448,8 @@ class FieldTest(unittest.TestCase): Person.drop_collection() paul = Person(name="Paul").save() maria = Person(name="Maria").save() - julia = Person(name='Julia').save() - anna = Person(name='Anna').save() + julia = Person(name="Julia").save() + anna = Person(name="Anna").save() paul.other.friends = [maria, julia, anna] paul.other.name = "Paul's friends" @@ -462,13 +467,12 @@ class FieldTest(unittest.TestCase): anna.other.name = "Anna's friends" anna.save() - self.assertEqual( - "[, , , ]", - "%s" % Person.objects() + assert ( + "[, , , ]" + == "%s" % Person.objects() ) def test_generic_reference(self): - class UserA(Document): name = StringField() @@ -488,13 +492,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -506,53 +510,52 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + assert "User" in m.__class__.__name__ # 
Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + assert "User" in m.__class__.__name__ # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 4) + assert q == 4 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) - + assert "User" in m.__class__.__name__ def test_generic_reference_orphan_dbref(self): """Ensure that generic orphan DBRef items in ListFields are dereferenced. 
@@ -577,13 +580,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -595,18 +598,18 @@ class FieldTest(unittest.TestCase): # an orphan DBRef in the GenericReference ListField UserA.objects[0].delete() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) - self.assertTrue(group_obj._data['members']._dereferenced) + assert q == 4 + assert group_obj._data["members"]._dereferenced [m for m in group_obj.members] - self.assertEqual(q, 4) - self.assertTrue(group_obj._data['members']._dereferenced) + assert q == 4 + assert group_obj._data["members"]._dereferenced UserA.drop_collection() UserB.drop_collection() @@ -614,7 +617,6 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_list_field_complex(self): - class UserA(Document): name = StringField() @@ -634,13 +636,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -652,52 +654,52 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + assert "User" in 
m.__class__.__name__ # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + assert "User" in m.__class__.__name__ # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 4) + assert q == 4 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 for m in group_obj.members: - self.assertIn('User', m.__class__.__name__) + assert "User" in m.__class__.__name__ UserA.drop_collection() UserB.drop_collection() @@ -705,7 +707,6 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_map_field_reference(self): - class User(Document): name = StringField() @@ -717,7 +718,7 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - user = User(name='user %s' % i) + user = User(name="user %s" % i) user.save() members.append(user) @@ -728,49 +729,48 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 - for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, User) + for k, m in group_obj.members.items(): + assert isinstance(m, User) # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - 
self.assertEqual(q, 2) + assert q == 2 - for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, User) + for k, m in group_obj.members.items(): + assert isinstance(m, User) - # Queryset select_related + # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 2) + assert q == 2 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 - for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, User) + for k, m in group_obj.members.items(): + assert isinstance(m, User) User.drop_collection() Group.drop_collection() def test_dict_field(self): - class UserA(Document): name = StringField() @@ -790,13 +790,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -807,65 +807,65 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 - for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + for k, m in group_obj.members.items(): + assert "User" in m.__class__.__name__ # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 - for k, m in 
iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + for k, m in group_obj.members.items(): + assert "User" in m.__class__.__name__ # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 4) + assert q == 4 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 - for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + for k, m in group_obj.members.items(): + assert "User" in m.__class__.__name__ Group.objects.delete() Group().save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 1) - self.assertEqual(group_obj.members, {}) + assert q == 1 + assert group_obj.members == {} UserA.drop_collection() UserB.drop_collection() @@ -873,10 +873,9 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_dict_field_no_field_inheritance(self): - class UserA(Document): name = StringField() - meta = {'allow_inheritance': False} + meta = {"allow_inheritance": False} class Group(Document): members = DictField() @@ -886,7 +885,7 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() members += [a] @@ -898,58 +897,57 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 - for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, UserA) + for k, m in 
group_obj.members.items(): + assert isinstance(m, UserA) # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 - for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, UserA) + for k, m in group_obj.members.items(): + assert isinstance(m, UserA) # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 2) + assert q == 2 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 [m for m in group_obj.members] - self.assertEqual(q, 2) + assert q == 2 - for k, m in iteritems(group_obj.members): - self.assertIsInstance(m, UserA) + for k, m in group_obj.members.items(): + assert isinstance(m, UserA) UserA.drop_collection() Group.drop_collection() def test_generic_reference_map_field(self): - class UserA(Document): name = StringField() @@ -969,13 +967,13 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i) + a = UserA(name="User A %s" % i) a.save() - b = UserB(name='User B %s' % i) + b = UserB(name="User B %s" % i) b.save() - c = UserC(name='User C %s' % i) + c = UserC(name="User C %s" % i) c.save() members += [a, b, c] @@ -986,64 +984,64 @@ class FieldTest(unittest.TestCase): group.save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 - for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + for k, m in 
group_obj.members.items(): + assert "User" in m.__class__.__name__ # Document select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first().select_related() - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 - for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + for k, m in group_obj.members.items(): + assert "User" in m.__class__.__name__ # Queryset select_related with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_objs = Group.objects.select_related() - self.assertEqual(q, 4) + assert q == 4 for group_obj in group_objs: [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 [m for m in group_obj.members] - self.assertEqual(q, 4) + assert q == 4 - for k, m in iteritems(group_obj.members): - self.assertIn('User', m.__class__.__name__) + for k, m in group_obj.members.items(): + assert "User" in m.__class__.__name__ Group.objects.delete() Group().save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 [m for m in group_obj.members] - self.assertEqual(q, 1) + assert q == 1 UserA.drop_collection() UserB.drop_collection() @@ -1051,7 +1049,6 @@ class FieldTest(unittest.TestCase): Group.drop_collection() def test_multidirectional_lists(self): - class Asset(Document): name = StringField(max_length=250, required=True) path = StringField() @@ -1062,21 +1059,20 @@ class FieldTest(unittest.TestCase): Asset.drop_collection() - root = Asset(name='', path="/", title="Site Root") + root = Asset(name="", path="/", title="Site Root") root.save() - company = Asset(name='company', title='Company', parent=root, parents=[root]) + company = Asset(name="company", title="Company", parent=root, parents=[root]) company.save() root.children 
= [company] root.save() root = root.reload() - self.assertEqual(root.children, [company]) - self.assertEqual(company.parents, [root]) + assert root.children == [company] + assert company.parents == [root] def test_dict_in_dbref_instance(self): - class Person(Document): name = StringField(max_length=250, required=True) @@ -1087,34 +1083,35 @@ class FieldTest(unittest.TestCase): Person.drop_collection() Room.drop_collection() - bob = Person.objects.create(name='Bob') + bob = Person.objects.create(name="Bob") bob.save() - sarah = Person.objects.create(name='Sarah') + sarah = Person.objects.create(name="Sarah") sarah.save() room_101 = Room.objects.create(number="101") room_101.staffs_with_position = [ - {'position_key': 'window', 'staff': sarah}, - {'position_key': 'door', 'staff': bob.to_dbref()}] + {"position_key": "window", "staff": sarah}, + {"position_key": "door", "staff": bob.to_dbref()}, + ] room_101.save() room = Room.objects.first().select_related() - self.assertEqual(room.staffs_with_position[0]['staff'], sarah) - self.assertEqual(room.staffs_with_position[1]['staff'], bob) + assert room.staffs_with_position[0]["staff"] == sarah + assert room.staffs_with_position[1]["staff"] == bob def test_document_reload_no_inheritance(self): class Foo(Document): - meta = {'allow_inheritance': False} - bar = ReferenceField('Bar') - baz = ReferenceField('Baz') + meta = {"allow_inheritance": False} + bar = ReferenceField("Bar") + baz = ReferenceField("Baz") class Bar(Document): - meta = {'allow_inheritance': False} - msg = StringField(required=True, default='Blammo!') + meta = {"allow_inheritance": False} + msg = StringField(required=True, default="Blammo!") class Baz(Document): - meta = {'allow_inheritance': False} - msg = StringField(required=True, default='Kaboom!') + meta = {"allow_inheritance": False} + msg = StringField(required=True, default="Kaboom!") Foo.drop_collection() Bar.drop_collection() @@ -1130,19 +1127,22 @@ class FieldTest(unittest.TestCase): foo.save() 
foo.reload() - self.assertEqual(type(foo.bar), Bar) - self.assertEqual(type(foo.baz), Baz) + assert type(foo.bar) == Bar + assert type(foo.baz) == Baz def test_document_reload_reference_integrity(self): """ Ensure reloading a document with multiple similar id in different collections doesn't mix them. """ + class Topic(Document): id = IntField(primary_key=True) + class User(Document): id = IntField(primary_key=True) name = StringField() + class Message(Document): id = IntField(primary_key=True) topic = ReferenceField(Topic) @@ -1154,23 +1154,24 @@ class FieldTest(unittest.TestCase): # All objects share the same id, but each in a different collection topic = Topic(id=1).save() - user = User(id=1, name='user-name').save() + user = User(id=1, name="user-name").save() Message(id=1, topic=topic, author=user).save() concurrent_change_user = User.objects.get(id=1) - concurrent_change_user.name = 'new-name' + concurrent_change_user.name = "new-name" concurrent_change_user.save() - self.assertNotEqual(user.name, 'new-name') + assert user.name != "new-name" msg = Message.objects.get(id=1) msg.reload() - self.assertEqual(msg.topic, topic) - self.assertEqual(msg.author, user) - self.assertEqual(msg.author.name, 'new-name') + assert msg.topic == topic + assert msg.author == user + assert msg.author.name == "new-name" def test_list_lookup_not_checked_in_map(self): """Ensure we dereference list data correctly """ + class Comment(Document): id = IntField(primary_key=True) text = StringField() @@ -1182,17 +1183,18 @@ class FieldTest(unittest.TestCase): Comment.drop_collection() Message.drop_collection() - c1 = Comment(id=0, text='zero').save() - c2 = Comment(id=1, text='one').save() + c1 = Comment(id=0, text="zero").save() + c2 = Comment(id=1, text="one").save() Message(id=1, comments=[c1, c2]).save() msg = Message.objects.get(id=1) - self.assertEqual(0, msg.comments[0].id) - self.assertEqual(1, msg.comments[1].id) + assert 0 == msg.comments[0].id + assert 1 == msg.comments[1].id 
def test_list_item_dereference_dref_false_save_doesnt_cause_extra_queries(self): """Ensure that DBRef items in ListFields are dereferenced. """ + class User(Document): name = StringField() @@ -1204,24 +1206,25 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - User(name='user %s' % i).save() + User(name="user %s" % i).save() Group(name="Test", members=User.objects).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 group_obj.name = "new test" group_obj.save() - self.assertEqual(q, 2) + assert q == 2 def test_list_item_dereference_dref_true_save_doesnt_cause_extra_queries(self): """Ensure that DBRef items in ListFields are dereferenced. """ + class User(Document): name = StringField() @@ -1233,23 +1236,22 @@ class FieldTest(unittest.TestCase): Group.drop_collection() for i in range(1, 51): - User(name='user %s' % i).save() + User(name="user %s" % i).save() Group(name="Test", members=User.objects).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 group_obj.name = "new test" group_obj.save() - self.assertEqual(q, 2) + assert q == 2 def test_generic_reference_save_doesnt_cause_extra_queries(self): - class UserA(Document): name = StringField() @@ -1270,29 +1272,29 @@ class FieldTest(unittest.TestCase): members = [] for i in range(1, 51): - a = UserA(name='User A %s' % i).save() - b = UserB(name='User B %s' % i).save() - c = UserC(name='User C %s' % i).save() + a = UserA(name="User A %s" % i).save() + b = UserB(name="User B %s" % i).save() + c = UserC(name="User C %s" % i).save() members += [a, b, c] Group(name="test", members=members).save() with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 group_obj = Group.objects.first() - self.assertEqual(q, 1) + assert q == 1 group_obj.name = "new test" group_obj.save() - 
self.assertEqual(q, 2) + assert q == 2 def test_objectid_reference_across_databases(self): # mongoenginetest - Is default connection alias from setUp() # Register Aliases - register_connection('testdb-1', 'mongoenginetest2') + register_connection("testdb-1", "mongoenginetest2") class User(Document): name = StringField() @@ -1311,16 +1313,17 @@ class FieldTest(unittest.TestCase): # Can't use query_counter across databases - so test the _data object book = Book.objects.first() - self.assertNotIsInstance(book._data['author'], User) + assert not isinstance(book._data["author"], User) book.select_related() - self.assertIsInstance(book._data['author'], User) + assert isinstance(book._data["author"], User) def test_non_ascii_pk(self): """ Ensure that dbref conversion to string does not fail when non-ascii characters are used in primary key """ + class Brand(Document): title = StringField(max_length=255, primary_key=True) @@ -1337,11 +1340,11 @@ class FieldTest(unittest.TestCase): BrandGroup(title="top_brands", brands=[brand1, brand2]).save() brand_groups = BrandGroup.objects().all() - self.assertEqual(2, len([brand for bg in brand_groups for brand in bg.brands])) + assert 2 == len([brand for bg in brand_groups for brand in bg.brands]) def test_dereferencing_embedded_listfield_referencefield(self): class Tag(Document): - meta = {'collection': 'tags'} + meta = {"collection": "tags"} name = StringField() class Post(EmbeddedDocument): @@ -1349,22 +1352,21 @@ class FieldTest(unittest.TestCase): tags = ListField(ReferenceField("Tag", dbref=True)) class Page(Document): - meta = {'collection': 'pages'} + meta = {"collection": "pages"} tags = ListField(ReferenceField("Tag", dbref=True)) posts = ListField(EmbeddedDocumentField(Post)) Tag.drop_collection() Page.drop_collection() - tag = Tag(name='test').save() - post = Post(body='test body', tags=[tag]) + tag = Tag(name="test").save() + post = Post(body="test body", tags=[tag]) Page(tags=[tag], posts=[post]).save() page = 
Page.objects.first() - self.assertEqual(page.tags[0], page.posts[0].tags[0]) + assert page.tags[0] == page.posts[0].tags[0] def test_select_related_follows_embedded_referencefields(self): - class Song(Document): title = StringField() @@ -1382,13 +1384,13 @@ class FieldTest(unittest.TestCase): playlist = Playlist.objects.create(items=items) with query_counter() as q: - self.assertEqual(q, 0) + assert q == 0 playlist = Playlist.objects.first().select_related() songs = [item.song for item in playlist.items] - self.assertEqual(q, 2) + assert q == 2 -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_replicaset_connection.py b/tests/test_replicaset_connection.py index cacdce8b..5d83da00 100644 --- a/tests/test_replicaset_connection.py +++ b/tests/test_replicaset_connection.py @@ -1,10 +1,9 @@ import unittest -from pymongo import ReadPreference -from pymongo import MongoClient +from pymongo import MongoClient, ReadPreference import mongoengine -from mongoengine.connection import MongoEngineConnectionError +from mongoengine.connection import ConnectionFailure CONN_CLASS = MongoClient @@ -12,7 +11,6 @@ READ_PREF = ReadPreference.SECONDARY class ConnectionTest(unittest.TestCase): - def setUp(self): mongoengine.connection._connection_settings = {} mongoengine.connection._connections = {} @@ -26,20 +24,21 @@ class ConnectionTest(unittest.TestCase): def test_replicaset_uri_passes_read_preference(self): """Requires a replica set called "rs" on port 27017 """ - try: - conn = mongoengine.connect(db='mongoenginetest', - host="mongodb://localhost/mongoenginetest?replicaSet=rs", - read_preference=READ_PREF) - except MongoEngineConnectionError as e: + conn = mongoengine.connect( + db="mongoenginetest", + host="mongodb://localhost/mongoenginetest?replicaSet=rs", + read_preference=READ_PREF, + ) + except ConnectionFailure: return if not isinstance(conn, CONN_CLASS): # really??? 
return - self.assertEqual(conn.read_preference, READ_PREF) + assert conn.read_preference == READ_PREF -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_signals.py b/tests/test_signals.py index 34cb43c3..64976e25 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -7,7 +7,7 @@ from mongoengine import signals signal_output = [] -class SignalTests(unittest.TestCase): +class TestSignal(unittest.TestCase): """ Testing signals before/after saving and deleting. """ @@ -20,7 +20,7 @@ class SignalTests(unittest.TestCase): return signal_output def setUp(self): - connect(db='mongoenginetest') + connect(db="mongoenginetest") class Author(Document): # Make the id deterministic for easier testing @@ -32,60 +32,65 @@ class SignalTests(unittest.TestCase): @classmethod def pre_init(cls, sender, document, *args, **kwargs): - signal_output.append('pre_init signal, %s' % cls.__name__) - signal_output.append(kwargs['values']) + signal_output.append("pre_init signal, %s" % cls.__name__) + signal_output.append(kwargs["values"]) @classmethod def post_init(cls, sender, document, **kwargs): - signal_output.append('post_init signal, %s, document._created = %s' % (document, document._created)) + signal_output.append( + "post_init signal, %s, document._created = %s" + % (document, document._created) + ) @classmethod def pre_save(cls, sender, document, **kwargs): - signal_output.append('pre_save signal, %s' % document) + signal_output.append("pre_save signal, %s" % document) signal_output.append(kwargs) @classmethod def pre_save_post_validation(cls, sender, document, **kwargs): - signal_output.append('pre_save_post_validation signal, %s' % document) - if kwargs.pop('created', False): - signal_output.append('Is created') + signal_output.append("pre_save_post_validation signal, %s" % document) + if kwargs.pop("created", False): + signal_output.append("Is created") else: - signal_output.append('Is updated') + signal_output.append("Is 
updated") signal_output.append(kwargs) @classmethod def post_save(cls, sender, document, **kwargs): - dirty_keys = document._delta()[0].keys() + document._delta()[1].keys() - signal_output.append('post_save signal, %s' % document) - signal_output.append('post_save dirty keys, %s' % dirty_keys) - if kwargs.pop('created', False): - signal_output.append('Is created') + dirty_keys = list(document._delta()[0].keys()) + list( + document._delta()[1].keys() + ) + signal_output.append("post_save signal, %s" % document) + signal_output.append("post_save dirty keys, %s" % dirty_keys) + if kwargs.pop("created", False): + signal_output.append("Is created") else: - signal_output.append('Is updated') + signal_output.append("Is updated") signal_output.append(kwargs) @classmethod def pre_delete(cls, sender, document, **kwargs): - signal_output.append('pre_delete signal, %s' % document) + signal_output.append("pre_delete signal, %s" % document) signal_output.append(kwargs) @classmethod def post_delete(cls, sender, document, **kwargs): - signal_output.append('post_delete signal, %s' % document) + signal_output.append("post_delete signal, %s" % document) signal_output.append(kwargs) @classmethod def pre_bulk_insert(cls, sender, documents, **kwargs): - signal_output.append('pre_bulk_insert signal, %s' % documents) + signal_output.append("pre_bulk_insert signal, %s" % documents) signal_output.append(kwargs) @classmethod def post_bulk_insert(cls, sender, documents, **kwargs): - signal_output.append('post_bulk_insert signal, %s' % documents) - if kwargs.pop('loaded', False): - signal_output.append('Is loaded') + signal_output.append("post_bulk_insert signal, %s" % documents) + if kwargs.pop("loaded", False): + signal_output.append("Is loaded") else: - signal_output.append('Not loaded') + signal_output.append("Not loaded") signal_output.append(kwargs) self.Author = Author @@ -101,12 +106,12 @@ class SignalTests(unittest.TestCase): @classmethod def pre_delete(cls, sender, document, 
**kwargs): - signal_output.append('pre_delete signal, %s' % document) + signal_output.append("pre_delete signal, %s" % document) signal_output.append(kwargs) @classmethod def post_delete(cls, sender, document, **kwargs): - signal_output.append('post_delete signal, %s' % document) + signal_output.append("post_delete signal, %s" % document) signal_output.append(kwargs) self.Another = Another @@ -117,11 +122,11 @@ class SignalTests(unittest.TestCase): @classmethod def post_save(cls, sender, document, **kwargs): - if 'created' in kwargs: - if kwargs['created']: - signal_output.append('Is created') + if "created" in kwargs: + if kwargs["created"]: + signal_output.append("Is created") else: - signal_output.append('Is updated') + signal_output.append("Is updated") self.ExplicitId = ExplicitId ExplicitId.drop_collection() @@ -136,9 +141,13 @@ class SignalTests(unittest.TestCase): @classmethod def pre_bulk_insert(cls, sender, documents, **kwargs): - signal_output.append('pre_bulk_insert signal, %s' % - [(doc, {'active': documents[n].active}) - for n, doc in enumerate(documents)]) + signal_output.append( + "pre_bulk_insert signal, %s" + % [ + (doc, {"active": documents[n].active}) + for n, doc in enumerate(documents) + ] + ) # make changes here, this is just an example - # it could be anything that needs pre-validation or looks-ups before bulk bulk inserting @@ -149,13 +158,17 @@ class SignalTests(unittest.TestCase): @classmethod def post_bulk_insert(cls, sender, documents, **kwargs): - signal_output.append('post_bulk_insert signal, %s' % - [(doc, {'active': documents[n].active}) - for n, doc in enumerate(documents)]) - if kwargs.pop('loaded', False): - signal_output.append('Is loaded') + signal_output.append( + "post_bulk_insert signal, %s" + % [ + (doc, {"active": documents[n].active}) + for n, doc in enumerate(documents) + ] + ) + if kwargs.pop("loaded", False): + signal_output.append("Is loaded") else: - signal_output.append('Not loaded') + signal_output.append("Not 
loaded") signal_output.append(kwargs) self.Post = Post @@ -178,7 +191,9 @@ class SignalTests(unittest.TestCase): signals.pre_init.connect(Author.pre_init, sender=Author) signals.post_init.connect(Author.post_init, sender=Author) signals.pre_save.connect(Author.pre_save, sender=Author) - signals.pre_save_post_validation.connect(Author.pre_save_post_validation, sender=Author) + signals.pre_save_post_validation.connect( + Author.pre_save_post_validation, sender=Author + ) signals.post_save.connect(Author.post_save, sender=Author) signals.pre_delete.connect(Author.pre_delete, sender=Author) signals.post_delete.connect(Author.post_delete, sender=Author) @@ -199,7 +214,9 @@ class SignalTests(unittest.TestCase): signals.post_delete.disconnect(self.Author.post_delete) signals.pre_delete.disconnect(self.Author.pre_delete) signals.post_save.disconnect(self.Author.post_save) - signals.pre_save_post_validation.disconnect(self.Author.pre_save_post_validation) + signals.pre_save_post_validation.disconnect( + self.Author.pre_save_post_validation + ) signals.pre_save.disconnect(self.Author.pre_save) signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) @@ -230,36 +247,36 @@ class SignalTests(unittest.TestCase): # Note that there is a chance that the following assert fails in case # some receivers (eventually created in other tests) # gets garbage collected (https://pythonhosted.org/blinker/#blinker.base.Signal.connect) - self.assertEqual(self.pre_signals, post_signals) + assert self.pre_signals == post_signals def test_model_signals(self): """ Model saves should throw some signals. 
""" def create_author(): - self.Author(name='Bill Shakespeare') + self.Author(name="Bill Shakespeare") def bulk_create_author_with_load(): - a1 = self.Author(name='Bill Shakespeare') + a1 = self.Author(name="Bill Shakespeare") self.Author.objects.insert([a1], load_bulk=True) def bulk_create_author_without_load(): - a1 = self.Author(name='Bill Shakespeare') + a1 = self.Author(name="Bill Shakespeare") self.Author.objects.insert([a1], load_bulk=False) def load_existing_author(): - a = self.Author(name='Bill Shakespeare') + a = self.Author(name="Bill Shakespeare") a.save() self.get_signal_output(lambda: None) # eliminate signal output - a1 = self.Author.objects(name='Bill Shakespeare')[0] + _ = self.Author.objects(name="Bill Shakespeare")[0] - self.assertEqual(self.get_signal_output(create_author), [ + assert self.get_signal_output(create_author) == [ "pre_init signal, Author", - {'name': 'Bill Shakespeare'}, + {"name": "Bill Shakespeare"}, "post_init signal, Bill Shakespeare, document._created = True", - ]) + ] - a1 = self.Author(name='Bill Shakespeare') - self.assertEqual(self.get_signal_output(a1.save), [ + a1 = self.Author(name="Bill Shakespeare") + assert self.get_signal_output(a1.save) == [ "pre_save signal, Bill Shakespeare", {}, "pre_save_post_validation signal, Bill Shakespeare", @@ -268,12 +285,12 @@ class SignalTests(unittest.TestCase): "post_save signal, Bill Shakespeare", "post_save dirty keys, ['name']", "Is created", - {} - ]) + {}, + ] a1.reload() - a1.name = 'William Shakespeare' - self.assertEqual(self.get_signal_output(a1.save), [ + a1.name = "William Shakespeare" + assert self.get_signal_output(a1.save) == [ "pre_save signal, William Shakespeare", {}, "pre_save_post_validation signal, William Shakespeare", @@ -282,157 +299,157 @@ class SignalTests(unittest.TestCase): "post_save signal, William Shakespeare", "post_save dirty keys, ['name']", "Is updated", - {} - ]) - - self.assertEqual(self.get_signal_output(a1.delete), [ - 'pre_delete signal, 
William Shakespeare', {}, - 'post_delete signal, William Shakespeare', - {} - ]) + ] - self.assertEqual(self.get_signal_output(load_existing_author), [ - "pre_init signal, Author", - {'id': 2, 'name': 'Bill Shakespeare'}, - "post_init signal, Bill Shakespeare, document._created = False" - ]) - - self.assertEqual(self.get_signal_output(bulk_create_author_with_load), [ - 'pre_init signal, Author', - {'name': 'Bill Shakespeare'}, - 'post_init signal, Bill Shakespeare, document._created = True', - 'pre_bulk_insert signal, []', + assert self.get_signal_output(a1.delete) == [ + "pre_delete signal, William Shakespeare", {}, - 'pre_init signal, Author', - {'id': 3, 'name': 'Bill Shakespeare'}, - 'post_init signal, Bill Shakespeare, document._created = False', - 'post_bulk_insert signal, []', - 'Is loaded', - {} - ]) + "post_delete signal, William Shakespeare", + {}, + ] - self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [ + assert self.get_signal_output(load_existing_author) == [ "pre_init signal, Author", - {'name': 'Bill Shakespeare'}, + {"id": 2, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + ] + + assert self.get_signal_output(bulk_create_author_with_load) == [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_bulk_insert signal, []", + {}, + "pre_init signal, Author", + {"id": 3, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + "post_bulk_insert signal, []", + "Is loaded", + {}, + ] + + assert self.get_signal_output(bulk_create_author_without_load) == [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, "post_init signal, Bill Shakespeare, document._created = True", "pre_bulk_insert signal, []", {}, "post_bulk_insert signal, []", "Not loaded", - {} - ]) + {}, + ] def test_signal_kwargs(self): """ Make sure signal_kwargs is passed to signals calls. 
""" def live_and_let_die(): - a = self.Author(name='Bill Shakespeare') - a.save(signal_kwargs={'live': True, 'die': False}) - a.delete(signal_kwargs={'live': False, 'die': True}) + a = self.Author(name="Bill Shakespeare") + a.save(signal_kwargs={"live": True, "die": False}) + a.delete(signal_kwargs={"live": False, "die": True}) - self.assertEqual(self.get_signal_output(live_and_let_die), [ + assert self.get_signal_output(live_and_let_die) == [ "pre_init signal, Author", - {'name': 'Bill Shakespeare'}, + {"name": "Bill Shakespeare"}, "post_init signal, Bill Shakespeare, document._created = True", "pre_save signal, Bill Shakespeare", - {'die': False, 'live': True}, + {"die": False, "live": True}, "pre_save_post_validation signal, Bill Shakespeare", "Is created", - {'die': False, 'live': True}, + {"die": False, "live": True}, "post_save signal, Bill Shakespeare", "post_save dirty keys, ['name']", "Is created", - {'die': False, 'live': True}, - 'pre_delete signal, Bill Shakespeare', - {'die': True, 'live': False}, - 'post_delete signal, Bill Shakespeare', - {'die': True, 'live': False} - ]) + {"die": False, "live": True}, + "pre_delete signal, Bill Shakespeare", + {"die": True, "live": False}, + "post_delete signal, Bill Shakespeare", + {"die": True, "live": False}, + ] def bulk_create_author(): - a1 = self.Author(name='Bill Shakespeare') - self.Author.objects.insert([a1], signal_kwargs={'key': True}) + a1 = self.Author(name="Bill Shakespeare") + self.Author.objects.insert([a1], signal_kwargs={"key": True}) - self.assertEqual(self.get_signal_output(bulk_create_author), [ - 'pre_init signal, Author', - {'name': 'Bill Shakespeare'}, - 'post_init signal, Bill Shakespeare, document._created = True', - 'pre_bulk_insert signal, []', - {'key': True}, - 'pre_init signal, Author', - {'id': 2, 'name': 'Bill Shakespeare'}, - 'post_init signal, Bill Shakespeare, document._created = False', - 'post_bulk_insert signal, []', - 'Is loaded', - {'key': True} - ]) + assert 
self.get_signal_output(bulk_create_author) == [ + "pre_init signal, Author", + {"name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = True", + "pre_bulk_insert signal, []", + {"key": True}, + "pre_init signal, Author", + {"id": 2, "name": "Bill Shakespeare"}, + "post_init signal, Bill Shakespeare, document._created = False", + "post_bulk_insert signal, []", + "Is loaded", + {"key": True}, + ] def test_queryset_delete_signals(self): """ Queryset delete should throw some signals. """ - self.Another(name='Bill Shakespeare').save() - self.assertEqual(self.get_signal_output(self.Another.objects.delete), [ - 'pre_delete signal, Bill Shakespeare', + self.Another(name="Bill Shakespeare").save() + assert self.get_signal_output(self.Another.objects.delete) == [ + "pre_delete signal, Bill Shakespeare", {}, - 'post_delete signal, Bill Shakespeare', - {} - ]) + "post_delete signal, Bill Shakespeare", + {}, + ] def test_signals_with_explicit_doc_ids(self): """ Model saves must have a created flag the first time.""" ei = self.ExplicitId(id=123) # post save must received the created flag, even if there's already # an object id present - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + assert self.get_signal_output(ei.save) == ["Is created"] # second time, it must be an update - self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) + assert self.get_signal_output(ei.save) == ["Is updated"] def test_signals_with_switch_collection(self): ei = self.ExplicitId(id=123) ei.switch_collection("explicit__1") - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + assert self.get_signal_output(ei.save) == ["Is created"] ei.switch_collection("explicit__1") - self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) + assert self.get_signal_output(ei.save) == ["Is updated"] ei.switch_collection("explicit__1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + assert 
self.get_signal_output(ei.save) == ["Is created"] ei.switch_collection("explicit__1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + assert self.get_signal_output(ei.save) == ["Is created"] def test_signals_with_switch_db(self): - connect('mongoenginetest') - register_connection('testdb-1', 'mongoenginetest2') + connect("mongoenginetest") + register_connection("testdb-1", "mongoenginetest2") ei = self.ExplicitId(id=123) ei.switch_db("testdb-1") - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + assert self.get_signal_output(ei.save) == ["Is created"] ei.switch_db("testdb-1") - self.assertEqual(self.get_signal_output(ei.save), ['Is updated']) + assert self.get_signal_output(ei.save) == ["Is updated"] ei.switch_db("testdb-1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + assert self.get_signal_output(ei.save) == ["Is created"] ei.switch_db("testdb-1", keep_created=False) - self.assertEqual(self.get_signal_output(ei.save), ['Is created']) + assert self.get_signal_output(ei.save) == ["Is created"] def test_signals_bulk_insert(self): def bulk_set_active_post(): posts = [ - self.Post(title='Post 1'), - self.Post(title='Post 2'), - self.Post(title='Post 3') + self.Post(title="Post 1"), + self.Post(title="Post 2"), + self.Post(title="Post 3"), ] self.Post.objects.insert(posts) results = self.get_signal_output(bulk_set_active_post) - self.assertEqual(results, [ + assert results == [ "pre_bulk_insert signal, [(, {'active': False}), (, {'active': False}), (, {'active': False})]", {}, "post_bulk_insert signal, [(, {'active': True}), (, {'active': True}), (, {'active': True})]", - 'Is loaded', - {} - ]) + "Is loaded", + {}, + ] -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_utils.py b/tests/test_utils.py index 562cc1ff..dd178273 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,38 +1,39 @@ -import unittest 
import re +import unittest + +import pytest from mongoengine.base.utils import LazyRegexCompiler signal_output = [] -class LazyRegexCompilerTest(unittest.TestCase): - +class TestLazyRegexCompiler: def test_lazy_regex_compiler_verify_laziness_of_descriptor(self): class UserEmail(object): - EMAIL_REGEX = LazyRegexCompiler('@', flags=32) + EMAIL_REGEX = LazyRegexCompiler("@", flags=32) - descriptor = UserEmail.__dict__['EMAIL_REGEX'] - self.assertIsNone(descriptor._compiled_regex) + descriptor = UserEmail.__dict__["EMAIL_REGEX"] + assert descriptor._compiled_regex is None regex = UserEmail.EMAIL_REGEX - self.assertEqual(regex, re.compile('@', flags=32)) - self.assertEqual(regex.search('user@domain.com').group(), '@') + assert regex == re.compile("@", flags=32) + assert regex.search("user@domain.com").group() == "@" user_email = UserEmail() - self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX) + assert user_email.EMAIL_REGEX is UserEmail.EMAIL_REGEX def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self): class UserEmail(object): - EMAIL_REGEX = LazyRegexCompiler('@') + EMAIL_REGEX = LazyRegexCompiler("@") user_email = UserEmail() - with self.assertRaises(AttributeError): - user_email.EMAIL_REGEX = re.compile('@') + with pytest.raises(AttributeError): + user_email.EMAIL_REGEX = re.compile("@") def test_lazy_regex_compiler_verify_can_override_class_attr(self): class UserEmail(object): - EMAIL_REGEX = LazyRegexCompiler('@') + EMAIL_REGEX = LazyRegexCompiler("@") - UserEmail.EMAIL_REGEX = re.compile('cookies') - self.assertEqual(UserEmail.EMAIL_REGEX.search('Cake & cookies').group(), 'cookies') + UserEmail.EMAIL_REGEX = re.compile("cookies") + assert UserEmail.EMAIL_REGEX.search("Cake & cookies").group() == "cookies" diff --git a/tests/utils.py b/tests/utils.py index be7a8095..195b9dba 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,14 +1,13 @@ -import operator import unittest -from nose.plugins.skip import SkipTest +import pytest from 
mongoengine import connect from mongoengine.connection import disconnect_all, get_db from mongoengine.mongodb_support import get_mongodb_version -MONGO_TEST_DB = 'mongoenginetest' # standard name for the test database +MONGO_TEST_DB = "mongoenginetest" # standard name for the test database class MongoDBTestCase(unittest.TestCase): @@ -37,7 +36,7 @@ def get_as_pymongo(doc): def _decorated_with_ver_requirement(func, mongo_version_req, oper): """Return a MongoDB version requirement decorator. - The resulting decorator will raise a SkipTest exception if the current + The resulting decorator will skip the test if the current MongoDB version doesn't match the provided version/operator. For example, if you define a decorator like so: @@ -53,12 +52,14 @@ def _decorated_with_ver_requirement(func, mongo_version_req, oper): :param mongo_version_req: The mongodb version requirement (tuple(int, int)) :param oper: The operator to apply (e.g. operator.ge) """ + def _inner(*args, **kwargs): mongodb_v = get_mongodb_version() if oper(mongodb_v, mongo_version_req): return func(*args, **kwargs) - raise SkipTest('Needs MongoDB v{}+'.format('.'.join(str(n) for n in mongo_version_req))) + pretty_version = ".".join(str(n) for n in mongo_version_req) + pytest.skip("Needs MongoDB v{}+".format(pretty_version)) _inner.__name__ = func.__name__ _inner.__doc__ = func.__doc__ diff --git a/tox.ini b/tox.ini index 40bcea8a..6f33772c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,12 +1,13 @@ [tox] -envlist = {py27,py35,pypy,pypy3}-{mg35,mg3x} +envlist = {py35,pypy3}-{mg34,mg36,mg39,mg311} [testenv] commands = - python setup.py nosetests {posargs} + python setup.py test {posargs} deps = - nose - mg34x: PyMongo>=3.4,<3.5 - mg3x: PyMongo>=3.0,<3.7 + mg34: pymongo>=3.4,<3.5 + mg36: pymongo>=3.6,<3.7 + mg39: pymongo>=3.9,<3.10 + mg311: pymongo>=3.11,<3.12 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs