Compare commits

81 Commits

v0.20.0 ... bagerard-p
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | d73ca6f90d | ||
|  | e6c2169f76 | ||
|  | 1d17dc4663 | ||
|  | eeac3bd2e6 | ||
|  | 3f5a15d236 | ||
|  | 338c40b5d5 | ||
|  | fc3ccf9606 | ||
|  | 746faceb5c | ||
|  | eb56fb9bda | ||
|  | 161493c0d2 | ||
|  | cb9f329d11 | ||
|  | 03af784ebe | ||
|  | e5f6e4584a | ||
|  | 79f9f223d0 | ||
|  | 0bc18cd6e1 | ||
|  | 30a3c6a5b7 | ||
|  | 90c5d83f84 | ||
|  | d8b8ff6851 | ||
|  | ee664f0c90 | ||
|  | 94a7e813b1 | ||
|  | 8ef7213426 | ||
|  | 2f4464ead5 | ||
|  | 89b93461ac | ||
|  | 9e40f3ae83 | ||
|  | f4962fbc40 | ||
|  | c9d53ca5d5 | ||
|  | 65f50fd713 | ||
|  | bf1d04e399 | ||
|  | 5a8e5e5a40 | ||
|  | f3919dd839 | ||
|  | 9f82a02ddf | ||
|  | 015a36c85f | ||
|  | fbd3388a59 | ||
|  | d8a52d68c5 | ||
|  | 4286708e2e | ||
|  | e362d089e1 | ||
|  | 6b657886a5 | ||
|  | eb16945147 | ||
|  | 38047ca992 | ||
|  | c801e79d4b | ||
|  | 3fca3739de | ||
|  | c218c8bb6c | ||
|  | 0bbc05995a | ||
|  | 3adb67901b | ||
|  | d4350e7da4 | ||
|  | 4665658145 | ||
|  | 0d289fd5a1 | ||
|  | aabc18755c | ||
|  | 1f2a5db016 | ||
|  | ff40f66291 | ||
|  | 7f77084e0e | ||
|  | aca4de728e | ||
|  | 9e7ca43cad | ||
|  | 7116dec74a | ||
|  | a5302b870b | ||
|  | 604e9974b6 | ||
|  | 3e1c83f8fa | ||
|  | e431e27cb2 | ||
|  | 4f188655d0 | ||
|  | 194b0cac88 | ||
|  | 7b4175fc5c | ||
|  | adb5f74ddb | ||
|  | 107a1c34c8 | ||
|  | dc7da5204f | ||
|  | 0301bca176 | ||
|  | 49f9bca23b | ||
|  | 31498bd7dd | ||
|  | 1698f398eb | ||
|  | 4275c2d7b7 | ||
|  | 22bff8566d | ||
|  | d8657be320 | ||
|  | 412bed0f6d | ||
|  | 53cf26b9af | ||
|  | 2fa48cd9e5 | ||
|  | e64a7a9448 | ||
|  | 84f3dce492 | ||
|  | 60c42dddd5 | ||
|  | f93f9406ee | ||
|  | 928770c43a | ||
|  | d37a30e083 | ||
|  | c9ed930606 | ||

33  .github/workflows/main.yml  (vendored, new file)

							| @@ -0,0 +1,33 @@ | |||||||
|  | # This is a basic workflow to help you get started with Actions | ||||||
|  |  | ||||||
|  | name: CI | ||||||
|  |  | ||||||
|  | # Controls when the action will run. Triggers the workflow on push or pull request | ||||||
|  | # events but only for the master branch | ||||||
|  | on: | ||||||
|  |   push: | ||||||
|  |     branches: [ master ] | ||||||
|  |   pull_request: | ||||||
|  |     branches: [ master ] | ||||||
|  |  | ||||||
|  | # A workflow run is made up of one or more jobs that can run sequentially or in parallel | ||||||
|  | jobs: | ||||||
|  |   # This workflow contains a single job called "build" | ||||||
|  |   build: | ||||||
|  |     # The type of runner that the job will run on | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |  | ||||||
|  |     # Steps represent a sequence of tasks that will be executed as part of the job | ||||||
|  |     steps: | ||||||
|  |       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it | ||||||
|  |       - uses: actions/checkout@v2 | ||||||
|  |  | ||||||
|  |       # Runs a single command using the runners shell | ||||||
|  |       - name: Run a one-line script | ||||||
|  |         run: echo Hello, world! | ||||||
|  |  | ||||||
|  |       # Runs a set of commands using the runners shell | ||||||
|  |       - name: Run a multi-line script | ||||||
|  |         run: | | ||||||
|  |           echo Add other actions to build, | ||||||
|  |           echo test, and deploy your project. | ||||||
							
								
								
									
20  .readthedocs.yml  (new file)

							| @@ -0,0 +1,20 @@ | |||||||
|  | # .readthedocs.yml | ||||||
|  | # Read the Docs configuration file | ||||||
|  | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details | ||||||
|  |  | ||||||
|  | # Required | ||||||
|  | version: 2 | ||||||
|  |  | ||||||
|  | # Build documentation in the docs/ directory with Sphinx | ||||||
|  | sphinx: | ||||||
|  |   configuration: docs/conf.py | ||||||
|  |  | ||||||
|  | # Optionally set the version of Python and requirements required to build your docs | ||||||
|  | python: | ||||||
|  |   version: 3.7 | ||||||
|  |   install: | ||||||
|  |     - requirements: docs/requirements.txt | ||||||
|  |     # docs/conf.py is importing mongoengine | ||||||
|  |     # so mongoengine needs to be installed as well | ||||||
|  |     - method: setuptools | ||||||
|  |       path: . | ||||||
							
								
								
									
18  .travis.yml

							| @@ -16,26 +16,26 @@ | |||||||
| language: python | language: python | ||||||
| dist: xenial | dist: xenial | ||||||
| python: | python: | ||||||
| - 3.5 |  | ||||||
| - 3.6 | - 3.6 | ||||||
| - 3.7 | - 3.7 | ||||||
| - 3.8 | - 3.8 | ||||||
|  | - 3.9 | ||||||
| - pypy3 | - pypy3 | ||||||
|  |  | ||||||
| env: | env: | ||||||
|   global: |   global: | ||||||
|     - MONGODB_3_4=3.4.17 |     - MONGODB_3_4=3.4.19 | ||||||
|     - MONGODB_3_6=3.6.12 |     - MONGODB_3_6=3.6.13 | ||||||
|     - MONGODB_4_0=4.0.13 |     - MONGODB_4_0=4.0.13 | ||||||
|  |  | ||||||
|     - PYMONGO_3_4=3.4 |     - PYMONGO_3_4=3.4 | ||||||
|     - PYMONGO_3_6=3.6 |     - PYMONGO_3_6=3.6 | ||||||
|     - PYMONGO_3_9=3.9 |     - PYMONGO_3_9=3.9 | ||||||
|     - PYMONGO_3_10=3.10 |     - PYMONGO_3_11=3.11 | ||||||
|  |  | ||||||
|     - MAIN_PYTHON_VERSION=3.7 |     - MAIN_PYTHON_VERSION=3.7 | ||||||
|   matrix: |   matrix: | ||||||
|     - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} |     - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11} | ||||||
|  |  | ||||||
| matrix: | matrix: | ||||||
|   # Finish the build as soon as one job fails |   # Finish the build as soon as one job fails | ||||||
| @@ -47,9 +47,9 @@ matrix: | |||||||
|   - python: 3.7 |   - python: 3.7 | ||||||
|     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} |     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} | ||||||
|   - python: 3.7 |   - python: 3.7 | ||||||
|     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_10} |     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11} | ||||||
|   - python: 3.8 |   - python: 3.8 | ||||||
|     env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_10} |     env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11} | ||||||
|  |  | ||||||
| install: | install: | ||||||
|   # Install Mongo |   # Install Mongo | ||||||
| @@ -75,7 +75,7 @@ script: | |||||||
|   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" |   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" | ||||||
|  |  | ||||||
| after_success: | after_success: | ||||||
| - - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi |   - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi | ||||||
|  |  | ||||||
| notifications: | notifications: | ||||||
|   irc: irc.freenode.org#mongoengine |   irc: irc.freenode.org#mongoengine | ||||||
| @@ -103,5 +103,5 @@ deploy: | |||||||
|   on: |   on: | ||||||
|     tags: true |     tags: true | ||||||
|     repo: MongoEngine/mongoengine |     repo: MongoEngine/mongoengine | ||||||
|     condition: ($PYMONGO = ${PYMONGO_3_10}) && ($MONGODB = ${MONGODB_3_4}) |     condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4}) | ||||||
|     python: 3.7 |     python: 3.7 | ||||||
|   | |||||||
							
								
								
									
2  AUTHORS

							| @@ -257,3 +257,5 @@ that much better: | |||||||
|  * Matthew Simpson (https://github.com/mcsimps2) |  * Matthew Simpson (https://github.com/mcsimps2) | ||||||
|  * Leonardo Domingues (https://github.com/leodmgs) |  * Leonardo Domingues (https://github.com/leodmgs) | ||||||
|  * Agustin Barto (https://github.com/abarto) |  * Agustin Barto (https://github.com/abarto) | ||||||
|  |  * Stankiewicz Mateusz (https://github.com/mas15) | ||||||
|  |  * Felix Schultheiß (https://github.com/felix-smashdocs) | ||||||
|   | |||||||
| @@ -16,6 +16,9 @@ MongoEngine | |||||||
|   :target: https://landscape.io/github/MongoEngine/mongoengine/master |   :target: https://landscape.io/github/MongoEngine/mongoengine/master | ||||||
|   :alt: Code Health |   :alt: Code Health | ||||||
|  |  | ||||||
|  | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg | ||||||
|  |   :target: https://github.com/ambv/black | ||||||
|  |  | ||||||
| About | About | ||||||
| ===== | ===== | ||||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | MongoEngine is a Python Object-Document Mapper for working with MongoDB. | ||||||
|   | |||||||
| @@ -6,6 +6,17 @@ Changelog | |||||||
| Development | Development | ||||||
| =========== | =========== | ||||||
| - (Fill this out as you fix issues and develop your features). | - (Fill this out as you fix issues and develop your features). | ||||||
|  | - Bug fix in DynamicDocument, which was not parsing known fields in the constructor like Document does #2412 | ||||||
|  | - When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count | ||||||
|  |     and Cursor.count, which were deprecated in pymongo >= 3.7. | ||||||
|  |     This is expected to have a negative impact on the performance of count; see Issue #2219 | ||||||
|  | - Fix a bug that made the queryset drop the read_preference after clone(). | ||||||
|  | - Remove Py3.5 from CI as it reached EOL and add Python 3.9 | ||||||
|  | - Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311 | ||||||
|  | - Bug fix in ListField: when updating the first item, it was saving the whole list instead of | ||||||
|  |     just replacing the first item (as is usually done) #2392 | ||||||
|  | - Add EnumField: ``mongoengine.fields.EnumField`` | ||||||
|  | - Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields | ||||||
|  |  | ||||||
| Changes in 0.20.0 | Changes in 0.20.0 | ||||||
| ================= | ================= | ||||||
| @@ -28,7 +39,7 @@ Changes in 0.20.0 | |||||||
|  |  | ||||||
| Changes in 0.19.1 | Changes in 0.19.1 | ||||||
| ================= | ================= | ||||||
| - Requires Pillow < 7.0.0 as it dropped Python2 support | - Tests require Pillow < 7.0.0 as it dropped Python2 support | ||||||
| - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of | - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of | ||||||
|     pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 |     pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 | ||||||
|  |  | ||||||
| @@ -456,9 +467,6 @@ Changes in 0.8.3 | |||||||
| - Document.select_related() now respects ``db_alias`` (#377) | - Document.select_related() now respects ``db_alias`` (#377) | ||||||
| - Reload uses shard_key if applicable (#384) | - Reload uses shard_key if applicable (#384) | ||||||
| - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | ||||||
|  |  | ||||||
|   **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 |  | ||||||
|  |  | ||||||
| - Fixed pickling dynamic documents ``_dynamic_fields`` (#387) | - Fixed pickling dynamic documents ``_dynamic_fields`` (#387) | ||||||
| - Fixed ListField setslice and delslice dirty tracking (#390) | - Fixed ListField setslice and delslice dirty tracking (#390) | ||||||
| - Added Django 1.5 PY3 support (#392) | - Added Django 1.5 PY3 support (#392) | ||||||
|   | |||||||
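
The `limit(0)` entry above (#2311) changes query behaviour; below is a minimal sketch of the expected result, assuming a local MongoDB instance and a hypothetical `Page` document:

```python
from mongoengine import Document, StringField, connect

class Page(Document):
    title = StringField()

connect("limit_demo")          # assumes a local MongoDB instance; database name is hypothetical
Page.drop_collection()
for i in range(5):
    Page(title="page-%d" % i).save()

# After this change, limit(0) returns all documents, matching MongoDB semantics (#2311)
assert len(list(Page.objects.limit(0))) == 5
assert len(list(Page.objects.limit(2))) == 2
```
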
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| # | # | ||||||
| # MongoEngine documentation build configuration file, created by | # MongoEngine documentation build configuration file, created by | ||||||
| # sphinx-quickstart on Sun Nov 22 18:14:13 2009. | # sphinx-quickstart on Sun Nov 22 18:14:13 2009. | ||||||
|   | |||||||
| @@ -31,6 +31,8 @@ the :attr:`host` to | |||||||
|  |  | ||||||
|     connect('project1', host='mongodb://localhost/database_name') |     connect('project1', host='mongodb://localhost/database_name') | ||||||
|  |  | ||||||
|  | .. note:: A URI containing SRV records (e.g. mongodb+srv://server.example.com/) can also be used as the :attr:`host` | ||||||
|  |  | ||||||
| .. note:: Database, username and password from URI string overrides | .. note:: Database, username and password from URI string overrides | ||||||
|     corresponding parameters in :func:`~mongoengine.connect`: :: |     corresponding parameters in :func:`~mongoengine.connect`: :: | ||||||
|  |  | ||||||
|   | |||||||
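
The added note mentions SRV connection strings; here is a minimal sketch of connecting that way (hostname, credentials, and database name are placeholders):

```python
from mongoengine import connect

# mongodb+srv:// URIs resolve replica-set members and options via DNS SRV records.
connect(host="mongodb+srv://user:password@cluster0.example.mongodb.net/project1")
```
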
| @@ -76,6 +76,7 @@ are as follows: | |||||||
| * :class:`~mongoengine.fields.EmailField` | * :class:`~mongoengine.fields.EmailField` | ||||||
| * :class:`~mongoengine.fields.EmbeddedDocumentField` | * :class:`~mongoengine.fields.EmbeddedDocumentField` | ||||||
| * :class:`~mongoengine.fields.EmbeddedDocumentListField` | * :class:`~mongoengine.fields.EmbeddedDocumentListField` | ||||||
|  | * :class:`~mongoengine.fields.EnumField` | ||||||
| * :class:`~mongoengine.fields.FileField` | * :class:`~mongoengine.fields.FileField` | ||||||
| * :class:`~mongoengine.fields.FloatField` | * :class:`~mongoengine.fields.FloatField` | ||||||
| * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | ||||||
| @@ -426,19 +427,6 @@ either a single field name, or a list or tuple of field names:: | |||||||
|         first_name = StringField() |         first_name = StringField() | ||||||
|         last_name = StringField(unique_with='first_name') |         last_name = StringField(unique_with='first_name') | ||||||
|  |  | ||||||
| Skipping Document validation on save |  | ||||||
| ------------------------------------ |  | ||||||
| You can also skip the whole document validation process by setting |  | ||||||
| ``validate=False`` when calling the :meth:`~mongoengine.document.Document.save` |  | ||||||
| method:: |  | ||||||
|  |  | ||||||
|     class Recipient(Document): |  | ||||||
|         name = StringField() |  | ||||||
|         email = EmailField() |  | ||||||
|  |  | ||||||
|     recipient = Recipient(name='admin', email='root@localhost') |  | ||||||
|     recipient.save()               # will raise a ValidationError while |  | ||||||
|     recipient.save(validate=False) # won't |  | ||||||
|  |  | ||||||
| Document collections | Document collections | ||||||
| ==================== | ==================== | ||||||
|   | |||||||
| @@ -41,35 +41,6 @@ already exist, then any changes will be updated atomically.  For example:: | |||||||
| .. seealso:: | .. seealso:: | ||||||
|     :ref:`guide-atomic-updates` |     :ref:`guide-atomic-updates` | ||||||
|  |  | ||||||
| Pre save data validation and cleaning |  | ||||||
| ------------------------------------- |  | ||||||
| MongoEngine allows you to create custom cleaning rules for your documents when |  | ||||||
| calling :meth:`~mongoengine.Document.save`.  By providing a custom |  | ||||||
| :meth:`~mongoengine.Document.clean` method you can do any pre validation / data |  | ||||||
| cleaning. |  | ||||||
|  |  | ||||||
| This might be useful if you want to ensure a default value based on other |  | ||||||
| document values for example:: |  | ||||||
|  |  | ||||||
|     class Essay(Document): |  | ||||||
|         status = StringField(choices=('Published', 'Draft'), required=True) |  | ||||||
|         pub_date = DateTimeField() |  | ||||||
|  |  | ||||||
|         def clean(self): |  | ||||||
|             """Ensures that only published essays have a `pub_date` and |  | ||||||
|             automatically sets `pub_date` if essay is published and `pub_date` |  | ||||||
|             is not set""" |  | ||||||
|             if self.status == 'Draft' and self.pub_date is not None: |  | ||||||
|                 msg = 'Draft entries should not have a publication date.' |  | ||||||
|                 raise ValidationError(msg) |  | ||||||
|             # Set the pub_date for published items if not set. |  | ||||||
|             if self.status == 'Published' and self.pub_date is None: |  | ||||||
|                 self.pub_date = datetime.now() |  | ||||||
|  |  | ||||||
| .. note:: |  | ||||||
|     Cleaning is only called if validation is turned on and when calling |  | ||||||
|     :meth:`~mongoengine.Document.save`. |  | ||||||
|  |  | ||||||
| Cascading Saves | Cascading Saves | ||||||
| --------------- | --------------- | ||||||
| If your document contains :class:`~mongoengine.fields.ReferenceField` or | If your document contains :class:`~mongoengine.fields.ReferenceField` or | ||||||
|   | |||||||
| @@ -10,6 +10,7 @@ User Guide | |||||||
|    defining-documents |    defining-documents | ||||||
|    document-instances |    document-instances | ||||||
|    querying |    querying | ||||||
|  |    validation | ||||||
|    gridfs |    gridfs | ||||||
|    signals |    signals | ||||||
|    text-indexes |    text-indexes | ||||||
|   | |||||||
| @@ -609,7 +609,7 @@ to push values with index:: | |||||||
| .. note:: | .. note:: | ||||||
|     Currently only top level lists are handled, future versions of mongodb / |     Currently only top level lists are handled, future versions of mongodb / | ||||||
|     pymongo plan to support nested positional operators.  See `The $ positional |     pymongo plan to support nested positional operators.  See `The $ positional | ||||||
|     operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_. |     operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_. | ||||||
|  |  | ||||||
| Server-side javascript execution | Server-side javascript execution | ||||||
| ================================ | ================================ | ||||||
|   | |||||||
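
For reference, the `$` positional operator discussed in the updated note is spelled `S` in MongoEngine update expressions; a short sketch with hypothetical `Post`/`Comment` models (requires an active connection):

```python
from mongoengine import (
    Document, EmbeddedDocument, EmbeddedDocumentField,
    IntField, ListField, StringField,
)

class Comment(EmbeddedDocument):
    by = StringField()
    votes = IntField(default=0)

class Post(Document):
    title = StringField()
    comments = ListField(EmbeddedDocumentField(Comment))

# Increments `votes` on the first matching comment; `S` maps to MongoDB's `$` positional operator.
Post.objects(comments__by="joe").update(inc__comments__S__votes=1)
```
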
							
								
								
									
123  docs/guide/validation.rst  (new file)

							| @@ -0,0 +1,123 @@ | |||||||
|  | ==================== | ||||||
|  | Document Validation | ||||||
|  | ==================== | ||||||
|  |  | ||||||
|  | By design, MongoEngine strictly validates the documents right before they are inserted in MongoDB | ||||||
|  | and makes sure they are consistent with the fields defined in your models. | ||||||
|  |  | ||||||
|  | MongoEngine assumes that the documents that exist in the DB are compliant with the schema. | ||||||
|  | This means that MongoEngine will not validate a document when an object is loaded from the DB into an instance | ||||||
|  | of your model, but this operation may fail under some circumstances (e.g. if there is a field in | ||||||
|  | the document fetched from the database that is not defined in your model). | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Built-in validation | ||||||
|  | =================== | ||||||
|  |  | ||||||
|  | MongoEngine provides various fields that encapsulate the corresponding validation | ||||||
|  | out of the box. Validation runs when calling `.validate()` or `.save()`. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     from mongoengine import Document, EmailField, IntField | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         email = EmailField() | ||||||
|  |         age = IntField(min_value=0, max_value=99) | ||||||
|  |  | ||||||
|  |     user = User(email='invalid@', age=24) | ||||||
|  |     user.validate()     # raises ValidationError (Invalid email address: ['email']) | ||||||
|  |     user.save()         # raises ValidationError (Invalid email address: ['email']) | ||||||
|  |  | ||||||
|  |     user2 = User(email='john.doe@garbage.com', age=1000) | ||||||
|  |     user2.save()        # raises ValidationError (Integer value is too large: ['age']) | ||||||
|  |  | ||||||
|  | Custom validation | ||||||
|  | ================= | ||||||
|  |  | ||||||
|  | The following features can be used to customize validation: | ||||||
|  |  | ||||||
|  | * Field `validation` parameter | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     def not_john_doe(name): | ||||||
|  |         if name == 'John Doe': | ||||||
|  |             raise ValidationError("John Doe is not a valid name") | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         full_name = StringField(validation=not_john_doe) | ||||||
|  |  | ||||||
|  |     Person(full_name='Billy Doe').save() | ||||||
|  |     Person(full_name='John Doe').save()  # raises ValidationError (John Doe is not a valid name) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | * Document `clean` method | ||||||
|  |  | ||||||
|  | This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide | ||||||
|  | custom model validation and/or to modify some of the field values prior to validation. | ||||||
|  | For instance, you could use it to automatically provide a value for a field, or to do validation | ||||||
|  | that requires access to more than a single field. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class Essay(Document): | ||||||
|  |         status = StringField(choices=('Published', 'Draft'), required=True) | ||||||
|  |         pub_date = DateTimeField() | ||||||
|  |  | ||||||
|  |         def clean(self): | ||||||
|  |             # Validate that only published essays have a `pub_date` | ||||||
|  |             if self.status == 'Draft' and self.pub_date is not None: | ||||||
|  |                 raise ValidationError('Draft entries should not have a publication date.') | ||||||
|  |             # Set the pub_date for published items if not set. | ||||||
|  |             if self.status == 'Published' and self.pub_date is None: | ||||||
|  |                 self.pub_date = datetime.now() | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |     Cleaning is only called if validation is turned on and when calling | ||||||
|  |     :meth:`~mongoengine.Document.save`. | ||||||
|  |  | ||||||
|  | * Adding custom Field classes | ||||||
|  |  | ||||||
|  | We recommend using the fields provided by MongoEngine as much as possible. However, it is also possible | ||||||
|  | to subclass a Field and encapsulate custom validation by overriding the `validate` method. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class AgeField(IntField): | ||||||
|  |  | ||||||
|  |         def validate(self, value): | ||||||
|  |             super(AgeField, self).validate(value)     # let IntField.validate run first | ||||||
|  |             if value == 60: | ||||||
|  |                 self.error('60 is not allowed') | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         age = AgeField(min_value=0, max_value=99) | ||||||
|  |  | ||||||
|  |     Person(age=20).save()   # passes | ||||||
|  |     Person(age=1000).save() # raises ValidationError (Integer value is too large: ['age']) | ||||||
|  |     Person(age=60).save()   # raises ValidationError (Person:None) (60 is not allowed: ['age']) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    When overriding `validate`, use `self.error("your-custom-error")` instead of raising ValidationError explicitly, | ||||||
|  |    as it provides better context in the error message. | ||||||
|  |  | ||||||
|  | Skipping validation | ||||||
|  | ==================== | ||||||
|  |  | ||||||
|  | Although discouraged, as it allows field constraints to be violated, if for some reason you need to disable | ||||||
|  | the validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`. | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         age = IntField(max_value=100) | ||||||
|  |  | ||||||
|  |     Person(age=1000).save()    # raises ValidationError (Integer value is too large) | ||||||
|  |  | ||||||
|  |     Person(age=1000).save(validate=False) | ||||||
|  |     person = Person.objects.first() | ||||||
|  |     assert person.age == 1000 | ||||||
|  |  | ||||||
							
								
								
									
3  docs/requirements.txt  (new file)

							| @@ -0,0 +1,3 @@ | |||||||
|  | pymongo>=3.11 | ||||||
|  | Sphinx==3.2.1 | ||||||
|  | sphinx-rtd-theme==0.5.0 | ||||||
| @@ -179,7 +179,7 @@ class BaseList(list): | |||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, "_mark_as_changed"): |         if hasattr(self._instance, "_mark_as_changed"): | ||||||
|             if key: |             if key is not None: | ||||||
|                 self._instance._mark_as_changed( |                 self._instance._mark_as_changed( | ||||||
|                     "{}.{}".format(self._name, key % len(self)) |                     "{}.{}".format(self._name, key % len(self)) | ||||||
|                 ) |                 ) | ||||||
| @@ -215,7 +215,7 @@ class EmbeddedDocumentList(BaseList): | |||||||
|         Filters the list by only including embedded documents with the |         Filters the list by only including embedded documents with the | ||||||
|         given keyword arguments. |         given keyword arguments. | ||||||
|  |  | ||||||
|         This method only supports simple comparison (e.g: .filter(name='John Doe')) |         This method only supports simple comparison (e.g. .filter(name='John Doe')) | ||||||
|         and does not support operators like __gte, __lte, __icontains like queryset.filter does |         and does not support operators like __gte, __lte, __icontains like queryset.filter does | ||||||
|  |  | ||||||
|         :param kwargs: The keyword arguments corresponding to the fields to |         :param kwargs: The keyword arguments corresponding to the fields to | ||||||
|   | |||||||
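
The `key is not None` change above matters because a list index of 0 is falsy in Python; a standalone illustration (not MongoEngine code) of the change paths that get marked:

```python
def changed_path_old(name, key=None):
    if key:                      # old check: index 0 falls through
        return "{}.{}".format(name, key)
    return name

def changed_path_new(name, key=None):
    if key is not None:          # new check: index 0 is tracked per element
        return "{}.{}".format(name, key)
    return name

assert changed_path_old("tags", 0) == "tags"       # whole list marked as changed
assert changed_path_new("tags", 0) == "tags.0"     # only the first element marked as changed
```
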
| @@ -64,8 +64,6 @@ class BaseDocument: | |||||||
|             It may contain additional reserved keywords, e.g. "__auto_convert". |             It may contain additional reserved keywords, e.g. "__auto_convert". | ||||||
|         :param __auto_convert: If True, supplied values will be converted |         :param __auto_convert: If True, supplied values will be converted | ||||||
|             to Python-type values via each field's `to_python` method. |             to Python-type values via each field's `to_python` method. | ||||||
|         :param __only_fields: A set of fields that have been loaded for |  | ||||||
|             this document. Empty if all fields have been loaded. |  | ||||||
|         :param _created: Indicates whether this is a brand new document |         :param _created: Indicates whether this is a brand new document | ||||||
|             or whether it's already been persisted before. Defaults to true. |             or whether it's already been persisted before. Defaults to true. | ||||||
|         """ |         """ | ||||||
| @@ -80,8 +78,6 @@ class BaseDocument: | |||||||
|  |  | ||||||
|         __auto_convert = values.pop("__auto_convert", True) |         __auto_convert = values.pop("__auto_convert", True) | ||||||
|  |  | ||||||
|         __only_fields = set(values.pop("__only_fields", values)) |  | ||||||
|  |  | ||||||
|         _created = values.pop("_created", True) |         _created = values.pop("_created", True) | ||||||
|  |  | ||||||
|         signals.pre_init.send(self.__class__, document=self, values=values) |         signals.pre_init.send(self.__class__, document=self, values=values) | ||||||
| @@ -106,10 +102,8 @@ class BaseDocument: | |||||||
|         self._dynamic_fields = SON() |         self._dynamic_fields = SON() | ||||||
|  |  | ||||||
|         # Assign default values to the instance. |         # Assign default values to the instance. | ||||||
|         # We set default values only for fields loaded from DB. See |  | ||||||
|         # https://github.com/mongoengine/mongoengine/issues/399 for more info. |  | ||||||
|         for key, field in self._fields.items(): |         for key, field in self._fields.items(): | ||||||
|             if self._db_field_map.get(key, key) in __only_fields: |             if self._db_field_map.get(key, key) in values: | ||||||
|                 continue |                 continue | ||||||
|             value = getattr(self, key, None) |             value = getattr(self, key, None) | ||||||
|             setattr(self, key, value) |             setattr(self, key, value) | ||||||
| @@ -117,25 +111,22 @@ class BaseDocument: | |||||||
|         if "_cls" not in values: |         if "_cls" not in values: | ||||||
|             self._cls = self._class_name |             self._cls = self._class_name | ||||||
|  |  | ||||||
|         # Set passed values after initialisation |         # Set actual values | ||||||
|         if self._dynamic: |         dynamic_data = {} | ||||||
|             dynamic_data = {} |         FileField = _import_class("FileField") | ||||||
|             for key, value in values.items(): |         for key, value in values.items(): | ||||||
|                 if key in self._fields or key == "_id": |             key = self._reverse_db_field_map.get(key, key) | ||||||
|                     setattr(self, key, value) |             field = self._fields.get(key) | ||||||
|                 else: |             if field or key in ("id", "pk", "_cls"): | ||||||
|  |                 if __auto_convert and value is not None: | ||||||
|  |                     if field and not isinstance(field, FileField): | ||||||
|  |                         value = field.to_python(value) | ||||||
|  |                 setattr(self, key, value) | ||||||
|  |             else: | ||||||
|  |                 if self._dynamic: | ||||||
|                     dynamic_data[key] = value |                     dynamic_data[key] = value | ||||||
|         else: |  | ||||||
|             FileField = _import_class("FileField") |  | ||||||
|             for key, value in values.items(): |  | ||||||
|                 key = self._reverse_db_field_map.get(key, key) |  | ||||||
|                 if key in self._fields or key in ("id", "pk", "_cls"): |  | ||||||
|                     if __auto_convert and value is not None: |  | ||||||
|                         field = self._fields.get(key) |  | ||||||
|                         if field and not isinstance(field, FileField): |  | ||||||
|                             value = field.to_python(value) |  | ||||||
|                     setattr(self, key, value) |  | ||||||
|                 else: |                 else: | ||||||
|  |                     # For strict Document | ||||||
|                     self._data[key] = value |                     self._data[key] = value | ||||||
|  |  | ||||||
|         # Set any get_<field>_display methods |         # Set any get_<field>_display methods | ||||||
| @@ -314,7 +305,8 @@ class BaseDocument: | |||||||
|  |  | ||||||
|     def clean(self): |     def clean(self): | ||||||
|         """ |         """ | ||||||
|         Hook for doing document level data cleaning before validation is run. |         Hook for doing document level data cleaning (usually validation or assignment) | ||||||
|  |         before validation is run. | ||||||
|  |  | ||||||
|         Any ValidationError raised by this method will not be associated with |         Any ValidationError raised by this method will not be associated with | ||||||
|         a particular field; it will have a special-case association with the |         a particular field; it will have a special-case association with the | ||||||
| @@ -537,6 +529,9 @@ class BaseDocument: | |||||||
|         """Using _get_changed_fields iterate and remove any fields that |         """Using _get_changed_fields iterate and remove any fields that | ||||||
|         are marked as changed. |         are marked as changed. | ||||||
|         """ |         """ | ||||||
|  |         ReferenceField = _import_class("ReferenceField") | ||||||
|  |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|  |  | ||||||
|         for changed in self._get_changed_fields(): |         for changed in self._get_changed_fields(): | ||||||
|             parts = changed.split(".") |             parts = changed.split(".") | ||||||
|             data = self |             data = self | ||||||
| @@ -549,7 +544,8 @@ class BaseDocument: | |||||||
|                 elif isinstance(data, dict): |                 elif isinstance(data, dict): | ||||||
|                     data = data.get(part, None) |                     data = data.get(part, None) | ||||||
|                 else: |                 else: | ||||||
|                     data = getattr(data, part, None) |                     field_name = data._reverse_db_field_map.get(part, part) | ||||||
|  |                     data = getattr(data, field_name, None) | ||||||
|  |  | ||||||
|                 if not isinstance(data, LazyReference) and hasattr( |                 if not isinstance(data, LazyReference) and hasattr( | ||||||
|                     data, "_changed_fields" |                     data, "_changed_fields" | ||||||
| @@ -558,10 +554,40 @@ class BaseDocument: | |||||||
|                         continue |                         continue | ||||||
|  |  | ||||||
|                     data._changed_fields = [] |                     data._changed_fields = [] | ||||||
|  |                 elif isinstance(data, (list, tuple, dict)): | ||||||
|  |                     if hasattr(data, "field") and isinstance( | ||||||
|  |                         data.field, (ReferenceField, GenericReferenceField) | ||||||
|  |                     ): | ||||||
|  |                         continue | ||||||
|  |                     BaseDocument._nestable_types_clear_changed_fields(data) | ||||||
|  |  | ||||||
|         self._changed_fields = [] |         self._changed_fields = [] | ||||||
|  |  | ||||||
|     def _nestable_types_changed_fields(self, changed_fields, base_key, data): |     @staticmethod | ||||||
|  |     def _nestable_types_clear_changed_fields(data): | ||||||
|  |         """Inspect nested data for changed fields | ||||||
|  |  | ||||||
|  |         :param data: data to inspect for changes | ||||||
|  |         """ | ||||||
|  |         Document = _import_class("Document") | ||||||
|  |  | ||||||
|  |         # Loop list / dict fields as they contain documents | ||||||
|  |         # Determine the iterator to use | ||||||
|  |         if not hasattr(data, "items"): | ||||||
|  |             iterator = enumerate(data) | ||||||
|  |         else: | ||||||
|  |             iterator = data.items() | ||||||
|  |  | ||||||
|  |         for index_or_key, value in iterator: | ||||||
|  |             if hasattr(value, "_get_changed_fields") and not isinstance( | ||||||
|  |                 value, Document | ||||||
|  |             ):  # don't follow references | ||||||
|  |                 value._clear_changed_fields() | ||||||
|  |             elif isinstance(value, (list, tuple, dict)): | ||||||
|  |                 BaseDocument._nestable_types_clear_changed_fields(value) | ||||||
|  |  | ||||||
|  |     @staticmethod | ||||||
|  |     def _nestable_types_changed_fields(changed_fields, base_key, data): | ||||||
|         """Inspect nested data for changed fields |         """Inspect nested data for changed fields | ||||||
|  |  | ||||||
|         :param changed_fields: Previously collected changed fields |         :param changed_fields: Previously collected changed fields | ||||||
| @@ -586,7 +612,9 @@ class BaseDocument: | |||||||
|                 changed = value._get_changed_fields() |                 changed = value._get_changed_fields() | ||||||
|                 changed_fields += ["{}{}".format(item_key, k) for k in changed if k] |                 changed_fields += ["{}{}".format(item_key, k) for k in changed if k] | ||||||
|             elif isinstance(value, (list, tuple, dict)): |             elif isinstance(value, (list, tuple, dict)): | ||||||
|                 self._nestable_types_changed_fields(changed_fields, item_key, value) |                 BaseDocument._nestable_types_changed_fields( | ||||||
|  |                     changed_fields, item_key, value | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|     def _get_changed_fields(self): |     def _get_changed_fields(self): | ||||||
|         """Return a list of all fields that have explicitly been changed. |         """Return a list of all fields that have explicitly been changed. | ||||||
| @@ -721,11 +749,8 @@ class BaseDocument: | |||||||
|         return cls._meta.get("collection", None) |         return cls._meta.get("collection", None) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): |     def _from_son(cls, son, _auto_dereference=True, created=False): | ||||||
|         """Create an instance of a Document (subclass) from a PyMongo SON.""" |         """Create an instance of a Document (subclass) from a PyMongo SON.""" | ||||||
|         if not only_fields: |  | ||||||
|             only_fields = [] |  | ||||||
|  |  | ||||||
|         if son and not isinstance(son, dict): |         if son and not isinstance(son, dict): | ||||||
|             raise ValueError( |             raise ValueError( | ||||||
|                 "The source SON object needs to be of type 'dict' but a '%s' was found" |                 "The source SON object needs to be of type 'dict' but a '%s' was found" | ||||||
| @@ -780,9 +805,7 @@ class BaseDocument: | |||||||
|         if cls.STRICT: |         if cls.STRICT: | ||||||
|             data = {k: v for k, v in data.items() if k in cls._fields} |             data = {k: v for k, v in data.items() if k in cls._fields} | ||||||
|  |  | ||||||
|         obj = cls( |         obj = cls(__auto_convert=False, _created=created, **data) | ||||||
|             __auto_convert=False, _created=created, __only_fields=only_fields, **data |  | ||||||
|         ) |  | ||||||
|         obj._changed_fields = [] |         obj._changed_fields = [] | ||||||
|         if not _auto_dereference: |         if not _auto_dereference: | ||||||
|             obj._fields = fields |             obj._fields = fields | ||||||
|   | |||||||
| @@ -464,9 +464,9 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | |||||||
|             # insert_one will provoke UniqueError alongside save does not |             # insert_one will provoke UniqueError alongside save does not | ||||||
|             # therefore, it need to catch and call replace_one. |             # therefore, it need to catch and call replace_one. | ||||||
|             if "_id" in doc: |             if "_id" in doc: | ||||||
|                 raw_object = wc_collection.find_one_and_replace( |                 select_dict = {"_id": doc["_id"]} | ||||||
|                     {"_id": doc["_id"]}, doc |                 select_dict = self._integrate_shard_key(doc, select_dict) | ||||||
|                 ) |                 raw_object = wc_collection.find_one_and_replace(select_dict, doc) | ||||||
|                 if raw_object: |                 if raw_object: | ||||||
|                     return doc["_id"] |                     return doc["_id"] | ||||||
|  |  | ||||||
| @@ -489,6 +489,23 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | |||||||
|  |  | ||||||
|         return update_doc |         return update_doc | ||||||
|  |  | ||||||
|  |     def _integrate_shard_key(self, doc, select_dict): | ||||||
|  |         """Integrates the collection's shard key to the `select_dict`, which will be used for the query. | ||||||
|  |         The value from the shard key is taken from the `doc` and finally the select_dict is returned. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         # Need to add shard key to query, or you get an error | ||||||
|  |         shard_key = self._meta.get("shard_key", tuple()) | ||||||
|  |         for k in shard_key: | ||||||
|  |             path = self._lookup_field(k.split(".")) | ||||||
|  |             actual_key = [p.db_field for p in path] | ||||||
|  |             val = doc | ||||||
|  |             for ak in actual_key: | ||||||
|  |                 val = val[ak] | ||||||
|  |             select_dict[".".join(actual_key)] = val | ||||||
|  |  | ||||||
|  |         return select_dict | ||||||
|  |  | ||||||
|     def _save_update(self, doc, save_condition, write_concern): |     def _save_update(self, doc, save_condition, write_concern): | ||||||
|         """Update an existing document. |         """Update an existing document. | ||||||
|  |  | ||||||
| @@ -504,15 +521,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | |||||||
|  |  | ||||||
|         select_dict["_id"] = object_id |         select_dict["_id"] = object_id | ||||||
|  |  | ||||||
|         # Need to add shard key to query, or you get an error |         select_dict = self._integrate_shard_key(doc, select_dict) | ||||||
|         shard_key = self._meta.get("shard_key", tuple()) |  | ||||||
|         for k in shard_key: |  | ||||||
|             path = self._lookup_field(k.split(".")) |  | ||||||
|             actual_key = [p.db_field for p in path] |  | ||||||
|             val = doc |  | ||||||
|             for ak in actual_key: |  | ||||||
|                 val = val[ak] |  | ||||||
|             select_dict[".".join(actual_key)] = val |  | ||||||
|  |  | ||||||
|         update_doc = self._get_update_doc() |         update_doc = self._get_update_doc() | ||||||
|         if update_doc: |         if update_doc: | ||||||
| @@ -639,7 +648,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | |||||||
|                 write_concern=write_concern, _from_doc_delete=True |                 write_concern=write_concern, _from_doc_delete=True | ||||||
|             ) |             ) | ||||||
|         except pymongo.errors.OperationFailure as err: |         except pymongo.errors.OperationFailure as err: | ||||||
|             message = "Could not delete document (%s)" % err.message |             message = "Could not delete document (%s)" % err.args | ||||||
|             raise OperationError(message) |             raise OperationError(message) | ||||||
|         signals.post_delete.send(self.__class__, document=self, **signal_kwargs) |         signals.post_delete.send(self.__class__, document=self, **signal_kwargs) | ||||||
|  |  | ||||||
| @@ -919,7 +928,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | |||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def list_indexes(cls): |     def list_indexes(cls): | ||||||
|         """ Lists all of the indexes that should be created for given |         """Lists all of the indexes that should be created for given | ||||||
|         collection. It includes all the indexes from super- and sub-classes. |         collection. It includes all the indexes from super- and sub-classes. | ||||||
|         """ |         """ | ||||||
|         if cls._meta.get("abstract"): |         if cls._meta.get("abstract"): | ||||||
| @@ -984,7 +993,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | |||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def compare_indexes(cls): |     def compare_indexes(cls): | ||||||
|         """ Compares the indexes defined in MongoEngine with the ones |         """Compares the indexes defined in MongoEngine with the ones | ||||||
|         existing in the database. Returns any missing/extra indexes. |         existing in the database. Returns any missing/extra indexes. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|   | |||||||
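
The `_integrate_shard_key` helper extracted above reads the shard key from the document's meta so that queries built while saving include it in the selector; a hedged sketch of a model that would exercise it (field names are illustrative):

```python
from mongoengine import Document, StringField

class LogEntry(Document):
    machine = StringField()
    node = StringField()
    message = StringField()

    meta = {
        # Queries built when saving must include the shard key,
        # which is what _integrate_shard_key adds to select_dict.
        "shard_key": ("machine", "node"),
    }
```
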
| @@ -87,6 +87,7 @@ __all__ = ( | |||||||
|     "PolygonField", |     "PolygonField", | ||||||
|     "SequenceField", |     "SequenceField", | ||||||
|     "UUIDField", |     "UUIDField", | ||||||
|  |     "EnumField", | ||||||
|     "MultiPointField", |     "MultiPointField", | ||||||
|     "MultiLineStringField", |     "MultiLineStringField", | ||||||
|     "MultiPolygonField", |     "MultiPolygonField", | ||||||
| @@ -433,7 +434,7 @@ class DecimalField(BaseField): | |||||||
|         :param max_value: Validation rule for the maximum acceptable value. |         :param max_value: Validation rule for the maximum acceptable value. | ||||||
|         :param force_string: Store the value as a string (instead of a float). |         :param force_string: Store the value as a string (instead of a float). | ||||||
|          Be aware that this affects query sorting and operation like lte, gte (as string comparison is applied) |          Be aware that this affects query sorting and operation like lte, gte (as string comparison is applied) | ||||||
|          and some query operator won't work (e.g: inc, dec) |          and some query operator won't work (e.g. inc, dec) | ||||||
|         :param precision: Number of decimal places to store. |         :param precision: Number of decimal places to store. | ||||||
|         :param rounding: The rounding rule from the python decimal library: |         :param rounding: The rounding rule from the python decimal library: | ||||||
|  |  | ||||||
| @@ -773,6 +774,9 @@ class EmbeddedDocumentField(BaseField): | |||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         if value is not None and not isinstance(value, self.document_type): |         if value is not None and not isinstance(value, self.document_type): | ||||||
|  |             # Short circuit for special operators, returning them as is | ||||||
|  |             if isinstance(value, dict) and all(k.startswith("$") for k in value.keys()): | ||||||
|  |                 return value | ||||||
|             try: |             try: | ||||||
|                 value = self.document_type._from_son(value) |                 value = self.document_type._from_son(value) | ||||||
|             except ValueError: |             except ValueError: | ||||||
| @@ -844,8 +848,7 @@ class DynamicField(BaseField): | |||||||
|     Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" |     Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" | ||||||
|  |  | ||||||
|     def to_mongo(self, value, use_db_field=True, fields=None): |     def to_mongo(self, value, use_db_field=True, fields=None): | ||||||
|         """Convert a Python type to a MongoDB compatible type. |         """Convert a Python type to a MongoDB compatible type.""" | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         if isinstance(value, str): |         if isinstance(value, str): | ||||||
|             return value |             return value | ||||||
| @@ -1619,6 +1622,70 @@ class BinaryField(BaseField): | |||||||
|         return super().prepare_query_value(op, self.to_mongo(value)) |         return super().prepare_query_value(op, self.to_mongo(value)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class EnumField(BaseField): | ||||||
|  |     """Enumeration Field. Values are stored underneath as is, | ||||||
|  |     so it will only work with simple types (str, int, etc) that | ||||||
|  |     are bson encodable | ||||||
|  |      Example usage: | ||||||
|  |     .. code-block:: python | ||||||
|  |  | ||||||
|  |         class Status(Enum): | ||||||
|  |             NEW = 'new' | ||||||
|  |             DONE = 'done' | ||||||
|  |  | ||||||
|  |         class ModelWithEnum(Document): | ||||||
|  |             status = EnumField(Status, default=Status.NEW) | ||||||
|  |  | ||||||
|  |         ModelWithEnum(status='done') | ||||||
|  |         ModelWithEnum(status=Status.DONE) | ||||||
|  |  | ||||||
|  |     Enum fields can be searched using enum or its value: | ||||||
|  |     .. code-block:: python | ||||||
|  |  | ||||||
|  |         ModelWithEnum.objects(status='new').count() | ||||||
|  |         ModelWithEnum.objects(status=Status.NEW).count() | ||||||
|  |  | ||||||
|  |     Note that choices cannot be set explicitly, they are derived | ||||||
|  |     from the provided enum class. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, enum, **kwargs): | ||||||
|  |         self._enum_cls = enum | ||||||
|  |         if "choices" in kwargs: | ||||||
|  |             raise ValueError( | ||||||
|  |                 "'choices' can't be set on EnumField, " | ||||||
|  |                 "it is implicitly set as the enum class" | ||||||
|  |             ) | ||||||
|  |         kwargs["choices"] = list(self._enum_cls) | ||||||
|  |         super().__init__(**kwargs) | ||||||
|  |  | ||||||
|  |     def __set__(self, instance, value): | ||||||
|  |         is_legal_value = value is None or isinstance(value, self._enum_cls) | ||||||
|  |         if not is_legal_value: | ||||||
|  |             try: | ||||||
|  |                 value = self._enum_cls(value) | ||||||
|  |             except Exception: | ||||||
|  |                 pass | ||||||
|  |         return super().__set__(instance, value) | ||||||
|  |  | ||||||
|  |     def to_mongo(self, value): | ||||||
|  |         if isinstance(value, self._enum_cls): | ||||||
|  |             return value.value | ||||||
|  |         return value | ||||||
|  |  | ||||||
|  |     def validate(self, value): | ||||||
|  |         if value and not isinstance(value, self._enum_cls): | ||||||
|  |             try: | ||||||
|  |                 self._enum_cls(value) | ||||||
|  |             except Exception as e: | ||||||
|  |                 self.error(str(e)) | ||||||
|  |  | ||||||
|  |     def prepare_query_value(self, op, value): | ||||||
|  |         if value is None: | ||||||
|  |             return value | ||||||
|  |         return super().prepare_query_value(op, self.to_mongo(value)) | ||||||
|  |  | ||||||
|  |  | ||||||
| class GridFSError(Exception): | class GridFSError(Exception): | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
| @@ -2042,7 +2109,7 @@ class ImageField(FileField): | |||||||
|  |  | ||||||
| class SequenceField(BaseField): | class SequenceField(BaseField): | ||||||
|     """Provides a sequential counter see: |     """Provides a sequential counter see: | ||||||
|      http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers |      https://docs.mongodb.com/manual/reference/method/ObjectId/#ObjectIDs-SequenceNumbers | ||||||
|  |  | ||||||
|     .. note:: |     .. note:: | ||||||
|  |  | ||||||
|   | |||||||
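
A short usage sketch of the new `EnumField`, following the docstring above (assumes a local MongoDB; database and model names are hypothetical):

```python
from enum import Enum
from mongoengine import Document, connect
from mongoengine.fields import EnumField

class Status(Enum):
    NEW = "new"
    DONE = "done"

class Task(Document):
    status = EnumField(Status, default=Status.NEW)

connect("enumfield_demo")
Task.drop_collection()
Task(status="done").save()                              # raw values are coerced to the enum
assert Task.objects.first().status == Status.DONE       # stored as "done", rehydrated as Status.DONE
assert Task.objects(status=Status.DONE).count() == 1    # query by enum member or raw value
```
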
| @@ -2,6 +2,7 @@ | |||||||
| Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. | Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. | ||||||
| """ | """ | ||||||
| import pymongo | import pymongo | ||||||
|  | from pymongo.errors import OperationFailure | ||||||
|  |  | ||||||
| _PYMONGO_37 = (3, 7) | _PYMONGO_37 = (3, 7) | ||||||
|  |  | ||||||
| @@ -10,13 +11,41 @@ PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) | |||||||
| IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 | IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 | ||||||
|  |  | ||||||
|  |  | ||||||
| def count_documents(collection, filter): | def count_documents( | ||||||
|     """Pymongo>3.7 deprecates count in favour of count_documents""" |     collection, filter, skip=None, limit=None, hint=None, collation=None | ||||||
|  | ): | ||||||
|  |     """Pymongo>3.7 deprecates count in favour of count_documents | ||||||
|  |     """ | ||||||
|  |     if limit == 0: | ||||||
|  |         return 0  # Pymongo raises an OperationFailure if called with limit=0 | ||||||
|  |  | ||||||
|  |     kwargs = {} | ||||||
|  |     if skip is not None: | ||||||
|  |         kwargs["skip"] = skip | ||||||
|  |     if limit is not None: | ||||||
|  |         kwargs["limit"] = limit | ||||||
|  |     if hint not in (-1, None): | ||||||
|  |         kwargs["hint"] = hint | ||||||
|  |     if collation is not None: | ||||||
|  |         kwargs["collation"] = collation | ||||||
|  |  | ||||||
|  |     # count_documents appeared in pymongo 3.7 | ||||||
|     if IS_PYMONGO_GTE_37: |     if IS_PYMONGO_GTE_37: | ||||||
|         return collection.count_documents(filter) |         try: | ||||||
|     else: |             return collection.count_documents(filter=filter, **kwargs) | ||||||
|         count = collection.find(filter).count() |         except OperationFailure: | ||||||
|     return count |             # OperationFailure - accounts for some operators that used to work | ||||||
|  |             # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere) | ||||||
|  |             # fallback to deprecated Cursor.count | ||||||
|  |             # Keeping this should be reevaluated the day pymongo removes .count entirely | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |     cursor = collection.find(filter) | ||||||
|  |     for option, option_value in kwargs.items(): | ||||||
|  |         cursor_method = getattr(cursor, option) | ||||||
|  |         cursor = cursor_method(option_value) | ||||||
|  |     with_limit_and_skip = "skip" in kwargs or "limit" in kwargs | ||||||
|  |     return cursor.count(with_limit_and_skip=with_limit_and_skip) | ||||||
|  |  | ||||||
|  |  | ||||||
| def list_collection_names(db, include_system_collections=False): | def list_collection_names(db, include_system_collections=False): | ||||||
|   | |||||||
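
For context, these are the two PyMongo counting APIs the shim above bridges (the collection handle is a placeholder; requires a local MongoDB):

```python
from pymongo import MongoClient

collection = MongoClient()["demo"]["items"]

legacy = collection.find({"status": "new"}).count()               # Cursor.count, deprecated since PyMongo 3.7
modern = collection.count_documents({"status": "new"}, limit=10)  # preferred replacement
```
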
| @@ -29,6 +29,7 @@ from mongoengine.errors import ( | |||||||
|     NotUniqueError, |     NotUniqueError, | ||||||
|     OperationError, |     OperationError, | ||||||
| ) | ) | ||||||
|  | from mongoengine.pymongo_support import count_documents | ||||||
| from mongoengine.queryset import transform | from mongoengine.queryset import transform | ||||||
| from mongoengine.queryset.field_list import QueryFieldList | from mongoengine.queryset.field_list import QueryFieldList | ||||||
| from mongoengine.queryset.visitor import Q, QNode | from mongoengine.queryset.visitor import Q, QNode | ||||||
| @@ -83,13 +84,20 @@ class BaseQuerySet: | |||||||
|         self._cursor_obj = None |         self._cursor_obj = None | ||||||
|         self._limit = None |         self._limit = None | ||||||
|         self._skip = None |         self._skip = None | ||||||
|  |  | ||||||
|         self._hint = -1  # Using -1 as None is a valid value for hint |         self._hint = -1  # Using -1 as None is a valid value for hint | ||||||
|         self._collation = None |         self._collation = None | ||||||
|         self._batch_size = None |         self._batch_size = None | ||||||
|         self.only_fields = [] |  | ||||||
|         self._max_time_ms = None |         self._max_time_ms = None | ||||||
|         self._comment = None |         self._comment = None | ||||||
|  |  | ||||||
|  |         # Hack: people expect cursor[5:5] to return | ||||||
|  |         # an empty result set. It's hard to do that right, though, because the | ||||||
|  |         # server uses limit(0) to mean 'no limit'. So we set _empty | ||||||
|  |         # in that case and check for it when iterating. We also unset | ||||||
|  |         # it anytime we change _limit. Inspired by how it is done in pymongo.Cursor | ||||||
|  |         self._empty = False | ||||||
|  |  | ||||||
|     def __call__(self, q_obj=None, **query): |     def __call__(self, q_obj=None, **query): | ||||||
|         """Filter the selected documents by calling the |         """Filter the selected documents by calling the | ||||||
|         :class:`~mongoengine.queryset.QuerySet` with a query. |         :class:`~mongoengine.queryset.QuerySet` with a query. | ||||||
| @@ -162,6 +170,7 @@ class BaseQuerySet: | |||||||
|         [<User: User object>, <User: User object>] |         [<User: User object>, <User: User object>] | ||||||
|         """ |         """ | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|  |         queryset._empty = False | ||||||
|  |  | ||||||
|         # Handle a slice |         # Handle a slice | ||||||
|         if isinstance(key, slice): |         if isinstance(key, slice): | ||||||
| @@ -169,6 +178,8 @@ class BaseQuerySet: | |||||||
|             queryset._skip, queryset._limit = key.start, key.stop |             queryset._skip, queryset._limit = key.start, key.stop | ||||||
|             if key.start and key.stop: |             if key.start and key.stop: | ||||||
|                 queryset._limit = key.stop - key.start |                 queryset._limit = key.stop - key.start | ||||||
|  |             if queryset._limit == 0: | ||||||
|  |                 queryset._empty = True | ||||||
|  |  | ||||||
|             # Allow further QuerySet modifications to be performed |             # Allow further QuerySet modifications to be performed | ||||||
|             return queryset |             return queryset | ||||||
| @@ -178,9 +189,7 @@ class BaseQuerySet: | |||||||
|             if queryset._scalar: |             if queryset._scalar: | ||||||
|                 return queryset._get_scalar( |                 return queryset._get_scalar( | ||||||
|                     queryset._document._from_son( |                     queryset._document._from_son( | ||||||
|                         queryset._cursor[key], |                         queryset._cursor[key], _auto_dereference=self._auto_dereference, | ||||||
|                         _auto_dereference=self._auto_dereference, |  | ||||||
|                         only_fields=self.only_fields, |  | ||||||
|                     ) |                     ) | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
| @@ -188,9 +197,7 @@ class BaseQuerySet: | |||||||
|                 return queryset._cursor[key] |                 return queryset._cursor[key] | ||||||
|  |  | ||||||
|             return queryset._document._from_son( |             return queryset._document._from_son( | ||||||
|                 queryset._cursor[key], |                 queryset._cursor[key], _auto_dereference=self._auto_dereference, | ||||||
|                 _auto_dereference=self._auto_dereference, |  | ||||||
|                 only_fields=self.only_fields, |  | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         raise TypeError("Provide a slice or an integer index") |         raise TypeError("Provide a slice or an integer index") | ||||||
| @@ -394,9 +401,36 @@ class BaseQuerySet: | |||||||
|             :meth:`skip` that has been applied to this cursor into account when |             :meth:`skip` that has been applied to this cursor into account when | ||||||
|             getting the count |             getting the count | ||||||
|         """ |         """ | ||||||
|         if self._limit == 0 and with_limit_and_skip is False or self._none: |         # mimic the fact that setting .limit(0) in pymongo sets no limit | ||||||
|  |         # https://docs.mongodb.com/manual/reference/method/cursor.limit/#zero-value | ||||||
|  |         if ( | ||||||
|  |             self._limit == 0 | ||||||
|  |             and with_limit_and_skip is False | ||||||
|  |             or self._none | ||||||
|  |             or self._empty | ||||||
|  |         ): | ||||||
|             return 0 |             return 0 | ||||||
|         count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) |  | ||||||
|  |         kwargs = ( | ||||||
|  |             {"limit": self._limit, "skip": self._skip} if with_limit_and_skip else {} | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         if self._limit == 0: | ||||||
|  |             # mimic the fact that historically .limit(0) sets no limit | ||||||
|  |             kwargs.pop("limit", None) | ||||||
|  |  | ||||||
|  |         if self._hint not in (-1, None): | ||||||
|  |             kwargs["hint"] = self._hint | ||||||
|  |  | ||||||
|  |         if self._collation: | ||||||
|  |             kwargs["collation"] = self._collation | ||||||
|  |  | ||||||
|  |         count = count_documents( | ||||||
|  |             collection=self._cursor.collection, | ||||||
|  |             filter=self._cursor._Cursor__spec, | ||||||
|  |             **kwargs | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         self._cursor_obj = None |         self._cursor_obj = None | ||||||
|         return count |         return count | ||||||
|  |  | ||||||
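count() now forwards the queryset's skip, limit, hint and collation to the count_documents helper instead of relying on the removed Cursor.count. A hedged illustration (Person is again a stand-in document class):

qs = Person.objects.skip(2).limit(3)
qs.count()                          # ignores skip/limit: total number of matches
qs.count(with_limit_and_skip=True)  # at most 3, counted after skipping 2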
| @@ -680,12 +714,10 @@ class BaseQuerySet: | |||||||
|  |  | ||||||
|         if full_response: |         if full_response: | ||||||
|             if result["value"] is not None: |             if result["value"] is not None: | ||||||
|                 result["value"] = self._document._from_son( |                 result["value"] = self._document._from_son(result["value"]) | ||||||
|                     result["value"], only_fields=self.only_fields |  | ||||||
|                 ) |  | ||||||
|         else: |         else: | ||||||
|             if result is not None: |             if result is not None: | ||||||
|                 result = self._document._from_son(result, only_fields=self.only_fields) |                 result = self._document._from_son(result) | ||||||
|  |  | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
| @@ -718,24 +750,22 @@ class BaseQuerySet: | |||||||
|         docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args) |         docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args) | ||||||
|         if self._scalar: |         if self._scalar: | ||||||
|             for doc in docs: |             for doc in docs: | ||||||
|                 doc_map[doc["_id"]] = self._get_scalar( |                 doc_map[doc["_id"]] = self._get_scalar(self._document._from_son(doc)) | ||||||
|                     self._document._from_son(doc, only_fields=self.only_fields) |  | ||||||
|                 ) |  | ||||||
|         elif self._as_pymongo: |         elif self._as_pymongo: | ||||||
|             for doc in docs: |             for doc in docs: | ||||||
|                 doc_map[doc["_id"]] = doc |                 doc_map[doc["_id"]] = doc | ||||||
|         else: |         else: | ||||||
|             for doc in docs: |             for doc in docs: | ||||||
|                 doc_map[doc["_id"]] = self._document._from_son( |                 doc_map[doc["_id"]] = self._document._from_son( | ||||||
|                     doc, |                     doc, _auto_dereference=self._auto_dereference, | ||||||
|                     only_fields=self.only_fields, |  | ||||||
|                     _auto_dereference=self._auto_dereference, |  | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|         return doc_map |         return doc_map | ||||||
|  |  | ||||||
|     def none(self): |     def none(self): | ||||||
|         """Helper that just returns a list""" |         """Returns a queryset that never returns any objects and no query will be executed when accessing the results | ||||||
|  |         inspired by django none() https://docs.djangoproject.com/en/dev/ref/models/querysets/#none | ||||||
|  |         """ | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         queryset._none = True |         queryset._none = True | ||||||
|         return queryset |         return queryset | ||||||
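A quick sketch of the documented none() behaviour (Person is a placeholder class); the _none flag survives cloning, so further chaining stays empty:

qs = Person.objects.none()
assert qs.count() == 0
assert list(qs) == []            # no query is sent to the server
assert list(qs.limit(1)) == []   # chaining keeps the queryset empty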
| @@ -789,16 +819,17 @@ class BaseQuerySet: | |||||||
|             "_snapshot", |             "_snapshot", | ||||||
|             "_timeout", |             "_timeout", | ||||||
|             "_read_preference", |             "_read_preference", | ||||||
|  |             "_read_concern", | ||||||
|             "_iter", |             "_iter", | ||||||
|             "_scalar", |             "_scalar", | ||||||
|             "_as_pymongo", |             "_as_pymongo", | ||||||
|             "_limit", |             "_limit", | ||||||
|             "_skip", |             "_skip", | ||||||
|  |             "_empty", | ||||||
|             "_hint", |             "_hint", | ||||||
|             "_collation", |             "_collation", | ||||||
|             "_auto_dereference", |             "_auto_dereference", | ||||||
|             "_search_text", |             "_search_text", | ||||||
|             "only_fields", |  | ||||||
|             "_max_time_ms", |             "_max_time_ms", | ||||||
|             "_comment", |             "_comment", | ||||||
|             "_batch_size", |             "_batch_size", | ||||||
| @@ -834,6 +865,7 @@ class BaseQuerySet: | |||||||
|         """ |         """ | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         queryset._limit = n |         queryset._limit = n | ||||||
|  |         queryset._empty = False  # cancels the effect of empty | ||||||
|  |  | ||||||
|         # If a cursor object has already been created, apply the limit to it. |         # If a cursor object has already been created, apply the limit to it. | ||||||
|         if queryset._cursor_obj: |         if queryset._cursor_obj: | ||||||
| @@ -1001,7 +1033,6 @@ class BaseQuerySet: | |||||||
|         .. versionchanged:: 0.5 - Added subfield support |         .. versionchanged:: 0.5 - Added subfield support | ||||||
|         """ |         """ | ||||||
|         fields = {f: QueryFieldList.ONLY for f in fields} |         fields = {f: QueryFieldList.ONLY for f in fields} | ||||||
|         self.only_fields = list(fields.keys()) |  | ||||||
|         return self.fields(True, **fields) |         return self.fields(True, **fields) | ||||||
|  |  | ||||||
|     def exclude(self, *fields): |     def exclude(self, *fields): | ||||||
| @@ -1266,10 +1297,7 @@ class BaseQuerySet: | |||||||
|     def from_json(self, json_data): |     def from_json(self, json_data): | ||||||
|         """Converts json data to unsaved objects""" |         """Converts json data to unsaved objects""" | ||||||
|         son_data = json_util.loads(json_data) |         son_data = json_util.loads(json_data) | ||||||
|         return [ |         return [self._document._from_son(data) for data in son_data] | ||||||
|             self._document._from_son(data, only_fields=self.only_fields) |  | ||||||
|             for data in son_data |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     def aggregate(self, pipeline, *suppl_pipeline, **kwargs): |     def aggregate(self, pipeline, *suppl_pipeline, **kwargs): | ||||||
|         """Perform a aggregate function based in your queryset params |         """Perform a aggregate function based in your queryset params | ||||||
| @@ -1311,10 +1339,11 @@ class BaseQuerySet: | |||||||
|         final_pipeline = initial_pipeline + user_pipeline |         final_pipeline = initial_pipeline + user_pipeline | ||||||
|  |  | ||||||
|         collection = self._collection |         collection = self._collection | ||||||
|         if self._read_preference is not None: |         if self._read_preference is not None or self._read_concern is not None: | ||||||
|             collection = self._collection.with_options( |             collection = self._collection.with_options( | ||||||
|                 read_preference=self._read_preference |                 read_preference=self._read_preference, read_concern=self._read_concern | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         return collection.aggregate(final_pipeline, cursor={}, **kwargs) |         return collection.aggregate(final_pipeline, cursor={}, **kwargs) | ||||||
|  |  | ||||||
|     # JS functionality |     # JS functionality | ||||||
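With this change the aggregation collection picks up a configured read concern as well as the read preference. A sketch using the Number document from the test-suite and the read_concern() queryset method exercised there:

from pymongo import ReadPreference

qs = (Number.objects
      .read_preference(ReadPreference.SECONDARY_PREFERRED)
      .read_concern({"level": "majority"}))
pipeline = [{"$group": {"_id": None, "total": {"$sum": "$n"}}}]
result = list(qs.aggregate(pipeline))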
| @@ -1584,7 +1613,7 @@ class BaseQuerySet: | |||||||
|     def __next__(self): |     def __next__(self): | ||||||
|         """Wrap the result in a :class:`~mongoengine.Document` object. |         """Wrap the result in a :class:`~mongoengine.Document` object. | ||||||
|         """ |         """ | ||||||
|         if self._limit == 0 or self._none: |         if self._none or self._empty: | ||||||
|             raise StopIteration |             raise StopIteration | ||||||
|  |  | ||||||
|         raw_doc = next(self._cursor) |         raw_doc = next(self._cursor) | ||||||
| @@ -1593,9 +1622,7 @@ class BaseQuerySet: | |||||||
|             return raw_doc |             return raw_doc | ||||||
|  |  | ||||||
|         doc = self._document._from_son( |         doc = self._document._from_son( | ||||||
|             raw_doc, |             raw_doc, _auto_dereference=self._auto_dereference, | ||||||
|             _auto_dereference=self._auto_dereference, |  | ||||||
|             only_fields=self.only_fields, |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         if self._scalar: |         if self._scalar: | ||||||
| @@ -1603,8 +1630,6 @@ class BaseQuerySet: | |||||||
|  |  | ||||||
|         return doc |         return doc | ||||||
|  |  | ||||||
|     next = __next__  # For Python2 support |  | ||||||
|  |  | ||||||
|     def rewind(self): |     def rewind(self): | ||||||
|         """Rewind the cursor to its unevaluated state. |         """Rewind the cursor to its unevaluated state. | ||||||
|  |  | ||||||
|   | |||||||
| @@ -144,6 +144,7 @@ class QuerySet(BaseQuerySet): | |||||||
|             return super().count(with_limit_and_skip) |             return super().count(with_limit_and_skip) | ||||||
|  |  | ||||||
|         if self._len is None: |         if self._len is None: | ||||||
|  |             # cache the length | ||||||
|             self._len = super().count(with_limit_and_skip) |             self._len = super().count(with_limit_and_skip) | ||||||
|  |  | ||||||
|         return self._len |         return self._len | ||||||
|   | |||||||
| @@ -7,6 +7,11 @@ from mongoengine.queryset import transform | |||||||
| __all__ = ("Q", "QNode") | __all__ = ("Q", "QNode") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def warn_empty_is_deprecated(): | ||||||
|  |     msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" | ||||||
|  |     warnings.warn(msg, DeprecationWarning, stacklevel=2) | ||||||
|  |  | ||||||
|  |  | ||||||
| class QNodeVisitor: | class QNodeVisitor: | ||||||
|     """Base visitor class for visiting Q-object nodes in a query tree. |     """Base visitor class for visiting Q-object nodes in a query tree. | ||||||
|     """ |     """ | ||||||
| @@ -98,19 +103,18 @@ class QNode: | |||||||
|         object. |         object. | ||||||
|         """ |         """ | ||||||
|         # If the other Q() is empty, ignore it and just use `self`. |         # If the other Q() is empty, ignore it and just use `self`. | ||||||
|         if getattr(other, "empty", True): |         if not bool(other): | ||||||
|             return self |             return self | ||||||
|  |  | ||||||
|         # Or if this Q is empty, ignore it and just use `other`. |         # Or if this Q is empty, ignore it and just use `other`. | ||||||
|         if self.empty: |         if not bool(self): | ||||||
|             return other |             return other | ||||||
|  |  | ||||||
|         return QCombination(operation, [self, other]) |         return QCombination(operation, [self, other]) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def empty(self): |     def empty(self): | ||||||
|         msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" |         warn_empty_is_deprecated() | ||||||
|         warnings.warn(msg, DeprecationWarning) |  | ||||||
|         return False |         return False | ||||||
|  |  | ||||||
|     def __or__(self, other): |     def __or__(self, other): | ||||||
| @@ -152,8 +156,7 @@ class QCombination(QNode): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def empty(self): |     def empty(self): | ||||||
|         msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" |         warn_empty_is_deprecated() | ||||||
|         warnings.warn(msg, DeprecationWarning) |  | ||||||
|         return not bool(self.children) |         return not bool(self.children) | ||||||
|  |  | ||||||
|     def __eq__(self, other): |     def __eq__(self, other): | ||||||
| @@ -186,4 +189,5 @@ class Q(QNode): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def empty(self): |     def empty(self): | ||||||
|  |         warn_empty_is_deprecated() | ||||||
|         return not bool(self.query) |         return not bool(self.query) | ||||||
|   | |||||||
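The deprecated empty property of Q, QCombination and QNode now funnels through warn_empty_is_deprecated(), and the combination logic uses plain truthiness. A small sketch of the recommended spelling:

import warnings
from mongoengine.queryset.visitor import Q

q = Q(name="foo") & Q()   # the empty Q() on the right is simply ignored
assert bool(q)            # preferred over the deprecated q.empty

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    Q().empty             # still works, but emits a DeprecationWarning
assert caught[0].category is DeprecationWarning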
| @@ -1,3 +0,0 @@ | |||||||
| pymongo>=3.4 |  | ||||||
| Sphinx==1.5.5 |  | ||||||
| sphinx-rtd-theme==0.2.4 |  | ||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from bson import SON | from bson import SON | ||||||
| @@ -29,7 +28,8 @@ class TestDelta(MongoDBTestCase): | |||||||
|         self.delta(Document) |         self.delta(Document) | ||||||
|         self.delta(DynamicDocument) |         self.delta(DynamicDocument) | ||||||
|  |  | ||||||
|     def delta(self, DocClass): |     @staticmethod | ||||||
|  |     def delta(DocClass): | ||||||
|         class Doc(DocClass): |         class Doc(DocClass): | ||||||
|             string_field = StringField() |             string_field = StringField() | ||||||
|             int_field = IntField() |             int_field = IntField() | ||||||
| @@ -428,13 +428,20 @@ class TestDelta(MongoDBTestCase): | |||||||
|         assert doc.dict_field == {"hello": "world"} |         assert doc.dict_field == {"hello": "world"} | ||||||
|         assert doc.list_field == ["1", 2, {"hello": "world"}] |         assert doc.list_field == ["1", 2, {"hello": "world"}] | ||||||
|  |  | ||||||
|     def test_delta_recursive_db_field(self): |     def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self): | ||||||
|         self.delta_recursive_db_field(Document, EmbeddedDocument) |         self.delta_recursive_db_field(Document, EmbeddedDocument) | ||||||
|  |  | ||||||
|  |     def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self): | ||||||
|         self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) |         self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) | ||||||
|  |  | ||||||
|  |     def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self): | ||||||
|         self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) |         self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) | ||||||
|  |  | ||||||
|  |     def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self): | ||||||
|         self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) |         self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) | ||||||
|  |  | ||||||
|     def delta_recursive_db_field(self, DocClass, EmbeddedClass): |     @staticmethod | ||||||
|  |     def delta_recursive_db_field(DocClass, EmbeddedClass): | ||||||
|         class Embedded(EmbeddedClass): |         class Embedded(EmbeddedClass): | ||||||
|             string_field = StringField(db_field="db_string_field") |             string_field = StringField(db_field="db_string_field") | ||||||
|             int_field = IntField(db_field="db_int_field") |             int_field = IntField(db_field="db_int_field") | ||||||
| @@ -487,6 +494,7 @@ class TestDelta(MongoDBTestCase): | |||||||
|         doc = doc.reload(10) |         doc = doc.reload(10) | ||||||
|         assert doc.embedded_field.dict_field == {} |         assert doc.embedded_field.dict_field == {} | ||||||
|  |  | ||||||
|  |         assert doc._get_changed_fields() == [] | ||||||
|         doc.embedded_field.list_field = [] |         doc.embedded_field.list_field = [] | ||||||
|         assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] |         assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] | ||||||
|         assert doc.embedded_field._delta() == ({}, {"db_list_field": 1}) |         assert doc.embedded_field._delta() == ({}, {"db_list_field": 1}) | ||||||
| @@ -537,6 +545,7 @@ class TestDelta(MongoDBTestCase): | |||||||
|             {}, |             {}, | ||||||
|         ) |         ) | ||||||
|         doc.save() |         doc.save() | ||||||
|  |         assert doc._get_changed_fields() == [] | ||||||
|         doc = doc.reload(10) |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|         assert doc.embedded_field.list_field[0] == "1" |         assert doc.embedded_field.list_field[0] == "1" | ||||||
| @@ -634,6 +643,7 @@ class TestDelta(MongoDBTestCase): | |||||||
|         doc.save() |         doc.save() | ||||||
|         doc = doc.reload(10) |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         assert doc._delta() == ({}, {},) | ||||||
|         del doc.embedded_field.list_field[2].list_field |         del doc.embedded_field.list_field[2].list_field | ||||||
|         assert doc._delta() == ( |         assert doc._delta() == ( | ||||||
|             {}, |             {}, | ||||||
| @@ -732,12 +742,12 @@ class TestDelta(MongoDBTestCase): | |||||||
|         assert organization._get_changed_fields() == [] |         assert organization._get_changed_fields() == [] | ||||||
|  |  | ||||||
|         updates, removals = organization._delta() |         updates, removals = organization._delta() | ||||||
|         assert {} == removals |         assert removals == {} | ||||||
|         assert {} == updates |         assert updates == {} | ||||||
|  |  | ||||||
|         organization.employees.append(person) |         organization.employees.append(person) | ||||||
|         updates, removals = organization._delta() |         updates, removals = organization._delta() | ||||||
|         assert {} == removals |         assert removals == {} | ||||||
|         assert "employees" in updates |         assert "employees" in updates | ||||||
|  |  | ||||||
|     def test_delta_with_dbref_false(self): |     def test_delta_with_dbref_false(self): | ||||||
| @@ -749,12 +759,12 @@ class TestDelta(MongoDBTestCase): | |||||||
|         assert organization._get_changed_fields() == [] |         assert organization._get_changed_fields() == [] | ||||||
|  |  | ||||||
|         updates, removals = organization._delta() |         updates, removals = organization._delta() | ||||||
|         assert {} == removals |         assert removals == {} | ||||||
|         assert {} == updates |         assert updates == {} | ||||||
|  |  | ||||||
|         organization.employees.append(person) |         organization.employees.append(person) | ||||||
|         updates, removals = organization._delta() |         updates, removals = organization._delta() | ||||||
|         assert {} == removals |         assert removals == {} | ||||||
|         assert "employees" in updates |         assert "employees" in updates | ||||||
|  |  | ||||||
|     def test_nested_nested_fields_mark_as_changed(self): |     def test_nested_nested_fields_mark_as_changed(self): | ||||||
| @@ -767,19 +777,46 @@ class TestDelta(MongoDBTestCase): | |||||||
|  |  | ||||||
|         MyDoc.drop_collection() |         MyDoc.drop_collection() | ||||||
|  |  | ||||||
|         mydoc = MyDoc( |         MyDoc(name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}).save() | ||||||
|             name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}} |  | ||||||
|         ).save() |  | ||||||
|  |  | ||||||
|         mydoc = MyDoc.objects.first() |         mydoc = MyDoc.objects.first() | ||||||
|         subdoc = mydoc.subs["a"]["b"] |         subdoc = mydoc.subs["a"]["b"] | ||||||
|         subdoc.name = "bar" |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|         assert ["name"] == subdoc._get_changed_fields() |         assert subdoc._get_changed_fields() == ["name"] | ||||||
|         assert ["subs.a.b.name"] == mydoc._get_changed_fields() |         assert mydoc._get_changed_fields() == ["subs.a.b.name"] | ||||||
|  |  | ||||||
|         mydoc._clear_changed_fields() |         mydoc._clear_changed_fields() | ||||||
|         assert [] == mydoc._get_changed_fields() |         assert mydoc._get_changed_fields() == [] | ||||||
|  |  | ||||||
|  |     def test_nested_nested_fields_db_field_set__gets_mark_as_changed_and_cleaned(self): | ||||||
|  |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|  |             name = StringField(db_field="db_name") | ||||||
|  |  | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             embed = EmbeddedDocumentField(EmbeddedDoc, db_field="db_embed") | ||||||
|  |             name = StringField(db_field="db_name") | ||||||
|  |  | ||||||
|  |         MyDoc.drop_collection() | ||||||
|  |  | ||||||
|  |         MyDoc(name="testcase1", embed=EmbeddedDoc(name="foo")).save() | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc.objects.first() | ||||||
|  |         mydoc.embed.name = "foo1" | ||||||
|  |  | ||||||
|  |         assert mydoc.embed._get_changed_fields() == ["db_name"] | ||||||
|  |         assert mydoc._get_changed_fields() == ["db_embed.db_name"] | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc.objects.first() | ||||||
|  |         embed = EmbeddedDoc(name="foo2") | ||||||
|  |         embed.name = "bar" | ||||||
|  |         mydoc.embed = embed | ||||||
|  |  | ||||||
|  |         assert embed._get_changed_fields() == ["db_name"] | ||||||
|  |         assert mydoc._get_changed_fields() == ["db_embed"] | ||||||
|  |  | ||||||
|  |         mydoc._clear_changed_fields() | ||||||
|  |         assert mydoc._get_changed_fields() == [] | ||||||
|  |  | ||||||
|     def test_lower_level_mark_as_changed(self): |     def test_lower_level_mark_as_changed(self): | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |         class EmbeddedDoc(EmbeddedDocument): | ||||||
| @@ -794,17 +831,17 @@ class TestDelta(MongoDBTestCase): | |||||||
|  |  | ||||||
|         mydoc = MyDoc.objects.first() |         mydoc = MyDoc.objects.first() | ||||||
|         mydoc.subs["a"] = EmbeddedDoc() |         mydoc.subs["a"] = EmbeddedDoc() | ||||||
|         assert ["subs.a"] == mydoc._get_changed_fields() |         assert mydoc._get_changed_fields() == ["subs.a"] | ||||||
|  |  | ||||||
|         subdoc = mydoc.subs["a"] |         subdoc = mydoc.subs["a"] | ||||||
|         subdoc.name = "bar" |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|         assert ["name"] == subdoc._get_changed_fields() |         assert subdoc._get_changed_fields() == ["name"] | ||||||
|         assert ["subs.a"] == mydoc._get_changed_fields() |         assert mydoc._get_changed_fields() == ["subs.a"] | ||||||
|         mydoc.save() |         mydoc.save() | ||||||
|  |  | ||||||
|         mydoc._clear_changed_fields() |         mydoc._clear_changed_fields() | ||||||
|         assert [] == mydoc._get_changed_fields() |         assert mydoc._get_changed_fields() == [] | ||||||
|  |  | ||||||
|     def test_upper_level_mark_as_changed(self): |     def test_upper_level_mark_as_changed(self): | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |         class EmbeddedDoc(EmbeddedDocument): | ||||||
| @@ -821,15 +858,15 @@ class TestDelta(MongoDBTestCase): | |||||||
|         subdoc = mydoc.subs["a"] |         subdoc = mydoc.subs["a"] | ||||||
|         subdoc.name = "bar" |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|         assert ["name"] == subdoc._get_changed_fields() |         assert subdoc._get_changed_fields() == ["name"] | ||||||
|         assert ["subs.a.name"] == mydoc._get_changed_fields() |         assert mydoc._get_changed_fields() == ["subs.a.name"] | ||||||
|  |  | ||||||
|         mydoc.subs["a"] = EmbeddedDoc() |         mydoc.subs["a"] = EmbeddedDoc() | ||||||
|         assert ["subs.a"] == mydoc._get_changed_fields() |         assert mydoc._get_changed_fields() == ["subs.a"] | ||||||
|         mydoc.save() |         mydoc.save() | ||||||
|  |  | ||||||
|         mydoc._clear_changed_fields() |         mydoc._clear_changed_fields() | ||||||
|         assert [] == mydoc._get_changed_fields() |         assert mydoc._get_changed_fields() == [] | ||||||
|  |  | ||||||
|     def test_referenced_object_changed_attributes(self): |     def test_referenced_object_changed_attributes(self): | ||||||
|         """Ensures that when you save a new reference to a field, the referenced object isn't altered""" |         """Ensures that when you save a new reference to a field, the referenced object isn't altered""" | ||||||
|   | |||||||
| @@ -37,6 +37,19 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         # Confirm no changes to self.Person |         # Confirm no changes to self.Person | ||||||
|         assert not hasattr(self.Person, "age") |         assert not hasattr(self.Person, "age") | ||||||
|  |  | ||||||
|  |     def test_dynamic_document_parse_values_in_constructor_like_document_do(self): | ||||||
|  |         class ProductDynamicDocument(DynamicDocument): | ||||||
|  |             title = StringField() | ||||||
|  |             price = FloatField() | ||||||
|  |  | ||||||
|  |         class ProductDocument(Document): | ||||||
|  |             title = StringField() | ||||||
|  |             price = FloatField() | ||||||
|  |  | ||||||
|  |         product = ProductDocument(title="Blabla", price="12.5") | ||||||
|  |         dyn_product = ProductDynamicDocument(title="Blabla", price="12.5") | ||||||
|  |         assert product.price == dyn_product.price == 12.5 | ||||||
|  |  | ||||||
|     def test_change_scope_of_variable(self): |     def test_change_scope_of_variable(self): | ||||||
|         """Test changing the scope of a dynamic field has no adverse effects""" |         """Test changing the scope of a dynamic field has no adverse effects""" | ||||||
|         p = self.Person() |         p = self.Person() | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest | import unittest | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
|  |  | ||||||
| @@ -551,8 +550,9 @@ class TestIndexes(unittest.TestCase): | |||||||
|         assert 5 == query_result.count() |         assert 5 == query_result.count() | ||||||
|  |  | ||||||
|         incorrect_collation = {"arndom": "wrdo"} |         incorrect_collation = {"arndom": "wrdo"} | ||||||
|         with pytest.raises(OperationFailure): |         with pytest.raises(OperationFailure) as exc_info: | ||||||
|             BlogPost.objects.collation(incorrect_collation).count() |             BlogPost.objects.collation(incorrect_collation).count() | ||||||
|  |         assert "Missing expected field" in str(exc_info.value) | ||||||
|  |  | ||||||
|         query_result = BlogPost.objects.collation({}).order_by("name") |         query_result = BlogPost.objects.collation({}).order_by("name") | ||||||
|         assert [x.name for x in query_result] == sorted(names) |         assert [x.name for x in query_result] == sorted(names) | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest | import unittest | ||||||
| import warnings | import warnings | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import os | import os | ||||||
| import pickle | import pickle | ||||||
| import unittest | import unittest | ||||||
| @@ -188,7 +187,7 @@ class TestDocumentInstance(MongoDBTestCase): | |||||||
|  |  | ||||||
|     def test_queryset_resurrects_dropped_collection(self): |     def test_queryset_resurrects_dropped_collection(self): | ||||||
|         self.Person.drop_collection() |         self.Person.drop_collection() | ||||||
|         assert [] == list(self.Person.objects()) |         assert list(self.Person.objects()) == [] | ||||||
|  |  | ||||||
|         # Ensure works correctly with inherited classes |         # Ensure works correctly with inherited classes | ||||||
|         class Actor(self.Person): |         class Actor(self.Person): | ||||||
| @@ -196,7 +195,7 @@ class TestDocumentInstance(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Actor.objects() |         Actor.objects() | ||||||
|         self.Person.drop_collection() |         self.Person.drop_collection() | ||||||
|         assert [] == list(Actor.objects()) |         assert list(Actor.objects()) == [] | ||||||
|  |  | ||||||
|     def test_polymorphic_references(self): |     def test_polymorphic_references(self): | ||||||
|         """Ensure that the correct subclasses are returned from a query |         """Ensure that the correct subclasses are returned from a query | ||||||
| @@ -501,7 +500,7 @@ class TestDocumentInstance(MongoDBTestCase): | |||||||
|         doc.reload() |         doc.reload() | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|  |  | ||||||
|     def test_update_shard_key_routing(self): |     def test_save_update_shard_key_routing(self): | ||||||
|         """Ensures updating a doc with a specified shard_key includes it in |         """Ensures updating a doc with a specified shard_key includes it in | ||||||
|         the query. |         the query. | ||||||
|         """ |         """ | ||||||
| @@ -529,6 +528,29 @@ class TestDocumentInstance(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_save_create_shard_key_routing(self): | ||||||
|  |         """Ensures inserting a doc with a specified shard_key includes it in | ||||||
|  |         the query. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             _id = UUIDField(binary=False, primary_key=True, default=uuid.uuid4) | ||||||
|  |             is_mammal = BooleanField() | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {"shard_key": ("is_mammal",)} | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |         doc = Animal(is_mammal=True, name="Dog") | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             doc.save() | ||||||
|  |             query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0] | ||||||
|  |             assert query_op["op"] == "command" | ||||||
|  |             assert query_op["command"]["findAndModify"] == "animal" | ||||||
|  |             assert set(query_op["command"]["query"].keys()) == set(["_id", "is_mammal"]) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |  | ||||||
|     def test_reload_with_changed_fields(self): |     def test_reload_with_changed_fields(self): | ||||||
|         """Ensures reloading will not affect changed fields""" |         """Ensures reloading will not affect changed fields""" | ||||||
|  |  | ||||||
| @@ -578,7 +600,8 @@ class TestDocumentInstance(MongoDBTestCase): | |||||||
|         doc.embedded_field.list_field.append(1) |         doc.embedded_field.list_field.append(1) | ||||||
|         doc.embedded_field.dict_field["woot"] = "woot" |         doc.embedded_field.dict_field["woot"] = "woot" | ||||||
|  |  | ||||||
|         assert doc._get_changed_fields() == [ |         changed = doc._get_changed_fields() | ||||||
|  |         assert changed == [ | ||||||
|             "list_field", |             "list_field", | ||||||
|             "dict_field.woot", |             "dict_field.woot", | ||||||
|             "embedded_field.list_field", |             "embedded_field.list_field", | ||||||
| @@ -3411,7 +3434,7 @@ class TestDocumentInstance(MongoDBTestCase): | |||||||
|         assert obj3 != dbref2 |         assert obj3 != dbref2 | ||||||
|         assert dbref2 != obj3 |         assert dbref2 != obj3 | ||||||
|  |  | ||||||
|     def test_default_values(self): |     def test_default_values_dont_get_override_upon_save_when_only_is_used(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             created_on = DateTimeField(default=lambda: datetime.utcnow()) |             created_on = DateTimeField(default=lambda: datetime.utcnow()) | ||||||
|             name = StringField() |             name = StringField() | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest | import unittest | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import uuid | import uuid | ||||||
|  |  | ||||||
| from bson import Binary | from bson import Binary | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from decimal import Decimal | from decimal import Decimal | ||||||
|  |  | ||||||
| import pytest | import pytest | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime | import datetime | ||||||
| import itertools | import itertools | ||||||
| import math | import math | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime | import datetime | ||||||
|  |  | ||||||
| import pytest | import pytest | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime as dt | import datetime as dt | ||||||
|  |  | ||||||
| import pytest | import pytest | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from decimal import Decimal | from decimal import Decimal | ||||||
|  |  | ||||||
| import pytest | import pytest | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from bson import InvalidDocument | from bson import InvalidDocument | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| @@ -113,7 +112,7 @@ class TestDictField(MongoDBTestCase): | |||||||
|         post.info.setdefault("authors", []) |         post.info.setdefault("authors", []) | ||||||
|         post.save() |         post.save() | ||||||
|         post.reload() |         post.reload() | ||||||
|         assert [] == post.info["authors"] |         assert post.info["authors"] == [] | ||||||
|  |  | ||||||
|     def test_dictfield_dump_document(self): |     def test_dictfield_dump_document(self): | ||||||
|         """Ensure a DictField can handle another document's dump.""" |         """Ensure a DictField can handle another document's dump.""" | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import sys | import sys | ||||||
|  |  | ||||||
| import pytest | import pytest | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| from mongoengine import ( | from mongoengine import ( | ||||||
|   | |||||||
							
								
								
									
122  tests/fields/test_enum_field.py  (new file)
							| @@ -0,0 +1,122 @@ | |||||||
|  | from enum import Enum | ||||||
|  |  | ||||||
|  | from bson import InvalidDocument | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Status(Enum): | ||||||
|  |     NEW = "new" | ||||||
|  |     DONE = "done" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ModelWithEnum(Document): | ||||||
|  |     status = EnumField(Status) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestStringEnumField(MongoDBTestCase): | ||||||
|  |     def test_storage(self): | ||||||
|  |         model = ModelWithEnum(status=Status.NEW).save() | ||||||
|  |         assert get_as_pymongo(model) == {"_id": model.id, "status": "new"} | ||||||
|  |  | ||||||
|  |     def test_set_enum(self): | ||||||
|  |         ModelWithEnum.drop_collection() | ||||||
|  |         ModelWithEnum(status=Status.NEW).save() | ||||||
|  |         assert ModelWithEnum.objects(status=Status.NEW).count() == 1 | ||||||
|  |         assert ModelWithEnum.objects.first().status == Status.NEW | ||||||
|  |  | ||||||
|  |     def test_set_by_value(self): | ||||||
|  |         ModelWithEnum.drop_collection() | ||||||
|  |         ModelWithEnum(status="new").save() | ||||||
|  |         assert ModelWithEnum.objects.first().status == Status.NEW | ||||||
|  |  | ||||||
|  |     def test_filter(self): | ||||||
|  |         ModelWithEnum.drop_collection() | ||||||
|  |         ModelWithEnum(status="new").save() | ||||||
|  |         assert ModelWithEnum.objects(status="new").count() == 1 | ||||||
|  |         assert ModelWithEnum.objects(status=Status.NEW).count() == 1 | ||||||
|  |         assert ModelWithEnum.objects(status=Status.DONE).count() == 0 | ||||||
|  |  | ||||||
|  |     def test_change_value(self): | ||||||
|  |         m = ModelWithEnum(status="new") | ||||||
|  |         m.status = Status.DONE | ||||||
|  |         m.save() | ||||||
|  |         assert m.status == Status.DONE | ||||||
|  |  | ||||||
|  |     def test_set_default(self): | ||||||
|  |         class ModelWithDefault(Document): | ||||||
|  |             status = EnumField(Status, default=Status.DONE) | ||||||
|  |  | ||||||
|  |         m = ModelWithDefault().save() | ||||||
|  |         assert m.status == Status.DONE | ||||||
|  |  | ||||||
|  |     def test_enum_field_can_be_empty(self): | ||||||
|  |         ModelWithEnum.drop_collection() | ||||||
|  |         m = ModelWithEnum().save() | ||||||
|  |         assert m.status is None | ||||||
|  |         assert ModelWithEnum.objects()[0].status is None | ||||||
|  |         assert ModelWithEnum.objects(status=None).count() == 1 | ||||||
|  |  | ||||||
|  |     def test_set_none_explicitly(self): | ||||||
|  |         ModelWithEnum.drop_collection() | ||||||
|  |         ModelWithEnum(status=None).save() | ||||||
|  |         assert ModelWithEnum.objects.first().status is None | ||||||
|  |  | ||||||
|  |     def test_cannot_create_model_with_wrong_enum_value(self): | ||||||
|  |         m = ModelWithEnum(status="wrong_one") | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             m.validate() | ||||||
|  |  | ||||||
|  |     def test_user_is_informed_when_tries_to_set_choices(self): | ||||||
|  |         with pytest.raises(ValueError, match="'choices' can't be set on EnumField"): | ||||||
|  |             EnumField(Status, choices=["my", "custom", "options"]) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Color(Enum): | ||||||
|  |     RED = 1 | ||||||
|  |     BLUE = 2 | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ModelWithColor(Document): | ||||||
|  |     color = EnumField(Color, default=Color.RED) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestIntEnumField(MongoDBTestCase): | ||||||
|  |     def test_enum_with_int(self): | ||||||
|  |         ModelWithColor.drop_collection() | ||||||
|  |         m = ModelWithColor().save() | ||||||
|  |         assert m.color == Color.RED | ||||||
|  |         assert ModelWithColor.objects(color=Color.RED).count() == 1 | ||||||
|  |         assert ModelWithColor.objects(color=1).count() == 1 | ||||||
|  |         assert ModelWithColor.objects(color=2).count() == 0 | ||||||
|  |  | ||||||
|  |     def test_create_int_enum_by_value(self): | ||||||
|  |         model = ModelWithColor(color=2).save() | ||||||
|  |         assert model.color == Color.BLUE | ||||||
|  |  | ||||||
|  |     def test_storage_enum_with_int(self): | ||||||
|  |         model = ModelWithColor(color=Color.BLUE).save() | ||||||
|  |         assert get_as_pymongo(model) == {"_id": model.id, "color": 2} | ||||||
|  |  | ||||||
|  |     def test_validate_model(self): | ||||||
|  |         with pytest.raises(ValidationError, match="Value must be one of"): | ||||||
|  |             ModelWithColor(color=3).validate() | ||||||
|  |  | ||||||
|  |         with pytest.raises(ValidationError, match="Value must be one of"): | ||||||
|  |             ModelWithColor(color="wrong_type").validate() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestFunkyEnumField(MongoDBTestCase): | ||||||
|  |     def test_enum_incompatible_bson_type_fails_during_save(self): | ||||||
|  |         class FunkyColor(Enum): | ||||||
|  |             YELLOW = object() | ||||||
|  |  | ||||||
|  |         class ModelWithFunkyColor(Document): | ||||||
|  |             color = EnumField(FunkyColor) | ||||||
|  |  | ||||||
|  |         m = ModelWithFunkyColor(color=FunkyColor.YELLOW) | ||||||
|  |  | ||||||
|  |         with pytest.raises(InvalidDocument, match="[cC]annot encode object"): | ||||||
|  |             m.save() | ||||||
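In short, the new EnumField validates values against the enum's members and stores the member's underlying value (a string or an int) in MongoDB. A condensed sketch of the behaviour the tests above exercise (the Task class and connect() call are illustrative only):

from enum import Enum
from mongoengine import Document, EnumField, connect

class Status(Enum):
    NEW = "new"
    DONE = "done"

class Task(Document):
    status = EnumField(Status, default=Status.NEW)

connect("mongoenginetest")
task = Task(status="done").save()   # raw values are coerced to enum members
assert task.status == Status.DONE   # persisted as the plain string "done"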
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime | import datetime | ||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| @@ -336,7 +335,7 @@ class TestField(MongoDBTestCase): | |||||||
|         doc.save() |         doc.save() | ||||||
|  |  | ||||||
|         # Unset all the fields |         # Unset all the fields | ||||||
|         HandleNoneFields._get_collection().update( |         HandleNoneFields._get_collection().update_one( | ||||||
|             {"_id": doc.id}, |             {"_id": doc.id}, | ||||||
|             {"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}}, |             {"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}}, | ||||||
|         ) |         ) | ||||||
| @@ -1084,7 +1083,7 @@ class TestField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         e = Simple().save() |         e = Simple().save() | ||||||
|         e.mapping = [] |         e.mapping = [] | ||||||
|         assert [] == e._changed_fields |         assert e._changed_fields == [] | ||||||
|  |  | ||||||
|         class Simple(Document): |         class Simple(Document): | ||||||
|             mapping = DictField() |             mapping = DictField() | ||||||
| @@ -1093,7 +1092,7 @@ class TestField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         e = Simple().save() |         e = Simple().save() | ||||||
|         e.mapping = {} |         e.mapping = {} | ||||||
|         assert [] == e._changed_fields |         assert e._changed_fields == [] | ||||||
|  |  | ||||||
|     def test_slice_marks_field_as_changed(self): |     def test_slice_marks_field_as_changed(self): | ||||||
|         class Simple(Document): |         class Simple(Document): | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import copy | import copy | ||||||
| import os | import os | ||||||
| import tempfile | import tempfile | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| @@ -381,7 +380,7 @@ class TestGeoField(MongoDBTestCase): | |||||||
|  |  | ||||||
|             meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]} |             meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]} | ||||||
|  |  | ||||||
|         assert [] == Log._geo_indices() |         assert Log._geo_indices() == [] | ||||||
|  |  | ||||||
|         Log.drop_collection() |         Log.drop_collection() | ||||||
|         Log.ensure_indexes() |         Log.ensure_indexes() | ||||||
| @@ -401,7 +400,7 @@ class TestGeoField(MongoDBTestCase): | |||||||
|                 "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}] |                 "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}] | ||||||
|             } |             } | ||||||
|  |  | ||||||
|         assert [] == Log._geo_indices() |         assert Log._geo_indices() == [] | ||||||
|  |  | ||||||
|         Log.drop_collection() |         Log.drop_collection() | ||||||
|         Log.ensure_indexes() |         Log.ensure_indexes() | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from bson import DBRef, ObjectId | from bson import DBRef, ObjectId | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime | import datetime | ||||||
|  |  | ||||||
| import pytest | import pytest | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from bson import DBRef, SON | from bson import DBRef, SON | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,5 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import pytest | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import uuid | import uuid | ||||||
|  |  | ||||||
| import pytest | import pytest | ||||||
|   | |||||||
| @@ -1,5 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
|  |  | ||||||
| import datetime | import datetime | ||||||
| import unittest | import unittest | ||||||
| import uuid | import uuid | ||||||
| @@ -114,6 +112,38 @@ class TestQueryset(unittest.TestCase): | |||||||
|         assert person.name == "User A" |         assert person.name == "User A" | ||||||
|         assert person.age == 20 |         assert person.age == 20 | ||||||
|  |  | ||||||
|  |     def test_slicing_sets_empty_limit_skip(self): | ||||||
|  |         self.Person.objects.insert( | ||||||
|  |             [self.Person(name="User {}".format(i), age=i) for i in range(5)], | ||||||
|  |             load_bulk=False, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         self.Person.objects.create(name="User B", age=30) | ||||||
|  |         self.Person.objects.create(name="User C", age=40) | ||||||
|  |  | ||||||
|  |         qs = self.Person.objects()[1:2] | ||||||
|  |         assert (qs._empty, qs._skip, qs._limit) == (False, 1, 1) | ||||||
|  |         assert len(list(qs)) == 1 | ||||||
|  |  | ||||||
|  |         # Test edge case of [1:1] which should return nothing | ||||||
|  |         # and requires a hack so that it doesn't clash with limit(0) | ||||||
|  |         qs = self.Person.objects()[1:1] | ||||||
|  |         assert (qs._empty, qs._skip, qs._limit) == (True, 1, 0) | ||||||
|  |         assert len(list(qs)) == 0 | ||||||
|  |  | ||||||
|  |         qs2 = qs[1:5]  # Make sure that further slicing resets _empty | ||||||
|  |         assert (qs2._empty, qs2._skip, qs2._limit) == (False, 1, 4) | ||||||
|  |         assert len(list(qs2)) == 4 | ||||||
|  |  | ||||||
|  |     def test_limit_0_returns_all_documents(self): | ||||||
|  |         self.Person.objects.create(name="User A", age=20) | ||||||
|  |         self.Person.objects.create(name="User B", age=30) | ||||||
|  |  | ||||||
|  |         n_docs = self.Person.objects().count() | ||||||
|  |  | ||||||
|  |         persons = list(self.Person.objects().limit(0)) | ||||||
|  |         assert len(persons) == 2 == n_docs | ||||||
|  |  | ||||||
|     def test_limit(self): |     def test_limit(self): | ||||||
|         """Ensure that QuerySet.limit works as expected.""" |         """Ensure that QuerySet.limit works as expected.""" | ||||||
|         user_a = self.Person.objects.create(name="User A", age=20) |         user_a = self.Person.objects.create(name="User A", age=20) | ||||||
| @@ -377,6 +407,9 @@ class TestQueryset(unittest.TestCase): | |||||||
|  |  | ||||||
|         assert list(A.objects.none()) == [] |         assert list(A.objects.none()) == [] | ||||||
|         assert list(A.objects.none().all()) == [] |         assert list(A.objects.none().all()) == [] | ||||||
|  |         assert list(A.objects.none().limit(1)) == [] | ||||||
|  |         assert list(A.objects.none().skip(1)) == [] | ||||||
|  |         assert list(A.objects.none()[:5]) == [] | ||||||
|  |  | ||||||
|     def test_chaining(self): |     def test_chaining(self): | ||||||
|         class A(Document): |         class A(Document): | ||||||
| @@ -4021,6 +4054,32 @@ class TestQueryset(unittest.TestCase): | |||||||
|  |  | ||||||
|         Number.drop_collection() |         Number.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_clone_retains_settings(self): | ||||||
|  |         """Ensure that cloning retains the read_preference and read_concern | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Number(Document): | ||||||
|  |             n = IntField() | ||||||
|  |  | ||||||
|  |         Number.drop_collection() | ||||||
|  |  | ||||||
|  |         qs = Number.objects | ||||||
|  |         qs_clone = qs.clone() | ||||||
|  |         assert qs._read_preference == qs_clone._read_preference | ||||||
|  |         assert qs._read_concern == qs_clone._read_concern | ||||||
|  |  | ||||||
|  |         qs = Number.objects.read_preference(ReadPreference.PRIMARY_PREFERRED) | ||||||
|  |         qs_clone = qs.clone() | ||||||
|  |         assert qs._read_preference == ReadPreference.PRIMARY_PREFERRED | ||||||
|  |         assert qs._read_preference == qs_clone._read_preference | ||||||
|  |  | ||||||
|  |         qs = Number.objects.read_concern({"level": "majority"}) | ||||||
|  |         qs_clone = qs.clone() | ||||||
|  |         assert qs._read_concern.document == {"level": "majority"} | ||||||
|  |         assert qs._read_concern == qs_clone._read_concern | ||||||
|  |  | ||||||
|  |         Number.drop_collection() | ||||||
|  |  | ||||||
|     def test_using(self): |     def test_using(self): | ||||||
|         """Ensure that switching databases for a queryset is possible |         """Ensure that switching databases for a queryset is possible | ||||||
|         """ |         """ | ||||||
| @@ -4442,7 +4501,9 @@ class TestQueryset(unittest.TestCase): | |||||||
|         assert len(people) == 1 |         assert len(people) == 1 | ||||||
|         assert people[0] == "User B" |         assert people[0] == "User B" | ||||||
|  |  | ||||||
|         people = list(self.Person.objects[1:1].scalar("name")) |         # people = list(self.Person.objects[1:1].scalar("name")) | ||||||
|  |         people = self.Person.objects[1:1] | ||||||
|  |         people = people.scalar("name") | ||||||
|         assert len(people) == 0 |         assert len(people) == 0 | ||||||
|  |  | ||||||
|         # Test slice out of range |         # Test slice out of range | ||||||
|   | |||||||
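For context on the limit(0) and none() assertions added above: a minimal standalone sketch of the behaviour under test, assuming a local mongod and the mongoengine package; the database name and model here are illustrative stand-ins, not part of this diff.

import mongoengine
from mongoengine import Document, IntField, StringField

mongoengine.connect("limit_demo")  # hypothetical database name

class Person(Document):
    name = StringField()
    age = IntField()

Person.drop_collection()
Person(name="User A", age=20).save()
Person(name="User B", age=30).save()

# limit(0) is handed straight to PyMongo, where a cursor limit of 0 means
# "no limit", so every document comes back.
assert len(list(Person.objects.limit(0))) == 2

# none() short-circuits the queryset, so chained limit/skip/slicing still
# evaluates to an empty result.
assert list(Person.objects.none().limit(1)) == []
assert list(Person.objects.none()[:5]) == []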
| @@ -1,5 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
|  |  | ||||||
| import unittest | import unittest | ||||||
| import warnings | import warnings | ||||||
|  |  | ||||||
|   | |||||||
| @@ -344,6 +344,31 @@ class TestTransform(unittest.TestCase): | |||||||
|         ) |         ) | ||||||
|         assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}} |         assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}} | ||||||
|  |  | ||||||
|  |     def test_transform_embedded_document_list_fields(self): | ||||||
|  |         """ | ||||||
|  |         Test added to check filtering on an | ||||||
|  |         EmbeddedDocumentListField which is inside an EmbeddedDocumentField | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Drink(EmbeddedDocument): | ||||||
|  |             id = StringField() | ||||||
|  |             meta = {"strict": False} | ||||||
|  |  | ||||||
|  |         class Shop(Document): | ||||||
|  |             drinks = EmbeddedDocumentListField(Drink) | ||||||
|  |  | ||||||
|  |         Shop.drop_collection() | ||||||
|  |         drinks = [Drink(id="drink_1"), Drink(id="drink_2")] | ||||||
|  |         Shop.objects.create(drinks=drinks) | ||||||
|  |         q_obj = transform.query( | ||||||
|  |             Shop, drinks__all=[{"$elemMatch": {"_id": x.id}} for x in drinks] | ||||||
|  |         ) | ||||||
|  |         assert q_obj == { | ||||||
|  |             "drinks": {"$all": [{"$elemMatch": {"_id": x.id}} for x in drinks]} | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         Shop.drop_collection() | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": | if __name__ == "__main__": | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
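The transform-level assertion above has a user-facing equivalent; this is a sketch reusing the Shop and Drink models from the test (assumed to be already defined and connected), not part of this diff.

# The drinks__all keyword below is compiled by transform.query() into
# {"drinks": {"$all": [{"$elemMatch": {"_id": ...}}, ...]}} before being
# sent to MongoDB.
drinks = [Drink(id="drink_1"), Drink(id="drink_2")]
Shop.objects.create(drinks=drinks)
matching = Shop.objects(
    drinks__all=[{"$elemMatch": {"_id": d.id}} for d in drinks]
)
print(matching._query)  # the raw filter document the queryset will send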
| @@ -282,7 +282,7 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         # database won't exist until we save a document |         # database won't exist until we save a document | ||||||
|         some_document.save() |         some_document.save() | ||||||
|         assert conn.get_default_database().name == "mongoenginetest" |         assert conn.get_default_database().name == "mongoenginetest" | ||||||
|         assert conn.database_names()[0] == "mongoenginetest" |         assert conn.list_database_names()[0] == "mongoenginetest" | ||||||
|  |  | ||||||
|     @require_mongomock |     @require_mongomock | ||||||
|     def test_connect_with_host_list(self): |     def test_connect_with_host_list(self): | ||||||
|   | |||||||
| @@ -9,10 +9,14 @@ from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict | |||||||
| class DocumentStub(object): | class DocumentStub(object): | ||||||
|     def __init__(self): |     def __init__(self): | ||||||
|         self._changed_fields = [] |         self._changed_fields = [] | ||||||
|  |         self._unset_fields = [] | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key): |     def _mark_as_changed(self, key): | ||||||
|         self._changed_fields.append(key) |         self._changed_fields.append(key) | ||||||
|  |  | ||||||
|  |     def _mark_as_unset(self, key): | ||||||
|  |         self._unset_fields.append(key) | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestBaseDict: | class TestBaseDict: | ||||||
|     @staticmethod |     @staticmethod | ||||||
| @@ -314,7 +318,7 @@ class TestBaseList: | |||||||
|     def test___setitem___item_0_calls_mark_as_changed(self): |     def test___setitem___item_0_calls_mark_as_changed(self): | ||||||
|         base_list = self._get_baselist([True]) |         base_list = self._get_baselist([True]) | ||||||
|         base_list[0] = False |         base_list[0] = False | ||||||
|         assert base_list._instance._changed_fields == ["my_name"] |         assert base_list._instance._changed_fields == ["my_name.0"] | ||||||
|         assert base_list == [False] |         assert base_list == [False] | ||||||
|  |  | ||||||
|     def test___setitem___item_1_calls_mark_as_changed(self): |     def test___setitem___item_1_calls_mark_as_changed(self): | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from bson import DBRef, ObjectId | from bson import DBRef, ObjectId | ||||||
| @@ -370,8 +369,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         assert Post.objects.all()[0].user_lists == [[u1, u2], [u3]] |         assert Post.objects.all()[0].user_lists == [[u1, u2], [u3]] | ||||||
|  |  | ||||||
|     def test_circular_reference(self): |     def test_circular_reference(self): | ||||||
|         """Ensure you can handle circular references |         """Ensure you can handle circular references""" | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         class Relation(EmbeddedDocument): |         class Relation(EmbeddedDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -426,6 +424,7 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         daughter.relations.append(mother) |         daughter.relations.append(mother) | ||||||
|         daughter.relations.append(daughter) |         daughter.relations.append(daughter) | ||||||
|  |         assert daughter._get_changed_fields() == ["relations"] | ||||||
|         daughter.save() |         daughter.save() | ||||||
|  |  | ||||||
|         assert "[<Person: Mother>, <Person: Daughter>]" == "%s" % Person.objects() |         assert "[<Person: Mother>, <Person: Daughter>]" == "%s" % Person.objects() | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|   | |||||||
| @@ -50,7 +50,7 @@ def _decorated_with_ver_requirement(func, mongo_version_req, oper): | |||||||
|     ran against MongoDB < v3.6. |     ran against MongoDB < v3.6. | ||||||
|  |  | ||||||
|     :param mongo_version_req: The mongodb version requirement (tuple(int, int)) |     :param mongo_version_req: The mongodb version requirement (tuple(int, int)) | ||||||
|     :param oper: The operator to apply (e.g: operator.ge) |     :param oper: The operator to apply (e.g. operator.ge) | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def _inner(*args, **kwargs): |     def _inner(*args, **kwargs): | ||||||
|   | |||||||
4  tox.ini
							| @@ -1,5 +1,5 @@ | |||||||
| [tox] | [tox] | ||||||
| envlist = {py35,pypy3}-{mg34,mg36,mg39,mg310} | envlist = {py35,pypy3}-{mg34,mg36,mg39,mg311} | ||||||
|  |  | ||||||
| [testenv] | [testenv] | ||||||
| commands = | commands = | ||||||
| @@ -8,6 +8,6 @@ deps = | |||||||
|     mg34: pymongo>=3.4,<3.5 |     mg34: pymongo>=3.4,<3.5 | ||||||
|     mg36: pymongo>=3.6,<3.7 |     mg36: pymongo>=3.6,<3.7 | ||||||
|     mg39: pymongo>=3.9,<3.10 |     mg39: pymongo>=3.9,<3.10 | ||||||
|     mg310: pymongo>=3.10,<3.11 |     mg311: pymongo>=3.11,<3.12 | ||||||
| setenv = | setenv = | ||||||
|     PYTHON_EGG_CACHE = {envdir}/python-eggs |     PYTHON_EGG_CACHE = {envdir}/python-eggs | ||||||
|   | |||||||