Compare commits


81 Commits

Author SHA1 Message Date
Bastien Gérard
d73ca6f90d Create main.yml 2020-11-15 15:24:40 +01:00
Bastien Gérard
e6c2169f76 Merge pull request #2418 from bagerard/add_black_formatting_badge
Add black badge to readme
2020-11-14 13:42:57 +01:00
Bastien Gérard
1d17dc4663 Add black badge to readme to emphasize that repo is using autoformatter black as it is often forgotten in PRs and makes CI fail 2020-11-12 22:13:53 +01:00
Bastien Gérard
eeac3bd2e6 Merge pull request #2416 from bagerard/remove_python_35
Remove Py3.5 as it is EOL and added 3.9 to CI
2020-11-12 09:47:44 +01:00
Bastien Gérard
3f5a15d236 improve changelog 2020-11-12 00:43:22 +01:00
Bastien Gérard
338c40b5d5 Remove Py3.5 as it is EOL and added 3.9 to CI 2020-11-11 21:14:54 +01:00
Bastien Gérard
fc3ccf9606 Merge pull request #2415 from bagerard/add_srv_uri_connect_doc
Document fact that srv URI can be used with host #1956
2020-11-11 21:05:52 +01:00
Bastien Gérard
746faceb5c Document fact that srv URI can be used with host #1956 2020-11-08 22:55:24 +01:00
Bastien Gérard
eb56fb9bda Merge pull request #2413 from bagerard/dynamic_document_parsing_known_fields
Bug fix in DynamicDocument which is not parsing known fields
2020-11-08 13:17:07 +01:00
Bastien Gérard
161493c0d2 Merge pull request #2408 from bagerard/refactoring_remove_useless_code_only_fields
Removed code related to Document.__only_fields
2020-11-08 13:16:03 +01:00
Bastien Gérard
cb9f329d11 Merge pull request #2401 from SMASHDOCs/bugfix-save-sharding
Bugfix #2154
2020-11-07 21:43:49 +01:00
Bastien Gérard
03af784ebe Bug fix in DynamicDocument which isn't parsing known fields in constructor like Document does #2412 2020-11-07 21:30:23 +01:00
Felix Schultheiß
e5f6e4584a Merge commit master into bugfix-save-sharding 2020-11-03 10:05:31 +01:00
Felix Schultheiß
79f9f223d0 added to authors 2020-11-03 10:00:15 +01:00
Felix Schultheiß
0bc18cd6e1 fixed shard test case for old mongodb version 2020-11-03 10:00:02 +01:00
Felix Schultheiß
30a3c6a5b7 added testcase for save create with shard key 2020-11-02 17:30:24 +01:00
Bastien Gérard
90c5d83f84 remove deprecated comment 2020-11-02 15:02:11 +01:00
Bastien Gérard
d8b8ff6851 Removed code related to Document.__only_fields and Queryset.only_fields which appear to have no effect 2020-11-02 14:52:02 +01:00
Bastien Gérard
ee664f0c90 Merge pull request #2406 from bagerard/improve_enumfield_doc
improve EnumField Doc and add quick test
2020-11-01 23:08:30 +01:00
Bastien Gérard
94a7e813b1 fix difference in test for certain version of pymongo 2020-11-01 19:37:13 +01:00
Bastien Gérard
8ef7213426 improve EnumField Doc and add quick test 2020-11-01 14:05:58 +01:00
Bastien Gérard
2f4464ead5 Merge pull request #2404 from mas15/add-enum-field
Add EnumField
2020-11-01 13:41:20 +01:00
Bastien Gérard
89b93461ac Merge pull request #2405 from bagerard/remove_encoding_declarations
remove utf8 encoding declaration in test files
2020-11-01 13:16:57 +01:00
Mateusz Stankiewicz
9e40f3ae83 PR amends 2020-10-31 10:47:20 +01:00
Bastien Gérard
f4962fbc40 remove utf8 encoding declaration in test files as it's not needed/recommended 2020-10-30 21:10:21 +01:00
Mateusz Stankiewicz
c9d53ca5d5 Add EnumField 2020-10-30 13:06:37 +01:00
Bastien Gérard
65f50fd713 Merge pull request #2387 from bagerard/fix_change_fields_inconsistencies
fix inconsistencies in ._changed_fields computation
2020-10-29 21:15:31 +01:00
Felix Schultheiß
bf1d04e399 black reformatting 2020-10-29 14:56:08 +01:00
Felix Schultheiß
5a8e5e5a40 updated docstring 2020-10-27 16:34:57 +01:00
Felix Schultheiß
f3919dd839 stripped out integrating shard key from _save_update, use it also in _save_create 2020-10-27 12:55:35 +01:00
Bastien Gérard
9f82a02ddf Merge pull request #2106 from bagerard/add_validation_to_doc
Add a documentation page for validation
2020-10-20 00:27:05 +02:00
Bastien Gérard
015a36c85f minor styling fix in .rst 2020-10-19 23:59:12 +02:00
Bastien Gérard
fbd3388a59 Merge branch 'master' of github.com:MongoEngine/mongoengine into add_validation_to_doc 2020-10-19 23:36:12 +02:00
Bastien Gérard
d8a52d68c5 improve doc in .readthedocs.yml 2020-10-19 23:34:24 +02:00
Bastien Gérard
4286708e2e fix mongoengine setup.py path in .readthedocs.yml 2020-10-18 22:47:00 +02:00
Bastien Gérard
e362d089e1 install mongoengine for readthedocs build to work 2020-10-18 22:44:06 +02:00
Bastien Gérard
6b657886a5 remove explicit install from .readthedocs.yml to rely on default instead 2020-10-18 22:21:45 +02:00
Bastien Gérard
eb16945147 fix requirements.txt location for readthedocs 2020-10-18 22:06:15 +02:00
Bastien Gérard
38047ca992 Merge pull request #2396 from bagerard/fix_readthedocs_failed_build
Fix readthedocs build that failed
2020-10-18 22:04:01 +02:00
Bastien Gérard
c801e79d4b Fix readthedocs build that failed by making it use python3 instead of default python2.7 2020-10-18 21:33:30 +02:00
Bastien Gérard
3fca3739de rework validation documentation based on review 2020-10-18 21:11:16 +02:00
Bastien Gérard
c218c8bb6c Merge branch 'master' of github.com:MongoEngine/mongoengine into add_validation_to_doc 2020-10-17 15:05:27 +02:00
Bastien Gérard
0bbc05995a Merge pull request #2393 from bagerard/fix_listfield_change_0
Fix listfield change detection of index 0
2020-10-11 10:15:18 +02:00
Bastien Gérard
3adb67901b update changelog for #2392 2020-10-11 00:53:46 +02:00
Bastien Gérard
d4350e7da4 Fix for ListField that isn't properly detecting that item 0 is changed 2020-10-10 23:32:22 +02:00
Bastien Gérard
4665658145 Merge pull request #2390 from bagerard/bump_latest_lib_ci
Upgrade pymongo and mongodb versions used in CI
2020-10-07 21:41:12 +02:00
Bastien Gérard
0d289fd5a1 upgrade pymongo and mongodb versions used in CI 2020-10-07 21:30:43 +02:00
Bastien Gérard
aabc18755c fix inconsistencies in ._changed_fields computation 2020-10-07 00:01:09 +02:00
Bastien Gérard
1f2a5db016 fix deprecated use of .update in test suite 2020-08-12 22:30:52 +02:00
Bastien Gérard
ff40f66291 Merge pull request #2243 from bagerard/fix_count_documents_deprecation
Fix count documents deprecation
2020-08-12 22:18:02 +02:00
Bastien Gérard
7f77084e0e minor fixes in doc links 2020-08-12 21:56:38 +02:00
Bastien Gérard
aca4de728e Merge branch 'master' of github.com:MongoEngine/mongoengine into fix_count_documents_deprecation 2020-08-11 23:01:33 +02:00
Bastien Gérard
9e7ca43cad Merge pull request #2365 from hiimdoublej/fix/queryTransform
Fix query transformation regarding special operators
2020-08-11 22:18:33 +02:00
Bastien Gérard
7116dec74a run black to please ci 2020-08-11 21:55:22 +02:00
Bastien Gérard
a5302b870b Merge branch 'fix/queryTransform' of git://github.com/hiimdoublej/mongoengine into hiimdoublej-fix/queryTransform 2020-08-11 21:48:00 +02:00
Bastien Gérard
604e9974b6 Merge pull request #2363 from bagerard/AttributeError_message_attr
fix py3 incompatible code
2020-08-03 21:37:36 +02:00
Johnny Chang
3e1c83f8fa Fix query transformation regarding special operators 2020-08-04 00:30:15 +08:00
Bastien Gérard
e431e27cb2 #2360 fix py3 incompatible code 2020-08-01 15:09:10 +02:00
Bastien Gérard
4f188655d0 Merge pull request #2335 from bagerard/fix_limit0_bug
Fix bug with Doc.objects.limit(0) which should return all docs
2020-05-27 09:43:35 +02:00
Bastien Gérard
194b0cac88 improve doc + changelog 2020-05-26 23:45:35 +02:00
Bastien Gérard
7b4175fc5c Merge branch 'master' of github.com:MongoEngine/mongoengine into fix_limit0_bug 2020-05-26 23:44:05 +02:00
Bastien Gérard
adb5f74ddb Fix a bug in limit0 #2311 2020-05-26 23:37:55 +02:00
Bastien Gérard
107a1c34c8 Merge pull request #2331 from abarto/fix/clone-retain-read-preference-read-concern
Add read_concern to cloned properties. Add read_concern to aggregate().
2020-05-23 23:22:56 +02:00
Bastien Gérard
dc7da5204f Merge branch 'terencehonles-patch-1' 2020-05-23 23:12:33 +02:00
Bastien Gérard
0301bca176 Merge branch 'patch-1' of https://github.com/terencehonles/mongoengine into terencehonles-patch-1 2020-05-23 23:12:01 +02:00
Bastien Gérard
49f9bca23b fix black formatting 2020-05-23 23:08:56 +02:00
Agustin Barto
31498bd7dd Update changelog 2020-05-20 18:58:18 -03:00
Agustin Barto
1698f398eb Add _read_concern to copied properties. Add read_concern to aggregate. Add test to check the read_concern and read_preference values are kept after cloning. 2020-05-20 18:56:13 -03:00
Bastien Gérard
4275c2d7b7 Merge pull request #2330 from terencehonles/fix-empty-deprecation-warning-in-q-node
fix self inflicted deprecation warnings in QNode
2020-05-19 22:02:12 +02:00
Terence D. Honles
22bff8566d fix self inflicted deprecation warnings in QNode 2020-05-19 11:00:30 -07:00
Terence Honles
d8657be320 Fix requirement Pillow < 7 to mention it is for tests only 2020-05-19 10:23:07 -07:00
Bastien Gérard
412bed0f6d fix bug in legacy .count due to with_limit_and_skip that was missing 2020-01-12 11:04:05 +01:00
Bastien Gérard
53cf26b9af Merge branch 'master' of github.com:MongoEngine/mongoengine into fix_count_documents_deprecation 2020-01-12 10:07:36 +01:00
Bastien Gérard
2fa48cd9e5 fix for pymongo < 3.7 2020-01-07 22:24:55 +01:00
Bastien Gérard
e64a7a9448 reformat with latest black 2020-01-07 22:11:24 +01:00
Bastien Gérard
84f3dce492 fix flake8 findings 2020-01-05 22:50:19 +01:00
Bastien Gérard
60c42dddd5 finalize code related to count_documents migration 2020-01-05 22:29:13 +01:00
Bastien Gérard
f93f9406ee improve doc next to code 2020-01-05 21:08:20 +01:00
Bastien Gérard
928770c43a switching to count_documents 2020-01-05 21:01:03 +01:00
Bastien Gérard
d37a30e083 improve doc (based on review) 2019-06-30 20:46:40 +02:00
Bastien Gérard
c9ed930606 Add a documentation page for validation 2019-06-25 22:31:48 +02:00
61 changed files with 803 additions and 239 deletions

.github/workflows/main.yml (new file, 33 lines)

@@ -0,0 +1,33 @@
# This is a basic workflow to help you get started with Actions
name: CI
# Controls when the action will run. Triggers the workflow on push or pull request
# events but only for the master branch
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "build"
build:
# The type of runner that the job will run on
runs-on: ubuntu-latest
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
# Runs a single command using the runners shell
- name: Run a one-line script
run: echo Hello, world!
# Runs a set of commands using the runners shell
- name: Run a multi-line script
run: |
echo Add other actions to build,
echo test, and deploy your project.

.readthedocs.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/conf.py
# Optionally set the version of Python and requirements required to build your docs
python:
version: 3.7
install:
- requirements: docs/requirements.txt
# docs/conf.py is importing mongoengine
# so mongoengine needs to be installed as well
- method: setuptools
path: .


@@ -16,26 +16,26 @@
language: python
dist: xenial
python:
- 3.5
- 3.6
- 3.7
- 3.8
- 3.9
- pypy3
env:
global:
- MONGODB_3_4=3.4.17
- MONGODB_3_6=3.6.12
- MONGODB_3_4=3.4.19
- MONGODB_3_6=3.6.13
- MONGODB_4_0=4.0.13
- PYMONGO_3_4=3.4
- PYMONGO_3_6=3.6
- PYMONGO_3_9=3.9
- PYMONGO_3_10=3.10
- PYMONGO_3_11=3.11
- MAIN_PYTHON_VERSION=3.7
matrix:
- MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10}
- MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_11}
matrix:
# Finish the build as soon as one job fails
@@ -47,9 +47,9 @@ matrix:
- python: 3.7
env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9}
- python: 3.7
env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_10}
env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_11}
- python: 3.8
env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_10}
env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_11}
install:
# Install Mongo
@@ -75,7 +75,7 @@ script:
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine"
after_success:
- - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi
- if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi
notifications:
irc: irc.freenode.org#mongoengine
@@ -103,5 +103,5 @@ deploy:
on:
tags: true
repo: MongoEngine/mongoengine
condition: ($PYMONGO = ${PYMONGO_3_10}) && ($MONGODB = ${MONGODB_3_4})
condition: ($PYMONGO = ${PYMONGO_3_11}) && ($MONGODB = ${MONGODB_3_4})
python: 3.7


@@ -257,3 +257,5 @@ that much better:
* Matthew Simpson (https://github.com/mcsimps2)
* Leonardo Domingues (https://github.com/leodmgs)
* Agustin Barto (https://github.com/abarto)
* Stankiewicz Mateusz (https://github.com/mas15)
* Felix Schultheiß (https://github.com/felix-smashdocs)


@@ -16,6 +16,9 @@ MongoEngine
:target: https://landscape.io/github/MongoEngine/mongoengine/master
:alt: Code Health
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:target: https://github.com/ambv/black
About
=====
MongoEngine is a Python Object-Document Mapper for working with MongoDB.


@@ -6,6 +6,17 @@ Changelog
Development
===========
- (Fill this out as you fix issues and develop your features).
- Bug fix in DynamicDocument which is not parsing known fields in constructor like Document does #2412
- When using pymongo >= 3.7, make use of Collection.count_documents instead of Collection.count
and Cursor.count that got deprecated in pymongo >= 3.7.
This should have a negative impact on performance of count see Issue #2219
- Fix a bug that made the queryset drop the read_preference after clone().
- Remove Py3.5 from CI as it reached EOL and add Python 3.9
- Fix the behavior of Doc.objects.limit(0) which should return all documents (similar to mongodb) #2311
- Bug fix in ListField when updating the first item, it was saving the whole list, instead of
just replacing the first item (as it's usually done) #2392
- Add EnumField: ``mongoengine.fields.EnumField``
- Refactoring - Remove useless code related to Document.__only_fields and Queryset.only_fields
Changes in 0.20.0
=================
@@ -28,7 +39,7 @@ Changes in 0.20.0
Changes in 0.19.1
=================
- Requires Pillow < 7.0.0 as it dropped Python2 support
- Tests require Pillow < 7.0.0 as it dropped Python2 support
- DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of
pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079
@@ -456,9 +467,6 @@ Changes in 0.8.3
- Document.select_related() now respects ``db_alias`` (#377)
- Reload uses shard_key if applicable (#384)
- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396)
**Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3
- Fixed pickling dynamic documents ``_dynamic_fields`` (#387)
- Fixed ListField setslice and delslice dirty tracking (#390)
- Added Django 1.5 PY3 support (#392)
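
To illustrate the ``Doc.objects.limit(0)`` change listed under Development above, a minimal sketch (``Book``, the data, and the database name are illustrative, and a live MongoDB connection is assumed):

.. code-block:: python

    from mongoengine import Document, StringField, connect

    connect("limit_demo")  # hypothetical database name

    class Book(Document):
        title = StringField()

    Book(title="A").save()
    Book(title="B").save()

    # limit(0) now means "no limit", mirroring MongoDB's cursor.limit(0)
    assert Book.objects.limit(0).count() == Book.objects.count() == 2

    # empty slices such as qs[5:5] still return no documents
    assert list(Book.objects[5:5]) == []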


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# MongoEngine documentation build configuration file, created by
# sphinx-quickstart on Sun Nov 22 18:14:13 2009.


@@ -31,6 +31,8 @@ the :attr:`host` to
connect('project1', host='mongodb://localhost/database_name')
.. note:: URI containing SRV records (e.g mongodb+srv://server.example.com/) can be used as well as the :attr:`host`
.. note:: Database, username and password from URI string overrides
corresponding parameters in :func:`~mongoengine.connect`: ::
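
A hedged illustration of the two notes above (host names, credentials, and database names are placeholders):

.. code-block:: python

    from mongoengine import connect

    # An SRV-style URI can be passed directly as the host
    connect(host='mongodb+srv://user:password@cluster0.example.com/project1')

    # Database, username and password embedded in the URI take precedence
    # over the corresponding connect() parameters
    connect('some_db', host='mongodb://user:password@localhost/actual_db')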


@@ -76,6 +76,7 @@ are as follows:
* :class:`~mongoengine.fields.EmailField`
* :class:`~mongoengine.fields.EmbeddedDocumentField`
* :class:`~mongoengine.fields.EmbeddedDocumentListField`
* :class:`~mongoengine.fields.EnumField`
* :class:`~mongoengine.fields.FileField`
* :class:`~mongoengine.fields.FloatField`
* :class:`~mongoengine.fields.GenericEmbeddedDocumentField`
@@ -426,19 +427,6 @@ either a single field name, or a list or tuple of field names::
first_name = StringField()
last_name = StringField(unique_with='first_name')
Skipping Document validation on save
------------------------------------
You can also skip the whole document validation process by setting
``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`
method::
class Recipient(Document):
name = StringField()
email = EmailField()
recipient = Recipient(name='admin', email='root@localhost')
recipient.save() # will raise a ValidationError while
recipient.save(validate=False) # won't
Document collections
====================


@@ -41,35 +41,6 @@ already exist, then any changes will be updated atomically. For example::
.. seealso::
:ref:`guide-atomic-updates`
Pre save data validation and cleaning
-------------------------------------
MongoEngine allows you to create custom cleaning rules for your documents when
calling :meth:`~mongoengine.Document.save`. By providing a custom
:meth:`~mongoengine.Document.clean` method you can do any pre validation / data
cleaning.
This might be useful if you want to ensure a default value based on other
document values for example::
class Essay(Document):
status = StringField(choices=('Published', 'Draft'), required=True)
pub_date = DateTimeField()
def clean(self):
"""Ensures that only published essays have a `pub_date` and
automatically sets `pub_date` if essay is published and `pub_date`
is not set"""
if self.status == 'Draft' and self.pub_date is not None:
msg = 'Draft entries should not have a publication date.'
raise ValidationError(msg)
# Set the pub_date for published items if not set.
if self.status == 'Published' and self.pub_date is None:
self.pub_date = datetime.now()
.. note::
Cleaning is only called if validation is turned on and when calling
:meth:`~mongoengine.Document.save`.
Cascading Saves
---------------
If your document contains :class:`~mongoengine.fields.ReferenceField` or


@@ -10,6 +10,7 @@ User Guide
defining-documents
document-instances
querying
validation
gridfs
signals
text-indexes


@@ -609,7 +609,7 @@ to push values with index::
.. note::
Currently only top level lists are handled, future versions of mongodb /
pymongo plan to support nested positional operators. See `The $ positional
operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_.
operator <https://docs.mongodb.com/manual/tutorial/update-documents/#Updating-The%24positionaloperator>`_.
Server-side javascript execution
================================

docs/guide/validation.rst (new file, 123 lines)

@@ -0,0 +1,123 @@
====================
Document Validation
====================
By design, MongoEngine strictly validates the documents right before they are inserted in MongoDB
and makes sure they are consistent with the fields defined in your models.
MongoEngine makes the assumption that the documents that exist in the DB are compliant with the schema.
This means that Mongoengine will not validate a document when an object is loaded from the DB into an instance
of your model but this operation may fail under some circumstances (e.g. if there is a field in
the document fetched from the database that is not defined in your model).
Built-in validation
===================
Mongoengine provides different fields that encapsulate the corresponding validation
out of the box. Validation runs when calling `.validate()` or `.save()`
.. code-block:: python
from mongoengine import Document, EmailField
class User(Document):
email = EmailField()
age = IntField(min_value=0, max_value=99)
user = User(email='invalid@', age=24)
user.validate() # raises ValidationError (Invalid email address: ['email'])
user.save() # raises ValidationError (Invalid email address: ['email'])
user2 = User(email='john.doe@garbage.com', age=1000)
user2.save() # raises ValidationError (Integer value is too large: ['age'])
Custom validation
=================
The following feature can be used to customize the validation:
* Field `validation` parameter
.. code-block:: python
def not_john_doe(name):
if name == 'John Doe':
raise ValidationError("John Doe is not a valid name")
class Person(Document):
full_name = StringField(validation=not_john_doe)
Person(full_name='Billy Doe').save()
Person(full_name='John Doe').save() # raises ValidationError (John Doe is not a valid name)
* Document `clean` method
This method is called as part of :meth:`~mongoengine.document.Document.save` and should be used to provide
custom model validation and/or to modify some of the field values prior to validation.
For instance, you could use it to automatically provide a value for a field, or to do validation
that requires access to more than a single field.
.. code-block:: python
class Essay(Document):
status = StringField(choices=('Published', 'Draft'), required=True)
pub_date = DateTimeField()
def clean(self):
# Validate that only published essays have a `pub_date`
if self.status == 'Draft' and self.pub_date is not None:
raise ValidationError('Draft entries should not have a publication date.')
# Set the pub_date for published items if not set.
if self.status == 'Published' and self.pub_date is None:
self.pub_date = datetime.now()
.. note::
Cleaning is only called if validation is turned on and when calling
:meth:`~mongoengine.Document.save`.
* Adding custom Field classes
We recommend as much as possible to use fields provided by MongoEngine. However, it is also possible
to subclass a Field and encapsulate some validation by overriding the `validate` method
.. code-block:: python
class AgeField(IntField):
def validate(self, value):
super(AgeField, self).validate(value) # let IntField.validate run first
if value == 60:
self.error('60 is not allowed')
class Person(Document):
age = AgeField(min_value=0, max_value=99)
Person(age=20).save() # passes
Person(age=1000).save() # raises ValidationError (Integer value is too large: ['age'])
Person(age=60).save() # raises ValidationError (Person:None) (60 is not allowed: ['age'])
.. note::
When overriding `validate`, use `self.error("your-custom-error")` instead of raising ValidationError explicitly,
it will provide a better context with the error message
Skipping validation
====================
Although discouraged, as it allows you to violate field constraints, if for some reason you need to disable
the validation and cleaning of a document when you call :meth:`~mongoengine.document.Document.save`, you can use `.save(validate=False)`.
.. code-block:: python
class Person(Document):
age = IntField(max_value=100)
Person(age=1000).save() # raises ValidationError (Integer value is too large)
Person(age=1000).save(validate=False)
person = Person.objects.first()
assert person.age == 1000

docs/requirements.txt (new file, 3 lines)

@@ -0,0 +1,3 @@
pymongo>=3.11
Sphinx==3.2.1
sphinx-rtd-theme==0.5.0


@@ -179,7 +179,7 @@ class BaseList(list):
def _mark_as_changed(self, key=None):
if hasattr(self._instance, "_mark_as_changed"):
if key:
if key is not None:
self._instance._mark_as_changed(
"{}.{}".format(self._name, key % len(self))
)
@@ -215,7 +215,7 @@ class EmbeddedDocumentList(BaseList):
Filters the list by only including embedded documents with the
given keyword arguments.
This method only supports simple comparison (e.g: .filter(name='John Doe'))
This method only supports simple comparison (e.g. .filter(name='John Doe'))
and does not support operators like __gte, __lte, __icontains like queryset.filter does
:param kwargs: The keyword arguments corresponding to the fields to


@@ -64,8 +64,6 @@ class BaseDocument:
It may contain additional reserved keywords, e.g. "__auto_convert".
:param __auto_convert: If True, supplied values will be converted
to Python-type values via each field's `to_python` method.
:param __only_fields: A set of fields that have been loaded for
this document. Empty if all fields have been loaded.
:param _created: Indicates whether this is a brand new document
or whether it's already been persisted before. Defaults to true.
"""
@@ -80,8 +78,6 @@ class BaseDocument:
__auto_convert = values.pop("__auto_convert", True)
__only_fields = set(values.pop("__only_fields", values))
_created = values.pop("_created", True)
signals.pre_init.send(self.__class__, document=self, values=values)
@@ -106,10 +102,8 @@ class BaseDocument:
self._dynamic_fields = SON()
# Assign default values to the instance.
# We set default values only for fields loaded from DB. See
# https://github.com/mongoengine/mongoengine/issues/399 for more info.
for key, field in self._fields.items():
if self._db_field_map.get(key, key) in __only_fields:
if self._db_field_map.get(key, key) in values:
continue
value = getattr(self, key, None)
setattr(self, key, value)
@@ -117,25 +111,22 @@ class BaseDocument:
if "_cls" not in values:
self._cls = self._class_name
# Set passed values after initialisation
if self._dynamic:
dynamic_data = {}
for key, value in values.items():
if key in self._fields or key == "_id":
setattr(self, key, value)
else:
# Set actual values
dynamic_data = {}
FileField = _import_class("FileField")
for key, value in values.items():
key = self._reverse_db_field_map.get(key, key)
field = self._fields.get(key)
if field or key in ("id", "pk", "_cls"):
if __auto_convert and value is not None:
if field and not isinstance(field, FileField):
value = field.to_python(value)
setattr(self, key, value)
else:
if self._dynamic:
dynamic_data[key] = value
else:
FileField = _import_class("FileField")
for key, value in values.items():
key = self._reverse_db_field_map.get(key, key)
if key in self._fields or key in ("id", "pk", "_cls"):
if __auto_convert and value is not None:
field = self._fields.get(key)
if field and not isinstance(field, FileField):
value = field.to_python(value)
setattr(self, key, value)
else:
# For strict Document
self._data[key] = value
# Set any get_<field>_display methods
@@ -314,7 +305,8 @@ class BaseDocument:
def clean(self):
"""
Hook for doing document level data cleaning before validation is run.
Hook for doing document level data cleaning (usually validation or assignment)
before validation is run.
Any ValidationError raised by this method will not be associated with
a particular field; it will have a special-case association with the
@@ -537,6 +529,9 @@ class BaseDocument:
"""Using _get_changed_fields iterate and remove any fields that
are marked as changed.
"""
ReferenceField = _import_class("ReferenceField")
GenericReferenceField = _import_class("GenericReferenceField")
for changed in self._get_changed_fields():
parts = changed.split(".")
data = self
@@ -549,7 +544,8 @@ class BaseDocument:
elif isinstance(data, dict):
data = data.get(part, None)
else:
data = getattr(data, part, None)
field_name = data._reverse_db_field_map.get(part, part)
data = getattr(data, field_name, None)
if not isinstance(data, LazyReference) and hasattr(
data, "_changed_fields"
@@ -558,10 +554,40 @@ class BaseDocument:
continue
data._changed_fields = []
elif isinstance(data, (list, tuple, dict)):
if hasattr(data, "field") and isinstance(
data.field, (ReferenceField, GenericReferenceField)
):
continue
BaseDocument._nestable_types_clear_changed_fields(data)
self._changed_fields = []
def _nestable_types_changed_fields(self, changed_fields, base_key, data):
@staticmethod
def _nestable_types_clear_changed_fields(data):
"""Inspect nested data for changed fields
:param data: data to inspect for changes
"""
Document = _import_class("Document")
# Loop list / dict fields as they contain documents
# Determine the iterator to use
if not hasattr(data, "items"):
iterator = enumerate(data)
else:
iterator = data.items()
for index_or_key, value in iterator:
if hasattr(value, "_get_changed_fields") and not isinstance(
value, Document
): # don't follow references
value._clear_changed_fields()
elif isinstance(value, (list, tuple, dict)):
BaseDocument._nestable_types_clear_changed_fields(value)
@staticmethod
def _nestable_types_changed_fields(changed_fields, base_key, data):
"""Inspect nested data for changed fields
:param changed_fields: Previously collected changed fields
@@ -586,7 +612,9 @@ class BaseDocument:
changed = value._get_changed_fields()
changed_fields += ["{}{}".format(item_key, k) for k in changed if k]
elif isinstance(value, (list, tuple, dict)):
self._nestable_types_changed_fields(changed_fields, item_key, value)
BaseDocument._nestable_types_changed_fields(
changed_fields, item_key, value
)
def _get_changed_fields(self):
"""Return a list of all fields that have explicitly been changed.
@@ -721,11 +749,8 @@ class BaseDocument:
return cls._meta.get("collection", None)
@classmethod
def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
def _from_son(cls, son, _auto_dereference=True, created=False):
"""Create an instance of a Document (subclass) from a PyMongo SON."""
if not only_fields:
only_fields = []
if son and not isinstance(son, dict):
raise ValueError(
"The source SON object needs to be of type 'dict' but a '%s' was found"
@@ -780,9 +805,7 @@ class BaseDocument:
if cls.STRICT:
data = {k: v for k, v in data.items() if k in cls._fields}
obj = cls(
__auto_convert=False, _created=created, __only_fields=only_fields, **data
)
obj = cls(__auto_convert=False, _created=created, **data)
obj._changed_fields = []
if not _auto_dereference:
obj._fields = fields


@@ -464,9 +464,9 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
# insert_one will provoke UniqueError alongside save does not
# therefore, it need to catch and call replace_one.
if "_id" in doc:
raw_object = wc_collection.find_one_and_replace(
{"_id": doc["_id"]}, doc
)
select_dict = {"_id": doc["_id"]}
select_dict = self._integrate_shard_key(doc, select_dict)
raw_object = wc_collection.find_one_and_replace(select_dict, doc)
if raw_object:
return doc["_id"]
@@ -489,6 +489,23 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
return update_doc
def _integrate_shard_key(self, doc, select_dict):
"""Integrates the collection's shard key to the `select_dict`, which will be used for the query.
The value from the shard key is taken from the `doc` and finally the select_dict is returned.
"""
# Need to add shard key to query, or you get an error
shard_key = self._meta.get("shard_key", tuple())
for k in shard_key:
path = self._lookup_field(k.split("."))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict[".".join(actual_key)] = val
return select_dict
def _save_update(self, doc, save_condition, write_concern):
"""Update an existing document.
@@ -504,15 +521,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
select_dict["_id"] = object_id
# Need to add shard key to query, or you get an error
shard_key = self._meta.get("shard_key", tuple())
for k in shard_key:
path = self._lookup_field(k.split("."))
actual_key = [p.db_field for p in path]
val = doc
for ak in actual_key:
val = val[ak]
select_dict[".".join(actual_key)] = val
select_dict = self._integrate_shard_key(doc, select_dict)
update_doc = self._get_update_doc()
if update_doc:
@@ -639,7 +648,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
write_concern=write_concern, _from_doc_delete=True
)
except pymongo.errors.OperationFailure as err:
message = "Could not delete document (%s)" % err.message
message = "Could not delete document (%s)" % err.args
raise OperationError(message)
signals.post_delete.send(self.__class__, document=self, **signal_kwargs)
@@ -919,7 +928,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
@classmethod
def list_indexes(cls):
""" Lists all of the indexes that should be created for given
"""Lists all of the indexes that should be created for given
collection. It includes all the indexes from super- and sub-classes.
"""
if cls._meta.get("abstract"):
@@ -984,7 +993,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
@classmethod
def compare_indexes(cls):
""" Compares the indexes defined in MongoEngine with the ones
"""Compares the indexes defined in MongoEngine with the ones
existing in the database. Returns any missing/extra indexes.
"""


@@ -87,6 +87,7 @@ __all__ = (
"PolygonField",
"SequenceField",
"UUIDField",
"EnumField",
"MultiPointField",
"MultiLineStringField",
"MultiPolygonField",
@@ -433,7 +434,7 @@ class DecimalField(BaseField):
:param max_value: Validation rule for the maximum acceptable value.
:param force_string: Store the value as a string (instead of a float).
Be aware that this affects query sorting and operation like lte, gte (as string comparison is applied)
and some query operator won't work (e.g: inc, dec)
and some query operator won't work (e.g. inc, dec)
:param precision: Number of decimal places to store.
:param rounding: The rounding rule from the python decimal library:
@@ -773,6 +774,9 @@ class EmbeddedDocumentField(BaseField):
def prepare_query_value(self, op, value):
if value is not None and not isinstance(value, self.document_type):
# Short circuit for special operators, returning them as is
if isinstance(value, dict) and all(k.startswith("$") for k in value.keys()):
return value
try:
value = self.document_type._from_son(value)
except ValueError:
@@ -844,8 +848,7 @@ class DynamicField(BaseField):
Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
def to_mongo(self, value, use_db_field=True, fields=None):
"""Convert a Python type to a MongoDB compatible type.
"""
"""Convert a Python type to a MongoDB compatible type."""
if isinstance(value, str):
return value
@@ -1619,6 +1622,70 @@ class BinaryField(BaseField):
return super().prepare_query_value(op, self.to_mongo(value))
class EnumField(BaseField):
"""Enumeration Field. Values are stored underneath as is,
so it will only work with simple types (str, int, etc) that
are bson encodable
Example usage:
.. code-block:: python
class Status(Enum):
NEW = 'new'
DONE = 'done'
class ModelWithEnum(Document):
status = EnumField(Status, default=Status.NEW)
ModelWithEnum(status='done')
ModelWithEnum(status=Status.DONE)
Enum fields can be searched using enum or its value:
.. code-block:: python
ModelWithEnum.objects(status='new').count()
ModelWithEnum.objects(status=Status.NEW).count()
Note that choices cannot be set explicitly, they are derived
from the provided enum class.
"""
def __init__(self, enum, **kwargs):
self._enum_cls = enum
if "choices" in kwargs:
raise ValueError(
"'choices' can't be set on EnumField, "
"it is implicitly set as the enum class"
)
kwargs["choices"] = list(self._enum_cls)
super().__init__(**kwargs)
def __set__(self, instance, value):
is_legal_value = value is None or isinstance(value, self._enum_cls)
if not is_legal_value:
try:
value = self._enum_cls(value)
except Exception:
pass
return super().__set__(instance, value)
def to_mongo(self, value):
if isinstance(value, self._enum_cls):
return value.value
return value
def validate(self, value):
if value and not isinstance(value, self._enum_cls):
try:
self._enum_cls(value)
except Exception as e:
self.error(str(e))
def prepare_query_value(self, op, value):
if value is None:
return value
return super().prepare_query_value(op, self.to_mongo(value))
class GridFSError(Exception):
pass
@@ -2042,7 +2109,7 @@ class ImageField(FileField):
class SequenceField(BaseField):
"""Provides a sequential counter see:
http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
https://docs.mongodb.com/manual/reference/method/ObjectId/#ObjectIDs-SequenceNumbers
.. note::


@@ -2,6 +2,7 @@
Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support.
"""
import pymongo
from pymongo.errors import OperationFailure
_PYMONGO_37 = (3, 7)
@@ -10,13 +11,41 @@ PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37
def count_documents(collection, filter):
"""Pymongo>3.7 deprecates count in favour of count_documents"""
def count_documents(
collection, filter, skip=None, limit=None, hint=None, collation=None
):
"""Pymongo>3.7 deprecates count in favour of count_documents
"""
if limit == 0:
return 0 # Pymongo raises an OperationFailure if called with limit=0
kwargs = {}
if skip is not None:
kwargs["skip"] = skip
if limit is not None:
kwargs["limit"] = limit
if hint not in (-1, None):
kwargs["hint"] = hint
if collation is not None:
kwargs["collation"] = collation
# count_documents appeared in pymongo 3.7
if IS_PYMONGO_GTE_37:
return collection.count_documents(filter)
else:
count = collection.find(filter).count()
return count
try:
return collection.count_documents(filter=filter, **kwargs)
except OperationFailure:
# OperationFailure - accounts for some operators that used to work
# with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere)
# fallback to deprecated Cursor.count
# Keeping this should be reevaluated the day pymongo removes .count entirely
pass
cursor = collection.find(filter)
for option, option_value in kwargs.items():
cursor_method = getattr(cursor, option)
cursor = cursor_method(option_value)
with_limit_and_skip = "skip" in kwargs or "limit" in kwargs
return cursor.count(with_limit_and_skip=with_limit_and_skip)
def list_collection_names(db, include_system_collections=False):
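
A hedged usage sketch of the extended ``count_documents`` helper above (the database, collection, and filter are illustrative):

.. code-block:: python

    from pymongo import MongoClient

    from mongoengine.pymongo_support import count_documents

    coll = MongoClient().demo_db.book  # hypothetical database/collection

    # Uses Collection.count_documents on pymongo >= 3.7 and falls back to the
    # deprecated Cursor.count on older drivers or unsupported operators
    n = count_documents(coll, {"author": "J. Doe"}, skip=10, limit=5)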


@@ -29,6 +29,7 @@ from mongoengine.errors import (
NotUniqueError,
OperationError,
)
from mongoengine.pymongo_support import count_documents
from mongoengine.queryset import transform
from mongoengine.queryset.field_list import QueryFieldList
from mongoengine.queryset.visitor import Q, QNode
@@ -83,13 +84,20 @@ class BaseQuerySet:
self._cursor_obj = None
self._limit = None
self._skip = None
self._hint = -1 # Using -1 as None is a valid value for hint
self._collation = None
self._batch_size = None
self.only_fields = []
self._max_time_ms = None
self._comment = None
# Hack - As people expect cursor[5:5] to return
# an empty result set. It's hard to do that right, though, because the
# server uses limit(0) to mean 'no limit'. So we set _empty
# in that case and check for it when iterating. We also unset
# it anytime we change _limit. Inspired by how it is done in pymongo.Cursor
self._empty = False
def __call__(self, q_obj=None, **query):
"""Filter the selected documents by calling the
:class:`~mongoengine.queryset.QuerySet` with a query.
@@ -162,6 +170,7 @@ class BaseQuerySet:
[<User: User object>, <User: User object>]
"""
queryset = self.clone()
queryset._empty = False
# Handle a slice
if isinstance(key, slice):
@@ -169,6 +178,8 @@ class BaseQuerySet:
queryset._skip, queryset._limit = key.start, key.stop
if key.start and key.stop:
queryset._limit = key.stop - key.start
if queryset._limit == 0:
queryset._empty = True
# Allow further QuerySet modifications to be performed
return queryset
@@ -178,9 +189,7 @@ class BaseQuerySet:
if queryset._scalar:
return queryset._get_scalar(
queryset._document._from_son(
queryset._cursor[key],
_auto_dereference=self._auto_dereference,
only_fields=self.only_fields,
queryset._cursor[key], _auto_dereference=self._auto_dereference,
)
)
@@ -188,9 +197,7 @@ class BaseQuerySet:
return queryset._cursor[key]
return queryset._document._from_son(
queryset._cursor[key],
_auto_dereference=self._auto_dereference,
only_fields=self.only_fields,
queryset._cursor[key], _auto_dereference=self._auto_dereference,
)
raise TypeError("Provide a slice or an integer index")
@@ -394,9 +401,36 @@ class BaseQuerySet:
:meth:`skip` that has been applied to this cursor into account when
getting the count
"""
if self._limit == 0 and with_limit_and_skip is False or self._none:
# mimic the fact that setting .limit(0) in pymongo sets no limit
# https://docs.mongodb.com/manual/reference/method/cursor.limit/#zero-value
if (
self._limit == 0
and with_limit_and_skip is False
or self._none
or self._empty
):
return 0
count = self._cursor.count(with_limit_and_skip=with_limit_and_skip)
kwargs = (
{"limit": self._limit, "skip": self._skip} if with_limit_and_skip else {}
)
if self._limit == 0:
# mimic the fact that historically .limit(0) sets no limit
kwargs.pop("limit", None)
if self._hint not in (-1, None):
kwargs["hint"] = self._hint
if self._collation:
kwargs["collation"] = self._collation
count = count_documents(
collection=self._cursor.collection,
filter=self._cursor._Cursor__spec,
**kwargs
)
self._cursor_obj = None
return count
@@ -680,12 +714,10 @@ class BaseQuerySet:
if full_response:
if result["value"] is not None:
result["value"] = self._document._from_son(
result["value"], only_fields=self.only_fields
)
result["value"] = self._document._from_son(result["value"])
else:
if result is not None:
result = self._document._from_son(result, only_fields=self.only_fields)
result = self._document._from_son(result)
return result
@@ -718,24 +750,22 @@ class BaseQuerySet:
docs = self._collection.find({"_id": {"$in": object_ids}}, **self._cursor_args)
if self._scalar:
for doc in docs:
doc_map[doc["_id"]] = self._get_scalar(
self._document._from_son(doc, only_fields=self.only_fields)
)
doc_map[doc["_id"]] = self._get_scalar(self._document._from_son(doc))
elif self._as_pymongo:
for doc in docs:
doc_map[doc["_id"]] = doc
else:
for doc in docs:
doc_map[doc["_id"]] = self._document._from_son(
doc,
only_fields=self.only_fields,
_auto_dereference=self._auto_dereference,
doc, _auto_dereference=self._auto_dereference,
)
return doc_map
def none(self):
"""Helper that just returns a list"""
"""Returns a queryset that never returns any objects and no query will be executed when accessing the results
inspired by django none() https://docs.djangoproject.com/en/dev/ref/models/querysets/#none
"""
queryset = self.clone()
queryset._none = True
return queryset
@@ -789,16 +819,17 @@ class BaseQuerySet:
"_snapshot",
"_timeout",
"_read_preference",
"_read_concern",
"_iter",
"_scalar",
"_as_pymongo",
"_limit",
"_skip",
"_empty",
"_hint",
"_collation",
"_auto_dereference",
"_search_text",
"only_fields",
"_max_time_ms",
"_comment",
"_batch_size",
@@ -834,6 +865,7 @@ class BaseQuerySet:
"""
queryset = self.clone()
queryset._limit = n
queryset._empty = False # cancels the effect of empty
# If a cursor object has already been created, apply the limit to it.
if queryset._cursor_obj:
@@ -1001,7 +1033,6 @@ class BaseQuerySet:
.. versionchanged:: 0.5 - Added subfield support
"""
fields = {f: QueryFieldList.ONLY for f in fields}
self.only_fields = list(fields.keys())
return self.fields(True, **fields)
def exclude(self, *fields):
@@ -1266,10 +1297,7 @@ class BaseQuerySet:
def from_json(self, json_data):
"""Converts json data to unsaved objects"""
son_data = json_util.loads(json_data)
return [
self._document._from_son(data, only_fields=self.only_fields)
for data in son_data
]
return [self._document._from_son(data) for data in son_data]
def aggregate(self, pipeline, *suppl_pipeline, **kwargs):
"""Perform a aggregate function based in your queryset params
@@ -1311,10 +1339,11 @@ class BaseQuerySet:
final_pipeline = initial_pipeline + user_pipeline
collection = self._collection
if self._read_preference is not None:
if self._read_preference is not None or self._read_concern is not None:
collection = self._collection.with_options(
read_preference=self._read_preference
read_preference=self._read_preference, read_concern=self._read_concern
)
return collection.aggregate(final_pipeline, cursor={}, **kwargs)
# JS functionality
@@ -1584,7 +1613,7 @@ class BaseQuerySet:
def __next__(self):
"""Wrap the result in a :class:`~mongoengine.Document` object.
"""
if self._limit == 0 or self._none:
if self._none or self._empty:
raise StopIteration
raw_doc = next(self._cursor)
@@ -1593,9 +1622,7 @@ class BaseQuerySet:
return raw_doc
doc = self._document._from_son(
raw_doc,
_auto_dereference=self._auto_dereference,
only_fields=self.only_fields,
raw_doc, _auto_dereference=self._auto_dereference,
)
if self._scalar:
@@ -1603,8 +1630,6 @@ class BaseQuerySet:
return doc
next = __next__ # For Python2 support
def rewind(self):
"""Rewind the cursor to its unevaluated state.


@@ -144,6 +144,7 @@ class QuerySet(BaseQuerySet):
return super().count(with_limit_and_skip)
if self._len is None:
# cache the length
self._len = super().count(with_limit_and_skip)
return self._len


@@ -7,6 +7,11 @@ from mongoengine.queryset import transform
__all__ = ("Q", "QNode")
def warn_empty_is_deprecated():
msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
warnings.warn(msg, DeprecationWarning, stacklevel=2)
class QNodeVisitor:
"""Base visitor class for visiting Q-object nodes in a query tree.
"""
@@ -98,19 +103,18 @@ class QNode:
object.
"""
# If the other Q() is empty, ignore it and just use `self`.
if getattr(other, "empty", True):
if not bool(other):
return self
# Or if this Q is empty, ignore it and just use `other`.
if self.empty:
if not bool(self):
return other
return QCombination(operation, [self, other])
@property
def empty(self):
msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
warnings.warn(msg, DeprecationWarning)
warn_empty_is_deprecated()
return False
def __or__(self, other):
@@ -152,8 +156,7 @@ class QCombination(QNode):
@property
def empty(self):
msg = "'empty' property is deprecated in favour of using 'not bool(filter)'"
warnings.warn(msg, DeprecationWarning)
warn_empty_is_deprecated()
return not bool(self.children)
def __eq__(self, other):
@@ -186,4 +189,5 @@ class Q(QNode):
@property
def empty(self):
warn_empty_is_deprecated()
return not bool(self.query)
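
A minimal sketch of the replacement pattern that the deprecation message above recommends (field names are illustrative):

.. code-block:: python

    from mongoengine.queryset.visitor import Q

    empty_filter = Q()
    combined = Q(name="John") | Q(age__gte=18)

    # Preferred over the deprecated `empty` property
    assert not bool(empty_filter)
    assert bool(combined)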


@@ -1,3 +0,0 @@
pymongo>=3.4
Sphinx==1.5.5
sphinx-rtd-theme==0.2.4


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
from mongoengine import *


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
from bson import SON
@@ -29,7 +28,8 @@ class TestDelta(MongoDBTestCase):
self.delta(Document)
self.delta(DynamicDocument)
def delta(self, DocClass):
@staticmethod
def delta(DocClass):
class Doc(DocClass):
string_field = StringField()
int_field = IntField()
@@ -428,13 +428,20 @@ class TestDelta(MongoDBTestCase):
assert doc.dict_field == {"hello": "world"}
assert doc.list_field == ["1", 2, {"hello": "world"}]
def test_delta_recursive_db_field(self):
def test_delta_recursive_db_field_on_doc_and_embeddeddoc(self):
self.delta_recursive_db_field(Document, EmbeddedDocument)
def test_delta_recursive_db_field_on_doc_and_dynamicembeddeddoc(self):
self.delta_recursive_db_field(Document, DynamicEmbeddedDocument)
def test_delta_recursive_db_field_on_dynamicdoc_and_embeddeddoc(self):
self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument)
def test_delta_recursive_db_field_on_dynamicdoc_and_dynamicembeddeddoc(self):
self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument)
def delta_recursive_db_field(self, DocClass, EmbeddedClass):
@staticmethod
def delta_recursive_db_field(DocClass, EmbeddedClass):
class Embedded(EmbeddedClass):
string_field = StringField(db_field="db_string_field")
int_field = IntField(db_field="db_int_field")
@@ -487,6 +494,7 @@ class TestDelta(MongoDBTestCase):
doc = doc.reload(10)
assert doc.embedded_field.dict_field == {}
assert doc._get_changed_fields() == []
doc.embedded_field.list_field = []
assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
@@ -537,6 +545,7 @@ class TestDelta(MongoDBTestCase):
{},
)
doc.save()
assert doc._get_changed_fields() == []
doc = doc.reload(10)
assert doc.embedded_field.list_field[0] == "1"
@@ -634,6 +643,7 @@ class TestDelta(MongoDBTestCase):
doc.save()
doc = doc.reload(10)
assert doc._delta() == ({}, {},)
del doc.embedded_field.list_field[2].list_field
assert doc._delta() == (
{},
@@ -732,12 +742,12 @@ class TestDelta(MongoDBTestCase):
assert organization._get_changed_fields() == []
updates, removals = organization._delta()
assert {} == removals
assert {} == updates
assert removals == {}
assert updates == {}
organization.employees.append(person)
updates, removals = organization._delta()
assert {} == removals
assert removals == {}
assert "employees" in updates
def test_delta_with_dbref_false(self):
@@ -749,12 +759,12 @@ class TestDelta(MongoDBTestCase):
assert organization._get_changed_fields() == []
updates, removals = organization._delta()
assert {} == removals
assert {} == updates
assert removals == {}
assert updates == {}
organization.employees.append(person)
updates, removals = organization._delta()
assert {} == removals
assert removals == {}
assert "employees" in updates
def test_nested_nested_fields_mark_as_changed(self):
@@ -767,19 +777,46 @@ class TestDelta(MongoDBTestCase):
MyDoc.drop_collection()
mydoc = MyDoc(
name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}
).save()
MyDoc(name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}}).save()
mydoc = MyDoc.objects.first()
subdoc = mydoc.subs["a"]["b"]
subdoc.name = "bar"
assert ["name"] == subdoc._get_changed_fields()
assert ["subs.a.b.name"] == mydoc._get_changed_fields()
assert subdoc._get_changed_fields() == ["name"]
assert mydoc._get_changed_fields() == ["subs.a.b.name"]
mydoc._clear_changed_fields()
assert [] == mydoc._get_changed_fields()
assert mydoc._get_changed_fields() == []
def test_nested_nested_fields_db_field_set__gets_mark_as_changed_and_cleaned(self):
class EmbeddedDoc(EmbeddedDocument):
name = StringField(db_field="db_name")
class MyDoc(Document):
embed = EmbeddedDocumentField(EmbeddedDoc, db_field="db_embed")
name = StringField(db_field="db_name")
MyDoc.drop_collection()
MyDoc(name="testcase1", embed=EmbeddedDoc(name="foo")).save()
mydoc = MyDoc.objects.first()
mydoc.embed.name = "foo1"
assert mydoc.embed._get_changed_fields() == ["db_name"]
assert mydoc._get_changed_fields() == ["db_embed.db_name"]
mydoc = MyDoc.objects.first()
embed = EmbeddedDoc(name="foo2")
embed.name = "bar"
mydoc.embed = embed
assert embed._get_changed_fields() == ["db_name"]
assert mydoc._get_changed_fields() == ["db_embed"]
mydoc._clear_changed_fields()
assert mydoc._get_changed_fields() == []
def test_lower_level_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
@@ -794,17 +831,17 @@ class TestDelta(MongoDBTestCase):
mydoc = MyDoc.objects.first()
mydoc.subs["a"] = EmbeddedDoc()
assert ["subs.a"] == mydoc._get_changed_fields()
assert mydoc._get_changed_fields() == ["subs.a"]
subdoc = mydoc.subs["a"]
subdoc.name = "bar"
assert ["name"] == subdoc._get_changed_fields()
assert ["subs.a"] == mydoc._get_changed_fields()
assert subdoc._get_changed_fields() == ["name"]
assert mydoc._get_changed_fields() == ["subs.a"]
mydoc.save()
mydoc._clear_changed_fields()
assert [] == mydoc._get_changed_fields()
assert mydoc._get_changed_fields() == []
def test_upper_level_mark_as_changed(self):
class EmbeddedDoc(EmbeddedDocument):
@@ -821,15 +858,15 @@ class TestDelta(MongoDBTestCase):
subdoc = mydoc.subs["a"]
subdoc.name = "bar"
assert ["name"] == subdoc._get_changed_fields()
assert ["subs.a.name"] == mydoc._get_changed_fields()
assert subdoc._get_changed_fields() == ["name"]
assert mydoc._get_changed_fields() == ["subs.a.name"]
mydoc.subs["a"] = EmbeddedDoc()
assert ["subs.a"] == mydoc._get_changed_fields()
assert mydoc._get_changed_fields() == ["subs.a"]
mydoc.save()
mydoc._clear_changed_fields()
assert [] == mydoc._get_changed_fields()
assert mydoc._get_changed_fields() == []
def test_referenced_object_changed_attributes(self):
"""Ensures that when you save a new reference to a field, the referenced object isn't altered"""


@@ -37,6 +37,19 @@ class TestDynamicDocument(MongoDBTestCase):
# Confirm no changes to self.Person
assert not hasattr(self.Person, "age")
def test_dynamic_document_parse_values_in_constructor_like_document_do(self):
class ProductDynamicDocument(DynamicDocument):
title = StringField()
price = FloatField()
class ProductDocument(Document):
title = StringField()
price = FloatField()
product = ProductDocument(title="Blabla", price="12.5")
dyn_product = ProductDynamicDocument(title="Blabla", price="12.5")
assert product.price == dyn_product.price == 12.5
def test_change_scope_of_variable(self):
"""Test changing the scope of a dynamic field has no adverse effects"""
p = self.Person()


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime
@@ -551,8 +550,9 @@ class TestIndexes(unittest.TestCase):
assert 5 == query_result.count()
incorrect_collation = {"arndom": "wrdo"}
with pytest.raises(OperationFailure):
with pytest.raises(OperationFailure) as exc_info:
BlogPost.objects.collation(incorrect_collation).count()
assert "Missing expected field" in str(exc_info.value)
query_result = BlogPost.objects.collation({}).order_by("name")
assert [x.name for x in query_result] == sorted(names)


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
import warnings


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import os
import pickle
import unittest
@@ -188,7 +187,7 @@ class TestDocumentInstance(MongoDBTestCase):
def test_queryset_resurrects_dropped_collection(self):
self.Person.drop_collection()
assert [] == list(self.Person.objects())
assert list(self.Person.objects()) == []
# Ensure works correctly with inhertited classes
class Actor(self.Person):
@@ -196,7 +195,7 @@ class TestDocumentInstance(MongoDBTestCase):
Actor.objects()
self.Person.drop_collection()
assert [] == list(Actor.objects())
assert list(Actor.objects()) == []
def test_polymorphic_references(self):
"""Ensure that the correct subclasses are returned from a query
@@ -501,7 +500,7 @@ class TestDocumentInstance(MongoDBTestCase):
doc.reload()
Animal.drop_collection()
def test_update_shard_key_routing(self):
def test_save_update_shard_key_routing(self):
"""Ensures updating a doc with a specified shard_key includes it in
the query.
"""
@@ -529,6 +528,29 @@ class TestDocumentInstance(MongoDBTestCase):
Animal.drop_collection()
def test_save_create_shard_key_routing(self):
"""Ensures inserting a doc with a specified shard_key includes it in
the query.
"""
class Animal(Document):
_id = UUIDField(binary=False, primary_key=True, default=uuid.uuid4)
is_mammal = BooleanField()
name = StringField()
meta = {"shard_key": ("is_mammal",)}
Animal.drop_collection()
doc = Animal(is_mammal=True, name="Dog")
with query_counter() as q:
doc.save()
query_op = q.db.system.profile.find({"ns": "mongoenginetest.animal"})[0]
assert query_op["op"] == "command"
assert query_op["command"]["findAndModify"] == "animal"
assert set(query_op["command"]["query"].keys()) == set(["_id", "is_mammal"])
Animal.drop_collection()
def test_reload_with_changed_fields(self):
"""Ensures reloading will not affect changed fields"""
@@ -578,7 +600,8 @@ class TestDocumentInstance(MongoDBTestCase):
doc.embedded_field.list_field.append(1)
doc.embedded_field.dict_field["woot"] = "woot"
assert doc._get_changed_fields() == [
changed = doc._get_changed_fields()
assert changed == [
"list_field",
"dict_field.woot",
"embedded_field.list_field",
@@ -3411,7 +3434,7 @@ class TestDocumentInstance(MongoDBTestCase):
assert obj3 != dbref2
assert dbref2 != obj3
def test_default_values(self):
def test_default_values_dont_get_override_upon_save_when_only_is_used(self):
class Person(Document):
created_on = DateTimeField(default=lambda: datetime.utcnow())
name = StringField()


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import uuid
from bson import Binary


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import pytest
from mongoengine import *


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
from decimal import Decimal
import pytest


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import datetime
import itertools
import math


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import datetime
import pytest


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import datetime as dt
import pytest


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
from decimal import Decimal
import pytest


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
from bson import InvalidDocument
import pytest
@@ -113,7 +112,7 @@ class TestDictField(MongoDBTestCase):
post.info.setdefault("authors", [])
post.save()
post.reload()
assert [] == post.info["authors"]
assert post.info["authors"] == []
def test_dictfield_dump_document(self):
"""Ensure a DictField can handle another document's dump."""


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import sys
import pytest


@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import pytest
from mongoengine import (

View File

@@ -0,0 +1,122 @@
from enum import Enum
from bson import InvalidDocument
import pytest
from mongoengine import *
from tests.utils import MongoDBTestCase, get_as_pymongo
class Status(Enum):
NEW = "new"
DONE = "done"
class ModelWithEnum(Document):
status = EnumField(Status)
class TestStringEnumField(MongoDBTestCase):
def test_storage(self):
model = ModelWithEnum(status=Status.NEW).save()
assert get_as_pymongo(model) == {"_id": model.id, "status": "new"}
def test_set_enum(self):
ModelWithEnum.drop_collection()
ModelWithEnum(status=Status.NEW).save()
assert ModelWithEnum.objects(status=Status.NEW).count() == 1
assert ModelWithEnum.objects.first().status == Status.NEW
def test_set_by_value(self):
ModelWithEnum.drop_collection()
ModelWithEnum(status="new").save()
assert ModelWithEnum.objects.first().status == Status.NEW
def test_filter(self):
ModelWithEnum.drop_collection()
ModelWithEnum(status="new").save()
assert ModelWithEnum.objects(status="new").count() == 1
assert ModelWithEnum.objects(status=Status.NEW).count() == 1
assert ModelWithEnum.objects(status=Status.DONE).count() == 0
def test_change_value(self):
m = ModelWithEnum(status="new")
m.status = Status.DONE
m.save()
assert m.status == Status.DONE
def test_set_default(self):
class ModelWithDefault(Document):
status = EnumField(Status, default=Status.DONE)
m = ModelWithDefault().save()
assert m.status == Status.DONE
def test_enum_field_can_be_empty(self):
ModelWithEnum.drop_collection()
m = ModelWithEnum().save()
assert m.status is None
assert ModelWithEnum.objects()[0].status is None
assert ModelWithEnum.objects(status=None).count() == 1
def test_set_none_explicitly(self):
ModelWithEnum.drop_collection()
ModelWithEnum(status=None).save()
assert ModelWithEnum.objects.first().status is None
def test_cannot_create_model_with_wrong_enum_value(self):
m = ModelWithEnum(status="wrong_one")
with pytest.raises(ValidationError):
m.validate()
def test_user_is_informed_when_tries_to_set_choices(self):
with pytest.raises(ValueError, match="'choices' can't be set on EnumField"):
EnumField(Status, choices=["my", "custom", "options"])
class Color(Enum):
RED = 1
BLUE = 2
class ModelWithColor(Document):
color = EnumField(Color, default=Color.RED)
class TestIntEnumField(MongoDBTestCase):
def test_enum_with_int(self):
ModelWithColor.drop_collection()
m = ModelWithColor().save()
assert m.color == Color.RED
assert ModelWithColor.objects(color=Color.RED).count() == 1
assert ModelWithColor.objects(color=1).count() == 1
assert ModelWithColor.objects(color=2).count() == 0
def test_create_int_enum_by_value(self):
model = ModelWithColor(color=2).save()
assert model.color == Color.BLUE
def test_storage_enum_with_int(self):
model = ModelWithColor(color=Color.BLUE).save()
assert get_as_pymongo(model) == {"_id": model.id, "color": 2}
def test_validate_model(self):
with pytest.raises(ValidationError, match="Value must be one of"):
ModelWithColor(color=3).validate()
with pytest.raises(ValidationError, match="Value must be one of"):
ModelWithColor(color="wrong_type").validate()
class TestFunkyEnumField(MongoDBTestCase):
def test_enum_incompatible_bson_type_fails_during_save(self):
class FunkyColor(Enum):
YELLOW = object()
class ModelWithFunkyColor(Document):
color = EnumField(FunkyColor)
m = ModelWithFunkyColor(color=FunkyColor.YELLOW)
with pytest.raises(InvalidDocument, match="[cC]annot encode object"):
m.save()
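Taken together, these new tests establish that an EnumField stores the enum member's raw value, accepts either the member or its value on assignment and in filters, and validates input against the enum. A brief usage sketch under those assumptions (the Task model and database name below are hypothetical):

from enum import Enum
from mongoengine import Document, EnumField, connect

class Status(Enum):
    NEW = "new"
    DONE = "done"

class Task(Document):
    status = EnumField(Status, default=Status.NEW)

connect("enumfield_demo")                          # hypothetical database
Task(status=Status.DONE).save()                    # stored as the raw value "done"
assert Task.objects(status="done").count() == 1    # filtering by member or raw value both match
assert Task.objects.first().status == Status.DONE  # read back as the enum member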

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import datetime
import unittest
@@ -336,7 +335,7 @@ class TestField(MongoDBTestCase):
doc.save()
# Unset all the fields
HandleNoneFields._get_collection().update(
HandleNoneFields._get_collection().update_one(
{"_id": doc.id},
{"$unset": {"str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1}},
)
@@ -1084,7 +1083,7 @@ class TestField(MongoDBTestCase):
e = Simple().save()
e.mapping = []
assert [] == e._changed_fields
assert e._changed_fields == []
class Simple(Document):
mapping = DictField()
@@ -1093,7 +1092,7 @@ class TestField(MongoDBTestCase):
e = Simple().save()
e.mapping = {}
assert [] == e._changed_fields
assert e._changed_fields == []
def test_slice_marks_field_as_changed(self):
class Simple(Document):

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import copy
import os
import tempfile

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import pytest
from mongoengine import *

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
from mongoengine import *
@@ -381,7 +380,7 @@ class TestGeoField(MongoDBTestCase):
meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]}
assert [] == Log._geo_indices()
assert Log._geo_indices() == []
Log.drop_collection()
Log.ensure_indexes()
@@ -401,7 +400,7 @@ class TestGeoField(MongoDBTestCase):
"indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}]
}
assert [] == Log._geo_indices()
assert Log._geo_indices() == []
Log.drop_collection()
Log.ensure_indexes()

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import pytest
from mongoengine import *

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
from bson import DBRef, ObjectId
import pytest

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import datetime
import pytest

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
from bson import DBRef, SON
import pytest

View File

@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
from mongoengine import *
from tests.utils import MongoDBTestCase

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import pytest
from mongoengine import *

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import uuid
import pytest

View File

@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import datetime
import unittest
import uuid
@@ -114,6 +112,38 @@ class TestQueryset(unittest.TestCase):
assert person.name == "User A"
assert person.age == 20
def test_slicing_sets_empty_limit_skip(self):
self.Person.objects.insert(
[self.Person(name="User {}".format(i), age=i) for i in range(5)],
load_bulk=False,
)
self.Person.objects.create(name="User B", age=30)
self.Person.objects.create(name="User C", age=40)
qs = self.Person.objects()[1:2]
assert (qs._empty, qs._skip, qs._limit) == (False, 1, 1)
assert len(list(qs)) == 1
# Test edge case of [1:1] which should return nothing
# and require a hack so that it doesn't clash with limit(0)
qs = self.Person.objects()[1:1]
assert (qs._empty, qs._skip, qs._limit) == (True, 1, 0)
assert len(list(qs)) == 0
qs2 = qs[1:5] # Make sure that further slicing resets _empty
assert (qs2._empty, qs2._skip, qs2._limit) == (False, 1, 4)
assert len(list(qs2)) == 4
def test_limit_0_returns_all_documents(self):
self.Person.objects.create(name="User A", age=20)
self.Person.objects.create(name="User B", age=30)
n_docs = self.Person.objects().count()
persons = list(self.Person.objects().limit(0))
assert len(persons) == 2 == n_docs
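The two tests above capture the reason for the extra _empty flag: MongoDB and pymongo treat limit(0) as "no limit", so an empty slice such as [1:1] cannot be expressed through _limit alone. A small sketch of that slice handling — an assumption for illustration, not the actual QuerySet code:

def slice_params(start, stop):
    # Translate a Python slice into the (empty, skip, limit) triple the assertions
    # above expect; a computed limit of 0 is flagged as empty rather than being sent
    # to the server, where limit(0) would mean "return everything".
    skip = start or 0
    limit = stop - skip
    return (limit == 0, skip, limit)

assert slice_params(1, 2) == (False, 1, 1)
assert slice_params(1, 1) == (True, 1, 0)
assert slice_params(1, 5) == (False, 1, 4)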
def test_limit(self):
"""Ensure that QuerySet.limit works as expected."""
user_a = self.Person.objects.create(name="User A", age=20)
@@ -377,6 +407,9 @@ class TestQueryset(unittest.TestCase):
assert list(A.objects.none()) == []
assert list(A.objects.none().all()) == []
assert list(A.objects.none().limit(1)) == []
assert list(A.objects.none().skip(1)) == []
assert list(A.objects.none()[:5]) == []
def test_chaining(self):
class A(Document):
@@ -4021,6 +4054,32 @@ class TestQueryset(unittest.TestCase):
Number.drop_collection()
def test_clone_retains_settings(self):
"""Ensure that cloning retains the read_preference and read_concern
"""
class Number(Document):
n = IntField()
Number.drop_collection()
qs = Number.objects
qs_clone = qs.clone()
assert qs._read_preference == qs_clone._read_preference
assert qs._read_concern == qs_clone._read_concern
qs = Number.objects.read_preference(ReadPreference.PRIMARY_PREFERRED)
qs_clone = qs.clone()
assert qs._read_preference == ReadPreference.PRIMARY_PREFERRED
assert qs._read_preference == qs_clone._read_preference
qs = Number.objects.read_concern({"level": "majority"})
qs_clone = qs.clone()
assert qs._read_concern.document == {"level": "majority"}
assert qs._read_concern == qs_clone._read_concern
Number.drop_collection()
def test_using(self):
"""Ensure that switching databases for a queryset is possible
"""
@@ -4442,7 +4501,9 @@ class TestQueryset(unittest.TestCase):
assert len(people) == 1
assert people[0] == "User B"
people = list(self.Person.objects[1:1].scalar("name"))
# people = list(self.Person.objects[1:1].scalar("name"))
people = self.Person.objects[1:1]
people = people.scalar("name")
assert len(people) == 0
# Test slice out of range

View File

@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
import warnings

View File

@@ -344,6 +344,31 @@ class TestTransform(unittest.TestCase):
)
assert update == {"$pull": {"content.text": {"word": {"$nin": ["foo", "bar"]}}}}
def test_transform_embedded_document_list_fields(self):
"""
Test added to check filtering
EmbeddedDocumentListField which is inside an EmbeddedDocumentField
"""
class Drink(EmbeddedDocument):
id = StringField()
meta = {"strict": False}
class Shop(Document):
drinks = EmbeddedDocumentListField(Drink)
Shop.drop_collection()
drinks = [Drink(id="drink_1"), Drink(id="drink_2")]
Shop.objects.create(drinks=drinks)
q_obj = transform.query(
Shop, drinks__all=[{"$elemMatch": {"_id": x.id}} for x in drinks]
)
assert q_obj == {
"drinks": {"$all": [{"$elemMatch": {"_id": x.id}} for x in drinks]}
}
Shop.drop_collection()
if __name__ == "__main__":
unittest.main()

View File

@@ -282,7 +282,7 @@ class ConnectionTest(unittest.TestCase):
# database won't exist until we save a document
some_document.save()
assert conn.get_default_database().name == "mongoenginetest"
assert conn.database_names()[0] == "mongoenginetest"
assert conn.list_database_names()[0] == "mongoenginetest"
@require_mongomock
def test_connect_with_host_list(self):

View File

@@ -9,10 +9,14 @@ from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict
class DocumentStub(object):
def __init__(self):
self._changed_fields = []
self._unset_fields = []
def _mark_as_changed(self, key):
self._changed_fields.append(key)
def _mark_as_unset(self, key):
self._unset_fields.append(key)
class TestBaseDict:
@staticmethod
@@ -314,7 +318,7 @@ class TestBaseList:
def test___setitem___item_0_calls_mark_as_changed(self):
base_list = self._get_baselist([True])
base_list[0] = False
assert base_list._instance._changed_fields == ["my_name"]
assert base_list._instance._changed_fields == ["my_name.0"]
assert base_list == [False]
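The assertion change above (from "my_name" to "my_name.0") reflects element-level change tracking: assigning to an index now marks that specific item as changed rather than the whole list. A hedged illustration of what that granularity allows the save delta to become (the update documents below are assumptions, not captured output):

# With element-level tracking the delta can target just the changed index:
fine_grained_update = {"$set": {"my_name.0": False}}
# whereas whole-field tracking would rewrite the entire list:
coarse_update = {"$set": {"my_name": [False]}}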
def test___setitem___item_1_calls_mark_as_changed(self):

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
from bson import DBRef, ObjectId
@@ -370,8 +369,7 @@ class FieldTest(unittest.TestCase):
assert Post.objects.all()[0].user_lists == [[u1, u2], [u3]]
def test_circular_reference(self):
"""Ensure you can handle circular references
"""
"""Ensure you can handle circular references"""
class Relation(EmbeddedDocument):
name = StringField()
@@ -426,6 +424,7 @@ class FieldTest(unittest.TestCase):
daughter.relations.append(mother)
daughter.relations.append(daughter)
assert daughter._get_changed_fields() == ["relations"]
daughter.save()
assert "[<Person: Mother>, <Person: Daughter>]" == "%s" % Person.objects()

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import unittest
from mongoengine import *

View File

@@ -50,7 +50,7 @@ def _decorated_with_ver_requirement(func, mongo_version_req, oper):
ran against MongoDB < v3.6.
:param mongo_version_req: The mongodb version requirement (tuple(int, int))
:param oper: The operator to apply (e.g: operator.ge)
:param oper: The operator to apply (e.g. operator.ge)
"""
def _inner(*args, **kwargs):

View File

@@ -1,5 +1,5 @@
[tox]
envlist = {py35,pypy3}-{mg34,mg36,mg39,mg310}
envlist = {py35,pypy3}-{mg34,mg36,mg39,mg311}
[testenv]
commands =
@@ -8,6 +8,6 @@ deps =
mg34: pymongo>=3.4,<3.5
mg36: pymongo>=3.6,<3.7
mg39: pymongo>=3.9,<3.10
mg310: pymongo>=3.10,<3.11
mg311: pymongo>=3.11,<3.12
setenv =
PYTHON_EGG_CACHE = {envdir}/python-eggs