Compare commits
	
		
			1 Commits
		
	
	
		
			v0.18.2
			...
			cleanup-qs
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 160379ea85 | 
							
								
								
									
										23
									
								
								.install_mongodb_on_travis.sh
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										23
									
								
								.install_mongodb_on_travis.sh
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,23 @@ | |||||||
|  | #!/bin/bash | ||||||
|  |  | ||||||
|  | sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 | ||||||
|  |  | ||||||
|  | if [ "$MONGODB" = "2.4" ]; then | ||||||
|  |     echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||||
|  |     sudo apt-get update | ||||||
|  |     sudo apt-get install mongodb-10gen=2.4.14 | ||||||
|  |     sudo service mongodb start | ||||||
|  | elif [ "$MONGODB" = "2.6" ]; then | ||||||
|  |     echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||||
|  |     sudo apt-get update | ||||||
|  |     sudo apt-get install mongodb-org-server=2.6.12 | ||||||
|  |     # service should be started automatically | ||||||
|  | elif [ "$MONGODB" = "3.0" ]; then | ||||||
|  |     echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||||
|  |     sudo apt-get update | ||||||
|  |     sudo apt-get install mongodb-org-server=3.0.14 | ||||||
|  |     # service should be started automatically | ||||||
|  | else | ||||||
|  |     echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0." | ||||||
|  |     exit 1 | ||||||
|  | fi; | ||||||
							
								
								
									
										93
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										93
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -2,73 +2,68 @@ | |||||||
| # PyMongo combinations. However, that would result in an overly long build | # PyMongo combinations. However, that would result in an overly long build | ||||||
| # with a very large number of jobs, hence we only test a subset of all the | # with a very large number of jobs, hence we only test a subset of all the | ||||||
| # combinations: | # combinations: | ||||||
| # * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, | # * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5. | ||||||
| #   tested against Python v2.7, v3.5, v3.6, and PyPy. | # * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x. | ||||||
| # * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo | # * MongoDB v3.0 is tested against PyMongo v3.x. | ||||||
| #   combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. | # * MongoDB v2.6 is currently the "main" version tested against Python v2.7, | ||||||
| # * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. | #   v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x. | ||||||
| # |  | ||||||
| # We should periodically check MongoDB Server versions supported by MongoDB |  | ||||||
| # Inc., add newly released versions to the test matrix, and remove versions |  | ||||||
| # which have reached their End of Life. See: |  | ||||||
| # 1. https://www.mongodb.com/support-policy. |  | ||||||
| # 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility |  | ||||||
| # | # | ||||||
| # Reminder: Update README.rst if you change MongoDB versions we test. | # Reminder: Update README.rst if you change MongoDB versions we test. | ||||||
|  |  | ||||||
|  |  | ||||||
| language: python | language: python | ||||||
|  |  | ||||||
| python: | python: | ||||||
| - 2.7 | - 2.7 | ||||||
| - 3.5 | - 3.5 | ||||||
| - 3.6 |  | ||||||
| - pypy | - pypy | ||||||
|  | - pypy3 | ||||||
| dist: xenial |  | ||||||
|  |  | ||||||
| env: | env: | ||||||
|   global: | - MONGODB=2.6 PYMONGO=2.7 | ||||||
|     - MONGODB_3_4=3.4.17 | - MONGODB=2.6 PYMONGO=2.8 | ||||||
|     - MONGODB_3_6=3.6.12 | - MONGODB=2.6 PYMONGO=3.0 | ||||||
|   matrix: |  | ||||||
|     - MONGODB=${MONGODB_3_4} PYMONGO=3.x |  | ||||||
|  |  | ||||||
| matrix: | matrix: | ||||||
|  |  | ||||||
|   # Finish the build as soon as one job fails |   # Finish the build as soon as one job fails | ||||||
|   fast_finish: true |   fast_finish: true | ||||||
|  |  | ||||||
|   include: |   include: | ||||||
|   - python: 2.7 |   - python: 2.7 | ||||||
|     env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x |     env: MONGODB=2.4 PYMONGO=2.7 | ||||||
|   - python: 3.6 |   - python: 2.7 | ||||||
|     env: MONGODB=${MONGODB_3_6} PYMONGO=3.x |     env: MONGODB=2.4 PYMONGO=3.0 | ||||||
|   - python: 3.7 |   - python: 2.7 | ||||||
|     env: MONGODB=${MONGODB_3_6} PYMONGO=3.x |     env: MONGODB=3.0 PYMONGO=3.0 | ||||||
|  |   - python: 3.5 | ||||||
|  |     env: MONGODB=2.4 PYMONGO=2.7 | ||||||
|  |   - python: 3.5 | ||||||
|  |     env: MONGODB=2.4 PYMONGO=3.0 | ||||||
|  |   - python: 3.5 | ||||||
|  |     env: MONGODB=3.0 PYMONGO=3.0 | ||||||
|  |  | ||||||
|  | before_install: | ||||||
|  | - bash .install_mongodb_on_travis.sh | ||||||
|  |  | ||||||
| install: | install: | ||||||
|   # Install Mongo | - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev | ||||||
|   - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz |   libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev | ||||||
|   - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz |   python-tk | ||||||
|   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version | - travis_retry pip install --upgrade pip | ||||||
|   # Install python dependencies | - travis_retry pip install coveralls | ||||||
|   - pip install --upgrade pip | - travis_retry pip install flake8 flake8-import-order | ||||||
|   - pip install coveralls | - travis_retry pip install tox>=1.9 | ||||||
|   - pip install flake8 flake8-import-order | - travis_retry pip install "virtualenv<14.0.0"  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) | ||||||
|   - pip install tox         # tox 3.11.0 has requirement virtualenv>=14.0.0 | - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test | ||||||
|   - pip install virtualenv  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) |  | ||||||
|   # Install the tox venv |  | ||||||
|   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test |  | ||||||
|  |  | ||||||
|  | # Cache dependencies installed via pip | ||||||
|  | cache: pip | ||||||
|  |  | ||||||
|  | # Run flake8 for py27 | ||||||
| before_script: | before_script: | ||||||
|   - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data | - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi | ||||||
|   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork |  | ||||||
|   - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi   # Run flake8 for py27 |  | ||||||
|   - mongo --eval 'db.version();'    # Make sure mongo is awake |  | ||||||
|  |  | ||||||
| script: | script: | ||||||
|   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage | - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage | ||||||
|  |  | ||||||
| # For now only submit coveralls for Python v2.7. Python v3.x currently shows | # For now only submit coveralls for Python v2.7. Python v3.x currently shows | ||||||
| # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible | # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible | ||||||
| @@ -92,15 +87,15 @@ deploy: | |||||||
|   password: |   password: | ||||||
|     secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= |     secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= | ||||||
|  |  | ||||||
|   # Create a source distribution and a pure python wheel for faster installs. |   # create a source distribution and a pure python wheel for faster installs | ||||||
|   distributions: "sdist bdist_wheel" |   distributions: "sdist bdist_wheel" | ||||||
|  |  | ||||||
|   # Only deploy on tagged commits (aka GitHub releases) and only for the parent |   # only deploy on tagged commits (aka GitHub releases) and only for the | ||||||
|   # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4. |   # parent repo's builds running Python 2.7 along with dev PyMongo (we run | ||||||
|   # We run Travis against many different Python, PyMongo, and MongoDB versions |   # Travis against many different Python and PyMongo versions and we don't | ||||||
|   # and we don't want the deploy to occur multiple times). |   # want the deploy to occur multiple times). | ||||||
|   on: |   on: | ||||||
|     tags: true |     tags: true | ||||||
|     repo: MongoEngine/mongoengine |     repo: MongoEngine/mongoengine | ||||||
|     condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4.17) |     condition: "$PYMONGO = 3.0" | ||||||
|     python: 2.7 |     python: 2.7 | ||||||
|   | |||||||
							
								
								
									
										9
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										9
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -243,12 +243,3 @@ that much better: | |||||||
|  * Victor Varvaryuk |  * Victor Varvaryuk | ||||||
|  * Stanislav Kaledin (https://github.com/sallyruthstruik) |  * Stanislav Kaledin (https://github.com/sallyruthstruik) | ||||||
|  * Dmitry Yantsen (https://github.com/mrTable) |  * Dmitry Yantsen (https://github.com/mrTable) | ||||||
|  * Renjianxin (https://github.com/Davidrjx) |  | ||||||
|  * Erdenezul Batmunkh (https://github.com/erdenezul) |  | ||||||
|  * Andy Yankovsky (https://github.com/werat) |  | ||||||
|  * Bastien Gérard (https://github.com/bagerard) |  | ||||||
|  * Trevor Hall (https://github.com/tjhall13) |  | ||||||
|  * Gleb Voropaev (https://github.com/buggyspace) |  | ||||||
|  * Paulo Amaral (https://github.com/pauloAmaral) |  | ||||||
|  * Gaurav Dadhania (https://github.com/GVRV) |  | ||||||
|  * Yurii Andrieiev (https://github.com/yandrieiev) |  | ||||||
|   | |||||||
| @@ -22,11 +22,8 @@ Supported Interpreters | |||||||
|  |  | ||||||
| MongoEngine supports CPython 2.7 and newer. Language | MongoEngine supports CPython 2.7 and newer. Language | ||||||
| features not supported by all interpreters can not be used. | features not supported by all interpreters can not be used. | ||||||
| The codebase is written in python 2 so you must be using python 2 | Please also ensure that your code is properly converted by | ||||||
| when developing new features. Compatibility of the library with Python 3 | `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. | ||||||
| relies on the 2to3 package that gets executed as part of the installation |  | ||||||
| build. You should ensure that your code is properly converted by |  | ||||||
| `2to3 <http://docs.python.org/library/2to3.html>`_. |  | ||||||
|  |  | ||||||
| Style Guide | Style Guide | ||||||
| ----------- | ----------- | ||||||
| @@ -80,6 +77,4 @@ You might also use the github `Edit <https://github.com/blog/844-forking-with-th | |||||||
| button. | button. | ||||||
|  |  | ||||||
| If you want to test your documentation changes locally, you need to install | If you want to test your documentation changes locally, you need to install | ||||||
| the ``sphinx`` and ``sphinx_rtd_theme`` packages. Once these are installed, | the ``sphinx`` package. | ||||||
| go to the ``docs`` directory, run ``make html`` and inspect the updated docs |  | ||||||
| by running ``open _build/html/index.html``. |  | ||||||
|   | |||||||
							
								
								
									
										18
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										18
									
								
								README.rst
									
									
									
									
									
								
							| @@ -26,28 +26,26 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_. | |||||||
|  |  | ||||||
| Supported MongoDB Versions | Supported MongoDB Versions | ||||||
| ========================== | ========================== | ||||||
| MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions | MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future | ||||||
| should be supported as well, but aren't actively tested at the moment. Make | versions should be supported as well, but aren't actively tested at the moment. | ||||||
| sure to open an issue or submit a pull request if you experience any problems | Make sure to open an issue or submit a pull request if you experience any | ||||||
| with MongoDB version > 3.6. | problems with MongoDB v3.2+. | ||||||
|  |  | ||||||
| Installation | Installation | ||||||
| ============ | ============ | ||||||
| We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | ||||||
| `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``. | `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``. | ||||||
| You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||||
| and thus you can use ``easy_install -U mongoengine``. Another option is | and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the | ||||||
| `pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine`` | source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | ||||||
| to both create the virtual environment and install the package. Otherwise, you can | setup.py install``. | ||||||
| download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and |  | ||||||
| run ``python setup.py install``. |  | ||||||
|  |  | ||||||
| Dependencies | Dependencies | ||||||
| ============ | ============ | ||||||
| All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. | All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. | ||||||
| At the very least, you'll need these two packages to use MongoEngine: | At the very least, you'll need these two packages to use MongoEngine: | ||||||
|  |  | ||||||
| - pymongo>=3.4 | - pymongo>=2.7.1 | ||||||
| - six>=1.10.0 | - six>=1.10.0 | ||||||
|  |  | ||||||
| If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||||
|   | |||||||
							
								
								
									
										207
									
								
								benchmark.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										207
									
								
								benchmark.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,207 @@ | |||||||
|  | #!/usr/bin/env python | ||||||
|  |  | ||||||
|  | """ | ||||||
|  | Simple benchmark comparing PyMongo and MongoEngine. | ||||||
|  |  | ||||||
|  | Sample run on a mid 2015 MacBook Pro (commit b282511): | ||||||
|  |  | ||||||
|  | Benchmarking... | ||||||
|  | ---------------------------------------------------------------------------------------------------- | ||||||
|  | Creating 10000 dictionaries - Pymongo | ||||||
|  | 2.58979988098 | ||||||
|  | ---------------------------------------------------------------------------------------------------- | ||||||
|  | Creating 10000 dictionaries - Pymongo write_concern={"w": 0} | ||||||
|  | 1.26657605171 | ||||||
|  | ---------------------------------------------------------------------------------------------------- | ||||||
|  | Creating 10000 dictionaries - MongoEngine | ||||||
|  | 8.4351580143 | ||||||
|  | ---------------------------------------------------------------------------------------------------- | ||||||
|  | Creating 10000 dictionaries without continual assign - MongoEngine | ||||||
|  | 7.20191693306 | ||||||
|  | ---------------------------------------------------------------------------------------------------- | ||||||
|  | Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True | ||||||
|  | 6.31104588509 | ||||||
|  | ---------------------------------------------------------------------------------------------------- | ||||||
|  | Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True | ||||||
|  | 6.07083487511 | ||||||
|  | ---------------------------------------------------------------------------------------------------- | ||||||
|  | Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False | ||||||
|  | 5.97704291344 | ||||||
|  | ---------------------------------------------------------------------------------------------------- | ||||||
|  | Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False | ||||||
|  | 5.9111430645 | ||||||
|  | """ | ||||||
|  |  | ||||||
|  | import timeit | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def main(): | ||||||
|  |     print("Benchmarking...") | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('timeit_test') | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.timeit_test | ||||||
|  | noddy = db.noddy | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']['key' + str(j)] = 'value ' + str(j) | ||||||
|  |  | ||||||
|  |     noddy.save(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("""Creating 10000 dictionaries - Pymongo""") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(t.timeit(1)) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  | from pymongo.write_concern import WriteConcern | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0)) | ||||||
|  | noddy = db.noddy | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.save(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(t.timeit(1)) | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('timeit_test') | ||||||
|  | connection.close() | ||||||
|  |  | ||||||
|  | from mongoengine import Document, DictField, connect | ||||||
|  | connect('timeit_test') | ||||||
|  |  | ||||||
|  | class Noddy(Document): | ||||||
|  |     fields = DictField() | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("""Creating 10000 dictionaries - MongoEngine""") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(t.timeit(1)) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     fields = {} | ||||||
|  |     for j in range(20): | ||||||
|  |         fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.fields = fields | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("""Creating 10000 dictionaries without continual assign - MongoEngine""") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(t.timeit(1)) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}, cascade=True) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(t.timeit(1)) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}, validate=False, cascade=True) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(t.timeit(1)) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(validate=False, write_concern={"w": 0}) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(t.timeit(1)) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print(t.timeit(1)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     main() | ||||||
| @@ -1,148 +0,0 @@ | |||||||
| from timeit import repeat |  | ||||||
|  |  | ||||||
| import mongoengine |  | ||||||
| from mongoengine import (BooleanField, Document, EmailField, EmbeddedDocument, |  | ||||||
|                          EmbeddedDocumentField, IntField, ListField, |  | ||||||
|                          StringField) |  | ||||||
|  |  | ||||||
| mongoengine.connect(db='mongoengine_benchmark_test') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def timeit(f, n=10000): |  | ||||||
|     return min(repeat(f, repeat=3, number=n)) / float(n) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_basic(): |  | ||||||
|     class Book(Document): |  | ||||||
|         name = StringField() |  | ||||||
|         pages = IntField() |  | ||||||
|         tags = ListField(StringField()) |  | ||||||
|         is_published = BooleanField() |  | ||||||
|         author_email = EmailField() |  | ||||||
|  |  | ||||||
|     Book.drop_collection() |  | ||||||
|  |  | ||||||
|     def init_book(): |  | ||||||
|         return Book( |  | ||||||
|             name='Always be closing', |  | ||||||
|             pages=100, |  | ||||||
|             tags=['self-help', 'sales'], |  | ||||||
|             is_published=True, |  | ||||||
|             author_email='alec@example.com', |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     print('Doc initialization: %.3fus' % (timeit(init_book, 1000) * 10**6)) |  | ||||||
|  |  | ||||||
|     b = init_book() |  | ||||||
|     print('Doc getattr: %.3fus' % (timeit(lambda: b.name, 10000) * 10**6)) |  | ||||||
|  |  | ||||||
|     print( |  | ||||||
|         'Doc setattr: %.3fus' % ( |  | ||||||
|             timeit(lambda: setattr(b, 'name', 'New name'), 10000) * 10**6 |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     print('Doc to mongo: %.3fus' % (timeit(b.to_mongo, 1000) * 10**6)) |  | ||||||
|  |  | ||||||
|     print('Doc validation: %.3fus' % (timeit(b.validate, 1000) * 10**6)) |  | ||||||
|  |  | ||||||
|     def save_book(): |  | ||||||
|         b._mark_as_changed('name') |  | ||||||
|         b._mark_as_changed('tags') |  | ||||||
|         b.save() |  | ||||||
|  |  | ||||||
|     print('Save to database: %.3fus' % (timeit(save_book, 100) * 10**6)) |  | ||||||
|  |  | ||||||
|     son = b.to_mongo() |  | ||||||
|     print( |  | ||||||
|         'Load from SON: %.3fus' % ( |  | ||||||
|             timeit(lambda: Book._from_son(son), 1000) * 10**6 |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     print( |  | ||||||
|         'Load from database: %.3fus' % ( |  | ||||||
|             timeit(lambda: Book.objects[0], 100) * 10**6 |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     def create_and_delete_book(): |  | ||||||
|         b = init_book() |  | ||||||
|         b.save() |  | ||||||
|         b.delete() |  | ||||||
|  |  | ||||||
|     print( |  | ||||||
|         'Init + save to database + delete: %.3fms' % ( |  | ||||||
|             timeit(create_and_delete_book, 10) * 10**3 |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def test_big_doc(): |  | ||||||
|     class Contact(EmbeddedDocument): |  | ||||||
|         name = StringField() |  | ||||||
|         title = StringField() |  | ||||||
|         address = StringField() |  | ||||||
|  |  | ||||||
|     class Company(Document): |  | ||||||
|         name = StringField() |  | ||||||
|         contacts = ListField(EmbeddedDocumentField(Contact)) |  | ||||||
|  |  | ||||||
|     Company.drop_collection() |  | ||||||
|  |  | ||||||
|     def init_company(): |  | ||||||
|         return Company( |  | ||||||
|             name='MongoDB, Inc.', |  | ||||||
|             contacts=[ |  | ||||||
|                 Contact( |  | ||||||
|                     name='Contact %d' % x, |  | ||||||
|                     title='CEO', |  | ||||||
|                     address='Address %d' % x, |  | ||||||
|                 ) |  | ||||||
|                 for x in range(1000) |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     company = init_company() |  | ||||||
|     print('Big doc to mongo: %.3fms' % (timeit(company.to_mongo, 100) * 10**3)) |  | ||||||
|  |  | ||||||
|     print('Big doc validation: %.3fms' % (timeit(company.validate, 1000) * 10**3)) |  | ||||||
|  |  | ||||||
|     company.save() |  | ||||||
|  |  | ||||||
|     def save_company(): |  | ||||||
|         company._mark_as_changed('name') |  | ||||||
|         company._mark_as_changed('contacts') |  | ||||||
|         company.save() |  | ||||||
|  |  | ||||||
|     print('Save to database: %.3fms' % (timeit(save_company, 100) * 10**3)) |  | ||||||
|  |  | ||||||
|     son = company.to_mongo() |  | ||||||
|     print( |  | ||||||
|         'Load from SON: %.3fms' % ( |  | ||||||
|             timeit(lambda: Company._from_son(son), 100) * 10**3 |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     print( |  | ||||||
|         'Load from database: %.3fms' % ( |  | ||||||
|             timeit(lambda: Company.objects[0], 100) * 10**3 |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     def create_and_delete_company(): |  | ||||||
|         c = init_company() |  | ||||||
|         c.save() |  | ||||||
|         c.delete() |  | ||||||
|  |  | ||||||
|     print( |  | ||||||
|         'Init + save to database + delete: %.3fms' % ( |  | ||||||
|             timeit(create_and_delete_company, 10) * 10**3 |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     test_basic() |  | ||||||
|     print('-' * 100) |  | ||||||
|     test_big_doc() |  | ||||||
| @@ -1,154 +0,0 @@ | |||||||
| import timeit |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
| connection.drop_database('mongoengine_benchmark_test') |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
|  |  | ||||||
| db = connection.mongoengine_benchmark_test |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in range(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']["key"+str(j)] = "value "+str(j) |  | ||||||
|  |  | ||||||
|     noddy.insert_one(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys]  # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print('-' * 100) |  | ||||||
|     print('PyMongo: Creating 10000 dictionaries.') |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print('{}s'.format(t.timeit(1))) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import MongoClient, WriteConcern |  | ||||||
| connection = MongoClient() |  | ||||||
|  |  | ||||||
| db = connection.mongoengine_benchmark_test |  | ||||||
| noddy = db.noddy.with_options(write_concern=WriteConcern(w=0)) |  | ||||||
|  |  | ||||||
| for i in range(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']["key"+str(j)] = "value "+str(j) |  | ||||||
|  |  | ||||||
|     noddy.insert_one(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys]  # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print('-' * 100) |  | ||||||
|     print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).') |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print('{}s'.format(t.timeit(1))) |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
| connection.drop_database('mongoengine_benchmark_test') |  | ||||||
| connection.close() |  | ||||||
|  |  | ||||||
| from mongoengine import Document, DictField, connect |  | ||||||
| connect("mongoengine_benchmark_test") |  | ||||||
|  |  | ||||||
| class Noddy(Document): |  | ||||||
|     fields = DictField() |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys]  # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print('-' * 100) |  | ||||||
|     print('MongoEngine: Creating 10000 dictionaries.') |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print('{}s'.format(t.timeit(1))) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     fields = {} |  | ||||||
|     for j in range(20): |  | ||||||
|         fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.fields = fields |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys]  # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print('-' * 100) |  | ||||||
|     print('MongoEngine: Creating 10000 dictionaries (using a single field assignment).') |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print('{}s'.format(t.timeit(1))) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(write_concern={"w": 0}) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print('-' * 100) |  | ||||||
|     print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).') |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print('{}s'.format(t.timeit(1))) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(write_concern={"w": 0}, validate=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print('-' * 100) |  | ||||||
|     print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).') |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print('{}s'.format(t.timeit(1))) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print('-' * 100) |  | ||||||
|     print('MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).') |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print('{}s'.format(t.timeit(1))) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": |  | ||||||
|     main() |  | ||||||
							
								
								
									
										17
									
								
								docs/_themes/sphinx_rtd_theme/__init__.py
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										17
									
								
								docs/_themes/sphinx_rtd_theme/__init__.py
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,17 @@ | |||||||
|  | """Sphinx ReadTheDocs theme. | ||||||
|  |  | ||||||
|  | From https://github.com/ryan-roemer/sphinx-bootstrap-theme. | ||||||
|  |  | ||||||
|  | """ | ||||||
|  | import os | ||||||
|  |  | ||||||
|  | VERSION = (0, 1, 5) | ||||||
|  |  | ||||||
|  | __version__ = ".".join(str(v) for v in VERSION) | ||||||
|  | __version_full__ = __version__ | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_html_theme_path(): | ||||||
|  |     """Return list of HTML theme paths.""" | ||||||
|  |     cur_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | ||||||
|  |     return cur_dir | ||||||
							
								
								
									
										15
									
								
								docs/_themes/sphinx_rtd_theme/breadcrumbs.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										15
									
								
								docs/_themes/sphinx_rtd_theme/breadcrumbs.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,15 @@ | |||||||
|  | <ul class="wy-breadcrumbs"> | ||||||
|  |   <li><a href="{{ pathto(master_doc) }}">Docs</a> »</li> | ||||||
|  |   <li><a href="">{{ title }}</a></li> | ||||||
|  |     <li class="wy-breadcrumbs-aside"> | ||||||
|  |       {% if display_github %} | ||||||
|  |         <a href="https://github.com/{{ github_user }}/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}.rst" class="icon icon-github"> Edit on GitHub</a> | ||||||
|  |       {% elif display_bitbucket %} | ||||||
|  |         <a href="https://bitbucket.org/{{ bitbucket_user }}/{{ bitbucket_repo }}/src/{{ bitbucket_version}}{{ conf_py_path }}{{ pagename }}.rst'" class="icon icon-bitbucket"> Edit on Bitbucket</a> | ||||||
|  |       {% elif show_source and has_source and sourcename %} | ||||||
|  |         <a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow"> View page source</a> | ||||||
|  |       {% endif %} | ||||||
|  |     </li> | ||||||
|  | </ul> | ||||||
|  | <hr/> | ||||||
|  |  | ||||||
							
								
								
									
										30
									
								
								docs/_themes/sphinx_rtd_theme/footer.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										30
									
								
								docs/_themes/sphinx_rtd_theme/footer.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,30 @@ | |||||||
|  | <footer> | ||||||
|  |   {% if next or prev %} | ||||||
|  |     <div class="rst-footer-buttons"> | ||||||
|  |       {% if next %} | ||||||
|  |         <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}">Next <span class="icon icon-circle-arrow-right"></span></a> | ||||||
|  |       {% endif %} | ||||||
|  |       {% if prev %} | ||||||
|  |         <a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a> | ||||||
|  |       {% endif %} | ||||||
|  |     </div> | ||||||
|  |   {% endif %} | ||||||
|  |  | ||||||
|  |   <hr/> | ||||||
|  |  | ||||||
|  |   <p> | ||||||
|  |   {%- if show_copyright %} | ||||||
|  |     {%- if hasdoc('copyright') %} | ||||||
|  |       {% trans path=pathto('copyright'), copyright=copyright|e %}© <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %} | ||||||
|  |     {%- else %} | ||||||
|  |       {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} | ||||||
|  |     {%- endif %} | ||||||
|  |   {%- endif %} | ||||||
|  |  | ||||||
|  |   {%- if last_updated %} | ||||||
|  |     {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} | ||||||
|  |   {%- endif %} | ||||||
|  |   </p> | ||||||
|  |  | ||||||
|  |   {% trans %}<a href="https://www.github.com/snide/sphinx_rtd_theme">Sphinx theme</a> provided by <a href="http://readthedocs.org">Read the Docs</a>{% endtrans %} | ||||||
|  | </footer> | ||||||
							
								
								
									
										142
									
								
								docs/_themes/sphinx_rtd_theme/layout.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										142
									
								
								docs/_themes/sphinx_rtd_theme/layout.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,142 @@ | |||||||
|  | {# TEMPLATE VAR SETTINGS #} | ||||||
|  | {%- set url_root = pathto('', 1) %} | ||||||
|  | {%- if url_root == '#' %}{% set url_root = '' %}{% endif %} | ||||||
|  | {%- if not embedded and docstitle %} | ||||||
|  |   {%- set titlesuffix = " — "|safe + docstitle|e %} | ||||||
|  | {%- else %} | ||||||
|  |   {%- set titlesuffix = "" %} | ||||||
|  | {%- endif %} | ||||||
|  |  | ||||||
|  | <!DOCTYPE html> | ||||||
|  | <!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]--> | ||||||
|  | <!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]--> | ||||||
|  | <head> | ||||||
|  |   <meta charset="utf-8"> | ||||||
|  |   <meta name="viewport" content="width=device-width, initial-scale=1.0"> | ||||||
|  |   {% block htmltitle %} | ||||||
|  |   <title>{{ title|striptags|e }}{{ titlesuffix }}</title> | ||||||
|  |   {% endblock %} | ||||||
|  |  | ||||||
|  |   {# FAVICON #} | ||||||
|  |   {% if favicon %} | ||||||
|  |     <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/> | ||||||
|  |   {% endif %} | ||||||
|  |   {# CANONICAL #} | ||||||
|  |   {%- if theme_canonical_url %} | ||||||
|  |     <link rel="canonical" href="{{ theme_canonical_url }}{{ pagename }}.html"/> | ||||||
|  |   {%- endif %} | ||||||
|  |  | ||||||
|  |   {# CSS #} | ||||||
|  |   <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'> | ||||||
|  |  | ||||||
|  |   {# JS #} | ||||||
|  |   {% if not embedded %} | ||||||
|  |  | ||||||
|  |     <script type="text/javascript"> | ||||||
|  |       var DOCUMENTATION_OPTIONS = { | ||||||
|  |         URL_ROOT:'{{ url_root }}', | ||||||
|  |         VERSION:'{{ release|e }}', | ||||||
|  |         COLLAPSE_INDEX:false, | ||||||
|  |         FILE_SUFFIX:'{{ '' if no_search_suffix else file_suffix }}', | ||||||
|  |         HAS_SOURCE:  {{ has_source|lower }} | ||||||
|  |       }; | ||||||
|  |     </script> | ||||||
|  |     {%- for scriptfile in script_files %} | ||||||
|  |       <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script> | ||||||
|  |     {%- endfor %} | ||||||
|  |  | ||||||
|  |     {% if use_opensearch %} | ||||||
|  |       <link rel="search" type="application/opensearchdescription+xml" title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" href="{{ pathto('_static/opensearch.xml', 1) }}"/> | ||||||
|  |     {% endif %} | ||||||
|  |  | ||||||
|  |   {% endif %} | ||||||
|  |  | ||||||
|  |   {# RTD hosts these file themselves, so just load on non RTD builds #} | ||||||
|  |   {% if not READTHEDOCS %} | ||||||
|  |     <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" /> | ||||||
|  |     <script type="text/javascript" src="_static/js/theme.js"></script> | ||||||
|  |   {% endif %} | ||||||
|  |  | ||||||
|  |   {% for cssfile in css_files %} | ||||||
|  |     <link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" /> | ||||||
|  |   {% endfor %} | ||||||
|  |  | ||||||
|  |   {%- block linktags %} | ||||||
|  |     {%- if hasdoc('about') %} | ||||||
|  |         <link rel="author" title="{{ _('About these documents') }}" | ||||||
|  |               href="{{ pathto('about') }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if hasdoc('genindex') %} | ||||||
|  |         <link rel="index" title="{{ _('Index') }}" | ||||||
|  |               href="{{ pathto('genindex') }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if hasdoc('search') %} | ||||||
|  |         <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if hasdoc('copyright') %} | ||||||
|  |         <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |     <link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}"/> | ||||||
|  |     {%- if parents %} | ||||||
|  |         <link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if next %} | ||||||
|  |         <link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if prev %} | ||||||
|  |         <link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |   {%- endblock %} | ||||||
|  |   {%- block extrahead %} {% endblock %} | ||||||
|  |  | ||||||
|  |   <script src="//cdnjs.cloudflare.com/ajax/libs/modernizr/2.6.2/modernizr.min.js"></script> | ||||||
|  |  | ||||||
|  | </head> | ||||||
|  |  | ||||||
|  | <body class="wy-body-for-nav"> | ||||||
|  |  | ||||||
|  |   <div class="wy-grid-for-nav"> | ||||||
|  |  | ||||||
|  |     {# SIDE NAV, TOGGLES ON MOBILE #} | ||||||
|  |     <nav data-toggle="wy-nav-shift" class="wy-nav-side"> | ||||||
|  |       <div class="wy-side-nav-search"> | ||||||
|  |         <a href="{{ pathto(master_doc) }}" class="icon icon-home"> {{ project }}</a> | ||||||
|  |         {% include "searchbox.html" %} | ||||||
|  |       </div> | ||||||
|  |  | ||||||
|  |       <div class="wy-menu wy-menu-vertical" data-spy="affix"> | ||||||
|  |         {% set toctree = toctree(maxdepth=2, collapse=False, includehidden=True) %} | ||||||
|  |         {% if toctree %} | ||||||
|  |             {{ toctree }} | ||||||
|  |         {% else %} | ||||||
|  |             <!-- Local TOC --> | ||||||
|  |             <div class="local-toc">{{ toc }}</div> | ||||||
|  |         {% endif %} | ||||||
|  |       </div> | ||||||
|  |         | ||||||
|  |     </nav> | ||||||
|  |  | ||||||
|  |     <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap"> | ||||||
|  |  | ||||||
 |       {# MOBILE NAV, TOGGLES SIDE NAV ON TOGGLE #} | ||||||
|  |       <nav class="wy-nav-top"> | ||||||
|  |         <i data-toggle="wy-nav-top" class="icon icon-reorder"></i> | ||||||
|  |         <a href="{{ pathto(master_doc) }}">{{ project }}</a> | ||||||
|  |       </nav> | ||||||
|  |  | ||||||
|  |  | ||||||
|  |       {# PAGE CONTENT #} | ||||||
|  |       <div class="wy-nav-content"> | ||||||
|  |         <div class="rst-content"> | ||||||
|  |           {% include "breadcrumbs.html" %} | ||||||
|  |           {% block body %}{% endblock %} | ||||||
|  |           {% include "footer.html" %} | ||||||
|  |         </div> | ||||||
|  |       </div> | ||||||
|  |  | ||||||
|  |     </section> | ||||||
|  |  | ||||||
|  |   </div> | ||||||
|  |   {% include "versions.html" %} | ||||||
|  | </body> | ||||||
|  | </html> | ||||||
							
								
								
									
										205
									
								
								docs/_themes/sphinx_rtd_theme/layout_old.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										205
									
								
								docs/_themes/sphinx_rtd_theme/layout_old.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,205 @@ | |||||||
|  | {# | ||||||
|  |     basic/layout.html | ||||||
|  |     ~~~~~~~~~~~~~~~~~ | ||||||
|  |  | ||||||
|  |     Master layout template for Sphinx themes. | ||||||
|  |  | ||||||
|  |     :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. | ||||||
|  |     :license: BSD, see LICENSE for details. | ||||||
|  | #} | ||||||
|  | {%- block doctype -%} | ||||||
|  | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" | ||||||
|  |   "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> | ||||||
|  | {%- endblock %} | ||||||
|  | {%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} | ||||||
|  | {%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} | ||||||
|  | {%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and | ||||||
|  |                          (sidebars != []) %} | ||||||
|  | {%- set url_root = pathto('', 1) %} | ||||||
|  | {# XXX necessary? #} | ||||||
|  | {%- if url_root == '#' %}{% set url_root = '' %}{% endif %} | ||||||
|  | {%- if not embedded and docstitle %} | ||||||
|  |   {%- set titlesuffix = " — "|safe + docstitle|e %} | ||||||
|  | {%- else %} | ||||||
|  |   {%- set titlesuffix = "" %} | ||||||
|  | {%- endif %} | ||||||
|  |  | ||||||
|  | {%- macro relbar() %} | ||||||
|  |     <div class="related"> | ||||||
|  |       <h3>{{ _('Navigation') }}</h3> | ||||||
|  |       <ul> | ||||||
|  |         {%- for rellink in rellinks %} | ||||||
|  |         <li class="right" {% if loop.first %}style="margin-right: 10px"{% endif %}> | ||||||
|  |           <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}" | ||||||
|  |              {{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a> | ||||||
|  |           {%- if not loop.first %}{{ reldelim2 }}{% endif %}</li> | ||||||
|  |         {%- endfor %} | ||||||
|  |         {%- block rootrellink %} | ||||||
|  |         <li><a href="{{ pathto(master_doc) }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li> | ||||||
|  |         {%- endblock %} | ||||||
|  |         {%- for parent in parents %} | ||||||
|  |           <li><a href="{{ parent.link|e }}" {% if loop.last %}{{ accesskey("U") }}{% endif %}>{{ parent.title }}</a>{{ reldelim1 }}</li> | ||||||
|  |         {%- endfor %} | ||||||
|  |         {%- block relbaritems %} {% endblock %} | ||||||
|  |       </ul> | ||||||
|  |     </div> | ||||||
|  | {%- endmacro %} | ||||||
|  |  | ||||||
|  | {%- macro sidebar() %} | ||||||
|  |       {%- if render_sidebar %} | ||||||
|  |       <div class="sphinxsidebar"> | ||||||
|  |         <div class="sphinxsidebarwrapper"> | ||||||
|  |           {%- block sidebarlogo %} | ||||||
|  |           {%- if logo %} | ||||||
|  |             <p class="logo"><a href="{{ pathto(master_doc) }}"> | ||||||
|  |               <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/> | ||||||
|  |             </a></p> | ||||||
|  |           {%- endif %} | ||||||
|  |           {%- endblock %} | ||||||
|  |           {%- if sidebars != None %} | ||||||
|  |             {#- new style sidebar: explicitly include/exclude templates #} | ||||||
|  |             {%- for sidebartemplate in sidebars %} | ||||||
|  |             {%- include sidebartemplate %} | ||||||
|  |             {%- endfor %} | ||||||
|  |           {%- else %} | ||||||
|  |             {#- old style sidebars: using blocks -- should be deprecated #} | ||||||
|  |             {%- block sidebartoc %} | ||||||
|  |             {%- include "localtoc.html" %} | ||||||
|  |             {%- endblock %} | ||||||
|  |             {%- block sidebarrel %} | ||||||
|  |             {%- include "relations.html" %} | ||||||
|  |             {%- endblock %} | ||||||
|  |             {%- block sidebarsourcelink %} | ||||||
|  |             {%- include "sourcelink.html" %} | ||||||
|  |             {%- endblock %} | ||||||
|  |             {%- if customsidebar %} | ||||||
|  |             {%- include customsidebar %} | ||||||
|  |             {%- endif %} | ||||||
|  |             {%- block sidebarsearch %} | ||||||
|  |             {%- include "searchbox.html" %} | ||||||
|  |             {%- endblock %} | ||||||
|  |           {%- endif %} | ||||||
|  |         </div> | ||||||
|  |       </div> | ||||||
|  |       {%- endif %} | ||||||
|  | {%- endmacro %} | ||||||
|  |  | ||||||
|  | {%- macro script() %} | ||||||
|  |     <script type="text/javascript"> | ||||||
|  |       var DOCUMENTATION_OPTIONS = { | ||||||
|  |         URL_ROOT:    '{{ url_root }}', | ||||||
|  |         VERSION:     '{{ release|e }}', | ||||||
|  |         COLLAPSE_INDEX: false, | ||||||
|  |         FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}', | ||||||
|  |         HAS_SOURCE:  {{ has_source|lower }} | ||||||
|  |       }; | ||||||
|  |     </script> | ||||||
|  |     {%- for scriptfile in script_files %} | ||||||
|  |     <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script> | ||||||
|  |     {%- endfor %} | ||||||
|  | {%- endmacro %} | ||||||
|  |  | ||||||
|  | {%- macro css() %} | ||||||
|  |     <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" /> | ||||||
|  |     <link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" /> | ||||||
|  |     {%- for cssfile in css_files %} | ||||||
|  |     <link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" /> | ||||||
|  |     {%- endfor %} | ||||||
|  | {%- endmacro %} | ||||||
|  |  | ||||||
|  | <html xmlns="http://www.w3.org/1999/xhtml"> | ||||||
|  |   <head> | ||||||
|  |     <meta http-equiv="Content-Type" content="text/html; charset={{ encoding }}" /> | ||||||
|  |     {{ metatags }} | ||||||
|  |     {%- block htmltitle %} | ||||||
|  |     <title>{{ title|striptags|e }}{{ titlesuffix }}</title> | ||||||
|  |     {%- endblock %} | ||||||
|  |     {{ css() }} | ||||||
|  |     {%- if not embedded %} | ||||||
|  |     {{ script() }} | ||||||
|  |     {%- if use_opensearch %} | ||||||
|  |     <link rel="search" type="application/opensearchdescription+xml" | ||||||
|  |           title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" | ||||||
|  |           href="{{ pathto('_static/opensearch.xml', 1) }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if favicon %} | ||||||
|  |     <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- endif %} | ||||||
|  | {%- block linktags %} | ||||||
|  |     {%- if hasdoc('about') %} | ||||||
|  |     <link rel="author" title="{{ _('About these documents') }}" href="{{ pathto('about') }}" /> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if hasdoc('genindex') %} | ||||||
|  |     <link rel="index" title="{{ _('Index') }}" href="{{ pathto('genindex') }}" /> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if hasdoc('search') %} | ||||||
|  |     <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}" /> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if hasdoc('copyright') %} | ||||||
|  |     <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}" /> | ||||||
|  |     {%- endif %} | ||||||
|  |     <link rel="top" title="{{ docstitle|e }}" href="{{ pathto('index') }}" /> | ||||||
|  |     {%- if parents %} | ||||||
|  |     <link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}" /> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if next %} | ||||||
|  |     <link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}" /> | ||||||
|  |     {%- endif %} | ||||||
|  |     {%- if prev %} | ||||||
|  |     <link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}" /> | ||||||
|  |     {%- endif %} | ||||||
|  | {%- endblock %} | ||||||
|  | {%- block extrahead %} {% endblock %} | ||||||
|  |   </head> | ||||||
|  |   <body> | ||||||
|  | {%- block header %}{% endblock %} | ||||||
|  |  | ||||||
|  | {%- block relbar1 %}{{ relbar() }}{% endblock %} | ||||||
|  |  | ||||||
|  | {%- block content %} | ||||||
|  |   {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %} | ||||||
|  |  | ||||||
|  |     <div class="document"> | ||||||
|  |   {%- block document %} | ||||||
|  |       <div class="documentwrapper"> | ||||||
|  |       {%- if render_sidebar %} | ||||||
|  |         <div class="bodywrapper"> | ||||||
|  |       {%- endif %} | ||||||
|  |           <div class="body"> | ||||||
|  |             {% block body %} {% endblock %} | ||||||
|  |           </div> | ||||||
|  |       {%- if render_sidebar %} | ||||||
|  |         </div> | ||||||
|  |       {%- endif %} | ||||||
|  |       </div> | ||||||
|  |   {%- endblock %} | ||||||
|  |  | ||||||
|  |   {%- block sidebar2 %}{{ sidebar() }}{% endblock %} | ||||||
|  |       <div class="clearer"></div> | ||||||
|  |     </div> | ||||||
|  | {%- endblock %} | ||||||
|  |  | ||||||
|  | {%- block relbar2 %}{{ relbar() }}{% endblock %} | ||||||
|  |  | ||||||
{# Page footer: copyright, last-updated stamp, and Sphinx credit.
   FIX: removed leftover debug junk "<p>asdf asdf asdf asdf 22</p>" that was
   rendered into every page. #}
{%- block footer %}
    <div class="footer">
    {%- if show_copyright %}
      {%- if hasdoc('copyright') %}
        {% trans path=pathto('copyright'), copyright=copyright|e %}© <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
      {%- else %}
        {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
      {%- endif %}
    {%- endif %}
    {%- if last_updated %}
      {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
    {%- endif %}
    {%- if show_sphinx %}
      {% trans sphinx_version=sphinx_version|e %}Created using <a href="http://sphinx-doc.org/">Sphinx</a> {{ sphinx_version }}.{% endtrans %}
    {%- endif %}
    </div>
{%- endblock %}
|  |   </body> | ||||||
|  | </html> | ||||||
|  |  | ||||||
							
								
								
									
										50
									
								
								docs/_themes/sphinx_rtd_theme/search.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										50
									
								
								docs/_themes/sphinx_rtd_theme/search.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,50 @@ | |||||||
{#
    basic/search.html
    ~~~~~~~~~~~~~~~~~

    Template for the search page.

    :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
#}
{%- extends "layout.html" %}
{% set title = _('Search') %}
{% set script_files = script_files + ['_static/searchtools.js'] %}
{% block extrahead %}
  <script type="text/javascript">
    jQuery(function() { Search.loadIndex("{{ pathto('searchindex.js', 1) }}"); });
  </script>
  {# this is used when loading the search index using $.ajax fails,
     such as on Chrome for documents on localhost #}
  <script type="text/javascript" id="searchindexloader"></script>
  {{ super() }}
{% endblock %}
{% block body %}
  <noscript>
  <div id="fallback" class="admonition warning">
    <p class="last">
      {% trans %}Please activate JavaScript to enable the search
      functionality.{% endtrans %}
    </p>
  </div>
  </noscript>

  {% if search_performed %}
    <h2>{{ _('Search Results') }}</h2>
    {% if not search_results %}
      <p>{{ _('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.') }}</p>
    {% endif %}
  {% endif %}
  <div id="search-results">
  {% if search_results %}
    <ul>
    {% for href, caption, context in search_results %}
      <li>
        {# FIX: the loop unpacks into `href`; `item` was undefined here #}
        <a href="{{ pathto(href) }}">{{ caption }}</a>
        <p class="context">{{ context|e }}</p>
      </li>
    {% endfor %}
    </ul>
  {% endif %}
  </div>
{% endblock %}
							
								
								
									
										5
									
								
								docs/_themes/sphinx_rtd_theme/searchbox.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										5
									
								
								docs/_themes/sphinx_rtd_theme/searchbox.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,5 @@ | |||||||
|  | <form id ="rtd-search-form" class="wy-form" action="{{ pathto('search') }}" method="get"> | ||||||
|  |   <input type="text" name="q" placeholder="Search docs" /> | ||||||
|  |   <input type="hidden" name="check_keywords" value="yes" /> | ||||||
|  |   <input type="hidden" name="area" value="default" /> | ||||||
|  | </form> | ||||||
							
								
								
									
										1
									
								
								docs/_themes/sphinx_rtd_theme/static/css/badge_only.css
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										1
									
								
								docs/_themes/sphinx_rtd_theme/static/css/badge_only.css
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1 @@ | |||||||
|  | .font-smooth,.icon:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon{display:inline-block;text-decoration:inherit}li .icon{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon{width:0.8em}ul.icons li .icon-large:before,ul.icons li .icon-large:before{vertical-align:baseline}.icon-book:before{content:"\f02d"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions 
.rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}} | ||||||
							
								
								
									
										1
									
								
								docs/_themes/sphinx_rtd_theme/static/css/theme.css
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										1
									
								
								docs/_themes/sphinx_rtd_theme/static/css/theme.css
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/favicon.ico
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/favicon.ico
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 6.1 KiB | 
							
								
								
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										399
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										399
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,399 @@ | |||||||
|  | <?xml version="1.0" standalone="no"?> | ||||||
|  | <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" > | ||||||
|  | <svg xmlns="http://www.w3.org/2000/svg"> | ||||||
|  | <metadata></metadata> | ||||||
|  | <defs> | ||||||
|  | <font id="fontawesomeregular" horiz-adv-x="1536" > | ||||||
|  | <font-face units-per-em="1792" ascent="1536" descent="-256" /> | ||||||
|  | <missing-glyph horiz-adv-x="448" /> | ||||||
|  | <glyph unicode=" "  horiz-adv-x="448" /> | ||||||
|  | <glyph unicode="	" horiz-adv-x="448" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="448" /> | ||||||
|  | <glyph unicode="¨" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="©" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="®" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="´" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="Æ" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="768" /> | ||||||
|  | <glyph unicode=" " /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="768" /> | ||||||
|  | <glyph unicode=" " /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="512" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="384" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="256" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="256" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="192" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="307" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="85" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="307" /> | ||||||
|  | <glyph unicode=" " horiz-adv-x="384" /> | ||||||
|  | <glyph unicode="™" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="∞" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="≠" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="500" d="M0 0z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1699 1350q0 -35 -43 -78l-632 -632v-768h320q26 0 45 -19t19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45t45 19h320v768l-632 632q-43 43 -43 78q0 23 18 36.5t38 17.5t43 4h1408q23 0 43 -4t38 -17.5t18 -36.5z" /> | ||||||
|  | <glyph unicode="" d="M1536 1312v-1120q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v537l-768 -237v-709q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89 t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v967q0 31 19 56.5t49 35.5l832 256q12 4 28 4q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -52 -38 -90t-90 -38q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5 t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1664 32v768q-32 -36 -69 -66q-268 -206 -426 -338q-51 -43 -83 -67t-86.5 -48.5t-102.5 -24.5h-1h-1q-48 0 -102.5 24.5t-86.5 48.5t-83 67q-158 132 -426 338q-37 30 -69 66v-768q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1664 1083v11v13.5t-0.5 13 t-3 12.5t-5.5 9t-9 7.5t-14 2.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5q0 -168 147 -284q193 -152 401 -317q6 -5 35 -29.5t46 -37.5t44.5 -31.5t50.5 -27.5t43 -9h1h1q20 0 43 9t50.5 27.5t44.5 31.5t46 37.5t35 29.5q208 165 401 317q54 43 100.5 115.5t46.5 131.5z M1792 1120v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M896 -128q-26 0 -44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124t127 -344q0 -221 -229 -450l-623 -600 q-18 -18 -44 -18z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -21 -10.5 -35.5t-30.5 -14.5q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455 l502 -73q56 -9 56 -46z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1137 532l306 297l-422 62l-189 382l-189 -382l-422 -62l306 -297l-73 -421l378 199l377 -199zM1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -50 -41 -50q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500 l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455l502 -73q56 -9 56 -46z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1408 131q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5t43 97.5t62 81t85.5 53.5t111.5 20q9 0 42 -21.5t74.5 -48t108 -48t133.5 -21.5t133.5 21.5t108 48t74.5 48t42 21.5q61 0 111.5 -20t85.5 -53.5t62 -81 t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M384 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 320v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 704v128q0 26 -19 45t-45 19h-128 q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 -64v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM384 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45 t45 -19h128q26 0 45 19t19 45zM1792 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 704v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1792 320v128 q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 704v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19 t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1920 1248v-1344q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1344q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M768 512v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM768 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 512v-384q0 -52 -38 -90t-90 -38 h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 288v-192q0 -40 -28 -68t-68 -28h-320 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-960 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 h960q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1671 970q0 -40 -28 -68l-724 -724l-136 -136q-28 -28 -68 -28t-68 28l-136 136l-362 362q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -295l656 657q28 28 68 28t68 -28l136 -136q28 -28 28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1298 214q0 -40 -28 -68l-136 -136q-28 -28 -68 -28t-68 28l-294 294l-294 -294q-28 -28 -68 -28t-68 28l-136 136q-28 28 -28 68t28 68l294 294l-294 294q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -294l294 294q28 28 68 28t68 -28l136 -136q28 -28 28 -68 t-28 -68l-294 -294l294 -294q28 -28 28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-224q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v224h-224q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h224v224q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-224h224 q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5 t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-576q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h576q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5z M1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z " /> | ||||||
|  | <glyph unicode="" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61t-298 61t-245 164t-164 245t-61 298q0 182 80.5 343t226.5 270q43 32 95.5 25t83.5 -50q32 -42 24.5 -94.5t-49.5 -84.5q-98 -74 -151.5 -181t-53.5 -228q0 -104 40.5 -198.5t109.5 -163.5t163.5 -109.5 t198.5 -40.5t198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5q0 121 -53.5 228t-151.5 181q-42 32 -49.5 84.5t24.5 94.5q31 43 84 50t95 -25q146 -109 226.5 -270t80.5 -343zM896 1408v-640q0 -52 -38 -90t-90 -38t-90 38t-38 90v640q0 52 38 90t90 38t90 -38t38 -90z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M256 96v-192q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM640 224v-320q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1024 480v-576q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23 v576q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1408 864v-960q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v960q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1792 1376v-1472q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1472q0 14 9 23t23 9h192q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" d="M1024 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1536 749v-222q0 -12 -8 -23t-20 -13l-185 -28q-19 -54 -39 -91q35 -50 107 -138q10 -12 10 -25t-9 -23q-27 -37 -99 -108t-94 -71q-12 0 -26 9l-138 108q-44 -23 -91 -38 q-16 -136 -29 -186q-7 -28 -36 -28h-222q-14 0 -24.5 8.5t-11.5 21.5l-28 184q-49 16 -90 37l-141 -107q-10 -9 -25 -9q-14 0 -25 11q-126 114 -165 168q-7 10 -7 23q0 12 8 23q15 21 51 66.5t54 70.5q-27 50 -41 99l-183 27q-13 2 -21 12.5t-8 23.5v222q0 12 8 23t19 13 l186 28q14 46 39 92q-40 57 -107 138q-10 12 -10 24q0 10 9 23q26 36 98.5 107.5t94.5 71.5q13 0 26 -10l138 -107q44 23 91 38q16 136 29 186q7 28 36 28h222q14 0 24.5 -8.5t11.5 -21.5l28 -184q49 -16 90 -37l142 107q9 9 24 9q13 0 25 -10q129 -119 165 -170q7 -8 7 -22 q0 -12 -8 -23q-15 -21 -51 -66.5t-54 -70.5q26 -50 41 -98l183 -28q13 -2 21 -12.5t8 -23.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M512 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM768 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1024 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1152 76v948h-896v-948q0 -22 7 -40.5t14.5 -27t10.5 -8.5h832q3 0 10.5 8.5t14.5 27t7 40.5zM480 1152h448l-48 117q-7 9 -17 11h-317q-10 -2 -17 -11zM1408 1120v-64q0 -14 -9 -23t-23 -9h-96v-948q0 -83 -47 -143.5t-113 -60.5h-832 q-66 0 -113 58.5t-47 141.5v952h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h309l70 167q15 37 54 63t79 26h320q40 0 79 -26t54 -63l70 -167h309q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1408 544v-480q0 -26 -19 -45t-45 -19h-384v384h-256v-384h-384q-26 0 -45 19t-19 45v480q0 1 0.5 3t0.5 3l575 474l575 -474q1 -2 1 -6zM1631 613l-62 -74q-8 -9 -21 -11h-3q-13 0 -21 7l-692 577l-692 -577q-12 -8 -24 -7q-13 2 -21 11l-62 74q-8 10 -7 23.5t11 21.5 l719 599q32 26 76 26t76 -26l244 -204v195q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-408l219 -182q10 -8 11 -21.5t-7 -23.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280zM768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z " /> | ||||||
|  | <glyph unicode="" d="M896 992v-448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1111 540v4l-24 320q-1 13 -11 22.5t-23 9.5h-186q-13 0 -23 -9.5t-11 -22.5l-24 -320v-4q-1 -12 8 -20t21 -8h244q12 0 21 8t8 20zM1870 73q0 -73 -46 -73h-704q13 0 22 9.5t8 22.5l-20 256q-1 13 -11 22.5t-23 9.5h-272q-13 0 -23 -9.5t-11 -22.5l-20 -256 q-1 -13 8 -22.5t22 -9.5h-704q-46 0 -46 73q0 54 26 116l417 1044q8 19 26 33t38 14h339q-13 0 -23 -9.5t-11 -22.5l-15 -192q-1 -14 8 -23t22 -9h166q13 0 22 9t8 23l-15 192q-1 13 -11 22.5t-23 9.5h339q20 0 38 -14t26 -33l417 -1044q26 -62 26 -116z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1280 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 416v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h465l135 -136 q58 -56 136 -56t136 56l136 136h464q40 0 68 -28t28 -68zM1339 985q17 -41 -14 -70l-448 -448q-18 -19 -45 -19t-45 19l-448 448q-31 29 -14 70q17 39 59 39h256v448q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-448h256q42 0 59 -39z" /> | ||||||
|  | <glyph unicode="" d="M1120 608q0 -12 -10 -24l-319 -319q-11 -9 -23 -9t-23 9l-320 320q-15 16 -7 35q8 20 30 20h192v352q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-352h192q14 0 23 -9t9 -23zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273 t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1118 660q-8 -20 -30 -20h-192v-352q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v352h-192q-14 0 -23 9t-9 23q0 12 10 24l319 319q11 9 23 9t23 -9l320 -320q15 -16 7 -35zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198 t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1023 576h316q-1 3 -2.5 8t-2.5 8l-212 496h-708l-212 -496q-1 -2 -2.5 -8t-2.5 -8h316l95 -192h320zM1536 546v-482q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v482q0 62 25 123l238 552q10 25 36.5 42t52.5 17h832q26 0 52.5 -17t36.5 -42l238 -552 q25 -61 25 -123z" /> | ||||||
|  | <glyph unicode="" d="M1184 640q0 -37 -32 -55l-544 -320q-15 -9 -32 -9q-16 0 -32 8q-32 19 -32 56v640q0 37 32 56q33 18 64 -1l544 -320q32 -18 32 -55zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l138 138q-148 137 -349 137q-104 0 -198.5 -40.5t-163.5 -109.5t-109.5 -163.5t-40.5 -198.5t40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5q119 0 225 52t179 147q7 10 23 12q14 0 25 -9 l137 -138q9 -8 9.5 -20.5t-7.5 -22.5q-109 -132 -264 -204.5t-327 -72.5q-156 0 -298 61t-245 164t-164 245t-61 298t61 298t164 245t245 164t298 61q147 0 284.5 -55.5t244.5 -156.5l130 129q29 31 70 14q39 -17 39 -59z" /> | ||||||
|  | <glyph unicode="" d="M1511 480q0 -5 -1 -7q-64 -268 -268 -434.5t-478 -166.5q-146 0 -282.5 55t-243.5 157l-129 -129q-19 -19 -45 -19t-45 19t-19 45v448q0 26 19 45t45 19h448q26 0 45 -19t19 -45t-19 -45l-137 -137q71 -66 161 -102t187 -36q134 0 250 65t186 179q11 17 53 117 q8 23 30 23h192q13 0 22.5 -9.5t9.5 -22.5zM1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-26 0 -45 19t-19 45t19 45l138 138q-148 137 -349 137q-134 0 -250 -65t-186 -179q-11 -17 -53 -117q-8 -23 -30 -23h-199q-13 0 -22.5 9.5t-9.5 22.5v7q65 268 270 434.5t480 166.5 q146 0 284 -55.5t245 -156.5l130 129q19 19 45 19t45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M384 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M384 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1536 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5z M1536 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5t9.5 -22.5zM1536 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h960q13 0 22.5 -9.5 t9.5 -22.5zM1664 160v832q0 13 -9.5 22.5t-22.5 9.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1792 1248v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47 t47 -113z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M320 768h512v192q0 106 -75 181t-181 75t-181 -75t-75 -181v-192zM1152 672v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v192q0 184 132 316t316 132t316 -132t132 -316v-192h32q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M320 1280q0 -72 -64 -110v-1266q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v1266q-64 38 -64 110q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -25 -12.5 -38.5t-39.5 -27.5q-215 -116 -369 -116q-61 0 -123.5 22t-108.5 48 t-115.5 48t-142.5 22q-192 0 -464 -146q-17 -9 -33 -9q-26 0 -45 19t-19 45v742q0 32 31 55q21 14 79 43q236 120 421 120q107 0 200 -29t219 -88q38 -19 88 -19q54 0 117.5 21t110 47t88 47t54.5 21q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1664 650q0 -166 -60 -314l-20 -49l-185 -33q-22 -83 -90.5 -136.5t-156.5 -53.5v-32q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-32q71 0 130 -35.5t93 -95.5l68 12q29 95 29 193q0 148 -88 279t-236.5 209t-315.5 78 t-315.5 -78t-236.5 -209t-88 -279q0 -98 29 -193l68 -12q34 60 93 95.5t130 35.5v32q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v32q-88 0 -156.5 53.5t-90.5 136.5l-185 33l-20 49q-60 148 -60 314q0 151 67 291t179 242.5 t266 163.5t320 61t320 -61t266 -163.5t179 -242.5t67 -291z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="768" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142zM1408 640q0 -153 -85 -282.5t-225 -188.5q-13 -5 -25 -5q-27 0 -46 19t-19 45q0 39 39 59q56 29 76 44q74 54 115.5 135.5t41.5 173.5t-41.5 173.5 t-115.5 135.5q-20 15 -76 44q-39 20 -39 59q0 26 19 45t45 19q13 0 26 -5q140 -59 225 -188.5t85 -282.5zM1664 640q0 -230 -127 -422.5t-338 -283.5q-13 -5 -26 -5q-26 0 -45 19t-19 45q0 36 39 59q7 4 22.5 10.5t22.5 10.5q46 25 82 51q123 91 192 227t69 289t-69 289 t-192 227q-36 26 -82 51q-7 4 -22.5 10.5t-22.5 10.5q-39 23 -39 59q0 26 19 45t45 19q13 0 26 -5q211 -91 338 -283.5t127 -422.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M384 384v-128h-128v128h128zM384 1152v-128h-128v128h128zM1152 1152v-128h-128v128h128zM128 129h384v383h-384v-383zM128 896h384v384h-384v-384zM896 896h384v384h-384v-384zM640 640v-640h-640v640h640zM1152 128v-128h-128v128h128zM1408 128v-128h-128v128h128z M1408 640v-384h-384v128h-128v-384h-128v640h384v-128h128v128h128zM640 1408v-640h-640v640h640zM1408 1408v-640h-640v640h640z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M63 0h-63v1408h63v-1408zM126 1h-32v1407h32v-1407zM220 1h-31v1407h31v-1407zM377 1h-31v1407h31v-1407zM534 1h-62v1407h62v-1407zM660 1h-31v1407h31v-1407zM723 1h-31v1407h31v-1407zM786 1h-31v1407h31v-1407zM943 1h-63v1407h63v-1407zM1100 1h-63v1407h63v-1407z M1226 1h-63v1407h63v-1407zM1352 1h-63v1407h63v-1407zM1446 1h-63v1407h63v-1407zM1635 1h-94v1407h94v-1407zM1698 1h-32v1407h32v-1407zM1792 0h-63v1408h63v-1408z" /> | ||||||
|  | <glyph unicode="" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91zM1899 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-36 0 -59 14t-53 45l470 470q37 37 37 90q0 52 -37 91l-715 714q-38 38 -102 64.5t-117 26.5h224q53 0 117 -26.5t102 -64.5l715 -714q37 -39 37 -91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1639 1058q40 -57 18 -129l-275 -906q-19 -64 -76.5 -107.5t-122.5 -43.5h-923q-77 0 -148.5 53.5t-99.5 131.5q-24 67 -2 127q0 4 3 27t4 37q1 8 -3 21.5t-3 19.5q2 11 8 21t16.5 23.5t16.5 23.5q23 38 45 91.5t30 91.5q3 10 0.5 30t-0.5 28q3 11 17 28t17 23 q21 36 42 92t25 90q1 9 -2.5 32t0.5 28q4 13 22 30.5t22 22.5q19 26 42.5 84.5t27.5 96.5q1 8 -3 25.5t-2 26.5q2 8 9 18t18 23t17 21q8 12 16.5 30.5t15 35t16 36t19.5 32t26.5 23.5t36 11.5t47.5 -5.5l-1 -3q38 9 51 9h761q74 0 114 -56t18 -130l-274 -906 q-36 -119 -71.5 -153.5t-128.5 -34.5h-869q-27 0 -38 -15q-11 -16 -1 -43q24 -70 144 -70h923q29 0 56 15.5t35 41.5l300 987q7 22 5 57q38 -15 59 -43zM575 1056q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5 t-16.5 -22.5zM492 800q-4 -13 2 -22.5t20 -9.5h608q13 0 25.5 9.5t16.5 22.5l21 64q4 13 -2 22.5t-20 9.5h-608q-13 0 -25.5 -9.5t-16.5 -22.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289q0 34 19.5 62t52.5 41q21 9 44 9h1048z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M384 0h896v256h-896v-256zM384 640h896v384h-160q-40 0 -68 28t-28 68v160h-640v-640zM1536 576q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 576v-416q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-160q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68 v160h-224q-13 0 -22.5 9.5t-9.5 22.5v416q0 79 56.5 135.5t135.5 56.5h64v544q0 40 28 68t68 28h672q40 0 88 -20t76 -48l152 -152q28 -28 48 -76t20 -88v-256h64q79 0 135.5 -56.5t56.5 -135.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M960 864q119 0 203.5 -84.5t84.5 -203.5t-84.5 -203.5t-203.5 -84.5t-203.5 84.5t-84.5 203.5t84.5 203.5t203.5 84.5zM1664 1280q106 0 181 -75t75 -181v-896q0 -106 -75 -181t-181 -75h-1408q-106 0 -181 75t-75 181v896q0 106 75 181t181 75h224l51 136 q19 49 69.5 84.5t103.5 35.5h512q53 0 103.5 -35.5t69.5 -84.5l51 -136h224zM960 128q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M725 977l-170 -450q73 -1 153.5 -2t119 -1.5t52.5 -0.5l29 2q-32 95 -92 241q-53 132 -92 211zM21 -128h-21l2 79q22 7 80 18q89 16 110 31q20 16 48 68l237 616l280 724h75h53l11 -21l205 -480q103 -242 124 -297q39 -102 96 -235q26 -58 65 -164q24 -67 65 -149 q22 -49 35 -57q22 -19 69 -23q47 -6 103 -27q6 -39 6 -57q0 -14 -1 -26q-80 0 -192 8q-93 8 -189 8q-79 0 -135 -2l-200 -11l-58 -2q0 45 4 78l131 28q56 13 68 23q12 12 12 27t-6 32l-47 114l-92 228l-450 2q-29 -65 -104 -274q-23 -64 -23 -84q0 -31 17 -43 q26 -21 103 -32q3 0 13.5 -2t30 -5t40.5 -6q1 -28 1 -58q0 -17 -2 -27q-66 0 -349 20l-48 -8q-81 -14 -167 -14z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M555 15q76 -32 140 -32q131 0 216 41t122 113q38 70 38 181q0 114 -41 180q-58 94 -141 126q-80 32 -247 32q-74 0 -101 -10v-144l-1 -173l3 -270q0 -15 12 -44zM541 761q43 -7 109 -7q175 0 264 65t89 224q0 112 -85 187q-84 75 -255 75q-52 0 -130 -13q0 -44 2 -77 q7 -122 6 -279l-1 -98q0 -43 1 -77zM0 -128l2 94q45 9 68 12q77 12 123 31q17 27 21 51q9 66 9 194l-2 497q-5 256 -9 404q-1 87 -11 109q-1 4 -12 12q-18 12 -69 15q-30 2 -114 13l-4 83l260 6l380 13l45 1q5 0 14 0.5t14 0.5q1 0 21.5 -0.5t40.5 -0.5h74q88 0 191 -27 q43 -13 96 -39q57 -29 102 -76q44 -47 65 -104t21 -122q0 -70 -32 -128t-95 -105q-26 -20 -150 -77q177 -41 267 -146q92 -106 92 -236q0 -76 -29 -161q-21 -62 -71 -117q-66 -72 -140 -108q-73 -36 -203 -60q-82 -15 -198 -11l-197 4q-84 2 -298 -11q-33 -3 -272 -11z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M0 -126l17 85q4 1 77 20q76 19 116 39q29 37 41 101l27 139l56 268l12 64q8 44 17 84.5t16 67t12.5 46.5t9 30.5t3.5 11.5l29 157l16 63l22 135l8 50v38q-41 22 -144 28q-28 2 -38 4l19 103l317 -14q39 -2 73 -2q66 0 214 9q33 2 68 4.5t36 2.5q-2 -19 -6 -38 q-7 -29 -13 -51q-55 -19 -109 -31q-64 -16 -101 -31q-12 -31 -24 -88q-9 -44 -13 -82q-44 -199 -66 -306l-61 -311l-38 -158l-43 -235l-12 -45q-2 -7 1 -27q64 -15 119 -21q36 -5 66 -10q-1 -29 -7 -58q-7 -31 -9 -41q-18 0 -23 -1q-24 -2 -42 -2q-9 0 -28 3q-19 4 -145 17 l-198 2q-41 1 -174 -11q-74 -7 -98 -9z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l215 -1h293l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -42.5 2t-103.5 -1t-111 -1 q-34 0 -67 -5q-10 -97 -8 -136l1 -152v-332l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-88 0 -233 -14q-48 -4 -70 -4q-2 22 -2 26l-1 26v9q21 33 79 49 q139 38 159 50q9 21 12 56q8 192 6 433l-5 428q-1 62 -0.5 118.5t0.5 102.5t-2 57t-6 15q-6 5 -14 6q-38 6 -148 6q-43 0 -100 -13.5t-73 -24.5q-13 -9 -22 -33t-22 -75t-24 -84q-6 -19 -19.5 -32t-20.5 -13q-44 27 -56 44v297v86zM1744 128q33 0 42 -18.5t-11 -44.5 l-126 -162q-20 -26 -49 -26t-49 26l-126 162q-20 26 -11 44.5t42 18.5h80v1024h-80q-33 0 -42 18.5t11 44.5l126 162q20 26 49 26t49 -26l126 -162q20 -26 11 -44.5t-42 -18.5h-80v-1024h80z" /> | ||||||
|  | <glyph unicode="" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l446 -1h318l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -58.5 2t-138.5 -1t-128 -1 q-94 0 -127 -5q-10 -97 -8 -136l1 -152v52l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-82 0 -233 -13q-45 -5 -70 -5q-2 22 -2 26l-1 26v9q21 33 79 49 q139 38 159 50q9 21 12 56q6 137 6 433l-5 44q0 265 -2 278q-2 11 -6 15q-6 5 -14 6q-38 6 -148 6q-50 0 -168.5 -14t-132.5 -24q-13 -9 -22 -33t-22 -75t-24 -84q-6 -19 -19.5 -32t-20.5 -13q-44 27 -56 44v297v86zM1505 113q26 -20 26 -49t-26 -49l-162 -126 q-26 -20 -44.5 -11t-18.5 42v80h-1024v-80q0 -33 -18.5 -42t-44.5 11l-162 126q-26 20 -26 49t26 49l162 126q26 20 44.5 11t18.5 -42v-80h1024v80q0 33 18.5 42t44.5 -11z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h896q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45t-45 -19 h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h640q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M256 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM256 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5 t9.5 -22.5zM256 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1344 q13 0 22.5 -9.5t9.5 -22.5zM256 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v192 q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M384 992v-576q0 -13 -9.5 -22.5t-22.5 -9.5q-14 0 -23 9l-288 288q-9 9 -9 23t9 23l288 288q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5 t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088 q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M352 704q0 -14 -9 -23l-288 -288q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v576q0 13 9.5 22.5t22.5 9.5q14 0 23 -9l288 -288q9 -9 9 -23zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5 t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088 q13 0 22.5 -9.5t9.5 -22.5zM1792 1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 1184v-1088q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-403 403v-166q0 -119 -84.5 -203.5t-203.5 -84.5h-704q-119 0 -203.5 84.5t-84.5 203.5v704q0 119 84.5 203.5t203.5 84.5h704q119 0 203.5 -84.5t84.5 -203.5v-165l403 402q18 19 45 19q12 0 25 -5 q39 -17 39 -59z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M640 960q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1664 576v-448h-1408v192l320 320l160 -160l512 512zM1760 1280h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5v1216 q0 13 -9.5 22.5t-22.5 9.5zM1920 1248v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> | ||||||
|  | <glyph unicode="" d="M363 0l91 91l-235 235l-91 -91v-107h128v-128h107zM886 928q0 22 -22 22q-10 0 -17 -7l-542 -542q-7 -7 -7 -17q0 -22 22 -22q10 0 17 7l542 542q7 7 7 17zM832 1120l416 -416l-832 -832h-416v416zM1515 1024q0 -53 -37 -90l-166 -166l-416 416l166 165q36 38 90 38 q53 0 91 -38l235 -234q37 -39 37 -91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M768 896q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1024 896q0 -109 -33 -179l-364 -774q-16 -33 -47.5 -52t-67.5 -19t-67.5 19t-46.5 52l-365 774q-33 70 -33 179q0 212 150 362t362 150t362 -150t150 -362z" /> | ||||||
|  | <glyph unicode="" d="M768 96v1088q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M512 384q0 36 -20 69q-1 1 -15.5 22.5t-25.5 38t-25 44t-21 50.5q-4 16 -21 16t-21 -16q-7 -23 -21 -50.5t-25 -44t-25.5 -38t-15.5 -22.5q-20 -33 -20 -69q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 512q0 -212 -150 -362t-362 -150t-362 150t-150 362 q0 145 81 275q6 9 62.5 90.5t101 151t99.5 178t83 201.5q9 30 34 47t51 17t51.5 -17t33.5 -47q28 -93 83 -201.5t99.5 -178t101 -151t62.5 -90.5q81 -127 81 -275z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M888 352l116 116l-152 152l-116 -116v-56h96v-96h56zM1328 1072q-16 16 -33 -1l-350 -350q-17 -17 -1 -33t33 1l350 350q17 17 1 33zM1408 478v-190q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832 q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-14 -14 -32 -8q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v126q0 13 9 22l64 64q15 15 35 7t20 -29zM1312 1216l288 -288l-672 -672h-288v288zM1756 1084l-92 -92 l-288 288l92 92q28 28 68 28t68 -28l152 -152q28 -28 28 -68t-28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1408 547v-259q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h255v0q13 0 22.5 -9.5t9.5 -22.5q0 -27 -26 -32q-77 -26 -133 -60q-10 -4 -16 -4h-112q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832 q66 0 113 47t47 113v214q0 19 18 29q28 13 54 37q16 16 35 8q21 -9 21 -29zM1645 1043l-384 -384q-18 -19 -45 -19q-12 0 -25 5q-39 17 -39 59v192h-160q-323 0 -438 -131q-119 -137 -74 -473q3 -23 -20 -34q-8 -2 -12 -2q-16 0 -26 13q-10 14 -21 31t-39.5 68.5t-49.5 99.5 t-38.5 114t-17.5 122q0 49 3.5 91t14 90t28 88t47 81.5t68.5 74t94.5 61.5t124.5 48.5t159.5 30.5t196.5 11h160v192q0 42 39 59q13 5 25 5q26 0 45 -19l384 -384q19 -19 19 -45t-19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1408 606v-318q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-10 -10 -23 -10q-3 0 -9 2q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832 q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v254q0 13 9 22l64 64q10 10 23 10q6 0 12 -3q20 -8 20 -29zM1639 1095l-814 -814q-24 -24 -57 -24t-57 24l-430 430q-24 24 -24 57t24 57l110 110q24 24 57 24t57 -24l263 -263l647 647q24 24 57 24t57 -24l110 -110 q24 -24 24 -57t-24 -57z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -26 -19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-384v-384h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v384h-384v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45 t19 45l256 256q19 19 45 19t45 -19t19 -45v-128h384v384h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45t-19 -45t-45 -19h-128v-384h384v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M979 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1747 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19l710 710 q19 19 32 13t13 -32v-710q4 11 13 19z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1619 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-8 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-19 19 -19 45t19 45l710 710q19 19 32 13t13 -32v-710q5 11 13 19z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1384 609l-1328 -738q-23 -13 -39.5 -3t-16.5 36v1472q0 26 16.5 36t39.5 -3l1328 -738q23 -13 23 -31t-23 -31z" /> | ||||||
|  | <glyph unicode="" d="M1536 1344v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45zM640 1344v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" d="M1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q19 -19 19 -45t-19 -45l-710 -710q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19l-710 -710 q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1538" d="M14 557l710 710q19 19 45 19t45 -19l710 -710q19 -19 13 -32t-32 -13h-1472q-26 0 -32 13t13 32zM1473 0h-1408q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M742 -37l-652 651q-37 37 -37 90.5t37 90.5l652 651q37 37 90.5 37t90.5 -37l75 -75q37 -37 37 -90.5t-37 -90.5l-486 -486l486 -485q37 -38 37 -91t-37 -90l-75 -75q-37 -37 -90.5 -37t-90.5 37z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M1099 704q0 -52 -37 -91l-652 -651q-37 -37 -90 -37t-90 37l-76 75q-37 39 -37 91q0 53 37 90l486 486l-486 485q-37 39 -37 91q0 53 37 90l76 75q36 38 90 38t90 -38l652 -651q37 -37 37 -90z" /> | ||||||
|  | <glyph unicode="" d="M1216 576v128q0 26 -19 45t-45 19h-256v256q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-256h-256q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h256v-256q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v256h256q26 0 45 19t19 45zM1536 640q0 -209 -103 -385.5 t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1216 576v128q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5 t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1149 414q0 26 -19 45l-181 181l181 181q19 19 19 45q0 27 -19 46l-90 90q-19 19 -46 19q-26 0 -45 -19l-181 -181l-181 181q-19 19 -45 19q-27 0 -46 -19l-90 -90q-19 -19 -19 -46q0 -26 19 -45l181 -181l-181 -181q-19 -19 -19 -45q0 -27 19 -46l90 -90q19 -19 46 -19 q26 0 45 19l181 181l181 -181q19 -19 45 -19q27 0 46 19l90 90q19 19 19 46zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1284 802q0 28 -18 46l-91 90q-19 19 -45 19t-45 -19l-408 -407l-226 226q-19 19 -45 19t-45 -19l-91 -90q-18 -18 -18 -46q0 -27 18 -45l362 -362q19 -19 45 -19q27 0 46 19l543 543q18 18 18 45zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M896 160v192q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h192q14 0 23 9t9 23zM1152 832q0 88 -55.5 163t-138.5 116t-170 41q-243 0 -371 -213q-15 -24 8 -42l132 -100q7 -6 19 -6q16 0 25 12q53 68 86 92q34 24 86 24q48 0 85.5 -26t37.5 -59 q0 -38 -20 -61t-68 -45q-63 -28 -115.5 -86.5t-52.5 -125.5v-36q0 -14 9 -23t23 -9h192q14 0 23 9t9 23q0 19 21.5 49.5t54.5 49.5q32 18 49 28.5t46 35t44.5 48t28 60.5t12.5 81zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1024 160v160q0 14 -9 23t-23 9h-96v512q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h96v-320h-96q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h448q14 0 23 9t9 23zM896 1056v160q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23 t23 -9h192q14 0 23 9t9 23zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1197 512h-109q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h109q-32 108 -112.5 188.5t-188.5 112.5v-109q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v109q-108 -32 -188.5 -112.5t-112.5 -188.5h109q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-109 q32 -108 112.5 -188.5t188.5 -112.5v109q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-109q108 32 188.5 112.5t112.5 188.5zM1536 704v-128q0 -26 -19 -45t-45 -19h-143q-37 -161 -154.5 -278.5t-278.5 -154.5v-143q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v143 q-161 37 -278.5 154.5t-154.5 278.5h-143q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h143q37 161 154.5 278.5t278.5 154.5v143q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-143q161 -37 278.5 -154.5t154.5 -278.5h143q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" d="M1097 457l-146 -146q-10 -10 -23 -10t-23 10l-137 137l-137 -137q-10 -10 -23 -10t-23 10l-146 146q-10 10 -10 23t10 23l137 137l-137 137q-10 10 -10 23t10 23l146 146q10 10 23 10t23 -10l137 -137l137 137q10 10 23 10t23 -10l146 -146q10 -10 10 -23t-10 -23 l-137 -137l137 -137q10 -10 10 -23t-10 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5 t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1171 723l-422 -422q-19 -19 -45 -19t-45 19l-294 294q-19 19 -19 45t19 45l102 102q19 19 45 19t45 -19l147 -147l275 275q19 19 45 19t45 -19l102 -102q19 -19 19 -45t-19 -45zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198 t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1312 643q0 161 -87 295l-754 -753q137 -89 297 -89q111 0 211.5 43.5t173.5 116.5t116 174.5t43 212.5zM313 344l755 754q-135 91 -300 91q-148 0 -273 -73t-198 -199t-73 -274q0 -162 89 -299zM1536 643q0 -157 -61 -300t-163.5 -246t-245 -164t-298.5 -61t-298.5 61 t-245 164t-163.5 246t-61 300t61 299.5t163.5 245.5t245 164t298.5 61t298.5 -61t245 -164t163.5 -245.5t61 -299.5z" /> | ||||||
|  | <glyph unicode="" d="M1536 640v-128q0 -53 -32.5 -90.5t-84.5 -37.5h-704l293 -294q38 -36 38 -90t-38 -90l-75 -76q-37 -37 -90 -37q-52 0 -91 37l-651 652q-37 37 -37 90q0 52 37 91l651 650q38 38 91 38q52 0 90 -38l75 -74q38 -38 38 -91t-38 -91l-293 -293h704q52 0 84.5 -37.5 t32.5 -90.5z" /> | ||||||
|  | <glyph unicode="" d="M1472 576q0 -54 -37 -91l-651 -651q-39 -37 -91 -37q-51 0 -90 37l-75 75q-38 38 -38 91t38 91l293 293h-704q-52 0 -84.5 37.5t-32.5 90.5v128q0 53 32.5 90.5t84.5 37.5h704l-293 294q-38 36 -38 90t38 90l75 75q38 38 90 38q53 0 91 -38l651 -651q37 -35 37 -90z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1611 565q0 -51 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-294 293v-704q0 -52 -37.5 -84.5t-90.5 -32.5h-128q-53 0 -90.5 32.5t-37.5 84.5v704l-294 -293q-36 -38 -90 -38t-90 38l-75 75q-38 38 -38 90q0 53 38 91l651 651q35 37 90 37q54 0 91 -37l651 -651 q37 -39 37 -91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1611 704q0 -53 -37 -90l-651 -652q-39 -37 -91 -37q-53 0 -90 37l-651 652q-38 36 -38 90q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l294 -294v704q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-704l294 294q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 896q0 -26 -19 -45l-512 -512q-19 -19 -45 -19t-45 19t-19 45v256h-224q-98 0 -175.5 -6t-154 -21.5t-133 -42.5t-105.5 -69.5t-80 -101t-48.5 -138.5t-17.5 -181q0 -55 5 -123q0 -6 2.5 -23.5t2.5 -26.5q0 -15 -8.5 -25t-23.5 -10q-16 0 -28 17q-7 9 -13 22 t-13.5 30t-10.5 24q-127 285 -127 451q0 199 53 333q162 403 875 403h224v256q0 26 19 45t45 19t45 -19l512 -512q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" d="M755 480q0 -13 -10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45t-45 -19h-448q-26 0 -45 19t-19 45v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23zM1536 1344v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332 q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" d="M768 576v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45zM1523 1248q0 -13 -10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45 t-45 -19h-448q-26 0 -45 19t-19 45v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1408 800v-192q0 -40 -28 -68t-68 -28h-416v-416q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v416h-416q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h416v416q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-416h416q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1408 800v-192q0 -40 -28 -68t-68 -28h-1216q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h1216q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1482 486q46 -26 59.5 -77.5t-12.5 -97.5l-64 -110q-26 -46 -77.5 -59.5t-97.5 12.5l-266 153v-307q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v307l-266 -153q-46 -26 -97.5 -12.5t-77.5 59.5l-64 110q-26 46 -12.5 97.5t59.5 77.5l266 154l-266 154 q-46 26 -59.5 77.5t12.5 97.5l64 110q26 46 77.5 59.5t97.5 -12.5l266 -153v307q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-307l266 153q46 26 97.5 12.5t77.5 -59.5l64 -110q26 -46 12.5 -97.5t-59.5 -77.5l-266 -154z" /> | ||||||
|  | <glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM896 161v190q0 14 -9 23.5t-22 9.5h-192q-13 0 -23 -10t-10 -23v-190q0 -13 10 -23t23 -10h192 q13 0 22 9.5t9 23.5zM894 505l18 621q0 12 -10 18q-10 8 -24 8h-220q-14 0 -24 -8q-10 -6 -10 -18l17 -621q0 -10 10 -17.5t24 -7.5h185q14 0 23.5 7.5t10.5 17.5z" /> | ||||||
|  | <glyph unicode="" d="M928 180v56v468v192h-320v-192v-468v-56q0 -25 18 -38.5t46 -13.5h192q28 0 46 13.5t18 38.5zM472 1024h195l-126 161q-26 31 -69 31q-40 0 -68 -28t-28 -68t28 -68t68 -28zM1160 1120q0 40 -28 68t-68 28q-43 0 -69 -31l-125 -161h194q40 0 68 28t28 68zM1536 864v-320 q0 -14 -9 -23t-23 -9h-96v-416q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v416h-96q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h440q-93 0 -158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5q107 0 168 -77l128 -165l128 165q61 77 168 77q93 0 158.5 -65.5t65.5 -158.5 t-65.5 -158.5t-158.5 -65.5h440q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1280 832q0 26 -19 45t-45 19q-172 0 -318 -49.5t-259.5 -134t-235.5 -219.5q-19 -21 -19 -45q0 -26 19 -45t45 -19q24 0 45 19q27 24 74 71t67 66q137 124 268.5 176t313.5 52q26 0 45 19t19 45zM1792 1030q0 -95 -20 -193q-46 -224 -184.5 -383t-357.5 -268 q-214 -108 -438 -108q-148 0 -286 47q-15 5 -88 42t-96 37q-16 0 -39.5 -32t-45 -70t-52.5 -70t-60 -32q-30 0 -51 11t-31 24t-27 42q-2 4 -6 11t-5.5 10t-3 9.5t-1.5 13.5q0 35 31 73.5t68 65.5t68 56t31 48q0 4 -14 38t-16 44q-9 51 -9 104q0 115 43.5 220t119 184.5 t170.5 139t204 95.5q55 18 145 25.5t179.5 9t178.5 6t163.5 24t113.5 56.5l29.5 29.5t29.5 28t27 20t36.5 16t43.5 4.5q39 0 70.5 -46t47.5 -112t24 -124t8 -96z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1408 -160v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1152 896q0 -78 -24.5 -144t-64 -112.5t-87.5 -88t-96 -77.5t-87.5 -72t-64 -81.5t-24.5 -96.5q0 -96 67 -224l-4 1l1 -1 q-90 41 -160 83t-138.5 100t-113.5 122.5t-72.5 150.5t-27.5 184q0 78 24.5 144t64 112.5t87.5 88t96 77.5t87.5 72t64 81.5t24.5 96.5q0 94 -66 224l3 -1l-1 1q90 -41 160 -83t138.5 -100t113.5 -122.5t72.5 -150.5t27.5 -184z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1664 576q-152 236 -381 353q61 -104 61 -225q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 121 61 225q-229 -117 -381 -353q133 -205 333.5 -326.5t434.5 -121.5t434.5 121.5t333.5 326.5zM944 960q0 20 -14 34t-34 14q-125 0 -214.5 -89.5 t-89.5 -214.5q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34zM1792 576q0 -34 -20 -69q-140 -230 -376.5 -368.5t-499.5 -138.5t-499.5 139t-376.5 368q-20 35 -20 69t20 69q140 229 376.5 368t499.5 139t499.5 -139t376.5 -368q20 -35 20 -69z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M555 201l78 141q-87 63 -136 159t-49 203q0 121 61 225q-229 -117 -381 -353q167 -258 427 -375zM944 960q0 20 -14 34t-34 14q-125 0 -214.5 -89.5t-89.5 -214.5q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34zM1307 1151q0 -7 -1 -9 q-105 -188 -315 -566t-316 -567l-49 -89q-10 -16 -28 -16q-12 0 -134 70q-16 10 -16 28q0 12 44 87q-143 65 -263.5 173t-208.5 245q-20 31 -20 69t20 69q153 235 380 371t496 136q89 0 180 -17l54 97q10 16 28 16q5 0 18 -6t31 -15.5t33 -18.5t31.5 -18.5t19.5 -11.5 q16 -10 16 -27zM1344 704q0 -139 -79 -253.5t-209 -164.5l280 502q8 -45 8 -84zM1792 576q0 -35 -20 -69q-39 -64 -109 -145q-150 -172 -347.5 -267t-419.5 -95l74 132q212 18 392.5 137t301.5 307q-115 179 -282 294l63 112q95 -64 182.5 -153t144.5 -184q20 -34 20 -69z " /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1024 161v190q0 14 -9.5 23.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -23.5v-190q0 -14 9.5 -23.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 23.5zM1022 535l18 459q0 12 -10 19q-13 11 -24 11h-220q-11 0 -24 -11q-10 -7 -10 -21l17 -457q0 -10 10 -16.5t24 -6.5h185 q14 0 23.5 6.5t10.5 16.5zM1008 1469l768 -1408q35 -63 -2 -126q-17 -29 -46.5 -46t-63.5 -17h-1536q-34 0 -63.5 17t-46.5 46q-37 63 -2 126l768 1408q17 31 47 49t65 18t65 -18t47 -49z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1376 1376q44 -52 12 -148t-108 -172l-161 -161l160 -696q5 -19 -12 -33l-128 -96q-7 -6 -19 -6q-4 0 -7 1q-15 3 -21 16l-279 508l-259 -259l53 -194q5 -17 -8 -31l-96 -96q-9 -9 -23 -9h-2q-15 2 -24 13l-189 252l-252 189q-11 7 -13 23q-1 13 9 25l96 97q9 9 23 9 q6 0 8 -1l194 -53l259 259l-508 279q-14 8 -17 24q-2 16 9 27l128 128q14 13 30 8l665 -159l160 160q76 76 172 108t148 -12z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M128 -128h288v288h-288v-288zM480 -128h320v288h-320v-288zM128 224h288v320h-288v-320zM480 224h320v320h-320v-320zM128 608h288v288h-288v-288zM864 -128h320v288h-320v-288zM480 608h320v288h-320v-288zM1248 -128h288v288h-288v-288zM864 224h320v320h-320v-320z M512 1088v288q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-288q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1248 224h288v320h-288v-320zM864 608h320v288h-320v-288zM1248 608h288v288h-288v-288zM1280 1088v288q0 13 -9.5 22.5t-22.5 9.5h-64 q-13 0 -22.5 -9.5t-9.5 -22.5v-288q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1664 1152v-1280q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47 h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M666 1055q-60 -92 -137 -273q-22 45 -37 72.5t-40.5 63.5t-51 56.5t-63 35t-81.5 14.5h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224q250 0 410 -225zM1792 256q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192q-32 0 -85 -0.5t-81 -1t-73 1 t-71 5t-64 10.5t-63 18.5t-58 28.5t-59 40t-55 53.5t-56 69.5q59 93 136 273q22 -45 37 -72.5t40.5 -63.5t51 -56.5t63 -35t81.5 -14.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23zM1792 1152q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5 v192h-256q-48 0 -87 -15t-69 -45t-51 -61.5t-45 -77.5q-32 -62 -78 -171q-29 -66 -49.5 -111t-54 -105t-64 -100t-74 -83t-90 -68.5t-106.5 -42t-128 -16.5h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224q48 0 87 15t69 45t51 61.5t45 77.5q32 62 78 171q29 66 49.5 111 t54 105t64 100t74 83t90 68.5t106.5 42t128 16.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22q-17 -2 -30.5 9t-17.5 29v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281 q0 130 71 248.5t191 204.5t286 136.5t348 50.5q244 0 450 -85.5t326 -233t120 -321.5z" /> | ||||||
|  | <glyph unicode="" d="M1536 704v-128q0 -201 -98.5 -362t-274 -251.5t-395.5 -90.5t-395.5 90.5t-274 251.5t-98.5 362v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-128q0 -52 23.5 -90t53.5 -57t71 -30t64 -13t44 -2t44 2t64 13t71 30t53.5 57t23.5 90v128q0 26 19 45t45 19h384 q26 0 45 -19t19 -45zM512 1344v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45zM1536 1344v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1611 320q0 -53 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-486 485l-486 -485q-36 -38 -90 -38t-90 38l-75 75q-38 36 -38 90q0 53 38 91l651 651q37 37 90 37q52 0 91 -37l650 -651q38 -38 38 -91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1611 832q0 -53 -37 -90l-651 -651q-38 -38 -91 -38q-54 0 -90 38l-651 651q-38 36 -38 90q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l486 -486l486 486q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1280 32q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-8 0 -13.5 2t-9 7t-5.5 8t-3 11.5t-1 11.5v13v11v160v416h-192q-26 0 -45 19t-19 45q0 24 15 41l320 384q19 22 49 22t49 -22l320 -384q15 -17 15 -41q0 -26 -19 -45t-45 -19h-192v-384h576q16 0 25 -11l160 -192q7 -11 7 -21 zM1920 448q0 -24 -15 -41l-320 -384q-20 -23 -49 -23t-49 23l-320 384q-15 17 -15 41q0 26 19 45t45 19h192v384h-576q-16 0 -25 12l-160 192q-7 9 -7 20q0 13 9.5 22.5t22.5 9.5h960q8 0 13.5 -2t9 -7t5.5 -8t3 -11.5t1 -11.5v-13v-11v-160v-416h192q26 0 45 -19t19 -45z " /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M640 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1536 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1664 1088v-512q0 -24 -16 -42.5t-41 -21.5 l-1044 -122q1 -7 4.5 -21.5t6 -26.5t2.5 -22q0 -16 -24 -64h920q26 0 45 -19t19 -45t-19 -45t-45 -19h-1024q-26 0 -45 19t-19 45q0 14 11 39.5t29.5 59.5t20.5 38l-177 823h-204q-26 0 -45 19t-19 45t19 45t45 19h256q16 0 28.5 -6.5t20 -15.5t13 -24.5t7.5 -26.5 t5.5 -29.5t4.5 -25.5h1201q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1879 584q0 -31 -31 -66l-336 -396q-43 -51 -120.5 -86.5t-143.5 -35.5h-1088q-34 0 -60.5 13t-26.5 43q0 31 31 66l336 396q43 51 120.5 86.5t143.5 35.5h1088q34 0 60.5 -13t26.5 -43zM1536 928v-160h-832q-94 0 -197 -47.5t-164 -119.5l-337 -396l-5 -6q0 4 -0.5 12.5 t-0.5 12.5v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="768" d="M704 1216q0 -26 -19 -45t-45 -19h-128v-1024h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v1024h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 640q0 -26 -19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-1024v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45t19 45l256 256q19 19 45 19t45 -19t19 -45v-128h1024v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M512 512v-384h-256v384h256zM896 1024v-896h-256v896h256zM1280 768v-640h-256v640h256zM1664 1152v-1024h-256v1024h256zM1792 32v1216q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5z M1920 1248v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> | ||||||
|  | <glyph unicode="" d="M1280 926q-56 -25 -121 -34q68 40 93 117q-65 -38 -134 -51q-61 66 -153 66q-87 0 -148.5 -61.5t-61.5 -148.5q0 -29 5 -48q-129 7 -242 65t-192 155q-29 -50 -29 -106q0 -114 91 -175q-47 1 -100 26v-2q0 -75 50 -133.5t123 -72.5q-29 -8 -51 -8q-13 0 -39 4 q21 -63 74.5 -104t121.5 -42q-116 -90 -261 -90q-26 0 -50 3q148 -94 322 -94q112 0 210 35.5t168 95t120.5 137t75 162t24.5 168.5q0 18 -1 27q63 45 105 109zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5 t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M1307 618l23 219h-198v109q0 49 15.5 68.5t71.5 19.5h110v219h-175q-152 0 -218 -72t-66 -213v-131h-131v-219h131v-635h262v635h175zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960 q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M928 704q0 14 -9 23t-23 9q-66 0 -113 -47t-47 -113q0 -14 9 -23t23 -9t23 9t9 23q0 40 28 68t68 28q14 0 23 9t9 23zM1152 574q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM128 0h1536v128h-1536v-128zM1280 574q0 159 -112.5 271.5 t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM256 1216h384v128h-384v-128zM128 1024h1536v118v138h-828l-64 -128h-644v-128zM1792 1280v-1280q0 -53 -37.5 -90.5t-90.5 -37.5h-1536q-53 0 -90.5 37.5t-37.5 90.5v1280 q0 53 37.5 90.5t90.5 37.5h1536q53 0 90.5 -37.5t37.5 -90.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M832 1024q0 80 -56 136t-136 56t-136 -56t-56 -136q0 -42 19 -83q-41 19 -83 19q-80 0 -136 -56t-56 -136t56 -136t136 -56t136 56t56 136q0 42 -19 83q41 -19 83 -19q80 0 136 56t56 136zM1683 320q0 -17 -49 -66t-66 -49q-9 0 -28.5 16t-36.5 33t-38.5 40t-24.5 26 l-96 -96l220 -220q28 -28 28 -68q0 -42 -39 -81t-81 -39q-40 0 -68 28l-671 671q-176 -131 -365 -131q-163 0 -265.5 102.5t-102.5 265.5q0 160 95 313t248 248t313 95q163 0 265.5 -102.5t102.5 -265.5q0 -189 -131 -365l355 -355l96 96q-3 3 -26 24.5t-40 38.5t-33 36.5 t-16 28.5q0 17 49 66t66 49q13 0 23 -10q6 -6 46 -44.5t82 -79.5t86.5 -86t73 -78t28.5 -41z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M896 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1664 128q0 52 -38 90t-90 38t-90 -38t-38 -90q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 1152q0 52 -38 90t-90 38t-90 -38t-38 -90q0 -53 37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1280 731v-185q0 -10 -7 -19.5t-16 -10.5l-155 -24q-11 -35 -32 -76q34 -48 90 -115q7 -10 7 -20q0 -12 -7 -19q-23 -30 -82.5 -89.5t-78.5 -59.5q-11 0 -21 7l-115 90q-37 -19 -77 -31q-11 -108 -23 -155q-7 -24 -30 -24h-186q-11 0 -20 7.5t-10 17.5 l-23 153q-34 10 -75 31l-118 -89q-7 -7 -20 -7q-11 0 -21 8q-144 133 -144 160q0 9 7 19q10 14 41 53t47 61q-23 44 -35 82l-152 24q-10 1 -17 9.5t-7 19.5v185q0 10 7 19.5t16 10.5l155 24q11 35 32 76q-34 48 -90 115q-7 11 -7 20q0 12 7 20q22 30 82 89t79 59q11 0 21 -7 l115 -90q34 18 77 32q11 108 23 154q7 24 30 24h186q11 0 20 -7.5t10 -17.5l23 -153q34 -10 75 -31l118 89q8 7 20 7q11 0 21 -8q144 -133 144 -160q0 -9 -7 -19q-12 -16 -42 -54t-45 -60q23 -48 34 -82l152 -23q10 -2 17 -10.5t7 -19.5zM1920 198v-140q0 -16 -149 -31 q-12 -27 -30 -52q51 -113 51 -138q0 -4 -4 -7q-122 -71 -124 -71q-8 0 -46 47t-52 68q-20 -2 -30 -2t-30 2q-14 -21 -52 -68t-46 -47q-2 0 -124 71q-4 3 -4 7q0 25 51 138q-18 25 -30 52q-149 15 -149 31v140q0 16 149 31q13 29 30 52q-51 113 -51 138q0 4 4 7q4 2 35 20 t59 34t30 16q8 0 46 -46.5t52 -67.5q20 2 30 2t30 -2q51 71 92 112l6 2q4 0 124 -70q4 -3 4 -7q0 -25 -51 -138q17 -23 30 -52q149 -15 149 -31zM1920 1222v-140q0 -16 -149 -31q-12 -27 -30 -52q51 -113 51 -138q0 -4 -4 -7q-122 -71 -124 -71q-8 0 -46 47t-52 68 q-20 -2 -30 -2t-30 2q-14 -21 -52 -68t-46 -47q-2 0 -124 71q-4 3 -4 7q0 25 51 138q-18 25 -30 52q-149 15 -149 31v140q0 16 149 31q13 29 30 52q-51 113 -51 138q0 4 4 7q4 2 35 20t59 34t30 16q8 0 46 -46.5t52 -67.5q20 2 30 2t30 -2q51 71 92 112l6 2q4 0 124 -70 q4 -3 4 -7q0 -25 -51 -138q17 -23 30 -52q149 -15 149 -31z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1408 768q0 -139 -94 -257t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224 q0 139 94 257t256.5 186.5t353.5 68.5t353.5 -68.5t256.5 -186.5t94 -257zM1792 512q0 -120 -71 -224.5t-195 -176.5q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7 q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132q58 -4 88 -4q161 0 309 45t264 129q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230z" /> | ||||||
|  | <glyph unicode="" d="M256 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 768q0 51 -39 89.5t-89 38.5h-352q0 58 48 159.5t48 160.5q0 98 -32 145t-128 47q-26 -26 -38 -85t-30.5 -125.5t-59.5 -109.5q-22 -23 -77 -91q-4 -5 -23 -30t-31.5 -41t-34.5 -42.5 t-40 -44t-38.5 -35.5t-40 -27t-35.5 -9h-32v-640h32q13 0 31.5 -3t33 -6.5t38 -11t35 -11.5t35.5 -12.5t29 -10.5q211 -73 342 -73h121q192 0 192 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5q32 1 53.5 47t21.5 81zM1536 769 q0 -89 -49 -163q9 -33 9 -69q0 -77 -38 -144q3 -21 3 -43q0 -101 -60 -178q1 -139 -85 -219.5t-227 -80.5h-36h-93q-96 0 -189.5 22.5t-216.5 65.5q-116 40 -138 40h-288q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5h274q36 24 137 155q58 75 107 128 q24 25 35.5 85.5t30.5 126.5t62 108q39 37 90 37q84 0 151 -32.5t102 -101.5t35 -186q0 -93 -48 -192h176q104 0 180 -76t76 -179z" /> | ||||||
|  | <glyph unicode="" d="M256 1088q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 512q0 35 -21.5 81t-53.5 47q15 17 25 47.5t10 55.5q0 69 -53 119q18 32 18 69t-17.5 73.5t-47.5 52.5q5 30 5 56q0 85 -49 126t-136 41h-128q-131 0 -342 -73q-5 -2 -29 -10.5 t-35.5 -12.5t-35 -11.5t-38 -11t-33 -6.5t-31.5 -3h-32v-640h32q16 0 35.5 -9t40 -27t38.5 -35.5t40 -44t34.5 -42.5t31.5 -41t23 -30q55 -68 77 -91q41 -43 59.5 -109.5t30.5 -125.5t38 -85q96 0 128 47t32 145q0 59 -48 160.5t-48 159.5h352q50 0 89 38.5t39 89.5z M1536 511q0 -103 -76 -179t-180 -76h-176q48 -99 48 -192q0 -118 -35 -186q-35 -69 -102 -101.5t-151 -32.5q-51 0 -90 37q-34 33 -54 82t-25.5 90.5t-17.5 84.5t-31 64q-48 50 -107 127q-101 131 -137 155h-274q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5 h288q22 0 138 40q128 44 223 66t200 22h112q140 0 226.5 -79t85.5 -216v-5q60 -77 60 -178q0 -22 -3 -43q38 -67 38 -144q0 -36 -9 -69q49 -74 49 -163z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="896" d="M832 1504v-1339l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1664 940q0 81 -21.5 143t-55 98.5t-81.5 59.5t-94 31t-98 8t-112 -25.5t-110.5 -64t-86.5 -72t-60 -61.5q-18 -22 -49 -22t-49 22q-24 28 -60 61.5t-86.5 72t-110.5 64t-112 25.5t-98 -8t-94 -31t-81.5 -59.5t-55 -98.5t-21.5 -143q0 -168 187 -355l581 -560l580 559 q188 188 188 356zM1792 940q0 -221 -229 -450l-623 -600q-18 -18 -44 -18t-44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5 q224 0 351 -124t127 -344z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M640 96q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-119 0 -203.5 84.5t-84.5 203.5v704q0 119 84.5 203.5t203.5 84.5h320q13 0 22.5 -9.5t9.5 -22.5q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-66 0 -113 -47t-47 -113v-704 q0 -66 47 -113t113 -47h288h11h13t11.5 -1t11.5 -3t8 -5.5t7 -9t2 -13.5zM1568 640q0 -26 -19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" d="M237 122h231v694h-231v-694zM483 1030q-1 52 -36 86t-93 34t-94.5 -34t-36.5 -86q0 -51 35.5 -85.5t92.5 -34.5h1q59 0 95 34.5t36 85.5zM1068 122h231v398q0 154 -73 233t-193 79q-136 0 -209 -117h2v101h-231q3 -66 0 -694h231v388q0 38 7 56q15 35 45 59.5t74 24.5 q116 0 116 -157v-371zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M480 672v448q0 14 -9 23t-23 9t-23 -9t-9 -23v-448q0 -14 9 -23t23 -9t23 9t9 23zM1152 320q0 -26 -19 -45t-45 -19h-429l-51 -483q-2 -12 -10.5 -20.5t-20.5 -8.5h-1q-27 0 -32 27l-76 485h-404q-26 0 -45 19t-19 45q0 123 78.5 221.5t177.5 98.5v512q-52 0 -90 38 t-38 90t38 90t90 38h640q52 0 90 -38t38 -90t-38 -90t-90 -38v-512q99 0 177.5 -98.5t78.5 -221.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1408 608v-320q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h704q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v320 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1792 1472v-512q0 -26 -19 -45t-45 -19t-45 19l-176 176l-652 -652q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l652 652l-176 176q-19 19 -19 45t19 45t45 19h512q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" d="M1184 640q0 -26 -19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45zM1536 992v-704q0 -119 -84.5 -203.5t-203.5 -84.5h-320q-13 0 -22.5 9.5t-9.5 22.5 q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q66 0 113 47t47 113v704q0 66 -47 113t-113 47h-288h-11h-13t-11.5 1t-11.5 3t-8 5.5t-7 9t-2 13.5q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M458 653q-74 162 -74 371h-256v-96q0 -78 94.5 -162t235.5 -113zM1536 928v96h-256q0 -209 -74 -371q141 29 235.5 113t94.5 162zM1664 1056v-128q0 -71 -41.5 -143t-112 -130t-173 -97.5t-215.5 -44.5q-42 -54 -95 -95q-38 -34 -52.5 -72.5t-14.5 -89.5q0 -54 30.5 -91 t97.5 -37q75 0 133.5 -45.5t58.5 -114.5v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 69 58.5 114.5t133.5 45.5q67 0 97.5 37t30.5 91q0 51 -14.5 89.5t-52.5 72.5q-53 41 -95 95q-113 5 -215.5 44.5t-173 97.5t-112 130t-41.5 143v128q0 40 28 68t68 28h288v96 q0 66 47 113t113 47h576q66 0 113 -47t47 -113v-96h288q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" d="M394 184q-8 -9 -20 3q-13 11 -4 19q8 9 20 -3q12 -11 4 -19zM352 245q9 -12 0 -19q-8 -6 -17 7t0 18q9 7 17 -6zM291 305q-5 -7 -13 -2q-10 5 -7 12q3 5 13 2q10 -5 7 -12zM322 271q-6 -7 -16 3q-9 11 -2 16q6 6 16 -3q9 -11 2 -16zM451 159q-4 -12 -19 -6q-17 4 -13 15 t19 7q16 -5 13 -16zM514 154q0 -11 -16 -11q-17 -2 -17 11q0 11 16 11q17 2 17 -11zM572 164q2 -10 -14 -14t-18 8t14 15q16 2 18 -9zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-224q-16 0 -24.5 1t-19.5 5t-16 14.5t-5 27.5v239q0 97 -52 142q57 6 102.5 18t94 39 t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204q-28 9 -81 -11t-92 -44l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103 q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52t-49.5 24l-20 3q-21 0 -29 -4.5t-5 -11.5t9 -14t13 -12l7 -5q22 -10 43.5 -38t31.5 -51l10 -23q13 -38 44 -61.5t67 -30t69.5 -7t55.5 3.5l23 4q0 -38 0.5 -103t0.5 -68q0 -22 -11 -33.5t-22 -13t-33 -1.5 h-224q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1280 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 288v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h427q21 -56 70.5 -92 t110.5 -36h256q61 0 110.5 36t70.5 92h427q40 0 68 -28t28 -68zM1339 936q-17 -40 -59 -40h-256v-448q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v448h-256q-42 0 -59 40q-17 39 14 69l448 448q18 19 45 19t45 -19l448 -448q31 -30 14 -69z" /> | ||||||
|  | <glyph unicode="" d="M1407 710q0 44 -7 113.5t-18 96.5q-12 30 -17 44t-9 36.5t-4 48.5q0 23 5 68.5t5 67.5q0 37 -10 55q-4 1 -13 1q-19 0 -58 -4.5t-59 -4.5q-60 0 -176 24t-175 24q-43 0 -94.5 -11.5t-85 -23.5t-89.5 -34q-137 -54 -202 -103q-96 -73 -159.5 -189.5t-88 -236t-24.5 -248.5 q0 -40 12.5 -120t12.5 -121q0 -23 -11 -66.5t-11 -65.5t12 -36.5t34 -14.5q24 0 72.5 11t73.5 11q57 0 169.5 -15.5t169.5 -15.5q181 0 284 36q129 45 235.5 152.5t166 245.5t59.5 275zM1535 712q0 -165 -70 -327.5t-196 -288t-281 -180.5q-124 -44 -326 -44 q-57 0 -170 14.5t-169 14.5q-24 0 -72.5 -14.5t-73.5 -14.5q-73 0 -123.5 55.5t-50.5 128.5q0 24 11 68t11 67q0 40 -12.5 120.5t-12.5 121.5q0 111 18 217.5t54.5 209.5t100.5 194t150 156q78 59 232 120q194 78 316 78q60 0 175.5 -24t173.5 -24q19 0 57 5t58 5 q81 0 118 -50.5t37 -134.5q0 -23 -5 -68t-5 -68q0 -10 1 -18.5t3 -17t4 -13.5t6.5 -16t6.5 -17q16 -40 25 -118.5t9 -136.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1408 296q0 -27 -10 -70.5t-21 -68.5q-21 -50 -122 -106q-94 -51 -186 -51q-27 0 -52.5 3.5t-57.5 12.5t-47.5 14.5t-55.5 20.5t-49 18q-98 35 -175 83q-128 79 -264.5 215.5t-215.5 264.5q-48 77 -83 175q-3 9 -18 49t-20.5 55.5t-14.5 47.5t-12.5 57.5t-3.5 52.5 q0 92 51 186q56 101 106 122q25 11 68.5 21t70.5 10q14 0 21 -3q18 -6 53 -76q11 -19 30 -54t35 -63.5t31 -53.5q3 -4 17.5 -25t21.5 -35.5t7 -28.5q0 -20 -28.5 -50t-62 -55t-62 -53t-28.5 -46q0 -9 5 -22.5t8.5 -20.5t14 -24t11.5 -19q76 -137 174 -235t235 -174 q2 -1 19 -11.5t24 -14t20.5 -8.5t22.5 -5q18 0 46 28.5t53 62t55 62t50 28.5q14 0 28.5 -7t35.5 -21.5t25 -17.5q25 -15 53.5 -31t63.5 -35t54 -30q70 -35 76 -53q3 -7 3 -21z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1120 1280h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v832q0 66 -47 113t-113 47zM1408 1120v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832 q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M1152 1280h-1024v-1242l423 406l89 85l89 -85l423 -406v1242zM1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289 q0 34 19.5 62t52.5 41q21 9 44 9h1048z" /> | ||||||
|  | <glyph unicode="" d="M1280 343q0 11 -2 16q-3 8 -38.5 29.5t-88.5 49.5l-53 29q-5 3 -19 13t-25 15t-21 5q-18 0 -47 -32.5t-57 -65.5t-44 -33q-7 0 -16.5 3.5t-15.5 6.5t-17 9.5t-14 8.5q-99 55 -170.5 126.5t-126.5 170.5q-2 3 -8.5 14t-9.5 17t-6.5 15.5t-3.5 16.5q0 13 20.5 33.5t45 38.5 t45 39.5t20.5 36.5q0 10 -5 21t-15 25t-13 19q-3 6 -15 28.5t-25 45.5t-26.5 47.5t-25 40.5t-16.5 18t-16 2q-48 0 -101 -22q-46 -21 -80 -94.5t-34 -130.5q0 -16 2.5 -34t5 -30.5t9 -33t10 -29.5t12.5 -33t11 -30q60 -164 216.5 -320.5t320.5 -216.5q6 -2 30 -11t33 -12.5 t29.5 -10t33 -9t30.5 -5t34 -2.5q57 0 130.5 34t94.5 80q22 53 22 101zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1620 1128q-67 -98 -162 -167q1 -14 1 -42q0 -130 -38 -259.5t-115.5 -248.5t-184.5 -210.5t-258 -146t-323 -54.5q-271 0 -496 145q35 -4 78 -4q225 0 401 138q-105 2 -188 64.5t-114 159.5q33 -5 61 -5q43 0 85 11q-112 23 -185.5 111.5t-73.5 205.5v4q68 -38 146 -41 q-66 44 -105 115t-39 154q0 88 44 163q121 -149 294.5 -238.5t371.5 -99.5q-8 38 -8 74q0 134 94.5 228.5t228.5 94.5q140 0 236 -102q109 21 205 78q-37 -115 -142 -178q93 10 186 50z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="768" d="M511 980h257l-30 -284h-227v-824h-341v824h-170v284h170v171q0 182 86 275.5t283 93.5h227v-284h-142q-39 0 -62.5 -6.5t-34 -23.5t-13.5 -34.5t-3 -49.5v-142z" /> | ||||||
|  | <glyph unicode="" d="M1536 640q0 -251 -146.5 -451.5t-378.5 -277.5q-27 -5 -39.5 7t-12.5 30v211q0 97 -52 142q57 6 102.5 18t94 39t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204q-28 9 -81 -11t-92 -44l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5 q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52t-49.5 24l-20 3q-21 0 -29 -4.5t-5 -11.5t9 -14t13 -12l7 -5q22 -10 43.5 -38t31.5 -51l10 -23 q13 -38 44 -61.5t67 -30t69.5 -7t55.5 3.5l23 4q0 -38 0.5 -89t0.5 -54q0 -18 -13 -30t-40 -7q-232 77 -378.5 277.5t-146.5 451.5q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1664 960v-256q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45v256q0 106 -75 181t-181 75t-181 -75t-75 -181v-192h96q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h672v192q0 185 131.5 316.5t316.5 131.5 t316.5 -131.5t131.5 -316.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1760 1408q66 0 113 -47t47 -113v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600zM160 1280q-13 0 -22.5 -9.5t-9.5 -22.5v-224h1664v224q0 13 -9.5 22.5t-22.5 9.5h-1600zM1760 0q13 0 22.5 9.5t9.5 22.5v608h-1664v-608 q0 -13 9.5 -22.5t22.5 -9.5h1600zM256 128v128h256v-128h-256zM640 128v128h384v-128h-384z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M384 192q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM896 69q2 -28 -17 -48q-18 -21 -47 -21h-135q-25 0 -43 16.5t-20 41.5q-22 229 -184.5 391.5t-391.5 184.5q-25 2 -41.5 20t-16.5 43v135q0 29 21 47q17 17 43 17h5q160 -13 306 -80.5 t259 -181.5q114 -113 181.5 -259t80.5 -306zM1408 67q2 -27 -18 -47q-18 -20 -46 -20h-143q-26 0 -44.5 17.5t-19.5 42.5q-12 215 -101 408.5t-231.5 336t-336 231.5t-408.5 102q-25 1 -42.5 19.5t-17.5 43.5v143q0 28 20 46q18 18 44 18h3q262 -13 501.5 -120t425.5 -294 q187 -186 294 -425.5t120 -501.5z" /> | ||||||
|  | <glyph unicode="" d="M1040 320q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5zM1296 320q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5zM1408 160v320q0 13 -9.5 22.5t-22.5 9.5 h-1216q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h1216q13 0 22.5 9.5t9.5 22.5zM178 640h1180l-157 482q-4 13 -16 21.5t-26 8.5h-782q-14 0 -26 -8.5t-16 -21.5zM1536 480v-320q0 -66 -47 -113t-113 -47h-1216q-66 0 -113 47t-47 113v320q0 25 16 75 l197 606q17 53 63 86t101 33h782q55 0 101 -33t63 -86l197 -606q16 -50 16 -75z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1664 896q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5v-384q0 -52 -38 -90t-90 -38q-417 347 -812 380q-58 -19 -91 -66t-31 -100.5t40 -92.5q-20 -33 -23 -65.5t6 -58t33.5 -55t48 -50t61.5 -50.5q-29 -58 -111.5 -83t-168.5 -11.5t-132 55.5q-7 23 -29.5 87.5 t-32 94.5t-23 89t-15 101t3.5 98.5t22 110.5h-122q-66 0 -113 47t-47 113v192q0 66 47 113t113 47h480q435 0 896 384q52 0 90 -38t38 -90v-384zM1536 292v954q-394 -302 -768 -343v-270q377 -42 768 -341z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM183 128h1298q-164 181 -246.5 411.5t-82.5 484.5q0 256 -320 256t-320 -256q0 -254 -82.5 -484.5t-246.5 -411.5zM1664 128q0 -52 -38 -90t-90 -38 h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5z" /> | ||||||
|  | <glyph unicode="" d="M1376 640l138 -135q30 -28 20 -70q-12 -41 -52 -51l-188 -48l53 -186q12 -41 -19 -70q-29 -31 -70 -19l-186 53l-48 -188q-10 -40 -51 -52q-12 -2 -19 -2q-31 0 -51 22l-135 138l-135 -138q-28 -30 -70 -20q-41 11 -51 52l-48 188l-186 -53q-41 -12 -70 19q-31 29 -19 70 l53 186l-188 48q-40 10 -52 51q-10 42 20 70l138 135l-138 135q-30 28 -20 70q12 41 52 51l188 48l-53 186q-12 41 19 70q29 31 70 19l186 -53l48 188q10 41 51 51q41 12 70 -19l135 -139l135 139q29 30 70 19q41 -10 51 -51l48 -188l186 53q41 12 70 -19q31 -29 19 -70 l-53 -186l188 -48q40 -10 52 -51q10 -42 -20 -70z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M256 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 768q0 51 -39 89.5t-89 38.5h-576q0 20 15 48.5t33 55t33 68t15 84.5q0 67 -44.5 97.5t-115.5 30.5q-24 0 -90 -139q-24 -44 -37 -65q-40 -64 -112 -145q-71 -81 -101 -106 q-69 -57 -140 -57h-32v-640h32q72 0 167 -32t193.5 -64t179.5 -32q189 0 189 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5h331q52 0 90 38t38 90zM1792 769q0 -105 -75.5 -181t-180.5 -76h-169q-4 -62 -37 -119q3 -21 3 -43 q0 -101 -60 -178q1 -139 -85 -219.5t-227 -80.5q-133 0 -322 69q-164 59 -223 59h-288q-53 0 -90.5 37.5t-37.5 90.5v640q0 53 37.5 90.5t90.5 37.5h288q10 0 21.5 4.5t23.5 14t22.5 18t24 22.5t20.5 21.5t19 21.5t14 17q65 74 100 129q13 21 33 62t37 72t40.5 63t55 49.5 t69.5 17.5q125 0 206.5 -67t81.5 -189q0 -68 -22 -128h374q104 0 180 -76t76 -179z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1376 128h32v640h-32q-35 0 -67.5 12t-62.5 37t-50 46t-49 54q-2 3 -3.5 4.5t-4 4.5t-4.5 5q-72 81 -112 145q-14 22 -38 68q-1 3 -10.5 22.5t-18.5 36t-20 35.5t-21.5 30.5t-18.5 11.5q-71 0 -115.5 -30.5t-44.5 -97.5q0 -43 15 -84.5t33 -68t33 -55t15 -48.5h-576 q-50 0 -89 -38.5t-39 -89.5q0 -52 38 -90t90 -38h331q-15 -17 -25 -47.5t-10 -55.5q0 -69 53 -119q-18 -32 -18 -69t17.5 -73.5t47.5 -52.5q-4 -24 -4 -56q0 -85 48.5 -126t135.5 -41q84 0 183 32t194 64t167 32zM1664 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45 t45 -19t45 19t19 45zM1792 768v-640q0 -53 -37.5 -90.5t-90.5 -37.5h-288q-59 0 -223 -59q-190 -69 -317 -69q-142 0 -230 77.5t-87 217.5l1 5q-61 76 -61 178q0 22 3 43q-33 57 -37 119h-169q-105 0 -180.5 76t-75.5 181q0 103 76 179t180 76h374q-22 60 -22 128 q0 122 81.5 189t206.5 67q38 0 69.5 -17.5t55 -49.5t40.5 -63t37 -72t33 -62q35 -55 100 -129q2 -3 14 -17t19 -21.5t20.5 -21.5t24 -22.5t22.5 -18t23.5 -14t21.5 -4.5h288q53 0 90.5 -37.5t37.5 -90.5z" /> | ||||||
|  | <glyph unicode="" d="M1280 -64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 700q0 189 -167 189q-26 0 -56 -5q-16 30 -52.5 47.5t-73.5 17.5t-69 -18q-50 53 -119 53q-25 0 -55.5 -10t-47.5 -25v331q0 52 -38 90t-90 38q-51 0 -89.5 -39t-38.5 -89v-576 q-20 0 -48.5 15t-55 33t-68 33t-84.5 15q-67 0 -97.5 -44.5t-30.5 -115.5q0 -24 139 -90q44 -24 65 -37q64 -40 145 -112q81 -71 106 -101q57 -69 57 -140v-32h640v32q0 72 32 167t64 193.5t32 179.5zM1536 705q0 -133 -69 -322q-59 -164 -59 -223v-288q0 -53 -37.5 -90.5 t-90.5 -37.5h-640q-53 0 -90.5 37.5t-37.5 90.5v288q0 10 -4.5 21.5t-14 23.5t-18 22.5t-22.5 24t-21.5 20.5t-21.5 19t-17 14q-74 65 -129 100q-21 13 -62 33t-72 37t-63 40.5t-49.5 55t-17.5 69.5q0 125 67 206.5t189 81.5q68 0 128 -22v374q0 104 76 180t179 76 q105 0 181 -75.5t76 -180.5v-169q62 -4 119 -37q21 3 43 3q101 0 178 -60q139 1 219.5 -85t80.5 -227z" /> | ||||||
|  | <glyph unicode="" d="M1408 576q0 84 -32 183t-64 194t-32 167v32h-640v-32q0 -35 -12 -67.5t-37 -62.5t-46 -50t-54 -49q-9 -8 -14 -12q-81 -72 -145 -112q-22 -14 -68 -38q-3 -1 -22.5 -10.5t-36 -18.5t-35.5 -20t-30.5 -21.5t-11.5 -18.5q0 -71 30.5 -115.5t97.5 -44.5q43 0 84.5 15t68 33 t55 33t48.5 15v-576q0 -50 38.5 -89t89.5 -39q52 0 90 38t38 90v331q46 -35 103 -35q69 0 119 53q32 -18 69 -18t73.5 17.5t52.5 47.5q24 -4 56 -4q85 0 126 48.5t41 135.5zM1280 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 580 q0 -142 -77.5 -230t-217.5 -87l-5 1q-76 -61 -178 -61q-22 0 -43 3q-54 -30 -119 -37v-169q0 -105 -76 -180.5t-181 -75.5q-103 0 -179 76t-76 180v374q-54 -22 -128 -22q-121 0 -188.5 81.5t-67.5 206.5q0 38 17.5 69.5t49.5 55t63 40.5t72 37t62 33q55 35 129 100 q3 2 17 14t21.5 19t21.5 20.5t22.5 24t18 22.5t14 23.5t4.5 21.5v288q0 53 37.5 90.5t90.5 37.5h640q53 0 90.5 -37.5t37.5 -90.5v-288q0 -59 59 -223q69 -190 69 -317z" /> | ||||||
|  | <glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-502l189 189q19 19 19 45t-19 45l-91 91q-18 18 -45 18t-45 -18l-362 -362l-91 -91q-18 -18 -18 -45t18 -45l91 -91l362 -362q18 -18 45 -18t45 18l91 91q18 18 18 45t-18 45l-189 189h502q26 0 45 19t19 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1285 640q0 27 -18 45l-91 91l-362 362q-18 18 -45 18t-45 -18l-91 -91q-18 -18 -18 -45t18 -45l189 -189h-502q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h502l-189 -189q-19 -19 -19 -45t19 -45l91 -91q18 -18 45 -18t45 18l362 362l91 91q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1284 641q0 27 -18 45l-362 362l-91 91q-18 18 -45 18t-45 -18l-91 -91l-362 -362q-18 -18 -18 -45t18 -45l91 -91q18 -18 45 -18t45 18l189 189v-502q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v502l189 -189q19 -19 45 -19t45 19l91 91q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1284 639q0 27 -18 45l-91 91q-18 18 -45 18t-45 -18l-189 -189v502q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-502l-189 189q-19 19 -45 19t-45 -19l-91 -91q-18 -18 -18 -45t18 -45l362 -362l91 -91q18 -18 45 -18t45 18l91 91l362 362q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1042 887q-2 -1 -9.5 -9.5t-13.5 -9.5q2 0 4.5 5t5 11t3.5 7q6 7 22 15q14 6 52 12q34 8 51 -11 q-2 2 9.5 13t14.5 12q3 2 15 4.5t15 7.5l2 22q-12 -1 -17.5 7t-6.5 21q0 -2 -6 -8q0 7 -4.5 8t-11.5 -1t-9 -1q-10 3 -15 7.5t-8 16.5t-4 15q-2 5 -9.5 10.5t-9.5 10.5q-1 2 -2.5 5.5t-3 6.5t-4 5.5t-5.5 2.5t-7 -5t-7.5 -10t-4.5 -5q-3 2 -6 1.5t-4.5 -1t-4.5 -3t-5 -3.5 q-3 -2 -8.5 -3t-8.5 -2q15 5 -1 11q-10 4 -16 3q9 4 7.5 12t-8.5 14h5q-1 4 -8.5 8.5t-17.5 8.5t-13 6q-8 5 -34 9.5t-33 0.5q-5 -6 -4.5 -10.5t4 -14t3.5 -12.5q1 -6 -5.5 -13t-6.5 -12q0 -7 14 -15.5t10 -21.5q-3 -8 -16 -16t-16 -12q-5 -8 -1.5 -18.5t10.5 -16.5 q2 -2 1.5 -4t-3.5 -4.5t-5.5 -4t-6.5 -3.5l-3 -2q-11 -5 -20.5 6t-13.5 26q-7 25 -16 30q-23 8 -29 -1q-5 13 -41 26q-25 9 -58 4q6 1 0 15q-7 15 -19 12q3 6 4 17.5t1 13.5q3 13 12 23q1 1 7 8.5t9.5 13.5t0.5 6q35 -4 50 11q5 5 11.5 17t10.5 17q9 6 14 5.5t14.5 -5.5 t14.5 -5q14 -1 15.5 11t-7.5 20q12 -1 3 17q-5 7 -8 9q-12 4 -27 -5q-8 -4 2 -8q-1 1 -9.5 -10.5t-16.5 -17.5t-16 5q-1 1 -5.5 13.5t-9.5 13.5q-8 0 -16 -15q3 8 -11 15t-24 8q19 12 -8 27q-7 4 -20.5 5t-19.5 -4q-5 -7 -5.5 -11.5t5 -8t10.5 -5.5t11.5 -4t8.5 -3 q14 -10 8 -14q-2 -1 -8.5 -3.5t-11.5 -4.5t-6 -4q-3 -4 0 -14t-2 -14q-5 5 -9 17.5t-7 16.5q7 -9 -25 -6l-10 1q-4 0 -16 -2t-20.5 -1t-13.5 8q-4 8 0 20q1 4 4 2q-4 3 -11 9.5t-10 8.5q-46 -15 -94 -41q6 -1 12 1q5 2 13 6.5t10 5.5q34 14 42 7l5 5q14 -16 20 -25 q-7 4 -30 1q-20 -6 -22 -12q7 -12 5 -18q-4 3 -11.5 10t-14.5 11t-15 5q-16 0 -22 -1q-146 -80 -235 -222q7 -7 12 -8q4 -1 5 -9t2.5 -11t11.5 3q9 -8 3 -19q1 1 44 -27q19 -17 21 -21q3 -11 -10 -18q-1 2 -9 9t-9 4q-3 -5 0.5 -18.5t10.5 -12.5q-7 0 -9.5 -16t-2.5 -35.5 t-1 -23.5l2 -1q-3 -12 5.5 -34.5t21.5 -19.5q-13 -3 20 -43q6 -8 8 -9q3 -2 12 -7.5t15 -10t10 -10.5q4 -5 10 -22.5t14 -23.5q-2 -6 9.5 -20t10.5 -23q-1 0 -2.5 -1t-2.5 -1q3 -7 15.5 -14t15.5 -13q1 -3 2 -10t3 -11t8 -2q2 20 -24 62q-15 25 
-17 29q-3 5 -5.5 15.5 t-4.5 14.5q2 0 6 -1.5t8.5 -3.5t7.5 -4t2 -3q-3 -7 2 -17.5t12 -18.5t17 -19t12 -13q6 -6 14 -19.5t0 -13.5q9 0 20 -10t17 -20q5 -8 8 -26t5 -24q2 -7 8.5 -13.5t12.5 -9.5l16 -8t13 -7q5 -2 18.5 -10.5t21.5 -11.5q10 -4 16 -4t14.5 2.5t13.5 3.5q15 2 29 -15t21 -21 q36 -19 55 -11q-2 -1 0.5 -7.5t8 -15.5t9 -14.5t5.5 -8.5q5 -6 18 -15t18 -15q6 4 7 9q-3 -8 7 -20t18 -10q14 3 14 32q-31 -15 -49 18q0 1 -2.5 5.5t-4 8.5t-2.5 8.5t0 7.5t5 3q9 0 10 3.5t-2 12.5t-4 13q-1 8 -11 20t-12 15q-5 -9 -16 -8t-16 9q0 -1 -1.5 -5.5t-1.5 -6.5 q-13 0 -15 1q1 3 2.5 17.5t3.5 22.5q1 4 5.5 12t7.5 14.5t4 12.5t-4.5 9.5t-17.5 2.5q-19 -1 -26 -20q-1 -3 -3 -10.5t-5 -11.5t-9 -7q-7 -3 -24 -2t-24 5q-13 8 -22.5 29t-9.5 37q0 10 2.5 26.5t3 25t-5.5 24.5q3 2 9 9.5t10 10.5q2 1 4.5 1.5t4.5 0t4 1.5t3 6q-1 1 -4 3 q-3 3 -4 3q7 -3 28.5 1.5t27.5 -1.5q15 -11 22 2q0 1 -2.5 9.5t-0.5 13.5q5 -27 29 -9q3 -3 15.5 -5t17.5 -5q3 -2 7 -5.5t5.5 -4.5t5 0.5t8.5 6.5q10 -14 12 -24q11 -40 19 -44q7 -3 11 -2t4.5 9.5t0 14t-1.5 12.5l-1 8v18l-1 8q-15 3 -18.5 12t1.5 18.5t15 18.5q1 1 8 3.5 t15.5 6.5t12.5 8q21 19 15 35q7 0 11 9q-1 0 -5 3t-7.5 5t-4.5 2q9 5 2 16q5 3 7.5 11t7.5 10q9 -12 21 -2q7 8 1 16q5 7 20.5 10.5t18.5 9.5q7 -2 8 2t1 12t3 12q4 5 15 9t13 5l17 11q3 4 0 4q18 -2 31 11q10 11 -6 20q3 6 -3 9.5t-15 5.5q3 1 11.5 0.5t10.5 1.5 q15 10 -7 16q-17 5 -43 -12zM879 10q206 36 351 189q-3 3 -12.5 4.5t-12.5 3.5q-18 7 -24 8q1 7 -2.5 13t-8 9t-12.5 8t-11 7q-2 2 -7 6t-7 5.5t-7.5 4.5t-8.5 2t-10 -1l-3 -1q-3 -1 -5.5 -2.5t-5.5 -3t-4 -3t0 -2.5q-21 17 -36 22q-5 1 -11 5.5t-10.5 7t-10 1.5t-11.5 -7 q-5 -5 -6 -15t-2 -13q-7 5 0 17.5t2 18.5q-3 6 -10.5 4.5t-12 -4.5t-11.5 -8.5t-9 -6.5t-8.5 -5.5t-8.5 -7.5q-3 -4 -6 -12t-5 -11q-2 4 -11.5 6.5t-9.5 5.5q2 -10 4 -35t5 -38q7 -31 -12 -48q-27 -25 -29 -40q-4 -22 12 -26q0 -7 -8 -20.5t-7 -21.5q0 -6 2 -16z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M384 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1028 484l-682 -682q-37 -37 -90 -37q-52 0 -91 37l-106 108q-38 36 -38 90q0 53 38 91l681 681q39 -98 114.5 -173.5t173.5 -114.5zM1662 919q0 -39 -23 -106q-47 -134 -164.5 -217.5 t-258.5 -83.5q-185 0 -316.5 131.5t-131.5 316.5t131.5 316.5t316.5 131.5q58 0 121.5 -16.5t107.5 -46.5q16 -11 16 -28t-16 -28l-293 -169v-224l193 -107q5 3 79 48.5t135.5 81t70.5 35.5q15 0 23.5 -10t8.5 -25z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1024 128h640v128h-640v-128zM640 640h1024v128h-1024v-128zM1280 1152h384v128h-384v-128zM1792 320v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 832v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19 t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1403 1241q17 -41 -14 -70l-493 -493v-742q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-256 256q-19 19 -19 45v486l-493 493q-31 29 -14 70q17 39 59 39h1280q42 0 59 -39z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M640 1280h512v128h-512v-128zM1792 640v-480q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v480h672v-160q0 -26 19 -45t45 -19h320q26 0 45 19t19 45v160h672zM1024 640v-128h-256v128h256zM1792 1120v-384h-1792v384q0 66 47 113t113 47h352v160q0 40 28 68 t68 28h576q40 0 68 -28t28 -68v-160h352q66 0 113 -47t47 -113z" /> | ||||||
|  | <glyph unicode="" d="M1283 995l-355 -355l355 -355l144 144q29 31 70 14q39 -17 39 -59v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l144 144l-355 355l-355 -355l144 -144q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l144 -144 l355 355l-355 355l-144 -144q-19 -19 -45 -19q-12 0 -24 5q-40 17 -40 59v448q0 26 19 45t45 19h448q42 0 59 -40q17 -39 -14 -69l-144 -144l355 -355l355 355l-144 144q-31 30 -14 69q17 40 59 40h448q26 0 45 -19t19 -45v-448q0 -42 -39 -59q-13 -5 -25 -5q-26 0 -45 19z " /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M593 640q-162 -5 -265 -128h-134q-82 0 -138 40.5t-56 118.5q0 353 124 353q6 0 43.5 -21t97.5 -42.5t119 -21.5q67 0 133 23q-5 -37 -5 -66q0 -139 81 -256zM1664 3q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5 t43 97.5t62 81t85.5 53.5t111.5 20q10 0 43 -21.5t73 -48t107 -48t135 -21.5t135 21.5t107 48t73 48t43 21.5q61 0 111.5 -20t85.5 -53.5t62 -81t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM640 1280q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75 t75 -181zM1344 896q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5zM1920 671q0 -78 -56 -118.5t-138 -40.5h-134q-103 123 -265 128q81 117 81 256q0 29 -5 66q66 -23 133 -23q59 0 119 21.5t97.5 42.5 t43.5 21q124 0 124 -353zM1792 1280q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1456 320q0 40 -28 68l-208 208q-28 28 -68 28q-42 0 -72 -32q3 -3 19 -18.5t21.5 -21.5t15 -19t13 -25.5t3.5 -27.5q0 -40 -28 -68t-68 -28q-15 0 -27.5 3.5t-25.5 13t-19 15t-21.5 21.5t-18.5 19q-33 -31 -33 -73q0 -40 28 -68l206 -207q27 -27 68 -27q40 0 68 26 l147 146q28 28 28 67zM753 1025q0 40 -28 68l-206 207q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l208 -208q27 -27 68 -27q42 0 72 31q-3 3 -19 18.5t-21.5 21.5t-15 19t-13 25.5t-3.5 27.5q0 40 28 68t68 28q15 0 27.5 -3.5t25.5 -13t19 -15 t21.5 -21.5t18.5 -19q33 31 33 73zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-206 207q-83 83 -83 203q0 123 88 209l-88 88q-86 -88 -208 -88q-120 0 -204 84l-208 208q-84 84 -84 204t85 203l147 146q83 83 203 83q121 0 204 -85l206 -207 q83 -83 83 -203q0 -123 -88 -209l88 -88q86 88 208 88q120 0 204 -84l208 -208q84 -84 84 -204z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088q-185 0 -316.5 131.5t-131.5 316.5q0 132 71 241.5t187 163.5q-2 28 -2 43q0 212 150 362t362 150q158 0 286.5 -88t187.5 -230q70 62 166 62q106 0 181 -75t75 -181q0 -75 -41 -138q129 -30 213 -134.5t84 -239.5z " /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1527 88q56 -89 21.5 -152.5t-140.5 -63.5h-1152q-106 0 -140.5 63.5t21.5 152.5l503 793v399h-64q-26 0 -45 19t-19 45t19 45t45 19h512q26 0 45 -19t19 -45t-19 -45t-45 -19h-64v-399zM748 813l-272 -429h712l-272 429l-20 31v37v399h-128v-399v-37z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M960 640q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1260 576l507 -398q28 -20 25 -56q-5 -35 -35 -51l-128 -64q-13 -7 -29 -7q-17 0 -31 8l-690 387l-110 -66q-8 -4 -12 -5q14 -49 10 -97q-7 -77 -56 -147.5t-132 -123.5q-132 -84 -277 -84 q-136 0 -222 78q-90 84 -79 207q7 76 56 147t131 124q132 84 278 84q83 0 151 -31q9 13 22 22l122 73l-122 73q-13 9 -22 22q-68 -31 -151 -31q-146 0 -278 84q-82 53 -131 124t-56 147q-5 59 15.5 113t63.5 93q85 79 222 79q145 0 277 -84q83 -52 132 -123t56 -148 q4 -48 -10 -97q4 -1 12 -5l110 -66l690 387q14 8 31 8q16 0 29 -7l128 -64q30 -16 35 -51q3 -36 -25 -56zM579 836q46 42 21 108t-106 117q-92 59 -192 59q-74 0 -113 -36q-46 -42 -21 -108t106 -117q92 -59 192 -59q74 0 113 36zM494 91q81 51 106 117t-21 108 q-39 36 -113 36q-100 0 -192 -59q-81 -51 -106 -117t21 -108q39 -36 113 -36q100 0 192 59zM672 704l96 -58v11q0 36 33 56l14 8l-79 47l-26 -26q-3 -3 -10 -11t-12 -12q-2 -2 -4 -3.5t-3 -2.5zM896 480l96 -32l736 576l-128 64l-768 -431v-113l-160 -96l9 -8q2 -2 7 -6 q4 -4 11 -12t11 -12l26 -26zM1600 64l128 64l-520 408l-177 -138q-2 -3 -13 -7z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1696 1152q40 0 68 -28t28 -68v-1216q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v288h-544q-40 0 -68 28t-28 68v672q0 40 20 88t48 76l408 408q28 28 76 48t88 20h416q40 0 68 -28t28 -68v-328q68 40 128 40h416zM1152 939l-299 -299h299v299zM512 1323l-299 -299 h299v299zM708 676l316 316v416h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h512v256q0 40 20 88t48 76zM1664 -128v1152h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h896z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1404 151q0 -117 -79 -196t-196 -79q-135 0 -235 100l-777 776q-113 115 -113 271q0 159 110 270t269 111q158 0 273 -113l605 -606q10 -10 10 -22q0 -16 -30.5 -46.5t-46.5 -30.5q-13 0 -23 10l-606 607q-79 77 -181 77q-106 0 -179 -75t-73 -181q0 -105 76 -181 l776 -777q63 -63 145 -63q64 0 106 42t42 106q0 82 -63 145l-581 581q-26 24 -60 24q-29 0 -48 -19t-19 -48q0 -32 25 -59l410 -410q10 -10 10 -22q0 -16 -31 -47t-47 -31q-12 0 -22 10l-410 410q-63 61 -63 149q0 82 57 139t139 57q88 0 149 -63l581 -581q100 -98 100 -235 z" /> | ||||||
|  | <glyph unicode="" d="M384 0h768v384h-768v-384zM1280 0h128v896q0 14 -10 38.5t-20 34.5l-281 281q-10 10 -34 20t-39 10v-416q0 -40 -28 -68t-68 -28h-576q-40 0 -68 28t-28 68v416h-128v-1280h128v416q0 40 28 68t68 28h832q40 0 68 -28t28 -68v-416zM896 928v320q0 13 -9.5 22.5t-22.5 9.5 h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5zM1536 896v-928q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h928q40 0 88 -20t76 -48l280 -280q28 -28 48 -76t20 -88z" /> | ||||||
|  | <glyph unicode="" d="M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M1536 192v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 704v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 1216v-128q0 -26 -19 -45 t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M384 128q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM384 640q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1152q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z M1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M381 -84q0 -80 -54.5 -126t-135.5 -46q-106 0 -172 66l57 88q49 -45 106 -45q29 0 50.5 14.5t21.5 42.5q0 64 -105 56l-26 56q8 10 32.5 43.5t42.5 54t37 38.5v1q-16 0 -48.5 -1t-48.5 -1v-53h-106v152h333v-88l-95 -115q51 -12 81 -49t30 -88zM383 543v-159h-362 q-6 36 -6 54q0 51 23.5 93t56.5 68t66 47.5t56.5 43.5t23.5 45q0 25 -14.5 38.5t-39.5 13.5q-46 0 -81 -58l-85 59q24 51 71.5 79.5t105.5 28.5q73 0 123 -41.5t50 -112.5q0 -50 -34 -91.5t-75 -64.5t-75.5 -50.5t-35.5 -52.5h127v60h105zM1792 224v-192q0 -13 -9.5 -22.5 t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1123v-99h-335v99h107q0 41 0.5 122t0.5 121v12h-2q-8 -17 -50 -54l-71 76l136 127h106v-404h108zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5 t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1760 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-1728q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h1728zM483 704q-28 35 -51 80q-48 97 -48 188q0 181 134 309q133 127 393 127q50 0 167 -19q66 -12 177 -48q10 -38 21 -118q14 -123 14 -183q0 -18 -5 -45l-12 -3l-84 6 l-14 2q-50 149 -103 205q-88 91 -210 91q-114 0 -182 -59q-67 -58 -67 -146q0 -73 66 -140t279 -129q69 -20 173 -66q58 -28 95 -52h-743zM990 448h411q7 -39 7 -92q0 -111 -41 -212q-23 -55 -71 -104q-37 -35 -109 -81q-80 -48 -153 -66q-80 -21 -203 -21q-114 0 -195 23 l-140 40q-57 16 -72 28q-8 8 -8 22v13q0 108 -2 156q-1 30 0 68l2 37v44l102 2q15 -34 30 -71t22.5 -56t12.5 -27q35 -57 80 -94q43 -36 105 -57q59 -22 132 -22q64 0 139 27q77 26 122 86q47 61 47 129q0 84 -81 157q-34 29 -137 71z" /> | ||||||
|  | <glyph unicode="" d="M48 1313q-37 2 -45 4l-3 88q13 1 40 1q60 0 112 -4q132 -7 166 -7q86 0 168 3q116 4 146 5q56 0 86 2l-1 -14l2 -64v-9q-60 -9 -124 -9q-60 0 -79 -25q-13 -14 -13 -132q0 -13 0.5 -32.5t0.5 -25.5l1 -229l14 -280q6 -124 51 -202q35 -59 96 -92q88 -47 177 -47 q104 0 191 28q56 18 99 51q48 36 65 64q36 56 53 114q21 73 21 229q0 79 -3.5 128t-11 122.5t-13.5 159.5l-4 59q-5 67 -24 88q-34 35 -77 34l-100 -2l-14 3l2 86h84l205 -10q76 -3 196 10l18 -2q6 -38 6 -51q0 -7 -4 -31q-45 -12 -84 -13q-73 -11 -79 -17q-15 -15 -15 -41 q0 -7 1.5 -27t1.5 -31q8 -19 22 -396q6 -195 -15 -304q-15 -76 -41 -122q-38 -65 -112 -123q-75 -57 -182 -89q-109 -33 -255 -33q-167 0 -284 46q-119 47 -179 122q-61 76 -83 195q-16 80 -16 237v333q0 188 -17 213q-25 36 -147 39zM1536 -96v64q0 14 -9 23t-23 9h-1472 q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h1472q14 0 23 9t9 23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M512 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23 v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 160v192 q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192 q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1664 1248v-1088q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1344q66 0 113 -47t47 -113 z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1190 955l293 293l-107 107l-293 -293zM1637 1248q0 -27 -18 -45l-1286 -1286q-18 -18 -45 -18t-45 18l-198 198q-18 18 -18 45t18 45l1286 1286q18 18 45 18t45 -18l198 -198q18 -18 18 -45zM286 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM636 1276 l196 -60l-196 -60l-60 -196l-60 196l-196 60l196 60l60 196zM1566 798l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM926 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M640 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM256 640h384v256h-158q-13 0 -22 -9l-195 -195q-9 -9 -9 -22v-30zM1536 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM1792 1216v-1024q0 -15 -4 -26.5t-13.5 -18.5 t-16.5 -11.5t-23.5 -6t-22.5 -2t-25.5 0t-22.5 0.5q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-64q-3 0 -22.5 -0.5t-25.5 0t-22.5 2t-23.5 6t-16.5 11.5t-13.5 18.5t-4 26.5q0 26 19 45t45 19v320q0 8 -0.5 35t0 38 t2.5 34.5t6.5 37t14 30.5t22.5 30l198 198q19 19 50.5 32t58.5 13h160v192q0 26 19 45t45 19h1024q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103q-111 0 -218 32q59 93 78 164q9 34 54 211q20 -39 73 -67.5t114 -28.5q121 0 216 68.5t147 188.5t52 270q0 114 -59.5 214t-172.5 163t-255 63q-105 0 -196 -29t-154.5 -77t-109 -110.5t-67 -129.5t-21.5 -134 q0 -104 40 -183t117 -111q30 -12 38 20q2 7 8 31t8 30q6 23 -11 43q-51 61 -51 151q0 151 104.5 259.5t273.5 108.5q151 0 235.5 -82t84.5 -213q0 -170 -68.5 -289t-175.5 -119q-61 0 -98 43.5t-23 104.5q8 35 26.5 93.5t30 103t11.5 75.5q0 50 -27 83t-77 33 q-62 0 -105 -57t-43 -142q0 -73 25 -122l-99 -418q-17 -70 -13 -177q-206 91 -333 281t-127 423q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-725q85 122 108 210q9 34 53 209q21 -39 73.5 -67t112.5 -28q181 0 295.5 147.5t114.5 373.5q0 84 -35 162.5t-96.5 139t-152.5 97t-197 36.5q-104 0 -194.5 -28.5t-153 -76.5 t-107.5 -109.5t-66.5 -128t-21.5 -132.5q0 -102 39.5 -180t116.5 -110q13 -5 23.5 0t14.5 19q10 44 15 61q6 23 -11 42q-50 62 -50 150q0 150 103.5 256.5t270.5 106.5q149 0 232.5 -81t83.5 -210q0 -168 -67.5 -286t-173.5 -118q-60 0 -97 43.5t-23 103.5q8 34 26.5 92.5 t29.5 102t11 74.5q0 49 -26.5 81.5t-75.5 32.5q-61 0 -103.5 -56.5t-42.5 -139.5q0 -72 24 -121l-98 -414q-24 -100 -7 -254h-183q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960z" /> | ||||||
|  | <glyph unicode="" d="M678 -57q0 -38 -10 -71h-380q-95 0 -171.5 56.5t-103.5 147.5q24 45 69 77.5t100 49.5t107 24t107 7q32 0 49 -2q6 -4 30.5 -21t33 -23t31 -23t32 -25.5t27.5 -25.5t26.5 -29.5t21 -30.5t17.5 -34.5t9.5 -36t4.5 -40.5zM385 294q-234 -7 -385 -85v433q103 -118 273 -118 q32 0 70 5q-21 -61 -21 -86q0 -67 63 -149zM558 805q0 -100 -43.5 -160.5t-140.5 -60.5q-51 0 -97 26t-78 67.5t-56 93.5t-35.5 104t-11.5 99q0 96 51.5 165t144.5 69q66 0 119 -41t84 -104t47 -130t16 -128zM1536 896v-736q0 -119 -84.5 -203.5t-203.5 -84.5h-468 q39 73 39 157q0 66 -22 122.5t-55.5 93t-72 71t-72 59.5t-55.5 54.5t-22 59.5q0 36 23 68t56 61.5t65.5 64.5t55.5 93t23 131t-26.5 145.5t-75.5 118.5q-6 6 -14 11t-12.5 7.5t-10 9.5t-10.5 17h135l135 64h-437q-138 0 -244.5 -38.5t-182.5 -133.5q0 126 81 213t207 87h960 q119 0 203.5 -84.5t84.5 -203.5v-96h-256v256h-128v-256h-256v-128h256v-256h128v256h256z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M876 71q0 21 -4.5 40.5t-9.5 36t-17.5 34.5t-21 30.5t-26.5 29.5t-27.5 25.5t-32 25.5t-31 23t-33 23t-30.5 21q-17 2 -50 2q-54 0 -106 -7t-108 -25t-98 -46t-69 -75t-27 -107q0 -68 35.5 -121.5t93 -84t120.5 -45.5t127 -15q59 0 112.5 12.5t100.5 39t74.5 73.5 t27.5 110zM756 933q0 60 -16.5 127.5t-47 130.5t-84 104t-119.5 41q-93 0 -144 -69t-51 -165q0 -47 11.5 -99t35.5 -104t56 -93.5t78 -67.5t97 -26q97 0 140.5 60.5t43.5 160.5zM625 1408h437l-135 -79h-135q71 -45 110 -126t39 -169q0 -74 -23 -131.5t-56 -92.5t-66 -64.5 t-56 -61t-23 -67.5q0 -26 16.5 -51t43 -48t58.5 -48t64 -55.5t58.5 -66t43 -85t16.5 -106.5q0 -160 -140 -282q-152 -131 -420 -131q-59 0 -119.5 10t-122 33.5t-108.5 58t-77 89t-30 121.5q0 61 37 135q32 64 96 110.5t145 71t155 36t150 13.5q-64 83 -64 149q0 12 2 23.5 t5 19.5t8 21.5t7 21.5q-40 -5 -70 -5q-149 0 -255.5 98t-106.5 246q0 140 95 250.5t234 141.5q94 20 187 20zM1664 1152v-128h-256v-256h-128v256h-256v128h256v256h128v-256h256z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M768 384h384v96h-128v448h-114l-148 -137l77 -80q42 37 55 57h2v-288h-128v-96zM1280 640q0 -70 -21 -142t-59.5 -134t-101.5 -101t-138 -39t-138 39t-101.5 101t-59.5 134t-21 142t21 142t59.5 134t101.5 101t138 39t138 -39t101.5 -101t59.5 -134t21 -142zM1792 384 v512q-106 0 -181 75t-75 181h-1152q0 -106 -75 -181t-181 -75v-512q106 0 181 -75t75 -181h1152q0 106 75 181t181 75zM1920 1216v-1152q0 -26 -19 -45t-45 -19h-1792q-26 0 -45 19t-19 45v1152q0 26 19 45t45 19h1792q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M1024 320q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="640" d="M640 1088v-896q0 -26 -19 -45t-45 -19t-45 19l-448 448q-19 19 -19 45t19 45l448 448q19 19 45 19t45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="640" d="M576 640q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19t-19 45v896q0 26 19 45t45 19t45 -19l448 -448q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M160 0h608v1152h-640v-1120q0 -13 9.5 -22.5t22.5 -9.5zM1536 32v1120h-640v-1152h608q13 0 22.5 9.5t9.5 22.5zM1664 1248v-1216q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1344q66 0 113 -47t47 -113z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45zM1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 826v-794q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v794q44 -49 101 -87q362 -246 497 -345q57 -42 92.5 -65.5t94.5 -48t110 -24.5h1h1q51 0 110 24.5t94.5 48t92.5 65.5q170 123 498 345q57 39 100 87zM1792 1120q0 -79 -49 -151t-122 -123 q-376 -261 -468 -325q-10 -7 -42.5 -30.5t-54 -38t-52 -32.5t-57.5 -27t-50 -9h-1h-1q-23 0 -50 9t-57.5 27t-52 32.5t-54 38t-42.5 30.5q-91 64 -262 182.5t-205 142.5q-62 42 -117 115.5t-55 136.5q0 78 41.5 130t118.5 52h1472q65 0 112.5 -47t47.5 -113z" /> | ||||||
|  | <glyph unicode="" d="M349 911v-991h-330v991h330zM370 1217q1 -73 -50.5 -122t-135.5 -49h-2q-82 0 -132 49t-50 122q0 74 51.5 122.5t134.5 48.5t133 -48.5t51 -122.5zM1536 488v-568h-329v530q0 105 -40.5 164.5t-126.5 59.5q-63 0 -105.5 -34.5t-63.5 -85.5q-11 -30 -11 -81v-553h-329 q2 399 2 647t-1 296l-1 48h329v-144h-2q20 32 41 56t56.5 52t87 43.5t114.5 15.5q171 0 275 -113.5t104 -332.5z" /> | ||||||
|  | <glyph unicode="" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61q-172 0 -327 72.5t-264 204.5q-7 10 -6.5 22.5t8.5 20.5l137 138q10 9 25 9q16 -2 23 -12q73 -95 179 -147t225 -52q104 0 198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5 t-163.5 109.5t-198.5 40.5q-98 0 -188 -35.5t-160 -101.5l137 -138q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l130 -129q107 101 244.5 156.5t284.5 55.5q156 0 298 -61t245 -164t164 -245t61 -298z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1771 0q0 -53 -37 -90l-107 -108q-39 -37 -91 -37q-53 0 -90 37l-363 364q-38 36 -38 90q0 53 43 96l-256 256l-126 -126q-14 -14 -34 -14t-34 14q2 -2 12.5 -12t12.5 -13t10 -11.5t10 -13.5t6 -13.5t5.5 -16.5t1.5 -18q0 -38 -28 -68q-3 -3 -16.5 -18t-19 -20.5 t-18.5 -16.5t-22 -15.5t-22 -9t-26 -4.5q-40 0 -68 28l-408 408q-28 28 -28 68q0 13 4.5 26t9 22t15.5 22t16.5 18.5t20.5 19t18 16.5q30 28 68 28q10 0 18 -1.5t16.5 -5.5t13.5 -6t13.5 -10t11.5 -10t13 -12.5t12 -12.5q-14 14 -14 34t14 34l348 348q14 14 34 14t34 -14 q-2 2 -12.5 12t-12.5 13t-10 11.5t-10 13.5t-6 13.5t-5.5 16.5t-1.5 18q0 38 28 68q3 3 16.5 18t19 20.5t18.5 16.5t22 15.5t22 9t26 4.5q40 0 68 -28l408 -408q28 -28 28 -68q0 -13 -4.5 -26t-9 -22t-15.5 -22t-16.5 -18.5t-20.5 -19t-18 -16.5q-30 -28 -68 -28 q-10 0 -18 1.5t-16.5 5.5t-13.5 6t-13.5 10t-11.5 10t-13 12.5t-12 12.5q14 -14 14 -34t-14 -34l-126 -126l256 -256q43 43 96 43q52 0 91 -37l363 -363q37 -39 37 -91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M384 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM576 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1004 351l101 382q6 26 -7.5 48.5t-38.5 29.5 t-48 -6.5t-30 -39.5l-101 -382q-60 -5 -107 -43.5t-63 -98.5q-20 -77 20 -146t117 -89t146 20t89 117q16 60 -6 117t-72 91zM1664 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 1024q0 53 -37.5 90.5 t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1472 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1792 384q0 -261 -141 -483q-19 -29 -54 -29h-1402q-35 0 -54 29 q-141 221 -141 483q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M896 1152q-204 0 -381.5 -69.5t-282 -187.5t-104.5 -255q0 -112 71.5 -213.5t201.5 -175.5l87 -50l-27 -96q-24 -91 -70 -172q152 63 275 171l43 38l57 -6q69 -8 130 -8q204 0 381.5 69.5t282 187.5t104.5 255t-104.5 255t-282 187.5t-381.5 69.5zM1792 640 q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22h-5q-15 0 -27 10.5t-16 27.5v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281q0 174 120 321.5 t326 233t450 85.5t450 -85.5t326 -233t120 -321.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M704 1152q-153 0 -286 -52t-211.5 -141t-78.5 -191q0 -82 53 -158t149 -132l97 -56l-35 -84q34 20 62 39l44 31l53 -10q78 -14 153 -14q153 0 286 52t211.5 141t78.5 191t-78.5 191t-211.5 141t-286 52zM704 1280q191 0 353.5 -68.5t256.5 -186.5t94 -257t-94 -257 t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224q0 139 94 257t256.5 186.5 t353.5 68.5zM1526 111q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132q58 -4 88 -4q161 0 309 45t264 129 q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230q0 -120 -71 -224.5t-195 -176.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="896" d="M885 970q18 -20 7 -44l-540 -1157q-13 -25 -42 -25q-4 0 -14 2q-17 5 -25.5 19t-4.5 30l197 808l-406 -101q-4 -1 -12 -1q-18 0 -31 11q-18 15 -13 39l201 825q4 14 16 23t28 9h328q19 0 32 -12.5t13 -29.5q0 -8 -5 -18l-171 -463l396 98q8 2 12 2q19 0 34 -15z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 288v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192q0 52 38 90t90 38h512v192h-96q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-96v-192h512q52 0 90 -38t38 -90v-192h96q40 0 68 -28t28 -68 z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M896 708v-580q0 -104 -76 -180t-180 -76t-180 76t-76 180q0 26 19 45t45 19t45 -19t19 -45q0 -50 39 -89t89 -39t89 39t39 89v580q33 11 64 11t64 -11zM1664 681q0 -13 -9.5 -22.5t-22.5 -9.5q-11 0 -23 10q-49 46 -93 69t-102 23q-68 0 -128 -37t-103 -97 q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -28 -17q-18 0 -29 17q-4 6 -14.5 24t-17.5 28q-43 60 -102.5 97t-127.5 37t-127.5 -37t-102.5 -97q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -29 -17q-17 0 -28 17q-4 6 -14.5 24t-17.5 28q-43 60 -103 97t-128 37q-58 0 -102 -23t-93 -69 q-12 -10 -23 -10q-13 0 -22.5 9.5t-9.5 22.5q0 5 1 7q45 183 172.5 319.5t298 204.5t360.5 68q140 0 274.5 -40t246.5 -113.5t194.5 -187t115.5 -251.5q1 -2 1 -7zM896 1408v-98q-42 2 -64 2t-64 -2v98q0 26 19 45t45 19t45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M768 -128h896v640h-416q-40 0 -68 28t-28 68v416h-384v-1152zM1024 1312v64q0 13 -9.5 22.5t-22.5 9.5h-704q-13 0 -22.5 -9.5t-9.5 -22.5v-64q0 -13 9.5 -22.5t22.5 -9.5h704q13 0 22.5 9.5t9.5 22.5zM1280 640h299l-299 299v-299zM1792 512v-672q0 -40 -28 -68t-68 -28 h-960q-40 0 -68 28t-28 68v160h-544q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h1088q40 0 68 -28t28 -68v-328q21 -13 36 -28l408 -408q28 -28 48 -76t20 -88z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M736 960q0 -13 -9.5 -22.5t-22.5 -9.5t-22.5 9.5t-9.5 22.5q0 46 -54 71t-106 25q-13 0 -22.5 9.5t-9.5 22.5t9.5 22.5t22.5 9.5q50 0 99.5 -16t87 -54t37.5 -90zM896 960q0 72 -34.5 134t-90 101.5t-123 62t-136.5 22.5t-136.5 -22.5t-123 -62t-90 -101.5t-34.5 -134 q0 -101 68 -180q10 -11 30.5 -33t30.5 -33q128 -153 141 -298h228q13 145 141 298q10 11 30.5 33t30.5 33q68 79 68 180zM1024 960q0 -155 -103 -268q-45 -49 -74.5 -87t-59.5 -95.5t-34 -107.5q47 -28 47 -82q0 -37 -25 -64q25 -27 25 -64q0 -52 -45 -81q13 -23 13 -47 q0 -46 -31.5 -71t-77.5 -25q-20 -44 -60 -70t-87 -26t-87 26t-60 70q-46 0 -77.5 25t-31.5 71q0 24 13 47q-45 29 -45 81q0 37 25 64q-25 27 -25 64q0 54 47 82q-4 50 -34 107.5t-59.5 95.5t-74.5 87q-103 113 -103 268q0 99 44.5 184.5t117 142t164 89t186.5 32.5 t186.5 -32.5t164 -89t117 -142t44.5 -184.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 352v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5q-12 0 -24 10l-319 320q-9 9 -9 22q0 14 9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h1376q13 0 22.5 -9.5t9.5 -22.5zM1792 896q0 -14 -9 -23l-320 -320q-9 -9 -23 -9 q-13 0 -22.5 9.5t-9.5 22.5v192h-1376q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1376v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1280 608q0 14 -9 23t-23 9h-224v352q0 13 -9.5 22.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-352h-224q-13 0 -22.5 -9.5t-9.5 -22.5q0 -14 9 -23l352 -352q9 -9 23 -9t23 9l351 351q10 12 10 24zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1280 672q0 14 -9 23l-352 352q-9 9 -23 9t-23 -9l-351 -351q-10 -12 -10 -24q0 -14 9 -23t23 -9h224v-352q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5v352h224q13 0 22.5 9.5t9.5 22.5zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M384 192q0 -26 -19 -45t-45 -19t-45 19t-19 45t19 45t45 19t45 -19t19 -45zM1408 131q0 -121 -73 -190t-194 -69h-874q-121 0 -194 69t-73 190q0 68 5.5 131t24 138t47.5 132.5t81 103t120 60.5q-22 -52 -22 -120v-203q-58 -20 -93 -70t-35 -111q0 -80 56 -136t136 -56 t136 56t56 136q0 61 -35.5 111t-92.5 70v203q0 62 25 93q132 -104 295 -104t295 104q25 -31 25 -93v-64q-106 0 -181 -75t-75 -181v-89q-32 -29 -32 -71q0 -40 28 -68t68 -28t68 28t28 68q0 42 -32 71v89q0 52 38 90t90 38t90 -38t38 -90v-89q-32 -29 -32 -71q0 -40 28 -68 t68 -28t68 28t28 68q0 42 -32 71v89q0 68 -34.5 127.5t-93.5 93.5q0 10 0.5 42.5t0 48t-2.5 41.5t-7 47t-13 40q68 -15 120 -60.5t81 -103t47.5 -132.5t24 -138t5.5 -131zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5 t271.5 -112.5t112.5 -271.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1280 832q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 832q0 -62 -35.5 -111t-92.5 -70v-395q0 -159 -131.5 -271.5t-316.5 -112.5t-316.5 112.5t-131.5 271.5v132q-164 20 -274 128t-110 252v512q0 26 19 45t45 19q6 0 16 -2q17 30 47 48 t65 18q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5q-33 0 -64 18v-402q0 -106 94 -181t226 -75t226 75t94 181v402q-31 -18 -64 -18q-53 0 -90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5q35 0 65 -18t47 -48q10 2 16 2q26 0 45 -19t19 -45v-512q0 -144 -110 -252 t-274 -128v-132q0 -106 94 -181t226 -75t226 75t94 181v395q-57 21 -92.5 70t-35.5 111q0 80 56 136t136 56t136 -56t56 -136z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M640 1152h512v128h-512v-128zM288 1152v-1280h-64q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h64zM1408 1152v-1280h-1024v1280h128v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h128zM1792 928v-832q0 -92 -66 -158t-158 -66h-64v1280h64q92 0 158 -66 t66 -158z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM1664 128q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5 q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1664 896q0 80 -56 136t-136 56h-64v-384h64q80 0 136 56t56 136zM0 128h1792q0 -106 -75 -181t-181 -75h-1280q-106 0 -181 75t-75 181zM1856 896q0 -159 -112.5 -271.5t-271.5 -112.5h-64v-32q0 -92 -66 -158t-158 -66h-704q-92 0 -158 66t-66 158v736q0 26 19 45 t45 19h1152q159 0 271.5 -112.5t112.5 -271.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M640 1472v-640q0 -61 -35.5 -111t-92.5 -70v-779q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v779q-57 20 -92.5 70t-35.5 111v640q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45 t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45zM1408 1472v-1600q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v512h-224q-13 0 -22.5 9.5t-9.5 22.5v800q0 132 94 226t226 94h256q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M1024 352v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM1024 608v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280z M768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 
22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 -128h384v1536h-1152v-1536h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM1408 1472v-1664q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1664q0 26 19 45t45 19h1280q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M896 -128h384v1152h-256v-32q0 -40 -28 -68t-68 -28h-448q-40 0 -68 28t-28 68v32h-256v-1152h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM896 1056v320q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-96h-128v96q0 13 -9.5 22.5 t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v96h128v-96q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1408 1088v-1280q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1280q0 26 19 45t45 19h320 v288q0 40 28 68t68 28h448q40 0 68 -28t28 -68v-288h320q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M640 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM256 640h384v256h-158q-14 -2 -22 -9l-195 -195q-7 -12 -9 -22v-30zM1536 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1664 800v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM1920 1344v-1152 q0 -26 -19 -45t-45 -19h-192q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-128q-26 0 -45 19t-19 45t19 45t45 19v416q0 26 13 58t32 51l198 198q19 19 51 32t58 13h160v320q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1280 416v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM640 1152h512v128h-512v-128zM256 1152v-1280h-32 q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h32zM1440 1152v-1280h-1088v1280h160v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h160zM1792 928v-832q0 -92 -66 -158t-158 -66h-32v1280h32q92 0 158 -66t66 -158z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1920 576q-1 -32 -288 -96l-352 -32l-224 -64h-64l-293 -352h69q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-96h-160h-64v32h64v416h-160l-192 -224h-96l-32 32v192h32v32h128v8l-192 24v128l192 24v8h-128v32h-32v192l32 32h96l192 -224h160v416h-64v32h64h160h96 q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-69l293 -352h64l224 -64l352 -32q261 -58 287 -93z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M640 640v384h-256v-256q0 -53 37.5 -90.5t90.5 -37.5h128zM1664 192v-192h-1152v192l128 192h-128q-159 0 -271.5 112.5t-112.5 271.5v320l-64 64l32 128h480l32 128h960l32 -192l-64 -32v-800z" /> | ||||||
|  | <glyph unicode="" d="M1280 192v896q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-512v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-896q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h512v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-320v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-320q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h320v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h320q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M627 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23zM1011 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM979 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23 l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M1075 224q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM1075 608q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393 q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M1075 672q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23zM1075 1056q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="640" d="M627 992q0 -13 -10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="640" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M1075 352q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M1075 800q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1792 544v832q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5zM1920 1376v-1088q0 -66 -47 -113t-113 -47h-544q0 -37 16 -77.5t32 -71t16 -43.5q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19 t-19 45q0 14 16 44t32 70t16 78h-544q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M416 256q-66 0 -113 47t-47 113v704q0 66 47 113t113 47h1088q66 0 113 -47t47 -113v-704q0 -66 -47 -113t-113 -47h-1088zM384 1120v-704q0 -13 9.5 -22.5t22.5 -9.5h1088q13 0 22.5 9.5t9.5 22.5v704q0 13 -9.5 22.5t-22.5 9.5h-1088q-13 0 -22.5 -9.5t-9.5 -22.5z M1760 192h160v-96q0 -40 -47 -68t-113 -28h-1600q-66 0 -113 28t-47 68v96h160h1600zM1040 96q16 0 16 16t-16 16h-160q-16 0 -16 -16t16 -16h160z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M640 128q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1024 288v960q0 13 -9.5 22.5t-22.5 9.5h-832q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h832q13 0 22.5 9.5t9.5 22.5zM1152 1248v-1088q0 -66 -47 -113t-113 -47h-832 q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h832q66 0 113 -47t47 -113z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="768" d="M464 128q0 33 -23.5 56.5t-56.5 23.5t-56.5 -23.5t-23.5 -56.5t23.5 -56.5t56.5 -23.5t56.5 23.5t23.5 56.5zM672 288v704q0 13 -9.5 22.5t-22.5 9.5h-512q-13 0 -22.5 -9.5t-9.5 -22.5v-704q0 -13 9.5 -22.5t22.5 -9.5h512q13 0 22.5 9.5t9.5 22.5zM480 1136 q0 16 -16 16h-160q-16 0 -16 -16t16 -16h160q16 0 16 16zM768 1152v-1024q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v1024q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" /> | ||||||
|  | <glyph unicode="" d="M768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103 t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M768 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z M1664 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M768 1216v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136zM1664 1216 v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1568" d="M496 192q0 -60 -42.5 -102t-101.5 -42q-60 0 -102 42t-42 102t42 102t102 42q59 0 101.5 -42t42.5 -102zM928 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM320 640q0 -66 -47 -113t-113 -47t-113 47t-47 113 t47 113t113 47t113 -47t47 -113zM1360 192q0 -46 -33 -79t-79 -33t-79 33t-33 79t33 79t79 33t79 -33t33 -79zM528 1088q0 -73 -51.5 -124.5t-124.5 -51.5t-124.5 51.5t-51.5 124.5t51.5 124.5t124.5 51.5t124.5 -51.5t51.5 -124.5zM992 1280q0 -80 -56 -136t-136 -56 t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1536 640q0 -40 -28 -68t-68 -28t-68 28t-28 68t28 68t68 28t68 -28t28 -68zM1328 1088q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5z" /> | ||||||
|  | <glyph unicode="" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 416q0 -166 -127 -451q-3 -7 -10.5 -24t-13.5 -30t-13 -22q-12 -17 -28 -17q-15 0 -23.5 10t-8.5 25q0 9 2.5 26.5t2.5 23.5q5 68 5 123q0 101 -17.5 181t-48.5 138.5t-80 101t-105.5 69.5t-133 42.5t-154 21.5t-175.5 6h-224v-256q0 -26 -19 -45t-45 -19t-45 19 l-512 512q-19 19 -19 45t19 45l512 512q19 19 45 19t45 -19t19 -45v-256h224q713 0 875 -403q53 -134 53 -333z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M640 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1280 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1440 320 q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11q-152 21 -195 21q-118 0 -187 -84t-69 -204q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5zM1664 496q0 -207 -61 -331q-38 -77 -105.5 -133t-141 -86 t-170 -47.5t-171.5 -22t-167 -4.5q-78 0 -142 3t-147.5 12.5t-152.5 30t-137 51.5t-121 81t-86 115q-62 123 -62 331q0 237 136 396q-27 82 -27 170q0 116 51 218q108 0 190 -39.5t189 -123.5q147 35 309 35q148 0 280 -32q105 82 187 121t189 39q51 -102 51 -218 q0 -87 -27 -168q136 -160 136 -398z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1536 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68zM1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320 q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1781 605q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39zM640 768h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68 v-853l256 315q44 53 116 87.5t140 34.5zM1909 605q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h192q54 0 99 -24.5t67 -70.5q15 -32 15 -68z " /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M896 608v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v224q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-224h224q14 0 23 -9t9 -23zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28 t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68zM1152 928v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704q93 0 158.5 -65.5t65.5 -158.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M928 1152q93 0 158.5 -65.5t65.5 -158.5v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68z M864 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576z" /> | ||||||
|  | <glyph unicode="" d="M1134 461q-37 -121 -138 -195t-228 -74t-228 74t-138 195q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5 t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1134 307q8 -25 -4 -48.5t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5q37 121 138 195t228 74t228 -74t138 -195zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204 t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1152 448q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h640q26 0 45 -19t19 -45zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M832 448v128q0 14 -9 23t-23 9h-192v192q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-192h-192q-14 0 -23 -9t-9 -23v-128q0 -14 9 -23t23 -9h192v-192q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v192h192q14 0 23 9t9 23zM1408 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5 t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 640q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1920 512q0 -212 -150 -362t-362 -150q-192 0 -338 128h-220q-146 -128 -338 -128q-212 0 -362 150 t-150 362t150 362t362 150h896q212 0 362 -150t150 -362z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M384 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM512 624v-96q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h224q16 0 16 -16zM384 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 368v-96q0 -16 -16 -16 h-864q-16 0 -16 16v96q0 16 16 16h864q16 0 16 -16zM768 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM640 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1024 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16 h96q16 0 16 -16zM896 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1280 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1152 880v-96 q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 880v-352q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h112v240q0 16 16 16h96q16 0 16 -16zM1792 128v896h-1664v-896 h1664zM1920 1024v-896q0 -53 -37.5 -90.5t-90.5 -37.5h-1664q-53 0 -90.5 37.5t-37.5 90.5v896q0 53 37.5 90.5t90.5 37.5h1664q53 0 90.5 -37.5t37.5 -90.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1664 491v616q-169 -91 -306 -91q-82 0 -145 32q-100 49 -184 76.5t-178 27.5q-173 0 -403 -127v-599q245 113 433 113q55 0 103.5 -7.5t98 -26t77 -31t82.5 -39.5l28 -14q44 -22 101 -22q120 0 293 92zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9 h-64q-14 0 -23 9t-9 23v1266q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102 q-15 -9 -33 -9q-16 0 -32 8q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M832 536v192q-181 -16 -384 -117v-185q205 96 384 110zM832 954v197q-172 -8 -384 -126v-189q215 111 384 118zM1664 491v184q-235 -116 -384 -71v224q-20 6 -39 15q-5 3 -33 17t-34.5 17t-31.5 15t-34.5 15.5t-32.5 13t-36 12.5t-35 8.5t-39.5 7.5t-39.5 4t-44 2 q-23 0 -49 -3v-222h19q102 0 192.5 -29t197.5 -82q19 -9 39 -15v-188q42 -17 91 -17q120 0 293 92zM1664 918v189q-169 -91 -306 -91q-45 0 -78 8v-196q148 -42 384 90zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v1266 q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102q-15 -9 -33 -9q-16 0 -32 8 q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M585 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23zM1664 96v-64q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h960q14 0 23 -9 t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M617 137l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23zM1208 1204l-373 -1291q-4 -13 -15.5 -19.5t-23.5 -2.5l-62 17q-13 4 -19.5 15.5t-2.5 24.5 l373 1291q4 13 15.5 19.5t23.5 2.5l62 -17q13 -4 19.5 -15.5t2.5 -24.5zM1865 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M640 454v-70q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-69l-397 -398q-19 -19 -19 -45t19 -45zM1792 416q0 -58 -17 -133.5t-38.5 -138t-48 -125t-40.5 -90.5l-20 -40q-8 -17 -28 -17q-6 0 -9 1 q-25 8 -23 34q43 400 -106 565q-64 71 -170.5 110.5t-267.5 52.5v-251q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-262q411 -28 599 -221q169 -173 169 -509z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1186 579l257 250l-356 52l-66 10l-30 60l-159 322v-963l59 -31l318 -168l-60 355l-12 66zM1638 841l-363 -354l86 -500q5 -33 -6 -51.5t-34 -18.5q-17 0 -40 12l-449 236l-449 -236q-23 -12 -40 -12q-23 0 -34 18.5t-6 51.5l86 500l-364 354q-32 32 -23 59.5t54 34.5 l502 73l225 455q20 41 49 41q28 0 49 -41l225 -455l502 -73q45 -7 54 -34.5t-24 -59.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1401 1187l-640 -1280q-17 -35 -57 -35q-5 0 -15 2q-22 5 -35.5 22.5t-13.5 39.5v576h-576q-22 0 -39.5 13.5t-22.5 35.5t4 42t29 30l1280 640q13 7 29 7q27 0 45 -19q15 -14 18.5 -34.5t-6.5 -39.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M557 256h595v595zM512 301l595 595h-595v-595zM1664 224v-192q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v224h-864q-14 0 -23 9t-9 23v864h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224v224q0 14 9 23t23 9h192q14 0 23 -9t9 -23 v-224h851l246 247q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-247 -246v-851h224q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M288 64q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM288 1216q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM928 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1024 1088q0 -52 -26 -96.5t-70 -69.5 q-2 -287 -226 -414q-68 -38 -203 -81q-128 -40 -169.5 -71t-41.5 -100v-26q44 -25 70 -69.5t26 -96.5q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 52 26 96.5t70 69.5v820q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136q0 -52 -26 -96.5t-70 -69.5v-497 q54 26 154 57q55 17 87.5 29.5t70.5 31t59 39.5t40.5 51t28 69.5t8.5 91.5q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M439 265l-256 -256q-10 -9 -23 -9q-12 0 -23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23zM608 224v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM384 448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23t9 23t23 9h320 q14 0 23 -9t9 -23zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-334 335q-21 21 -42 56l239 18l273 -274q27 -27 68 -27.5t68 26.5l147 146q28 28 28 67q0 40 -28 68l-274 275l18 239q35 -21 56 -42l336 -336q84 -86 84 -204zM1031 1044l-239 -18 l-273 274q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l274 -274l-18 -240q-35 21 -56 42l-336 336q-84 86 -84 204q0 120 85 203l147 146q83 83 203 83q121 0 204 -85l334 -335q21 -21 42 -56zM1664 960q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9 t-9 23t9 23t23 9h320q14 0 23 -9t9 -23zM1120 1504v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM1527 1353l-256 -256q-11 -9 -23 -9t-23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M704 280v-240q0 -16 -12 -28t-28 -12h-240q-16 0 -28 12t-12 28v240q0 16 12 28t28 12h240q16 0 28 -12t12 -28zM1020 880q0 -54 -15.5 -101t-35 -76.5t-55 -59.5t-57.5 -43.5t-61 -35.5q-41 -23 -68.5 -65t-27.5 -67q0 -17 -12 -32.5t-28 -15.5h-240q-15 0 -25.5 18.5 t-10.5 37.5v45q0 83 65 156.5t143 108.5q59 27 84 56t25 76q0 42 -46.5 74t-107.5 32q-65 0 -108 -29q-35 -25 -107 -115q-13 -16 -31 -16q-12 0 -25 8l-164 125q-13 10 -15.5 25t5.5 28q160 266 464 266q80 0 161 -31t146 -83t106 -127.5t41 -158.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="640" d="M640 192v-128q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64v384h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-576h64q26 0 45 -19t19 -45zM512 1344v-192q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v192 q0 26 19 45t45 19h256q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="640" d="M512 288v-224q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v224q0 26 19 45t45 19h256q26 0 45 -19t19 -45zM542 1344l-28 -768q-1 -26 -20.5 -45t-45.5 -19h-256q-26 0 -45.5 19t-20.5 45l-28 768q-1 26 17.5 45t44.5 19h320q26 0 44.5 -19t17.5 -45z" /> | ||||||
|  | <glyph unicode="" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1534 846v-206h-514l-3 27 q-4 28 -4 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q83 65 188 65q110 0 178 -59.5t68 -158.5q0 -56 -24.5 -103t-62 -76.5t-81.5 -58.5t-82 -50.5t-65.5 -51.5t-30.5 -63h232v80 h126z" /> | ||||||
|  | <glyph unicode="" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1536 -50v-206h-514l-4 27 q-3 45 -3 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q80 65 188 65q110 0 178 -59.5t68 -158.5q0 -66 -34.5 -118.5t-84 -86t-99.5 -62.5t-87 -63t-41 -73h232v80h126z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M896 128l336 384h-768l-336 -384h768zM1909 1205q15 -34 9.5 -71.5t-30.5 -65.5l-896 -1024q-38 -44 -96 -44h-768q-38 0 -69.5 20.5t-47.5 54.5q-15 34 -9.5 71.5t30.5 65.5l896 1024q38 44 96 44h768q38 0 69.5 -20.5t47.5 -54.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1664 438q0 -81 -44.5 -135t-123.5 -54q-41 0 -77.5 17.5t-59 38t-56.5 38t-71 17.5q-110 0 -110 -124q0 -39 16 -115t15 -115v-5q-22 0 -33 -1q-34 -3 -97.5 -11.5t-115.5 -13.5t-98 -5q-61 0 -103 26.5t-42 83.5q0 37 17.5 71t38 56.5t38 59t17.5 77.5q0 79 -54 123.5 t-135 44.5q-84 0 -143 -45.5t-59 -127.5q0 -43 15 -83t33.5 -64.5t33.5 -53t15 -50.5q0 -45 -46 -89q-37 -35 -117 -35q-95 0 -245 24q-9 2 -27.5 4t-27.5 4l-13 2q-1 0 -3 1q-2 0 -2 1v1024q2 -1 17.5 -3.5t34 -5t21.5 -3.5q150 -24 245 -24q80 0 117 35q46 44 46 89 q0 22 -15 50.5t-33.5 53t-33.5 64.5t-15 83q0 82 59 127.5t144 45.5q80 0 134 -44.5t54 -123.5q0 -41 -17.5 -77.5t-38 -59t-38 -56.5t-17.5 -71q0 -57 42 -83.5t103 -26.5q64 0 180 15t163 17v-2q-1 -2 -3.5 -17.5t-5 -34t-3.5 -21.5q-24 -150 -24 -245q0 -80 35 -117 q44 -46 89 -46q22 0 50.5 15t53 33.5t64.5 33.5t83 15q82 0 127.5 -59t45.5 -143z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M1152 832v-128q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-217 24 -364.5 187.5t-147.5 384.5v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -185 131.5 -316.5t316.5 -131.5 t316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45zM896 1216v-512q0 -132 -94 -226t-226 -94t-226 94t-94 226v512q0 132 94 226t226 94t226 -94t94 -226z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M271 591l-101 -101q-42 103 -42 214v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -53 15 -113zM1385 1193l-361 -361v-128q0 -132 -94 -226t-226 -94q-55 0 -109 19l-96 -96q97 -51 205 -51q185 0 316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45v-128 q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-125 13 -235 81l-254 -254q-10 -10 -23 -10t-23 10l-82 82q-10 10 -10 23t10 23l1234 1234q10 10 23 10t23 -10l82 -82q10 -10 10 -23 t-10 -23zM1005 1325l-621 -621v512q0 132 94 226t226 94q102 0 184.5 -59t116.5 -152z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M1088 576v640h-448v-1137q119 63 213 137q235 184 235 360zM1280 1344v-768q0 -86 -33.5 -170.5t-83 -150t-118 -127.5t-126.5 -103t-121 -77.5t-89.5 -49.5t-42.5 -20q-12 -6 -26 -6t-26 6q-16 7 -42.5 20t-89.5 49.5t-121 77.5t-126.5 103t-118 127.5t-83 150 t-33.5 170.5v768q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M128 -128h1408v1024h-1408v-1024zM512 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1280 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1664 1152v-1280 q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M512 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 1376v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800 q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37t3.5 49q5 10 14.5 26t37.5 53.5t60.5 70t85 67t108.5 52.5q-25 42 -25 86q0 66 47 113t113 47t113 -47t47 -113 q0 -33 -14 -64h302q0 11 7 20t18 11l448 96q3 1 7 1q12 0 20 -7q12 -9 12 -25z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1440 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1664 1376q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85q-3 -1 -9 -1 q-14 0 -23 9l-64 64q-17 19 -5 39l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1708 881l-188 -881h-304l181 849q4 21 1 43q-4 20 -16 35q-10 14 -28 24q-18 9 -40 9h-197l-205 -960h-303l204 960h-304l-205 -960h-304l272 1280h1139q157 0 245 -118q86 -116 52 -281z" /> | ||||||
|  | <glyph unicode="" d="M909 141l102 102q19 19 19 45t-19 45l-307 307l307 307q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M717 141l454 454q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1165 397l102 102q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M813 237l454 454q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-307 -307l-307 307q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1130 939l16 175h-884l47 -534h612l-22 -228l-197 -53l-196 53l-13 140h-175l22 -278l362 -100h4v1l359 99l50 544h-644l-15 181h674zM0 1408h1408l-128 -1438l-578 -162l-574 162z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M275 1408h1505l-266 -1333l-804 -267l-698 267l71 356h297l-29 -147l422 -161l486 161l68 339h-1208l58 297h1209l38 191h-1208z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M960 1280q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1792 352v-352q0 -22 -20 -30q-8 -2 -12 -2q-13 0 -23 9l-93 93q-119 -143 -318.5 -226.5t-429.5 -83.5t-429.5 83.5t-318.5 226.5l-93 -93q-9 -9 -23 -9q-4 0 -12 2q-20 8 -20 30v352 q0 14 9 23t23 9h352q22 0 30 -20q8 -19 -7 -35l-100 -100q67 -91 189.5 -153.5t271.5 -82.5v647h-192q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h192v163q-58 34 -93 92.5t-35 128.5q0 106 75 181t181 75t181 -75t75 -181q0 -70 -35 -128.5t-93 -92.5v-163h192q26 0 45 -19 t19 -45v-128q0 -26 -19 -45t-45 -19h-192v-647q149 20 271.5 82.5t189.5 153.5l-100 100q-15 16 -7 35q8 20 30 20h352q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1152" d="M1056 768q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v320q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45q0 106 -75 181t-181 75t-181 -75t-75 -181 v-320h736z" /> | ||||||
|  | <glyph unicode="" d="M1024 640q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM1152 640q0 159 -112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM1280 640q0 -212 -150 -362t-362 -150t-362 150 t-150 362t150 362t362 150t362 -150t150 -362zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM896 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM1408 800v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="384" d="M384 288v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 1312v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" /> | ||||||
|  | <glyph unicode="" d="M512 256q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM863 162q-13 232 -177 396t-396 177q-14 1 -24 -9t-10 -23v-128q0 -13 8.5 -22t21.5 -10q154 -11 264 -121t121 -264q1 -13 10 -21.5t22 -8.5h128q13 0 23 10 t9 24zM1247 161q-5 154 -56 297.5t-139.5 260t-205 205t-260 139.5t-297.5 56q-14 1 -23 -9q-10 -10 -10 -23v-128q0 -13 9 -22t22 -10q204 -7 378 -111.5t278.5 -278.5t111.5 -378q1 -13 10 -22t22 -9h128q13 0 23 10q11 9 9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1152 585q32 18 32 55t-32 55l-544 320q-31 19 -64 1q-32 -19 -32 -56v-640q0 -37 32 -56 q16 -8 32 -8q17 0 32 9z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1024 1084l316 -316l-572 -572l-316 316zM813 105l618 618q19 19 19 45t-19 45l-362 362q-18 18 -45 18t-45 -18l-618 -618q-19 -19 -19 -45t19 -45l362 -362q18 -18 45 -18t45 18zM1702 742l-907 -908q-37 -37 -90.5 -37t-90.5 37l-126 126q56 56 56 136t-56 136 t-136 56t-136 -56l-125 126q-37 37 -37 90.5t37 90.5l907 906q37 37 90.5 37t90.5 -37l125 -125q-56 -56 -56 -136t56 -136t136 -56t136 56l126 -125q37 -37 37 -90.5t-37 -90.5z" /> | ||||||
|  | <glyph unicode="" d="M1280 576v128q0 26 -19 45t-45 19h-896q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h896q26 0 45 19t19 45zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5 t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1152 736v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h832q14 0 23 -9t9 -23zM1280 288v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113zM1408 1120v-832q0 -119 -84.5 -203.5 t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M1018 933q-18 -37 -58 -37h-192v-864q0 -14 -9 -23t-23 -9h-704q-21 0 -29 18q-8 20 4 35l160 192q9 11 25 11h320v640h-192q-40 0 -58 37q-17 37 9 68l320 384q18 22 49 22t49 -22l320 -384q27 -32 9 -68z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M32 1280h704q13 0 22.5 -9.5t9.5 -23.5v-863h192q40 0 58 -37t-9 -69l-320 -384q-18 -22 -49 -22t-49 22l-320 384q-26 31 -9 69q18 37 58 37h192v640h-320q-14 0 -25 11l-160 192q-13 14 -4 34q9 19 29 19z" /> | ||||||
|  | <glyph unicode="" d="M685 237l614 614q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-467 -467l-211 211q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l358 -358q19 -19 45 -19t45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5 t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M404 428l152 -152l-52 -52h-56v96h-96v56zM818 818q14 -13 -3 -30l-291 -291q-17 -17 -30 -3q-14 13 3 30l291 291q17 17 30 3zM544 128l544 544l-288 288l-544 -544v-288h288zM1152 736l92 92q28 28 28 68t-28 68l-152 152q-28 28 -68 28t-68 -28l-92 -92zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M1280 608v480q0 26 -19 45t-45 19h-480q-42 0 -59 -39q-17 -41 14 -70l144 -144l-534 -534q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l534 534l144 -144q18 -19 45 -19q12 0 25 5q39 17 39 59zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M1005 435l352 352q19 19 19 45t-19 45l-352 352q-30 31 -69 14q-40 -17 -40 -59v-160q-119 0 -216 -19.5t-162.5 -51t-114 -79t-76.5 -95.5t-44.5 -109t-21.5 -111.5t-5 -110.5q0 -181 167 -404q10 -12 25 -12q7 0 13 3q22 9 19 33q-44 354 62 473q46 52 130 75.5 t224 23.5v-160q0 -42 40 -59q12 -5 24 -5q26 0 45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M640 448l256 128l-256 128v-256zM1024 1039v-542l-512 -256v542zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1145 861q18 -35 -5 -66l-320 -448q-19 -27 -52 -27t-52 27l-320 448q-23 31 -5 66q17 35 57 35h640q40 0 57 -35zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M1145 419q-17 -35 -57 -35h-640q-40 0 -57 35q-18 35 5 66l320 448q19 27 52 27t52 -27l320 -448q23 -31 5 -66zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M1088 640q0 -33 -27 -52l-448 -320q-31 -23 -66 -5q-35 17 -35 57v640q0 40 35 57q35 18 66 -5l448 -320q27 -19 27 -52zM1280 160v960q0 14 -9 23t-23 9h-960q-14 0 -23 -9t-9 -23v-960q0 -14 9 -23t23 -9h960q14 0 23 9t9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M976 229l35 -159q3 -12 -3 -22.5t-17 -14.5l-5 -1q-4 -2 -10.5 -3.5t-16 -4.5t-21.5 -5.5t-25.5 -5t-30 -5t-33.5 -4.5t-36.5 -3t-38.5 -1q-234 0 -409 130.5t-238 351.5h-95q-13 0 -22.5 9.5t-9.5 22.5v113q0 13 9.5 22.5t22.5 9.5h66q-2 57 1 105h-67q-14 0 -23 9 t-9 23v114q0 14 9 23t23 9h98q67 210 243.5 338t400.5 128q102 0 194 -23q11 -3 20 -15q6 -11 3 -24l-43 -159q-3 -13 -14 -19.5t-24 -2.5l-4 1q-4 1 -11.5 2.5l-17.5 3.5t-22.5 3.5t-26 3t-29 2.5t-29.5 1q-126 0 -226 -64t-150 -176h468q16 0 25 -12q10 -12 7 -26 l-24 -114q-5 -26 -32 -26h-488q-3 -37 0 -105h459q15 0 25 -12q9 -12 6 -27l-24 -112q-2 -11 -11 -18.5t-20 -7.5h-387q48 -117 149.5 -185.5t228.5 -68.5q18 0 36 1.5t33.5 3.5t29.5 4.5t24.5 5t18.5 4.5l12 3l5 2q13 5 26 -2q12 -7 15 -21z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M1020 399v-367q0 -14 -9 -23t-23 -9h-956q-14 0 -23 9t-9 23v150q0 13 9.5 22.5t22.5 9.5h97v383h-95q-14 0 -23 9.5t-9 22.5v131q0 14 9 23t23 9h95v223q0 171 123.5 282t314.5 111q185 0 335 -125q9 -8 10 -20.5t-7 -22.5l-103 -127q-9 -11 -22 -12q-13 -2 -23 7 q-5 5 -26 19t-69 32t-93 18q-85 0 -137 -47t-52 -123v-215h305q13 0 22.5 -9t9.5 -23v-131q0 -13 -9.5 -22.5t-22.5 -9.5h-305v-379h414v181q0 13 9 22.5t23 9.5h162q14 0 23 -9.5t9 -22.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M978 351q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5t-17.5 18q-17 21 -2 41l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43 t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242t255 134v180q0 13 9.5 22.5t22.5 9.5h135q14 0 23 -9t9 -23v-176q57 -6 110.5 -23t87 -33.5 t63.5 -37.5t39 -29t15 -14q17 -18 5 -38l-81 -146q-8 -15 -23 -16q-14 -3 -27 7q-3 3 -14.5 12t-39 26.5t-58.5 32t-74.5 26t-85.5 11.5q-95 0 -155 -43t-60 -111q0 -26 8.5 -48t29.5 -41.5t39.5 -33t56 -31t60.5 -27t70 -27.5q53 -20 81 -31.5t76 -35t75.5 -42.5t62 -50 t53 -63.5t31.5 -76.5t13 -94z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="898" d="M898 1066v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102 q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 -22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1027" d="M603 0h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214l-321 578q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61 l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103q0 -14 -9.5 -23t-22.5 -9h-290v-330q0 -13 -9.5 -22.5t-22.5 -9.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1664 352v-32q0 -132 -94 -226t-226 -94h-128q-132 0 -226 94t-94 226v480h-224q-2 -102 -14.5 -190.5t-30.5 -156t-48.5 -126.5t-57 -99.5t-67.5 -77.5t-69.5 -58.5t-74 -44t-69 -32t-65.5 -25.5q-4 -2 -32 -13q-8 -2 -12 -2q-22 0 -30 20l-71 178q-5 13 0 25t17 17 q7 3 20 7.5t18 6.5q31 12 46.5 18.5t44.5 20t45.5 26t42 32.5t40.5 42.5t34.5 53.5t30.5 68.5t22.5 83.5t17 103t6.5 123h-256q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h1216q14 0 23 -9t9 -23v-160q0 -14 -9 -23t-23 -9h-224v-512q0 -26 19 -45t45 -19h128q26 0 45 19t19 45 v64q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1280 1376v-160q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h960q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M514 341l81 299h-159l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5zM630 768l35 128h-292l32 -128h225zM822 768h139l-35 128h-70zM1271 340l78 300h-162l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3zM1382 768l33 128h-297l34 -128h230zM1792 736v-64q0 -14 -9 -23 t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h175l-33 128h-142q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h109l-89 344q-5 15 5 28 q10 12 26 12h137q26 0 31 -24l90 -360h359l97 360q7 24 31 24h126q24 0 31 -24l98 -360h365l93 360q5 24 31 24h137q16 0 26 -12q10 -13 5 -28l-91 -344h111q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-145l-34 -128h179q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M1167 896q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164 l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5zM952 351q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1t-47.5 -1v-338q8 0 37 -0.5t48 -0.5t53 1.5t58.5 4t57 8.5t55.5 14t47.5 21t39.5 30 t24.5 40t9.5 51zM881 827q0 33 -12.5 58.5t-30.5 42t-48 28t-55 16.5t-61.5 8t-58 2.5t-54 -1t-39.5 -0.5v-307q5 0 34.5 -0.5t46.5 0t50 2t55 5.5t51.5 11t48.5 18.5t37 27t27 38.5t9 51z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M1024 160v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1024 416v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28 t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1191 1128h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1572 -23 v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -11v-2l14 2q9 2 30 2h248v119h121zM1661 874v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162 l230 -662h70z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1191 104h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1661 -150 v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162l230 -662h70zM1572 1001v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -10v-3l14 3q9 1 30 1h248 v119h121z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1792 -32v-192q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832 q14 0 23 -9t9 -23zM1600 480v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1408 992v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1216 1504v-192q0 -14 -9 -23t-23 -9h-256 q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1216 -32v-192q0 -14 -9 -23t-23 -9h-256q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192 q14 0 23 -9t9 -23zM1408 480v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1600 992v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1792 1504v-192q0 -14 -9 -23t-23 -9h-832 q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" d="M1346 223q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23 zM1486 165q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5 t82 -252.5zM1456 882v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165z" /> | ||||||
|  | <glyph unicode="" d="M1346 1247q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9 t9 -23zM1456 -142v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165zM1486 1189q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13 q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5t82 -252.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M256 192q0 26 -19 45t-45 19q-27 0 -45.5 -19t-18.5 -45q0 -27 18.5 -45.5t45.5 -18.5q26 0 45 18.5t19 45.5zM416 704v-640q0 -26 -19 -45t-45 -19h-288q-26 0 -45 19t-19 45v640q0 26 19 45t45 19h288q26 0 45 -19t19 -45zM1600 704q0 -86 -55 -149q15 -44 15 -76 q3 -76 -43 -137q17 -56 0 -117q-15 -57 -54 -94q9 -112 -49 -181q-64 -76 -197 -78h-36h-76h-17q-66 0 -144 15.5t-121.5 29t-120.5 39.5q-123 43 -158 44q-26 1 -45 19.5t-19 44.5v641q0 25 18 43.5t43 20.5q24 2 76 59t101 121q68 87 101 120q18 18 31 48t17.5 48.5 t13.5 60.5q7 39 12.5 61t19.5 52t34 50q19 19 45 19q46 0 82.5 -10.5t60 -26t40 -40.5t24 -45t12 -50t5 -45t0.5 -39q0 -38 -9.5 -76t-19 -60t-27.5 -56q-3 -6 -10 -18t-11 -22t-8 -24h277q78 0 135 -57t57 -135z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M256 960q0 -26 -19 -45t-45 -19q-27 0 -45.5 19t-18.5 45q0 27 18.5 45.5t45.5 18.5q26 0 45 -18.5t19 -45.5zM416 448v640q0 26 -19 45t-45 19h-288q-26 0 -45 -19t-19 -45v-640q0 -26 19 -45t45 -19h288q26 0 45 19t19 45zM1545 597q55 -61 55 -149q-1 -78 -57.5 -135 t-134.5 -57h-277q4 -14 8 -24t11 -22t10 -18q18 -37 27 -57t19 -58.5t10 -76.5q0 -24 -0.5 -39t-5 -45t-12 -50t-24 -45t-40 -40.5t-60 -26t-82.5 -10.5q-26 0 -45 19q-20 20 -34 50t-19.5 52t-12.5 61q-9 42 -13.5 60.5t-17.5 48.5t-31 48q-33 33 -101 120q-49 64 -101 121 t-76 59q-25 2 -43 20.5t-18 43.5v641q0 26 19 44.5t45 19.5q35 1 158 44q77 26 120.5 39.5t121.5 29t144 15.5h17h76h36q133 -2 197 -78q58 -69 49 -181q39 -37 54 -94q17 -61 0 -117q46 -61 43 -137q0 -32 -15 -76z" /> | ||||||
|  | <glyph unicode="" d="M919 233v157q0 50 -29 50q-17 0 -33 -16v-224q16 -16 33 -16q29 0 29 49zM1103 355h66v34q0 51 -33 51t-33 -51v-34zM532 621v-70h-80v-423h-74v423h-78v70h232zM733 495v-367h-67v40q-39 -45 -76 -45q-33 0 -42 28q-6 16 -6 54v290h66v-270q0 -24 1 -26q1 -15 15 -15 q20 0 42 31v280h67zM985 384v-146q0 -52 -7 -73q-12 -42 -53 -42q-35 0 -68 41v-36h-67v493h67v-161q32 40 68 40q41 0 53 -42q7 -21 7 -74zM1236 255v-9q0 -29 -2 -43q-3 -22 -15 -40q-27 -40 -80 -40q-52 0 -81 38q-21 27 -21 86v129q0 59 20 86q29 38 80 38t78 -38 q21 -28 21 -86v-76h-133v-65q0 -51 34 -51q24 0 30 26q0 1 0.5 7t0.5 16.5v21.5h68zM785 1079v-156q0 -51 -32 -51t-32 51v156q0 52 32 52t32 -52zM1318 366q0 177 -19 260q-10 44 -43 73.5t-76 34.5q-136 15 -412 15q-275 0 -411 -15q-44 -5 -76.5 -34.5t-42.5 -73.5 q-20 -87 -20 -260q0 -176 20 -260q10 -43 42.5 -73t75.5 -35q137 -15 412 -15t412 15q43 5 75.5 35t42.5 73q20 84 20 260zM563 1017l90 296h-75l-51 -195l-53 195h-78l24 -69t23 -69q35 -103 46 -158v-201h74v201zM852 936v130q0 58 -21 87q-29 38 -78 38q-51 0 -78 -38 q-21 -29 -21 -87v-130q0 -58 21 -87q27 -38 78 -38q49 0 78 38q21 27 21 87zM1033 816h67v370h-67v-283q-22 -31 -42 -31q-15 0 -16 16q-1 2 -1 26v272h-67v-293q0 -37 6 -55q11 -27 43 -27q36 0 77 45v-40zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" d="M971 292v-211q0 -67 -39 -67q-23 0 -45 22v301q22 22 45 22q39 0 39 -67zM1309 291v-46h-90v46q0 68 45 68t45 -68zM343 509h107v94h-312v-94h105v-569h100v569zM631 -60h89v494h-89v-378q-30 -42 -57 -42q-18 0 -21 21q-1 3 -1 35v364h-89v-391q0 -49 8 -73 q12 -37 58 -37q48 0 102 61v-54zM1060 88v197q0 73 -9 99q-17 56 -71 56q-50 0 -93 -54v217h-89v-663h89v48q45 -55 93 -55q54 0 71 55q9 27 9 100zM1398 98v13h-91q0 -51 -2 -61q-7 -36 -40 -36q-46 0 -46 69v87h179v103q0 79 -27 116q-39 51 -106 51q-68 0 -107 -51 q-28 -37 -28 -116v-173q0 -79 29 -116q39 -51 108 -51q72 0 108 53q18 27 21 54q2 9 2 58zM790 1011v210q0 69 -43 69t-43 -69v-210q0 -70 43 -70t43 70zM1509 260q0 -234 -26 -350q-14 -59 -58 -99t-102 -46q-184 -21 -555 -21t-555 21q-58 6 -102.5 46t-57.5 99 q-26 112 -26 350q0 234 26 350q14 59 58 99t103 47q183 20 554 20t555 -20q58 -7 102.5 -47t57.5 -99q26 -112 26 -350zM511 1536h102l-121 -399v-271h-100v271q-14 74 -61 212q-37 103 -65 187h106l71 -263zM881 1203v-175q0 -81 -28 -118q-37 -51 -106 -51q-67 0 -105 51 q-28 38 -28 118v175q0 80 28 117q38 51 105 51q69 0 106 -51q28 -37 28 -117zM1216 1365v-499h-91v55q-53 -62 -103 -62q-46 0 -59 37q-8 24 -8 75v394h91v-367q0 -33 1 -35q3 -22 21 -22q27 0 57 43v381h91z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M597 869q-10 -18 -257 -456q-27 -46 -65 -46h-239q-21 0 -31 17t0 36l253 448q1 0 0 1l-161 279q-12 22 -1 37q9 15 32 15h239q40 0 66 -45zM1403 1511q11 -16 0 -37l-528 -934v-1l336 -615q11 -20 1 -37q-10 -15 -32 -15h-239q-42 0 -66 45l-339 622q18 32 531 942 q25 45 64 45h241q22 0 31 -15z" /> | ||||||
|  | <glyph unicode="" d="M685 771q0 1 -126 222q-21 34 -52 34h-184q-18 0 -26 -11q-7 -12 1 -29l125 -216v-1l-196 -346q-9 -14 0 -28q8 -13 24 -13h185q31 0 50 36zM1309 1268q-7 12 -24 12h-187q-30 0 -49 -35l-411 -729q1 -2 262 -481q20 -35 52 -35h184q18 0 25 12q8 13 -1 28l-260 476v1 l409 723q8 16 0 28zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1280 640q0 37 -30 54l-512 320q-31 20 -65 2q-33 -18 -33 -56v-640q0 -38 33 -56q16 -8 31 -8q20 0 34 10l512 320q30 17 30 54zM1792 640q0 -96 -1 -150t-8.5 -136.5t-22.5 -147.5q-16 -73 -69 -123t-124 -58q-222 -25 -671 -25t-671 25q-71 8 -124.5 58t-69.5 123 q-14 65 -21.5 147.5t-8.5 136.5t-1 150t1 150t8.5 136.5t22.5 147.5q16 73 69 123t124 58q222 25 671 25t671 -25q71 -8 124.5 -58t69.5 -123q14 -65 21.5 -147.5t8.5 -136.5t1 -150z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M402 829l494 -305l-342 -285l-490 319zM1388 274v-108l-490 -293v-1l-1 1l-1 -1v1l-489 293v108l147 -96l342 284v2l1 -1l1 1v-2l343 -284zM554 1418l342 -285l-494 -304l-338 270zM1390 829l338 -271l-489 -319l-343 285zM1239 1418l489 -319l-338 -270l-494 304z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M928 135v-151l-707 -1v151zM1169 481v-701l-1 -35v-1h-1132l-35 1h-1v736h121v-618h928v618h120zM241 393l704 -65l-13 -150l-705 65zM309 709l683 -183l-39 -146l-683 183zM472 1058l609 -360l-77 -130l-609 360zM832 1389l398 -585l-124 -85l-399 584zM1285 1536 l121 -697l-149 -26l-121 697z" /> | ||||||
|  | <glyph unicode="" d="M1362 110v648h-135q20 -63 20 -131q0 -126 -64 -232.5t-174 -168.5t-240 -62q-197 0 -337 135.5t-140 327.5q0 68 20 131h-141v-648q0 -26 17.5 -43.5t43.5 -17.5h1069q25 0 43 17.5t18 43.5zM1078 643q0 124 -90.5 211.5t-218.5 87.5q-127 0 -217.5 -87.5t-90.5 -211.5 t90.5 -211.5t217.5 -87.5q128 0 218.5 87.5t90.5 211.5zM1362 1003v165q0 28 -20 48.5t-49 20.5h-174q-29 0 -49 -20.5t-20 -48.5v-165q0 -29 20 -49t49 -20h174q29 0 49 20t20 49zM1536 1211v-1142q0 -81 -58 -139t-139 -58h-1142q-81 0 -139 58t-58 139v1142q0 81 58 139 t139 58h1142q81 0 139 -58t58 -139z" /> | ||||||
|  | <glyph unicode="" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960zM698 640q0 88 -62 150t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150zM1262 640q0 88 -62 150 t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150z" /> | ||||||
|  | <glyph unicode="" d="M768 914l201 -306h-402zM1133 384h94l-459 691l-459 -691h94l104 160h522zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M815 677q8 -63 -50.5 -101t-111.5 -6q-39 17 -53.5 58t-0.5 82t52 58q36 18 72.5 12t64 -35.5t27.5 -67.5zM926 698q-14 107 -113 164t-197 13q-63 -28 -100.5 -88.5t-34.5 -129.5q4 -91 77.5 -155t165.5 -56q91 8 152 84t50 168zM1165 1240q-20 27 -56 44.5t-58 22 t-71 12.5q-291 47 -566 -2q-43 -7 -66 -12t-55 -22t-50 -43q30 -28 76 -45.5t73.5 -22t87.5 -11.5q228 -29 448 -1q63 8 89.5 12t72.5 21.5t75 46.5zM1222 205q-8 -26 -15.5 -76.5t-14 -84t-28.5 -70t-58 -56.5q-86 -48 -189.5 -71.5t-202 -22t-201.5 18.5q-46 8 -81.5 18 t-76.5 27t-73 43.5t-52 61.5q-25 96 -57 292l6 16l18 9q223 -148 506.5 -148t507.5 148q21 -6 24 -23t-5 -45t-8 -37zM1403 1166q-26 -167 -111 -655q-5 -30 -27 -56t-43.5 -40t-54.5 -31q-252 -126 -610 -88q-248 27 -394 139q-15 12 -25.5 26.5t-17 35t-9 34t-6 39.5 t-5.5 35q-9 50 -26.5 150t-28 161.5t-23.5 147.5t-22 158q3 26 17.5 48.5t31.5 37.5t45 30t46 22.5t48 18.5q125 46 313 64q379 37 676 -50q155 -46 215 -122q16 -20 16.5 -51t-5.5 -54z" /> | ||||||
|  | <glyph unicode="" d="M848 666q0 43 -41 66t-77 1q-43 -20 -42.5 -72.5t43.5 -70.5q39 -23 81 4t36 72zM928 682q8 -66 -36 -121t-110 -61t-119 40t-56 113q-2 49 25.5 93t72.5 64q70 31 141.5 -10t81.5 -118zM1100 1073q-20 -21 -53.5 -34t-53 -16t-63.5 -8q-155 -20 -324 0q-44 6 -63 9.5 t-52.5 16t-54.5 32.5q13 19 36 31t40 15.5t47 8.5q198 35 408 1q33 -5 51 -8.5t43 -16t39 -31.5zM1142 327q0 7 5.5 26.5t3 32t-17.5 16.5q-161 -106 -365 -106t-366 106l-12 -6l-5 -12q26 -154 41 -210q47 -81 204 -108q249 -46 428 53q34 19 49 51.5t22.5 85.5t12.5 71z M1272 1020q9 53 -8 75q-43 55 -155 88q-216 63 -487 36q-132 -12 -226 -46q-38 -15 -59.5 -25t-47 -34t-29.5 -54q8 -68 19 -138t29 -171t24 -137q1 -5 5 -31t7 -36t12 -27t22 -28q105 -80 284 -100q259 -28 440 63q24 13 39.5 23t31 29t19.5 40q48 267 80 473zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M390 1408h219v-388h364v-241h-364v-394q0 -136 14 -172q13 -37 52 -60q50 -31 117 -31q117 0 232 76v-242q-102 -48 -178 -65q-77 -19 -173 -19q-105 0 -186 27q-78 25 -138 75q-58 51 -79 105q-22 54 -22 161v539h-170v217q91 30 155 84q64 55 103 132q39 78 54 196z " /> | ||||||
|  | <glyph unicode="" d="M1123 127v181q-88 -56 -174 -56q-51 0 -88 23q-29 17 -39 45q-11 30 -11 129v295h274v181h-274v291h-164q-11 -90 -40 -147t-78 -99q-48 -40 -116 -63v-163h127v-404q0 -78 17 -121q17 -42 59 -78q43 -37 104 -57q62 -20 140 -20q67 0 129 14q57 13 134 49zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="768" d="M765 237q8 -19 -5 -35l-350 -384q-10 -10 -23 -10q-14 0 -24 10l-355 384q-13 16 -5 35q9 19 29 19h224v1248q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1248h224q21 0 29 -19z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="768" d="M765 1043q-9 -19 -29 -19h-224v-1248q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1248h-224q-21 0 -29 19t5 35l350 384q10 10 23 10q14 0 24 -10l355 -384q13 -16 5 -35z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1792 736v-192q0 -14 -9 -23t-23 -9h-1248v-224q0 -21 -19 -29t-35 5l-384 350q-10 10 -10 23q0 14 10 24l384 354q16 14 35 6q19 -9 19 -29v-224h1248q14 0 23 -9t9 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1728 643q0 -14 -10 -24l-384 -354q-16 -14 -35 -6q-19 9 -19 29v224h-1248q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h1248v224q0 21 19 29t35 -5l384 -350q10 -10 10 -23z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M1393 321q-39 -125 -123 -250q-129 -196 -257 -196q-49 0 -140 32q-86 32 -151 32q-61 0 -142 -33q-81 -34 -132 -34q-152 0 -301 259q-147 261 -147 503q0 228 113 374q112 144 284 144q72 0 177 -30q104 -30 138 -30q45 0 143 34q102 34 173 34q119 0 213 -65 q52 -36 104 -100q-79 -67 -114 -118q-65 -94 -65 -207q0 -124 69 -223t158 -126zM1017 1494q0 -61 -29 -136q-30 -75 -93 -138q-54 -54 -108 -72q-37 -11 -104 -17q3 149 78 257q74 107 250 148q1 -3 2.5 -11t2.5 -11q0 -4 0.5 -10t0.5 -10z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M682 530v-651l-682 94v557h682zM682 1273v-659h-682v565zM1664 530v-786l-907 125v661h907zM1664 1408v-794h-907v669z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1408" d="M493 1053q16 0 27.5 11.5t11.5 27.5t-11.5 27.5t-27.5 11.5t-27 -11.5t-11 -27.5t11 -27.5t27 -11.5zM915 1053q16 0 27 11.5t11 27.5t-11 27.5t-27 11.5t-27.5 -11.5t-11.5 -27.5t11.5 -27.5t27.5 -11.5zM103 869q42 0 72 -30t30 -72v-430q0 -43 -29.5 -73t-72.5 -30 t-73 30t-30 73v430q0 42 30 72t73 30zM1163 850v-666q0 -46 -32 -78t-77 -32h-75v-227q0 -43 -30 -73t-73 -30t-73 30t-30 73v227h-138v-227q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73l-1 227h-74q-46 0 -78 32t-32 78v666h918zM931 1255q107 -55 171 -153.5t64 -215.5 h-925q0 117 64 215.5t172 153.5l-71 131q-7 13 5 20q13 6 20 -6l72 -132q95 42 201 42t201 -42l72 132q7 12 20 6q12 -7 5 -20zM1408 767v-430q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73v430q0 43 30 72.5t72 29.5q43 0 73 -29.5t30 -72.5z" /> | ||||||
|  | <glyph unicode="" d="M663 1125q-11 -1 -15.5 -10.5t-8.5 -9.5q-5 -1 -5 5q0 12 19 15h10zM750 1111q-4 -1 -11.5 6.5t-17.5 4.5q24 11 32 -2q3 -6 -3 -9zM399 684q-4 1 -6 -3t-4.5 -12.5t-5.5 -13.5t-10 -13q-7 -10 -1 -12q4 -1 12.5 7t12.5 18q1 3 2 7t2 6t1.5 4.5t0.5 4v3t-1 2.5t-3 2z M1254 325q0 18 -55 42q4 15 7.5 27.5t5 26t3 21.5t0.5 22.5t-1 19.5t-3.5 22t-4 20.5t-5 25t-5.5 26.5q-10 48 -47 103t-72 75q24 -20 57 -83q87 -162 54 -278q-11 -40 -50 -42q-31 -4 -38.5 18.5t-8 83.5t-11.5 107q-9 39 -19.5 69t-19.5 45.5t-15.5 24.5t-13 15t-7.5 7 q-14 62 -31 103t-29.5 56t-23.5 33t-15 40q-4 21 6 53.5t4.5 49.5t-44.5 25q-15 3 -44.5 18t-35.5 16q-8 1 -11 26t8 51t36 27q37 3 51 -30t4 -58q-11 -19 -2 -26.5t30 -0.5q13 4 13 36v37q-5 30 -13.5 50t-21 30.5t-23.5 15t-27 7.5q-107 -8 -89 -134q0 -15 -1 -15 q-9 9 -29.5 10.5t-33 -0.5t-15.5 5q1 57 -16 90t-45 34q-27 1 -41.5 -27.5t-16.5 -59.5q-1 -15 3.5 -37t13 -37.5t15.5 -13.5q10 3 16 14q4 9 -7 8q-7 0 -15.5 14.5t-9.5 33.5q-1 22 9 37t34 14q17 0 27 -21t9.5 -39t-1.5 -22q-22 -15 -31 -29q-8 -12 -27.5 -23.5 t-20.5 -12.5q-13 -14 -15.5 -27t7.5 -18q14 -8 25 -19.5t16 -19t18.5 -13t35.5 -6.5q47 -2 102 15q2 1 23 7t34.5 10.5t29.5 13t21 17.5q9 14 20 8q5 -3 6.5 -8.5t-3 -12t-16.5 -9.5q-20 -6 -56.5 -21.5t-45.5 -19.5q-44 -19 -70 -23q-25 -5 -79 2q-10 2 -9 -2t17 -19 q25 -23 67 -22q17 1 36 7t36 14t33.5 17.5t30 17t24.5 12t17.5 2.5t8.5 -11q0 -2 -1 -4.5t-4 -5t-6 -4.5t-8.5 -5t-9 -4.5t-10 -5t-9.5 -4.5q-28 -14 -67.5 -44t-66.5 -43t-49 -1q-21 11 -63 73q-22 31 -25 22q-1 -3 -1 -10q0 -25 -15 -56.5t-29.5 -55.5t-21 -58t11.5 -63 q-23 -6 -62.5 -90t-47.5 -141q-2 -18 -1.5 -69t-5.5 -59q-8 -24 -29 -3q-32 31 -36 94q-2 28 4 56q4 19 -1 18l-4 -5q-36 -65 10 -166q5 -12 25 -28t24 -20q20 -23 104 -90.5t93 -76.5q16 -15 17.5 -38t-14 -43t-45.5 -23q8 -15 29 -44.5t28 -54t7 -70.5q46 24 7 92 q-4 8 -10.5 16t-9.5 12t-2 6q3 5 13 9.5t20 -2.5q46 -52 166 -36q133 15 177 87q23 38 34 30q12 -6 10 -52q-1 -25 -23 -92q-9 -23 -6 -37.5t24 -15.5q3 19 14.5 77t13.5 90q2 21 -6.5 73.5t-7.5 97t23 70.5q15 18 51 18q1 37 34.5 53t72.5 10.5t60 
-22.5zM626 1152 q3 17 -2.5 30t-11.5 15q-9 2 -9 -7q2 -5 5 -6q10 0 7 -15q-3 -20 8 -20q3 0 3 3zM1045 955q-2 8 -6.5 11.5t-13 5t-14.5 5.5q-5 3 -9.5 8t-7 8t-5.5 6.5t-4 4t-4 -1.5q-14 -16 7 -43.5t39 -31.5q9 -1 14.5 8t3.5 20zM867 1168q0 11 -5 19.5t-11 12.5t-9 3q-14 -1 -7 -7l4 -2 q14 -4 18 -31q0 -3 8 2zM921 1401q0 2 -2.5 5t-9 7t-9.5 6q-15 15 -24 15q-9 -1 -11.5 -7.5t-1 -13t-0.5 -12.5q-1 -4 -6 -10.5t-6 -9t3 -8.5q4 -3 8 0t11 9t15 9q1 1 9 1t15 2t9 7zM1486 60q20 -12 31 -24.5t12 -24t-2.5 -22.5t-15.5 -22t-23.5 -19.5t-30 -18.5 t-31.5 -16.5t-32 -15.5t-27 -13q-38 -19 -85.5 -56t-75.5 -64q-17 -16 -68 -19.5t-89 14.5q-18 9 -29.5 23.5t-16.5 25.5t-22 19.5t-47 9.5q-44 1 -130 1q-19 0 -57 -1.5t-58 -2.5q-44 -1 -79.5 -15t-53.5 -30t-43.5 -28.5t-53.5 -11.5q-29 1 -111 31t-146 43q-19 4 -51 9.5 t-50 9t-39.5 9.5t-33.5 14.5t-17 19.5q-10 23 7 66.5t18 54.5q1 16 -4 40t-10 42.5t-4.5 36.5t10.5 27q14 12 57 14t60 12q30 18 42 35t12 51q21 -73 -32 -106q-32 -20 -83 -15q-34 3 -43 -10q-13 -15 5 -57q2 -6 8 -18t8.5 -18t4.5 -17t1 -22q0 -15 -17 -49t-14 -48 q3 -17 37 -26q20 -6 84.5 -18.5t99.5 -20.5q24 -6 74 -22t82.5 -23t55.5 -4q43 6 64.5 28t23 48t-7.5 58.5t-19 52t-20 36.5q-121 190 -169 242q-68 74 -113 40q-11 -9 -15 15q-3 16 -2 38q1 29 10 52t24 47t22 42q8 21 26.5 72t29.5 78t30 61t39 54q110 143 124 195 q-12 112 -16 310q-2 90 24 151.5t106 104.5q39 21 104 21q53 1 106 -13.5t89 -41.5q57 -42 91.5 -121.5t29.5 -147.5q-5 -95 30 -214q34 -113 133 -218q55 -59 99.5 -163t59.5 -191q8 -49 5 -84.5t-12 -55.5t-20 -22q-10 -2 -23.5 -19t-27 -35.5t-40.5 -33.5t-61 -14 q-18 1 -31.5 5t-22.5 13.5t-13.5 15.5t-11.5 20.5t-9 19.5q-22 37 -41 30t-28 -49t7 -97q20 -70 1 -195q-10 -65 18 -100.5t73 -33t85 35.5q59 49 89.5 66.5t103.5 42.5q53 18 77 36.5t18.5 34.5t-25 28.5t-51.5 23.5q-33 11 -49.5 48t-15 72.5t15.5 47.5q1 -31 8 -56.5 t14.5 -40.5t20.5 -28.5t21 -19t21.5 -13t16.5 -9.5z" /> | ||||||
|  | <glyph unicode="" d="M1024 36q-42 241 -140 498h-2l-2 -1q-16 -6 -43 -16.5t-101 -49t-137 -82t-131 -114.5t-103 -148l-15 11q184 -150 418 -150q132 0 256 52zM839 643q-21 49 -53 111q-311 -93 -673 -93q-1 -7 -1 -21q0 -124 44 -236.5t124 -201.5q50 89 123.5 166.5t142.5 124.5t130.5 81 t99.5 48l37 13q4 1 13 3.5t13 4.5zM732 855q-120 213 -244 378q-138 -65 -234 -186t-128 -272q302 0 606 80zM1416 536q-210 60 -409 29q87 -239 128 -469q111 75 185 189.5t96 250.5zM611 1277q-1 0 -2 -1q1 1 2 1zM1201 1132q-185 164 -433 164q-76 0 -155 -19 q131 -170 246 -382q69 26 130 60.5t96.5 61.5t65.5 57t37.5 40.5zM1424 647q-3 232 -149 410l-1 -1q-9 -12 -19 -24.5t-43.5 -44.5t-71 -60.5t-100 -65t-131.5 -64.5q25 -53 44 -95q2 -6 6.5 -17.5t7.5 -16.5q36 5 74.5 7t73.5 2t69 -1.5t64 -4t56.5 -5.5t48 -6.5t36.5 -6 t25 -4.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" d="M1173 473q0 50 -19.5 91.5t-48.5 68.5t-73 49t-82.5 34t-87.5 23l-104 24q-30 7 -44 10.5t-35 11.5t-30 16t-16.5 21t-7.5 30q0 77 144 77q43 0 77 -12t54 -28.5t38 -33.5t40 -29t48 -12q47 0 75.5 32t28.5 77q0 55 -56 99.5t-142 67.5t-182 23q-68 0 -132 -15.5 t-119.5 -47t-89 -87t-33.5 -128.5q0 -61 19 -106.5t56 -75.5t80 -48.5t103 -32.5l146 -36q90 -22 112 -36q32 -20 32 -60q0 -39 -40 -64.5t-105 -25.5q-51 0 -91.5 16t-65 38.5t-45.5 45t-46 38.5t-54 16q-50 0 -75.5 -30t-25.5 -75q0 -92 122 -157.5t291 -65.5 q73 0 140 18.5t122.5 53.5t88.5 93.5t33 131.5zM1536 256q0 -159 -112.5 -271.5t-271.5 -112.5q-130 0 -234 80q-77 -16 -150 -16q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5q0 73 16 150q-80 104 -80 234q0 159 112.5 271.5t271.5 112.5q130 0 234 -80 q77 16 150 16q143 0 273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -73 -16 -150q80 -104 80 -234z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1483 512l-587 -587q-52 -53 -127.5 -53t-128.5 53l-587 587q-53 53 -53 128t53 128l587 587q53 53 128 53t128 -53l265 -265l-398 -399l-188 188q-42 42 -99 42q-59 0 -100 -41l-120 -121q-42 -40 -42 -99q0 -58 42 -100l406 -408q30 -28 67 -37l6 -4h28q60 0 99 41 l619 619l2 -3q53 -53 53 -128t-53 -128zM1406 1138l120 -120q14 -15 14 -36t-14 -36l-730 -730q-17 -15 -37 -15v0q-4 0 -6 1q-18 2 -30 14l-407 408q-14 15 -14 36t14 35l121 120q13 15 35 15t36 -15l252 -252l574 575q15 15 36 15t36 -15z" /> | ||||||
|  | <glyph unicode="" d="M704 192v1024q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-1024q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1376 576v640q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-640q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408 q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1280" d="M1280 480q0 -40 -28 -68t-68 -28q-51 0 -80 43l-227 341h-45v-132l247 -411q9 -15 9 -33q0 -26 -19 -45t-45 -19h-192v-272q0 -46 -33 -79t-79 -33h-160q-46 0 -79 33t-33 79v272h-192q-26 0 -45 19t-19 45q0 18 9 33l247 411v132h-45l-227 -341q-29 -43 -80 -43 q-40 0 -68 28t-28 68q0 29 16 53l256 384q73 107 176 107h384q103 0 176 -107l256 -384q16 -24 16 -53zM864 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1024" d="M1024 832v-416q0 -40 -28 -68t-68 -28t-68 28t-28 68v352h-64v-912q0 -46 -33 -79t-79 -33t-79 33t-33 79v464h-64v-464q0 -46 -33 -79t-79 -33t-79 33t-33 79v912h-64v-352q0 -40 -28 -68t-68 -28t-68 28t-28 68v416q0 80 56 136t136 56h640q80 0 136 -56t56 -136z M736 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" /> | ||||||
|  | <glyph unicode="" d="M773 234l350 473q16 22 24.5 59t-6 85t-61.5 79q-40 26 -83 25.5t-73.5 -17.5t-54.5 -45q-36 -40 -96 -40q-59 0 -95 40q-24 28 -54.5 45t-73.5 17.5t-84 -25.5q-46 -31 -60.5 -79t-6 -85t24.5 -59zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1472 640q0 117 -45.5 223.5t-123 184t-184 123t-223.5 45.5t-223.5 -45.5t-184 -123t-123 -184t-45.5 -223.5t45.5 -223.5t123 -184t184 -123t223.5 -45.5t223.5 45.5t184 123t123 184t45.5 223.5zM1748 363q-4 -15 -20 -20l-292 -96v-306q0 -16 -13 -26q-15 -10 -29 -4 l-292 94l-180 -248q-10 -13 -26 -13t-26 13l-180 248l-292 -94q-14 -6 -29 4q-13 10 -13 26v306l-292 96q-16 5 -20 20q-5 17 4 29l180 248l-180 248q-9 13 -4 29q4 15 20 20l292 96v306q0 16 13 26q15 10 29 4l292 -94l180 248q9 12 26 12t26 -12l180 -248l292 94 q14 6 29 -4q13 -10 13 -26v-306l292 -96q16 -5 20 -20q5 -16 -4 -29l-180 -248l180 -248q9 -12 4 -29z" /> | ||||||
|  | <glyph unicode="" d="M1262 233q-54 -9 -110 -9q-182 0 -337 90t-245 245t-90 337q0 192 104 357q-201 -60 -328.5 -229t-127.5 -384q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51q144 0 273.5 61.5t220.5 171.5zM1465 318q-94 -203 -283.5 -324.5t-413.5 -121.5q-156 0 -298 61 t-245 164t-164 245t-61 298q0 153 57.5 292.5t156 241.5t235.5 164.5t290 68.5q44 2 61 -39q18 -41 -15 -72q-86 -78 -131.5 -181.5t-45.5 -218.5q0 -148 73 -273t198 -198t273 -73q118 0 228 51q41 18 72 -13q14 -14 17.5 -34t-4.5 -38z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M1088 704q0 26 -19 45t-45 19h-256q-26 0 -45 -19t-19 -45t19 -45t45 -19h256q26 0 45 19t19 45zM1664 896v-960q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v960q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1728 1344v-256q0 -26 -19 -45t-45 -19h-1536 q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1536q26 0 45 -19t19 -45z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1664" d="M1632 576q0 -26 -19 -45t-45 -19h-224q0 -171 -67 -290l208 -209q19 -19 19 -45t-19 -45q-18 -19 -45 -19t-45 19l-198 197q-5 -5 -15 -13t-42 -28.5t-65 -36.5t-82 -29t-97 -13v896h-128v-896q-51 0 -101.5 13.5t-87 33t-66 39t-43.5 32.5l-15 14l-183 -207 q-20 -21 -48 -21q-24 0 -43 16q-19 18 -20.5 44.5t15.5 46.5l202 227q-58 114 -58 274h-224q-26 0 -45 19t-19 45t19 45t45 19h224v294l-173 173q-19 19 -19 45t19 45t45 19t45 -19l173 -173h844l173 173q19 19 45 19t45 -19t19 -45t-19 -45l-173 -173v-294h224q26 0 45 -19 t19 -45zM1152 1152h-640q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M1917 1016q23 -64 -150 -294q-24 -32 -65 -85q-78 -100 -90 -131q-17 -41 14 -81q17 -21 81 -82h1l1 -1l1 -1l2 -2q141 -131 191 -221q3 -5 6.5 -12.5t7 -26.5t-0.5 -34t-25 -27.5t-59 -12.5l-256 -4q-24 -5 -56 5t-52 22l-20 12q-30 21 -70 64t-68.5 77.5t-61 58 t-56.5 15.5q-3 -1 -8 -3.5t-17 -14.5t-21.5 -29.5t-17 -52t-6.5 -77.5q0 -15 -3.5 -27.5t-7.5 -18.5l-4 -5q-18 -19 -53 -22h-115q-71 -4 -146 16.5t-131.5 53t-103 66t-70.5 57.5l-25 24q-10 10 -27.5 30t-71.5 91t-106 151t-122.5 211t-130.5 272q-6 16 -6 27t3 16l4 6 q15 19 57 19l274 2q12 -2 23 -6.5t16 -8.5l5 -3q16 -11 24 -32q20 -50 46 -103.5t41 -81.5l16 -29q29 -60 56 -104t48.5 -68.5t41.5 -38.5t34 -14t27 5q2 1 5 5t12 22t13.5 47t9.5 81t0 125q-2 40 -9 73t-14 46l-6 12q-25 34 -85 43q-13 2 5 24q17 19 38 30q53 26 239 24 q82 -1 135 -13q20 -5 33.5 -13.5t20.5 -24t10.5 -32t3.5 -45.5t-1 -55t-2.5 -70.5t-1.5 -82.5q0 -11 -1 -42t-0.5 -48t3.5 -40.5t11.5 -39t22.5 -24.5q8 -2 17 -4t26 11t38 34.5t52 67t68 107.5q60 104 107 225q4 10 10 17.5t11 10.5l4 3l5 2.5t13 3t20 0.5l288 2 q39 5 64 -2.5t31 -16.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" d="M675 252q21 34 11 69t-45 50q-34 14 -73 1t-60 -46q-22 -34 -13 -68.5t43 -50.5t74.5 -2.5t62.5 47.5zM769 373q8 13 3.5 26.5t-17.5 18.5q-14 5 -28.5 -0.5t-21.5 -18.5q-17 -31 13 -45q14 -5 29 0.5t22 18.5zM943 266q-45 -102 -158 -150t-224 -12 q-107 34 -147.5 126.5t6.5 187.5q47 93 151.5 139t210.5 19q111 -29 158.5 -119.5t2.5 -190.5zM1255 426q-9 96 -89 170t-208.5 109t-274.5 21q-223 -23 -369.5 -141.5t-132.5 -264.5q9 -96 89 -170t208.5 -109t274.5 -21q223 23 369.5 141.5t132.5 264.5zM1563 422 q0 -68 -37 -139.5t-109 -137t-168.5 -117.5t-226 -83t-270.5 -31t-275 33.5t-240.5 93t-171.5 151t-65 199.5q0 115 69.5 245t197.5 258q169 169 341.5 236t246.5 -7q65 -64 20 -209q-4 -14 -1 -20t10 -7t14.5 0.5t13.5 3.5l6 2q139 59 246 59t153 -61q45 -63 0 -178 q-2 -13 -4.5 -20t4.5 -12.5t12 -7.5t17 -6q57 -18 103 -47t80 -81.5t34 -116.5zM1489 1046q42 -47 54.5 -108.5t-6.5 -117.5q-8 -23 -29.5 -34t-44.5 -4q-23 8 -34 29.5t-4 44.5q20 63 -24 111t-107 35q-24 -5 -45 8t-25 37q-5 24 8 44.5t37 25.5q60 13 119 -5.5t101 -65.5z M1670 1209q87 -96 112.5 -222.5t-13.5 -241.5q-9 -27 -34 -40t-52 -4t-40 34t-5 52q28 82 10 172t-80 158q-62 69 -148 95.5t-173 8.5q-28 -6 -52 9.5t-30 43.5t9.5 51.5t43.5 29.5q123 26 244 -11.5t208 -134.5z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1920" d="M805 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM453 1176v-344q0 -179 -89.5 -326t-234.5 -217q-129 152 -129 351q0 200 129.5 352t323.5 184zM958 991q-128 -152 -128 -351q0 -201 128 -351q-145 70 -234.5 218t-89.5 328 v341q196 -33 324 -185zM1638 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM1286 1176v-344q0 -179 -91 -326t-237 -217v0q133 154 133 351q0 195 -133 351q129 151 328 185zM1920 640q0 -201 -129 -351q-145 70 -234.5 218 t-89.5 328v341q194 -32 323.5 -184t129.5 -352z" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" /> | ||||||
|  | <glyph unicode="" horiz-adv-x="1792" /> | ||||||
|  | </font> | ||||||
|  | </defs></svg>  | ||||||
| After Width: | Height: | Size: 193 KiB | 
							
								
								
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										16
									
								
								docs/_themes/sphinx_rtd_theme/static/js/theme.js
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										16
									
								
								docs/_themes/sphinx_rtd_theme/static/js/theme.js
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,16 @@ | |||||||
|  | $( document ).ready(function() { | ||||||
|  |   // Shift nav in mobile when clicking the menu. | ||||||
|  |   $("[data-toggle='wy-nav-top']").click(function() { | ||||||
|  |     $("[data-toggle='wy-nav-shift']").toggleClass("shift"); | ||||||
|  |     $("[data-toggle='rst-versions']").toggleClass("shift"); | ||||||
|  |   }); | ||||||
|  |   // Close menu when you click a link. | ||||||
|  |   $(".wy-menu-vertical .current ul li a").click(function() { | ||||||
|  |     $("[data-toggle='wy-nav-shift']").removeClass("shift"); | ||||||
|  |     $("[data-toggle='rst-versions']").toggleClass("shift"); | ||||||
|  |   }); | ||||||
|  |   $("[data-toggle='rst-current-version']").click(function() { | ||||||
|  |     $("[data-toggle='rst-versions']").toggleClass("shift-up"); | ||||||
|  |   }); | ||||||
|  |   $("table.docutils:not(.field-list").wrap("<div class='wy-table-responsive'></div>"); | ||||||
|  | }); | ||||||
							
								
								
									
										8
									
								
								docs/_themes/sphinx_rtd_theme/theme.conf
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										8
									
								
								docs/_themes/sphinx_rtd_theme/theme.conf
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,8 @@ | |||||||
|  | [theme] | ||||||
|  | inherit = basic | ||||||
|  | stylesheet = css/theme.css | ||||||
|  |  | ||||||
|  | [options] | ||||||
|  | typekit_id = hiw1hhg | ||||||
|  | analytics_id = | ||||||
|  | canonical_url = | ||||||
							
								
								
									
										37
									
								
								docs/_themes/sphinx_rtd_theme/versions.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							
							
						
						
									
										37
									
								
								docs/_themes/sphinx_rtd_theme/versions.html
									
									
									
									
										vendored
									
									
										Executable file
									
								
							| @@ -0,0 +1,37 @@ | |||||||
|  | {% if READTHEDOCS %} | ||||||
|  | {# Add rst-badge after rst-versions for small badge style. #} | ||||||
|  |   <div class="rst-versions" data-toggle="rst-versions"> | ||||||
|  |     <span class="rst-current-version" data-toggle="rst-current-version"> | ||||||
|  |       <span class="icon icon-book"> Read the Docs</span> | ||||||
|  |       v: {{ current_version }}  | ||||||
|  |       <span class="icon icon-caret-down"></span> | ||||||
|  |     </span> | ||||||
|  |     <div class="rst-other-versions"> | ||||||
|  |       <dl> | ||||||
|  |         <dt>Versions</dt> | ||||||
|  |         {% for slug, url in versions %} | ||||||
|  |           <dd><a href="{{ url }}">{{ slug }}</a></dd> | ||||||
|  |         {% endfor %} | ||||||
|  |       </dl> | ||||||
|  |       <dl> | ||||||
|  |         <dt>Downloads</dt> | ||||||
|  |         {% for type, url in downloads %} | ||||||
|  |           <dd><a href="{{ url }}">{{ type }}</a></dd> | ||||||
|  |         {% endfor %} | ||||||
|  |       </dl> | ||||||
|  |       <dl> | ||||||
|  |         <dt>On Read the Docs</dt> | ||||||
|  |           <dd> | ||||||
|  |             <a href="//{{ PRODUCTION_DOMAIN }}/projects/{{ slug }}/?fromdocs={{ slug }}">Project Home</a> | ||||||
|  |           </dd> | ||||||
|  |           <dd> | ||||||
|  |             <a href="//{{ PRODUCTION_DOMAIN }}/builds/{{ slug }}/?fromdocs={{ slug }}">Builds</a> | ||||||
|  |           </dd> | ||||||
|  |       </dl> | ||||||
|  |       <hr/> | ||||||
|  |       Free document hosting provided by <a href="http://www.readthedocs.org">Read the Docs</a>. | ||||||
|  |  | ||||||
|  |     </div> | ||||||
|  |   </div> | ||||||
|  | {% endif %} | ||||||
|  |  | ||||||
| @@ -13,7 +13,6 @@ Documents | |||||||
|  |  | ||||||
| .. autoclass:: mongoengine.Document | .. autoclass:: mongoengine.Document | ||||||
|    :members: |    :members: | ||||||
|    :inherited-members: |  | ||||||
|  |  | ||||||
|    .. attribute:: objects |    .. attribute:: objects | ||||||
|  |  | ||||||
| @@ -22,15 +21,12 @@ Documents | |||||||
|  |  | ||||||
| .. autoclass:: mongoengine.EmbeddedDocument | .. autoclass:: mongoengine.EmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|    :inherited-members: |  | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicDocument | .. autoclass:: mongoengine.DynamicDocument | ||||||
|    :members: |    :members: | ||||||
|    :inherited-members: |  | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicEmbeddedDocument | .. autoclass:: mongoengine.DynamicEmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|    :inherited-members: |  | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.document.MapReduceDocument | .. autoclass:: mongoengine.document.MapReduceDocument | ||||||
|   :members: |   :members: | ||||||
| @@ -91,9 +87,7 @@ Fields | |||||||
| .. autoclass:: mongoengine.fields.DictField | .. autoclass:: mongoengine.fields.DictField | ||||||
| .. autoclass:: mongoengine.fields.MapField | .. autoclass:: mongoengine.fields.MapField | ||||||
| .. autoclass:: mongoengine.fields.ReferenceField | .. autoclass:: mongoengine.fields.ReferenceField | ||||||
| .. autoclass:: mongoengine.fields.LazyReferenceField |  | ||||||
| .. autoclass:: mongoengine.fields.GenericReferenceField | .. autoclass:: mongoengine.fields.GenericReferenceField | ||||||
| .. autoclass:: mongoengine.fields.GenericLazyReferenceField |  | ||||||
| .. autoclass:: mongoengine.fields.CachedReferenceField | .. autoclass:: mongoengine.fields.CachedReferenceField | ||||||
| .. autoclass:: mongoengine.fields.BinaryField | .. autoclass:: mongoengine.fields.BinaryField | ||||||
| .. autoclass:: mongoengine.fields.FileField | .. autoclass:: mongoengine.fields.FileField | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
|  |  | ||||||
| ========= | ========= | ||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
| @@ -6,159 +5,11 @@ Changelog | |||||||
| Development | Development | ||||||
| =========== | =========== | ||||||
| - (Fill this out as you fix issues and develop your features). | - (Fill this out as you fix issues and develop your features). | ||||||
|  | - Fixed using sets in field choices #1481 | ||||||
| Changes in 0.18.2 |  | ||||||
| ================= |  | ||||||
| - Replace some of the deprecated PyMongo v2.x methods with their v3.x equivalents #2097 |  | ||||||
| - Various code clarity and documentation improvements |  | ||||||
|  |  | ||||||
| Changes in 0.18.1 |  | ||||||
| ================= |  | ||||||
| - Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields |  | ||||||
|     instead of updating only the modified fields. This bug only occurs when using custom pk #2082 |  | ||||||
| - Add Python 3.7 in travis #2058 |  | ||||||
|  |  | ||||||
| Changes in 0.18.0 |  | ||||||
| ================= |  | ||||||
| - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. |  | ||||||
| - MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6 (#2017 #2066). |  | ||||||
| - Improve performance by avoiding a call to `to_mongo` in `Document.save()` #2049 |  | ||||||
| - Connection/disconnection improvements: |  | ||||||
|     - Expose `mongoengine.connection.disconnect` and `mongoengine.connection.disconnect_all` |  | ||||||
|     - Fix disconnecting #566 #1599 #605 #607 #1213 #565 |  | ||||||
|     - Improve documentation of `connect`/`disconnect` |  | ||||||
|     - Fix issue when using multiple connections to the same mongo with different credentials #2047 |  | ||||||
|     - `connect` fails immediately when db name contains invalid characters #2031 #1718 |  | ||||||
| - Fix the default write concern of `Document.save` that was overwriting the connection write concern #568 |  | ||||||
| - Fix querying on `List(EmbeddedDocument)` subclasses fields #1961 #1492 |  | ||||||
| - Fix querying on `(Generic)EmbeddedDocument` subclasses fields #475 |  | ||||||
| - Fix `QuerySet.aggregate` so that it takes limit and skip value into account #2029 |  | ||||||
| - Generate unique indices for `SortedListField` and `EmbeddedDocumentListFields` #2020 |  | ||||||
| - BREAKING CHANGE: Changed the behavior of a custom field validator (i.e `validation` parameter of a `Field`). It is now expected to raise a `ValidationError` instead of returning True/False #2050 |  | ||||||
| - BREAKING CHANGES (associated with connect/disconnect fixes): |  | ||||||
|     - Calling `connect` 2 times with the same alias and different parameter will raise an error (should call `disconnect` first). |  | ||||||
|     - `disconnect` now clears `mongoengine.connection._connection_settings`. |  | ||||||
|     - `disconnect` now clears the cached attribute `Document._collection`. |  | ||||||
| - BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` is no longier exist #1552 |  | ||||||
|  |  | ||||||
| Changes in 0.17.0 |  | ||||||
| ================= |  | ||||||
| - Fix .only() working improperly after using .count() of the same instance of QuerySet |  | ||||||
| - Fix batch_size that was not copied when cloning a queryset object #2011 |  | ||||||
| - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (_cls, _id) when using `QuerySet.as_pymongo` #1976 |  | ||||||
| - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time (#1995) |  | ||||||
| - Fix InvalidStringData error when using modify on a BinaryField #1127 |  | ||||||
| - DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of mongoengine #1552 |  | ||||||
| - Fix test suite and CI to support MongoDB 3.4 #1445 |  | ||||||
| - Fix reference fields querying the database on each access if value contains orphan DBRefs |  | ||||||
|  |  | ||||||
| ================= |  | ||||||
| Changes in 0.16.3 |  | ||||||
| ================= |  | ||||||
| - Fix $push with $position operator not working with lists in embedded document #1965 |  | ||||||
|  |  | ||||||
| ================= |  | ||||||
| Changes in 0.16.2 |  | ||||||
| ================= |  | ||||||
| - Fix .save() that fails when called with write_concern=None (regression of 0.16.1) #1958 |  | ||||||
|  |  | ||||||
| ================= |  | ||||||
| Changes in 0.16.1 |  | ||||||
| ================= |  | ||||||
| - Fix `_cls` that is not set properly in Document constructor (regression) #1950 |  | ||||||
| - Fix bug in _delta method - Update of a ListField depends on an unrelated dynamic field update #1733 |  | ||||||
| - Remove deprecated `save()` method and used `insert_one()` #1899 |  | ||||||
|  |  | ||||||
| ================= |  | ||||||
| Changes in 0.16.0 |  | ||||||
| ================= |  | ||||||
| - Various improvements to the doc |  | ||||||
| - Improvement to code quality |  | ||||||
| - POTENTIAL BREAKING CHANGES: |  | ||||||
|     - EmbeddedDocumentField will no longer accept references to Document classes in its constructor #1661 |  | ||||||
|     - Get rid of the `basecls` parameter from the DictField constructor (dead code) #1876 |  | ||||||
|     - default value of ComplexDateTime is now None (and no longer the current datetime) #1368 |  | ||||||
| - Fix unhashable TypeError when referencing a Document with a compound key in an EmbeddedDocument #1685 |  | ||||||
| - Fix bug where an EmbeddedDocument with the same id as its parent would not be tracked for changes #1768 |  | ||||||
| - Fix the fact that bulk `insert()` was not setting primary keys of inserted documents instances #1919 |  | ||||||
| - Fix bug when referencing the abstract class in a ReferenceField #1920 |  | ||||||
| - Allow modification to the document made in pre_save_post_validation to be taken into account #1202 |  | ||||||
| - Replaced MongoDB 2.4 tests in CI by MongoDB 3.2 #1903 |  | ||||||
| - Fix side effects of using queryset.`no_dereference` on other documents #1677 |  | ||||||
| - Fix TypeError when using lazy django translation objects as translated choices #1879 |  | ||||||
| - Improve 2-3 codebase compatibility #1889 |  | ||||||
| - Fix the support for changing the default value of ComplexDateTime #1368 |  | ||||||
| - Improves error message in case an EmbeddedDocumentListField receives an EmbeddedDocument instance |  | ||||||
|     instead of a list #1877 |  | ||||||
| - Fix the Decimal operator inc/dec #1517 #1320 |  | ||||||
| - Ignore killcursors queries in `query_counter` context manager #1869 |  | ||||||
| - Fix the fact that `query_counter` was modifying the initial profiling_level in case it was != 0 #1870 |  | ||||||
| - Repaired the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions #1865 |  | ||||||
| - Fix index creation error that was swallowed by hasattr under python2 #1688 |  | ||||||
| - QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611 |  | ||||||
| - bulk insert updates the ids of the input documents instances #1919 |  | ||||||
| - Fix an harmless bug related to GenericReferenceField where modifications in the generic-referenced document |  | ||||||
|     were tracked in the parent #1934 |  | ||||||
| - Improve validator of BinaryField #273 |  | ||||||
| - Implemented lazy regex compiling in Field classes to improve 'import mongoengine' performance #1806 |  | ||||||
| - Updated GridFSProxy.__str__  so that it would always print both the filename and grid_id #710 |  | ||||||
| - Add __repr__ to Q and QCombination #1843 |  | ||||||
| - fix bug in BaseList.__iter__ operator (was occuring when modifying a BaseList while iterating over it) #1676 |  | ||||||
| - Added field `DateField`#513 |  | ||||||
|  |  | ||||||
| Changes in 0.15.3 |  | ||||||
| ================= |  | ||||||
| -  BREAKING CHANGES: `Queryset.update/update_one` methods now returns an UpdateResult when `full_result=True` is provided and no longer a dict (relates to #1491) |  | ||||||
| -  Subfield resolve error in generic_emdedded_document query #1651 #1652 |  | ||||||
| -  use each modifier only with $position #1673 #1675 |  | ||||||
| -  Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704 |  | ||||||
| -  Fix validation error instance in GenericEmbeddedDocumentField #1067 |  | ||||||
| -  Update cached fields when fields argument is given #1712 |  | ||||||
| -  Add a db parameter to register_connection for compatibility with connect |  | ||||||
| -  Use insert_one, insert_many in Document.insert #1491 |  | ||||||
| -  Use new update_one, update_many on document/queryset update #1491 |  | ||||||
| -  Use insert_one, insert_many in Document.insert #1491 |  | ||||||
| -  Fix reload(fields) affect changed fields #1371 |  | ||||||
| -  Fix Read-only access to database fails when trying to create indexes #1338 |  | ||||||
|  |  | ||||||
| Changes in 0.15.0 |  | ||||||
| ================= |  | ||||||
| - Add LazyReferenceField and GenericLazyReferenceField to address #1230 |  | ||||||
|  |  | ||||||
| Changes in 0.14.1 |  | ||||||
| ================= |  | ||||||
| - Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630 |  | ||||||
| - Added support for the `$position` param in the `$push` operator #1566 |  | ||||||
| - Fixed `DateTimeField` interpreting an empty string as today #1533 |  | ||||||
| - Added a missing `__ne__` method to the `GridFSProxy` class #1632 |  | ||||||
| - Fixed `BaseQuerySet._fields_to_db_fields` #1553 |  | ||||||
|  |  | ||||||
| Changes in 0.14.0 |  | ||||||
| ================= |  | ||||||
| - BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549 |  | ||||||
| - POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528 |  | ||||||
| - Improved code quality #1531, #1540, #1541, #1547 |  | ||||||
|  |  | ||||||
| Changes in 0.13.0 |  | ||||||
| ================= |  | ||||||
| - POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see |  | ||||||
|   docs/upgrade.rst for details. |  | ||||||
|  |  | ||||||
| Changes in 0.12.0 |  | ||||||
| ================= |  | ||||||
| - POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 | - POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 | ||||||
| - POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 | - POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 | ||||||
| - Fixed the way `Document.objects.create` works with duplicate IDs #1485 |  | ||||||
| - Fixed connecting to a replica set with PyMongo 2.x #1436 | - Fixed connecting to a replica set with PyMongo 2.x #1436 | ||||||
| - Fixed using sets in field choices #1481 |  | ||||||
| - Fixed deleting items from a `ListField` #1318 |  | ||||||
| - Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 | - Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 | ||||||
| - Fixed behavior of a `dec` update operator #1450 |  | ||||||
| - Added a `rename` update operator #1454 |  | ||||||
| - Added validation for the `db_field` parameter #1448 |  | ||||||
| - Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440 |  | ||||||
| - Fixed the error message displayed when validating unicode URLs #1486 |  | ||||||
| - Raise an error when trying to save an abstract document #1449 |  | ||||||
|  |  | ||||||
| Changes in 0.11.0 | Changes in 0.11.0 | ||||||
| ================= | ================= | ||||||
|   | |||||||
| @@ -45,27 +45,27 @@ post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' | |||||||
| post2.tags = ['mongoengine'] | post2.tags = ['mongoengine'] | ||||||
| post2.save() | post2.save() | ||||||
|  |  | ||||||
| print('ALL POSTS') | print 'ALL POSTS' | ||||||
| print() | print | ||||||
| for post in Post.objects: | for post in Post.objects: | ||||||
|     print(post.title) |     print post.title | ||||||
|     #print '=' * post.title.count() |     #print '=' * post.title.count() | ||||||
|     print("=" * 20) |     print "=" * 20 | ||||||
|  |  | ||||||
|     if isinstance(post, TextPost): |     if isinstance(post, TextPost): | ||||||
|         print(post.content) |         print post.content | ||||||
|  |  | ||||||
|     if isinstance(post, LinkPost): |     if isinstance(post, LinkPost): | ||||||
|         print('Link:', post.link_url) |         print 'Link:', post.link_url | ||||||
|  |  | ||||||
|     print() |     print | ||||||
| print() | print | ||||||
|  |  | ||||||
| print('POSTS TAGGED \'MONGODB\'') | print 'POSTS TAGGED \'MONGODB\'' | ||||||
| print() | print | ||||||
| for post in Post.objects(tags='mongodb'): | for post in Post.objects(tags='mongodb'): | ||||||
|     print(post.title) |     print post.title | ||||||
| print() | print | ||||||
|  |  | ||||||
| num_posts = Post.objects(tags='mongodb').count() | num_posts = Post.objects(tags='mongodb').count() | ||||||
| print('Found %d posts with tag "mongodb"' % num_posts) | print 'Found %d posts with tag "mongodb"' % num_posts | ||||||
|   | |||||||
							
								
								
									
										15
									
								
								docs/conf.py
									
									
									
									
									
								
							
							
						
						
									
										15
									
								
								docs/conf.py
									
									
									
									
									
								
							| @@ -13,10 +13,6 @@ | |||||||
|  |  | ||||||
| import sys, os | import sys, os | ||||||
|  |  | ||||||
| import sphinx_rtd_theme |  | ||||||
|  |  | ||||||
| import mongoengine |  | ||||||
|  |  | ||||||
| # If extensions (or modules to document with autodoc) are in another directory, | # If extensions (or modules to document with autodoc) are in another directory, | ||||||
| # add these directories to sys.path here. If the directory is relative to the | # add these directories to sys.path here. If the directory is relative to the | ||||||
| # documentation root, use os.path.abspath to make it absolute, like shown here. | # documentation root, use os.path.abspath to make it absolute, like shown here. | ||||||
| @@ -48,6 +44,7 @@ copyright = u'2009, MongoEngine Authors' | |||||||
| # |version| and |release|, also used in various other places throughout the | # |version| and |release|, also used in various other places throughout the | ||||||
| # built documents. | # built documents. | ||||||
| # | # | ||||||
|  | import mongoengine | ||||||
| # The short X.Y version. | # The short X.Y version. | ||||||
| version = mongoengine.get_version() | version = mongoengine.get_version() | ||||||
| # The full version, including alpha/beta/rc tags. | # The full version, including alpha/beta/rc tags. | ||||||
| @@ -100,12 +97,10 @@ html_theme = 'sphinx_rtd_theme' | |||||||
| # Theme options are theme-specific and customize the look and feel of a theme | # Theme options are theme-specific and customize the look and feel of a theme | ||||||
| # further.  For a list of options available for each theme, see the | # further.  For a list of options available for each theme, see the | ||||||
| # documentation. | # documentation. | ||||||
| html_theme_options = { | #html_theme_options = {} | ||||||
|     'canonical_url': 'http://docs.mongoengine.org/en/latest/' |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # Add any paths that contain custom themes here, relative to this directory. | # Add any paths that contain custom themes here, relative to this directory. | ||||||
| html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] | html_theme_path = ['_themes'] | ||||||
|  |  | ||||||
| # The name for this set of Sphinx documents.  If None, it defaults to | # The name for this set of Sphinx documents.  If None, it defaults to | ||||||
| # "<project> v<release> documentation". | # "<project> v<release> documentation". | ||||||
| @@ -204,3 +199,7 @@ latex_documents = [ | |||||||
| #latex_use_modindex = True | #latex_use_modindex = True | ||||||
|  |  | ||||||
| autoclass_content = 'both' | autoclass_content = 'both' | ||||||
|  |  | ||||||
|  | html_theme_options = dict( | ||||||
|  |     canonical_url='http://docs.mongoengine.org/en/latest/' | ||||||
|  | ) | ||||||
|   | |||||||
| @@ -4,11 +4,9 @@ | |||||||
| Connecting to MongoDB | Connecting to MongoDB | ||||||
| ===================== | ===================== | ||||||
|  |  | ||||||
| Connections in MongoEngine are registered globally and are identified with aliases. | To connect to a running instance of :program:`mongod`, use the | ||||||
| If no `alias` is provided during the connection, it will use "default" as alias. | :func:`~mongoengine.connect` function. The first argument is the name of the | ||||||
|  | database to connect to:: | ||||||
| To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect` |  | ||||||
| function. The first argument is the name of the database to connect to:: |  | ||||||
|  |  | ||||||
|     from mongoengine import connect |     from mongoengine import connect | ||||||
|     connect('project1') |     connect('project1') | ||||||
| @@ -20,10 +18,10 @@ provide the :attr:`host` and :attr:`port` arguments to | |||||||
|  |  | ||||||
|     connect('project1', host='192.168.1.35', port=12345) |     connect('project1', host='192.168.1.35', port=12345) | ||||||
|  |  | ||||||
| If the database requires authentication, :attr:`username`, :attr:`password` | If the database requires authentication, :attr:`username` and :attr:`password` | ||||||
| and :attr:`authentication_source` arguments should be provided:: | arguments should be provided:: | ||||||
|  |  | ||||||
|     connect('project1', username='webapp', password='pwd123', authentication_source='admin') |     connect('project1', username='webapp', password='pwd123') | ||||||
|  |  | ||||||
| URI style connections are also supported -- just supply the URI as | URI style connections are also supported -- just supply the URI as | ||||||
| the :attr:`host` to | the :attr:`host` to | ||||||
| @@ -44,9 +42,6 @@ the :attr:`host` to | |||||||
|     will establish connection to ``production`` database using |     will establish connection to ``production`` database using | ||||||
|     ``admin`` username and ``qwerty`` password. |     ``admin`` username and ``qwerty`` password. | ||||||
|  |  | ||||||
| .. note:: Calling :func:`~mongoengine.connect` without argument will establish |  | ||||||
|     a connection to the "test" database by default |  | ||||||
|  |  | ||||||
| Replica Sets | Replica Sets | ||||||
| ============ | ============ | ||||||
|  |  | ||||||
| @@ -76,61 +71,28 @@ is used. | |||||||
| In the background this uses :func:`~mongoengine.register_connection` to | In the background this uses :func:`~mongoengine.register_connection` to | ||||||
| store the data and you can register all aliases up front if required. | store the data and you can register all aliases up front if required. | ||||||
|  |  | ||||||
| Documents defined in different database | Individual documents can also support multiple databases by providing a | ||||||
| --------------------------------------- |  | ||||||
| Individual documents can be attached to different databases by providing a |  | ||||||
| `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` | `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` | ||||||
| objects to point across databases and collections. Below is an example schema, | objects to point across databases and collections. Below is an example schema, | ||||||
| using 3 different databases to store data:: | using 3 different databases to store data:: | ||||||
|  |  | ||||||
|         connect(alias='user-db-alias', db='user-db') |  | ||||||
|         connect(alias='book-db-alias', db='book-db') |  | ||||||
|         connect(alias='users-books-db-alias', db='users-books-db') |  | ||||||
|          |  | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|             meta = {'db_alias': 'user-db-alias'} |             meta = {'db_alias': 'user-db'} | ||||||
|  |  | ||||||
|         class Book(Document): |         class Book(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|             meta = {'db_alias': 'book-db-alias'} |             meta = {'db_alias': 'book-db'} | ||||||
|  |  | ||||||
|         class AuthorBooks(Document): |         class AuthorBooks(Document): | ||||||
|             author = ReferenceField(User) |             author = ReferenceField(User) | ||||||
|             book = ReferenceField(Book) |             book = ReferenceField(Book) | ||||||
|  |  | ||||||
|             meta = {'db_alias': 'users-books-db-alias'} |             meta = {'db_alias': 'users-books-db'} | ||||||
|  |  | ||||||
|  |  | ||||||
| Disconnecting an existing connection |  | ||||||
| ------------------------------------ |  | ||||||
| The function :func:`~mongoengine.disconnect` can be used to |  | ||||||
| disconnect a particular connection. This can be used to change a |  | ||||||
| connection globally:: |  | ||||||
|  |  | ||||||
|         from mongoengine import connect, disconnect |  | ||||||
|         connect('a_db', alias='db1') |  | ||||||
|  |  | ||||||
|         class User(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             meta = {'db_alias': 'db1'} |  | ||||||
|  |  | ||||||
|         disconnect(alias='db1') |  | ||||||
|  |  | ||||||
|         connect('another_db', alias='db1') |  | ||||||
|  |  | ||||||
| .. note:: Calling :func:`~mongoengine.disconnect` without argument |  | ||||||
|     will disconnect the "default" connection |  | ||||||
|  |  | ||||||
| .. note:: Since connections gets registered globally, it is important |  | ||||||
|     to use the `disconnect` function from MongoEngine and not the |  | ||||||
|     `disconnect()` method of an existing connection (pymongo.MongoClient) |  | ||||||
|  |  | ||||||
| .. note:: :class:`~mongoengine.Document` are caching the pymongo collection. |  | ||||||
|     using `disconnect` ensures that it gets cleaned as well |  | ||||||
|  |  | ||||||
| Context Managers | Context Managers | ||||||
| ================ | ================ | ||||||
| Sometimes you may want to switch the database or collection to query against. | Sometimes you may want to switch the database or collection to query against. | ||||||
| @@ -157,7 +119,7 @@ access to the same User document across databases:: | |||||||
|  |  | ||||||
| Switch Collection | Switch Collection | ||||||
| ----------------- | ----------------- | ||||||
| The :func:`~mongoengine.context_managers.switch_collection` context manager | The :class:`~mongoengine.context_managers.switch_collection` context manager | ||||||
| allows you to change the collection for a given class allowing quick and easy | allows you to change the collection for a given class allowing quick and easy | ||||||
| access to the same Group document across collection:: | access to the same Group document across collection:: | ||||||
|  |  | ||||||
|   | |||||||
| @@ -22,7 +22,7 @@ objects** as class attributes to the document class:: | |||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|         date_modified = DateTimeField(default=datetime.datetime.utcnow) |         date_modified = DateTimeField(default=datetime.datetime.now) | ||||||
|  |  | ||||||
| As BSON (the binary format for storing data in mongodb) is order dependent, | As BSON (the binary format for storing data in mongodb) is order dependent, | ||||||
| documents are serialized based on their field order. | documents are serialized based on their field order. | ||||||
| @@ -80,16 +80,13 @@ are as follows: | |||||||
| * :class:`~mongoengine.fields.FloatField` | * :class:`~mongoengine.fields.FloatField` | ||||||
| * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | ||||||
| * :class:`~mongoengine.fields.GenericReferenceField` | * :class:`~mongoengine.fields.GenericReferenceField` | ||||||
| * :class:`~mongoengine.fields.GenericLazyReferenceField` |  | ||||||
| * :class:`~mongoengine.fields.GeoPointField` | * :class:`~mongoengine.fields.GeoPointField` | ||||||
| * :class:`~mongoengine.fields.ImageField` | * :class:`~mongoengine.fields.ImageField` | ||||||
| * :class:`~mongoengine.fields.IntField` | * :class:`~mongoengine.fields.IntField` | ||||||
| * :class:`~mongoengine.fields.ListField` | * :class:`~mongoengine.fields.ListField` | ||||||
| * :class:`~mongoengine.fields.LongField` |  | ||||||
| * :class:`~mongoengine.fields.MapField` | * :class:`~mongoengine.fields.MapField` | ||||||
| * :class:`~mongoengine.fields.ObjectIdField` | * :class:`~mongoengine.fields.ObjectIdField` | ||||||
| * :class:`~mongoengine.fields.ReferenceField` | * :class:`~mongoengine.fields.ReferenceField` | ||||||
| * :class:`~mongoengine.fields.LazyReferenceField` |  | ||||||
| * :class:`~mongoengine.fields.SequenceField` | * :class:`~mongoengine.fields.SequenceField` | ||||||
| * :class:`~mongoengine.fields.SortedListField` | * :class:`~mongoengine.fields.SortedListField` | ||||||
| * :class:`~mongoengine.fields.StringField` | * :class:`~mongoengine.fields.StringField` | ||||||
| @@ -156,7 +153,7 @@ arguments can be set on all fields: | |||||||
|     An iterable (e.g. list, tuple or set) of choices to which the value of this |     An iterable (e.g. list, tuple or set) of choices to which the value of this | ||||||
|     field should be limited. |     field should be limited. | ||||||
|  |  | ||||||
|     Can either be nested tuples of value (stored in mongo) and a |     Can be either be a nested tuples of value (stored in mongo) and a | ||||||
|     human readable key :: |     human readable key :: | ||||||
|  |  | ||||||
|         SIZE = (('S', 'Small'), |         SIZE = (('S', 'Small'), | ||||||
| @@ -176,21 +173,6 @@ arguments can be set on all fields: | |||||||
|         class Shirt(Document): |         class Shirt(Document): | ||||||
|             size = StringField(max_length=3, choices=SIZE) |             size = StringField(max_length=3, choices=SIZE) | ||||||
|  |  | ||||||
| :attr:`validation` (Optional) |  | ||||||
|     A callable to validate the value of the field. |  | ||||||
|     The callable takes the value as parameter and should raise a ValidationError |  | ||||||
|     if validation fails |  | ||||||
|  |  | ||||||
|     e.g :: |  | ||||||
|  |  | ||||||
|         def _not_empty(val): |  | ||||||
|             if not val: |  | ||||||
|                 raise ValidationError('value can not be empty') |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField(validation=_not_empty) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`**kwargs` (Optional) | :attr:`**kwargs` (Optional) | ||||||
|     You can supply additional metadata as arbitrary additional keyword |     You can supply additional metadata as arbitrary additional keyword | ||||||
|     arguments.  You can not override existing attributes, however.  Common |     arguments.  You can not override existing attributes, however.  Common | ||||||
| @@ -242,7 +224,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate | |||||||
|         user = ReferenceField(User) |         user = ReferenceField(User) | ||||||
|         answers = DictField() |         answers = DictField() | ||||||
|  |  | ||||||
|     survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user) |     survey_response = SurveyResponse(date=datetime.now(), user=request.user) | ||||||
|     response_form = ResponseForm(request.POST) |     response_form = ResponseForm(request.POST) | ||||||
|     survey_response.answers = response_form.cleaned_data() |     survey_response.answers = response_form.cleaned_data() | ||||||
|     survey_response.save() |     survey_response.save() | ||||||
| @@ -508,9 +490,7 @@ the field name with a **#**:: | |||||||
|             ] |             ] | ||||||
|         } |         } | ||||||
|  |  | ||||||
| If a dictionary is passed then additional options become available. Valid options include, | If a dictionary is passed then the following options are available: | ||||||
| but are not limited to: |  | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`fields` (Default: None) | :attr:`fields` (Default: None) | ||||||
|     The fields to index. Specified in the same format as described above. |     The fields to index. Specified in the same format as described above. | ||||||
| @@ -531,15 +511,8 @@ but are not limited to: | |||||||
|     Allows you to automatically expire data from a collection by setting the |     Allows you to automatically expire data from a collection by setting the | ||||||
|     time in seconds to expire the a field. |     time in seconds to expire the a field. | ||||||
|  |  | ||||||
| :attr:`name` (Optional) |  | ||||||
|     Allows you to specify a name for the index |  | ||||||
|  |  | ||||||
| :attr:`collation` (Optional) |  | ||||||
|     Allows to create case insensitive indexes (MongoDB v3.4+ only) |  | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|     Additional options are forwarded as **kwargs to pymongo's create_index method. |  | ||||||
|     Inheritance adds extra fields indices see: :ref:`document-inheritance`. |     Inheritance adds extra fields indices see: :ref:`document-inheritance`. | ||||||
|  |  | ||||||
| Global index default options | Global index default options | ||||||
| @@ -551,16 +524,15 @@ There are a few top level defaults for all indexes that can be set:: | |||||||
|         title = StringField() |         title = StringField() | ||||||
|         rating = StringField() |         rating = StringField() | ||||||
|         meta = { |         meta = { | ||||||
|             'index_opts': {}, |             'index_options': {}, | ||||||
|             'index_background': True, |             'index_background': True, | ||||||
|             'index_cls': False, |  | ||||||
|             'auto_create_index': True, |  | ||||||
|             'index_drop_dups': True, |             'index_drop_dups': True, | ||||||
|  |             'index_cls': False | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`index_opts` (Optional) | :attr:`index_options` (Optional) | ||||||
|     Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_ |     Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_ | ||||||
|  |  | ||||||
| :attr:`index_background` (Optional) | :attr:`index_background` (Optional) | ||||||
|     Set the default value for if an index should be indexed in the background |     Set the default value for if an index should be indexed in the background | ||||||
| @@ -568,15 +540,10 @@ There are a few top level defaults for all indexes that can be set:: | |||||||
| :attr:`index_cls` (Optional) | :attr:`index_cls` (Optional) | ||||||
|     A way to turn off a specific index for _cls. |     A way to turn off a specific index for _cls. | ||||||
|  |  | ||||||
| :attr:`auto_create_index` (Optional) |  | ||||||
|     When this is True (default), MongoEngine will ensure that the correct |  | ||||||
|     indexes exist in MongoDB each time a command is run. This can be disabled |  | ||||||
|     in systems where indexes are managed separately. Disabling this will improve |  | ||||||
|     performance. |  | ||||||
|  |  | ||||||
| :attr:`index_drop_dups` (Optional) | :attr:`index_drop_dups` (Optional) | ||||||
|     Set the default value for if an index should drop duplicates |     Set the default value for if an index should drop duplicates | ||||||
|     Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning |  | ||||||
|  | .. note:: Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning | ||||||
|     and has no effect |     and has no effect | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -651,7 +618,7 @@ collection after a given period. See the official | |||||||
| documentation for more information.  A common usecase might be session data:: | documentation for more information.  A common usecase might be session data:: | ||||||
|  |  | ||||||
|     class Session(Document): |     class Session(Document): | ||||||
|         created = DateTimeField(default=datetime.utcnow) |         created = DateTimeField(default=datetime.now) | ||||||
|         meta = { |         meta = { | ||||||
|             'indexes': [ |             'indexes': [ | ||||||
|                 {'fields': ['created'], 'expireAfterSeconds': 3600} |                 {'fields': ['created'], 'expireAfterSeconds': 3600} | ||||||
| @@ -714,16 +681,11 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
| Shard keys | Shard keys | ||||||
| ========== | ========== | ||||||
|  |  | ||||||
| If your collection is sharded by multiple keys, then you can improve shard | If your collection is sharded, then you need to specify the shard key as a tuple, | ||||||
| routing (and thus the performance of your application) by specifying the shard | using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. | ||||||
| key, using the :attr:`shard_key` attribute of | This ensures that the shard key is sent with the query when calling the | ||||||
| :attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple. | :meth:`~mongoengine.document.Document.save` or | ||||||
|  | :meth:`~mongoengine.document.Document.update` method on an existing | ||||||
| This ensures that the full shard key is sent with the query when calling |  | ||||||
| methods such as :meth:`~mongoengine.document.Document.save`, |  | ||||||
| :meth:`~mongoengine.document.Document.update`, |  | ||||||
| :meth:`~mongoengine.document.Document.modify`, or |  | ||||||
| :meth:`~mongoengine.document.Document.delete` on an existing |  | ||||||
| :class:`~mongoengine.Document` instance:: | :class:`~mongoengine.Document` instance:: | ||||||
|  |  | ||||||
|     class LogEntry(Document): |     class LogEntry(Document): | ||||||
| @@ -733,8 +695,7 @@ methods such as :meth:`~mongoengine.document.Document.save`, | |||||||
|         data = StringField() |         data = StringField() | ||||||
|  |  | ||||||
|         meta = { |         meta = { | ||||||
|             'shard_key': ('machine', 'timestamp'), |             'shard_key': ('machine', 'timestamp',) | ||||||
|             'indexes': ('machine', 'timestamp'), |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
| .. _document-inheritance: | .. _document-inheritance: | ||||||
| @@ -764,9 +725,6 @@ document.:: | |||||||
| .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | ||||||
|           to False, meaning you must set it to True to use inheritance. |           to False, meaning you must set it to True to use inheritance. | ||||||
|  |  | ||||||
|           Setting :attr:`allow_inheritance` to True should also be used in |  | ||||||
|           :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it |  | ||||||
|  |  | ||||||
| Working with existing data | Working with existing data | ||||||
| -------------------------- | -------------------------- | ||||||
| As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | ||||||
|   | |||||||
| @@ -57,8 +57,7 @@ document values for example:: | |||||||
|  |  | ||||||
|         def clean(self): |         def clean(self): | ||||||
|             """Ensures that only published essays have a `pub_date` and |             """Ensures that only published essays have a `pub_date` and | ||||||
|             automatically sets `pub_date` if essay is published and `pub_date` |             automatically sets the pub_date if published and not set""" | ||||||
|             is not set""" |  | ||||||
|             if self.status == 'Draft' and self.pub_date is not None: |             if self.status == 'Draft' and self.pub_date is not None: | ||||||
|                 msg = 'Draft entries should not have a publication date.' |                 msg = 'Draft entries should not have a publication date.' | ||||||
|                 raise ValidationError(msg) |                 raise ValidationError(msg) | ||||||
|   | |||||||
| @@ -53,8 +53,7 @@ Deletion | |||||||
|  |  | ||||||
| Deleting stored files is achieved with the :func:`delete` method:: | Deleting stored files is achieved with the :func:`delete` method:: | ||||||
|  |  | ||||||
|     marmot.photo.delete()    # Deletes the GridFS document |     marmot.photo.delete() | ||||||
|     marmot.save()            # Saves the GridFS reference (being None) contained in the marmot instance |  | ||||||
|  |  | ||||||
| .. warning:: | .. warning:: | ||||||
|  |  | ||||||
| @@ -72,5 +71,4 @@ Files can be replaced with the :func:`replace` method. This works just like | |||||||
| the :func:`put` method so even metadata can (and should) be replaced:: | the :func:`put` method so even metadata can (and should) be replaced:: | ||||||
|  |  | ||||||
|     another_marmot = open('another_marmot.png', 'rb') |     another_marmot = open('another_marmot.png', 'rb') | ||||||
|     marmot.photo.replace(another_marmot, content_type='image/png')  # Replaces the GridFS document |     marmot.photo.replace(another_marmot, content_type='image/png') | ||||||
|     marmot.save()                                                   # Replaces the GridFS reference contained in marmot instance |  | ||||||
|   | |||||||
| @@ -19,30 +19,3 @@ or with an alias: | |||||||
|  |  | ||||||
|     connect('mongoenginetest', host='mongomock://localhost', alias='testdb') |     connect('mongoenginetest', host='mongomock://localhost', alias='testdb') | ||||||
|     conn = get_connection('testdb') |     conn = get_connection('testdb') | ||||||
|  |  | ||||||
| Example of test file: |  | ||||||
| -------- |  | ||||||
| .. code-block:: python |  | ||||||
|  |  | ||||||
|     import unittest |  | ||||||
|     from mongoengine import connect, disconnect |  | ||||||
|  |  | ||||||
|     class Person(Document): |  | ||||||
|         name = StringField() |  | ||||||
|  |  | ||||||
|     class TestPerson(unittest.TestCase): |  | ||||||
|  |  | ||||||
|         @classmethod |  | ||||||
|         def setUpClass(cls): |  | ||||||
|             connect('mongoenginetest', host='mongomock://localhost') |  | ||||||
|  |  | ||||||
|         @classmethod |  | ||||||
|         def tearDownClass(cls): |  | ||||||
|            disconnect() |  | ||||||
|  |  | ||||||
|         def test_thing(self): |  | ||||||
|             pers = Person(name='John') |  | ||||||
|             pers.save() |  | ||||||
|  |  | ||||||
|             fresh_pers = Person.objects().first() |  | ||||||
|             self.assertEqual(fresh_pers.name, 'John') |  | ||||||
|   | |||||||
| @@ -64,7 +64,7 @@ Available operators are as follows: | |||||||
| * ``gt`` -- greater than | * ``gt`` -- greater than | ||||||
| * ``gte`` -- greater than or equal to | * ``gte`` -- greater than or equal to | ||||||
| * ``not`` -- negate a standard check, may be used before other operators (e.g. | * ``not`` -- negate a standard check, may be used before other operators (e.g. | ||||||
|   ``Q(age__not__mod=(5, 0))``) |   ``Q(age__not__mod=5)``) | ||||||
| * ``in`` -- value is in list (a list of values should be provided) | * ``in`` -- value is in list (a list of values should be provided) | ||||||
| * ``nin`` -- value is not in list (a list of values should be provided) | * ``nin`` -- value is not in list (a list of values should be provided) | ||||||
| * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | ||||||
| @@ -456,14 +456,14 @@ data. To turn off dereferencing of the results of a query use | |||||||
| :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | ||||||
|  |  | ||||||
|     post = Post.objects.no_dereference().first() |     post = Post.objects.no_dereference().first() | ||||||
|     assert(isinstance(post.author, DBRef)) |     assert(isinstance(post.author, ObjectId)) | ||||||
|  |  | ||||||
| You can also turn off all dereferencing for a fixed period by using the | You can also turn off all dereferencing for a fixed period by using the | ||||||
| :class:`~mongoengine.context_managers.no_dereference` context manager:: | :class:`~mongoengine.context_managers.no_dereference` context manager:: | ||||||
|  |  | ||||||
|     with no_dereference(Post) as Post: |     with no_dereference(Post) as Post: | ||||||
|         post = Post.objects.first() |         post = Post.objects.first() | ||||||
|         assert(isinstance(post.author, DBRef)) |         assert(isinstance(post.author, ObjectId)) | ||||||
|  |  | ||||||
|     # Outside the context manager dereferencing occurs. |     # Outside the context manager dereferencing occurs. | ||||||
|     assert(isinstance(post.author, User)) |     assert(isinstance(post.author, User)) | ||||||
| @@ -565,15 +565,6 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: | |||||||
|     >>> post.tags |     >>> post.tags | ||||||
|     ['database', 'mongodb'] |     ['database', 'mongodb'] | ||||||
|  |  | ||||||
| From MongoDB version 2.6, push operator supports $position value which allows |  | ||||||
| to push values with index. |  | ||||||
|     >>> post = BlogPost(title="Test", tags=["mongo"]) |  | ||||||
|     >>> post.save() |  | ||||||
|     >>> post.update(push__tags__0=["database", "code"]) |  | ||||||
|     >>> post.reload() |  | ||||||
|     >>> post.tags |  | ||||||
|     ['database', 'code', 'mongo'] |  | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|     Currently only top level lists are handled, future versions of mongodb / |     Currently only top level lists are handled, future versions of mongodb / | ||||||
|     pymongo plan to support nested positional operators.  See `The $ positional |     pymongo plan to support nested positional operators.  See `The $ positional | ||||||
|   | |||||||
| @@ -43,10 +43,10 @@ Available signals include: | |||||||
|   has taken place but before saving. |   has taken place but before saving. | ||||||
|  |  | ||||||
| `post_save` | `post_save` | ||||||
|   Called within :meth:`~mongoengine.Document.save` after most actions |   Called within :meth:`~mongoengine.Document.save` after all actions | ||||||
|   (validation, insert/update, and cascades, but not clearing dirty flags) have  |   (validation, insert/update, cascades, clearing dirty flags) have completed | ||||||
|   completed successfully.  Passed the additional boolean keyword argument  |   successfully.  Passed the additional boolean keyword argument `created` to | ||||||
|   `created` to indicate if the save was an insert or an update. |   indicate if the save was an insert or an update. | ||||||
|  |  | ||||||
| `pre_delete` | `pre_delete` | ||||||
|   Called within :meth:`~mongoengine.Document.delete` prior to |   Called within :meth:`~mongoengine.Document.delete` prior to | ||||||
| @@ -113,10 +113,6 @@ handlers within your subclass:: | |||||||
|     signals.pre_save.connect(Author.pre_save, sender=Author) |     signals.pre_save.connect(Author.pre_save, sender=Author) | ||||||
|     signals.post_save.connect(Author.post_save, sender=Author) |     signals.post_save.connect(Author.post_save, sender=Author) | ||||||
|  |  | ||||||
| .. warning:: |  | ||||||
|  |  | ||||||
|     Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently. |  | ||||||
|  |  | ||||||
| Finally, you can also use this small decorator to quickly create a number of | Finally, you can also use this small decorator to quickly create a number of | ||||||
| signals and attach them to your :class:`~mongoengine.Document` or | signals and attach them to your :class:`~mongoengine.Document` or | ||||||
| :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | ||||||
|   | |||||||
| @@ -48,4 +48,4 @@ Ordering by text score | |||||||
|  |  | ||||||
| :: | :: | ||||||
|  |  | ||||||
|   objects = News.objects.search_text('mongo').order_by('$text_score') |   objects = News.objects.search('mongo').order_by('$text_score') | ||||||
|   | |||||||
| @@ -86,7 +86,7 @@ of them stand out as particularly intuitive solutions. | |||||||
| Posts | Posts | ||||||
| ^^^^^ | ^^^^^ | ||||||
|  |  | ||||||
| Happily MongoDB *isn't* a relational database, so we're not going to do it that | Happily mongoDB *isn't* a relational database, so we're not going to do it that | ||||||
| way. As it turns out, we can use MongoDB's schemaless nature to provide us with | way. As it turns out, we can use MongoDB's schemaless nature to provide us with | ||||||
| a much nicer solution. We will store all of the posts in *one collection* and | a much nicer solution. We will store all of the posts in *one collection* and | ||||||
| each post type will only store the fields it needs. If we later want to add | each post type will only store the fields it needs. If we later want to add | ||||||
| @@ -153,7 +153,7 @@ post. This works, but there is no real reason to be storing the comments | |||||||
| separately from their associated posts, other than to work around the | separately from their associated posts, other than to work around the | ||||||
| relational model. Using MongoDB we can store the comments as a list of | relational model. Using MongoDB we can store the comments as a list of | ||||||
| *embedded documents* directly on a post document. An embedded document should | *embedded documents* directly on a post document. An embedded document should | ||||||
| be treated no differently than a regular document; it just doesn't have its own | be treated no differently that a regular document; it just doesn't have its own | ||||||
| collection in the database. Using MongoEngine, we can define the structure of | collection in the database. Using MongoEngine, we can define the structure of | ||||||
| embedded documents, along with utility methods, in exactly the same way we do | embedded documents, along with utility methods, in exactly the same way we do | ||||||
| with regular documents:: | with regular documents:: | ||||||
| @@ -206,10 +206,7 @@ object:: | |||||||
|         ross.last_name = 'Lawley' |         ross.last_name = 'Lawley' | ||||||
|         ross.save() |         ross.save() | ||||||
|  |  | ||||||
| Assign another user to a variable called ``john``, just like we did above with | Now that we've got our user in the database, let's add a couple of posts:: | ||||||
| ``ross``. |  | ||||||
|  |  | ||||||
| Now that we've got our users in the database, let's add a couple of posts:: |  | ||||||
|  |  | ||||||
|     post1 = TextPost(title='Fun with MongoEngine', author=john) |     post1 = TextPost(title='Fun with MongoEngine', author=john) | ||||||
|     post1.content = 'Took a look at MongoEngine today, looks pretty cool.' |     post1.content = 'Took a look at MongoEngine today, looks pretty cool.' | ||||||
|   | |||||||
| @@ -6,37 +6,6 @@ Development | |||||||
| *********** | *********** | ||||||
| (Fill this out whenever you introduce breaking changes to MongoEngine) | (Fill this out whenever you introduce breaking changes to MongoEngine) | ||||||
|  |  | ||||||
| URLField's constructor no longer takes `verify_exists` |  | ||||||
|  |  | ||||||
| 0.15.0 |  | ||||||
| ****** |  | ||||||
|  |  | ||||||
| 0.14.0 |  | ||||||
| ****** |  | ||||||
| This release includes a few bug fixes and a significant code cleanup. The most |  | ||||||
| important change is that `QuerySet.as_pymongo` no longer supports a |  | ||||||
| `coerce_types` mode. If you used it in the past, a) please let us know of your |  | ||||||
| use case, b) you'll need to override `as_pymongo` to get the desired outcome. |  | ||||||
|  |  | ||||||
| This release also makes the EmbeddedDocument not hashable by default. If you |  | ||||||
| use embedded documents in sets or dictionaries, you might have to override |  | ||||||
| `__hash__` and implement a hashing logic specific to your use case. See #1528 |  | ||||||
| for the reason behind this change. |  | ||||||
|  |  | ||||||
| 0.13.0 |  | ||||||
| ****** |  | ||||||
| This release adds Unicode support to the `EmailField` and changes its |  | ||||||
| structure significantly. Previously, email addresses containing Unicode |  | ||||||
| characters didn't work at all. Starting with v0.13.0, domains with Unicode |  | ||||||
| characters are supported out of the box, meaning some emails that previously |  | ||||||
| didn't pass validation now do. Make sure the rest of your application can |  | ||||||
| accept such email addresses. Additionally, if you subclassed the `EmailField` |  | ||||||
| in your application and overrode `EmailField.EMAIL_REGEX`, you will have to |  | ||||||
| adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`, |  | ||||||
| and potentially `EmailField.UTF8_USER_REGEX`. |  | ||||||
|  |  | ||||||
| 0.12.0 |  | ||||||
| ****** |  | ||||||
| This release includes various fixes for the `BaseQuerySet` methods and how they | This release includes various fixes for the `BaseQuerySet` methods and how they | ||||||
| are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size | are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size | ||||||
| to an already-existing queryset wouldn't modify the underlying PyMongo cursor. | to an already-existing queryset wouldn't modify the underlying PyMongo cursor. | ||||||
|   | |||||||
| @@ -23,13 +23,12 @@ __all__ = (list(document.__all__) + list(fields.__all__) + | |||||||
|            list(signals.__all__) + list(errors.__all__)) |            list(signals.__all__) + list(errors.__all__)) | ||||||
|  |  | ||||||
|  |  | ||||||
| VERSION = (0, 18, 2) | VERSION = (0, 11, 0) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|     """Return the VERSION as a string. |     """Return the VERSION as a string, e.g. for VERSION == (0, 10, 7), | ||||||
|  |     return '0.10.7'. | ||||||
|     For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. |  | ||||||
|     """ |     """ | ||||||
|     return '.'.join(map(str, VERSION)) |     return '.'.join(map(str, VERSION)) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -15,7 +15,7 @@ __all__ = ( | |||||||
|     'UPDATE_OPERATORS', '_document_registry', 'get_document', |     'UPDATE_OPERATORS', '_document_registry', 'get_document', | ||||||
|  |  | ||||||
|     # datastructures |     # datastructures | ||||||
|     'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference', |     'BaseDict', 'BaseList', 'EmbeddedDocumentList', | ||||||
|  |  | ||||||
|     # document |     # document | ||||||
|     'BaseDocument', |     'BaseDocument', | ||||||
|   | |||||||
| @@ -3,23 +3,22 @@ from mongoengine.errors import NotRegistered | |||||||
| __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') | __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') | ||||||
|  |  | ||||||
|  |  | ||||||
| UPDATE_OPERATORS = {'set', 'unset', 'inc', 'dec', 'mul', | UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', | ||||||
|                     'pop', 'push', 'push_all', 'pull', |                         'push_all', 'pull', 'pull_all', 'add_to_set', | ||||||
|                     'pull_all', 'add_to_set', 'set_on_insert', |                         'set_on_insert', 'min', 'max', 'rename']) | ||||||
|                     'min', 'max', 'rename'} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| _document_registry = {} | _document_registry = {} | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_document(name): | def get_document(name): | ||||||
|     """Get a registered Document class by name.""" |     """Get a document class by name.""" | ||||||
|     doc = _document_registry.get(name, None) |     doc = _document_registry.get(name, None) | ||||||
|     if not doc: |     if not doc: | ||||||
|         # Possible old style name |         # Possible old style name | ||||||
|         single_end = name.split('.')[-1] |         single_end = name.split('.')[-1] | ||||||
|         compound_end = '.%s' % single_end |         compound_end = '.%s' % single_end | ||||||
|         possible_match = [k for k in _document_registry |         possible_match = [k for k in _document_registry.keys() | ||||||
|                           if k.endswith(compound_end) or k == single_end] |                           if k.endswith(compound_end) or k == single_end] | ||||||
|         if len(possible_match) == 1: |         if len(possible_match) == 1: | ||||||
|             doc = _document_registry.get(possible_match.pop(), None) |             doc = _document_registry.get(possible_match.pop(), None) | ||||||
| @@ -30,12 +29,3 @@ def get_document(name): | |||||||
|             been imported? |             been imported? | ||||||
|         """.strip() % name) |         """.strip() % name) | ||||||
|     return doc |     return doc | ||||||
|  |  | ||||||
|  |  | ||||||
| def _get_documents_by_db(connection_alias, default_connection_alias): |  | ||||||
|     """Get all registered Documents class attached to a given database""" |  | ||||||
|     def get_doc_alias(doc_cls): |  | ||||||
|         return doc_cls._meta.get('db_alias', default_connection_alias) |  | ||||||
|  |  | ||||||
|     return [doc_cls for doc_cls in _document_registry.values() |  | ||||||
|             if get_doc_alias(doc_cls) == connection_alias] |  | ||||||
|   | |||||||
| @@ -1,33 +1,12 @@ | |||||||
|  | import itertools | ||||||
| import weakref | import weakref | ||||||
|  |  | ||||||
| from bson import DBRef |  | ||||||
| import six | import six | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||||
|  |  | ||||||
| __all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference') | __all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList') | ||||||
|  |  | ||||||
|  |  | ||||||
| def mark_as_changed_wrapper(parent_method): |  | ||||||
|     """Decorator that ensures _mark_as_changed method gets called.""" |  | ||||||
|     def wrapper(self, *args, **kwargs): |  | ||||||
|         # Can't use super() in the decorator. |  | ||||||
|         result = parent_method(self, *args, **kwargs) |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return result |  | ||||||
|     return wrapper |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def mark_key_as_changed_wrapper(parent_method): |  | ||||||
|     """Decorator that ensures _mark_as_changed method gets called with the key argument""" |  | ||||||
|     def wrapper(self, key, *args, **kwargs): |  | ||||||
|         # Can't use super() in the decorator. |  | ||||||
|         result = parent_method(self, key, *args, **kwargs) |  | ||||||
|         self._mark_as_changed(key) |  | ||||||
|         return result |  | ||||||
|     return wrapper |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseDict(dict): | class BaseDict(dict): | ||||||
| @@ -38,36 +17,46 @@ class BaseDict(dict): | |||||||
|     _name = None |     _name = None | ||||||
|  |  | ||||||
|     def __init__(self, dict_items, instance, name): |     def __init__(self, dict_items, instance, name): | ||||||
|         BaseDocument = _import_class('BaseDocument') |         Document = _import_class('Document') | ||||||
|  |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|  |  | ||||||
|         if isinstance(instance, BaseDocument): |         if isinstance(instance, (Document, EmbeddedDocument)): | ||||||
|             self._instance = weakref.proxy(instance) |             self._instance = weakref.proxy(instance) | ||||||
|         self._name = name |         self._name = name | ||||||
|         super(BaseDict, self).__init__(dict_items) |         super(BaseDict, self).__init__(dict_items) | ||||||
|  |  | ||||||
|     def get(self, key, default=None): |     def __getitem__(self, key, *args, **kwargs): | ||||||
|         # get does not use __getitem__ by default so we must override it as well |  | ||||||
|         try: |  | ||||||
|             return self.__getitem__(key) |  | ||||||
|         except KeyError: |  | ||||||
|             return default |  | ||||||
|  |  | ||||||
|     def __getitem__(self, key): |  | ||||||
|         value = super(BaseDict, self).__getitem__(key) |         value = super(BaseDict, self).__getitem__(key) | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: |         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): |         elif not isinstance(value, BaseDict) and isinstance(value, dict): | ||||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) |             value = BaseDict(value, None, '%s.%s' % (self._name, key)) | ||||||
|             super(BaseDict, self).__setitem__(key, value) |             super(BaseDict, self).__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): |         elif not isinstance(value, BaseList) and isinstance(value, list): | ||||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) |             value = BaseList(value, None, '%s.%s' % (self._name, key)) | ||||||
|             super(BaseDict, self).__setitem__(key, value) |             super(BaseDict, self).__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|  |     def __setitem__(self, key, value, *args, **kwargs): | ||||||
|  |         self._mark_as_changed(key) | ||||||
|  |         return super(BaseDict, self).__setitem__(key, value) | ||||||
|  |  | ||||||
|  |     def __delete__(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseDict, self).__delete__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |     def __delitem__(self, key, *args, **kwargs): | ||||||
|  |         self._mark_as_changed(key) | ||||||
|  |         return super(BaseDict, self).__delitem__(key) | ||||||
|  |  | ||||||
|  |     def __delattr__(self, key, *args, **kwargs): | ||||||
|  |         self._mark_as_changed(key) | ||||||
|  |         return super(BaseDict, self).__delattr__(key) | ||||||
|  |  | ||||||
|     def __getstate__(self): |     def __getstate__(self): | ||||||
|         self.instance = None |         self.instance = None | ||||||
|         self._dereferenced = False |         self._dereferenced = False | ||||||
| @@ -77,14 +66,25 @@ class BaseDict(dict): | |||||||
|         self = state |         self = state | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__) |     def clear(self, *args, **kwargs): | ||||||
|     __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__) |         self._mark_as_changed() | ||||||
|     __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__) |         return super(BaseDict, self).clear() | ||||||
|     pop = mark_as_changed_wrapper(dict.pop) |  | ||||||
|     clear = mark_as_changed_wrapper(dict.clear) |     def pop(self, *args, **kwargs): | ||||||
|     update = mark_as_changed_wrapper(dict.update) |         self._mark_as_changed() | ||||||
|     popitem = mark_as_changed_wrapper(dict.popitem) |         return super(BaseDict, self).pop(*args, **kwargs) | ||||||
|     setdefault = mark_as_changed_wrapper(dict.setdefault) |  | ||||||
|  |     def popitem(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseDict, self).popitem() | ||||||
|  |  | ||||||
|  |     def setdefault(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseDict, self).setdefault(*args, **kwargs) | ||||||
|  |  | ||||||
|  |     def update(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseDict, self).update(*args, **kwargs) | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, '_mark_as_changed'): |         if hasattr(self._instance, '_mark_as_changed'): | ||||||
| @@ -102,39 +102,52 @@ class BaseList(list): | |||||||
|     _name = None |     _name = None | ||||||
|  |  | ||||||
|     def __init__(self, list_items, instance, name): |     def __init__(self, list_items, instance, name): | ||||||
|         BaseDocument = _import_class('BaseDocument') |         Document = _import_class('Document') | ||||||
|  |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|  |  | ||||||
|         if isinstance(instance, BaseDocument): |         if isinstance(instance, (Document, EmbeddedDocument)): | ||||||
|             self._instance = weakref.proxy(instance) |             self._instance = weakref.proxy(instance) | ||||||
|         self._name = name |         self._name = name | ||||||
|         super(BaseList, self).__init__(list_items) |         super(BaseList, self).__init__(list_items) | ||||||
|  |  | ||||||
|     def __getitem__(self, key): |     def __getitem__(self, key, *args, **kwargs): | ||||||
|         value = super(BaseList, self).__getitem__(key) |         value = super(BaseList, self).__getitem__(key) | ||||||
|  |  | ||||||
|         if isinstance(key, slice): |  | ||||||
|             # When receiving a slice operator, we don't convert the structure and bind |  | ||||||
|             # to parent's instance. This is buggy for now but would require more work to be handled properly |  | ||||||
|             return value |  | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: |         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): |         elif not isinstance(value, BaseDict) and isinstance(value, dict): | ||||||
|             # Replace dict by BaseDict |  | ||||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) |             value = BaseDict(value, None, '%s.%s' % (self._name, key)) | ||||||
|             super(BaseList, self).__setitem__(key, value) |             super(BaseList, self).__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): |         elif not isinstance(value, BaseList) and isinstance(value, list): | ||||||
|             # Replace list by BaseList |  | ||||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) |             value = BaseList(value, None, '%s.%s' % (self._name, key)) | ||||||
|             super(BaseList, self).__setitem__(key, value) |             super(BaseList, self).__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def __iter__(self): |     def __iter__(self): | ||||||
|         for v in super(BaseList, self).__iter__(): |         for i in xrange(self.__len__()): | ||||||
|             yield v |             yield self[i] | ||||||
|  |  | ||||||
|  |     def __setitem__(self, key, value, *args, **kwargs): | ||||||
|  |         if isinstance(key, slice): | ||||||
|  |             self._mark_as_changed() | ||||||
|  |         else: | ||||||
|  |             self._mark_as_changed(key) | ||||||
|  |         return super(BaseList, self).__setitem__(key, value) | ||||||
|  |  | ||||||
|  |     def __delitem__(self, key, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseList, self).__delitem__(key) | ||||||
|  |  | ||||||
|  |     def __setslice__(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseList, self).__setslice__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |     def __delslice__(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseList, self).__delslice__(*args, **kwargs) | ||||||
|  |  | ||||||
|     def __getstate__(self): |     def __getstate__(self): | ||||||
|         self.instance = None |         self.instance = None | ||||||
| @@ -145,40 +158,41 @@ class BaseList(list): | |||||||
|         self = state |         self = state | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __setitem__(self, key, value): |     def __iadd__(self, other): | ||||||
|         changed_key = key |         self._mark_as_changed() | ||||||
|         if isinstance(key, slice): |         return super(BaseList, self).__iadd__(other) | ||||||
|             # In case of slice, we don't bother to identify the exact elements being updated |  | ||||||
|             # instead, we simply marks the whole list as changed |  | ||||||
|             changed_key = None |  | ||||||
|  |  | ||||||
|         result = super(BaseList, self).__setitem__(key, value) |     def __imul__(self, other): | ||||||
|         self._mark_as_changed(changed_key) |         self._mark_as_changed() | ||||||
|         return result |         return super(BaseList, self).__imul__(other) | ||||||
|  |  | ||||||
|     append = mark_as_changed_wrapper(list.append) |     def append(self, *args, **kwargs): | ||||||
|     extend = mark_as_changed_wrapper(list.extend) |         self._mark_as_changed() | ||||||
|     insert = mark_as_changed_wrapper(list.insert) |         return super(BaseList, self).append(*args, **kwargs) | ||||||
|     pop = mark_as_changed_wrapper(list.pop) |  | ||||||
|     remove = mark_as_changed_wrapper(list.remove) |  | ||||||
|     reverse = mark_as_changed_wrapper(list.reverse) |  | ||||||
|     sort = mark_as_changed_wrapper(list.sort) |  | ||||||
|     __delitem__ = mark_as_changed_wrapper(list.__delitem__) |  | ||||||
|     __iadd__ = mark_as_changed_wrapper(list.__iadd__) |  | ||||||
|     __imul__ = mark_as_changed_wrapper(list.__imul__) |  | ||||||
|  |  | ||||||
|     if six.PY2: |     def extend(self, *args, **kwargs): | ||||||
|         # Under py3 __setslice__, __delslice__ and __getslice__ |         self._mark_as_changed() | ||||||
|         # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter |         return super(BaseList, self).extend(*args, **kwargs) | ||||||
|         # so we mimic this under python 2 |  | ||||||
|         def __setslice__(self, i, j, sequence): |  | ||||||
|             return self.__setitem__(slice(i, j), sequence) |  | ||||||
|  |  | ||||||
|         def __delslice__(self, i, j): |     def insert(self, *args, **kwargs): | ||||||
|             return self.__delitem__(slice(i, j)) |         self._mark_as_changed() | ||||||
|  |         return super(BaseList, self).insert(*args, **kwargs) | ||||||
|  |  | ||||||
|         def __getslice__(self, i, j): |     def pop(self, *args, **kwargs): | ||||||
|             return self.__getitem__(slice(i, j)) |         self._mark_as_changed() | ||||||
|  |         return super(BaseList, self).pop(*args, **kwargs) | ||||||
|  |  | ||||||
|  |     def remove(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseList, self).remove(*args, **kwargs) | ||||||
|  |  | ||||||
|  |     def reverse(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseList, self).reverse() | ||||||
|  |  | ||||||
|  |     def sort(self, *args, **kwargs): | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return super(BaseList, self).sort(*args, **kwargs) | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, '_mark_as_changed'): |         if hasattr(self._instance, '_mark_as_changed'): | ||||||
| @@ -192,10 +206,6 @@ class BaseList(list): | |||||||
|  |  | ||||||
| class EmbeddedDocumentList(BaseList): | class EmbeddedDocumentList(BaseList): | ||||||
|  |  | ||||||
|     def __init__(self, list_items, instance, name): |  | ||||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) |  | ||||||
|         self._instance = instance |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def __match_all(cls, embedded_doc, kwargs): |     def __match_all(cls, embedded_doc, kwargs): | ||||||
|         """Return True if a given embedded doc matches all the filter |         """Return True if a given embedded doc matches all the filter | ||||||
| @@ -214,14 +224,15 @@ class EmbeddedDocumentList(BaseList): | |||||||
|             return embedded_docs |             return embedded_docs | ||||||
|         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)] |         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)] | ||||||
|  |  | ||||||
|  |     def __init__(self, list_items, instance, name): | ||||||
|  |         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) | ||||||
|  |         self._instance = instance | ||||||
|  |  | ||||||
|     def filter(self, **kwargs): |     def filter(self, **kwargs): | ||||||
|         """ |         """ | ||||||
|         Filters the list by only including embedded documents with the |         Filters the list by only including embedded documents with the | ||||||
|         given keyword arguments. |         given keyword arguments. | ||||||
|  |  | ||||||
|         This method only supports simple comparison (e.g: .filter(name='John Doe')) |  | ||||||
|         and does not support operators like __gte, __lte, __icontains like queryset.filter does |  | ||||||
|  |  | ||||||
|         :param kwargs: The keyword arguments corresponding to the fields to |         :param kwargs: The keyword arguments corresponding to the fields to | ||||||
|          filter on. *Multiple arguments are treated as if they are ANDed |          filter on. *Multiple arguments are treated as if they are ANDed | ||||||
|          together.* |          together.* | ||||||
| @@ -339,8 +350,7 @@ class EmbeddedDocumentList(BaseList): | |||||||
|  |  | ||||||
|     def update(self, **update): |     def update(self, **update): | ||||||
|         """ |         """ | ||||||
|         Updates the embedded documents with the given replacement values. This |         Updates the embedded documents with the given update values. | ||||||
|         function does not support mongoDB update operators such as ``inc__``. |  | ||||||
|  |  | ||||||
|         .. note:: |         .. note:: | ||||||
|             The embedded document changes are not automatically saved |             The embedded document changes are not automatically saved | ||||||
| @@ -362,11 +372,11 @@ class EmbeddedDocumentList(BaseList): | |||||||
|  |  | ||||||
| class StrictDict(object): | class StrictDict(object): | ||||||
|     __slots__ = () |     __slots__ = () | ||||||
|     _special_fields = {'get', 'pop', 'iteritems', 'items', 'keys', 'create'} |     _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create']) | ||||||
|     _classes = {} |     _classes = {} | ||||||
|  |  | ||||||
|     def __init__(self, **kwargs): |     def __init__(self, **kwargs): | ||||||
|         for k, v in iteritems(kwargs): |         for k, v in kwargs.iteritems(): | ||||||
|             setattr(self, k, v) |             setattr(self, k, v) | ||||||
|  |  | ||||||
|     def __getitem__(self, key): |     def __getitem__(self, key): | ||||||
| @@ -414,7 +424,7 @@ class StrictDict(object): | |||||||
|         return (key for key in self.__slots__ if hasattr(self, key)) |         return (key for key in self.__slots__ if hasattr(self, key)) | ||||||
|  |  | ||||||
|     def __len__(self): |     def __len__(self): | ||||||
|         return len(list(iteritems(self))) |         return len(list(self.iteritems())) | ||||||
|  |  | ||||||
|     def __eq__(self, other): |     def __eq__(self, other): | ||||||
|         return self.items() == other.items() |         return self.items() == other.items() | ||||||
| @@ -437,40 +447,40 @@ class StrictDict(object): | |||||||
|         return cls._classes[allowed_keys] |         return cls._classes[allowed_keys] | ||||||
|  |  | ||||||
|  |  | ||||||
| class LazyReference(DBRef): | class SemiStrictDict(StrictDict): | ||||||
|     __slots__ = ('_cached_doc', 'passthrough', 'document_type') |     __slots__ = ('_extras', ) | ||||||
|  |     _classes = {} | ||||||
|  |  | ||||||
|     def fetch(self, force=False): |     def __getattr__(self, attr): | ||||||
|         if not self._cached_doc or force: |  | ||||||
|             self._cached_doc = self.document_type.objects.get(pk=self.pk) |  | ||||||
|             if not self._cached_doc: |  | ||||||
|                 raise DoesNotExist('Trying to dereference unknown document %s' % (self)) |  | ||||||
|         return self._cached_doc |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def pk(self): |  | ||||||
|         return self.id |  | ||||||
|  |  | ||||||
|     def __init__(self, document_type, pk, cached_doc=None, passthrough=False): |  | ||||||
|         self.document_type = document_type |  | ||||||
|         self._cached_doc = cached_doc |  | ||||||
|         self.passthrough = passthrough |  | ||||||
|         super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk) |  | ||||||
|  |  | ||||||
|     def __getitem__(self, name): |  | ||||||
|         if not self.passthrough: |  | ||||||
|             raise KeyError() |  | ||||||
|         document = self.fetch() |  | ||||||
|         return document[name] |  | ||||||
|  |  | ||||||
|     def __getattr__(self, name): |  | ||||||
|         if not object.__getattribute__(self, 'passthrough'): |  | ||||||
|             raise AttributeError() |  | ||||||
|         document = self.fetch() |  | ||||||
|         try: |         try: | ||||||
|             return document[name] |             super(SemiStrictDict, self).__getattr__(attr) | ||||||
|         except KeyError: |         except AttributeError: | ||||||
|             raise AttributeError() |             try: | ||||||
|  |                 return self.__getattribute__('_extras')[attr] | ||||||
|  |             except KeyError as e: | ||||||
|  |                 raise AttributeError(e) | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __setattr__(self, attr, value): | ||||||
|         return "<LazyReference(%s, %r)>" % (self.document_type, self.pk) |         try: | ||||||
|  |             super(SemiStrictDict, self).__setattr__(attr, value) | ||||||
|  |         except AttributeError: | ||||||
|  |             try: | ||||||
|  |                 self._extras[attr] = value | ||||||
|  |             except AttributeError: | ||||||
|  |                 self._extras = {attr: value} | ||||||
|  |  | ||||||
|  |     def __delattr__(self, attr): | ||||||
|  |         try: | ||||||
|  |             super(SemiStrictDict, self).__delattr__(attr) | ||||||
|  |         except AttributeError: | ||||||
|  |             try: | ||||||
|  |                 del self._extras[attr] | ||||||
|  |             except KeyError as e: | ||||||
|  |                 raise AttributeError(e) | ||||||
|  |  | ||||||
|  |     def __iter__(self): | ||||||
|  |         try: | ||||||
|  |             extras_iter = iter(self.__getattribute__('_extras')) | ||||||
|  |         except AttributeError: | ||||||
|  |             extras_iter = () | ||||||
|  |         return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter) | ||||||
|   | |||||||
| @@ -1,40 +1,30 @@ | |||||||
| import copy | import copy | ||||||
| import numbers | import numbers | ||||||
|  | from collections import Hashable | ||||||
| from functools import partial | from functools import partial | ||||||
|  |  | ||||||
| from bson import DBRef, ObjectId, SON, json_util | from bson import ObjectId, json_util | ||||||
|  | from bson.dbref import DBRef | ||||||
|  | from bson.son import SON | ||||||
| import pymongo | import pymongo | ||||||
| import six | import six | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine import signals | from mongoengine import signals | ||||||
| from mongoengine.base.common import get_document | from mongoengine.base.common import get_document | ||||||
| from mongoengine.base.datastructures import (BaseDict, BaseList, | from mongoengine.base.datastructures import (BaseDict, BaseList, | ||||||
|                                              EmbeddedDocumentList, |                                              EmbeddedDocumentList, | ||||||
|                                              LazyReference, |                                              SemiStrictDict, StrictDict) | ||||||
|                                              StrictDict) |  | ||||||
| from mongoengine.base.fields import ComplexBaseField | from mongoengine.base.fields import ComplexBaseField | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, | from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, | ||||||
|                                 LookUpError, OperationError, ValidationError) |                                 LookUpError, OperationError, ValidationError) | ||||||
| from mongoengine.python_support import Hashable |  | ||||||
|  |  | ||||||
| __all__ = ('BaseDocument', 'NON_FIELD_ERRORS') | __all__ = ('BaseDocument',) | ||||||
|  |  | ||||||
| NON_FIELD_ERRORS = '__all__' | NON_FIELD_ERRORS = '__all__' | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseDocument(object): | class BaseDocument(object): | ||||||
|     # TODO simplify how `_changed_fields` is used. |  | ||||||
|     # Currently, handling of `_changed_fields` seems unnecessarily convoluted: |  | ||||||
|     # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's |  | ||||||
|     #    not setting it to `[]` (or any other value) in `__init__`. |  | ||||||
|     # 2. `EmbeddedDocument` sets `_changed_fields` to `[]` it its overloaded |  | ||||||
|     #    `__init__`. |  | ||||||
|     # 3. `Document` does NOT set `_changed_fields` upon initialization. The |  | ||||||
|     #    field is primarily set via `_from_son` or `_clear_changed_fields`, |  | ||||||
|     #    though there are also other methods that manipulate it. |  | ||||||
|     # 4. The codebase is littered with `hasattr` calls for `_changed_fields`. |  | ||||||
|     __slots__ = ('_changed_fields', '_initialised', '_created', '_data', |     __slots__ = ('_changed_fields', '_initialised', '_created', '_data', | ||||||
|                  '_dynamic_fields', '_auto_id_field', '_db_field_map', |                  '_dynamic_fields', '_auto_id_field', '_db_field_map', | ||||||
|                  '__weakref__') |                  '__weakref__') | ||||||
| @@ -45,20 +35,13 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|     def __init__(self, *args, **values): |     def __init__(self, *args, **values): | ||||||
|         """ |         """ | ||||||
|         Initialise a document or an embedded document. |         Initialise a document or embedded document | ||||||
|  |  | ||||||
|         :param dict values: A dictionary of keys and values for the document. |         :param __auto_convert: Try and will cast python objects to Object types | ||||||
|             It may contain additional reserved keywords, e.g. "__auto_convert". |         :param values: A dictionary of values for the document | ||||||
|         :param bool __auto_convert: If True, supplied values will be converted |  | ||||||
|             to Python-type values via each field's `to_python` method. |  | ||||||
|         :param set __only_fields: A set of fields that have been loaded for |  | ||||||
|             this document. Empty if all fields have been loaded. |  | ||||||
|         :param bool _created: Indicates whether this is a brand new document |  | ||||||
|             or whether it's already been persisted before. Defaults to true. |  | ||||||
|         """ |         """ | ||||||
|         self._initialised = False |         self._initialised = False | ||||||
|         self._created = True |         self._created = True | ||||||
|  |  | ||||||
|         if args: |         if args: | ||||||
|             # Combine positional arguments with named arguments. |             # Combine positional arguments with named arguments. | ||||||
|             # We only want named arguments. |             # We only want named arguments. | ||||||
| @@ -75,6 +58,7 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         __auto_convert = values.pop('__auto_convert', True) |         __auto_convert = values.pop('__auto_convert', True) | ||||||
|  |  | ||||||
|  |         # 399: set default values only to fields loaded from DB | ||||||
|         __only_fields = set(values.pop('__only_fields', values)) |         __only_fields = set(values.pop('__only_fields', values)) | ||||||
|  |  | ||||||
|         _created = values.pop('_created', True) |         _created = values.pop('_created', True) | ||||||
| @@ -95,14 +79,13 @@ class BaseDocument(object): | |||||||
|         if self.STRICT and not self._dynamic: |         if self.STRICT and not self._dynamic: | ||||||
|             self._data = StrictDict.create(allowed_keys=self._fields_ordered)() |             self._data = StrictDict.create(allowed_keys=self._fields_ordered)() | ||||||
|         else: |         else: | ||||||
|             self._data = {} |             self._data = SemiStrictDict.create( | ||||||
|  |                 allowed_keys=self._fields_ordered)() | ||||||
|  |  | ||||||
|         self._dynamic_fields = SON() |         self._dynamic_fields = SON() | ||||||
|  |  | ||||||
|         # Assign default values to the instance. |         # Assign default values to instance | ||||||
|         # We set default values only for fields loaded from DB. See |         for key, field in self._fields.iteritems(): | ||||||
|         # https://github.com/mongoengine/mongoengine/issues/399 for more info. |  | ||||||
|         for key, field in iteritems(self._fields): |  | ||||||
|             if self._db_field_map.get(key, key) in __only_fields: |             if self._db_field_map.get(key, key) in __only_fields: | ||||||
|                 continue |                 continue | ||||||
|             value = getattr(self, key, None) |             value = getattr(self, key, None) | ||||||
| @@ -114,14 +97,16 @@ class BaseDocument(object): | |||||||
|         # Set passed values after initialisation |         # Set passed values after initialisation | ||||||
|         if self._dynamic: |         if self._dynamic: | ||||||
|             dynamic_data = {} |             dynamic_data = {} | ||||||
|             for key, value in iteritems(values): |             for key, value in values.iteritems(): | ||||||
|                 if key in self._fields or key == '_id': |                 if key in self._fields or key == '_id': | ||||||
|                     setattr(self, key, value) |                     setattr(self, key, value) | ||||||
|                 else: |                 elif self._dynamic: | ||||||
|                     dynamic_data[key] = value |                     dynamic_data[key] = value | ||||||
|         else: |         else: | ||||||
|             FileField = _import_class('FileField') |             FileField = _import_class('FileField') | ||||||
|             for key, value in iteritems(values): |             for key, value in values.iteritems(): | ||||||
|  |                 if key == '__auto_convert': | ||||||
|  |                     continue | ||||||
|                 key = self._reverse_db_field_map.get(key, key) |                 key = self._reverse_db_field_map.get(key, key) | ||||||
|                 if key in self._fields or key in ('id', 'pk', '_cls'): |                 if key in self._fields or key in ('id', 'pk', '_cls'): | ||||||
|                     if __auto_convert and value is not None: |                     if __auto_convert and value is not None: | ||||||
| @@ -137,13 +122,12 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         if self._dynamic: |         if self._dynamic: | ||||||
|             self._dynamic_lock = False |             self._dynamic_lock = False | ||||||
|             for key, value in iteritems(dynamic_data): |             for key, value in dynamic_data.iteritems(): | ||||||
|                 setattr(self, key, value) |                 setattr(self, key, value) | ||||||
|  |  | ||||||
|         # Flag initialised |         # Flag initialised | ||||||
|         self._initialised = True |         self._initialised = True | ||||||
|         self._created = _created |         self._created = _created | ||||||
|  |  | ||||||
|         signals.post_init.send(self.__class__, document=self) |         signals.post_init.send(self.__class__, document=self) | ||||||
|  |  | ||||||
|     def __delattr__(self, *args, **kwargs): |     def __delattr__(self, *args, **kwargs): | ||||||
| @@ -163,7 +147,7 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|             if not hasattr(self, name) and not name.startswith('_'): |             if not hasattr(self, name) and not name.startswith('_'): | ||||||
|                 DynamicField = _import_class('DynamicField') |                 DynamicField = _import_class('DynamicField') | ||||||
|                 field = DynamicField(db_field=name, null=True) |                 field = DynamicField(db_field=name) | ||||||
|                 field.name = name |                 field.name = name | ||||||
|                 self._dynamic_fields[name] = field |                 self._dynamic_fields[name] = field | ||||||
|                 self._fields_ordered += (name,) |                 self._fields_ordered += (name,) | ||||||
| @@ -288,6 +272,13 @@ class BaseDocument(object): | |||||||
|     def __ne__(self, other): |     def __ne__(self, other): | ||||||
|         return not self.__eq__(other) |         return not self.__eq__(other) | ||||||
|  |  | ||||||
|  |     def __hash__(self): | ||||||
|  |         if getattr(self, 'pk', None) is None: | ||||||
|  |             # For new object | ||||||
|  |             return super(BaseDocument, self).__hash__() | ||||||
|  |         else: | ||||||
|  |             return hash(self.pk) | ||||||
|  |  | ||||||
|     def clean(self): |     def clean(self): | ||||||
|         """ |         """ | ||||||
|         Hook for doing document level data cleaning before validation is run. |         Hook for doing document level data cleaning before validation is run. | ||||||
| @@ -312,14 +303,15 @@ class BaseDocument(object): | |||||||
|         """ |         """ | ||||||
|         Return as SON data ready for use with MongoDB. |         Return as SON data ready for use with MongoDB. | ||||||
|         """ |         """ | ||||||
|         fields = fields or [] |         if not fields: | ||||||
|  |             fields = [] | ||||||
|  |  | ||||||
|         data = SON() |         data = SON() | ||||||
|         data['_id'] = None |         data['_id'] = None | ||||||
|         data['_cls'] = self._class_name |         data['_cls'] = self._class_name | ||||||
|  |  | ||||||
|         # only root fields ['test1.a', 'test2'] => ['test1', 'test2'] |         # only root fields ['test1.a', 'test2'] => ['test1', 'test2'] | ||||||
|         root_fields = {f.split('.')[0] for f in fields} |         root_fields = set([f.split('.')[0] for f in fields]) | ||||||
|  |  | ||||||
|         for field_name in self: |         for field_name in self: | ||||||
|             if root_fields and field_name not in root_fields: |             if root_fields and field_name not in root_fields: | ||||||
| @@ -352,7 +344,7 @@ class BaseDocument(object): | |||||||
|                 value = field.generate() |                 value = field.generate() | ||||||
|                 self._data[field_name] = value |                 self._data[field_name] = value | ||||||
|  |  | ||||||
|             if (value is not None) or (field.null): |             if value is not None: | ||||||
|                 if use_db_field: |                 if use_db_field: | ||||||
|                     data[field.db_field] = value |                     data[field.db_field] = value | ||||||
|                 else: |                 else: | ||||||
| @@ -367,9 +359,6 @@ class BaseDocument(object): | |||||||
|     def validate(self, clean=True): |     def validate(self, clean=True): | ||||||
|         """Ensure that all fields' values are valid and that required fields |         """Ensure that all fields' values are valid and that required fields | ||||||
|         are present. |         are present. | ||||||
|  |  | ||||||
|         Raises :class:`ValidationError` if any of the fields' values are found |  | ||||||
|         to be invalid. |  | ||||||
|         """ |         """ | ||||||
|         # Ensure that each field is matched to a valid value |         # Ensure that each field is matched to a valid value | ||||||
|         errors = {} |         errors = {} | ||||||
| @@ -424,15 +413,7 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def from_json(cls, json_data, created=False): |     def from_json(cls, json_data, created=False): | ||||||
|         """Converts json data to a Document instance |         """Converts json data to an unsaved document instance""" | ||||||
|  |  | ||||||
|         :param json_data: The json data to load into the Document |  | ||||||
|         :param created: If True, the document will be considered as a brand new document |  | ||||||
|                         If False and an id is provided, it will consider that the data being |  | ||||||
|                         loaded corresponds to what's already in the database (This has an impact of subsequent call to .save()) |  | ||||||
|                         If False and no id is provided, it will consider the data as a new document |  | ||||||
|                         (default ``False``) |  | ||||||
|         """ |  | ||||||
|         return cls._from_son(json_util.loads(json_data), created=created) |         return cls._from_son(json_util.loads(json_data), created=created) | ||||||
|  |  | ||||||
|     def __expand_dynamic_values(self, name, value): |     def __expand_dynamic_values(self, name, value): | ||||||
| @@ -515,7 +496,7 @@ class BaseDocument(object): | |||||||
|                 else: |                 else: | ||||||
|                     data = getattr(data, part, None) |                     data = getattr(data, part, None) | ||||||
|  |  | ||||||
|                 if not isinstance(data, LazyReference) and hasattr(data, '_changed_fields'): |                 if hasattr(data, '_changed_fields'): | ||||||
|                     if getattr(data, '_is_document', False): |                     if getattr(data, '_is_document', False): | ||||||
|                         continue |                         continue | ||||||
|  |  | ||||||
| @@ -523,74 +504,76 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         self._changed_fields = [] |         self._changed_fields = [] | ||||||
|  |  | ||||||
|     def _nestable_types_changed_fields(self, changed_fields, base_key, data): |     def _nestable_types_changed_fields(self, changed_fields, key, data, inspected): | ||||||
|         """Inspect nested data for changed fields |  | ||||||
|  |  | ||||||
|         :param changed_fields: Previously collected changed fields |  | ||||||
|         :param base_key: The base key that must be used to prepend changes to this data |  | ||||||
|         :param data: data to inspect for changes |  | ||||||
|         """ |  | ||||||
|         # Loop list / dict fields as they contain documents |         # Loop list / dict fields as they contain documents | ||||||
|         # Determine the iterator to use |         # Determine the iterator to use | ||||||
|         if not hasattr(data, 'items'): |         if not hasattr(data, 'items'): | ||||||
|             iterator = enumerate(data) |             iterator = enumerate(data) | ||||||
|         else: |         else: | ||||||
|             iterator = iteritems(data) |             iterator = data.iteritems() | ||||||
|  |  | ||||||
|         for index_or_key, value in iterator: |         for index, value in iterator: | ||||||
|             item_key = '%s%s.' % (base_key, index_or_key) |             list_key = '%s%s.' % (key, index) | ||||||
|             # don't check anything lower if this key is already marked |             # don't check anything lower if this key is already marked | ||||||
|             # as changed. |             # as changed. | ||||||
|             if item_key[:-1] in changed_fields: |             if list_key[:-1] in changed_fields: | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             if hasattr(value, '_get_changed_fields'): |             if hasattr(value, '_get_changed_fields'): | ||||||
|                 changed = value._get_changed_fields() |                 changed = value._get_changed_fields(inspected) | ||||||
|                 changed_fields += ['%s%s' % (item_key, k) for k in changed if k] |                 changed_fields += ['%s%s' % (list_key, k) | ||||||
|  |                                    for k in changed if k] | ||||||
|             elif isinstance(value, (list, tuple, dict)): |             elif isinstance(value, (list, tuple, dict)): | ||||||
|                 self._nestable_types_changed_fields( |                 self._nestable_types_changed_fields( | ||||||
|                     changed_fields, item_key, value) |                     changed_fields, list_key, value, inspected) | ||||||
|  |  | ||||||
|     def _get_changed_fields(self): |     def _get_changed_fields(self, inspected=None): | ||||||
|         """Return a list of all fields that have explicitly been changed. |         """Return a list of all fields that have explicitly been changed. | ||||||
|         """ |         """ | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|  |         DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument') | ||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class('ReferenceField') | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |  | ||||||
|         SortedListField = _import_class('SortedListField') |         SortedListField = _import_class('SortedListField') | ||||||
|  |  | ||||||
|         changed_fields = [] |         changed_fields = [] | ||||||
|         changed_fields += getattr(self, '_changed_fields', []) |         changed_fields += getattr(self, '_changed_fields', []) | ||||||
|  |  | ||||||
|  |         inspected = inspected or set() | ||||||
|  |         if hasattr(self, 'id') and isinstance(self.id, Hashable): | ||||||
|  |             if self.id in inspected: | ||||||
|  |                 return changed_fields | ||||||
|  |             inspected.add(self.id) | ||||||
|  |  | ||||||
|         for field_name in self._fields_ordered: |         for field_name in self._fields_ordered: | ||||||
|             db_field_name = self._db_field_map.get(field_name, field_name) |             db_field_name = self._db_field_map.get(field_name, field_name) | ||||||
|             key = '%s.' % db_field_name |             key = '%s.' % db_field_name | ||||||
|             data = self._data.get(field_name, None) |             data = self._data.get(field_name, None) | ||||||
|             field = self._fields.get(field_name) |             field = self._fields.get(field_name) | ||||||
|  |  | ||||||
|             if db_field_name in changed_fields: |             if hasattr(data, 'id'): | ||||||
|                 # Whole field already marked as changed, no need to go further |                 if data.id in inspected: | ||||||
|                     continue |                     continue | ||||||
|  |             if isinstance(field, ReferenceField): | ||||||
|             if isinstance(field, ReferenceField):   # Don't follow referenced documents |  | ||||||
|                 continue |                 continue | ||||||
|  |             elif ( | ||||||
|             if isinstance(data, EmbeddedDocument): |                 isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) and | ||||||
|  |                 db_field_name not in changed_fields | ||||||
|  |             ): | ||||||
|                 # Find all embedded fields that have been changed |                 # Find all embedded fields that have been changed | ||||||
|                 changed = data._get_changed_fields() |                 changed = data._get_changed_fields(inspected) | ||||||
|                 changed_fields += ['%s%s' % (key, k) for k in changed if k] |                 changed_fields += ['%s%s' % (key, k) for k in changed if k] | ||||||
|             elif isinstance(data, (list, tuple, dict)): |             elif (isinstance(data, (list, tuple, dict)) and | ||||||
|  |                     db_field_name not in changed_fields): | ||||||
|                 if (hasattr(field, 'field') and |                 if (hasattr(field, 'field') and | ||||||
|                         isinstance(field.field, (ReferenceField, GenericReferenceField))): |                         isinstance(field.field, ReferenceField)): | ||||||
|                     continue |                     continue | ||||||
|                 elif isinstance(field, SortedListField) and field._ordering: |                 elif isinstance(field, SortedListField) and field._ordering: | ||||||
|                     # if ordering is affected whole list is changed |                     # if ordering is affected whole list is changed | ||||||
|                     if any(field._ordering in d._changed_fields for d in data): |                     if any(map(lambda d: field._ordering in d._changed_fields, data)): | ||||||
|                         changed_fields.append(db_field_name) |                         changed_fields.append(db_field_name) | ||||||
|                         continue |                         continue | ||||||
|  |  | ||||||
|                 self._nestable_types_changed_fields( |                 self._nestable_types_changed_fields( | ||||||
|                     changed_fields, key, data) |                     changed_fields, key, data, inspected) | ||||||
|         return changed_fields |         return changed_fields | ||||||
|  |  | ||||||
|     def _delta(self): |     def _delta(self): | ||||||
| @@ -602,6 +585,7 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         set_fields = self._get_changed_fields() |         set_fields = self._get_changed_fields() | ||||||
|         unset_data = {} |         unset_data = {} | ||||||
|  |         parts = [] | ||||||
|         if hasattr(self, '_changed_fields'): |         if hasattr(self, '_changed_fields'): | ||||||
|             set_data = {} |             set_data = {} | ||||||
|             # Fetch each set item from its path |             # Fetch each set item from its path | ||||||
| @@ -611,13 +595,15 @@ class BaseDocument(object): | |||||||
|                 new_path = [] |                 new_path = [] | ||||||
|                 for p in parts: |                 for p in parts: | ||||||
|                     if isinstance(d, (ObjectId, DBRef)): |                     if isinstance(d, (ObjectId, DBRef)): | ||||||
|                         # Don't dig in the references |  | ||||||
|                         break |                         break | ||||||
|                     elif isinstance(d, list) and p.isdigit(): |                     elif isinstance(d, list) and p.lstrip('-').isdigit(): | ||||||
|                         # An item of a list (identified by its index) is updated |                         if p[0] == '-': | ||||||
|  |                             p = str(len(d) + int(p)) | ||||||
|  |                         try: | ||||||
|                             d = d[int(p)] |                             d = d[int(p)] | ||||||
|  |                         except IndexError: | ||||||
|  |                             d = None | ||||||
|                     elif hasattr(d, 'get'): |                     elif hasattr(d, 'get'): | ||||||
|                         # dict-like (dict, embedded document) |  | ||||||
|                         d = d.get(p) |                         d = d.get(p) | ||||||
|                     new_path.append(p) |                     new_path.append(p) | ||||||
|                 path = '.'.join(new_path) |                 path = '.'.join(new_path) | ||||||
| @@ -629,26 +615,26 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         # Determine if any changed items were actually unset. |         # Determine if any changed items were actually unset. | ||||||
|         for path, value in set_data.items(): |         for path, value in set_data.items(): | ||||||
|             if value or isinstance(value, (numbers.Number, bool)):  # Account for 0 and True that are truthy |             if value or isinstance(value, (numbers.Number, bool)): | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             parts = path.split('.') |             # If we've set a value that ain't the default value don't unset it. | ||||||
|  |             default = None | ||||||
|             if (self._dynamic and len(parts) and parts[0] in |             if (self._dynamic and len(parts) and parts[0] in | ||||||
|                     self._dynamic_fields): |                     self._dynamic_fields): | ||||||
|                 del set_data[path] |                 del set_data[path] | ||||||
|                 unset_data[path] = 1 |                 unset_data[path] = 1 | ||||||
|                 continue |                 continue | ||||||
|  |             elif path in self._fields: | ||||||
|             # If we've set a value that ain't the default value don't unset it. |  | ||||||
|             default = None |  | ||||||
|             if path in self._fields: |  | ||||||
|                 default = self._fields[path].default |                 default = self._fields[path].default | ||||||
|             else:  # Perform a full lookup for lists / embedded lookups |             else:  # Perform a full lookup for lists / embedded lookups | ||||||
|                 d = self |                 d = self | ||||||
|  |                 parts = path.split('.') | ||||||
|                 db_field_name = parts.pop() |                 db_field_name = parts.pop() | ||||||
|                 for p in parts: |                 for p in parts: | ||||||
|                     if isinstance(d, list) and p.isdigit(): |                     if isinstance(d, list) and p.lstrip('-').isdigit(): | ||||||
|  |                         if p[0] == '-': | ||||||
|  |                             p = str(len(d) + int(p)) | ||||||
|                         d = d[int(p)] |                         d = d[int(p)] | ||||||
|                     elif (hasattr(d, '__getattribute__') and |                     elif (hasattr(d, '__getattribute__') and | ||||||
|                           not isinstance(d, dict)): |                           not isinstance(d, dict)): | ||||||
| @@ -666,9 +652,10 @@ class BaseDocument(object): | |||||||
|                         default = None |                         default = None | ||||||
|  |  | ||||||
|             if default is not None: |             if default is not None: | ||||||
|                 default = default() if callable(default) else default |                 if callable(default): | ||||||
|  |                     default = default() | ||||||
|  |  | ||||||
|             if value != default: |             if default != value: | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             del set_data[path] |             del set_data[path] | ||||||
| @@ -684,7 +671,9 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): |     def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): | ||||||
|         """Create an instance of a Document (subclass) from a PyMongo SON.""" |         """Create an instance of a Document (subclass) from a PyMongo | ||||||
|  |         SON. | ||||||
|  |         """ | ||||||
|         if not only_fields: |         if not only_fields: | ||||||
|             only_fields = [] |             only_fields = [] | ||||||
|  |  | ||||||
| @@ -695,25 +684,21 @@ class BaseDocument(object): | |||||||
|         # class if unavailable |         # class if unavailable | ||||||
|         class_name = son.get('_cls', cls._class_name) |         class_name = son.get('_cls', cls._class_name) | ||||||
|  |  | ||||||
|         # Convert SON to a data dict, making sure each key is a string and |         # Convert SON to a dict, making sure each key is a string | ||||||
|         # corresponds to the right db field. |         data = {str(key): value for key, value in son.iteritems()} | ||||||
|         data = {} |  | ||||||
|         for key, value in iteritems(son): |  | ||||||
|             key = str(key) |  | ||||||
|             key = cls._db_field_map.get(key, key) |  | ||||||
|             data[key] = value |  | ||||||
|  |  | ||||||
|         # Return correct subclass for document type |         # Return correct subclass for document type | ||||||
|         if class_name != cls._class_name: |         if class_name != cls._class_name: | ||||||
|             cls = get_document(class_name) |             cls = get_document(class_name) | ||||||
|  |  | ||||||
|  |         changed_fields = [] | ||||||
|         errors_dict = {} |         errors_dict = {} | ||||||
|  |  | ||||||
|         fields = cls._fields |         fields = cls._fields | ||||||
|         if not _auto_dereference: |         if not _auto_dereference: | ||||||
|             fields = copy.deepcopy(fields) |             fields = copy.copy(fields) | ||||||
|  |  | ||||||
|         for field_name, field in iteritems(fields): |         for field_name, field in fields.iteritems(): | ||||||
|             field._auto_dereference = _auto_dereference |             field._auto_dereference = _auto_dereference | ||||||
|             if field.db_field in data: |             if field.db_field in data: | ||||||
|                 value = data[field.db_field] |                 value = data[field.db_field] | ||||||
| @@ -734,15 +719,10 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         # In STRICT documents, remove any keys that aren't in cls._fields |         # In STRICT documents, remove any keys that aren't in cls._fields | ||||||
|         if cls.STRICT: |         if cls.STRICT: | ||||||
|             data = {k: v for k, v in iteritems(data) if k in cls._fields} |             data = {k: v for k, v in data.iteritems() if k in cls._fields} | ||||||
|  |  | ||||||
|         obj = cls( |         obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data) | ||||||
|             __auto_convert=False, |         obj._changed_fields = changed_fields | ||||||
|             _created=created, |  | ||||||
|             __only_fields=only_fields, |  | ||||||
|             **data |  | ||||||
|         ) |  | ||||||
|         obj._changed_fields = [] |  | ||||||
|         if not _auto_dereference: |         if not _auto_dereference: | ||||||
|             obj._fields = fields |             obj._fields = fields | ||||||
|  |  | ||||||
| @@ -906,8 +886,7 @@ class BaseDocument(object): | |||||||
|                 index = {'fields': fields, 'unique': True, 'sparse': sparse} |                 index = {'fields': fields, 'unique': True, 'sparse': sparse} | ||||||
|                 unique_indexes.append(index) |                 unique_indexes.append(index) | ||||||
|  |  | ||||||
|             if field.__class__.__name__ in {'EmbeddedDocumentListField', |             if field.__class__.__name__ == 'ListField': | ||||||
|                                             'ListField', 'SortedListField'}: |  | ||||||
|                 field = field.field |                 field = field.field | ||||||
|  |  | ||||||
|             # Grab any embedded document field unique indexes |             # Grab any embedded document field unique indexes | ||||||
| @@ -1103,11 +1082,5 @@ class BaseDocument(object): | |||||||
|         """Return the display value for a choice field""" |         """Return the display value for a choice field""" | ||||||
|         value = getattr(self, field.name) |         value = getattr(self, field.name) | ||||||
|         if field.choices and isinstance(field.choices[0], (list, tuple)): |         if field.choices and isinstance(field.choices[0], (list, tuple)): | ||||||
|             if value is None: |             return dict(field.choices).get(value, value) | ||||||
|                 return None |  | ||||||
|             sep = getattr(field, 'display_sep', ' ') |  | ||||||
|             values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value] |  | ||||||
|             return sep.join([ |  | ||||||
|                 six.text_type(dict(field.choices).get(val, val)) |  | ||||||
|                 for val in values or []]) |  | ||||||
|         return value |         return value | ||||||
|   | |||||||
| @@ -5,13 +5,13 @@ import weakref | |||||||
| from bson import DBRef, ObjectId, SON | from bson import DBRef, ObjectId, SON | ||||||
| import pymongo | import pymongo | ||||||
| import six | import six | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.base.common import UPDATE_OPERATORS | from mongoengine.base.common import UPDATE_OPERATORS | ||||||
| from mongoengine.base.datastructures import (BaseDict, BaseList, | from mongoengine.base.datastructures import (BaseDict, BaseList, | ||||||
|                                              EmbeddedDocumentList) |                                              EmbeddedDocumentList) | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import DeprecatedError, ValidationError | from mongoengine.errors import ValidationError | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', | __all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', | ||||||
|            'GeoJsonBaseField') |            'GeoJsonBaseField') | ||||||
| @@ -52,10 +52,10 @@ class BaseField(object): | |||||||
|             unique with. |             unique with. | ||||||
|         :param primary_key: Mark this field as the primary key. Defaults to False. |         :param primary_key: Mark this field as the primary key. Defaults to False. | ||||||
|         :param validation: (optional) A callable to validate the value of the |         :param validation: (optional) A callable to validate the value of the | ||||||
|             field.  The callable takes the value as parameter and should raise |             field.  Generally this is deprecated in favour of the | ||||||
|             a ValidationError if validation fails |             `FIELD.validate` method | ||||||
|         :param choices: (optional) The valid choices |         :param choices: (optional) The valid choices | ||||||
|         :param null: (optional) If the field value can be null. If no and there is a default value |         :param null: (optional) Is the field value can be null. If no and there is a default value | ||||||
|             then the default value is set |             then the default value is set | ||||||
|         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` |         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` | ||||||
|             means that uniqueness won't be enforced for `None` values |             means that uniqueness won't be enforced for `None` values | ||||||
| @@ -81,14 +81,7 @@ class BaseField(object): | |||||||
|         self.sparse = sparse |         self.sparse = sparse | ||||||
|         self._owner_document = None |         self._owner_document = None | ||||||
|  |  | ||||||
|         # Make sure db_field is a string (if it's explicitly defined). |         # Validate the db_field | ||||||
|         if ( |  | ||||||
|             self.db_field is not None and |  | ||||||
|             not isinstance(self.db_field, six.string_types) |  | ||||||
|         ): |  | ||||||
|             raise TypeError('db_field should be a string.') |  | ||||||
|  |  | ||||||
|         # Make sure db_field doesn't contain any forbidden characters. |  | ||||||
|         if isinstance(self.db_field, six.string_types) and ( |         if isinstance(self.db_field, six.string_types) and ( | ||||||
|             '.' in self.db_field or |             '.' in self.db_field or | ||||||
|             '\0' in self.db_field or |             '\0' in self.db_field or | ||||||
| @@ -128,9 +121,11 @@ class BaseField(object): | |||||||
|         return instance._data.get(self.name) |         return instance._data.get(self.name) | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         """Descriptor for assigning a value to a field in a document.""" |         """Descriptor for assigning a value to a field in a document. | ||||||
|         # If setting to None and there is a default value provided for this |         """ | ||||||
|         # field, then set the value to the default value. |  | ||||||
|  |         # If setting to None and there is a default | ||||||
|  |         # Then set the value to the default value | ||||||
|         if value is None: |         if value is None: | ||||||
|             if self.null: |             if self.null: | ||||||
|                 value = None |                 value = None | ||||||
| @@ -141,16 +136,12 @@ class BaseField(object): | |||||||
|  |  | ||||||
|         if instance._initialised: |         if instance._initialised: | ||||||
|             try: |             try: | ||||||
|                 value_has_changed = ( |                 if (self.name not in instance._data or | ||||||
|                     self.name not in instance._data or |                         instance._data[self.name] != value): | ||||||
|                     instance._data[self.name] != value |  | ||||||
|                 ) |  | ||||||
|                 if value_has_changed: |  | ||||||
|                     instance._mark_as_changed(self.name) |                     instance._mark_as_changed(self.name) | ||||||
|             except Exception: |             except Exception: | ||||||
|                 # Some values can't be compared and throw an error when we |                 # Values cant be compared eg: naive and tz datetimes | ||||||
|                 # attempt to do so (e.g. tz-naive and tz-aware datetimes). |                 # So mark it as changed | ||||||
|                 # Mark the field as changed in such cases. |  | ||||||
|                 instance._mark_as_changed(self.name) |                 instance._mark_as_changed(self.name) | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
| @@ -160,7 +151,6 @@ class BaseField(object): | |||||||
|             for v in value: |             for v in value: | ||||||
|                 if isinstance(v, EmbeddedDocument): |                 if isinstance(v, EmbeddedDocument): | ||||||
|                     v._instance = weakref.proxy(instance) |                     v._instance = weakref.proxy(instance) | ||||||
|  |  | ||||||
|         instance._data[self.name] = value |         instance._data[self.name] = value | ||||||
|  |  | ||||||
|     def error(self, message='', errors=None, field_name=None): |     def error(self, message='', errors=None, field_name=None): | ||||||
| @@ -216,9 +206,7 @@ class BaseField(object): | |||||||
|                     ) |                     ) | ||||||
|                 ) |                 ) | ||||||
|         # Choices which are types other than Documents |         # Choices which are types other than Documents | ||||||
|         else: |         elif value not in choice_list: | ||||||
|             values = value if isinstance(value, (list, tuple)) else [value] |  | ||||||
|             if len(set(values) - set(choice_list)): |  | ||||||
|             self.error('Value must be one of %s' % six.text_type(choice_list)) |             self.error('Value must be one of %s' % six.text_type(choice_list)) | ||||||
|  |  | ||||||
|     def _validate(self, value, **kwargs): |     def _validate(self, value, **kwargs): | ||||||
| @@ -229,18 +217,10 @@ class BaseField(object): | |||||||
|         # check validation argument |         # check validation argument | ||||||
|         if self.validation is not None: |         if self.validation is not None: | ||||||
|             if callable(self.validation): |             if callable(self.validation): | ||||||
|                 try: |                 if not self.validation(value): | ||||||
|                     # breaking change of 0.18 |                     self.error('Value does not match custom validation method') | ||||||
|                     # Get rid of True/False-type return for the validation method |  | ||||||
|                     # in favor of having validation raising a ValidationError |  | ||||||
|                     ret = self.validation(value) |  | ||||||
|                     if ret is not None: |  | ||||||
|                         raise DeprecatedError('validation argument for `%s` must not return anything, ' |  | ||||||
|                                               'it should raise a ValidationError if validation fails' % self.name) |  | ||||||
|                 except ValidationError as ex: |  | ||||||
|                     self.error(str(ex)) |  | ||||||
|             else: |             else: | ||||||
|                 raise ValueError('validation argument for `"%s"` must be a ' |                 raise ValueError('validation argument for "%s" must be a ' | ||||||
|                                  'callable.' % self.name) |                                  'callable.' % self.name) | ||||||
|  |  | ||||||
|         self.validate(value, **kwargs) |         self.validate(value, **kwargs) | ||||||
| @@ -278,25 +258,18 @@ class ComplexBaseField(BaseField): | |||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class('ReferenceField') | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class('GenericReferenceField') | ||||||
|         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') |         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') | ||||||
|  |         dereference = (self._auto_dereference and | ||||||
|         auto_dereference = instance._fields[self.name]._auto_dereference |  | ||||||
|  |  | ||||||
|         dereference = (auto_dereference and |  | ||||||
|                        (self.field is None or isinstance(self.field, |                        (self.field is None or isinstance(self.field, | ||||||
|                                                          (GenericReferenceField, ReferenceField)))) |                                                          (GenericReferenceField, ReferenceField)))) | ||||||
|  |  | ||||||
|         _dereference = _import_class('DeReference')() |         _dereference = _import_class('DeReference')() | ||||||
|  |  | ||||||
|         if (instance._initialised and |         self._auto_dereference = instance._fields[self.name]._auto_dereference | ||||||
|                 dereference and |         if instance._initialised and dereference and instance._data.get(self.name): | ||||||
|                 instance._data.get(self.name) and |  | ||||||
|                 not getattr(instance._data[self.name], '_dereferenced', False)): |  | ||||||
|             instance._data[self.name] = _dereference( |             instance._data[self.name] = _dereference( | ||||||
|                 instance._data.get(self.name), max_depth=1, instance=instance, |                 instance._data.get(self.name), max_depth=1, instance=instance, | ||||||
|                 name=self.name |                 name=self.name | ||||||
|             ) |             ) | ||||||
|             if hasattr(instance._data[self.name], '_dereferenced'): |  | ||||||
|                 instance._data[self.name]._dereferenced = True |  | ||||||
|  |  | ||||||
|         value = super(ComplexBaseField, self).__get__(instance, owner) |         value = super(ComplexBaseField, self).__get__(instance, owner) | ||||||
|  |  | ||||||
| @@ -312,7 +285,7 @@ class ComplexBaseField(BaseField): | |||||||
|             value = BaseDict(value, instance, self.name) |             value = BaseDict(value, instance, self.name) | ||||||
|             instance._data[self.name] = value |             instance._data[self.name] = value | ||||||
|  |  | ||||||
|         if (auto_dereference and instance._initialised and |         if (self._auto_dereference and instance._initialised and | ||||||
|                 isinstance(value, (BaseList, BaseDict)) and |                 isinstance(value, (BaseList, BaseDict)) and | ||||||
|                 not value._dereferenced): |                 not value._dereferenced): | ||||||
|             value = _dereference( |             value = _dereference( | ||||||
| @@ -331,16 +304,11 @@ class ComplexBaseField(BaseField): | |||||||
|         if hasattr(value, 'to_python'): |         if hasattr(value, 'to_python'): | ||||||
|             return value.to_python() |             return value.to_python() | ||||||
|  |  | ||||||
|         BaseDocument = _import_class('BaseDocument') |  | ||||||
|         if isinstance(value, BaseDocument): |  | ||||||
|             # Something is wrong, return the value as it is |  | ||||||
|             return value |  | ||||||
|  |  | ||||||
|         is_list = False |         is_list = False | ||||||
|         if not hasattr(value, 'items'): |         if not hasattr(value, 'items'): | ||||||
|             try: |             try: | ||||||
|                 is_list = True |                 is_list = True | ||||||
|                 value = {idx: v for idx, v in enumerate(value)} |                 value = {k: v for k, v in enumerate(value)} | ||||||
|             except TypeError:  # Not iterable return the value |             except TypeError:  # Not iterable return the value | ||||||
|                 return value |                 return value | ||||||
|  |  | ||||||
| @@ -399,11 +367,11 @@ class ComplexBaseField(BaseField): | |||||||
|         if self.field: |         if self.field: | ||||||
|             value_dict = { |             value_dict = { | ||||||
|                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) |                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||||
|                 for key, item in iteritems(value) |                 for key, item in value.iteritems() | ||||||
|             } |             } | ||||||
|         else: |         else: | ||||||
|             value_dict = {} |             value_dict = {} | ||||||
|             for k, v in iteritems(value): |             for k, v in value.iteritems(): | ||||||
|                 if isinstance(v, Document): |                 if isinstance(v, Document): | ||||||
|                     # We need the id from the saved object to create the DBRef |                     # We need the id from the saved object to create the DBRef | ||||||
|                     if v.pk is None: |                     if v.pk is None: | ||||||
| @@ -440,7 +408,7 @@ class ComplexBaseField(BaseField): | |||||||
|         errors = {} |         errors = {} | ||||||
|         if self.field: |         if self.field: | ||||||
|             if hasattr(value, 'iteritems') or hasattr(value, 'items'): |             if hasattr(value, 'iteritems') or hasattr(value, 'items'): | ||||||
|                 sequence = iteritems(value) |                 sequence = value.iteritems() | ||||||
|             else: |             else: | ||||||
|                 sequence = enumerate(value) |                 sequence = enumerate(value) | ||||||
|             for k, v in sequence: |             for k, v in sequence: | ||||||
| @@ -525,7 +493,7 @@ class GeoJsonBaseField(BaseField): | |||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         """Validate the GeoJson object based on its type.""" |         """Validate the GeoJson object based on its type.""" | ||||||
|         if isinstance(value, dict): |         if isinstance(value, dict): | ||||||
|             if set(value.keys()) == {'type', 'coordinates'}: |             if set(value.keys()) == set(['type', 'coordinates']): | ||||||
|                 if value['type'] != self._type: |                 if value['type'] != self._type: | ||||||
|                     self.error('%s type must be "%s"' % |                     self.error('%s type must be "%s"' % | ||||||
|                                (self._name, self._type)) |                                (self._name, self._type)) | ||||||
|   | |||||||
| @@ -1,7 +1,6 @@ | |||||||
| import warnings | import warnings | ||||||
|  |  | ||||||
| import six | import six | ||||||
| from six import iteritems, itervalues |  | ||||||
|  |  | ||||||
| from mongoengine.base.common import _document_registry | from mongoengine.base.common import _document_registry | ||||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||||
| @@ -19,14 +18,14 @@ class DocumentMetaclass(type): | |||||||
|     """Metaclass for all documents.""" |     """Metaclass for all documents.""" | ||||||
|  |  | ||||||
|     # TODO lower complexity of this method |     # TODO lower complexity of this method | ||||||
|     def __new__(mcs, name, bases, attrs): |     def __new__(cls, name, bases, attrs): | ||||||
|         flattened_bases = mcs._get_bases(bases) |         flattened_bases = cls._get_bases(bases) | ||||||
|         super_new = super(DocumentMetaclass, mcs).__new__ |         super_new = super(DocumentMetaclass, cls).__new__ | ||||||
|  |  | ||||||
|         # If a base class just call super |         # If a base class just call super | ||||||
|         metaclass = attrs.get('my_metaclass') |         metaclass = attrs.get('my_metaclass') | ||||||
|         if metaclass and issubclass(metaclass, DocumentMetaclass): |         if metaclass and issubclass(metaclass, DocumentMetaclass): | ||||||
|             return super_new(mcs, name, bases, attrs) |             return super_new(cls, name, bases, attrs) | ||||||
|  |  | ||||||
|         attrs['_is_document'] = attrs.get('_is_document', False) |         attrs['_is_document'] = attrs.get('_is_document', False) | ||||||
|         attrs['_cached_reference_fields'] = [] |         attrs['_cached_reference_fields'] = [] | ||||||
| @@ -63,7 +62,7 @@ class DocumentMetaclass(type): | |||||||
|             # Standard object mixin - merge in any Fields |             # Standard object mixin - merge in any Fields | ||||||
|             if not hasattr(base, '_meta'): |             if not hasattr(base, '_meta'): | ||||||
|                 base_fields = {} |                 base_fields = {} | ||||||
|                 for attr_name, attr_value in iteritems(base.__dict__): |                 for attr_name, attr_value in base.__dict__.iteritems(): | ||||||
|                     if not isinstance(attr_value, BaseField): |                     if not isinstance(attr_value, BaseField): | ||||||
|                         continue |                         continue | ||||||
|                     attr_value.name = attr_name |                     attr_value.name = attr_name | ||||||
| @@ -75,7 +74,7 @@ class DocumentMetaclass(type): | |||||||
|  |  | ||||||
|         # Discover any document fields |         # Discover any document fields | ||||||
|         field_names = {} |         field_names = {} | ||||||
|         for attr_name, attr_value in iteritems(attrs): |         for attr_name, attr_value in attrs.iteritems(): | ||||||
|             if not isinstance(attr_value, BaseField): |             if not isinstance(attr_value, BaseField): | ||||||
|                 continue |                 continue | ||||||
|             attr_value.name = attr_name |             attr_value.name = attr_name | ||||||
| @@ -104,7 +103,7 @@ class DocumentMetaclass(type): | |||||||
|  |  | ||||||
|         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( |         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( | ||||||
|                                          (v.creation_counter, v.name) |                                          (v.creation_counter, v.name) | ||||||
|                                          for v in itervalues(doc_fields))) |                                          for v in doc_fields.itervalues())) | ||||||
|  |  | ||||||
|         # |         # | ||||||
|         # Set document hierarchy |         # Set document hierarchy | ||||||
| @@ -122,8 +121,7 @@ class DocumentMetaclass(type): | |||||||
|                 # inheritance of classes where inheritance is set to False |                 # inheritance of classes where inheritance is set to False | ||||||
|                 allow_inheritance = base._meta.get('allow_inheritance') |                 allow_inheritance = base._meta.get('allow_inheritance') | ||||||
|                 if not allow_inheritance and not base._meta.get('abstract'): |                 if not allow_inheritance and not base._meta.get('abstract'): | ||||||
|                     raise ValueError('Document %s may not be subclassed. ' |                     raise ValueError('Document %s may not be subclassed' % | ||||||
|                                      'To enable inheritance, use the "allow_inheritance" meta attribute.' % |  | ||||||
|                                      base.__name__) |                                      base.__name__) | ||||||
|  |  | ||||||
|         # Get superclasses from last base superclass |         # Get superclasses from last base superclass | ||||||
| @@ -140,7 +138,7 @@ class DocumentMetaclass(type): | |||||||
|         attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types |         attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types | ||||||
|  |  | ||||||
|         # Create the new_class |         # Create the new_class | ||||||
|         new_class = super_new(mcs, name, bases, attrs) |         new_class = super_new(cls, name, bases, attrs) | ||||||
|  |  | ||||||
|         # Set _subclasses |         # Set _subclasses | ||||||
|         for base in document_bases: |         for base in document_bases: | ||||||
| @@ -149,7 +147,7 @@ class DocumentMetaclass(type): | |||||||
|             base._types = base._subclasses  # TODO depreciate _types |             base._types = base._subclasses  # TODO depreciate _types | ||||||
|  |  | ||||||
|         (Document, EmbeddedDocument, DictField, |         (Document, EmbeddedDocument, DictField, | ||||||
|          CachedReferenceField) = mcs._import_classes() |          CachedReferenceField) = cls._import_classes() | ||||||
|  |  | ||||||
|         if issubclass(new_class, Document): |         if issubclass(new_class, Document): | ||||||
|             new_class._collection = None |             new_class._collection = None | ||||||
| @@ -174,7 +172,7 @@ class DocumentMetaclass(type): | |||||||
|                         f.__dict__.update({'im_self': getattr(f, '__self__')}) |                         f.__dict__.update({'im_self': getattr(f, '__self__')}) | ||||||
|  |  | ||||||
|         # Handle delete rules |         # Handle delete rules | ||||||
|         for field in itervalues(new_class._fields): |         for field in new_class._fields.itervalues(): | ||||||
|             f = field |             f = field | ||||||
|             if f.owner_document is None: |             if f.owner_document is None: | ||||||
|                 f.owner_document = new_class |                 f.owner_document = new_class | ||||||
| @@ -184,6 +182,9 @@ class DocumentMetaclass(type): | |||||||
|                 if issubclass(new_class, EmbeddedDocument): |                 if issubclass(new_class, EmbeddedDocument): | ||||||
|                     raise InvalidDocumentError('CachedReferenceFields is not ' |                     raise InvalidDocumentError('CachedReferenceFields is not ' | ||||||
|                                                'allowed in EmbeddedDocuments') |                                                'allowed in EmbeddedDocuments') | ||||||
|  |                 if not f.document_type: | ||||||
|  |                     raise InvalidDocumentError( | ||||||
|  |                         'Document is not available to sync') | ||||||
|  |  | ||||||
|                 if f.auto_sync: |                 if f.auto_sync: | ||||||
|                     f.start_listener() |                     f.start_listener() | ||||||
| @@ -218,26 +219,29 @@ class DocumentMetaclass(type): | |||||||
|  |  | ||||||
|         return new_class |         return new_class | ||||||
|  |  | ||||||
|  |     def add_to_class(self, name, value): | ||||||
|  |         setattr(self, name, value) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_bases(mcs, bases): |     def _get_bases(cls, bases): | ||||||
|         if isinstance(bases, BasesTuple): |         if isinstance(bases, BasesTuple): | ||||||
|             return bases |             return bases | ||||||
|         seen = [] |         seen = [] | ||||||
|         bases = mcs.__get_bases(bases) |         bases = cls.__get_bases(bases) | ||||||
|         unique_bases = (b for b in bases if not (b in seen or seen.append(b))) |         unique_bases = (b for b in bases if not (b in seen or seen.append(b))) | ||||||
|         return BasesTuple(unique_bases) |         return BasesTuple(unique_bases) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def __get_bases(mcs, bases): |     def __get_bases(cls, bases): | ||||||
|         for base in bases: |         for base in bases: | ||||||
|             if base is object: |             if base is object: | ||||||
|                 continue |                 continue | ||||||
|             yield base |             yield base | ||||||
|             for child_base in mcs.__get_bases(base.__bases__): |             for child_base in cls.__get_bases(base.__bases__): | ||||||
|                 yield child_base |                 yield child_base | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _import_classes(mcs): |     def _import_classes(cls): | ||||||
|         Document = _import_class('Document') |         Document = _import_class('Document') | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|         DictField = _import_class('DictField') |         DictField = _import_class('DictField') | ||||||
| @@ -250,9 +254,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|     collection in the database. |     collection in the database. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __new__(mcs, name, bases, attrs): |     def __new__(cls, name, bases, attrs): | ||||||
|         flattened_bases = mcs._get_bases(bases) |         flattened_bases = cls._get_bases(bases) | ||||||
|         super_new = super(TopLevelDocumentMetaclass, mcs).__new__ |         super_new = super(TopLevelDocumentMetaclass, cls).__new__ | ||||||
|  |  | ||||||
|         # Set default _meta data if base class, otherwise get user defined meta |         # Set default _meta data if base class, otherwise get user defined meta | ||||||
|         if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: |         if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: | ||||||
| @@ -315,7 +319,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|                     not parent_doc_cls._meta.get('abstract', False)): |                     not parent_doc_cls._meta.get('abstract', False)): | ||||||
|                 msg = 'Abstract document cannot have non-abstract base' |                 msg = 'Abstract document cannot have non-abstract base' | ||||||
|                 raise ValueError(msg) |                 raise ValueError(msg) | ||||||
|             return super_new(mcs, name, bases, attrs) |             return super_new(cls, name, bases, attrs) | ||||||
|  |  | ||||||
|         # Merge base class metas. |         # Merge base class metas. | ||||||
|         # Uses a special MetaDict that handles various merging rules |         # Uses a special MetaDict that handles various merging rules | ||||||
| @@ -356,7 +360,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|         attrs['_meta'] = meta |         attrs['_meta'] = meta | ||||||
|  |  | ||||||
|         # Call super and get the new class |         # Call super and get the new class | ||||||
|         new_class = super_new(mcs, name, bases, attrs) |         new_class = super_new(cls, name, bases, attrs) | ||||||
|  |  | ||||||
|         meta = new_class._meta |         meta = new_class._meta | ||||||
|  |  | ||||||
| @@ -373,7 +377,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|             new_class.objects = QuerySetManager() |             new_class.objects = QuerySetManager() | ||||||
|  |  | ||||||
|         # Validate the fields and set primary key if needed |         # Validate the fields and set primary key if needed | ||||||
|         for field_name, field in iteritems(new_class._fields): |         for field_name, field in new_class._fields.iteritems(): | ||||||
|             if field.primary_key: |             if field.primary_key: | ||||||
|                 # Ensure only one primary key is set |                 # Ensure only one primary key is set | ||||||
|                 current_pk = new_class._meta.get('id_field') |                 current_pk = new_class._meta.get('id_field') | ||||||
| @@ -390,7 +394,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|                                            '_auto_id_field', False) |                                            '_auto_id_field', False) | ||||||
|         if not new_class._meta.get('id_field'): |         if not new_class._meta.get('id_field'): | ||||||
|             # After 0.10, find not existing names, instead of overwriting |             # After 0.10, find not existing names, instead of overwriting | ||||||
|             id_name, id_db_name = mcs.get_auto_id_names(new_class) |             id_name, id_db_name = cls.get_auto_id_names(new_class) | ||||||
|             new_class._auto_id_field = True |             new_class._auto_id_field = True | ||||||
|             new_class._meta['id_field'] = id_name |             new_class._meta['id_field'] = id_name | ||||||
|             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) |             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) | ||||||
| @@ -415,7 +419,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|         return new_class |         return new_class | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def get_auto_id_names(mcs, new_class): |     def get_auto_id_names(cls, new_class): | ||||||
|         id_name, id_db_name = ('id', '_id') |         id_name, id_db_name = ('id', '_id') | ||||||
|         if id_name not in new_class._fields and \ |         if id_name not in new_class._fields and \ | ||||||
|                 id_db_name not in (v.db_field for v in new_class._fields.values()): |                 id_db_name not in (v.db_field for v in new_class._fields.values()): | ||||||
| @@ -436,7 +440,7 @@ class MetaDict(dict): | |||||||
|     _merge_options = ('indexes',) |     _merge_options = ('indexes',) | ||||||
|  |  | ||||||
|     def merge(self, new_options): |     def merge(self, new_options): | ||||||
|         for k, v in iteritems(new_options): |         for k, v in new_options.iteritems(): | ||||||
|             if k in self._merge_options: |             if k in self._merge_options: | ||||||
|                 self[k] = self.get(k, []) + v |                 self[k] = self.get(k, []) + v | ||||||
|             else: |             else: | ||||||
|   | |||||||
| @@ -1,22 +0,0 @@ | |||||||
| import re |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class LazyRegexCompiler(object): |  | ||||||
|     """Descriptor to allow lazy compilation of regex""" |  | ||||||
|  |  | ||||||
|     def __init__(self, pattern, flags=0): |  | ||||||
|         self._pattern = pattern |  | ||||||
|         self._flags = flags |  | ||||||
|         self._compiled_regex = None |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def compiled_regex(self): |  | ||||||
|         if self._compiled_regex is None: |  | ||||||
|             self._compiled_regex = re.compile(self._pattern, self._flags) |  | ||||||
|         return self._compiled_regex |  | ||||||
|  |  | ||||||
|     def __get__(self, instance, owner): |  | ||||||
|         return self.compiled_regex |  | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |  | ||||||
|         raise AttributeError("Can not set attribute LazyRegexCompiler") |  | ||||||
| @@ -31,6 +31,7 @@ def _import_class(cls_name): | |||||||
|  |  | ||||||
|     field_classes = _field_list_cache |     field_classes = _field_list_cache | ||||||
|  |  | ||||||
|  |     queryset_classes = ('OperationError',) | ||||||
|     deref_classes = ('DeReference',) |     deref_classes = ('DeReference',) | ||||||
|  |  | ||||||
|     if cls_name == 'BaseDocument': |     if cls_name == 'BaseDocument': | ||||||
| @@ -42,11 +43,14 @@ def _import_class(cls_name): | |||||||
|     elif cls_name in field_classes: |     elif cls_name in field_classes: | ||||||
|         from mongoengine import fields as module |         from mongoengine import fields as module | ||||||
|         import_classes = field_classes |         import_classes = field_classes | ||||||
|  |     elif cls_name in queryset_classes: | ||||||
|  |         from mongoengine import queryset as module | ||||||
|  |         import_classes = queryset_classes | ||||||
|     elif cls_name in deref_classes: |     elif cls_name in deref_classes: | ||||||
|         from mongoengine import dereference as module |         from mongoengine import dereference as module | ||||||
|         import_classes = deref_classes |         import_classes = deref_classes | ||||||
|     else: |     else: | ||||||
|         raise ValueError('No import set for: %s' % cls_name) |         raise ValueError('No import set for: ' % cls_name) | ||||||
|  |  | ||||||
|     for cls in import_classes: |     for cls in import_classes: | ||||||
|         _class_registry_cache[cls] = getattr(module, cls) |         _class_registry_cache[cls] = getattr(module, cls) | ||||||
|   | |||||||
| @@ -1,30 +1,19 @@ | |||||||
| from pymongo import MongoClient, ReadPreference, uri_parser | from pymongo import MongoClient, ReadPreference, uri_parser | ||||||
| from pymongo.database import _check_name |  | ||||||
| import six | import six | ||||||
|  |  | ||||||
| __all__ = [ | from mongoengine.python_support import IS_PYMONGO_3 | ||||||
|     'DEFAULT_CONNECTION_NAME', |  | ||||||
|     'DEFAULT_DATABASE_NAME', | __all__ = ['MongoEngineConnectionError', 'connect', 'register_connection', | ||||||
|     'MongoEngineConnectionError', |            'DEFAULT_CONNECTION_NAME'] | ||||||
|     'connect', |  | ||||||
|     'disconnect', |  | ||||||
|     'disconnect_all', |  | ||||||
|     'get_connection', |  | ||||||
|     'get_db', |  | ||||||
|     'register_connection', |  | ||||||
| ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| DEFAULT_CONNECTION_NAME = 'default' | DEFAULT_CONNECTION_NAME = 'default' | ||||||
| DEFAULT_DATABASE_NAME = 'test' |  | ||||||
| DEFAULT_HOST = 'localhost' |  | ||||||
| DEFAULT_PORT = 27017 |  | ||||||
|  |  | ||||||
| _connection_settings = {} | if IS_PYMONGO_3: | ||||||
| _connections = {} |     READ_PREFERENCE = ReadPreference.PRIMARY | ||||||
| _dbs = {} | else: | ||||||
|  |     from pymongo import MongoReplicaSetClient | ||||||
| READ_PREFERENCE = ReadPreference.PRIMARY |     READ_PREFERENCE = False | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoEngineConnectionError(Exception): | class MongoEngineConnectionError(Exception): | ||||||
| @@ -34,48 +23,44 @@ class MongoEngineConnectionError(Exception): | |||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| def _check_db_name(name): | _connection_settings = {} | ||||||
|     """Check if a database name is valid. | _connections = {} | ||||||
|     This functionality is copied from pymongo Database class constructor. | _dbs = {} | ||||||
|     """ |  | ||||||
|     if not isinstance(name, six.string_types): |  | ||||||
|         raise TypeError('name must be an instance of %s' % six.string_types) |  | ||||||
|     elif name != '$external': |  | ||||||
|         _check_name(name) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _get_connection_settings( | def register_connection(alias, name=None, host=None, port=None, | ||||||
|         db=None, name=None, host=None, port=None, |  | ||||||
|                         read_preference=READ_PREFERENCE, |                         read_preference=READ_PREFERENCE, | ||||||
|                         username=None, password=None, |                         username=None, password=None, | ||||||
|                         authentication_source=None, |                         authentication_source=None, | ||||||
|                         authentication_mechanism=None, |                         authentication_mechanism=None, | ||||||
|                         **kwargs): |                         **kwargs): | ||||||
|     """Get the connection settings as a dict |     """Add a connection. | ||||||
|  |  | ||||||
|     : param db: the name of the database to use, for compatibility with connect |     :param alias: the name that will be used to refer to this connection | ||||||
|     : param name: the name of the specific database to use |         throughout MongoEngine | ||||||
|     : param host: the host name of the: program: `mongod` instance to connect to |     :param name: the name of the specific database to use | ||||||
|     : param port: the port that the: program: `mongod` instance is running on |     :param host: the host name of the :program:`mongod` instance to connect to | ||||||
|     : param read_preference: The read preference for the collection |     :param port: the port that the :program:`mongod` instance is running on | ||||||
|     : param username: username to authenticate with |     :param read_preference: The read preference for the collection | ||||||
|     : param password: password to authenticate with |        ** Added pymongo 2.1 | ||||||
|     : param authentication_source: database to authenticate against |     :param username: username to authenticate with | ||||||
|     : param authentication_mechanism: database authentication mechanisms. |     :param password: password to authenticate with | ||||||
|  |     :param authentication_source: database to authenticate against | ||||||
|  |     :param authentication_mechanism: database authentication mechanisms. | ||||||
|         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, |         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||||
|         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. |         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||||
|     : param is_mock: explicitly use mongomock for this connection |     :param is_mock: explicitly use mongomock for this connection | ||||||
|         (can also be done by using `mongomock: // ` as db host prefix) |         (can also be done by using `mongomock://` as db host prefix) | ||||||
|     : param kwargs: ad-hoc parameters to be passed into the pymongo driver, |     :param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||||
|         for example maxpoolsize, tz_aware, etc. See the documentation |         for example maxpoolsize, tz_aware, etc. See the documentation | ||||||
|         for pymongo's `MongoClient` for a full list. |         for pymongo's `MongoClient` for a full list. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.10.6 - added mongomock support |     .. versionchanged:: 0.10.6 - added mongomock support | ||||||
|     """ |     """ | ||||||
|     conn_settings = { |     conn_settings = { | ||||||
|         'name': name or db or DEFAULT_DATABASE_NAME, |         'name': name or 'test', | ||||||
|         'host': host or DEFAULT_HOST, |         'host': host or 'localhost', | ||||||
|         'port': port or DEFAULT_PORT, |         'port': port or 27017, | ||||||
|         'read_preference': read_preference, |         'read_preference': read_preference, | ||||||
|         'username': username, |         'username': username, | ||||||
|         'password': password, |         'password': password, | ||||||
| @@ -83,7 +68,6 @@ def _get_connection_settings( | |||||||
|         'authentication_mechanism': authentication_mechanism |         'authentication_mechanism': authentication_mechanism | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     _check_db_name(conn_settings['name']) |  | ||||||
|     conn_host = conn_settings['host'] |     conn_host = conn_settings['host'] | ||||||
|  |  | ||||||
|     # Host can be a list or a string, so if string, force to a list. |     # Host can be a list or a string, so if string, force to a list. | ||||||
| @@ -119,30 +103,6 @@ def _get_connection_settings( | |||||||
|                 conn_settings['authentication_source'] = uri_options['authsource'] |                 conn_settings['authentication_source'] = uri_options['authsource'] | ||||||
|             if 'authmechanism' in uri_options: |             if 'authmechanism' in uri_options: | ||||||
|                 conn_settings['authentication_mechanism'] = uri_options['authmechanism'] |                 conn_settings['authentication_mechanism'] = uri_options['authmechanism'] | ||||||
|             if 'readpreference' in uri_options: |  | ||||||
|                 read_preferences = ( |  | ||||||
|                     ReadPreference.NEAREST, |  | ||||||
|                     ReadPreference.PRIMARY, |  | ||||||
|                     ReadPreference.PRIMARY_PREFERRED, |  | ||||||
|                     ReadPreference.SECONDARY, |  | ||||||
|                     ReadPreference.SECONDARY_PREFERRED, |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|                 # Starting with PyMongo v3.5, the "readpreference" option is |  | ||||||
|                 # returned as a string (e.g. "secondaryPreferred") and not an |  | ||||||
|                 # int (e.g. 3). |  | ||||||
|                 # TODO simplify the code below once we drop support for |  | ||||||
|                 # PyMongo v3.4. |  | ||||||
|                 read_pf_mode = uri_options['readpreference'] |  | ||||||
|                 if isinstance(read_pf_mode, six.string_types): |  | ||||||
|                     read_pf_mode = read_pf_mode.lower() |  | ||||||
|                 for preference in read_preferences: |  | ||||||
|                     if ( |  | ||||||
|                         preference.name.lower() == read_pf_mode or |  | ||||||
|                         preference.mode == read_pf_mode |  | ||||||
|                     ): |  | ||||||
|                         conn_settings['read_preference'] = preference |  | ||||||
|                         break |  | ||||||
|         else: |         else: | ||||||
|             resolved_hosts.append(entity) |             resolved_hosts.append(entity) | ||||||
|     conn_settings['host'] = resolved_hosts |     conn_settings['host'] = resolved_hosts | ||||||
| @@ -152,74 +112,17 @@ def _get_connection_settings( | |||||||
|     kwargs.pop('is_slave', None) |     kwargs.pop('is_slave', None) | ||||||
|  |  | ||||||
|     conn_settings.update(kwargs) |     conn_settings.update(kwargs) | ||||||
|     return conn_settings |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def register_connection(alias, db=None, name=None, host=None, port=None, |  | ||||||
|                         read_preference=READ_PREFERENCE, |  | ||||||
|                         username=None, password=None, |  | ||||||
|                         authentication_source=None, |  | ||||||
|                         authentication_mechanism=None, |  | ||||||
|                         **kwargs): |  | ||||||
|     """Register the connection settings. |  | ||||||
|  |  | ||||||
|     : param alias: the name that will be used to refer to this connection |  | ||||||
|         throughout MongoEngine |  | ||||||
|     : param name: the name of the specific database to use |  | ||||||
|     : param db: the name of the database to use, for compatibility with connect |  | ||||||
|     : param host: the host name of the: program: `mongod` instance to connect to |  | ||||||
|     : param port: the port that the: program: `mongod` instance is running on |  | ||||||
|     : param read_preference: The read preference for the collection |  | ||||||
|     : param username: username to authenticate with |  | ||||||
|     : param password: password to authenticate with |  | ||||||
|     : param authentication_source: database to authenticate against |  | ||||||
|     : param authentication_mechanism: database authentication mechanisms. |  | ||||||
|         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, |  | ||||||
|         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. |  | ||||||
|     : param is_mock: explicitly use mongomock for this connection |  | ||||||
|         (can also be done by using `mongomock: // ` as db host prefix) |  | ||||||
|     : param kwargs: ad-hoc parameters to be passed into the pymongo driver, |  | ||||||
|         for example maxpoolsize, tz_aware, etc. See the documentation |  | ||||||
|         for pymongo's `MongoClient` for a full list. |  | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.10.6 - added mongomock support |  | ||||||
|     """ |  | ||||||
|     conn_settings = _get_connection_settings( |  | ||||||
|         db=db, name=name, host=host, port=port, |  | ||||||
|         read_preference=read_preference, |  | ||||||
|         username=username, password=password, |  | ||||||
|         authentication_source=authentication_source, |  | ||||||
|         authentication_mechanism=authentication_mechanism, |  | ||||||
|         **kwargs) |  | ||||||
|     _connection_settings[alias] = conn_settings |     _connection_settings[alias] = conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
| def disconnect(alias=DEFAULT_CONNECTION_NAME): | def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||||
|     """Close the connection with a given alias.""" |     """Close the connection with a given alias.""" | ||||||
|     from mongoengine.base.common import _get_documents_by_db |  | ||||||
|     from mongoengine import Document |  | ||||||
|  |  | ||||||
|     if alias in _connections: |     if alias in _connections: | ||||||
|         get_connection(alias=alias).close() |         get_connection(alias=alias).close() | ||||||
|         del _connections[alias] |         del _connections[alias] | ||||||
|  |  | ||||||
|     if alias in _dbs: |     if alias in _dbs: | ||||||
|         # Detach all cached collections in Documents |  | ||||||
|         for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): |  | ||||||
|             if issubclass(doc_cls, Document):     # Skip EmbeddedDocument |  | ||||||
|                 doc_cls._disconnect() |  | ||||||
|  |  | ||||||
|         del _dbs[alias] |         del _dbs[alias] | ||||||
|  |  | ||||||
|     if alias in _connection_settings: |  | ||||||
|         del _connection_settings[alias] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def disconnect_all(): |  | ||||||
|     """Close all registered database.""" |  | ||||||
|     for alias in list(_connections.keys()): |  | ||||||
|         disconnect(alias) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|     """Return a connection with a given alias.""" |     """Return a connection with a given alias.""" | ||||||
| @@ -243,21 +146,19 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|         raise MongoEngineConnectionError(msg) |         raise MongoEngineConnectionError(msg) | ||||||
|  |  | ||||||
|     def _clean_settings(settings_dict): |     def _clean_settings(settings_dict): | ||||||
|         irrelevant_fields_set = { |         irrelevant_fields = set([ | ||||||
|             'name', 'username', 'password', |             'name', 'username', 'password', 'authentication_source', | ||||||
|             'authentication_source', 'authentication_mechanism' |             'authentication_mechanism' | ||||||
|         } |         ]) | ||||||
|         return { |         return { | ||||||
|             k: v for k, v in settings_dict.items() |             k: v for k, v in settings_dict.items() | ||||||
|             if k not in irrelevant_fields_set |             if k not in irrelevant_fields | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     raw_conn_settings = _connection_settings[alias].copy() |  | ||||||
|  |  | ||||||
|     # Retrieve a copy of the connection settings associated with the requested |     # Retrieve a copy of the connection settings associated with the requested | ||||||
|     # alias and remove the database name and authentication info (we don't |     # alias and remove the database name and authentication info (we don't | ||||||
|     # care about them at this point). |     # care about them at this point). | ||||||
|     conn_settings = _clean_settings(raw_conn_settings) |     conn_settings = _clean_settings(_connection_settings[alias].copy()) | ||||||
|  |  | ||||||
|     # Determine if we should use PyMongo's or mongomock's MongoClient. |     # Determine if we should use PyMongo's or mongomock's MongoClient. | ||||||
|     is_mock = conn_settings.pop('is_mock', False) |     is_mock = conn_settings.pop('is_mock', False) | ||||||
| @@ -271,58 +172,49 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|     else: |     else: | ||||||
|         connection_class = MongoClient |         connection_class = MongoClient | ||||||
|  |  | ||||||
|     # Re-use existing connection if one is suitable. |         # For replica set connections with PyMongo 2.x, use | ||||||
|     existing_connection = _find_existing_connection(raw_conn_settings) |         # MongoReplicaSetClient. | ||||||
|     if existing_connection: |         # TODO remove this once we stop supporting PyMongo 2.x. | ||||||
|         connection = existing_connection |         if 'replicaSet' in conn_settings and not IS_PYMONGO_3: | ||||||
|     else: |             connection_class = MongoReplicaSetClient | ||||||
|         connection = _create_connection( |             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||||
|             alias=alias, |  | ||||||
|             connection_class=connection_class, |             # hosts_or_uri has to be a string, so if 'host' was provided | ||||||
|             **conn_settings |             # as a list, join its parts and separate them by ',' | ||||||
|  |             if isinstance(conn_settings['hosts_or_uri'], list): | ||||||
|  |                 conn_settings['hosts_or_uri'] = ','.join( | ||||||
|  |                     conn_settings['hosts_or_uri']) | ||||||
|  |  | ||||||
|  |             # Discard port since it can't be used on MongoReplicaSetClient | ||||||
|  |             conn_settings.pop('port', None) | ||||||
|  |  | ||||||
|  |     # Iterate over all of the connection settings and if a connection with | ||||||
|  |     # the same parameters is already established, use it instead of creating | ||||||
|  |     # a new one. | ||||||
|  |     existing_connection = None | ||||||
|  |     connection_settings_iterator = ( | ||||||
|  |         (db_alias, settings.copy()) | ||||||
|  |         for db_alias, settings in _connection_settings.items() | ||||||
|     ) |     ) | ||||||
|     _connections[alias] = connection |     for db_alias, connection_settings in connection_settings_iterator: | ||||||
|     return _connections[alias] |         connection_settings = _clean_settings(connection_settings) | ||||||
|  |         if conn_settings == connection_settings and _connections.get(db_alias): | ||||||
|  |             existing_connection = _connections[db_alias] | ||||||
|  |             break | ||||||
|  |  | ||||||
|  |     # If an existing connection was found, assign it to the new alias | ||||||
| def _create_connection(alias, connection_class, **connection_settings): |     if existing_connection: | ||||||
|     """ |         _connections[alias] = existing_connection | ||||||
|     Create the new connection for this alias. Raise |     else: | ||||||
|     MongoEngineConnectionError if it can't be established. |         # Otherwise, create the new connection for this alias. Raise | ||||||
|     """ |         # MongoEngineConnectionError if it can't be established. | ||||||
|         try: |         try: | ||||||
|         return connection_class(**connection_settings) |             _connections[alias] = connection_class(**conn_settings) | ||||||
|         except Exception as e: |         except Exception as e: | ||||||
|             raise MongoEngineConnectionError( |             raise MongoEngineConnectionError( | ||||||
|                 'Cannot connect to database %s :\n%s' % (alias, e)) |                 'Cannot connect to database %s :\n%s' % (alias, e)) | ||||||
|  |  | ||||||
|  |     return _connections[alias] | ||||||
| def _find_existing_connection(connection_settings): |  | ||||||
|     """ |  | ||||||
|     Check if an existing connection could be reused |  | ||||||
|  |  | ||||||
|     Iterate over all of the connection settings and if an existing connection |  | ||||||
|     with the same parameters is suitable, return it |  | ||||||
|  |  | ||||||
|     :param connection_settings: the settings of the new connection |  | ||||||
|     :return: An existing connection or None |  | ||||||
|     """ |  | ||||||
|     connection_settings_bis = ( |  | ||||||
|         (db_alias, settings.copy()) |  | ||||||
|         for db_alias, settings in _connection_settings.items() |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     def _clean_settings(settings_dict): |  | ||||||
|         # Only remove the name but it's important to |  | ||||||
|         # keep the username/password/authentication_source/authentication_mechanism |  | ||||||
|         # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047) |  | ||||||
|         return {k: v for k, v in settings_dict.items() if k != 'name'} |  | ||||||
|  |  | ||||||
|     cleaned_conn_settings = _clean_settings(connection_settings) |  | ||||||
|     for db_alias, connection_settings in connection_settings_bis: |  | ||||||
|         db_conn_settings = _clean_settings(connection_settings) |  | ||||||
|         if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias): |  | ||||||
|             return _connections[db_alias] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
| @@ -352,27 +244,14 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | |||||||
|     provide username and password arguments as well. |     provide username and password arguments as well. | ||||||
|  |  | ||||||
|     Multiple databases are supported by using aliases. Provide a separate |     Multiple databases are supported by using aliases. Provide a separate | ||||||
|     `alias` to connect to a different instance of: program: `mongod`. |     `alias` to connect to a different instance of :program:`mongod`. | ||||||
|  |  | ||||||
|     In order to replace a connection identified by a given alias, you'll |  | ||||||
|     need to call ``disconnect`` first |  | ||||||
|  |  | ||||||
|     See the docstring for `register_connection` for more details about all |     See the docstring for `register_connection` for more details about all | ||||||
|     supported kwargs. |     supported kwargs. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.6 - added multiple database support. |     .. versionchanged:: 0.6 - added multiple database support. | ||||||
|     """ |     """ | ||||||
|     if alias in _connections: |     if alias not in _connections: | ||||||
|         prev_conn_setting = _connection_settings[alias] |  | ||||||
|         new_conn_settings = _get_connection_settings(db, **kwargs) |  | ||||||
|  |  | ||||||
|         if new_conn_settings != prev_conn_setting: |  | ||||||
|             err_msg = ( |  | ||||||
|                 u'A different connection with alias `{}` was already ' |  | ||||||
|                 u'registered. Use disconnect() first' |  | ||||||
|             ).format(alias) |  | ||||||
|             raise MongoEngineConnectionError(err_msg) |  | ||||||
|     else: |  | ||||||
|         register_connection(alias, db, **kwargs) |         register_connection(alias, db, **kwargs) | ||||||
|  |  | ||||||
|     return get_connection(alias) |     return get_connection(alias) | ||||||
|   | |||||||
| @@ -1,14 +1,9 @@ | |||||||
| from contextlib import contextmanager |  | ||||||
|  |  | ||||||
| from pymongo.write_concern import WriteConcern |  | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
| from mongoengine.pymongo_support import count_documents |  | ||||||
|  |  | ||||||
| __all__ = ('switch_db', 'switch_collection', 'no_dereference', | __all__ = ('switch_db', 'switch_collection', 'no_dereference', | ||||||
|            'no_sub_classes', 'query_counter', 'set_write_concern') |            'no_sub_classes', 'query_counter') | ||||||
|  |  | ||||||
|  |  | ||||||
| class switch_db(object): | class switch_db(object): | ||||||
| @@ -115,7 +110,7 @@ class no_dereference(object): | |||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class('GenericReferenceField') | ||||||
|         ComplexBaseField = _import_class('ComplexBaseField') |         ComplexBaseField = _import_class('ComplexBaseField') | ||||||
|  |  | ||||||
|         self.deref_fields = [k for k, v in iteritems(self.cls._fields) |         self.deref_fields = [k for k, v in self.cls._fields.iteritems() | ||||||
|                              if isinstance(v, (ReferenceField, |                              if isinstance(v, (ReferenceField, | ||||||
|                                                GenericReferenceField, |                                                GenericReferenceField, | ||||||
|                                                ComplexBaseField))] |                                                ComplexBaseField))] | ||||||
| @@ -148,85 +143,66 @@ class no_sub_classes(object): | |||||||
|         :param cls: the class to turn querying sub classes on |         :param cls: the class to turn querying sub classes on | ||||||
|         """ |         """ | ||||||
|         self.cls = cls |         self.cls = cls | ||||||
|         self.cls_initial_subclasses = None |  | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """Change the objects default and _auto_dereference values.""" |         """Change the objects default and _auto_dereference values.""" | ||||||
|         self.cls_initial_subclasses = self.cls._subclasses |         self.cls._all_subclasses = self.cls._subclasses | ||||||
|         self.cls._subclasses = (self.cls._class_name,) |         self.cls._subclasses = (self.cls,) | ||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """Reset the default and _auto_dereference values.""" |         """Reset the default and _auto_dereference values.""" | ||||||
|         self.cls._subclasses = self.cls_initial_subclasses |         self.cls._subclasses = self.cls._all_subclasses | ||||||
|  |         delattr(self.cls, '_all_subclasses') | ||||||
|  |         return self.cls | ||||||
|  |  | ||||||
|  |  | ||||||
| class query_counter(object): | class query_counter(object): | ||||||
|     """Query_counter context manager to get the number of queries. |     """Query_counter context manager to get the number of queries.""" | ||||||
|     This works by updating the `profiling_level` of the database so that all queries get logged, |  | ||||||
|     resetting the db.system.profile collection at the beginnig of the context and counting the new entries. |  | ||||||
|  |  | ||||||
|     This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes |  | ||||||
|     can interfere with it |  | ||||||
|  |  | ||||||
|     Be aware that: |  | ||||||
|     - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of |  | ||||||
|         documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches) |  | ||||||
|     - Some queries are ignored by default by the counter (killcursors, db.system.indexes) |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     def __init__(self): |     def __init__(self): | ||||||
|         """Construct the query_counter |         """Construct the query_counter.""" | ||||||
|         """ |         self.counter = 0 | ||||||
|         self.db = get_db() |         self.db = get_db() | ||||||
|         self.initial_profiling_level = None |  | ||||||
|         self._ctx_query_counter = 0             # number of queries issued by the context |  | ||||||
|  |  | ||||||
|         self._ignored_query = { |     def __enter__(self): | ||||||
|             'ns': |         """On every with block we need to drop the profile collection.""" | ||||||
|                 {'$ne': '%s.system.indexes' % self.db.name}, |  | ||||||
|             'op':                       # MONGODB < 3.2 |  | ||||||
|                 {'$ne': 'killcursors'}, |  | ||||||
|             'command.killCursors':      # MONGODB >= 3.2 |  | ||||||
|                 {'$exists': False} |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     def _turn_on_profiling(self): |  | ||||||
|         self.initial_profiling_level = self.db.profiling_level() |  | ||||||
|         self.db.set_profiling_level(0) |         self.db.set_profiling_level(0) | ||||||
|         self.db.system.profile.drop() |         self.db.system.profile.drop() | ||||||
|         self.db.set_profiling_level(2) |         self.db.set_profiling_level(2) | ||||||
|  |  | ||||||
|     def _resets_profiling(self): |  | ||||||
|         self.db.set_profiling_level(self.initial_profiling_level) |  | ||||||
|  |  | ||||||
|     def __enter__(self): |  | ||||||
|         self._turn_on_profiling() |  | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         self._resets_profiling() |         """Reset the profiling level.""" | ||||||
|  |         self.db.set_profiling_level(0) | ||||||
|  |  | ||||||
|     def __eq__(self, value): |     def __eq__(self, value): | ||||||
|  |         """== Compare querycounter.""" | ||||||
|         counter = self._get_count() |         counter = self._get_count() | ||||||
|         return value == counter |         return value == counter | ||||||
|  |  | ||||||
|     def __ne__(self, value): |     def __ne__(self, value): | ||||||
|  |         """!= Compare querycounter.""" | ||||||
|         return not self.__eq__(value) |         return not self.__eq__(value) | ||||||
|  |  | ||||||
|     def __lt__(self, value): |     def __lt__(self, value): | ||||||
|  |         """< Compare querycounter.""" | ||||||
|         return self._get_count() < value |         return self._get_count() < value | ||||||
|  |  | ||||||
|     def __le__(self, value): |     def __le__(self, value): | ||||||
|  |         """<= Compare querycounter.""" | ||||||
|         return self._get_count() <= value |         return self._get_count() <= value | ||||||
|  |  | ||||||
|     def __gt__(self, value): |     def __gt__(self, value): | ||||||
|  |         """> Compare querycounter.""" | ||||||
|         return self._get_count() > value |         return self._get_count() > value | ||||||
|  |  | ||||||
|     def __ge__(self, value): |     def __ge__(self, value): | ||||||
|  |         """>= Compare querycounter.""" | ||||||
|         return self._get_count() >= value |         return self._get_count() >= value | ||||||
|  |  | ||||||
|     def __int__(self): |     def __int__(self): | ||||||
|  |         """int representation.""" | ||||||
|         return self._get_count() |         return self._get_count() | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
| @@ -234,17 +210,8 @@ class query_counter(object): | |||||||
|         return u"%s" % self._get_count() |         return u"%s" % self._get_count() | ||||||
|  |  | ||||||
|     def _get_count(self): |     def _get_count(self): | ||||||
|         """Get the number of queries by counting the current number of entries in db.system.profile |         """Get the number of queries.""" | ||||||
|         and substracting the queries issued by this context. In fact everytime this is called, 1 query is |         ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}} | ||||||
|         issued so we need to balance that |         count = self.db.system.profile.find(ignore_query).count() - self.counter | ||||||
|         """ |         self.counter += 1 | ||||||
|         count = count_documents(self.db.system.profile, self._ignored_query) - self._ctx_query_counter |  | ||||||
|         self._ctx_query_counter += 1    # Account for the query we just issued to gather the information |  | ||||||
|         return count |         return count | ||||||
|  |  | ||||||
|  |  | ||||||
| @contextmanager |  | ||||||
| def set_write_concern(collection, write_concerns): |  | ||||||
|     combined_concerns = dict(collection.write_concern.document.items()) |  | ||||||
|     combined_concerns.update(write_concerns) |  | ||||||
|     yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) |  | ||||||
|   | |||||||
| @@ -1,10 +1,8 @@ | |||||||
| from bson import DBRef, SON | from bson import DBRef, SON | ||||||
| import six | import six | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, | from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, | ||||||
|                               TopLevelDocumentMetaclass, get_document) |                               TopLevelDocumentMetaclass, get_document) | ||||||
| from mongoengine.base.datastructures import LazyReference |  | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| from mongoengine.document import Document, EmbeddedDocument | from mongoengine.document import Document, EmbeddedDocument | ||||||
| from mongoengine.fields import DictField, ListField, MapField, ReferenceField | from mongoengine.fields import DictField, ListField, MapField, ReferenceField | ||||||
| @@ -53,40 +51,26 @@ class DeReference(object): | |||||||
|                         [i.__class__ == doc_type for i in items.values()]): |                         [i.__class__ == doc_type for i in items.values()]): | ||||||
|                     return items |                     return items | ||||||
|                 elif not field.dbref: |                 elif not field.dbref: | ||||||
|                     # We must turn the ObjectIds into DBRefs |                     if not hasattr(items, 'items'): | ||||||
|  |  | ||||||
|                     # Recursively dig into the sub items of a list/dict |                         def _get_items(items): | ||||||
|                     # to turn the ObjectIds into DBRefs |  | ||||||
|                     def _get_items_from_list(items): |  | ||||||
|                             new_items = [] |                             new_items = [] | ||||||
|                             for v in items: |                             for v in items: | ||||||
|                             value = v |  | ||||||
|                             if isinstance(v, dict): |  | ||||||
|                                 value = _get_items_from_dict(v) |  | ||||||
|                             elif isinstance(v, list): |  | ||||||
|                                 value = _get_items_from_list(v) |  | ||||||
|                             elif not isinstance(v, (DBRef, Document)): |  | ||||||
|                                 value = field.to_python(v) |  | ||||||
|                             new_items.append(value) |  | ||||||
|                         return new_items |  | ||||||
|  |  | ||||||
|                     def _get_items_from_dict(items): |  | ||||||
|                         new_items = {} |  | ||||||
|                         for k, v in iteritems(items): |  | ||||||
|                             value = v |  | ||||||
|                                 if isinstance(v, list): |                                 if isinstance(v, list): | ||||||
|                                 value = _get_items_from_list(v) |                                     new_items.append(_get_items(v)) | ||||||
|                             elif isinstance(v, dict): |  | ||||||
|                                 value = _get_items_from_dict(v) |  | ||||||
|                                 elif not isinstance(v, (DBRef, Document)): |                                 elif not isinstance(v, (DBRef, Document)): | ||||||
|                                 value = field.to_python(v) |                                     new_items.append(field.to_python(v)) | ||||||
|                             new_items[k] = value |                                 else: | ||||||
|  |                                     new_items.append(v) | ||||||
|                             return new_items |                             return new_items | ||||||
|  |  | ||||||
|                     if not hasattr(items, 'items'): |                         items = _get_items(items) | ||||||
|                         items = _get_items_from_list(items) |  | ||||||
|                     else: |                     else: | ||||||
|                         items = _get_items_from_dict(items) |                         items = { | ||||||
|  |                             k: (v if isinstance(v, (DBRef, Document)) | ||||||
|  |                                 else field.to_python(v)) | ||||||
|  |                             for k, v in items.iteritems() | ||||||
|  |                         } | ||||||
|  |  | ||||||
|         self.reference_map = self._find_references(items) |         self.reference_map = self._find_references(items) | ||||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) |         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||||
| @@ -113,32 +97,26 @@ class DeReference(object): | |||||||
|         depth += 1 |         depth += 1 | ||||||
|         for item in iterator: |         for item in iterator: | ||||||
|             if isinstance(item, (Document, EmbeddedDocument)): |             if isinstance(item, (Document, EmbeddedDocument)): | ||||||
|                 for field_name, field in iteritems(item._fields): |                 for field_name, field in item._fields.iteritems(): | ||||||
|                     v = item._data.get(field_name, None) |                     v = item._data.get(field_name, None) | ||||||
|                     if isinstance(v, LazyReference): |                     if isinstance(v, DBRef): | ||||||
|                         # LazyReference inherits DBRef but should not be dereferenced here ! |  | ||||||
|                         continue |  | ||||||
|                     elif isinstance(v, DBRef): |  | ||||||
|                         reference_map.setdefault(field.document_type, set()).add(v.id) |                         reference_map.setdefault(field.document_type, set()).add(v.id) | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||||
|                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) |                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) | ||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) |                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) | ||||||
|                         references = self._find_references(v, depth) |                         references = self._find_references(v, depth) | ||||||
|                         for key, refs in iteritems(references): |                         for key, refs in references.iteritems(): | ||||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): |                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): | ||||||
|                                 key = field_cls |                                 key = field_cls | ||||||
|                             reference_map.setdefault(key, set()).update(refs) |                             reference_map.setdefault(key, set()).update(refs) | ||||||
|             elif isinstance(item, LazyReference): |  | ||||||
|                 # LazyReference inherits DBRef but should not be dereferenced here ! |  | ||||||
|                 continue |  | ||||||
|             elif isinstance(item, DBRef): |             elif isinstance(item, DBRef): | ||||||
|                 reference_map.setdefault(item.collection, set()).add(item.id) |                 reference_map.setdefault(item.collection, set()).add(item.id) | ||||||
|             elif isinstance(item, (dict, SON)) and '_ref' in item: |             elif isinstance(item, (dict, SON)) and '_ref' in item: | ||||||
|                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) |                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) | ||||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: |             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||||
|                 references = self._find_references(item, depth - 1) |                 references = self._find_references(item, depth - 1) | ||||||
|                 for key, refs in iteritems(references): |                 for key, refs in references.iteritems(): | ||||||
|                     reference_map.setdefault(key, set()).update(refs) |                     reference_map.setdefault(key, set()).update(refs) | ||||||
|  |  | ||||||
|         return reference_map |         return reference_map | ||||||
| @@ -147,21 +125,16 @@ class DeReference(object): | |||||||
|         """Fetch all references and convert to their document objects |         """Fetch all references and convert to their document objects | ||||||
|         """ |         """ | ||||||
|         object_map = {} |         object_map = {} | ||||||
|         for collection, dbrefs in iteritems(self.reference_map): |         for collection, dbrefs in self.reference_map.iteritems(): | ||||||
|  |             if hasattr(collection, 'objects'):  # We have a document class for the refs | ||||||
|             # we use getattr instead of hasattr because hasattr swallows any exception under python2 |  | ||||||
|             # so it could hide nasty things without raising exceptions (cfr bug #1688)) |  | ||||||
|             ref_document_cls_exists = (getattr(collection, 'objects', None) is not None) |  | ||||||
|  |  | ||||||
|             if ref_document_cls_exists: |  | ||||||
|                 col_name = collection._get_collection_name() |                 col_name = collection._get_collection_name() | ||||||
|                 refs = [dbref for dbref in dbrefs |                 refs = [dbref for dbref in dbrefs | ||||||
|                         if (col_name, dbref) not in object_map] |                         if (col_name, dbref) not in object_map] | ||||||
|                 references = collection.objects.in_bulk(refs) |                 references = collection.objects.in_bulk(refs) | ||||||
|                 for key, doc in iteritems(references): |                 for key, doc in references.iteritems(): | ||||||
|                     object_map[(col_name, key)] = doc |                     object_map[(col_name, key)] = doc | ||||||
|             else:  # Generic reference: use the refs data to convert to document |             else:  # Generic reference: use the refs data to convert to document | ||||||
|                 if isinstance(doc_type, (ListField, DictField, MapField)): |                 if isinstance(doc_type, (ListField, DictField, MapField,)): | ||||||
|                     continue |                     continue | ||||||
|  |  | ||||||
|                 refs = [dbref for dbref in dbrefs |                 refs = [dbref for dbref in dbrefs | ||||||
| @@ -230,7 +203,7 @@ class DeReference(object): | |||||||
|             data = [] |             data = [] | ||||||
|         else: |         else: | ||||||
|             is_list = False |             is_list = False | ||||||
|             iterator = iteritems(items) |             iterator = items.iteritems() | ||||||
|             data = {} |             data = {} | ||||||
|  |  | ||||||
|         depth += 1 |         depth += 1 | ||||||
| @@ -257,7 +230,7 @@ class DeReference(object): | |||||||
|             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                 item_name = '%s.%s' % (name, k) if name else name |                 item_name = '%s.%s' % (name, k) if name else name | ||||||
|                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) |                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) | ||||||
|             elif isinstance(v, DBRef) and hasattr(v, 'id'): |             elif hasattr(v, 'id'): | ||||||
|                 data[k] = self.object_map.get((v.collection, v.id), v) |                 data[k] = self.object_map.get((v.collection, v.id), v) | ||||||
|  |  | ||||||
|         if instance and name: |         if instance and name: | ||||||
|   | |||||||
| @@ -5,7 +5,6 @@ from bson.dbref import DBRef | |||||||
| import pymongo | import pymongo | ||||||
| from pymongo.read_preferences import ReadPreference | from pymongo.read_preferences import ReadPreference | ||||||
| import six | import six | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine import signals | from mongoengine import signals | ||||||
| from mongoengine.base import (BaseDict, BaseDocument, BaseList, | from mongoengine.base import (BaseDict, BaseDocument, BaseList, | ||||||
| @@ -13,12 +12,10 @@ from mongoengine.base import (BaseDict, BaseDocument, BaseList, | |||||||
|                               TopLevelDocumentMetaclass, get_document) |                               TopLevelDocumentMetaclass, get_document) | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
| from mongoengine.context_managers import (set_write_concern, | from mongoengine.context_managers import switch_collection, switch_db | ||||||
|                                           switch_collection, |  | ||||||
|                                           switch_db) |  | ||||||
| from mongoengine.errors import (InvalidDocumentError, InvalidQueryError, | from mongoengine.errors import (InvalidDocumentError, InvalidQueryError, | ||||||
|                                 SaveConditionError) |                                 SaveConditionError) | ||||||
| from mongoengine.pymongo_support import list_collection_names | from mongoengine.python_support import IS_PYMONGO_3 | ||||||
| from mongoengine.queryset import (NotUniqueError, OperationError, | from mongoengine.queryset import (NotUniqueError, OperationError, | ||||||
|                                   QuerySet, transform) |                                   QuerySet, transform) | ||||||
|  |  | ||||||
| @@ -42,7 +39,7 @@ class InvalidCollectionError(Exception): | |||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | class EmbeddedDocument(BaseDocument): | ||||||
|     """A :class:`~mongoengine.Document` that isn't stored in its own |     """A :class:`~mongoengine.Document` that isn't stored in its own | ||||||
|     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as |     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as | ||||||
|     fields on :class:`~mongoengine.Document`\ s through the |     fields on :class:`~mongoengine.Document`\ s through the | ||||||
| @@ -61,12 +58,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | |||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = DocumentMetaclass |     my_metaclass = DocumentMetaclass | ||||||
|  |     __metaclass__ = DocumentMetaclass | ||||||
|     # A generic embedded document doesn't have any immutable properties |  | ||||||
|     # that describe it uniquely, hence it shouldn't be hashable. You can |  | ||||||
|     # define your own __hash__ method on a subclass if you need your |  | ||||||
|     # embedded documents to be hashable. |  | ||||||
|     __hash__ = None |  | ||||||
|  |  | ||||||
|     def __init__(self, *args, **kwargs): |     def __init__(self, *args, **kwargs): | ||||||
|         super(EmbeddedDocument, self).__init__(*args, **kwargs) |         super(EmbeddedDocument, self).__init__(*args, **kwargs) | ||||||
| @@ -90,8 +82,14 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|         return data |         return data | ||||||
|  |  | ||||||
|  |     def save(self, *args, **kwargs): | ||||||
|  |         self._instance.save(*args, **kwargs) | ||||||
|  |  | ||||||
| class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): |     def reload(self, *args, **kwargs): | ||||||
|  |         self._instance.reload(*args, **kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Document(BaseDocument): | ||||||
|     """The base class used for defining the structure and properties of |     """The base class used for defining the structure and properties of | ||||||
|     collections of documents stored in MongoDB. Inherit from this class, and |     collections of documents stored in MongoDB. Inherit from this class, and | ||||||
|     add fields as class attributes to define a document's structure. |     add fields as class attributes to define a document's structure. | ||||||
| @@ -146,6 +144,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = TopLevelDocumentMetaclass |     my_metaclass = TopLevelDocumentMetaclass | ||||||
|  |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|     __slots__ = ('__objects',) |     __slots__ = ('__objects',) | ||||||
|  |  | ||||||
| @@ -161,98 +160,57 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """Set the primary key.""" |         """Set the primary key.""" | ||||||
|         return setattr(self, self._meta['id_field'], value) |         return setattr(self, self._meta['id_field'], value) | ||||||
|  |  | ||||||
|     def __hash__(self): |  | ||||||
|         """Return the hash based on the PK of this document. If it's new |  | ||||||
|         and doesn't have a PK yet, return the default object hash instead. |  | ||||||
|         """ |  | ||||||
|         if self.pk is None: |  | ||||||
|             return super(BaseDocument, self).__hash__() |  | ||||||
|  |  | ||||||
|         return hash(self.pk) |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_db(cls): |     def _get_db(cls): | ||||||
|         """Some Model using other db_alias""" |         """Some Model using other db_alias""" | ||||||
|         return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)) |         return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)) | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def _disconnect(cls): |  | ||||||
|         """Detach the Document class from the (cached) database collection""" |  | ||||||
|         cls._collection = None |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_collection(cls): |     def _get_collection(cls): | ||||||
|         """Return the PyMongo collection corresponding to this document. |         """Returns the collection for the document.""" | ||||||
|  |         # TODO: use new get_collection() with PyMongo3 ? | ||||||
|         Upon first call, this method: |  | ||||||
|         1. Initializes a :class:`~pymongo.collection.Collection` corresponding |  | ||||||
|            to this document. |  | ||||||
|         2. Creates indexes defined in this document's :attr:`meta` dictionary. |  | ||||||
|            This happens only if `auto_create_index` is True. |  | ||||||
|         """ |  | ||||||
|         if not hasattr(cls, '_collection') or cls._collection is None: |         if not hasattr(cls, '_collection') or cls._collection is None: | ||||||
|             # Get the collection, either capped or regular. |             db = cls._get_db() | ||||||
|  |             collection_name = cls._get_collection_name() | ||||||
|  |             # Create collection as a capped collection if specified | ||||||
|             if cls._meta.get('max_size') or cls._meta.get('max_documents'): |             if cls._meta.get('max_size') or cls._meta.get('max_documents'): | ||||||
|                 cls._collection = cls._get_capped_collection() |                 # Get max document limit and max byte size from meta | ||||||
|             else: |  | ||||||
|                 db = cls._get_db() |  | ||||||
|                 collection_name = cls._get_collection_name() |  | ||||||
|                 cls._collection = db[collection_name] |  | ||||||
|  |  | ||||||
|             # Ensure indexes on the collection unless auto_create_index was |  | ||||||
|             # set to False. |  | ||||||
|             # Also there is no need to ensure indexes on slave. |  | ||||||
|             db = cls._get_db() |  | ||||||
|             if cls._meta.get('auto_create_index', True) and\ |  | ||||||
|                     db.client.is_primary: |  | ||||||
|                 cls.ensure_indexes() |  | ||||||
|  |  | ||||||
|         return cls._collection |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def _get_capped_collection(cls): |  | ||||||
|         """Create a new or get an existing capped PyMongo collection.""" |  | ||||||
|         db = cls._get_db() |  | ||||||
|         collection_name = cls._get_collection_name() |  | ||||||
|  |  | ||||||
|         # Get max document limit and max byte size from meta. |  | ||||||
|                 max_size = cls._meta.get('max_size') or 10 * 2 ** 20  # 10MB default |                 max_size = cls._meta.get('max_size') or 10 * 2 ** 20  # 10MB default | ||||||
|                 max_documents = cls._meta.get('max_documents') |                 max_documents = cls._meta.get('max_documents') | ||||||
|  |                 # Round up to next 256 bytes as MongoDB would do it to avoid exception | ||||||
|         # MongoDB will automatically raise the size to make it a multiple of |  | ||||||
|         # 256 bytes. We raise it here ourselves to be able to reliably compare |  | ||||||
|         # the options below. |  | ||||||
|                 if max_size % 256: |                 if max_size % 256: | ||||||
|                     max_size = (max_size // 256 + 1) * 256 |                     max_size = (max_size // 256 + 1) * 256 | ||||||
|  |  | ||||||
|         # If the collection already exists and has different options |                 if collection_name in db.collection_names(): | ||||||
|         # (i.e. isn't capped or has different max/size), raise an error. |                     cls._collection = db[collection_name] | ||||||
|         if collection_name in list_collection_names(db, include_system_collections=True): |                     # The collection already exists, check if its capped | ||||||
|             collection = db[collection_name] |                     # options match the specified capped options | ||||||
|             options = collection.options() |                     options = cls._collection.options() | ||||||
|             if ( |                     if options.get('max') != max_documents or \ | ||||||
|                 options.get('max') != max_documents or |                             options.get('size') != max_size: | ||||||
|                 options.get('size') != max_size |                         msg = (('Cannot create collection "%s" as a capped ' | ||||||
|             ): |                                 'collection as it already exists') | ||||||
|                 raise InvalidCollectionError( |                                % cls._collection) | ||||||
|                     'Cannot create collection "{}" as a capped ' |                         raise InvalidCollectionError(msg) | ||||||
|                     'collection as it already exists'.format(cls._collection) |                 else: | ||||||
|                 ) |                     # Create the collection as a capped collection | ||||||
|  |  | ||||||
|             return collection |  | ||||||
|  |  | ||||||
|         # Create a new capped collection. |  | ||||||
|                     opts = {'capped': True, 'size': max_size} |                     opts = {'capped': True, 'size': max_size} | ||||||
|                     if max_documents: |                     if max_documents: | ||||||
|                         opts['max'] = max_documents |                         opts['max'] = max_documents | ||||||
|  |                     cls._collection = db.create_collection( | ||||||
|         return db.create_collection(collection_name, **opts) |                         collection_name, **opts | ||||||
|  |                     ) | ||||||
|  |             else: | ||||||
|  |                 cls._collection = db[collection_name] | ||||||
|  |             if cls._meta.get('auto_create_index', True): | ||||||
|  |                 cls.ensure_indexes() | ||||||
|  |         return cls._collection | ||||||
|  |  | ||||||
|     def to_mongo(self, *args, **kwargs): |     def to_mongo(self, *args, **kwargs): | ||||||
|         data = super(Document, self).to_mongo(*args, **kwargs) |         data = super(Document, self).to_mongo(*args, **kwargs) | ||||||
|  |  | ||||||
|         # If '_id' is None, try and set it from self._data. If that |         # If '_id' is None, try and set it from self._data. If that | ||||||
|         # doesn't exist either, remove '_id' from the SON completely. |         # doesn't exist either, remote '_id' from the SON completely. | ||||||
|         if data['_id'] is None: |         if data['_id'] is None: | ||||||
|             if self._data.get('id') is None: |             if self._data.get('id') is None: | ||||||
|                 del data['_id'] |                 del data['_id'] | ||||||
| @@ -289,9 +247,6 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         elif query[id_field] != self.pk: |         elif query[id_field] != self.pk: | ||||||
|             raise InvalidQueryError('Invalid document modify query: it must modify only this document.') |             raise InvalidQueryError('Invalid document modify query: it must modify only this document.') | ||||||
|  |  | ||||||
|         # Need to add shard key to query, or you get an error |  | ||||||
|         query.update(self._object_key) |  | ||||||
|  |  | ||||||
|         updated = self._qs(**query).modify(new=True, **update) |         updated = self._qs(**query).modify(new=True, **update) | ||||||
|         if updated is None: |         if updated is None: | ||||||
|             return False |             return False | ||||||
| @@ -312,7 +267,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         created. |         created. | ||||||
|  |  | ||||||
|         :param force_insert: only try to create a new document, don't allow |         :param force_insert: only try to create a new document, don't allow | ||||||
|             updates of existing documents. |             updates of existing documents | ||||||
|         :param validate: validates the document; set to ``False`` to skip. |         :param validate: validates the document; set to ``False`` to skip. | ||||||
|         :param clean: call the document clean method, requires `validate` to be |         :param clean: call the document clean method, requires `validate` to be | ||||||
|             True. |             True. | ||||||
| @@ -332,7 +287,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         :param save_condition: only perform save if matching record in db |         :param save_condition: only perform save if matching record in db | ||||||
|             satisfies condition(s) (e.g. version number). |             satisfies condition(s) (e.g. version number). | ||||||
|             Raises :class:`OperationError` if the conditions are not satisfied |             Raises :class:`OperationError` if the conditions are not satisfied | ||||||
|         :param signal_kwargs: (optional) kwargs dictionary to be passed to |         :parm signal_kwargs: (optional) kwargs dictionary to be passed to | ||||||
|             the signal calls. |             the signal calls. | ||||||
|  |  | ||||||
|         .. versionchanged:: 0.5 |         .. versionchanged:: 0.5 | ||||||
| @@ -358,26 +313,24 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         .. versionchanged:: 0.10.7 |         .. versionchanged:: 0.10.7 | ||||||
|             Add signal_kwargs argument |             Add signal_kwargs argument | ||||||
|         """ |         """ | ||||||
|         signal_kwargs = signal_kwargs or {} |  | ||||||
|  |  | ||||||
|         if self._meta.get('abstract'): |         if self._meta.get('abstract'): | ||||||
|             raise InvalidDocumentError('Cannot save an abstract document.') |             raise InvalidDocumentError('Cannot save an abstract document.') | ||||||
|  |  | ||||||
|  |         signal_kwargs = signal_kwargs or {} | ||||||
|         signals.pre_save.send(self.__class__, document=self, **signal_kwargs) |         signals.pre_save.send(self.__class__, document=self, **signal_kwargs) | ||||||
|  |  | ||||||
|         if validate: |         if validate: | ||||||
|             self.validate(clean=clean) |             self.validate(clean=clean) | ||||||
|  |  | ||||||
|         if write_concern is None: |         if write_concern is None: | ||||||
|             write_concern = {} |             write_concern = {'w': 1} | ||||||
|  |  | ||||||
|         doc_id = self.to_mongo(fields=[self._meta['id_field']]) |         doc = self.to_mongo() | ||||||
|         created = ('_id' not in doc_id or self._created or force_insert) |  | ||||||
|  |         created = ('_id' not in doc or self._created or force_insert) | ||||||
|  |  | ||||||
|         signals.pre_save_post_validation.send(self.__class__, document=self, |         signals.pre_save_post_validation.send(self.__class__, document=self, | ||||||
|                                               created=created, **signal_kwargs) |                                               created=created, **signal_kwargs) | ||||||
|         # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation |  | ||||||
|         doc = self.to_mongo() |  | ||||||
|  |  | ||||||
|         if self._meta.get('auto_create_index', True): |         if self._meta.get('auto_create_index', True): | ||||||
|             self.ensure_indexes() |             self.ensure_indexes() | ||||||
| @@ -437,36 +390,24 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         Helper method, should only be used inside save(). |         Helper method, should only be used inside save(). | ||||||
|         """ |         """ | ||||||
|         collection = self._get_collection() |         collection = self._get_collection() | ||||||
|         with set_write_concern(collection, write_concern) as wc_collection: |  | ||||||
|             if force_insert: |  | ||||||
|                 return wc_collection.insert_one(doc).inserted_id |  | ||||||
|             # insert_one will provoke UniqueError alongside save does not |  | ||||||
|             # therefore, it need to catch and call replace_one. |  | ||||||
|             if '_id' in doc: |  | ||||||
|                 raw_object = wc_collection.find_one_and_replace( |  | ||||||
|                     {'_id': doc['_id']}, doc) |  | ||||||
|                 if raw_object: |  | ||||||
|                     return doc['_id'] |  | ||||||
|  |  | ||||||
|             object_id = wc_collection.insert_one(doc).inserted_id |         if force_insert: | ||||||
|  |             return collection.insert(doc, **write_concern) | ||||||
|  |  | ||||||
|  |         object_id = collection.save(doc, **write_concern) | ||||||
|  |  | ||||||
|  |         # In PyMongo 3.0, the save() call calls internally the _update() call | ||||||
|  |         # but they forget to return the _id value passed back, therefore getting it back here | ||||||
|  |         # Correct behaviour in 2.X and in 3.0.1+ versions | ||||||
|  |         if not object_id and pymongo.version_tuple == (3, 0): | ||||||
|  |             pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk) | ||||||
|  |             object_id = ( | ||||||
|  |                 self._qs.filter(pk=pk_as_mongo_obj).first() and | ||||||
|  |                 self._qs.filter(pk=pk_as_mongo_obj).first().pk | ||||||
|  |             )  # TODO doesn't this make 2 queries? | ||||||
|  |  | ||||||
|         return object_id |         return object_id | ||||||
|  |  | ||||||
|     def _get_update_doc(self): |  | ||||||
|         """Return a dict containing all the $set and $unset operations |  | ||||||
|         that should be sent to MongoDB based on the changes made to this |  | ||||||
|         Document. |  | ||||||
|         """ |  | ||||||
|         updates, removals = self._delta() |  | ||||||
|  |  | ||||||
|         update_doc = {} |  | ||||||
|         if updates: |  | ||||||
|             update_doc['$set'] = updates |  | ||||||
|         if removals: |  | ||||||
|             update_doc['$unset'] = removals |  | ||||||
|  |  | ||||||
|         return update_doc |  | ||||||
|  |  | ||||||
|     def _save_update(self, doc, save_condition, write_concern): |     def _save_update(self, doc, save_condition, write_concern): | ||||||
|         """Update an existing document. |         """Update an existing document. | ||||||
|  |  | ||||||
| @@ -492,15 +433,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|                 val = val[ak] |                 val = val[ak] | ||||||
|             select_dict['.'.join(actual_key)] = val |             select_dict['.'.join(actual_key)] = val | ||||||
|  |  | ||||||
|         update_doc = self._get_update_doc() |         updates, removals = self._delta() | ||||||
|         if update_doc: |         update_query = {} | ||||||
|  |         if updates: | ||||||
|  |             update_query['$set'] = updates | ||||||
|  |         if removals: | ||||||
|  |             update_query['$unset'] = removals | ||||||
|  |         if updates or removals: | ||||||
|             upsert = save_condition is None |             upsert = save_condition is None | ||||||
|             with set_write_concern(collection, write_concern) as wc_collection: |             last_error = collection.update(select_dict, update_query, | ||||||
|                 last_error = wc_collection.update_one( |                                            upsert=upsert, **write_concern) | ||||||
|                     select_dict, |  | ||||||
|                     update_doc, |  | ||||||
|                     upsert=upsert |  | ||||||
|                 ).raw_result |  | ||||||
|             if not upsert and last_error['n'] == 0: |             if not upsert and last_error['n'] == 0: | ||||||
|                 raise SaveConditionError('Race condition preventing' |                 raise SaveConditionError('Race condition preventing' | ||||||
|                                          ' document update detected') |                                          ' document update detected') | ||||||
| @@ -544,7 +486,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def _qs(self): |     def _qs(self): | ||||||
|         """Return the default queryset corresponding to this document.""" |         """Return the queryset to use for updating / reloading / deletions.""" | ||||||
|         if not hasattr(self, '__objects'): |         if not hasattr(self, '__objects'): | ||||||
|             self.__objects = QuerySet(self, self._get_collection()) |             self.__objects = QuerySet(self, self._get_collection()) | ||||||
|         return self.__objects |         return self.__objects | ||||||
| @@ -552,11 +494,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|     @property |     @property | ||||||
|     def _object_key(self): |     def _object_key(self): | ||||||
|         """Get the query dict that can be used to fetch this object from |         """Get the query dict that can be used to fetch this object from | ||||||
|         the database. |         the database. Most of the time it's a simple PK lookup, but in | ||||||
|  |         case of a sharded collection with a compound shard key, it can | ||||||
|         Most of the time the dict is a simple PK lookup, but in case of |         contain a more complex query. | ||||||
|         a sharded collection with a compound shard key, it can contain a more |  | ||||||
|         complex query. |  | ||||||
|         """ |         """ | ||||||
|         select_dict = {'pk': self.pk} |         select_dict = {'pk': self.pk} | ||||||
|         shard_key = self.__class__._meta.get('shard_key', tuple()) |         shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||||
| @@ -593,11 +533,12 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """Delete the :class:`~mongoengine.Document` from the database. This |         """Delete the :class:`~mongoengine.Document` from the database. This | ||||||
|         will only take effect if the document has been previously saved. |         will only take effect if the document has been previously saved. | ||||||
|  |  | ||||||
|         :param signal_kwargs: (optional) kwargs dictionary to be passed to |         :parm signal_kwargs: (optional) kwargs dictionary to be passed to | ||||||
|             the signal calls. |             the signal calls. | ||||||
|         :param write_concern: Extra keyword arguments are passed down which |         :param write_concern: Extra keyword arguments are passed down which | ||||||
|             will be used as options for the resultant ``getLastError`` command. |             will be used as options for the resultant | ||||||
|             For example, ``save(..., w: 2, fsync: True)`` will |             ``getLastError`` command.  For example, | ||||||
|  |             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will | ||||||
|             wait until at least two servers have recorded the write and |             wait until at least two servers have recorded the write and | ||||||
|             will force an fsync on the primary server. |             will force an fsync on the primary server. | ||||||
|  |  | ||||||
| @@ -609,7 +550,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|         # Delete FileFields separately |         # Delete FileFields separately | ||||||
|         FileField = _import_class('FileField') |         FileField = _import_class('FileField') | ||||||
|         for name, field in iteritems(self._fields): |         for name, field in self._fields.iteritems(): | ||||||
|             if isinstance(field, FileField): |             if isinstance(field, FileField): | ||||||
|                 getattr(self, name).delete() |                 getattr(self, name).delete() | ||||||
|  |  | ||||||
| @@ -718,6 +659,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|             obj = obj[0] |             obj = obj[0] | ||||||
|         else: |         else: | ||||||
|             raise self.DoesNotExist('Document does not exist') |             raise self.DoesNotExist('Document does not exist') | ||||||
|  |  | ||||||
|         for field in obj._data: |         for field in obj._data: | ||||||
|             if not fields or field in fields: |             if not fields or field in fields: | ||||||
|                 try: |                 try: | ||||||
| @@ -725,7 +667,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|                 except (KeyError, AttributeError): |                 except (KeyError, AttributeError): | ||||||
|                     try: |                     try: | ||||||
|                         # If field is a special field, e.g. items is stored as _reserved_items, |                         # If field is a special field, e.g. items is stored as _reserved_items, | ||||||
|                         # a KeyError is thrown. So try to retrieve the field from _data |                         # an KeyError is thrown. So try to retrieve the field from _data | ||||||
|                         setattr(self, field, self._reload(field, obj._data.get(field))) |                         setattr(self, field, self._reload(field, obj._data.get(field))) | ||||||
|                     except KeyError: |                     except KeyError: | ||||||
|                         # If field is removed from the database while the object |                         # If field is removed from the database while the object | ||||||
| @@ -733,9 +675,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|                         # i.e. obj.update(unset__field=1) followed by obj.reload() |                         # i.e. obj.update(unset__field=1) followed by obj.reload() | ||||||
|                         delattr(self, field) |                         delattr(self, field) | ||||||
|  |  | ||||||
|         self._changed_fields = list( |         self._changed_fields = obj._changed_fields | ||||||
|             set(self._changed_fields) - set(fields) |  | ||||||
|         ) if fields else obj._changed_fields |  | ||||||
|         self._created = False |         self._created = False | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
| @@ -794,13 +734,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         .. versionchanged:: 0.10.7 |         .. versionchanged:: 0.10.7 | ||||||
|             :class:`OperationError` exception raised if no collection available |             :class:`OperationError` exception raised if no collection available | ||||||
|         """ |         """ | ||||||
|         coll_name = cls._get_collection_name() |         col_name = cls._get_collection_name() | ||||||
|         if not coll_name: |         if not col_name: | ||||||
|             raise OperationError('Document %s has no collection defined ' |             raise OperationError('Document %s has no collection defined ' | ||||||
|                                  '(is it abstract ?)' % cls) |                                  '(is it abstract ?)' % cls) | ||||||
|         cls._collection = None |         cls._collection = None | ||||||
|         db = cls._get_db() |         db = cls._get_db() | ||||||
|         db.drop_collection(coll_name) |         db.drop_collection(col_name) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def create_index(cls, keys, background=False, **kwargs): |     def create_index(cls, keys, background=False, **kwargs): | ||||||
| @@ -815,13 +755,18 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         index_spec = index_spec.copy() |         index_spec = index_spec.copy() | ||||||
|         fields = index_spec.pop('fields') |         fields = index_spec.pop('fields') | ||||||
|         drop_dups = kwargs.get('drop_dups', False) |         drop_dups = kwargs.get('drop_dups', False) | ||||||
|         if drop_dups: |         if IS_PYMONGO_3 and drop_dups: | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||||
|             warnings.warn(msg, DeprecationWarning) |             warnings.warn(msg, DeprecationWarning) | ||||||
|  |         elif not IS_PYMONGO_3: | ||||||
|  |             index_spec['drop_dups'] = drop_dups | ||||||
|         index_spec['background'] = background |         index_spec['background'] = background | ||||||
|         index_spec.update(kwargs) |         index_spec.update(kwargs) | ||||||
|  |  | ||||||
|  |         if IS_PYMONGO_3: | ||||||
|             return cls._get_collection().create_index(fields, **index_spec) |             return cls._get_collection().create_index(fields, **index_spec) | ||||||
|  |         else: | ||||||
|  |             return cls._get_collection().ensure_index(fields, **index_spec) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def ensure_index(cls, key_or_list, drop_dups=False, background=False, |     def ensure_index(cls, key_or_list, drop_dups=False, background=False, | ||||||
| @@ -836,9 +781,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value |         :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value | ||||||
|             will be removed if PyMongo3+ is used |             will be removed if PyMongo3+ is used | ||||||
|         """ |         """ | ||||||
|         if drop_dups: |         if IS_PYMONGO_3 and drop_dups: | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||||
|             warnings.warn(msg, DeprecationWarning) |             warnings.warn(msg, DeprecationWarning) | ||||||
|  |         elif not IS_PYMONGO_3: | ||||||
|  |             kwargs.update({'drop_dups': drop_dups}) | ||||||
|         return cls.create_index(key_or_list, background=background, **kwargs) |         return cls.create_index(key_or_list, background=background, **kwargs) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
| @@ -854,7 +801,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         drop_dups = cls._meta.get('index_drop_dups', False) |         drop_dups = cls._meta.get('index_drop_dups', False) | ||||||
|         index_opts = cls._meta.get('index_opts') or {} |         index_opts = cls._meta.get('index_opts') or {} | ||||||
|         index_cls = cls._meta.get('index_cls', True) |         index_cls = cls._meta.get('index_cls', True) | ||||||
|         if drop_dups: |         if IS_PYMONGO_3 and drop_dups: | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||||
|             warnings.warn(msg, DeprecationWarning) |             warnings.warn(msg, DeprecationWarning) | ||||||
|  |  | ||||||
| @@ -885,7 +832,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|                 if 'cls' in opts: |                 if 'cls' in opts: | ||||||
|                     del opts['cls'] |                     del opts['cls'] | ||||||
|  |  | ||||||
|  |                 if IS_PYMONGO_3: | ||||||
|                     collection.create_index(fields, background=background, **opts) |                     collection.create_index(fields, background=background, **opts) | ||||||
|  |                 else: | ||||||
|  |                     collection.ensure_index(fields, background=background, | ||||||
|  |                                             drop_dups=drop_dups, **opts) | ||||||
|  |  | ||||||
|         # If _cls is being used (for polymorphism), it needs an index, |         # If _cls is being used (for polymorphism), it needs an index, | ||||||
|         # only if another index doesn't begin with _cls |         # only if another index doesn't begin with _cls | ||||||
| @@ -896,8 +847,12 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|             if 'cls' in index_opts: |             if 'cls' in index_opts: | ||||||
|                 del index_opts['cls'] |                 del index_opts['cls'] | ||||||
|  |  | ||||||
|  |             if IS_PYMONGO_3: | ||||||
|                 collection.create_index('_cls', background=background, |                 collection.create_index('_cls', background=background, | ||||||
|                                         **index_opts) |                                         **index_opts) | ||||||
|  |             else: | ||||||
|  |                 collection.ensure_index('_cls', background=background, | ||||||
|  |                                         **index_opts) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def list_indexes(cls): |     def list_indexes(cls): | ||||||
| @@ -966,16 +921,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         required = cls.list_indexes() |         required = cls.list_indexes() | ||||||
|  |         existing = [info['key'] | ||||||
|         existing = [] |                     for info in cls._get_collection().index_information().values()] | ||||||
|         for info in cls._get_collection().index_information().values(): |  | ||||||
|             if '_fts' in info['key'][0]: |  | ||||||
|                 index_type = info['key'][0][1] |  | ||||||
|                 text_index_fields = info.get('weights').keys() |  | ||||||
|                 existing.append( |  | ||||||
|                     [(key, index_type) for key in text_index_fields]) |  | ||||||
|             else: |  | ||||||
|                 existing.append(info['key']) |  | ||||||
|         missing = [index for index in required if index not in existing] |         missing = [index for index in required if index not in existing] | ||||||
|         extra = [index for index in existing if index not in required] |         extra = [index for index in existing if index not in required] | ||||||
|  |  | ||||||
| @@ -992,10 +939,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         return {'missing': missing, 'extra': extra} |         return {'missing': missing, 'extra': extra} | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | class DynamicDocument(Document): | ||||||
|     """A Dynamic Document class allowing flexible, expandable and uncontrolled |     """A Dynamic Document class allowing flexible, expandable and uncontrolled | ||||||
|     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same |     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same | ||||||
|     way as an ordinary document but has expanded style properties.  Any data |     way as an ordinary document but has expando style properties.  Any data | ||||||
|     passed or set against the :class:`~mongoengine.DynamicDocument` that is |     passed or set against the :class:`~mongoengine.DynamicDocument` that is | ||||||
|     not a field is automatically converted into a |     not a field is automatically converted into a | ||||||
|     :class:`~mongoengine.fields.DynamicField` and data can be attributed to that |     :class:`~mongoengine.fields.DynamicField` and data can be attributed to that | ||||||
| @@ -1003,12 +950,13 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | |||||||
|  |  | ||||||
|     .. note:: |     .. note:: | ||||||
|  |  | ||||||
|         There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` |         There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = TopLevelDocumentMetaclass |     my_metaclass = TopLevelDocumentMetaclass | ||||||
|  |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|     _dynamic = True |     _dynamic = True | ||||||
|  |  | ||||||
| @@ -1019,12 +967,11 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | |||||||
|         field_name = args[0] |         field_name = args[0] | ||||||
|         if field_name in self._dynamic_fields: |         if field_name in self._dynamic_fields: | ||||||
|             setattr(self, field_name, None) |             setattr(self, field_name, None) | ||||||
|             self._dynamic_fields[field_name].null = False |  | ||||||
|         else: |         else: | ||||||
|             super(DynamicDocument, self).__delattr__(*args, **kwargs) |             super(DynamicDocument, self).__delattr__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): | class DynamicEmbeddedDocument(EmbeddedDocument): | ||||||
|     """A Dynamic Embedded Document class allowing flexible, expandable and |     """A Dynamic Embedded Document class allowing flexible, expandable and | ||||||
|     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more |     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more | ||||||
|     information about dynamic documents. |     information about dynamic documents. | ||||||
| @@ -1033,6 +980,7 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu | |||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = DocumentMetaclass |     my_metaclass = DocumentMetaclass | ||||||
|  |     __metaclass__ = DocumentMetaclass | ||||||
|  |  | ||||||
|     _dynamic = True |     _dynamic = True | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,12 +1,11 @@ | |||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
|  |  | ||||||
| import six | import six | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | ||||||
|            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', |            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', | ||||||
|            'OperationError', 'NotUniqueError', 'FieldDoesNotExist', |            'OperationError', 'NotUniqueError', 'FieldDoesNotExist', | ||||||
|            'ValidationError', 'SaveConditionError', 'DeprecatedError') |            'ValidationError', 'SaveConditionError') | ||||||
|  |  | ||||||
|  |  | ||||||
| class NotRegistered(Exception): | class NotRegistered(Exception): | ||||||
| @@ -72,7 +71,6 @@ class ValidationError(AssertionError): | |||||||
|     _message = None |     _message = None | ||||||
|  |  | ||||||
|     def __init__(self, message='', **kwargs): |     def __init__(self, message='', **kwargs): | ||||||
|         super(ValidationError, self).__init__(message) |  | ||||||
|         self.errors = kwargs.get('errors', {}) |         self.errors = kwargs.get('errors', {}) | ||||||
|         self.field_name = kwargs.get('field_name') |         self.field_name = kwargs.get('field_name') | ||||||
|         self.message = message |         self.message = message | ||||||
| @@ -110,8 +108,11 @@ class ValidationError(AssertionError): | |||||||
|  |  | ||||||
|         def build_dict(source): |         def build_dict(source): | ||||||
|             errors_dict = {} |             errors_dict = {} | ||||||
|  |             if not source: | ||||||
|  |                 return errors_dict | ||||||
|  |  | ||||||
|             if isinstance(source, dict): |             if isinstance(source, dict): | ||||||
|                 for field_name, error in iteritems(source): |                 for field_name, error in source.iteritems(): | ||||||
|                     errors_dict[field_name] = build_dict(error) |                     errors_dict[field_name] = build_dict(error) | ||||||
|             elif isinstance(source, ValidationError) and source.errors: |             elif isinstance(source, ValidationError) and source.errors: | ||||||
|                 return build_dict(source.errors) |                 return build_dict(source.errors) | ||||||
| @@ -133,17 +134,12 @@ class ValidationError(AssertionError): | |||||||
|                 value = ' '.join([generate_key(k) for k in value]) |                 value = ' '.join([generate_key(k) for k in value]) | ||||||
|             elif isinstance(value, dict): |             elif isinstance(value, dict): | ||||||
|                 value = ' '.join( |                 value = ' '.join( | ||||||
|                     [generate_key(v, k) for k, v in iteritems(value)]) |                     [generate_key(v, k) for k, v in value.iteritems()]) | ||||||
|  |  | ||||||
|             results = '%s.%s' % (prefix, value) if prefix else value |             results = '%s.%s' % (prefix, value) if prefix else value | ||||||
|             return results |             return results | ||||||
|  |  | ||||||
|         error_dict = defaultdict(list) |         error_dict = defaultdict(list) | ||||||
|         for k, v in iteritems(self.to_dict()): |         for k, v in self.to_dict().iteritems(): | ||||||
|             error_dict[generate_key(v)].append(k) |             error_dict[generate_key(v)].append(k) | ||||||
|         return ' '.join(['%s: %s' % (k, v) for k, v in iteritems(error_dict)]) |         return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()]) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeprecatedError(Exception): |  | ||||||
|     """Raise when a user uses a feature that has been Deprecated""" |  | ||||||
|     pass |  | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,19 +0,0 @@ | |||||||
| """ |  | ||||||
| Helper functions, constants, and types to aid with MongoDB version support |  | ||||||
| """ |  | ||||||
| from mongoengine.connection import get_connection |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # Constant that can be used to compare the version retrieved with |  | ||||||
| # get_mongodb_version() |  | ||||||
| MONGODB_34 = (3, 4) |  | ||||||
| MONGODB_36 = (3, 6) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_mongodb_version(): |  | ||||||
|     """Return the version of the connected mongoDB (first 2 digits) |  | ||||||
|  |  | ||||||
|     :return: tuple(int, int) |  | ||||||
|     """ |  | ||||||
|     version_list = get_connection().server_info()['versionArray'][:2]     # e.g: (3, 2) |  | ||||||
|     return tuple(version_list) |  | ||||||
| @@ -1,32 +0,0 @@ | |||||||
| """ |  | ||||||
| Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. |  | ||||||
| """ |  | ||||||
| import pymongo |  | ||||||
|  |  | ||||||
| _PYMONGO_37 = (3, 7) |  | ||||||
|  |  | ||||||
| PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) |  | ||||||
|  |  | ||||||
| IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def count_documents(collection, filter): |  | ||||||
|     """Pymongo>3.7 deprecates count in favour of count_documents""" |  | ||||||
|     if IS_PYMONGO_GTE_37: |  | ||||||
|         return collection.count_documents(filter) |  | ||||||
|     else: |  | ||||||
|         count = collection.find(filter).count() |  | ||||||
|     return count |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def list_collection_names(db, include_system_collections=False): |  | ||||||
|     """Pymongo>3.7 deprecates collection_names in favour of list_collection_names""" |  | ||||||
|     if IS_PYMONGO_GTE_37: |  | ||||||
|         collections = db.list_collection_names() |  | ||||||
|     else: |  | ||||||
|         collections = db.collection_names() |  | ||||||
|  |  | ||||||
|     if not include_system_collections: |  | ||||||
|         collections = [c for c in collections if not c.startswith('system.')] |  | ||||||
|  |  | ||||||
|     return collections |  | ||||||
| @@ -1,8 +1,17 @@ | |||||||
| """ | """ | ||||||
| Helper functions, constants, and types to aid with Python v2.7 - v3.x support | Helper functions, constants, and types to aid with Python v2.7 - v3.x and | ||||||
|  | PyMongo v2.7 - v3.x support. | ||||||
| """ | """ | ||||||
|  | import pymongo | ||||||
| import six | import six | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if pymongo.version_tuple[0] < 3: | ||||||
|  |     IS_PYMONGO_3 = False | ||||||
|  | else: | ||||||
|  |     IS_PYMONGO_3 = True | ||||||
|  |  | ||||||
|  |  | ||||||
| # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. | # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. | ||||||
| StringIO = six.BytesIO | StringIO = six.BytesIO | ||||||
|  |  | ||||||
| @@ -14,10 +23,3 @@ if not six.PY3: | |||||||
|         pass |         pass | ||||||
|     else: |     else: | ||||||
|         StringIO = cStringIO.StringIO |         StringIO = cStringIO.StringIO | ||||||
|  |  | ||||||
|  |  | ||||||
| if six.PY3: |  | ||||||
|     from collections.abc import Hashable |  | ||||||
| else: |  | ||||||
|     # raises DeprecationWarnings in Python >=3.7 |  | ||||||
|     from collections import Hashable |  | ||||||
|   | |||||||
| @@ -2,6 +2,7 @@ from __future__ import absolute_import | |||||||
|  |  | ||||||
| import copy | import copy | ||||||
| import itertools | import itertools | ||||||
|  | import operator | ||||||
| import pprint | import pprint | ||||||
| import re | import re | ||||||
| import warnings | import warnings | ||||||
| @@ -10,22 +11,24 @@ from bson import SON, json_util | |||||||
| from bson.code import Code | from bson.code import Code | ||||||
| import pymongo | import pymongo | ||||||
| import pymongo.errors | import pymongo.errors | ||||||
| from pymongo.collection import ReturnDocument |  | ||||||
| from pymongo.common import validate_read_preference | from pymongo.common import validate_read_preference | ||||||
| import six | import six | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine import signals | from mongoengine import signals | ||||||
| from mongoengine.base import get_document | from mongoengine.base import get_document | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| from mongoengine.context_managers import set_write_concern, switch_db | from mongoengine.context_managers import switch_db | ||||||
| from mongoengine.errors import (InvalidQueryError, LookUpError, | from mongoengine.errors import (InvalidQueryError, LookUpError, | ||||||
|                                 NotUniqueError, OperationError) |                                 NotUniqueError, OperationError) | ||||||
|  | from mongoengine.python_support import IS_PYMONGO_3 | ||||||
| from mongoengine.queryset import transform | from mongoengine.queryset import transform | ||||||
| from mongoengine.queryset.field_list import QueryFieldList | from mongoengine.queryset.field_list import QueryFieldList | ||||||
| from mongoengine.queryset.visitor import Q, QNode | from mongoengine.queryset.visitor import Q, QNode | ||||||
|  |  | ||||||
|  | if IS_PYMONGO_3: | ||||||
|  |     from pymongo.collection import ReturnDocument | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') | __all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') | ||||||
|  |  | ||||||
| @@ -36,6 +39,8 @@ CASCADE = 2 | |||||||
| DENY = 3 | DENY = 3 | ||||||
| PULL = 4 | PULL = 4 | ||||||
|  |  | ||||||
|  | RE_TYPE = type(re.compile('')) | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseQuerySet(object): | class BaseQuerySet(object): | ||||||
|     """A set of results returned from a query. Wraps a MongoDB cursor, |     """A set of results returned from a query. Wraps a MongoDB cursor, | ||||||
| @@ -62,6 +67,7 @@ class BaseQuerySet(object): | |||||||
|         self._scalar = [] |         self._scalar = [] | ||||||
|         self._none = False |         self._none = False | ||||||
|         self._as_pymongo = False |         self._as_pymongo = False | ||||||
|  |         self._as_pymongo_coerce = False | ||||||
|         self._search_text = None |         self._search_text = None | ||||||
|  |  | ||||||
|         # If inheritance is allowed, only return instances and instances of |         # If inheritance is allowed, only return instances and instances of | ||||||
| @@ -73,7 +79,6 @@ class BaseQuerySet(object): | |||||||
|                 self._initial_query = { |                 self._initial_query = { | ||||||
|                     '_cls': {'$in': self._document._subclasses}} |                     '_cls': {'$in': self._document._subclasses}} | ||||||
|             self._loaded_fields = QueryFieldList(always_include=['_cls']) |             self._loaded_fields = QueryFieldList(always_include=['_cls']) | ||||||
|  |  | ||||||
|         self._cursor_obj = None |         self._cursor_obj = None | ||||||
|         self._limit = None |         self._limit = None | ||||||
|         self._skip = None |         self._skip = None | ||||||
| @@ -187,7 +192,7 @@ class BaseQuerySet(object): | |||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|             if queryset._as_pymongo: |             if queryset._as_pymongo: | ||||||
|                 return queryset._cursor[key] |                 return queryset._get_as_pymongo(queryset._cursor[key]) | ||||||
|  |  | ||||||
|             return queryset._document._from_son( |             return queryset._document._from_son( | ||||||
|                 queryset._cursor[key], |                 queryset._cursor[key], | ||||||
| @@ -195,7 +200,7 @@ class BaseQuerySet(object): | |||||||
|                 only_fields=self.only_fields |                 only_fields=self.only_fields | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         raise TypeError('Provide a slice or an integer index') |         raise AttributeError('Provide a slice or an integer index') | ||||||
|  |  | ||||||
|     def __iter__(self): |     def __iter__(self): | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
| @@ -205,16 +210,18 @@ class BaseQuerySet(object): | |||||||
|         queryset = self.order_by() |         queryset = self.order_by() | ||||||
|         return False if queryset.first() is None else True |         return False if queryset.first() is None else True | ||||||
|  |  | ||||||
|  |     def __nonzero__(self): | ||||||
|  |         """Avoid to open all records in an if stmt in Py2.""" | ||||||
|  |         return self._has_data() | ||||||
|  |  | ||||||
|     def __bool__(self): |     def __bool__(self): | ||||||
|         """Avoid to open all records in an if stmt in Py3.""" |         """Avoid to open all records in an if stmt in Py3.""" | ||||||
|         return self._has_data() |         return self._has_data() | ||||||
|  |  | ||||||
|     __nonzero__ = __bool__  # For Py2 support |  | ||||||
|  |  | ||||||
|     # Core functions |     # Core functions | ||||||
|  |  | ||||||
|     def all(self): |     def all(self): | ||||||
|         """Returns a copy of the current QuerySet.""" |         """Returns all documents.""" | ||||||
|         return self.__call__() |         return self.__call__() | ||||||
|  |  | ||||||
|     def filter(self, *q_objs, **query): |     def filter(self, *q_objs, **query): | ||||||
| @@ -263,13 +270,13 @@ class BaseQuerySet(object): | |||||||
|         queryset = queryset.filter(*q_objs, **query) |         queryset = queryset.filter(*q_objs, **query) | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             result = six.next(queryset) |             result = queryset.next() | ||||||
|         except StopIteration: |         except StopIteration: | ||||||
|             msg = ('%s matching query does not exist.' |             msg = ('%s matching query does not exist.' | ||||||
|                    % queryset._document._class_name) |                    % queryset._document._class_name) | ||||||
|             raise queryset._document.DoesNotExist(msg) |             raise queryset._document.DoesNotExist(msg) | ||||||
|         try: |         try: | ||||||
|             six.next(queryset) |             queryset.next() | ||||||
|         except StopIteration: |         except StopIteration: | ||||||
|             return result |             return result | ||||||
|  |  | ||||||
| @@ -336,7 +343,7 @@ class BaseQuerySet(object): | |||||||
|                        % str(self._document)) |                        % str(self._document)) | ||||||
|                 raise OperationError(msg) |                 raise OperationError(msg) | ||||||
|             if doc.pk and not doc._created: |             if doc.pk and not doc._created: | ||||||
|                 msg = 'Some documents have ObjectIds, use doc.update() instead' |                 msg = 'Some documents have ObjectIds use doc.update() instead' | ||||||
|                 raise OperationError(msg) |                 raise OperationError(msg) | ||||||
|  |  | ||||||
|         signal_kwargs = signal_kwargs or {} |         signal_kwargs = signal_kwargs or {} | ||||||
| @@ -344,24 +351,11 @@ class BaseQuerySet(object): | |||||||
|                                      documents=docs, **signal_kwargs) |                                      documents=docs, **signal_kwargs) | ||||||
|  |  | ||||||
|         raw = [doc.to_mongo() for doc in docs] |         raw = [doc.to_mongo() for doc in docs] | ||||||
|  |  | ||||||
|         with set_write_concern(self._collection, write_concern) as collection: |  | ||||||
|             insert_func = collection.insert_many |  | ||||||
|             if return_one: |  | ||||||
|                 raw = raw[0] |  | ||||||
|                 insert_func = collection.insert_one |  | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             inserted_result = insert_func(raw) |             ids = self._collection.insert(raw, **write_concern) | ||||||
|             ids = [inserted_result.inserted_id] if return_one else inserted_result.inserted_ids |  | ||||||
|         except pymongo.errors.DuplicateKeyError as err: |         except pymongo.errors.DuplicateKeyError as err: | ||||||
|             message = 'Could not save document (%s)' |             message = 'Could not save document (%s)' | ||||||
|             raise NotUniqueError(message % six.text_type(err)) |             raise NotUniqueError(message % six.text_type(err)) | ||||||
|         except pymongo.errors.BulkWriteError as err: |  | ||||||
|             # inserting documents that already have an _id field will |  | ||||||
|             # give huge performance debt or raise |  | ||||||
|             message = u'Document must not have _id value before bulk write (%s)' |  | ||||||
|             raise NotUniqueError(message % six.text_type(err)) |  | ||||||
|         except pymongo.errors.OperationFailure as err: |         except pymongo.errors.OperationFailure as err: | ||||||
|             message = 'Could not save document (%s)' |             message = 'Could not save document (%s)' | ||||||
|             if re.match('^E1100[01] duplicate key', six.text_type(err)): |             if re.match('^E1100[01] duplicate key', six.text_type(err)): | ||||||
| @@ -371,20 +365,18 @@ class BaseQuerySet(object): | |||||||
|                 raise NotUniqueError(message % six.text_type(err)) |                 raise NotUniqueError(message % six.text_type(err)) | ||||||
|             raise OperationError(message % six.text_type(err)) |             raise OperationError(message % six.text_type(err)) | ||||||
|  |  | ||||||
|         # Apply inserted_ids to documents |  | ||||||
|         for doc, doc_id in zip(docs, ids): |  | ||||||
|             doc.pk = doc_id |  | ||||||
|  |  | ||||||
|         if not load_bulk: |         if not load_bulk: | ||||||
|             signals.post_bulk_insert.send( |             signals.post_bulk_insert.send( | ||||||
|                 self._document, documents=docs, loaded=False, **signal_kwargs) |                 self._document, documents=docs, loaded=False, **signal_kwargs) | ||||||
|             return ids[0] if return_one else ids |             return return_one and ids[0] or ids | ||||||
|  |  | ||||||
|         documents = self.in_bulk(ids) |         documents = self.in_bulk(ids) | ||||||
|         results = [documents.get(obj_id) for obj_id in ids] |         results = [] | ||||||
|  |         for obj_id in ids: | ||||||
|  |             results.append(documents.get(obj_id)) | ||||||
|         signals.post_bulk_insert.send( |         signals.post_bulk_insert.send( | ||||||
|             self._document, documents=results, loaded=True, **signal_kwargs) |             self._document, documents=results, loaded=True, **signal_kwargs) | ||||||
|         return results[0] if return_one else results |         return return_one and results[0] or results | ||||||
|  |  | ||||||
|     def count(self, with_limit_and_skip=False): |     def count(self, with_limit_and_skip=False): | ||||||
|         """Count the selected elements in the query. |         """Count the selected elements in the query. | ||||||
| @@ -393,11 +385,9 @@ class BaseQuerySet(object): | |||||||
|             :meth:`skip` that has been applied to this cursor into account when |             :meth:`skip` that has been applied to this cursor into account when | ||||||
|             getting the count |             getting the count | ||||||
|         """ |         """ | ||||||
|         if self._limit == 0 and with_limit_and_skip is False or self._none: |         if self._limit == 0 and with_limit_and_skip or self._none: | ||||||
|             return 0 |             return 0 | ||||||
|         count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) |         return self._cursor.count(with_limit_and_skip=with_limit_and_skip) | ||||||
|         self._cursor_obj = None |  | ||||||
|         return count |  | ||||||
|  |  | ||||||
|     def delete(self, write_concern=None, _from_doc_delete=False, |     def delete(self, write_concern=None, _from_doc_delete=False, | ||||||
|                cascade_refs=None): |                cascade_refs=None): | ||||||
| @@ -481,10 +471,9 @@ class BaseQuerySet(object): | |||||||
|                     write_concern=write_concern, |                     write_concern=write_concern, | ||||||
|                     **{'pull_all__%s' % field_name: self}) |                     **{'pull_all__%s' % field_name: self}) | ||||||
|  |  | ||||||
|         with set_write_concern(queryset._collection, write_concern) as collection: |         result = queryset._collection.remove(queryset._query, **write_concern) | ||||||
|             result = collection.delete_many(queryset._query) |         if result: | ||||||
|             if result.acknowledged: |             return result.get('n') | ||||||
|                 return result.deleted_count |  | ||||||
|  |  | ||||||
|     def update(self, upsert=False, multi=True, write_concern=None, |     def update(self, upsert=False, multi=True, write_concern=None, | ||||||
|                full_result=False, **update): |                full_result=False, **update): | ||||||
| @@ -498,12 +487,10 @@ class BaseQuerySet(object): | |||||||
|             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will |             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will | ||||||
|             wait until at least two servers have recorded the write and |             wait until at least two servers have recorded the write and | ||||||
|             will force an fsync on the primary server. |             will force an fsync on the primary server. | ||||||
|         :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number |         :param full_result: Return the full result rather than just the number | ||||||
|             updated items |             updated. | ||||||
|         :param update: Django-style update keyword arguments |         :param update: Django-style update keyword arguments | ||||||
|  |  | ||||||
|         :returns the number of updated documents (unless ``full_result`` is True) |  | ||||||
|  |  | ||||||
|         .. versionadded:: 0.2 |         .. versionadded:: 0.2 | ||||||
|         """ |         """ | ||||||
|         if not update and not upsert: |         if not update and not upsert: | ||||||
| @@ -524,15 +511,12 @@ class BaseQuerySet(object): | |||||||
|             else: |             else: | ||||||
|                 update['$set'] = {'_cls': queryset._document._class_name} |                 update['$set'] = {'_cls': queryset._document._class_name} | ||||||
|         try: |         try: | ||||||
|             with set_write_concern(queryset._collection, write_concern) as collection: |             result = queryset._collection.update(query, update, multi=multi, | ||||||
|                 update_func = collection.update_one |                                                  upsert=upsert, **write_concern) | ||||||
|                 if multi: |  | ||||||
|                     update_func = collection.update_many |  | ||||||
|                 result = update_func(query, update, upsert=upsert) |  | ||||||
|             if full_result: |             if full_result: | ||||||
|                 return result |                 return result | ||||||
|             elif result.raw_result: |             elif result: | ||||||
|                 return result.raw_result['n'] |                 return result['n'] | ||||||
|         except pymongo.errors.DuplicateKeyError as err: |         except pymongo.errors.DuplicateKeyError as err: | ||||||
|             raise NotUniqueError(u'Update failed (%s)' % six.text_type(err)) |             raise NotUniqueError(u'Update failed (%s)' % six.text_type(err)) | ||||||
|         except pymongo.errors.OperationFailure as err: |         except pymongo.errors.OperationFailure as err: | ||||||
| @@ -561,13 +545,13 @@ class BaseQuerySet(object): | |||||||
|                                     write_concern=write_concern, |                                     write_concern=write_concern, | ||||||
|                                     full_result=True, **update) |                                     full_result=True, **update) | ||||||
|  |  | ||||||
|         if atomic_update.raw_result['updatedExisting']: |         if atomic_update['updatedExisting']: | ||||||
|             document = self.get() |             document = self.get() | ||||||
|         else: |         else: | ||||||
|             document = self._document.objects.with_id(atomic_update.upserted_id) |             document = self._document.objects.with_id(atomic_update['upserted']) | ||||||
|         return document |         return document | ||||||
|  |  | ||||||
|     def update_one(self, upsert=False, write_concern=None, full_result=False, **update): |     def update_one(self, upsert=False, write_concern=None, **update): | ||||||
|         """Perform an atomic update on the fields of the first document |         """Perform an atomic update on the fields of the first document | ||||||
|         matched by the query. |         matched by the query. | ||||||
|  |  | ||||||
| @@ -578,19 +562,12 @@ class BaseQuerySet(object): | |||||||
|             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will |             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will | ||||||
|             wait until at least two servers have recorded the write and |             wait until at least two servers have recorded the write and | ||||||
|             will force an fsync on the primary server. |             will force an fsync on the primary server. | ||||||
|         :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number |  | ||||||
|             updated items |  | ||||||
|         :param update: Django-style update keyword arguments |         :param update: Django-style update keyword arguments | ||||||
|             full_result |  | ||||||
|         :returns the number of updated documents (unless ``full_result`` is True) |  | ||||||
|         .. versionadded:: 0.2 |         .. versionadded:: 0.2 | ||||||
|         """ |         """ | ||||||
|         return self.update( |         return self.update( | ||||||
|             upsert=upsert, |             upsert=upsert, multi=False, write_concern=write_concern, **update) | ||||||
|             multi=False, |  | ||||||
|             write_concern=write_concern, |  | ||||||
|             full_result=full_result, |  | ||||||
|             **update) |  | ||||||
|  |  | ||||||
|     def modify(self, upsert=False, full_response=False, remove=False, new=False, **update): |     def modify(self, upsert=False, full_response=False, remove=False, new=False, **update): | ||||||
|         """Update and return the updated document. |         """Update and return the updated document. | ||||||
| @@ -625,11 +602,12 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         query = queryset._query |         query = queryset._query | ||||||
|         if not remove: |         if not IS_PYMONGO_3 or not remove: | ||||||
|             update = transform.update(queryset._document, **update) |             update = transform.update(queryset._document, **update) | ||||||
|         sort = queryset._ordering |         sort = queryset._ordering | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|  |             if IS_PYMONGO_3: | ||||||
|                 if full_response: |                 if full_response: | ||||||
|                     msg = 'With PyMongo 3+, it is not possible anymore to get the full response.' |                     msg = 'With PyMongo 3+, it is not possible anymore to get the full response.' | ||||||
|                     warnings.warn(msg, DeprecationWarning) |                     warnings.warn(msg, DeprecationWarning) | ||||||
| @@ -644,6 +622,11 @@ class BaseQuerySet(object): | |||||||
|                     result = queryset._collection.find_one_and_update( |                     result = queryset._collection.find_one_and_update( | ||||||
|                         query, update, upsert=upsert, sort=sort, return_document=return_doc, |                         query, update, upsert=upsert, sort=sort, return_document=return_doc, | ||||||
|                         **self._cursor_args) |                         **self._cursor_args) | ||||||
|  |  | ||||||
|  |             else: | ||||||
|  |                 result = queryset._collection.find_and_modify( | ||||||
|  |                     query, update, upsert=upsert, sort=sort, remove=remove, new=new, | ||||||
|  |                     full_response=full_response, **self._cursor_args) | ||||||
|         except pymongo.errors.DuplicateKeyError as err: |         except pymongo.errors.DuplicateKeyError as err: | ||||||
|             raise NotUniqueError(u'Update failed (%s)' % err) |             raise NotUniqueError(u'Update failed (%s)' % err) | ||||||
|         except pymongo.errors.OperationFailure as err: |         except pymongo.errors.OperationFailure as err: | ||||||
| @@ -692,7 +675,7 @@ class BaseQuerySet(object): | |||||||
|                     self._document._from_son(doc, only_fields=self.only_fields)) |                     self._document._from_son(doc, only_fields=self.only_fields)) | ||||||
|         elif self._as_pymongo: |         elif self._as_pymongo: | ||||||
|             for doc in docs: |             for doc in docs: | ||||||
|                 doc_map[doc['_id']] = doc |                 doc_map[doc['_id']] = self._get_as_pymongo(doc) | ||||||
|         else: |         else: | ||||||
|             for doc in docs: |             for doc in docs: | ||||||
|                 doc_map[doc['_id']] = self._document._from_son( |                 doc_map[doc['_id']] = self._document._from_son( | ||||||
| @@ -709,9 +692,8 @@ class BaseQuerySet(object): | |||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|     def no_sub_classes(self): |     def no_sub_classes(self): | ||||||
|         """Filter for only the instances of this specific document. |         """ | ||||||
|  |         Only return instances of this document and not any inherited documents | ||||||
|         Do NOT return any inherited documents. |  | ||||||
|         """ |         """ | ||||||
|         if self._document._meta.get('allow_inheritance') is True: |         if self._document._meta.get('allow_inheritance') is True: | ||||||
|             self._initial_query = {'_cls': self._document._class_name} |             self._initial_query = {'_cls': self._document._class_name} | ||||||
| @@ -746,12 +728,11 @@ class BaseQuerySet(object): | |||||||
|                 '%s is not a subclass of BaseQuerySet' % new_qs.__name__) |                 '%s is not a subclass of BaseQuerySet' % new_qs.__name__) | ||||||
|  |  | ||||||
|         copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', |         copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', | ||||||
|                       '_where_clause', '_loaded_fields', '_ordering', |                       '_where_clause', '_loaded_fields', '_ordering', '_snapshot', | ||||||
|                       '_snapshot', '_timeout', '_class_check', '_slave_okay', |                       '_timeout', '_class_check', '_slave_okay', '_read_preference', | ||||||
|                       '_read_preference', '_iter', '_scalar', '_as_pymongo', |                       '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', | ||||||
|                       '_limit', '_skip', '_hint', '_auto_dereference', |                       '_limit', '_skip', '_hint', '_auto_dereference', | ||||||
|                       '_search_text', 'only_fields', '_max_time_ms', |                       '_search_text', 'only_fields', '_max_time_ms', '_comment') | ||||||
|                       '_comment', '_batch_size') |  | ||||||
|  |  | ||||||
|         for prop in copy_props: |         for prop in copy_props: | ||||||
|             val = getattr(self, prop) |             val = getattr(self, prop) | ||||||
| @@ -778,11 +759,10 @@ class BaseQuerySet(object): | |||||||
|         """Limit the number of returned documents to `n`. This may also be |         """Limit the number of returned documents to `n`. This may also be | ||||||
|         achieved using array-slicing syntax (e.g. ``User.objects[:5]``). |         achieved using array-slicing syntax (e.g. ``User.objects[:5]``). | ||||||
|  |  | ||||||
|         :param n: the maximum number of objects to return if n is greater than 0. |         :param n: the maximum number of objects to return | ||||||
|         When 0 is passed, returns all the documents in the cursor |  | ||||||
|         """ |         """ | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         queryset._limit = n |         queryset._limit = n if n != 0 else 1 | ||||||
|  |  | ||||||
|         # If a cursor object has already been created, apply the limit to it. |         # If a cursor object has already been created, apply the limit to it. | ||||||
|         if queryset._cursor_obj: |         if queryset._cursor_obj: | ||||||
| @@ -959,8 +939,7 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|             posts = BlogPost.objects(...).fields(slice__comments=5) |             posts = BlogPost.objects(...).fields(slice__comments=5) | ||||||
|  |  | ||||||
|         :param kwargs: A set of keyword arguments identifying what to |         :param kwargs: A set keywors arguments identifying what to include. | ||||||
|             include, exclude, or slice. |  | ||||||
|  |  | ||||||
|         .. versionadded:: 0.5 |         .. versionadded:: 0.5 | ||||||
|         """ |         """ | ||||||
| @@ -980,9 +959,10 @@ class BaseQuerySet(object): | |||||||
|         # explicitly included, and then more complicated operators such as |         # explicitly included, and then more complicated operators such as | ||||||
|         # $slice. |         # $slice. | ||||||
|         def _sort_key(field_tuple): |         def _sort_key(field_tuple): | ||||||
|             _, value = field_tuple |             key, value = field_tuple | ||||||
|             if isinstance(value, int): |             if isinstance(value, (int)): | ||||||
|                 return value  # 0 for exclusion, 1 for inclusion |                 return value  # 0 for exclusion, 1 for inclusion | ||||||
|  |             else: | ||||||
|                 return 2  # so that complex values appear last |                 return 2  # so that complex values appear last | ||||||
|  |  | ||||||
|         fields = sorted(cleaned_fields, key=_sort_key) |         fields = sorted(cleaned_fields, key=_sort_key) | ||||||
| @@ -1012,15 +992,13 @@ class BaseQuerySet(object): | |||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|     def order_by(self, *keys): |     def order_by(self, *keys): | ||||||
|         """Order the :class:`~mongoengine.queryset.QuerySet` by the given keys. |         """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The | ||||||
|  |         order may be specified by prepending each of the keys by a + or a -. | ||||||
|         The order may be specified by prepending each of the keys by a "+" or |         Ascending order is assumed. If no keys are passed, existing ordering | ||||||
|         a "-". Ascending order is assumed if there's no prefix. |         is cleared instead. | ||||||
|  |  | ||||||
|         If no keys are passed, existing ordering is cleared instead. |  | ||||||
|  |  | ||||||
|         :param keys: fields to order the query results by; keys may be |         :param keys: fields to order the query results by; keys may be | ||||||
|             prefixed with "+" or a "-" to determine the ordering direction. |             prefixed with **+** or **-** to determine the ordering direction | ||||||
|         """ |         """ | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|  |  | ||||||
| @@ -1078,6 +1056,7 @@ class BaseQuerySet(object): | |||||||
|         ..versionchanged:: 0.5 - made chainable |         ..versionchanged:: 0.5 - made chainable | ||||||
|         .. deprecated:: Ignored with PyMongo 3+ |         .. deprecated:: Ignored with PyMongo 3+ | ||||||
|         """ |         """ | ||||||
|  |         if IS_PYMONGO_3: | ||||||
|             msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.' |             msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.' | ||||||
|             warnings.warn(msg, DeprecationWarning) |             warnings.warn(msg, DeprecationWarning) | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
| @@ -1085,7 +1064,7 @@ class BaseQuerySet(object): | |||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|     def timeout(self, enabled): |     def timeout(self, enabled): | ||||||
|         """Enable or disable the default mongod timeout when querying. (no_cursor_timeout option) |         """Enable or disable the default mongod timeout when querying. | ||||||
|  |  | ||||||
|         :param enabled: whether or not the timeout is used |         :param enabled: whether or not the timeout is used | ||||||
|  |  | ||||||
| @@ -1103,6 +1082,7 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         .. deprecated:: Ignored with PyMongo 3+ |         .. deprecated:: Ignored with PyMongo 3+ | ||||||
|         """ |         """ | ||||||
|  |         if IS_PYMONGO_3: | ||||||
|             msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.' |             msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.' | ||||||
|             warnings.warn(msg, DeprecationWarning) |             warnings.warn(msg, DeprecationWarning) | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
| @@ -1148,15 +1128,16 @@ class BaseQuerySet(object): | |||||||
|         """An alias for scalar""" |         """An alias for scalar""" | ||||||
|         return self.scalar(*fields) |         return self.scalar(*fields) | ||||||
|  |  | ||||||
|     def as_pymongo(self): |     def as_pymongo(self, coerce_types=False): | ||||||
|         """Instead of returning Document instances, return raw values from |         """Instead of returning Document instances, return raw values from | ||||||
|         pymongo. |         pymongo. | ||||||
|  |  | ||||||
|         This method is particularly useful if you don't need dereferencing |         :param coerce_types: Field types (if applicable) would be use to | ||||||
|         and care primarily about the speed of data retrieval. |             coerce types. | ||||||
|         """ |         """ | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         queryset._as_pymongo = True |         queryset._as_pymongo = True | ||||||
|  |         queryset._as_pymongo_coerce = coerce_types | ||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|     def max_time_ms(self, ms): |     def max_time_ms(self, ms): | ||||||
| @@ -1194,21 +1175,13 @@ class BaseQuerySet(object): | |||||||
|             initial_pipeline.append({'$sort': dict(self._ordering)}) |             initial_pipeline.append({'$sort': dict(self._ordering)}) | ||||||
|  |  | ||||||
|         if self._limit is not None: |         if self._limit is not None: | ||||||
|             # As per MongoDB Documentation (https://docs.mongodb.com/manual/reference/operator/aggregation/limit/), |             initial_pipeline.append({'$limit': self._limit}) | ||||||
|             # keeping limit stage right after sort stage is more efficient. But this leads to wrong set of documents |  | ||||||
|             # for a skip stage that might succeed these. So we need to maintain more documents in memory in such a |  | ||||||
|             # case (https://stackoverflow.com/a/24161461). |  | ||||||
|             initial_pipeline.append({'$limit': self._limit + (self._skip or 0)}) |  | ||||||
|  |  | ||||||
|         if self._skip is not None: |         if self._skip is not None: | ||||||
|             initial_pipeline.append({'$skip': self._skip}) |             initial_pipeline.append({'$skip': self._skip}) | ||||||
|  |  | ||||||
|         pipeline = initial_pipeline + list(pipeline) |         pipeline = initial_pipeline + list(pipeline) | ||||||
|  |  | ||||||
|         if self._read_preference is not None: |  | ||||||
|             return self._collection.with_options(read_preference=self._read_preference) \ |  | ||||||
|                        .aggregate(pipeline, cursor={}, **kwargs) |  | ||||||
|  |  | ||||||
|         return self._collection.aggregate(pipeline, cursor={}, **kwargs) |         return self._collection.aggregate(pipeline, cursor={}, **kwargs) | ||||||
|  |  | ||||||
|     # JS functionality |     # JS functionality | ||||||
| @@ -1415,7 +1388,11 @@ class BaseQuerySet(object): | |||||||
|         if isinstance(field_instances[-1], ListField): |         if isinstance(field_instances[-1], ListField): | ||||||
|             pipeline.insert(1, {'$unwind': '$' + field}) |             pipeline.insert(1, {'$unwind': '$' + field}) | ||||||
|  |  | ||||||
|         result = tuple(self._document._get_collection().aggregate(pipeline)) |         result = self._document._get_collection().aggregate(pipeline) | ||||||
|  |         if IS_PYMONGO_3: | ||||||
|  |             result = tuple(result) | ||||||
|  |         else: | ||||||
|  |             result = result.get('result') | ||||||
|  |  | ||||||
|         if result: |         if result: | ||||||
|             return result[0]['total'] |             return result[0]['total'] | ||||||
| @@ -1442,7 +1419,11 @@ class BaseQuerySet(object): | |||||||
|         if isinstance(field_instances[-1], ListField): |         if isinstance(field_instances[-1], ListField): | ||||||
|             pipeline.insert(1, {'$unwind': '$' + field}) |             pipeline.insert(1, {'$unwind': '$' + field}) | ||||||
|  |  | ||||||
|         result = tuple(self._document._get_collection().aggregate(pipeline)) |         result = self._document._get_collection().aggregate(pipeline) | ||||||
|  |         if IS_PYMONGO_3: | ||||||
|  |             result = tuple(result) | ||||||
|  |         else: | ||||||
|  |             result = result.get('result') | ||||||
|         if result: |         if result: | ||||||
|             return result[0]['total'] |             return result[0]['total'] | ||||||
|         return 0 |         return 0 | ||||||
| @@ -1476,16 +1457,16 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|     # Iterator helpers |     # Iterator helpers | ||||||
|  |  | ||||||
|     def __next__(self): |     def next(self): | ||||||
|         """Wrap the result in a :class:`~mongoengine.Document` object. |         """Wrap the result in a :class:`~mongoengine.Document` object. | ||||||
|         """ |         """ | ||||||
|         if self._limit == 0 or self._none: |         if self._limit == 0 or self._none: | ||||||
|             raise StopIteration |             raise StopIteration | ||||||
|  |  | ||||||
|         raw_doc = six.next(self._cursor) |         raw_doc = self._cursor.next() | ||||||
|  |  | ||||||
|         if self._as_pymongo: |         if self._as_pymongo: | ||||||
|             return raw_doc |             return self._get_as_pymongo(raw_doc) | ||||||
|  |  | ||||||
|         doc = self._document._from_son( |         doc = self._document._from_son( | ||||||
|             raw_doc, _auto_dereference=self._auto_dereference, |             raw_doc, _auto_dereference=self._auto_dereference, | ||||||
| @@ -1496,8 +1477,6 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         return doc |         return doc | ||||||
|  |  | ||||||
|     next = __next__     # For Python2 support |  | ||||||
|  |  | ||||||
|     def rewind(self): |     def rewind(self): | ||||||
|         """Rewind the cursor to its unevaluated state. |         """Rewind the cursor to its unevaluated state. | ||||||
|  |  | ||||||
| @@ -1517,6 +1496,17 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def _cursor_args(self): |     def _cursor_args(self): | ||||||
|  |         if not IS_PYMONGO_3: | ||||||
|  |             fields_name = 'fields' | ||||||
|  |             cursor_args = { | ||||||
|  |                 'timeout': self._timeout, | ||||||
|  |                 'snapshot': self._snapshot | ||||||
|  |             } | ||||||
|  |             if self._read_preference is not None: | ||||||
|  |                 cursor_args['read_preference'] = self._read_preference | ||||||
|  |             else: | ||||||
|  |                 cursor_args['slave_okay'] = self._slave_okay | ||||||
|  |         else: | ||||||
|             fields_name = 'projection' |             fields_name = 'projection' | ||||||
|             # snapshot is not handled at all by PyMongo 3+ |             # snapshot is not handled at all by PyMongo 3+ | ||||||
|             # TODO: evaluate similar possibilities using modifiers |             # TODO: evaluate similar possibilities using modifiers | ||||||
| @@ -1526,7 +1516,6 @@ class BaseQuerySet(object): | |||||||
|             cursor_args = { |             cursor_args = { | ||||||
|                 'no_cursor_timeout': not self._timeout |                 'no_cursor_timeout': not self._timeout | ||||||
|             } |             } | ||||||
|  |  | ||||||
|         if self._loaded_fields: |         if self._loaded_fields: | ||||||
|             cursor_args[fields_name] = self._loaded_fields.as_dict() |             cursor_args[fields_name] = self._loaded_fields.as_dict() | ||||||
|  |  | ||||||
| @@ -1550,7 +1539,7 @@ class BaseQuerySet(object): | |||||||
|         # XXX In PyMongo 3+, we define the read preference on a collection |         # XXX In PyMongo 3+, we define the read preference on a collection | ||||||
|         # level, not a cursor level. Thus, we need to get a cloned collection |         # level, not a cursor level. Thus, we need to get a cloned collection | ||||||
|         # object using `with_options` first. |         # object using `with_options` first. | ||||||
|         if self._read_preference is not None: |         if IS_PYMONGO_3 and self._read_preference is not None: | ||||||
|             self._cursor_obj = self._collection\ |             self._cursor_obj = self._collection\ | ||||||
|                 .with_options(read_preference=self._read_preference)\ |                 .with_options(read_preference=self._read_preference)\ | ||||||
|                 .find(self._query, **self._cursor_args) |                 .find(self._query, **self._cursor_args) | ||||||
| @@ -1589,9 +1578,6 @@ class BaseQuerySet(object): | |||||||
|         if self._batch_size is not None: |         if self._batch_size is not None: | ||||||
|             self._cursor_obj.batch_size(self._batch_size) |             self._cursor_obj.batch_size(self._batch_size) | ||||||
|  |  | ||||||
|         if self._comment is not None: |  | ||||||
|             self._cursor_obj.comment(self._comment) |  | ||||||
|  |  | ||||||
|         return self._cursor_obj |         return self._cursor_obj | ||||||
|  |  | ||||||
|     def __deepcopy__(self, memo): |     def __deepcopy__(self, memo): | ||||||
| @@ -1720,13 +1706,13 @@ class BaseQuerySet(object): | |||||||
|             } |             } | ||||||
|         """ |         """ | ||||||
|         total, data, types = self.exec_js(freq_func, field) |         total, data, types = self.exec_js(freq_func, field) | ||||||
|         values = {types.get(k): int(v) for k, v in iteritems(data)} |         values = {types.get(k): int(v) for k, v in data.iteritems()} | ||||||
|  |  | ||||||
|         if normalize: |         if normalize: | ||||||
|             values = {k: float(v) / total for k, v in values.items()} |             values = {k: float(v) / total for k, v in values.items()} | ||||||
|  |  | ||||||
|         frequencies = {} |         frequencies = {} | ||||||
|         for k, v in iteritems(values): |         for k, v in values.iteritems(): | ||||||
|             if isinstance(k, float): |             if isinstance(k, float): | ||||||
|                 if int(k) == k: |                 if int(k) == k: | ||||||
|                     k = int(k) |                     k = int(k) | ||||||
| @@ -1736,33 +1722,25 @@ class BaseQuerySet(object): | |||||||
|         return frequencies |         return frequencies | ||||||
|  |  | ||||||
|     def _fields_to_dbfields(self, fields): |     def _fields_to_dbfields(self, fields): | ||||||
|         """Translate fields' paths to their db equivalents.""" |         """Translate fields paths to its db equivalents""" | ||||||
|  |         ret = [] | ||||||
|         subclasses = [] |         subclasses = [] | ||||||
|         if self._document._meta['allow_inheritance']: |         document = self._document | ||||||
|  |         if document._meta['allow_inheritance']: | ||||||
|             subclasses = [get_document(x) |             subclasses = [get_document(x) | ||||||
|                           for x in self._document._subclasses][1:] |                           for x in document._subclasses][1:] | ||||||
|  |  | ||||||
|         db_field_paths = [] |  | ||||||
|         for field in fields: |         for field in fields: | ||||||
|             field_parts = field.split('.') |  | ||||||
|             try: |             try: | ||||||
|                 field = '.'.join( |                 field = '.'.join(f.db_field for f in | ||||||
|                     f if isinstance(f, six.string_types) else f.db_field |                                  document._lookup_field(field.split('.'))) | ||||||
|                     for f in self._document._lookup_field(field_parts) |                 ret.append(field) | ||||||
|                 ) |  | ||||||
|                 db_field_paths.append(field) |  | ||||||
|             except LookUpError as err: |             except LookUpError as err: | ||||||
|                 found = False |                 found = False | ||||||
|  |  | ||||||
|                 # If a field path wasn't found on the main document, go |  | ||||||
|                 # through its subclasses and see if it exists on any of them. |  | ||||||
|                 for subdoc in subclasses: |                 for subdoc in subclasses: | ||||||
|                     try: |                     try: | ||||||
|                         subfield = '.'.join( |                         subfield = '.'.join(f.db_field for f in | ||||||
|                             f if isinstance(f, six.string_types) else f.db_field |                                             subdoc._lookup_field(field.split('.'))) | ||||||
|                             for f in subdoc._lookup_field(field_parts) |                         ret.append(subfield) | ||||||
|                         ) |  | ||||||
|                         db_field_paths.append(subfield) |  | ||||||
|                         found = True |                         found = True | ||||||
|                         break |                         break | ||||||
|                     except LookUpError: |                     except LookUpError: | ||||||
| @@ -1770,8 +1748,7 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|                 if not found: |                 if not found: | ||||||
|                     raise err |                     raise err | ||||||
|  |         return ret | ||||||
|         return db_field_paths |  | ||||||
|  |  | ||||||
|     def _get_order_by(self, keys): |     def _get_order_by(self, keys): | ||||||
|         """Given a list of MongoEngine-style sort keys, return a list |         """Given a list of MongoEngine-style sort keys, return a list | ||||||
| @@ -1822,6 +1799,60 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         return tuple(data) |         return tuple(data) | ||||||
|  |  | ||||||
|  |     def _get_as_pymongo(self, row): | ||||||
|  |         # Extract which fields paths we should follow if .fields(...) was | ||||||
|  |         # used. If not, handle all fields. | ||||||
|  |         if not getattr(self, '__as_pymongo_fields', None): | ||||||
|  |             self.__as_pymongo_fields = [] | ||||||
|  |  | ||||||
|  |             for field in self._loaded_fields.fields - set(['_cls']): | ||||||
|  |                 self.__as_pymongo_fields.append(field) | ||||||
|  |                 while '.' in field: | ||||||
|  |                     field, _ = field.rsplit('.', 1) | ||||||
|  |                     self.__as_pymongo_fields.append(field) | ||||||
|  |  | ||||||
|  |         all_fields = not self.__as_pymongo_fields | ||||||
|  |  | ||||||
|  |         def clean(data, path=None): | ||||||
|  |             path = path or '' | ||||||
|  |  | ||||||
|  |             if isinstance(data, dict): | ||||||
|  |                 new_data = {} | ||||||
|  |                 for key, value in data.iteritems(): | ||||||
|  |                     new_path = '%s.%s' % (path, key) if path else key | ||||||
|  |  | ||||||
|  |                     if all_fields: | ||||||
|  |                         include_field = True | ||||||
|  |                     elif self._loaded_fields.value == QueryFieldList.ONLY: | ||||||
|  |                         include_field = new_path in self.__as_pymongo_fields | ||||||
|  |                     else: | ||||||
|  |                         include_field = new_path not in self.__as_pymongo_fields | ||||||
|  |  | ||||||
|  |                     if include_field: | ||||||
|  |                         new_data[key] = clean(value, path=new_path) | ||||||
|  |                 data = new_data | ||||||
|  |             elif isinstance(data, list): | ||||||
|  |                 data = [clean(d, path=path) for d in data] | ||||||
|  |             else: | ||||||
|  |                 if self._as_pymongo_coerce: | ||||||
|  |                     # If we need to coerce types, we need to determine the | ||||||
|  |                     # type of this field and use the corresponding | ||||||
|  |                     # .to_python(...) | ||||||
|  |                     EmbeddedDocumentField = _import_class('EmbeddedDocumentField') | ||||||
|  |  | ||||||
|  |                     obj = self._document | ||||||
|  |                     for chunk in path.split('.'): | ||||||
|  |                         obj = getattr(obj, chunk, None) | ||||||
|  |                         if obj is None: | ||||||
|  |                             break | ||||||
|  |                         elif isinstance(obj, EmbeddedDocumentField): | ||||||
|  |                             obj = obj.document_type | ||||||
|  |                     if obj and data is not None: | ||||||
|  |                         data = obj.to_python(data) | ||||||
|  |             return data | ||||||
|  |  | ||||||
|  |         return clean(row) | ||||||
|  |  | ||||||
|     def _sub_js_fields(self, code): |     def _sub_js_fields(self, code): | ||||||
|         """When fields are specified with [~fieldname] syntax, where |         """When fields are specified with [~fieldname] syntax, where | ||||||
|         *fieldname* is the Python name of a field, *fieldname* will be |         *fieldname* is the Python name of a field, *fieldname* will be | ||||||
| @@ -1843,8 +1874,8 @@ class BaseQuerySet(object): | |||||||
|             # Substitute the correct name for the field into the javascript |             # Substitute the correct name for the field into the javascript | ||||||
|             return '.'.join([f.db_field for f in fields]) |             return '.'.join([f.db_field for f in fields]) | ||||||
|  |  | ||||||
|         code = re.sub(r'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) |         code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) | ||||||
|         code = re.sub(r'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, |         code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, | ||||||
|                       code) |                       code) | ||||||
|         return code |         return code | ||||||
|  |  | ||||||
|   | |||||||
| @@ -63,11 +63,9 @@ class QueryFieldList(object): | |||||||
|             self._only_called = True |             self._only_called = True | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __bool__(self): |     def __nonzero__(self): | ||||||
|         return bool(self.fields) |         return bool(self.fields) | ||||||
|  |  | ||||||
|     __nonzero__ = __bool__  # For Py2 support |  | ||||||
|  |  | ||||||
|     def as_dict(self): |     def as_dict(self): | ||||||
|         field_list = {field: self.value for field in self.fields} |         field_list = {field: self.value for field in self.fields} | ||||||
|         if self.slice: |         if self.slice: | ||||||
|   | |||||||
| @@ -36,7 +36,7 @@ class QuerySetManager(object): | |||||||
|         queryset_class = owner._meta.get('queryset_class', self.default) |         queryset_class = owner._meta.get('queryset_class', self.default) | ||||||
|         queryset = queryset_class(owner, owner._get_collection()) |         queryset = queryset_class(owner, owner._get_collection()) | ||||||
|         if self.get_queryset: |         if self.get_queryset: | ||||||
|             arg_count = self.get_queryset.__code__.co_argcount |             arg_count = self.get_queryset.func_code.co_argcount | ||||||
|             if arg_count == 1: |             if arg_count == 1: | ||||||
|                 queryset = self.get_queryset(queryset) |                 queryset = self.get_queryset(queryset) | ||||||
|             elif arg_count == 2: |             elif arg_count == 2: | ||||||
|   | |||||||
| @@ -1,5 +1,3 @@ | |||||||
| import six |  | ||||||
|  |  | ||||||
| from mongoengine.errors import OperationError | from mongoengine.errors import OperationError | ||||||
| from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, | from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, | ||||||
|                                        NULLIFY, PULL) |                                        NULLIFY, PULL) | ||||||
| @@ -89,10 +87,10 @@ class QuerySet(BaseQuerySet): | |||||||
|                 yield self._result_cache[pos] |                 yield self._result_cache[pos] | ||||||
|                 pos += 1 |                 pos += 1 | ||||||
|  |  | ||||||
|             # return if we already established there were no more |             # Raise StopIteration if we already established there were no more | ||||||
|             # docs in the db cursor. |             # docs in the db cursor. | ||||||
|             if not self._has_more: |             if not self._has_more: | ||||||
|                 return |                 raise StopIteration | ||||||
|  |  | ||||||
|             # Otherwise, populate more of the cache and repeat. |             # Otherwise, populate more of the cache and repeat. | ||||||
|             if len(self._result_cache) <= pos: |             if len(self._result_cache) <= pos: | ||||||
| @@ -114,8 +112,8 @@ class QuerySet(BaseQuerySet): | |||||||
|         # Pull in ITER_CHUNK_SIZE docs from the database and store them in |         # Pull in ITER_CHUNK_SIZE docs from the database and store them in | ||||||
|         # the result cache. |         # the result cache. | ||||||
|         try: |         try: | ||||||
|             for _ in six.moves.range(ITER_CHUNK_SIZE): |             for _ in xrange(ITER_CHUNK_SIZE): | ||||||
|                 self._result_cache.append(six.next(self)) |                 self._result_cache.append(self.next()) | ||||||
|         except StopIteration: |         except StopIteration: | ||||||
|             # Getting this exception means there are no more docs in the |             # Getting this exception means there are no more docs in the | ||||||
|             # db cursor. Set _has_more to False so that we can use that |             # db cursor. Set _has_more to False so that we can use that | ||||||
| @@ -168,9 +166,9 @@ class QuerySetNoCache(BaseQuerySet): | |||||||
|             return '.. queryset mid-iteration ..' |             return '.. queryset mid-iteration ..' | ||||||
|  |  | ||||||
|         data = [] |         data = [] | ||||||
|         for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): |         for _ in xrange(REPR_OUTPUT_SIZE + 1): | ||||||
|             try: |             try: | ||||||
|                 data.append(six.next(self)) |                 data.append(self.next()) | ||||||
|             except StopIteration: |             except StopIteration: | ||||||
|                 break |                 break | ||||||
|  |  | ||||||
| @@ -186,3 +184,10 @@ class QuerySetNoCache(BaseQuerySet): | |||||||
|             queryset = self.clone() |             queryset = self.clone() | ||||||
|         queryset.rewind() |         queryset.rewind() | ||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class QuerySetNoDeRef(QuerySet): | ||||||
|  |     """Special no_dereference QuerySet""" | ||||||
|  |  | ||||||
|  |     def __dereference(items, max_depth=1, instance=None, name=None): | ||||||
|  |         return items | ||||||
|   | |||||||
| @@ -4,11 +4,12 @@ from bson import ObjectId, SON | |||||||
| from bson.dbref import DBRef | from bson.dbref import DBRef | ||||||
| import pymongo | import pymongo | ||||||
| import six | import six | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.base import UPDATE_OPERATORS | from mongoengine.base import UPDATE_OPERATORS | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
|  | from mongoengine.connection import get_connection | ||||||
| from mongoengine.errors import InvalidQueryError | from mongoengine.errors import InvalidQueryError | ||||||
|  | from mongoengine.python_support import IS_PYMONGO_3 | ||||||
|  |  | ||||||
| __all__ = ('query', 'update') | __all__ = ('query', 'update') | ||||||
|  |  | ||||||
| @@ -86,14 +87,35 @@ def query(_doc_cls=None, **kwargs): | |||||||
|             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] |             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] | ||||||
|             singular_ops += STRING_OPERATORS |             singular_ops += STRING_OPERATORS | ||||||
|             if op in singular_ops: |             if op in singular_ops: | ||||||
|  |                 if isinstance(field, six.string_types): | ||||||
|  |                     if (op in STRING_OPERATORS and | ||||||
|  |                             isinstance(value, six.string_types)): | ||||||
|  |                         StringField = _import_class('StringField') | ||||||
|  |                         value = StringField.prepare_query_value(op, value) | ||||||
|  |                     else: | ||||||
|  |                         value = field | ||||||
|  |                 else: | ||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|  |  | ||||||
|                     if isinstance(field, CachedReferenceField) and value: |                     if isinstance(field, CachedReferenceField) and value: | ||||||
|                         value = value['_id'] |                         value = value['_id'] | ||||||
|  |  | ||||||
|             elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): |             elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): | ||||||
|                 # Raise an error if the in/nin/all/near param is not iterable. |                 # Raise an error if the in/nin/all/near param is not iterable. We need a | ||||||
|                 value = _prepare_query_for_iterable(field, op, value) |                 # special check for BaseDocument, because - although it's iterable - using | ||||||
|  |                 # it as such in the context of this method is most definitely a mistake. | ||||||
|  |                 BaseDocument = _import_class('BaseDocument') | ||||||
|  |                 if isinstance(value, BaseDocument): | ||||||
|  |                     raise TypeError("When using the `in`, `nin`, `all`, or " | ||||||
|  |                                     "`near`-operators you can\'t use a " | ||||||
|  |                                     "`Document`, you must wrap your object " | ||||||
|  |                                     "in a list (object -> [object]).") | ||||||
|  |                 elif not hasattr(value, '__iter__'): | ||||||
|  |                     raise TypeError("The `in`, `nin`, `all`, or " | ||||||
|  |                                     "`near`-operators must be applied to an " | ||||||
|  |                                     "iterable (e.g. a list).") | ||||||
|  |                 else: | ||||||
|  |                     value = [field.prepare_query_value(op, v) for v in value] | ||||||
|  |  | ||||||
|             # If we're querying a GenericReferenceField, we need to alter the |             # If we're querying a GenericReferenceField, we need to alter the | ||||||
|             # key depending on the value: |             # key depending on the value: | ||||||
| @@ -138,14 +160,14 @@ def query(_doc_cls=None, **kwargs): | |||||||
|         if op is None or key not in mongo_query: |         if op is None or key not in mongo_query: | ||||||
|             mongo_query[key] = value |             mongo_query[key] = value | ||||||
|         elif key in mongo_query: |         elif key in mongo_query: | ||||||
|             if isinstance(mongo_query[key], dict) and isinstance(value, dict): |             if isinstance(mongo_query[key], dict): | ||||||
|                 mongo_query[key].update(value) |                 mongo_query[key].update(value) | ||||||
|                 # $max/minDistance needs to come last - convert to SON |                 # $max/minDistance needs to come last - convert to SON | ||||||
|                 value_dict = mongo_query[key] |                 value_dict = mongo_query[key] | ||||||
|                 if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ |                 if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ | ||||||
|                         ('$near' in value_dict or '$nearSphere' in value_dict): |                         ('$near' in value_dict or '$nearSphere' in value_dict): | ||||||
|                     value_son = SON() |                     value_son = SON() | ||||||
|                     for k, v in iteritems(value_dict): |                     for k, v in value_dict.iteritems(): | ||||||
|                         if k == '$maxDistance' or k == '$minDistance': |                         if k == '$maxDistance' or k == '$minDistance': | ||||||
|                             continue |                             continue | ||||||
|                         value_son[k] = v |                         value_son[k] = v | ||||||
| @@ -153,14 +175,16 @@ def query(_doc_cls=None, **kwargs): | |||||||
|                     # PyMongo 3+ and MongoDB < 2.6 |                     # PyMongo 3+ and MongoDB < 2.6 | ||||||
|                     near_embedded = False |                     near_embedded = False | ||||||
|                     for near_op in ('$near', '$nearSphere'): |                     for near_op in ('$near', '$nearSphere'): | ||||||
|                         if isinstance(value_dict.get(near_op), dict): |                         if isinstance(value_dict.get(near_op), dict) and ( | ||||||
|  |                                 IS_PYMONGO_3 or get_connection().max_wire_version > 1): | ||||||
|                             value_son[near_op] = SON(value_son[near_op]) |                             value_son[near_op] = SON(value_son[near_op]) | ||||||
|                             if '$maxDistance' in value_dict: |                             if '$maxDistance' in value_dict: | ||||||
|                                 value_son[near_op]['$maxDistance'] = value_dict['$maxDistance'] |                                 value_son[near_op][ | ||||||
|  |                                     '$maxDistance'] = value_dict['$maxDistance'] | ||||||
|                             if '$minDistance' in value_dict: |                             if '$minDistance' in value_dict: | ||||||
|                                 value_son[near_op]['$minDistance'] = value_dict['$minDistance'] |                                 value_son[near_op][ | ||||||
|  |                                     '$minDistance'] = value_dict['$minDistance'] | ||||||
|                             near_embedded = True |                             near_embedded = True | ||||||
|  |  | ||||||
|                     if not near_embedded: |                     if not near_embedded: | ||||||
|                         if '$maxDistance' in value_dict: |                         if '$maxDistance' in value_dict: | ||||||
|                             value_son['$maxDistance'] = value_dict['$maxDistance'] |                             value_son['$maxDistance'] = value_dict['$maxDistance'] | ||||||
| @@ -190,37 +214,30 @@ def update(_doc_cls=None, **update): | |||||||
|     format. |     format. | ||||||
|     """ |     """ | ||||||
|     mongo_update = {} |     mongo_update = {} | ||||||
|  |  | ||||||
|     for key, value in update.items(): |     for key, value in update.items(): | ||||||
|         if key == '__raw__': |         if key == '__raw__': | ||||||
|             mongo_update.update(value) |             mongo_update.update(value) | ||||||
|             continue |             continue | ||||||
|  |  | ||||||
|         parts = key.split('__') |         parts = key.split('__') | ||||||
|  |  | ||||||
|         # if there is no operator, default to 'set' |         # if there is no operator, default to 'set' | ||||||
|         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: |         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: | ||||||
|             parts.insert(0, 'set') |             parts.insert(0, 'set') | ||||||
|  |  | ||||||
|         # Check for an operator and transform to mongo-style if there is |         # Check for an operator and transform to mongo-style if there is | ||||||
|         op = None |         op = None | ||||||
|         if parts[0] in UPDATE_OPERATORS: |         if parts[0] in UPDATE_OPERATORS: | ||||||
|             op = parts.pop(0) |             op = parts.pop(0) | ||||||
|             # Convert Pythonic names to Mongo equivalents |             # Convert Pythonic names to Mongo equivalents | ||||||
|             operator_map = { |             if op in ('push_all', 'pull_all'): | ||||||
|                 'push_all': 'pushAll', |                 op = op.replace('_all', 'All') | ||||||
|                 'pull_all': 'pullAll', |             elif op == 'dec': | ||||||
|                 'dec': 'inc', |  | ||||||
|                 'add_to_set': 'addToSet', |  | ||||||
|                 'set_on_insert': 'setOnInsert' |  | ||||||
|             } |  | ||||||
|             if op == 'dec': |  | ||||||
|                 # Support decrement by flipping a positive value's sign |                 # Support decrement by flipping a positive value's sign | ||||||
|                 # and using 'inc' |                 # and using 'inc' | ||||||
|  |                 op = 'inc' | ||||||
|                 value = -value |                 value = -value | ||||||
|             # If the operator doesn't found from operator map, the op value |             elif op == 'add_to_set': | ||||||
|             # will stay unchanged |                 op = 'addToSet' | ||||||
|             op = operator_map.get(op, op) |             elif op == 'set_on_insert': | ||||||
|  |                 op = 'setOnInsert' | ||||||
|  |  | ||||||
|         match = None |         match = None | ||||||
|         if parts[-1] in COMPARISON_OPERATORS: |         if parts[-1] in COMPARISON_OPERATORS: | ||||||
| @@ -267,15 +284,7 @@ def update(_doc_cls=None, **update): | |||||||
|             if isinstance(field, GeoJsonBaseField): |             if isinstance(field, GeoJsonBaseField): | ||||||
|                 value = field.to_mongo(value) |                 value = field.to_mongo(value) | ||||||
|  |  | ||||||
|             if op == 'pull': |             if op in (None, 'set', 'push', 'pull'): | ||||||
|                 if field.required or value is not None: |  | ||||||
|                     if match in ('in', 'nin') and not isinstance(value, dict): |  | ||||||
|                         value = _prepare_query_for_iterable(field, op, value) |  | ||||||
|                     else: |  | ||||||
|                         value = field.prepare_query_value(op, value) |  | ||||||
|             elif op == 'push' and isinstance(value, (list, tuple, set)): |  | ||||||
|                 value = [field.prepare_query_value(op, v) for v in value] |  | ||||||
|             elif op in (None, 'set', 'push'): |  | ||||||
|                 if field.required or value is not None: |                 if field.required or value is not None: | ||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|             elif op in ('pushAll', 'pullAll'): |             elif op in ('pushAll', 'pullAll'): | ||||||
| @@ -287,8 +296,6 @@ def update(_doc_cls=None, **update): | |||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|             elif op == 'unset': |             elif op == 'unset': | ||||||
|                 value = 1 |                 value = 1 | ||||||
|             elif op == 'inc': |  | ||||||
|                 value = field.prepare_query_value(op, value) |  | ||||||
|  |  | ||||||
|         if match: |         if match: | ||||||
|             match = '$' + match |             match = '$' + match | ||||||
| @@ -296,6 +303,10 @@ def update(_doc_cls=None, **update): | |||||||
|  |  | ||||||
|         key = '.'.join(parts) |         key = '.'.join(parts) | ||||||
|  |  | ||||||
|  |         if not op: | ||||||
|  |             raise InvalidQueryError('Updates must supply an operation ' | ||||||
|  |                                     'eg: set__FIELD=value') | ||||||
|  |  | ||||||
|         if 'pull' in op and '.' in key: |         if 'pull' in op and '.' in key: | ||||||
|             # Dot operators don't work on pull operations |             # Dot operators don't work on pull operations | ||||||
|             # unless they point to a list field |             # unless they point to a list field | ||||||
| @@ -308,17 +319,11 @@ def update(_doc_cls=None, **update): | |||||||
|             field_classes = [c.__class__ for c in cleaned_fields] |             field_classes = [c.__class__ for c in cleaned_fields] | ||||||
|             field_classes.reverse() |             field_classes.reverse() | ||||||
|             ListField = _import_class('ListField') |             ListField = _import_class('ListField') | ||||||
|             EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') |  | ||||||
|             if ListField in field_classes or EmbeddedDocumentListField in field_classes: |  | ||||||
|                 # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField |  | ||||||
|                 # Then process as normal |  | ||||||
|             if ListField in field_classes: |             if ListField in field_classes: | ||||||
|                     _check_field = ListField |                 # Join all fields via dot notation to the last ListField | ||||||
|                 else: |                 # Then process as normal | ||||||
|                     _check_field = EmbeddedDocumentListField |  | ||||||
|  |  | ||||||
|                 last_listField = len( |                 last_listField = len( | ||||||
|                     cleaned_fields) - field_classes.index(_check_field) |                     cleaned_fields) - field_classes.index(ListField) | ||||||
|                 key = '.'.join(parts[:last_listField]) |                 key = '.'.join(parts[:last_listField]) | ||||||
|                 parts = parts[last_listField:] |                 parts = parts[last_listField:] | ||||||
|                 parts.insert(0, key) |                 parts.insert(0, key) | ||||||
| @@ -328,26 +333,10 @@ def update(_doc_cls=None, **update): | |||||||
|                 value = {key: value} |                 value = {key: value} | ||||||
|         elif op == 'addToSet' and isinstance(value, list): |         elif op == 'addToSet' and isinstance(value, list): | ||||||
|             value = {key: {'$each': value}} |             value = {key: {'$each': value}} | ||||||
|         elif op in ('push', 'pushAll'): |  | ||||||
|             if parts[-1].isdigit(): |  | ||||||
|                 key = '.'.join(parts[0:-1]) |  | ||||||
|                 position = int(parts[-1]) |  | ||||||
|                 # $position expects an iterable. If pushing a single value, |  | ||||||
|                 # wrap it in a list. |  | ||||||
|                 if not isinstance(value, (set, tuple, list)): |  | ||||||
|                     value = [value] |  | ||||||
|                 value = {key: {'$each': value, '$position': position}} |  | ||||||
|             else: |  | ||||||
|                 if op == 'pushAll': |  | ||||||
|                     op = 'push'  # convert to non-deprecated keyword |  | ||||||
|                     if not isinstance(value, (set, tuple, list)): |  | ||||||
|                         value = [value] |  | ||||||
|                     value = {key: {'$each': value}} |  | ||||||
|                 else: |  | ||||||
|                     value = {key: value} |  | ||||||
|         else: |         else: | ||||||
|             value = {key: value} |             value = {key: value} | ||||||
|         key = '$' + op |         key = '$' + op | ||||||
|  |  | ||||||
|         if key not in mongo_update: |         if key not in mongo_update: | ||||||
|             mongo_update[key] = value |             mongo_update[key] = value | ||||||
|         elif key in mongo_update and isinstance(mongo_update[key], dict): |         elif key in mongo_update and isinstance(mongo_update[key], dict): | ||||||
| @@ -414,6 +403,7 @@ def _infer_geometry(value): | |||||||
|                                 'type and coordinates keys') |                                 'type and coordinates keys') | ||||||
|     elif isinstance(value, (list, set)): |     elif isinstance(value, (list, set)): | ||||||
|         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? |         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? | ||||||
|  |         # TODO: should both TypeError and IndexError be alike interpreted? | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             value[0][0][0] |             value[0][0][0] | ||||||
| @@ -435,22 +425,3 @@ def _infer_geometry(value): | |||||||
|  |  | ||||||
|     raise InvalidQueryError('Invalid $geometry data. Can be either a ' |     raise InvalidQueryError('Invalid $geometry data. Can be either a ' | ||||||
|                             'dictionary or (nested) lists of coordinate(s)') |                             'dictionary or (nested) lists of coordinate(s)') | ||||||
|  |  | ||||||
|  |  | ||||||
| def _prepare_query_for_iterable(field, op, value): |  | ||||||
|     # We need a special check for BaseDocument, because - although it's iterable - using |  | ||||||
|     # it as such in the context of this method is most definitely a mistake. |  | ||||||
|     BaseDocument = _import_class('BaseDocument') |  | ||||||
|  |  | ||||||
|     if isinstance(value, BaseDocument): |  | ||||||
|         raise TypeError("When using the `in`, `nin`, `all`, or " |  | ||||||
|                         "`near`-operators you can\'t use a " |  | ||||||
|                         "`Document`, you must wrap your object " |  | ||||||
|                         "in a list (object -> [object]).") |  | ||||||
|  |  | ||||||
|     if not hasattr(value, '__iter__'): |  | ||||||
|         raise TypeError("The `in`, `nin`, `all`, or " |  | ||||||
|                         "`near`-operators must be applied to an " |  | ||||||
|                         "iterable (e.g. a list).") |  | ||||||
|  |  | ||||||
|     return [field.prepare_query_value(op, v) for v in value] |  | ||||||
|   | |||||||
| @@ -3,7 +3,7 @@ import copy | |||||||
| from mongoengine.errors import InvalidQueryError | from mongoengine.errors import InvalidQueryError | ||||||
| from mongoengine.queryset import transform | from mongoengine.queryset import transform | ||||||
|  |  | ||||||
| __all__ = ('Q', 'QNode') | __all__ = ('Q',) | ||||||
|  |  | ||||||
|  |  | ||||||
| class QNodeVisitor(object): | class QNodeVisitor(object): | ||||||
| @@ -131,10 +131,6 @@ class QCombination(QNode): | |||||||
|             else: |             else: | ||||||
|                 self.children.append(node) |                 self.children.append(node) | ||||||
|  |  | ||||||
|     def __repr__(self): |  | ||||||
|         op = ' & ' if self.operation is self.AND else ' | ' |  | ||||||
|         return '(%s)' % op.join([repr(node) for node in self.children]) |  | ||||||
|  |  | ||||||
|     def accept(self, visitor): |     def accept(self, visitor): | ||||||
|         for i in range(len(self.children)): |         for i in range(len(self.children)): | ||||||
|             if isinstance(self.children[i], QNode): |             if isinstance(self.children[i], QNode): | ||||||
| @@ -155,9 +151,6 @@ class Q(QNode): | |||||||
|     def __init__(self, **query): |     def __init__(self, **query): | ||||||
|         self.query = query |         self.query = query | ||||||
|  |  | ||||||
|     def __repr__(self): |  | ||||||
|         return 'Q(**%s)' % repr(self.query) |  | ||||||
|  |  | ||||||
|     def accept(self, visitor): |     def accept(self, visitor): | ||||||
|         return visitor.visit_query(self) |         return visitor.visit_query(self) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,7 +1,5 @@ | |||||||
| nose | nose | ||||||
| pymongo>=3.4 | pymongo>=2.7.1 | ||||||
| six==1.10.0 | six==1.10.0 | ||||||
| flake8 | flake8 | ||||||
| flake8-import-order | flake8-import-order | ||||||
| Sphinx==1.5.5 |  | ||||||
| sphinx-rtd-theme==0.2.4 |  | ||||||
|   | |||||||
| @@ -1,11 +1,11 @@ | |||||||
| [nosetests] | [nosetests] | ||||||
| verbosity=2 | verbosity=2 | ||||||
| detailed-errors=1 | detailed-errors=1 | ||||||
| #tests=tests | tests=tests | ||||||
| cover-package=mongoengine | cover-package=mongoengine | ||||||
|  |  | ||||||
| [flake8] | [flake8] | ||||||
| ignore=E501,F401,F403,F405,I201,I202,W504, W605 | ignore=E501,F401,F403,F405,I201 | ||||||
| exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | ||||||
| max-complexity=47 | max-complexity=47 | ||||||
| application-import-names=mongoengine,tests | application-import-names=mongoengine,tests | ||||||
|   | |||||||
							
								
								
									
										11
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										11
									
								
								setup.py
									
									
									
									
									
								
							| @@ -44,8 +44,9 @@ CLASSIFIERS = [ | |||||||
|     "Programming Language :: Python :: 2", |     "Programming Language :: Python :: 2", | ||||||
|     "Programming Language :: Python :: 2.7", |     "Programming Language :: Python :: 2.7", | ||||||
|     "Programming Language :: Python :: 3", |     "Programming Language :: Python :: 3", | ||||||
|  |     "Programming Language :: Python :: 3.3", | ||||||
|  |     "Programming Language :: Python :: 3.4", | ||||||
|     "Programming Language :: Python :: 3.5", |     "Programming Language :: Python :: 3.5", | ||||||
|     "Programming Language :: Python :: 3.6", |  | ||||||
|     "Programming Language :: Python :: Implementation :: CPython", |     "Programming Language :: Python :: Implementation :: CPython", | ||||||
|     "Programming Language :: Python :: Implementation :: PyPy", |     "Programming Language :: Python :: Implementation :: PyPy", | ||||||
|     'Topic :: Database', |     'Topic :: Database', | ||||||
| @@ -69,9 +70,9 @@ setup( | |||||||
|     name='mongoengine', |     name='mongoengine', | ||||||
|     version=VERSION, |     version=VERSION, | ||||||
|     author='Harry Marr', |     author='Harry Marr', | ||||||
|     author_email='harry.marr@gmail.com', |     author_email='harry.marr@{nospam}gmail.com', | ||||||
|     maintainer="Stefan Wojcik", |     maintainer="Ross Lawley", | ||||||
|     maintainer_email="wojcikstefan@gmail.com", |     maintainer_email="ross.lawley@{nospam}gmail.com", | ||||||
|     url='http://mongoengine.org/', |     url='http://mongoengine.org/', | ||||||
|     download_url='https://github.com/MongoEngine/mongoengine/tarball/master', |     download_url='https://github.com/MongoEngine/mongoengine/tarball/master', | ||||||
|     license='MIT', |     license='MIT', | ||||||
| @@ -80,7 +81,7 @@ setup( | |||||||
|     long_description=LONG_DESCRIPTION, |     long_description=LONG_DESCRIPTION, | ||||||
|     platforms=['any'], |     platforms=['any'], | ||||||
|     classifiers=CLASSIFIERS, |     classifiers=CLASSIFIERS, | ||||||
|     install_requires=['pymongo>=3.4', 'six'], |     install_requires=['pymongo>=2.7.1', 'six'], | ||||||
|     test_suite='nose.collector', |     test_suite='nose.collector', | ||||||
|     **extra_opts |     **extra_opts | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -1,4 +1,4 @@ | |||||||
| from .all_warnings import AllWarnings | from all_warnings import AllWarnings | ||||||
| from .document import * | from document import * | ||||||
| from .queryset import * | from queryset import * | ||||||
| from .fields import * | from fields import * | ||||||
|   | |||||||
| @@ -1,13 +1,13 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from .class_methods import * | from class_methods import * | ||||||
| from .delta import * | from delta import * | ||||||
| from .dynamic import * | from dynamic import * | ||||||
| from .indexes import * | from indexes import * | ||||||
| from .inheritance import * | from inheritance import * | ||||||
| from .instance import * | from instance import * | ||||||
| from .json_serialisation import * | from json_serialisation import * | ||||||
| from .validation import * | from validation import * | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -2,7 +2,6 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.pymongo_support import list_collection_names |  | ||||||
|  |  | ||||||
| from mongoengine.queryset import NULLIFY, PULL | from mongoengine.queryset import NULLIFY, PULL | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| @@ -27,7 +26,9 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         self.Person = Person |         self.Person = Person | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         for collection in list_collection_names(self.db): |         for collection in self.db.collection_names(): | ||||||
|  |             if 'system.' in collection: | ||||||
|  |                 continue | ||||||
|             self.db.drop_collection(collection) |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|     def test_definition(self): |     def test_definition(self): | ||||||
| @@ -64,10 +65,10 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         """ |         """ | ||||||
|         collection_name = 'person' |         collection_name = 'person' | ||||||
|         self.Person(name='Test').save() |         self.Person(name='Test').save() | ||||||
|         self.assertIn(collection_name, list_collection_names(self.db)) |         self.assertTrue(collection_name in self.db.collection_names()) | ||||||
|  |  | ||||||
|         self.Person.drop_collection() |         self.Person.drop_collection() | ||||||
|         self.assertNotIn(collection_name, list_collection_names(self.db)) |         self.assertFalse(collection_name in self.db.collection_names()) | ||||||
|  |  | ||||||
|     def test_register_delete_rule(self): |     def test_register_delete_rule(self): | ||||||
|         """Ensure that register delete rule adds a delete rule to the document |         """Ensure that register delete rule adds a delete rule to the document | ||||||
| @@ -100,16 +101,16 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         BlogPost.ensure_indexes() |         BlogPost.ensure_indexes() | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||||
|  |  | ||||||
|         BlogPost.ensure_index(['author', 'description']) |         BlogPost.ensure_index(['author', 'description']) | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('author', 1), ('description', 1)]]}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] }) | ||||||
|  |  | ||||||
|         BlogPost._get_collection().drop_index('author_1_description_1') |         BlogPost._get_collection().drop_index('author_1_description_1') | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||||
|  |  | ||||||
|         BlogPost._get_collection().drop_index('author_1_title_1') |         BlogPost._get_collection().drop_index('author_1_title_1') | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('author', 1), ('title', 1)]], 'extra': []}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] }) | ||||||
|  |  | ||||||
|     def test_compare_indexes_inheritance(self): |     def test_compare_indexes_inheritance(self): | ||||||
|         """ Ensure that the indexes are properly created and that |         """ Ensure that the indexes are properly created and that | ||||||
| @@ -138,16 +139,16 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         BlogPost.ensure_indexes() |         BlogPost.ensure_indexes() | ||||||
|         BlogPostWithTags.ensure_indexes() |         BlogPostWithTags.ensure_indexes() | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||||
|  |  | ||||||
|         BlogPostWithTags.ensure_index(['author', 'tag_list']) |         BlogPostWithTags.ensure_index(['author', 'tag_list']) | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]]}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] }) | ||||||
|  |  | ||||||
|         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') |         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||||
|  |  | ||||||
|         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') |         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': []}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] }) | ||||||
|  |  | ||||||
|     def test_compare_indexes_multiple_subclasses(self): |     def test_compare_indexes_multiple_subclasses(self): | ||||||
|         """ Ensure that compare_indexes behaves correctly if called from a |         """ Ensure that compare_indexes behaves correctly if called from a | ||||||
| @@ -182,28 +183,9 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         BlogPostWithTags.ensure_indexes() |         BlogPostWithTags.ensure_indexes() | ||||||
|         BlogPostWithCustomField.ensure_indexes() |         BlogPostWithCustomField.ensure_indexes() | ||||||
|  |  | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||||
|         self.assertEqual(BlogPostWithTags.compare_indexes(), {'missing': [], 'extra': []}) |         self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||||
|         self.assertEqual(BlogPostWithCustomField.compare_indexes(), {'missing': [], 'extra': []}) |         self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||||
|  |  | ||||||
|     def test_compare_indexes_for_text_indexes(self): |  | ||||||
|         """ Ensure that compare_indexes behaves correctly for text indexes """ |  | ||||||
|  |  | ||||||
|         class Doc(Document): |  | ||||||
|             a = StringField() |  | ||||||
|             b = StringField() |  | ||||||
|             meta = {'indexes': [ |  | ||||||
|                 {'fields': ['$a', "$b"], |  | ||||||
|                  'default_language': 'english', |  | ||||||
|                  'weights': {'a': 10, 'b': 2} |  | ||||||
|                 } |  | ||||||
|             ]} |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         Doc.ensure_indexes() |  | ||||||
|         actual = Doc.compare_indexes() |  | ||||||
|         expected = {'missing': [], 'extra': []} |  | ||||||
|         self.assertEqual(actual, expected) |  | ||||||
|  |  | ||||||
|     def test_list_indexes_inheritance(self): |     def test_list_indexes_inheritance(self): | ||||||
|         """ ensure that all of the indexes are listed regardless of the super- |         """ ensure that all of the indexes are listed regardless of the super- | ||||||
| @@ -337,7 +319,7 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|             meta = {'collection': collection_name} |             meta = {'collection': collection_name} | ||||||
|  |  | ||||||
|         Person(name="Test User").save() |         Person(name="Test User").save() | ||||||
|         self.assertIn(collection_name, list_collection_names(self.db)) |         self.assertTrue(collection_name in self.db.collection_names()) | ||||||
|  |  | ||||||
|         user_obj = self.db[collection_name].find_one() |         user_obj = self.db[collection_name].find_one() | ||||||
|         self.assertEqual(user_obj['name'], "Test User") |         self.assertEqual(user_obj['name'], "Test User") | ||||||
| @@ -346,7 +328,7 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         self.assertEqual(user_obj.name, "Test User") |         self.assertEqual(user_obj.name, "Test User") | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|         self.assertNotIn(collection_name, list_collection_names(self.db)) |         self.assertFalse(collection_name in self.db.collection_names()) | ||||||
|  |  | ||||||
|     def test_collection_name_and_primary(self): |     def test_collection_name_and_primary(self): | ||||||
|         """Ensure that a collection with a specified name may be used. |         """Ensure that a collection with a specified name may be used. | ||||||
|   | |||||||
| @@ -3,14 +3,16 @@ import unittest | |||||||
|  |  | ||||||
| from bson import SON | from bson import SON | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.pymongo_support import list_collection_names | from mongoengine.connection import get_db | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  | __all__ = ("DeltaTest",) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeltaTest(MongoDBTestCase): | class DeltaTest(unittest.TestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         super(DeltaTest, self).setUp() |         connect(db='mongoenginetest') | ||||||
|  |         self.db = get_db() | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -23,7 +25,9 @@ class DeltaTest(MongoDBTestCase): | |||||||
|         self.Person = Person |         self.Person = Person | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         for collection in list_collection_names(self.db): |         for collection in self.db.collection_names(): | ||||||
|  |             if 'system.' in collection: | ||||||
|  |                 continue | ||||||
|             self.db.drop_collection(collection) |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|     def test_delta(self): |     def test_delta(self): | ||||||
| @@ -690,7 +694,7 @@ class DeltaTest(MongoDBTestCase): | |||||||
|         organization.employees.append(person) |         organization.employees.append(person) | ||||||
|         updates, removals = organization._delta() |         updates, removals = organization._delta() | ||||||
|         self.assertEqual({}, removals) |         self.assertEqual({}, removals) | ||||||
|         self.assertIn('employees', updates) |         self.assertTrue('employees' in updates) | ||||||
|  |  | ||||||
|     def test_delta_with_dbref_false(self): |     def test_delta_with_dbref_false(self): | ||||||
|         person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) |         person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) | ||||||
| @@ -705,7 +709,7 @@ class DeltaTest(MongoDBTestCase): | |||||||
|         organization.employees.append(person) |         organization.employees.append(person) | ||||||
|         updates, removals = organization._delta() |         updates, removals = organization._delta() | ||||||
|         self.assertEqual({}, removals) |         self.assertEqual({}, removals) | ||||||
|         self.assertIn('employees', updates) |         self.assertTrue('employees' in updates) | ||||||
|  |  | ||||||
|     def test_nested_nested_fields_mark_as_changed(self): |     def test_nested_nested_fields_mark_as_changed(self): | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |         class EmbeddedDoc(EmbeddedDocument): | ||||||
| @@ -859,6 +863,5 @@ class DeltaTest(MongoDBTestCase): | |||||||
|         self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) |         self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) | ||||||
|         self.assertEqual(uinfo.id, delta[0]["users.007.info"]) |         self.assertEqual(uinfo.id, delta[0]["users.007.info"]) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -1,15 +1,16 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase | from mongoengine.connection import get_db | ||||||
|  |  | ||||||
| __all__ = ("TestDynamicDocument", ) | __all__ = ("DynamicTest", ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestDynamicDocument(MongoDBTestCase): | class DynamicTest(unittest.TestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         super(TestDynamicDocument, self).setUp() |         connect(db='mongoenginetest') | ||||||
|  |         self.db = get_db() | ||||||
|  |  | ||||||
|         class Person(DynamicDocument): |         class Person(DynamicDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -97,72 +98,6 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         self.assertEqual(len(p._data), 4) |         self.assertEqual(len(p._data), 4) | ||||||
|         self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) |         self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) | ||||||
|  |  | ||||||
|     def test_fields_without_underscore(self): |  | ||||||
|         """Ensure we can query dynamic fields""" |  | ||||||
|         Person = self.Person |  | ||||||
|  |  | ||||||
|         p = self.Person(name='Dean') |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         raw_p = Person.objects.as_pymongo().get(id=p.id) |  | ||||||
|         self.assertEqual( |  | ||||||
|             raw_p, |  | ||||||
|             { |  | ||||||
|                 '_cls': u'Person', |  | ||||||
|                 '_id': p.id, |  | ||||||
|                 'name': u'Dean' |  | ||||||
|              } |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         p.name = 'OldDean' |  | ||||||
|         p.newattr = 'garbage' |  | ||||||
|         p.save() |  | ||||||
|         raw_p = Person.objects.as_pymongo().get(id=p.id) |  | ||||||
|         self.assertEqual( |  | ||||||
|             raw_p, |  | ||||||
|             { |  | ||||||
|                 '_cls': u'Person', |  | ||||||
|                 '_id': p.id, |  | ||||||
|                 'name': 'OldDean', |  | ||||||
|                 'newattr': u'garbage' |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_fields_containing_underscore(self): |  | ||||||
|         """Ensure we can query dynamic fields""" |  | ||||||
|         class WeirdPerson(DynamicDocument): |  | ||||||
|             name = StringField() |  | ||||||
|             _name = StringField() |  | ||||||
|  |  | ||||||
|         WeirdPerson.drop_collection() |  | ||||||
|  |  | ||||||
|         p = WeirdPerson(name='Dean', _name='Dean') |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) |  | ||||||
|         self.assertEqual( |  | ||||||
|             raw_p, |  | ||||||
|             { |  | ||||||
|                 '_id': p.id, |  | ||||||
|                 '_name': u'Dean', |  | ||||||
|                 'name': u'Dean' |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         p.name = 'OldDean' |  | ||||||
|         p._name = 'NewDean' |  | ||||||
|         p._newattr1 = 'garbage'    # Unknown fields won't be added |  | ||||||
|         p.save() |  | ||||||
|         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) |  | ||||||
|         self.assertEqual( |  | ||||||
|             raw_p, |  | ||||||
|             { |  | ||||||
|                 '_id': p.id, |  | ||||||
|                 '_name': u'NewDean', |  | ||||||
|                 'name': u'OldDean', |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_dynamic_document_queries(self): |     def test_dynamic_document_queries(self): | ||||||
|         """Ensure we can query dynamic fields""" |         """Ensure we can query dynamic fields""" | ||||||
|         p = self.Person() |         p = self.Person() | ||||||
| @@ -239,8 +174,8 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Employee.drop_collection() |         Employee.drop_collection() | ||||||
|  |  | ||||||
|         self.assertIn('name', Employee._fields) |         self.assertTrue('name' in Employee._fields) | ||||||
|         self.assertIn('salary', Employee._fields) |         self.assertTrue('salary' in Employee._fields) | ||||||
|         self.assertEqual(Employee._get_collection_name(), |         self.assertEqual(Employee._get_collection_name(), | ||||||
|                          self.Person._get_collection_name()) |                          self.Person._get_collection_name()) | ||||||
|  |  | ||||||
| @@ -254,7 +189,7 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         self.assertEqual(1, Employee.objects(age=20).count()) |         self.assertEqual(1, Employee.objects(age=20).count()) | ||||||
|  |  | ||||||
|         joe_bloggs = self.Person.objects.first() |         joe_bloggs = self.Person.objects.first() | ||||||
|         self.assertIsInstance(joe_bloggs, Employee) |         self.assertTrue(isinstance(joe_bloggs, Employee)) | ||||||
|  |  | ||||||
|     def test_embedded_dynamic_document(self): |     def test_embedded_dynamic_document(self): | ||||||
|         """Test dynamic embedded documents""" |         """Test dynamic embedded documents""" | ||||||
| @@ -434,6 +369,5 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         person.save() |         person.save() | ||||||
|         self.assertEqual(Person.objects.first().age, 35) |         self.assertEqual(Person.objects.first().age, 35) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -1,15 +1,16 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import unittest | import unittest | ||||||
| from datetime import datetime | import sys | ||||||
|  |  | ||||||
| from nose.plugins.skip import SkipTest | from nose.plugins.skip import SkipTest | ||||||
| from pymongo.errors import OperationFailure | from datetime import datetime | ||||||
| import pymongo | import pymongo | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
|  |  | ||||||
|  | from tests.utils import get_mongodb_version, needs_mongodb_v26 | ||||||
|  |  | ||||||
| __all__ = ("IndexesTest", ) | __all__ = ("IndexesTest", ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -67,9 +68,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|         info = BlogPost.objects._collection.index_information() |         info = BlogPost.objects._collection.index_information() | ||||||
|         # _id, '-date', 'tags', ('cat', 'date') |         # _id, '-date', 'tags', ('cat', 'date') | ||||||
|         self.assertEqual(len(info), 4) |         self.assertEqual(len(info), 4) | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         for expected in expected_specs: |         for expected in expected_specs: | ||||||
|             self.assertIn(expected['fields'], info) |             self.assertTrue(expected['fields'] in info) | ||||||
|  |  | ||||||
|     def _index_test_inheritance(self, InheritFrom): |     def _index_test_inheritance(self, InheritFrom): | ||||||
|  |  | ||||||
| @@ -99,9 +100,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|         # the indices on -date and tags will both contain |         # the indices on -date and tags will both contain | ||||||
|         # _cls as first element in the key |         # _cls as first element in the key | ||||||
|         self.assertEqual(len(info), 4) |         self.assertEqual(len(info), 4) | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         for expected in expected_specs: |         for expected in expected_specs: | ||||||
|             self.assertIn(expected['fields'], info) |             self.assertTrue(expected['fields'] in info) | ||||||
|  |  | ||||||
|         class ExtendedBlogPost(BlogPost): |         class ExtendedBlogPost(BlogPost): | ||||||
|             title = StringField() |             title = StringField() | ||||||
| @@ -114,9 +115,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         ExtendedBlogPost.ensure_indexes() |         ExtendedBlogPost.ensure_indexes() | ||||||
|         info = ExtendedBlogPost.objects._collection.index_information() |         info = ExtendedBlogPost.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         for expected in expected_specs: |         for expected in expected_specs: | ||||||
|             self.assertIn(expected['fields'], info) |             self.assertTrue(expected['fields'] in info) | ||||||
|  |  | ||||||
|     def test_indexes_document_inheritance(self): |     def test_indexes_document_inheritance(self): | ||||||
|         """Ensure that indexes are used when meta[indexes] is specified for |         """Ensure that indexes are used when meta[indexes] is specified for | ||||||
| @@ -224,8 +225,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|         # Indexes are lazy so use list() to perform query |         # Indexes are lazy so use list() to perform query | ||||||
|         list(Person.objects) |         list(Person.objects) | ||||||
|         info = Person.objects._collection.index_information() |         info = Person.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         self.assertIn([('rank.title', 1)], info) |         self.assertTrue([('rank.title', 1)] in info) | ||||||
|  |  | ||||||
|     def test_explicit_geo2d_index(self): |     def test_explicit_geo2d_index(self): | ||||||
|         """Ensure that geo2d indexes work when created via meta[indexes] |         """Ensure that geo2d indexes work when created via meta[indexes] | ||||||
| @@ -244,8 +245,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.ensure_indexes() |         Place.ensure_indexes() | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         self.assertIn([('location.point', '2d')], info) |         self.assertTrue([('location.point', '2d')] in info) | ||||||
|  |  | ||||||
|     def test_explicit_geo2d_index_embedded(self): |     def test_explicit_geo2d_index_embedded(self): | ||||||
|         """Ensure that geo2d indexes work when created via meta[indexes] |         """Ensure that geo2d indexes work when created via meta[indexes] | ||||||
| @@ -267,8 +268,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.ensure_indexes() |         Place.ensure_indexes() | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         self.assertIn([('current.location.point', '2d')], info) |         self.assertTrue([('current.location.point', '2d')] in info) | ||||||
|  |  | ||||||
|     def test_explicit_geosphere_index(self): |     def test_explicit_geosphere_index(self): | ||||||
|         """Ensure that geosphere indexes work when created via meta[indexes] |         """Ensure that geosphere indexes work when created via meta[indexes] | ||||||
| @@ -287,8 +288,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.ensure_indexes() |         Place.ensure_indexes() | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         self.assertIn([('location.point', '2dsphere')], info) |         self.assertTrue([('location.point', '2dsphere')] in info) | ||||||
|  |  | ||||||
|     def test_explicit_geohaystack_index(self): |     def test_explicit_geohaystack_index(self): | ||||||
|         """Ensure that geohaystack indexes work when created via meta[indexes] |         """Ensure that geohaystack indexes work when created via meta[indexes] | ||||||
| @@ -309,8 +310,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.ensure_indexes() |         Place.ensure_indexes() | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         self.assertIn([('location.point', 'geoHaystack')], info) |         self.assertTrue([('location.point', 'geoHaystack')] in info) | ||||||
|  |  | ||||||
|     def test_create_geohaystack_index(self): |     def test_create_geohaystack_index(self): | ||||||
|         """Ensure that geohaystack indexes can be created |         """Ensure that geohaystack indexes can be created | ||||||
| @@ -321,8 +322,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) |         Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         self.assertIn([('location.point', 'geoHaystack'), ('name', 1)], info) |         self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info) | ||||||
|  |  | ||||||
|     def test_dictionary_indexes(self): |     def test_dictionary_indexes(self): | ||||||
|         """Ensure that indexes are used when meta[indexes] contains |         """Ensure that indexes are used when meta[indexes] contains | ||||||
| @@ -354,8 +355,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|         info = [(value['key'], |         info = [(value['key'], | ||||||
|                  value.get('unique', False), |                  value.get('unique', False), | ||||||
|                  value.get('sparse', False)) |                  value.get('sparse', False)) | ||||||
|                 for key, value in iteritems(info)] |                 for key, value in info.iteritems()] | ||||||
|         self.assertIn(([('addDate', -1)], True, True), info) |         self.assertTrue(([('addDate', -1)], True, True) in info) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
| @@ -406,7 +407,7 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertEqual(2, User.objects.count()) |         self.assertEqual(2, User.objects.count()) | ||||||
|         info = User.objects._collection.index_information() |         info = User.objects._collection.index_information() | ||||||
|         self.assertEqual(list(info.keys()), ['_id_']) |         self.assertEqual(info.keys(), ['_id_']) | ||||||
|  |  | ||||||
|         User.ensure_indexes() |         User.ensure_indexes() | ||||||
|         info = User.objects._collection.index_information() |         info = User.objects._collection.index_information() | ||||||
| @@ -475,6 +476,7 @@ class IndexesTest(unittest.TestCase): | |||||||
|     def test_covered_index(self): |     def test_covered_index(self): | ||||||
|         """Ensure that covered indexes can be used |         """Ensure that covered indexes can be used | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Test(Document): |         class Test(Document): | ||||||
|             a = IntField() |             a = IntField() | ||||||
|             b = IntField() |             b = IntField() | ||||||
| @@ -489,41 +491,38 @@ class IndexesTest(unittest.TestCase): | |||||||
|         obj = Test(a=1) |         obj = Test(a=1) | ||||||
|         obj.save() |         obj.save() | ||||||
|  |  | ||||||
|  |         IS_MONGODB_3 = get_mongodb_version()[0] >= 3 | ||||||
|  |  | ||||||
|         # Need to be explicit about covered indexes as mongoDB doesn't know if |         # Need to be explicit about covered indexes as mongoDB doesn't know if | ||||||
|         # the documents returned might have more keys in that here. |         # the documents returned might have more keys in that here. | ||||||
|         query_plan = Test.objects(id=obj.id).exclude('a').explain() |         query_plan = Test.objects(id=obj.id).exclude('a').explain() | ||||||
|         self.assertEqual( |         if not IS_MONGODB_3: | ||||||
|             query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), |             self.assertFalse(query_plan['indexOnly']) | ||||||
|             'IDHACK' |         else: | ||||||
|         ) |             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK') | ||||||
|  |  | ||||||
|         query_plan = Test.objects(id=obj.id).only('id').explain() |         query_plan = Test.objects(id=obj.id).only('id').explain() | ||||||
|         self.assertEqual( |         if not IS_MONGODB_3: | ||||||
|             query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), |             self.assertTrue(query_plan['indexOnly']) | ||||||
|             'IDHACK' |         else: | ||||||
|         ) |             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK') | ||||||
|  |  | ||||||
|         query_plan = Test.objects(a=1).only('a').exclude('id').explain() |         query_plan = Test.objects(a=1).only('a').exclude('id').explain() | ||||||
|         self.assertEqual( |         if not IS_MONGODB_3: | ||||||
|             query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), |             self.assertTrue(query_plan['indexOnly']) | ||||||
|             'IXSCAN' |         else: | ||||||
|         ) |             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN') | ||||||
|         self.assertEqual( |             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'PROJECTION') | ||||||
|             query_plan.get('queryPlanner').get('winningPlan').get('stage'), |  | ||||||
|             'PROJECTION' |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         query_plan = Test.objects(a=1).explain() |         query_plan = Test.objects(a=1).explain() | ||||||
|         self.assertEqual( |         if not IS_MONGODB_3: | ||||||
|             query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), |             self.assertFalse(query_plan['indexOnly']) | ||||||
|             'IXSCAN' |         else: | ||||||
|         ) |             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN') | ||||||
|         self.assertEqual( |             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'FETCH') | ||||||
|             query_plan.get('queryPlanner').get('winningPlan').get('stage'), |  | ||||||
|             'FETCH' |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_index_on_id(self): |     def test_index_on_id(self): | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             meta = { |             meta = { | ||||||
|                 'indexes': [ |                 'indexes': [ | ||||||
| @@ -542,46 +541,40 @@ class IndexesTest(unittest.TestCase): | |||||||
|                                  [('categories', 1), ('_id', 1)]) |                                  [('categories', 1), ('_id', 1)]) | ||||||
|  |  | ||||||
|     def test_hint(self): |     def test_hint(self): | ||||||
|         TAGS_INDEX_NAME = 'tags_1' |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             tags = ListField(StringField()) |             tags = ListField(StringField()) | ||||||
|             meta = { |             meta = { | ||||||
|                 'indexes': [ |                 'indexes': [ | ||||||
|                     { |                     'tags', | ||||||
|                         'fields': ['tags'], |  | ||||||
|                         'name': TAGS_INDEX_NAME |  | ||||||
|                     } |  | ||||||
|                 ], |                 ], | ||||||
|             } |             } | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         for i in range(10): |         for i in range(0, 10): | ||||||
|             tags = [("tag %i" % n) for n in range(i % 2)] |             tags = [("tag %i" % n) for n in range(0, i % 2)] | ||||||
|             BlogPost(tags=tags).save() |             BlogPost(tags=tags).save() | ||||||
|  |  | ||||||
|         # Hinting by shape should work. |         self.assertEqual(BlogPost.objects.count(), 10) | ||||||
|  |         self.assertEqual(BlogPost.objects.hint().count(), 10) | ||||||
|  |  | ||||||
|  |         # PyMongo 3.0 bug only, works correctly with 2.X and 3.0.1+ versions | ||||||
|  |         if pymongo.version != '3.0': | ||||||
|             self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) |             self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) | ||||||
|  |  | ||||||
|         # Hinting by index name should work. |             self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) | ||||||
|         self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10) |  | ||||||
|  |  | ||||||
|         # Clearing the hint should work fine. |         if pymongo.version >= '2.8': | ||||||
|         self.assertEqual(BlogPost.objects.hint().count(), 10) |             self.assertEqual(BlogPost.objects.hint('tags').count(), 10) | ||||||
|         self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).hint().count(), 10) |         else: | ||||||
|  |             def invalid_index(): | ||||||
|  |                 BlogPost.objects.hint('tags').next() | ||||||
|  |             self.assertRaises(TypeError, invalid_index) | ||||||
|  |  | ||||||
|         # Hinting on a non-existent index shape should fail. |         def invalid_index_2(): | ||||||
|         with self.assertRaises(OperationFailure): |             return BlogPost.objects.hint(('tags', 1)).next() | ||||||
|             BlogPost.objects.hint([('ZZ', 1)]).count() |         self.assertRaises(Exception, invalid_index_2) | ||||||
|  |  | ||||||
|         # Hinting on a non-existent index name should fail. |  | ||||||
|         with self.assertRaises(OperationFailure): |  | ||||||
|             BlogPost.objects.hint('Bad Name').count() |  | ||||||
|  |  | ||||||
|         # Invalid shape argument (missing list brackets) should fail. |  | ||||||
|         with self.assertRaises(ValueError): |  | ||||||
|             BlogPost.objects.hint(('tags', 1)).count() |  | ||||||
|  |  | ||||||
|     def test_unique(self): |     def test_unique(self): | ||||||
|         """Ensure that uniqueness constraints are applied to fields. |         """Ensure that uniqueness constraints are applied to fields. | ||||||
| @@ -598,32 +591,10 @@ class IndexesTest(unittest.TestCase): | |||||||
|         # Two posts with the same slug is not allowed |         # Two posts with the same slug is not allowed | ||||||
|         post2 = BlogPost(title='test2', slug='test') |         post2 = BlogPost(title='test2', slug='test') | ||||||
|         self.assertRaises(NotUniqueError, post2.save) |         self.assertRaises(NotUniqueError, post2.save) | ||||||
|         self.assertRaises(NotUniqueError, BlogPost.objects.insert, post2) |  | ||||||
|  |  | ||||||
|         # Ensure backwards compatibility for errors |         # Ensure backwards compatibilty for errors | ||||||
|         self.assertRaises(OperationError, post2.save) |         self.assertRaises(OperationError, post2.save) | ||||||
|  |  | ||||||
|     def test_primary_key_unique_not_working(self): |  | ||||||
|         """Relates to #1445""" |  | ||||||
|         class Blog(Document): |  | ||||||
|             id = StringField(primary_key=True, unique=True) |  | ||||||
|  |  | ||||||
|         Blog.drop_collection() |  | ||||||
|  |  | ||||||
|         with self.assertRaises(OperationFailure) as ctx_err: |  | ||||||
|             Blog(id='garbage').save() |  | ||||||
|  |  | ||||||
|         # One of the errors below should happen. Which one depends on the |  | ||||||
|         # PyMongo version and dict order. |  | ||||||
|         err_msg = str(ctx_err.exception) |  | ||||||
|         self.assertTrue( |  | ||||||
|             any([ |  | ||||||
|                 "The field 'unique' is not valid for an _id index specification" in err_msg, |  | ||||||
|                 "The field 'background' is not valid for an _id index specification" in err_msg, |  | ||||||
|                 "The field 'sparse' is not valid for an _id index specification" in err_msg, |  | ||||||
|             ]) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_unique_with(self): |     def test_unique_with(self): | ||||||
|         """Ensure that unique_with constraints are applied to fields. |         """Ensure that unique_with constraints are applied to fields. | ||||||
|         """ |         """ | ||||||
| @@ -705,77 +676,6 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertRaises(NotUniqueError, post2.save) |         self.assertRaises(NotUniqueError, post2.save) | ||||||
|  |  | ||||||
|     def test_unique_embedded_document_in_sorted_list(self): |  | ||||||
|         """ |  | ||||||
|         Ensure that the uniqueness constraints are applied to fields in |  | ||||||
|         embedded documents, even when the embedded documents in a sorted list |  | ||||||
|         field. |  | ||||||
|         """ |  | ||||||
|         class SubDocument(EmbeddedDocument): |  | ||||||
|             year = IntField() |  | ||||||
|             slug = StringField(unique=True) |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             subs = SortedListField(EmbeddedDocumentField(SubDocument), |  | ||||||
|                                    ordering='year') |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost( |  | ||||||
|             title='test1', subs=[ |  | ||||||
|                 SubDocument(year=2009, slug='conflict'), |  | ||||||
|                 SubDocument(year=2009, slug='conflict') |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         # confirm that the unique index is created |  | ||||||
|         indexes = BlogPost._get_collection().index_information() |  | ||||||
|         self.assertIn('subs.slug_1', indexes) |  | ||||||
|         self.assertTrue(indexes['subs.slug_1']['unique']) |  | ||||||
|  |  | ||||||
|         post2 = BlogPost( |  | ||||||
|             title='test2', subs=[SubDocument(year=2014, slug='conflict')] |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         self.assertRaises(NotUniqueError, post2.save) |  | ||||||
|  |  | ||||||
|     def test_unique_embedded_document_in_embedded_document_list(self): |  | ||||||
|         """ |  | ||||||
|         Ensure that the uniqueness constraints are applied to fields in |  | ||||||
|         embedded documents, even when the embedded documents in an embedded |  | ||||||
|         list field. |  | ||||||
|         """ |  | ||||||
|         class SubDocument(EmbeddedDocument): |  | ||||||
|             year = IntField() |  | ||||||
|             slug = StringField(unique=True) |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             subs = EmbeddedDocumentListField(SubDocument) |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost( |  | ||||||
|             title='test1', subs=[ |  | ||||||
|                 SubDocument(year=2009, slug='conflict'), |  | ||||||
|                 SubDocument(year=2009, slug='conflict') |  | ||||||
|             ] |  | ||||||
|         ) |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         # confirm that the unique index is created |  | ||||||
|         indexes = BlogPost._get_collection().index_information() |  | ||||||
|         self.assertIn('subs.slug_1', indexes) |  | ||||||
|         self.assertTrue(indexes['subs.slug_1']['unique']) |  | ||||||
|  |  | ||||||
|         post2 = BlogPost( |  | ||||||
|             title='test2', subs=[SubDocument(year=2014, slug='conflict')] |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         self.assertRaises(NotUniqueError, post2.save) |  | ||||||
|  |  | ||||||
|     def test_unique_with_embedded_document_and_embedded_unique(self): |     def test_unique_with_embedded_document_and_embedded_unique(self): | ||||||
|         """Ensure that uniqueness constraints are applied to fields on |         """Ensure that uniqueness constraints are applied to fields on | ||||||
|         embedded documents.  And work with unique_with as well. |         embedded documents.  And work with unique_with as well. | ||||||
| @@ -827,18 +727,6 @@ class IndexesTest(unittest.TestCase): | |||||||
|         self.assertEqual(3600, |         self.assertEqual(3600, | ||||||
|                          info['created_1']['expireAfterSeconds']) |                          info['created_1']['expireAfterSeconds']) | ||||||
|  |  | ||||||
|     def test_index_drop_dups_silently_ignored(self): |  | ||||||
|         class Customer(Document): |  | ||||||
|             cust_id = IntField(unique=True, required=True) |  | ||||||
|             meta = { |  | ||||||
|                 'indexes': ['cust_id'], |  | ||||||
|                 'index_drop_dups': True, |  | ||||||
|                 'allow_inheritance': False, |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         Customer.drop_collection() |  | ||||||
|         Customer.objects.first() |  | ||||||
|  |  | ||||||
|     def test_unique_and_indexes(self): |     def test_unique_and_indexes(self): | ||||||
|         """Ensure that 'unique' constraints aren't overridden by |         """Ensure that 'unique' constraints aren't overridden by | ||||||
|         meta.indexes. |         meta.indexes. | ||||||
| @@ -855,23 +743,18 @@ class IndexesTest(unittest.TestCase): | |||||||
|         cust.save() |         cust.save() | ||||||
|  |  | ||||||
|         cust_dupe = Customer(cust_id=1) |         cust_dupe = Customer(cust_id=1) | ||||||
|         with self.assertRaises(NotUniqueError): |         try: | ||||||
|             cust_dupe.save() |             cust_dupe.save() | ||||||
|  |             raise AssertionError("We saved a dupe!") | ||||||
|  |         except NotUniqueError: | ||||||
|  |             pass | ||||||
|  |  | ||||||
|         cust = Customer(cust_id=2) |     def test_unique_and_primary(self): | ||||||
|         cust.save() |  | ||||||
|  |  | ||||||
|         # duplicate key on update |  | ||||||
|         with self.assertRaises(NotUniqueError): |  | ||||||
|             cust.cust_id = 1 |  | ||||||
|             cust.save() |  | ||||||
|  |  | ||||||
|     def test_primary_save_duplicate_update_existing_object(self): |  | ||||||
|         """If you set a field as primary, then unexpected behaviour can occur. |         """If you set a field as primary, then unexpected behaviour can occur. | ||||||
|         You won't create a duplicate but you will update an existing document. |         You won't create a duplicate but you will update an existing document. | ||||||
|         """ |         """ | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             name = StringField(primary_key=True) |             name = StringField(primary_key=True, unique=True) | ||||||
|             password = StringField() |             password = StringField() | ||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
| @@ -918,9 +801,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|             self.fail('Unbound local error at index + pk definition') |             self.fail('Unbound local error at index + pk definition') | ||||||
|  |  | ||||||
|         info = BlogPost.objects._collection.index_information() |         info = BlogPost.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         index_item = [('_id', 1), ('comments.comment_id', 1)] |         index_item = [('_id', 1), ('comments.comment_id', 1)] | ||||||
|         self.assertIn(index_item, info) |         self.assertTrue(index_item in info) | ||||||
|  |  | ||||||
|     def test_compound_key_embedded(self): |     def test_compound_key_embedded(self): | ||||||
|  |  | ||||||
| @@ -966,9 +849,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|             } |             } | ||||||
|  |  | ||||||
|         info = MyDoc.objects._collection.index_information() |         info = MyDoc.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in iteritems(info)] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         self.assertIn([('provider_ids.foo', 1)], info) |         self.assertTrue([('provider_ids.foo', 1)] in info) | ||||||
|         self.assertIn([('provider_ids.bar', 1)], info) |         self.assertTrue([('provider_ids.bar', 1)] in info) | ||||||
|  |  | ||||||
|     def test_sparse_compound_indexes(self): |     def test_sparse_compound_indexes(self): | ||||||
|  |  | ||||||
| @@ -984,6 +867,7 @@ class IndexesTest(unittest.TestCase): | |||||||
|                          info['provider_ids.foo_1_provider_ids.bar_1']['key']) |                          info['provider_ids.foo_1_provider_ids.bar_1']['key']) | ||||||
|         self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) |         self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) | ||||||
|  |  | ||||||
|  |     @needs_mongodb_v26 | ||||||
|     def test_text_indexes(self): |     def test_text_indexes(self): | ||||||
|         class Book(Document): |         class Book(Document): | ||||||
|             title = DictField() |             title = DictField() | ||||||
| @@ -992,9 +876,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|             } |             } | ||||||
|  |  | ||||||
|         indexes = Book.objects._collection.index_information() |         indexes = Book.objects._collection.index_information() | ||||||
|         self.assertIn("title_text", indexes) |         self.assertTrue("title_text" in indexes) | ||||||
|         key = indexes["title_text"]["key"] |         key = indexes["title_text"]["key"] | ||||||
|         self.assertIn(('_fts', 'text'), key) |         self.assertTrue(('_fts', 'text') in key) | ||||||
|  |  | ||||||
|     def test_hashed_indexes(self): |     def test_hashed_indexes(self): | ||||||
|  |  | ||||||
| @@ -1005,8 +889,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|             } |             } | ||||||
|  |  | ||||||
|         indexes = Book.objects._collection.index_information() |         indexes = Book.objects._collection.index_information() | ||||||
|         self.assertIn("ref_id_hashed", indexes) |         self.assertTrue("ref_id_hashed" in indexes) | ||||||
|         self.assertIn(('ref_id', 'hashed'), indexes["ref_id_hashed"]["key"]) |         self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"]) | ||||||
|  |  | ||||||
|     def test_indexes_after_database_drop(self): |     def test_indexes_after_database_drop(self): | ||||||
|         """ |         """ | ||||||
| @@ -1047,6 +931,7 @@ class IndexesTest(unittest.TestCase): | |||||||
|             # Drop the temporary database at the end |             # Drop the temporary database at the end | ||||||
|             connection.drop_database('tempdatabase') |             connection.drop_database('tempdatabase') | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_index_dont_send_cls_option(self): |     def test_index_dont_send_cls_option(self): | ||||||
|         """ |         """ | ||||||
|         Ensure that 'cls' option is not sent through ensureIndex. We shouldn't |         Ensure that 'cls' option is not sent through ensureIndex. We shouldn't | ||||||
| @@ -1128,7 +1013,7 @@ class IndexesTest(unittest.TestCase): | |||||||
|         TestDoc.ensure_indexes() |         TestDoc.ensure_indexes() | ||||||
|  |  | ||||||
|         index_info = TestDoc._get_collection().index_information() |         index_info = TestDoc._get_collection().index_information() | ||||||
|         self.assertIn('shard_1_1__cls_1_txt_1_1', index_info) |         self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|   | |||||||
| @@ -2,45 +2,30 @@ | |||||||
| import unittest | import unittest | ||||||
| import warnings | import warnings | ||||||
|  |  | ||||||
| from six import iteritems | from datetime import datetime | ||||||
|  |  | ||||||
| from mongoengine import (BooleanField, Document, EmbeddedDocument, |  | ||||||
|                          EmbeddedDocumentField, GenericReferenceField, |  | ||||||
|                          IntField, ReferenceField, StringField) |  | ||||||
| from mongoengine.pymongo_support import list_collection_names |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
| from tests.fixtures import Base | from tests.fixtures import Base | ||||||
|  |  | ||||||
|  | from mongoengine import Document, EmbeddedDocument, connect | ||||||
|  | from mongoengine.connection import get_db | ||||||
|  | from mongoengine.fields import (BooleanField, GenericReferenceField, | ||||||
|  |                                 IntField, StringField) | ||||||
|  |  | ||||||
| __all__ = ('InheritanceTest', ) | __all__ = ('InheritanceTest', ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class InheritanceTest(MongoDBTestCase): | class InheritanceTest(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def setUp(self): | ||||||
|  |         connect(db='mongoenginetest') | ||||||
|  |         self.db = get_db() | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         for collection in list_collection_names(self.db): |         for collection in self.db.collection_names(): | ||||||
|  |             if 'system.' in collection: | ||||||
|  |                 continue | ||||||
|             self.db.drop_collection(collection) |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|     def test_constructor_cls(self): |  | ||||||
|         # Ensures _cls is properly set during construction |  | ||||||
|         # and when object gets reloaded (prevent regression of #1950) |  | ||||||
|         class EmbedData(EmbeddedDocument): |  | ||||||
|             data = StringField() |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         class DataDoc(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             embed = EmbeddedDocumentField(EmbedData) |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         test_doc = DataDoc(name='test', embed=EmbedData(data='data')) |  | ||||||
|         self.assertEqual(test_doc._cls, 'DataDoc') |  | ||||||
|         self.assertEqual(test_doc.embed._cls, 'EmbedData') |  | ||||||
|         test_doc.save() |  | ||||||
|         saved_doc = DataDoc.objects.with_id(test_doc.id) |  | ||||||
|         self.assertEqual(test_doc._cls, saved_doc._cls) |  | ||||||
|         self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls) |  | ||||||
|         test_doc.delete() |  | ||||||
|  |  | ||||||
|     def test_superclasses(self): |     def test_superclasses(self): | ||||||
|         """Ensure that the correct list of superclasses is assembled. |         """Ensure that the correct list of superclasses is assembled. | ||||||
|         """ |         """ | ||||||
| @@ -273,10 +258,9 @@ class InheritanceTest(MongoDBTestCase): | |||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         # can't inherit because Animal didn't explicitly allow inheritance |         # can't inherit because Animal didn't explicitly allow inheritance | ||||||
|         with self.assertRaises(ValueError) as cm: |         with self.assertRaises(ValueError): | ||||||
|             class Dog(Animal): |             class Dog(Animal): | ||||||
|                 pass |                 pass | ||||||
|         self.assertIn("Document Animal may not be subclassed", str(cm.exception)) |  | ||||||
|  |  | ||||||
|         # Check that _cls etc aren't present on simple documents |         # Check that _cls etc aren't present on simple documents | ||||||
|         dog = Animal(name='dog').save() |         dog = Animal(name='dog').save() | ||||||
| @@ -284,7 +268,7 @@ class InheritanceTest(MongoDBTestCase): | |||||||
|  |  | ||||||
|         collection = self.db[Animal._get_collection_name()] |         collection = self.db[Animal._get_collection_name()] | ||||||
|         obj = collection.find_one() |         obj = collection.find_one() | ||||||
|         self.assertNotIn('_cls', obj) |         self.assertFalse('_cls' in obj) | ||||||
|  |  | ||||||
|     def test_cant_turn_off_inheritance_on_subclass(self): |     def test_cant_turn_off_inheritance_on_subclass(self): | ||||||
|         """Ensure if inheritance is on in a subclass you cant turn it off. |         """Ensure if inheritance is on in a subclass you cant turn it off. | ||||||
| @@ -293,10 +277,9 @@ class InheritanceTest(MongoDBTestCase): | |||||||
|             name = StringField() |             name = StringField() | ||||||
|             meta = {'allow_inheritance': True} |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|         with self.assertRaises(ValueError) as cm: |         with self.assertRaises(ValueError): | ||||||
|             class Mammal(Animal): |             class Mammal(Animal): | ||||||
|                 meta = {'allow_inheritance': False} |                 meta = {'allow_inheritance': False} | ||||||
|         self.assertEqual(str(cm.exception), 'Only direct subclasses of Document may set "allow_inheritance" to False') |  | ||||||
|  |  | ||||||
|     def test_allow_inheritance_abstract_document(self): |     def test_allow_inheritance_abstract_document(self): | ||||||
|         """Ensure that abstract documents can set inheritance rules and that |         """Ensure that abstract documents can set inheritance rules and that | ||||||
| @@ -309,48 +292,13 @@ class InheritanceTest(MongoDBTestCase): | |||||||
|         class Animal(FinalDocument): |         class Animal(FinalDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         with self.assertRaises(ValueError) as cm: |         with self.assertRaises(ValueError): | ||||||
|             class Mammal(Animal): |             class Mammal(Animal): | ||||||
|                 pass |                 pass | ||||||
|  |  | ||||||
|         # Check that _cls isn't present in simple documents |         # Check that _cls isn't present in simple documents | ||||||
|         doc = Animal(name='dog') |         doc = Animal(name='dog') | ||||||
|         self.assertNotIn('_cls', doc.to_mongo()) |         self.assertFalse('_cls' in doc.to_mongo()) | ||||||
|  |  | ||||||
|     def test_using_abstract_class_in_reference_field(self): |  | ||||||
|         # Ensures no regression of #1920 |  | ||||||
|         class AbstractHuman(Document): |  | ||||||
|             meta = {'abstract': True} |  | ||||||
|  |  | ||||||
|         class Dad(AbstractHuman): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Home(Document): |  | ||||||
|             dad = ReferenceField(AbstractHuman)  # Referencing the abstract class |  | ||||||
|             address = StringField() |  | ||||||
|  |  | ||||||
|         dad = Dad(name='5').save() |  | ||||||
|         Home(dad=dad, address='street').save() |  | ||||||
|  |  | ||||||
|         home = Home.objects.first() |  | ||||||
|         home.address = 'garbage' |  | ||||||
|         home.save()     # Was failing with ValidationError |  | ||||||
|  |  | ||||||
|     def test_abstract_class_referencing_self(self): |  | ||||||
|         # Ensures no regression of #1920 |  | ||||||
|         class Human(Document): |  | ||||||
|             meta = {'abstract': True} |  | ||||||
|             creator = ReferenceField('self', dbref=True) |  | ||||||
|  |  | ||||||
|         class User(Human): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         user = User(name='John').save() |  | ||||||
|         user2 = User(name='Foo', creator=user).save() |  | ||||||
|  |  | ||||||
|         user2 = User.objects.with_id(user2.id) |  | ||||||
|         user2.name = 'Bar' |  | ||||||
|         user2.save()    # Was failing with ValidationError |  | ||||||
|  |  | ||||||
|     def test_abstract_handle_ids_in_metaclass_properly(self): |     def test_abstract_handle_ids_in_metaclass_properly(self): | ||||||
|  |  | ||||||
| @@ -410,11 +358,11 @@ class InheritanceTest(MongoDBTestCase): | |||||||
|             meta = {'abstract': True, |             meta = {'abstract': True, | ||||||
|                     'allow_inheritance': False} |                     'allow_inheritance': False} | ||||||
|  |  | ||||||
|         city = City(continent='asia') |         bkk = City(continent='asia') | ||||||
|         self.assertEqual(None, city.pk) |         self.assertEqual(None, bkk.pk) | ||||||
|         # TODO: expected error? Shouldn't we create a new error type? |         # TODO: expected error? Shouldn't we create a new error type? | ||||||
|         with self.assertRaises(KeyError): |         with self.assertRaises(KeyError): | ||||||
|             setattr(city, 'pk', 1) |             setattr(bkk, 'pk', 1) | ||||||
|  |  | ||||||
|     def test_allow_inheritance_embedded_document(self): |     def test_allow_inheritance_embedded_document(self): | ||||||
|         """Ensure embedded documents respect inheritance.""" |         """Ensure embedded documents respect inheritance.""" | ||||||
| @@ -426,14 +374,14 @@ class InheritanceTest(MongoDBTestCase): | |||||||
|                 pass |                 pass | ||||||
|  |  | ||||||
|         doc = Comment(content='test') |         doc = Comment(content='test') | ||||||
|         self.assertNotIn('_cls', doc.to_mongo()) |         self.assertFalse('_cls' in doc.to_mongo()) | ||||||
|  |  | ||||||
|         class Comment(EmbeddedDocument): |         class Comment(EmbeddedDocument): | ||||||
|             content = StringField() |             content = StringField() | ||||||
|             meta = {'allow_inheritance': True} |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|         doc = Comment(content='test') |         doc = Comment(content='test') | ||||||
|         self.assertIn('_cls', doc.to_mongo()) |         self.assertTrue('_cls' in doc.to_mongo()) | ||||||
|  |  | ||||||
|     def test_document_inheritance(self): |     def test_document_inheritance(self): | ||||||
|         """Ensure mutliple inheritance of abstract documents |         """Ensure mutliple inheritance of abstract documents | ||||||
| @@ -482,12 +430,12 @@ class InheritanceTest(MongoDBTestCase): | |||||||
|             meta = {'abstract': True} |             meta = {'abstract': True} | ||||||
|         class Human(Mammal): pass |         class Human(Mammal): pass | ||||||
|  |  | ||||||
|         for k, v in iteritems(defaults): |         for k, v in defaults.iteritems(): | ||||||
|             for cls in [Animal, Fish, Guppy]: |             for cls in [Animal, Fish, Guppy]: | ||||||
|                 self.assertEqual(cls._meta[k], v) |                 self.assertEqual(cls._meta[k], v) | ||||||
|  |  | ||||||
|         self.assertNotIn('collection', Animal._meta) |         self.assertFalse('collection' in Animal._meta) | ||||||
|         self.assertNotIn('collection', Mammal._meta) |         self.assertFalse('collection' in Mammal._meta) | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._get_collection_name(), None) |         self.assertEqual(Animal._get_collection_name(), None) | ||||||
|         self.assertEqual(Mammal._get_collection_name(), None) |         self.assertEqual(Mammal._get_collection_name(), None) | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -32,12 +32,12 @@ class TestJson(unittest.TestCase): | |||||||
|             string = StringField(db_field='s') |             string = StringField(db_field='s') | ||||||
|             embedded = EmbeddedDocumentField(Embedded, db_field='e') |             embedded = EmbeddedDocumentField(Embedded, db_field='e') | ||||||
|  |  | ||||||
|         doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello")) |         doc = Doc( string="Hello", embedded=Embedded(string="Inner Hello")) | ||||||
|         doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':')) |         doc_json = doc.to_json(sort_keys=True, use_db_field=False,separators=(',', ':')) | ||||||
|  |  | ||||||
|         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" |         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" | ||||||
|  |  | ||||||
|         self.assertEqual(doc_json, expected_json) |         self.assertEqual( doc_json, expected_json) | ||||||
|  |  | ||||||
|     def test_json_simple(self): |     def test_json_simple(self): | ||||||
|  |  | ||||||
| @@ -61,6 +61,10 @@ class TestJson(unittest.TestCase): | |||||||
|         self.assertEqual(doc, Doc.from_json(doc.to_json())) |         self.assertEqual(doc, Doc.from_json(doc.to_json())) | ||||||
|  |  | ||||||
|     def test_json_complex(self): |     def test_json_complex(self): | ||||||
|  |  | ||||||
|  |         if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3: | ||||||
|  |             raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs") | ||||||
|  |  | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|             pass |             pass | ||||||
|  |  | ||||||
|   | |||||||
| @@ -20,16 +20,16 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         # 1st level error schema |         # 1st level error schema | ||||||
|         error.errors = {'1st': ValidationError('bad 1st'), } |         error.errors = {'1st': ValidationError('bad 1st'), } | ||||||
|         self.assertIn('1st', error.to_dict()) |         self.assertTrue('1st' in error.to_dict()) | ||||||
|         self.assertEqual(error.to_dict()['1st'], 'bad 1st') |         self.assertEqual(error.to_dict()['1st'], 'bad 1st') | ||||||
|  |  | ||||||
|         # 2nd level error schema |         # 2nd level error schema | ||||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ |         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||||
|             '2nd': ValidationError('bad 2nd'), |             '2nd': ValidationError('bad 2nd'), | ||||||
|         })} |         })} | ||||||
|         self.assertIn('1st', error.to_dict()) |         self.assertTrue('1st' in error.to_dict()) | ||||||
|         self.assertIsInstance(error.to_dict()['1st'], dict) |         self.assertTrue(isinstance(error.to_dict()['1st'], dict)) | ||||||
|         self.assertIn('2nd', error.to_dict()['1st']) |         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||||
|         self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') |         self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') | ||||||
|  |  | ||||||
|         # moar levels |         # moar levels | ||||||
| @@ -40,10 +40,10 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|                 }), |                 }), | ||||||
|             }), |             }), | ||||||
|         })} |         })} | ||||||
|         self.assertIn('1st', error.to_dict()) |         self.assertTrue('1st' in error.to_dict()) | ||||||
|         self.assertIn('2nd', error.to_dict()['1st']) |         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||||
|         self.assertIn('3rd', error.to_dict()['1st']['2nd']) |         self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) | ||||||
|         self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd']) |         self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) | ||||||
|         self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], |         self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], | ||||||
|                          'Inception') |                          'Inception') | ||||||
|  |  | ||||||
| @@ -58,7 +58,7 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         try: |         try: | ||||||
|             User().validate() |             User().validate() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertIn("User:None", e.message) |             self.assertTrue("User:None" in e.message) | ||||||
|             self.assertEqual(e.to_dict(), { |             self.assertEqual(e.to_dict(), { | ||||||
|                 'username': 'Field is required', |                 'username': 'Field is required', | ||||||
|                 'name': 'Field is required'}) |                 'name': 'Field is required'}) | ||||||
| @@ -68,7 +68,7 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         try: |         try: | ||||||
|             user.save() |             user.save() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertIn("User:RossC0", e.message) |             self.assertTrue("User:RossC0" in e.message) | ||||||
|             self.assertEqual(e.to_dict(), { |             self.assertEqual(e.to_dict(), { | ||||||
|                 'name': 'Field is required'}) |                 'name': 'Field is required'}) | ||||||
|  |  | ||||||
| @@ -116,7 +116,7 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         try: |         try: | ||||||
|             Doc(id="bad").validate() |             Doc(id="bad").validate() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertIn("SubDoc:None", e.message) |             self.assertTrue("SubDoc:None" in e.message) | ||||||
|             self.assertEqual(e.to_dict(), { |             self.assertEqual(e.to_dict(), { | ||||||
|                 "e": {'val': 'OK could not be converted to int'}}) |                 "e": {'val': 'OK could not be converted to int'}}) | ||||||
|  |  | ||||||
| @@ -127,14 +127,14 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         keys = doc._data.keys() |         keys = doc._data.keys() | ||||||
|         self.assertEqual(2, len(keys)) |         self.assertEqual(2, len(keys)) | ||||||
|         self.assertIn('e', keys) |         self.assertTrue('e' in keys) | ||||||
|         self.assertIn('id', keys) |         self.assertTrue('id' in keys) | ||||||
|  |  | ||||||
|         doc.e.val = "OK" |         doc.e.val = "OK" | ||||||
|         try: |         try: | ||||||
|             doc.save() |             doc.save() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertIn("Doc:test", e.message) |             self.assertTrue("Doc:test" in e.message) | ||||||
|             self.assertEqual(e.to_dict(), { |             self.assertEqual(e.to_dict(), { | ||||||
|                 "e": {'val': 'OK could not be converted to int'}}) |                 "e": {'val': 'OK could not be converted to int'}}) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,3 +1,3 @@ | |||||||
| from .fields import * | from fields import * | ||||||
| from .file_tests import * | from file_tests import * | ||||||
| from .geo import * | from geo import * | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -24,16 +24,6 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | |||||||
| TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') | TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_file(path): |  | ||||||
|     """Use a BytesIO instead of a file to allow |  | ||||||
|     to have a one-liner and avoid that the file remains opened""" |  | ||||||
|     bytes_io = StringIO() |  | ||||||
|     with open(path, 'rb') as f: |  | ||||||
|         bytes_io.write(f.read()) |  | ||||||
|     bytes_io.seek(0) |  | ||||||
|     return bytes_io |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class FileTest(MongoDBTestCase): | class FileTest(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
| @@ -63,8 +53,8 @@ class FileTest(MongoDBTestCase): | |||||||
|         putfile.save() |         putfile.save() | ||||||
|  |  | ||||||
|         result = PutFile.objects.first() |         result = PutFile.objects.first() | ||||||
|         self.assertEqual(putfile, result) |         self.assertTrue(putfile == result) | ||||||
|         self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello (%s)>" % result.the_file.grid_id) |         self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello>") | ||||||
|         self.assertEqual(result.the_file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.the_file.delete()  # Remove file from GridFS |         result.the_file.delete()  # Remove file from GridFS | ||||||
| @@ -81,7 +71,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         putfile.save() |         putfile.save() | ||||||
|  |  | ||||||
|         result = PutFile.objects.first() |         result = PutFile.objects.first() | ||||||
|         self.assertEqual(putfile, result) |         self.assertTrue(putfile == result) | ||||||
|         self.assertEqual(result.the_file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.the_file.delete() |         result.the_file.delete() | ||||||
| @@ -106,7 +96,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         streamfile.save() |         streamfile.save() | ||||||
|  |  | ||||||
|         result = StreamFile.objects.first() |         result = StreamFile.objects.first() | ||||||
|         self.assertEqual(streamfile, result) |         self.assertTrue(streamfile == result) | ||||||
|         self.assertEqual(result.the_file.read(), text + more_text) |         self.assertEqual(result.the_file.read(), text + more_text) | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.the_file.seek(0) |         result.the_file.seek(0) | ||||||
| @@ -142,7 +132,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         streamfile.save() |         streamfile.save() | ||||||
|  |  | ||||||
|         result = StreamFile.objects.first() |         result = StreamFile.objects.first() | ||||||
|         self.assertEqual(streamfile, result) |         self.assertTrue(streamfile == result) | ||||||
|         self.assertEqual(result.the_file.read(), text + more_text) |         self.assertEqual(result.the_file.read(), text + more_text) | ||||||
|         # self.assertEqual(result.the_file.content_type, content_type) |         # self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.the_file.seek(0) |         result.the_file.seek(0) | ||||||
| @@ -171,7 +161,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         setfile.save() |         setfile.save() | ||||||
|  |  | ||||||
|         result = SetFile.objects.first() |         result = SetFile.objects.first() | ||||||
|         self.assertEqual(setfile, result) |         self.assertTrue(setfile == result) | ||||||
|         self.assertEqual(result.the_file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|  |  | ||||||
|         # Try replacing file with new one |         # Try replacing file with new one | ||||||
| @@ -179,7 +169,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         result.save() |         result.save() | ||||||
|  |  | ||||||
|         result = SetFile.objects.first() |         result = SetFile.objects.first() | ||||||
|         self.assertEqual(setfile, result) |         self.assertTrue(setfile == result) | ||||||
|         self.assertEqual(result.the_file.read(), more_text) |         self.assertEqual(result.the_file.read(), more_text) | ||||||
|         result.the_file.delete() |         result.the_file.delete() | ||||||
|  |  | ||||||
| @@ -241,8 +231,8 @@ class FileTest(MongoDBTestCase): | |||||||
|         test_file_dupe = TestFile() |         test_file_dupe = TestFile() | ||||||
|         data = test_file_dupe.the_file.read()  # Should be None |         data = test_file_dupe.the_file.read()  # Should be None | ||||||
|  |  | ||||||
|         self.assertNotEqual(test_file.name, test_file_dupe.name) |         self.assertTrue(test_file.name != test_file_dupe.name) | ||||||
|         self.assertNotEqual(test_file.the_file.read(), data) |         self.assertTrue(test_file.the_file.read() != data) | ||||||
|  |  | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
| @@ -257,8 +247,8 @@ class FileTest(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         marmot = Animal(genus='Marmota', family='Sciuridae') |         marmot = Animal(genus='Marmota', family='Sciuridae') | ||||||
|  |  | ||||||
|         marmot_photo_content = get_file(TEST_IMAGE_PATH)  # Retrieve a photo from disk |         marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk | ||||||
|         marmot.photo.put(marmot_photo_content, content_type='image/jpeg', foo='bar') |         marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar') | ||||||
|         marmot.photo.close() |         marmot.photo.close() | ||||||
|         marmot.save() |         marmot.save() | ||||||
|  |  | ||||||
| @@ -271,11 +261,11 @@ class FileTest(MongoDBTestCase): | |||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
|         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() |         test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save() | ||||||
|         self.assertEqual(test_file.the_file.get().length, 8313) |         self.assertEqual(test_file.the_file.get().length, 8313) | ||||||
|  |  | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         test_file.the_file = get_file(TEST_IMAGE2_PATH) |         test_file.the_file = open(TEST_IMAGE2_PATH, 'rb') | ||||||
|         test_file.save() |         test_file.save() | ||||||
|         self.assertEqual(test_file.the_file.get().length, 4971) |         self.assertEqual(test_file.the_file.get().length, 4971) | ||||||
|  |  | ||||||
| @@ -301,7 +291,7 @@ class FileTest(MongoDBTestCase): | |||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
|  |  | ||||||
|         test_file = TestFile() |         test_file = TestFile() | ||||||
|         self.assertNotIn(test_file.the_file, [{"test": 1}]) |         self.assertFalse(test_file.the_file in [{"test": 1}]) | ||||||
|  |  | ||||||
|     def test_file_disk_space(self): |     def test_file_disk_space(self): | ||||||
|         """ Test disk space usage when we delete/replace a file """ |         """ Test disk space usage when we delete/replace a file """ | ||||||
| @@ -320,16 +310,16 @@ class FileTest(MongoDBTestCase): | |||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 1) |         self.assertEquals(len(list(files)), 1) | ||||||
|         self.assertEqual(len(list(chunks)), 1) |         self.assertEquals(len(list(chunks)), 1) | ||||||
|  |  | ||||||
|         # Deleting the docoument should delete the files |         # Deleting the docoument should delete the files | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 0) |         self.assertEquals(len(list(files)), 0) | ||||||
|         self.assertEqual(len(list(chunks)), 0) |         self.assertEquals(len(list(chunks)), 0) | ||||||
|  |  | ||||||
|         # Test case where we don't store a file in the first place |         # Test case where we don't store a file in the first place | ||||||
|         testfile = TestFile() |         testfile = TestFile() | ||||||
| @@ -337,15 +327,15 @@ class FileTest(MongoDBTestCase): | |||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 0) |         self.assertEquals(len(list(files)), 0) | ||||||
|         self.assertEqual(len(list(chunks)), 0) |         self.assertEquals(len(list(chunks)), 0) | ||||||
|  |  | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 0) |         self.assertEquals(len(list(files)), 0) | ||||||
|         self.assertEqual(len(list(chunks)), 0) |         self.assertEquals(len(list(chunks)), 0) | ||||||
|  |  | ||||||
|         # Test case where we overwrite the file |         # Test case where we overwrite the file | ||||||
|         testfile = TestFile() |         testfile = TestFile() | ||||||
| @@ -358,15 +348,15 @@ class FileTest(MongoDBTestCase): | |||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 1) |         self.assertEquals(len(list(files)), 1) | ||||||
|         self.assertEqual(len(list(chunks)), 1) |         self.assertEquals(len(list(chunks)), 1) | ||||||
|  |  | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 0) |         self.assertEquals(len(list(files)), 0) | ||||||
|         self.assertEqual(len(list(chunks)), 0) |         self.assertEquals(len(list(chunks)), 0) | ||||||
|  |  | ||||||
|     def test_image_field(self): |     def test_image_field(self): | ||||||
|         if not HAS_PIL: |         if not HAS_PIL: | ||||||
| @@ -389,7 +379,7 @@ class FileTest(MongoDBTestCase): | |||||||
|                 self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) |                 self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) |         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| @@ -410,11 +400,11 @@ class FileTest(MongoDBTestCase): | |||||||
|             the_file = ImageField() |             the_file = ImageField() | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
|         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() |         test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save() | ||||||
|         self.assertEqual(test_file.the_file.size, (371, 76)) |         self.assertEqual(test_file.the_file.size, (371, 76)) | ||||||
|  |  | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         test_file.the_file = get_file(TEST_IMAGE2_PATH) |         test_file.the_file = open(TEST_IMAGE2_PATH, 'rb') | ||||||
|         test_file.save() |         test_file.save() | ||||||
|         self.assertEqual(test_file.the_file.size, (45, 101)) |         self.assertEqual(test_file.the_file.size, (45, 101)) | ||||||
|  |  | ||||||
| @@ -428,7 +418,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) |         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| @@ -451,7 +441,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) |         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| @@ -474,7 +464,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) |         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| @@ -552,8 +542,8 @@ class FileTest(MongoDBTestCase): | |||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image1.put(get_file(TEST_IMAGE_PATH)) |         t.image1.put(open(TEST_IMAGE_PATH, 'rb')) | ||||||
|         t.image2.put(get_file(TEST_IMAGE2_PATH)) |         t.image2.put(open(TEST_IMAGE2_PATH, 'rb')) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         test = TestImage.objects.first() |         test = TestImage.objects.first() | ||||||
| @@ -573,10 +563,12 @@ class FileTest(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         marmot = Animal(genus='Marmota', family='Sciuridae') |         marmot = Animal(genus='Marmota', family='Sciuridae') | ||||||
|  |  | ||||||
|         with open(TEST_IMAGE_PATH, 'rb') as marmot_photo:   # Retrieve a photo from disk |         marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk | ||||||
|  |  | ||||||
|         photos_field = marmot._fields['photos'].field |         photos_field = marmot._fields['photos'].field | ||||||
|         new_proxy = photos_field.get_proxy_obj('photos', marmot) |         new_proxy = photos_field.get_proxy_obj('photos', marmot) | ||||||
|         new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') |         new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') | ||||||
|  |         marmot_photo.close() | ||||||
|  |  | ||||||
|         marmot.photos.append(new_proxy) |         marmot.photos.append(new_proxy) | ||||||
|         marmot.save() |         marmot.save() | ||||||
| @@ -586,6 +578,5 @@ class FileTest(MongoDBTestCase): | |||||||
|         self.assertEqual(marmot.photos[0].foo, 'bar') |         self.assertEqual(marmot.photos[0].foo, 'bar') | ||||||
|         self.assertEqual(marmot.photos[0].get().length, 8313) |         self.assertEqual(marmot.photos[0].get().length, 8313) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -40,11 +40,6 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             expected = "Both values (%s) in point must be float or int" % repr(coord) |             expected = "Both values (%s) in point must be float or int" % repr(coord) | ||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
|  |  | ||||||
|         invalid_coords = [21, 4, 'a'] |  | ||||||
|         for coord in invalid_coords: |  | ||||||
|             expected = "GeoPointField can only accept tuples or lists of (x, y)" |  | ||||||
|             self._test_for_expected_error(Location, coord, expected) |  | ||||||
|  |  | ||||||
|     def test_point_validation(self): |     def test_point_validation(self): | ||||||
|         class Location(Document): |         class Location(Document): | ||||||
|             loc = PointField() |             loc = PointField() | ||||||
| @@ -303,9 +298,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             polygon = PolygonField() |             polygon = PolygonField() | ||||||
|  |  | ||||||
|         geo_indicies = Event._geo_indices() |         geo_indicies = Event._geo_indices() | ||||||
|         self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies) |         self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies) | ||||||
|         self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies) |         self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies) | ||||||
|         self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies) |         self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies) | ||||||
|  |  | ||||||
|     def test_indexes_2dsphere_embedded(self): |     def test_indexes_2dsphere_embedded(self): | ||||||
|         """Ensure that indexes are created automatically for GeoPointFields. |         """Ensure that indexes are created automatically for GeoPointFields. | ||||||
| @@ -321,9 +316,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             venue = EmbeddedDocumentField(Venue) |             venue = EmbeddedDocumentField(Venue) | ||||||
|  |  | ||||||
|         geo_indicies = Event._geo_indices() |         geo_indicies = Event._geo_indices() | ||||||
|         self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies) |         self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies) | ||||||
|         self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies) |         self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies) | ||||||
|         self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies) |         self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies) | ||||||
|  |  | ||||||
|     def test_geo_indexes_recursion(self): |     def test_geo_indexes_recursion(self): | ||||||
|  |  | ||||||
| @@ -340,9 +335,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Parent(name='Berlin').save() |         Parent(name='Berlin').save() | ||||||
|         info = Parent._get_collection().index_information() |         info = Parent._get_collection().index_information() | ||||||
|         self.assertNotIn('location_2d', info) |         self.assertFalse('location_2d' in info) | ||||||
|         info = Location._get_collection().index_information() |         info = Location._get_collection().index_information() | ||||||
|         self.assertIn('location_2d', info) |         self.assertTrue('location_2d' in info) | ||||||
|  |  | ||||||
|         self.assertEqual(len(Parent._geo_indices()), 0) |         self.assertEqual(len(Parent._geo_indices()), 0) | ||||||
|         self.assertEqual(len(Location._geo_indices()), 1) |         self.assertEqual(len(Location._geo_indices()), 1) | ||||||
|   | |||||||
| @@ -1,143 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import uuid |  | ||||||
|  |  | ||||||
| from nose.plugins.skip import SkipTest |  | ||||||
| import six |  | ||||||
|  |  | ||||||
| from bson import Binary |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
| BIN_VALUE = six.b('\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestBinaryField(MongoDBTestCase): |  | ||||||
|     def test_binary_fields(self): |  | ||||||
|         """Ensure that binary fields can be stored and retrieved. |  | ||||||
|         """ |  | ||||||
|         class Attachment(Document): |  | ||||||
|             content_type = StringField() |  | ||||||
|             blob = BinaryField() |  | ||||||
|  |  | ||||||
|         BLOB = six.b('\xe6\x00\xc4\xff\x07') |  | ||||||
|         MIME_TYPE = 'application/octet-stream' |  | ||||||
|  |  | ||||||
|         Attachment.drop_collection() |  | ||||||
|  |  | ||||||
|         attachment = Attachment(content_type=MIME_TYPE, blob=BLOB) |  | ||||||
|         attachment.save() |  | ||||||
|  |  | ||||||
|         attachment_1 = Attachment.objects().first() |  | ||||||
|         self.assertEqual(MIME_TYPE, attachment_1.content_type) |  | ||||||
|         self.assertEqual(BLOB, six.binary_type(attachment_1.blob)) |  | ||||||
|  |  | ||||||
|     def test_validation_succeeds(self): |  | ||||||
|         """Ensure that valid values can be assigned to binary fields. |  | ||||||
|         """ |  | ||||||
|         class AttachmentRequired(Document): |  | ||||||
|             blob = BinaryField(required=True) |  | ||||||
|  |  | ||||||
|         class AttachmentSizeLimit(Document): |  | ||||||
|             blob = BinaryField(max_bytes=4) |  | ||||||
|  |  | ||||||
|         attachment_required = AttachmentRequired() |  | ||||||
|         self.assertRaises(ValidationError, attachment_required.validate) |  | ||||||
|         attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07')) |  | ||||||
|         attachment_required.validate() |  | ||||||
|  |  | ||||||
|         _5_BYTES = six.b('\xe6\x00\xc4\xff\x07') |  | ||||||
|         _4_BYTES = six.b('\xe6\x00\xc4\xff') |  | ||||||
|         self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate) |  | ||||||
|         AttachmentSizeLimit(blob=_4_BYTES).validate() |  | ||||||
|  |  | ||||||
|     def test_validation_fails(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to binary fields.""" |  | ||||||
|  |  | ||||||
|         class Attachment(Document): |  | ||||||
|             blob = BinaryField() |  | ||||||
|  |  | ||||||
|         for invalid_data in (2, u'Im_a_unicode', ['some_str']): |  | ||||||
|             self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate) |  | ||||||
|  |  | ||||||
|     def test__primary(self): |  | ||||||
|         class Attachment(Document): |  | ||||||
|             id = BinaryField(primary_key=True) |  | ||||||
|  |  | ||||||
|         Attachment.drop_collection() |  | ||||||
|         binary_id = uuid.uuid4().bytes |  | ||||||
|         att = Attachment(id=binary_id).save() |  | ||||||
|         self.assertEqual(1, Attachment.objects.count()) |  | ||||||
|         self.assertEqual(1, Attachment.objects.filter(id=att.id).count()) |  | ||||||
|         att.delete() |  | ||||||
|         self.assertEqual(0, Attachment.objects.count()) |  | ||||||
|  |  | ||||||
|     def test_primary_filter_by_binary_pk_as_str(self): |  | ||||||
|         raise SkipTest("Querying by id as string is not currently supported") |  | ||||||
|  |  | ||||||
|         class Attachment(Document): |  | ||||||
|             id = BinaryField(primary_key=True) |  | ||||||
|  |  | ||||||
|         Attachment.drop_collection() |  | ||||||
|         binary_id = uuid.uuid4().bytes |  | ||||||
|         att = Attachment(id=binary_id).save() |  | ||||||
|         self.assertEqual(1, Attachment.objects.filter(id=binary_id).count()) |  | ||||||
|         att.delete() |  | ||||||
|         self.assertEqual(0, Attachment.objects.count()) |  | ||||||
|  |  | ||||||
|     def test_match_querying_with_bytes(self): |  | ||||||
|         class MyDocument(Document): |  | ||||||
|             bin_field = BinaryField() |  | ||||||
|  |  | ||||||
|         MyDocument.drop_collection() |  | ||||||
|  |  | ||||||
|         doc = MyDocument(bin_field=BIN_VALUE).save() |  | ||||||
|         matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first() |  | ||||||
|         self.assertEqual(matched_doc.id, doc.id) |  | ||||||
|  |  | ||||||
|     def test_match_querying_with_binary(self): |  | ||||||
|         class MyDocument(Document): |  | ||||||
|             bin_field = BinaryField() |  | ||||||
|  |  | ||||||
|         MyDocument.drop_collection() |  | ||||||
|  |  | ||||||
|         doc = MyDocument(bin_field=BIN_VALUE).save() |  | ||||||
|  |  | ||||||
|         matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first() |  | ||||||
|         self.assertEqual(matched_doc.id, doc.id) |  | ||||||
|  |  | ||||||
|     def test_modify_operation__set(self): |  | ||||||
|         """Ensures no regression of bug #1127""" |  | ||||||
|         class MyDocument(Document): |  | ||||||
|             some_field = StringField() |  | ||||||
|             bin_field = BinaryField() |  | ||||||
|  |  | ||||||
|         MyDocument.drop_collection() |  | ||||||
|  |  | ||||||
|         doc = MyDocument.objects(some_field='test').modify( |  | ||||||
|             upsert=True, new=True, |  | ||||||
|             set__bin_field=BIN_VALUE |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(doc.some_field, 'test') |  | ||||||
|         if six.PY3: |  | ||||||
|             self.assertEqual(doc.bin_field, BIN_VALUE) |  | ||||||
|         else: |  | ||||||
|             self.assertEqual(doc.bin_field, Binary(BIN_VALUE)) |  | ||||||
|  |  | ||||||
|     def test_update_one(self): |  | ||||||
|         """Ensures no regression of bug #1127""" |  | ||||||
|         class MyDocument(Document): |  | ||||||
|             bin_field = BinaryField() |  | ||||||
|  |  | ||||||
|         MyDocument.drop_collection() |  | ||||||
|  |  | ||||||
|         bin_data = six.b('\xe6\x00\xc4\xff\x07') |  | ||||||
|         doc = MyDocument(bin_field=bin_data).save() |  | ||||||
|  |  | ||||||
|         n_updated = MyDocument.objects(bin_field=bin_data).update_one(bin_field=BIN_VALUE) |  | ||||||
|         self.assertEqual(n_updated, 1) |  | ||||||
|         fetched = MyDocument.objects.with_id(doc.id) |  | ||||||
|         if six.PY3: |  | ||||||
|             self.assertEqual(fetched.bin_field, BIN_VALUE) |  | ||||||
|         else: |  | ||||||
|             self.assertEqual(fetched.bin_field, Binary(BIN_VALUE)) |  | ||||||
| @@ -1,49 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase, get_as_pymongo |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestBooleanField(MongoDBTestCase): |  | ||||||
|     def test_storage(self): |  | ||||||
|         class Person(Document): |  | ||||||
|             admin = BooleanField() |  | ||||||
|  |  | ||||||
|         person = Person(admin=True) |  | ||||||
|         person.save() |  | ||||||
|         self.assertEqual( |  | ||||||
|             get_as_pymongo(person), |  | ||||||
|             {'_id': person.id, |  | ||||||
|              'admin': True}) |  | ||||||
|  |  | ||||||
|     def test_validation(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to boolean |  | ||||||
|         fields. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             admin = BooleanField() |  | ||||||
|  |  | ||||||
|         person = Person() |  | ||||||
|         person.admin = True |  | ||||||
|         person.validate() |  | ||||||
|  |  | ||||||
|         person.admin = 2 |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         person.admin = 'Yes' |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         person.admin = 'False' |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|     def test_weirdness_constructor(self): |  | ||||||
|         """When attribute is set in contructor, it gets cast into a bool |  | ||||||
|         which causes some weird behavior. We dont necessarily want to maintain this behavior |  | ||||||
|         but its a known issue |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             admin = BooleanField() |  | ||||||
|  |  | ||||||
|         new_person = Person(admin='False') |  | ||||||
|         self.assertTrue(new_person.admin) |  | ||||||
|  |  | ||||||
|         new_person = Person(admin='0') |  | ||||||
|         self.assertTrue(new_person.admin) |  | ||||||
| @@ -1,446 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from decimal import Decimal |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestCachedReferenceField(MongoDBTestCase): |  | ||||||
|  |  | ||||||
|     def test_get_and_save(self): |  | ||||||
|         """ |  | ||||||
|         Tests #1047: CachedReferenceField creates DBRefs on to_python, |  | ||||||
|         but can't save them on to_mongo. |  | ||||||
|         """ |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             tag = StringField() |  | ||||||
|  |  | ||||||
|         class Ocorrence(Document): |  | ||||||
|             person = StringField() |  | ||||||
|             animal = CachedReferenceField(Animal) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|         Ocorrence.drop_collection() |  | ||||||
|  |  | ||||||
|         Ocorrence(person="testte", |  | ||||||
|                   animal=Animal(name="Leopard", tag="heavy").save()).save() |  | ||||||
|         p = Ocorrence.objects.get() |  | ||||||
|         p.person = 'new_testte' |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|     def test_general_things(self): |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             tag = StringField() |  | ||||||
|  |  | ||||||
|         class Ocorrence(Document): |  | ||||||
|             person = StringField() |  | ||||||
|             animal = CachedReferenceField( |  | ||||||
|                 Animal, fields=['tag']) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|         Ocorrence.drop_collection() |  | ||||||
|  |  | ||||||
|         a = Animal(name="Leopard", tag="heavy") |  | ||||||
|         a.save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal]) |  | ||||||
|         o = Ocorrence(person="teste", animal=a) |  | ||||||
|         o.save() |  | ||||||
|  |  | ||||||
|         p = Ocorrence(person="Wilson") |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(Ocorrence.objects(animal=None).count(), 1) |  | ||||||
|  |  | ||||||
|         self.assertEqual( |  | ||||||
|             a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk}) |  | ||||||
|  |  | ||||||
|         self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') |  | ||||||
|  |  | ||||||
|         # counts |  | ||||||
|         Ocorrence(person="teste 2").save() |  | ||||||
|         Ocorrence(person="teste 3").save() |  | ||||||
|  |  | ||||||
|         count = Ocorrence.objects(animal__tag='heavy').count() |  | ||||||
|         self.assertEqual(count, 1) |  | ||||||
|  |  | ||||||
|         ocorrence = Ocorrence.objects(animal__tag='heavy').first() |  | ||||||
|         self.assertEqual(ocorrence.person, "teste") |  | ||||||
|         self.assertIsInstance(ocorrence.animal, Animal) |  | ||||||
|  |  | ||||||
|     def test_with_decimal(self): |  | ||||||
|         class PersonAuto(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             salary = DecimalField() |  | ||||||
|  |  | ||||||
|         class SocialTest(Document): |  | ||||||
|             group = StringField() |  | ||||||
|             person = CachedReferenceField( |  | ||||||
|                 PersonAuto, |  | ||||||
|                 fields=('salary',)) |  | ||||||
|  |  | ||||||
|         PersonAuto.drop_collection() |  | ||||||
|         SocialTest.drop_collection() |  | ||||||
|  |  | ||||||
|         p = PersonAuto(name="Alberto", salary=Decimal('7000.00')) |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         s = SocialTest(group="dev", person=p) |  | ||||||
|         s.save() |  | ||||||
|  |  | ||||||
|         self.assertEqual( |  | ||||||
|             SocialTest.objects._collection.find_one({'person.salary': 7000.00}), { |  | ||||||
|                 '_id': s.pk, |  | ||||||
|                 'group': s.group, |  | ||||||
|                 'person': { |  | ||||||
|                     '_id': p.pk, |  | ||||||
|                     'salary': 7000.00 |  | ||||||
|                 } |  | ||||||
|             }) |  | ||||||
|  |  | ||||||
|     def test_cached_reference_field_reference(self): |  | ||||||
|         class Group(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             group = ReferenceField(Group) |  | ||||||
|  |  | ||||||
|         class SocialData(Document): |  | ||||||
|             obs = StringField() |  | ||||||
|             tags = ListField( |  | ||||||
|                 StringField()) |  | ||||||
|             person = CachedReferenceField( |  | ||||||
|                 Person, |  | ||||||
|                 fields=('group',)) |  | ||||||
|  |  | ||||||
|         Group.drop_collection() |  | ||||||
|         Person.drop_collection() |  | ||||||
|         SocialData.drop_collection() |  | ||||||
|  |  | ||||||
|         g1 = Group(name='dev') |  | ||||||
|         g1.save() |  | ||||||
|  |  | ||||||
|         g2 = Group(name="designers") |  | ||||||
|         g2.save() |  | ||||||
|  |  | ||||||
|         p1 = Person(name="Alberto", group=g1) |  | ||||||
|         p1.save() |  | ||||||
|  |  | ||||||
|         p2 = Person(name="Andre", group=g1) |  | ||||||
|         p2.save() |  | ||||||
|  |  | ||||||
|         p3 = Person(name="Afro design", group=g2) |  | ||||||
|         p3.save() |  | ||||||
|  |  | ||||||
|         s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2']) |  | ||||||
|         s1.save() |  | ||||||
|  |  | ||||||
|         s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4']) |  | ||||||
|         s2.save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(SocialData.objects._collection.find_one( |  | ||||||
|             {'tags': 'tag2'}), { |  | ||||||
|                 '_id': s1.pk, |  | ||||||
|                 'obs': 'testing 123', |  | ||||||
|                 'tags': ['tag1', 'tag2'], |  | ||||||
|                 'person': { |  | ||||||
|                     '_id': p1.pk, |  | ||||||
|                     'group': g1.pk |  | ||||||
|                 } |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|         self.assertEqual(SocialData.objects(person__group=g2).count(), 1) |  | ||||||
|         self.assertEqual(SocialData.objects(person__group=g2).first(), s2) |  | ||||||
|  |  | ||||||
    def test_cached_reference_field_push_with_fields(self):
        """An atomic $push into a ListField of CachedReferenceField must
        store only the cached subset of fields (here just 'name') for the
        pushed document.
        """
        class Product(Document):
            name = StringField()

        Product.drop_collection()

        class Basket(Document):
            products = ListField(CachedReferenceField(Product, fields=['name']))

        Basket.drop_collection()
        product1 = Product(name='abc').save()
        product2 = Product(name='def').save()
        basket = Basket(products=[product1]).save()
        # Initial raw document: only product1 is cached, with its 'name'.
        self.assertEqual(
            Basket.objects._collection.find_one(),
            {
                '_id': basket.pk,
                'products': [
                    {
                        '_id': product1.pk,
                        'name': product1.name
                    }
                ]
            }
        )
        # push to list
        basket.update(push__products=product2)
        basket.reload()
        # After the push, both cached entries are present in order, each
        # limited to the declared 'fields' subset plus '_id'.
        self.assertEqual(
            Basket.objects._collection.find_one(),
            {
                '_id': basket.pk,
                'products': [
                    {
                        '_id': product1.pk,
                        'name': product1.name
                    },
                    {
                        '_id': product2.pk,
                        'name': product2.name
                    }
                ]
            }
        )
|  |  | ||||||
    def test_cached_reference_field_update_all(self):
        """After a queryset-level update of the referenced field,
        sync_all() must rewrite the stale cached copies in referencing
        documents.
        """
        class Person(Document):
            TYPES = (
                ('pf', "PF"),
                ('pj', "PJ")
            )
            name = StringField()
            tp = StringField(choices=TYPES)
            # Self-reference caching only the 'tp' field.
            father = CachedReferenceField('self', fields=('tp',))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name='Wilson Junior', tp='pf', father=a1)
        a2.save()

        a2 = Person.objects.with_id(a2.id)
        self.assertEqual(a2.father.tp, a1.tp)

        # The raw document embeds the cached father subset ('_id' + 'tp').
        self.assertEqual(dict(a2.to_mongo()), {
            "_id": a2.pk,
            "name": u"Wilson Junior",
            "tp": u"pf",
            "father": {
                "_id": a1.pk,
                "tp": u"pj"
            }
        })

        # Querying by the reference compares against the cached '_id'.
        self.assertEqual(Person.objects(father=a1)._query, {
            'father._id': a1.pk
        })
        self.assertEqual(Person.objects(father=a1).count(), 1)

        # Bulk update bypasses the per-document save signal, so the cached
        # copies go stale until sync_all() refreshes them.
        Person.objects.update(set__tp="pf")
        Person.father.sync_all()

        a2.reload()
        self.assertEqual(dict(a2.to_mongo()), {
            "_id": a2.pk,
            "name": u"Wilson Junior",
            "tp": u"pf",
            "father": {
                "_id": a1.pk,
                "tp": u"pf"
            }
        })
|  |  | ||||||
|     def test_cached_reference_fields_on_embedded_documents(self): |  | ||||||
|         with self.assertRaises(InvalidDocumentError): |  | ||||||
|             class Test(Document): |  | ||||||
|                 name = StringField() |  | ||||||
|  |  | ||||||
|             type('WrongEmbeddedDocument', ( |  | ||||||
|                 EmbeddedDocument,), { |  | ||||||
|                     'test': CachedReferenceField(Test) |  | ||||||
|             }) |  | ||||||
|  |  | ||||||
    def test_cached_reference_auto_sync(self):
        """With auto_sync enabled (the default), saving the referenced
        document must propagate changes to the cached copies.
        """
        class Person(Document):
            TYPES = (
                ('pf', "PF"),
                ('pj', "PJ")
            )
            name = StringField()
            tp = StringField(
                choices=TYPES
            )

            father = CachedReferenceField('self', fields=('tp',))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name='Wilson Junior', tp='pf', father=a1)
        a2.save()

        # Saving the referenced document triggers the auto-sync of the
        # cached 'tp' value inside a2.
        a1.tp = 'pf'
        a1.save()

        a2.reload()
        self.assertEqual(dict(a2.to_mongo()), {
            '_id': a2.pk,
            'name': 'Wilson Junior',
            'tp': 'pf',
            'father': {
                '_id': a1.pk,
                'tp': 'pf'
            }
        })
|  |  | ||||||
    def test_cached_reference_auto_sync_disabled(self):
        """With auto_sync=False, saving the referenced document must NOT
        update the cached copies — they keep the stale value.
        """
        class Persone(Document):
            TYPES = (
                ('pf', "PF"),
                ('pj', "PJ")
            )
            name = StringField()
            tp = StringField(
                choices=TYPES
            )

            father = CachedReferenceField(
                'self', fields=('tp',), auto_sync=False)

        Persone.drop_collection()

        a1 = Persone(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Persone(name='Wilson Junior', tp='pf', father=a1)
        a2.save()

        a1.tp = 'pf'
        a1.save()

        # The raw document still caches the pre-update value 'pj'.
        self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), {
            '_id': a2.pk,
            'name': 'Wilson Junior',
            'tp': 'pf',
            'father': {
                '_id': a1.pk,
                'tp': 'pj'
            }
        })
|  |  | ||||||
    def test_cached_reference_embedded_fields(self):
        """Cached fields may use dotted paths into embedded documents
        ('owner.tp'); the cache must honor the embedded field's db_field
        alias ('t') and be queryable via double-underscore syntax.
        """
        class Owner(EmbeddedDocument):
            TPS = (
                ('n', "Normal"),
                ('u', "Urgent")
            )
            name = StringField()
            tp = StringField(
                verbose_name="Type",
                db_field="t",
                choices=TPS)

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(
                Animal, fields=['tag', 'owner.tp'])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy",
                   owner=Owner(tp='u', name="Wilson Júnior")
                   )
        a.save()

        o = Ocorrence(person="teste", animal=a)
        o.save()
        # to_mongo(fields=...) projects only the requested paths; note the
        # embedded key is stored under its db_field alias 't', not 'tp'.
        self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), {
            '_id': a.pk,
            'tag': 'heavy',
            'owner': {
                't': 'u'
            }
        })
        self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
        self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u')

        # Check to_mongo with fields
        self.assertNotIn('animal', o.to_mongo(fields=['person']))

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        # Only the first occurrence references the animal; the cached
        # embedded path is filterable with animal__owner__tp.
        count = Ocorrence.objects(
            animal__tag='heavy', animal__owner__tp='u').count()
        self.assertEqual(count, 1)

        ocorrence = Ocorrence.objects(
            animal__tag='heavy',
            animal__owner__tp='u').first()
        self.assertEqual(ocorrence.person, "teste")
        self.assertIsInstance(ocorrence.animal, Animal)
|  |  | ||||||
    def test_cached_reference_embedded_list_fields(self):
        """Cached dotted paths may target a ListField inside an embedded
        document ('owner.tags'); list membership must be queryable through
        the cached copy.
        """
        class Owner(EmbeddedDocument):
            name = StringField()
            tags = ListField(StringField())

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(
                Animal, fields=['tag', 'owner.tags'])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy",
                   owner=Owner(tags=['cool', 'funny'],
                               name="Wilson Júnior")
                   )
        a.save()

        o = Ocorrence(person="teste 2", animal=a)
        o.save()
        # The projected document keeps the whole cached list.
        self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), {
            '_id': a.pk,
            'tag': 'heavy',
            'owner': {
                'tags': ['cool', 'funny']
            }
        })

        self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
        self.assertEqual(o.to_mongo()['animal']['owner']['tags'],
                         ['cool', 'funny'])

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        # Querying a cached list field translates to a plain equality match
        # on the dotted path (Mongo's array-contains semantics).
        query = Ocorrence.objects(
            animal__tag='heavy', animal__owner__tags='cool')._query
        self.assertEqual(
            query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'})

        ocorrence = Ocorrence.objects(
            animal__tag='heavy',
            animal__owner__tags='cool').first()
        self.assertEqual(ocorrence.person, "teste 2")
        self.assertIsInstance(ocorrence.animal, Animal)
| @@ -1,184 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime |  | ||||||
| import math |  | ||||||
| import itertools |  | ||||||
| import re |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class ComplexDateTimeFieldTest(MongoDBTestCase):
    """Tests for ComplexDateTimeField, which stores datetimes as padded
    strings and therefore round-trips microseconds exactly (unlike BSON
    dates, which truncate to milliseconds).
    """

    def test_complexdatetime_storage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """
        class LogEntry(Document):
            date = ComplexDateTimeField()
            date_with_dots = ComplexDateTimeField(separator='.')

        LogEntry.drop_collection()

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped - with default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Post UTC - microseconds are rounded (down) nearest millisecond - with
        # default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC dates microseconds below 1000 are dropped - with default
        # datetimefields
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC microseconds above 1000 is wonky - with default datetimefields
        # log.date has an invalid microsecond value so I can't construct
        # a date to compare.
        for i in range(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            self.assertEqual(log.date, d1)
            log1 = LogEntry.objects.get(date=d1)
            self.assertEqual(log, log1)

        # Test string padding: every component must be zero-padded to a
        # fixed width in the stored string.  Use a list (not a map object,
        # which is a one-shot iterator on Python 3) so it can be reused.
        microsecond = [int(math.pow(10, x)) for x in range(6)]
        mm = dd = hh = ii = ss = [1, 10]

        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date']
            # Raw string so \d is a regex escape, not a (deprecated)
            # Python string escape.
            self.assertIsNotNone(re.match(
                r'^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored))

        # Test separator.  The dots must be escaped in the pattern — an
        # unescaped `.` matches ANY character, which would make this
        # assertion pass even with the wrong separator.
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots']
        self.assertIsNotNone(re.match(
            r'^\d{4}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{6}$', stored))

    def test_complexdatetime_usage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """
        class LogEntry(Document):
            date = ComplexDateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()

        log1 = LogEntry.objects.get(date=d1)
        self.assertEqual(log, log1)

        # create extra 59 log entries for a total of 60
        for i in range(1951, 2010):
            d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 60)

        # Test ordering in both directions (string storage must sort like
        # the datetimes it encodes).
        logs = LogEntry.objects.order_by("date")
        for i in range(59):
            self.assertTrue(logs[i].date <= logs[i + 1].date)

        logs = LogEntry.objects.order_by("-date")
        for i in range(59):
            self.assertTrue(logs[i].date >= logs[i + 1].date)

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        self.assertEqual(logs.count(), 10)

        LogEntry.drop_collection()

        # Test microsecond-level ordering/filtering
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(
                date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)
            ).save()

        logs = list(LogEntry.objects.order_by('date'))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            self.assertTrue(log.date < next_log.date)

        logs = list(LogEntry.objects.order_by('-date'))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            self.assertTrue(log.date > next_log.date)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000))
        self.assertEqual(logs.count(), 4)

    def test_no_default_value(self):
        """Without a default, the field is None both locally and after a
        save/fetch round-trip."""
        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log()
        self.assertIsNone(log.timestamp)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertIsNone(fetched_log.timestamp)

    def test_default_static_value(self):
        """A fixed default value must be stored and fetched unchanged."""
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=NOW)

        Log.drop_collection()

        log = Log()
        self.assertEqual(log.timestamp, NOW)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertEqual(fetched_log.timestamp, NOW)

    def test_default_callable(self):
        """A callable default is evaluated at document creation time."""
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow)

        Log.drop_collection()

        log = Log()
        self.assertGreaterEqual(log.timestamp, NOW)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertGreaterEqual(fetched_log.timestamp, NOW)
| @@ -1,165 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime |  | ||||||
| import six |  | ||||||
|  |  | ||||||
| try: |  | ||||||
|     import dateutil |  | ||||||
| except ImportError: |  | ||||||
|     dateutil = None |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDateField(MongoDBTestCase):
    """Tests for DateField: validation, truncation of time components, and
    query behavior.
    """

    def test_date_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """
        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt='')
        self.assertRaises(ValidationError, md.save)

    def test_date_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """
        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt='   ')
        self.assertRaises(ValidationError, md.save)

    def test_default_values_today(self):
        """Ensure that default field values are used when creating
        a document.
        """
        class Person(Document):
            day = DateField(default=datetime.date.today)

        person = Person()
        person.validate()
        # Capture the first access so we can check the callable default is
        # evaluated once and stays stable across accesses (the previous
        # assertion compared person.day to itself, which could never fail).
        day_t0 = person.day
        self.assertEqual(person.day, day_t0)
        self.assertEqual(person.day, datetime.date.today())
        self.assertEqual(person._data['day'], person.day)

    def test_date(self):
        """Tests showing pymongo date fields

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """
        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = datetime.date.today()
        log.save()
        log.reload()
        self.assertEqual(log.date, datetime.date.today())

        # Datetimes are truncated to their date part on storage.
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1.date())
        self.assertEqual(log.date, d2.date())

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1.date())
        self.assertEqual(log.date, d2.date())

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            self.assertEqual(log.date, d1.date())
            self.assertEqual(log.date, d2.date())

    def test_regular_usage(self):
        """Tests for regular datetime fields"""
        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        # Both datetime objects and ISO strings are valid query values.
        for query in (d1, d1.isoformat(' ')):
            log1 = LogEntry.objects.get(date=query)
            self.assertEqual(log, log1)

        if dateutil:
            log1 = LogEntry.objects.get(date=d1.isoformat('T'))
            self.assertEqual(log, log1)

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = datetime.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 20)

        # Test ordering in both directions.
        logs = LogEntry.objects.order_by("date")
        for i in range(19):
            self.assertTrue(logs[i].date <= logs[i + 1].date)

        logs = LogEntry.objects.order_by("-date")
        for i in range(19):
            self.assertTrue(logs[i].date >= logs[i + 1].date)

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """
        class LogEntry(Document):
            time = DateField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        log.time = datetime.date.today()
        log.validate()

        log.time = datetime.datetime.now().isoformat(' ')
        log.validate()

        # 'T'-separated ISO strings are only parseable when dateutil is
        # installed.
        if dateutil:
            log.time = datetime.datetime.now().isoformat('T')
            log.validate()

        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = 'ABC'
        self.assertRaises(ValidationError, log.validate)
| @@ -1,231 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime as dt |  | ||||||
| import six |  | ||||||
|  |  | ||||||
| try: |  | ||||||
|     import dateutil |  | ||||||
| except ImportError: |  | ||||||
|     dateutil = None |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine import connection |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestDateTimeField(MongoDBTestCase): |  | ||||||
|     def test_datetime_from_empty_string(self): |  | ||||||
|         """ |  | ||||||
|         Ensure an exception is raised when trying to |  | ||||||
|         cast an empty string to datetime. |  | ||||||
|         """ |  | ||||||
|         class MyDoc(Document): |  | ||||||
|             dt = DateTimeField() |  | ||||||
|  |  | ||||||
|         md = MyDoc(dt='') |  | ||||||
|         self.assertRaises(ValidationError, md.save) |  | ||||||
|  |  | ||||||
|     def test_datetime_from_whitespace_string(self): |  | ||||||
|         """ |  | ||||||
|         Ensure an exception is raised when trying to |  | ||||||
|         cast a whitespace-only string to datetime. |  | ||||||
|         """ |  | ||||||
|         class MyDoc(Document): |  | ||||||
|             dt = DateTimeField() |  | ||||||
|  |  | ||||||
|         md = MyDoc(dt='   ') |  | ||||||
|         self.assertRaises(ValidationError, md.save) |  | ||||||
|  |  | ||||||
    def test_default_value_utcnow(self):
        """Ensure that default field values are used when creating
        a document.
        """
        class Person(Document):
            created = DateTimeField(default=dt.datetime.utcnow)

        utcnow = dt.datetime.utcnow()
        person = Person()
        person.validate()
        # Capture the first access: the callable default must be evaluated
        # once at creation, then remain stable across accesses.
        person_created_t0 = person.created
        self.assertLess(person.created - utcnow, dt.timedelta(seconds=1))
        self.assertEqual(person_created_t0, person.created)  # make sure it does not change
        self.assertEqual(person._data['created'], person.created)
|  |  | ||||||
    def test_handling_microseconds(self):
        """Tests showing pymongo datetime fields handling of microseconds.
        Microseconds are rounded to the nearest millisecond and pre UTC
        handling is wonky.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """
        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = dt.date.today()
        log.save()
        log.reload()
        self.assertEqual(log.date.date(), dt.date.today())

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        # BSON stores millisecond precision, so the 999us are lost.
        self.assertNotEqual(log.date, d1)
        self.assertEqual(log.date, d2)

        # Post UTC - microseconds are rounded (down) nearest millisecond
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        self.assertNotEqual(log.date, d1)
        self.assertEqual(log.date, d2)

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            self.assertNotEqual(log.date, d1)
            self.assertEqual(log.date, d2)
|  |  | ||||||
|     def test_regular_usage(self): |  | ||||||
|         """Tests for regular datetime fields""" |  | ||||||
|         class LogEntry(Document): |  | ||||||
|             date = DateTimeField() |  | ||||||
|  |  | ||||||
|         LogEntry.drop_collection() |  | ||||||
|  |  | ||||||
|         d1 = dt.datetime(1970, 1, 1, 0, 0, 1) |  | ||||||
|         log = LogEntry() |  | ||||||
|         log.date = d1 |  | ||||||
|         log.validate() |  | ||||||
|         log.save() |  | ||||||
|  |  | ||||||
|         for query in (d1, d1.isoformat(' ')): |  | ||||||
|             log1 = LogEntry.objects.get(date=query) |  | ||||||
|             self.assertEqual(log, log1) |  | ||||||
|  |  | ||||||
|         if dateutil: |  | ||||||
|             log1 = LogEntry.objects.get(date=d1.isoformat('T')) |  | ||||||
|             self.assertEqual(log, log1) |  | ||||||
|  |  | ||||||
|         # create additional 19 log entries for a total of 20 |  | ||||||
|         for i in range(1971, 1990): |  | ||||||
|             d = dt.datetime(i, 1, 1, 0, 0, 1) |  | ||||||
|             LogEntry(date=d).save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(LogEntry.objects.count(), 20) |  | ||||||
|  |  | ||||||
|         # Test ordering |  | ||||||
|         logs = LogEntry.objects.order_by("date") |  | ||||||
|         i = 0 |  | ||||||
|         while i < 19: |  | ||||||
|             self.assertTrue(logs[i].date <= logs[i + 1].date) |  | ||||||
|             i += 1 |  | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.order_by("-date") |  | ||||||
|         i = 0 |  | ||||||
|         while i < 19: |  | ||||||
|             self.assertTrue(logs[i].date >= logs[i + 1].date) |  | ||||||
|             i += 1 |  | ||||||
|  |  | ||||||
|         # Test searching |  | ||||||
|         logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1)) |  | ||||||
|         self.assertEqual(logs.count(), 10) |  | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1)) |  | ||||||
|         self.assertEqual(logs.count(), 10) |  | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.filter( |  | ||||||
|             date__lte=dt.datetime(1980, 1, 1), |  | ||||||
|             date__gte=dt.datetime(1975, 1, 1), |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(logs.count(), 5) |  | ||||||
|  |  | ||||||
|     def test_datetime_validation(self): |  | ||||||
|         """Ensure that invalid values cannot be assigned to datetime |  | ||||||
|         fields. |  | ||||||
|         """ |  | ||||||
|         class LogEntry(Document): |  | ||||||
|             time = DateTimeField() |  | ||||||
|  |  | ||||||
|         log = LogEntry() |  | ||||||
|         log.time = dt.datetime.now() |  | ||||||
|         log.validate() |  | ||||||
|  |  | ||||||
|         log.time = dt.date.today() |  | ||||||
|         log.validate() |  | ||||||
|  |  | ||||||
|         log.time = dt.datetime.now().isoformat(' ') |  | ||||||
|         log.validate() |  | ||||||
|  |  | ||||||
|         log.time = '2019-05-16 21:42:57.897847' |  | ||||||
|         log.validate() |  | ||||||
|  |  | ||||||
|         if dateutil: |  | ||||||
|             log.time = dt.datetime.now().isoformat('T') |  | ||||||
|             log.validate() |  | ||||||
|  |  | ||||||
|         log.time = -1 |  | ||||||
|         self.assertRaises(ValidationError, log.validate) |  | ||||||
|         log.time = 'ABC' |  | ||||||
|         self.assertRaises(ValidationError, log.validate) |  | ||||||
|         log.time = '2019-05-16 21:GARBAGE:12' |  | ||||||
|         self.assertRaises(ValidationError, log.validate) |  | ||||||
|         log.time = '2019-05-16 21:42:57.GARBAGE' |  | ||||||
|         self.assertRaises(ValidationError, log.validate) |  | ||||||
|         log.time = '2019-05-16 21:42:57.123.456' |  | ||||||
|         self.assertRaises(ValidationError, log.validate) |  | ||||||
|  |  | ||||||
|     def test_parse_datetime_as_str(self): |  | ||||||
|         class DTDoc(Document): |  | ||||||
|             date = DateTimeField() |  | ||||||
|  |  | ||||||
|         date_str = '2019-03-02 22:26:01' |  | ||||||
|  |  | ||||||
|         # make sure that passing a parsable datetime works |  | ||||||
|         dtd = DTDoc() |  | ||||||
|         dtd.date = date_str |  | ||||||
|         self.assertIsInstance(dtd.date, six.string_types) |  | ||||||
|         dtd.save() |  | ||||||
|         dtd.reload() |  | ||||||
|  |  | ||||||
|         self.assertIsInstance(dtd.date, dt.datetime) |  | ||||||
|         self.assertEqual(str(dtd.date), date_str) |  | ||||||
|  |  | ||||||
|         dtd.date = 'January 1st, 9999999999' |  | ||||||
|         self.assertRaises(ValidationError, dtd.validate) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDateTimeTzAware(MongoDBTestCase):
    def test_datetime_tz_aware_mark_as_changed(self):
        # Wipe any cached connections so we can reconnect tz-aware.
        connection._connection_settings = {}
        connection._connections = {}
        connection._dbs = {}

        connect(db='mongoenginetest', tz_aware=True)

        class LogEntry(Document):
            time = DateTimeField()

        LogEntry.drop_collection()

        LogEntry(time=dt.datetime(2013, 1, 1, 0, 0, 0)).save()

        # Assigning an equal-but-naive datetime over the tz-aware value
        # loaded from the DB must still mark the field as changed.
        entry = LogEntry.objects.first()
        entry.time = dt.datetime(2013, 1, 1, 0, 0, 0)
        self.assertEqual(['time'], entry._changed_fields)
| @@ -1,91 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from decimal import Decimal |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDecimalField(MongoDBTestCase):

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to decimal fields.
        """
        class Person(Document):
            height = DecimalField(min_value=Decimal('0.1'),
                                  max_value=Decimal('3.5'))

        Person.drop_collection()

        Person(height=Decimal('1.89')).save()
        person = Person.objects.first()
        self.assertEqual(person.height, Decimal('1.89'))

        # A parsable string within range is accepted.
        person.height = '2.0'
        person.save()
        # Below min_value — rejected whether given as float or Decimal.
        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)
        person.height = Decimal('0.01')
        self.assertRaises(ValidationError, person.validate)
        # Above max_value.
        person.height = Decimal('4.0')
        self.assertRaises(ValidationError, person.validate)
        # Not a number at all.
        person.height = 'something invalid'
        self.assertRaises(ValidationError, person.validate)

        # Invalid values passed via the constructor also fail validation.
        person_2 = Person(height='something invalid')
        self.assertRaises(ValidationError, person_2.validate)

    def test_comparison(self):
        """Comparison operators accept Decimal, int, and numeric strings."""
        class Person(Document):
            money = DecimalField()

        Person.drop_collection()

        Person(money=6).save()
        Person(money=7).save()
        Person(money=8).save()
        Person(money=10).save()

        # The same query value expressed three ways yields the same result.
        self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count())
        self.assertEqual(2, Person.objects(money__gt=7).count())
        self.assertEqual(2, Person.objects(money__gt="7").count())

        self.assertEqual(3, Person.objects(money__gte="7").count())

    def test_storage(self):
        """Verify both storage representations (float vs. force_string)
        and the local Decimal values read back, at the configured
        precision of 4 decimal places.
        """
        class Person(Document):
            float_value = DecimalField(precision=4)
            string_value = DecimalField(precision=4, force_string=True)

        Person.drop_collection()
        values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"), Decimal("10.11111")]
        for store_at_creation in [True, False]:
            for value in values_to_store:
                # to_python is called explicitly if values were sent in the kwargs of __init__
                if store_at_creation:
                    Person(float_value=value, string_value=value).save()
                else:
                    person = Person.objects.create()
                    person.float_value = value
                    person.string_value = value
                    person.save()

        # How its stored: floats rounded to 4 places; force_string pads
        # to exactly 4 decimal places.
        expected = [
            {'float_value': 10.0, 'string_value': '10.0000'},
            {'float_value': 10.1, 'string_value': '10.1000'},
            {'float_value': 10.11, 'string_value': '10.1100'},
            {'float_value': 10.111, 'string_value': '10.1110'},
            {'float_value': 10.1111, 'string_value': '10.1111'},
            {'float_value': 10.1111, 'string_value': '10.1111'}]
        # Doubled because values were stored twice (at creation and by
        # assignment).
        expected.extend(expected)
        actual = list(Person.objects.exclude('id').as_pymongo())
        self.assertEqual(expected, actual)

        # How it comes out locally: both fields deserialize to Decimal.
        expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'),
                    Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')]
        expected.extend(expected)
        for field_name in ['float_value', 'string_value']:
            actual = list(Person.objects().scalar(field_name))
            self.assertEqual(expected, actual)
| @@ -1,324 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.base import BaseDict |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase, get_as_pymongo |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDictField(MongoDBTestCase):

    def test_storage(self):
        """A DictField is stored as a plain sub-document."""
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        info = {'testkey': 'testvalue'}
        post = BlogPost(info=info).save()
        self.assertEqual(
            get_as_pymongo(post),
            {
                '_id': post.id,
                'info': info
            }
        )

    def test_general_things(self):
        """Ensure that dict types work as expected."""
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost()
        # Non-dict values are rejected.
        post.info = 'my post'
        self.assertRaises(ValidationError, post.validate)

        post.info = ['test', 'test']
        self.assertRaises(ValidationError, post.validate)

        # Keys starting with '$' are rejected at any nesting level.
        post.info = {'$title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'nested': {'$title': 'test'}}
        self.assertRaises(ValidationError, post.validate)

        # Keys containing '.' are rejected at any nesting level.
        post.info = {'the.title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'nested': {'the.title': 'test'}}
        self.assertRaises(ValidationError, post.validate)

        # Non-string keys are rejected.
        post.info = {1: 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'title': 'test'}
        post.save()

        # '$' is allowed when not in the leading position of a key.
        post = BlogPost()
        post.info = {'title': 'dollar_sign', 'details': {'te$t': 'test'}}
        post.save()

        post = BlogPost()
        post.info = {'details': {'test': 'test'}}
        post.save()

        post = BlogPost()
        post.info = {'details': {'test': 3}}
        post.save()

        self.assertEqual(BlogPost.objects.count(), 4)
        # Querying into dict keys (one and two levels deep).
        self.assertEqual(
            BlogPost.objects.filter(info__title__exact='test').count(), 1)
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact='test').count(), 1)

        post = BlogPost.objects.filter(info__title__exact='dollar_sign').first()
        self.assertIn('te$t', post['info']['details'])

        # Confirm handles non strings or non existing keys
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
        self.assertEqual(
            BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)

        # In-place dict mutations (update/setdefault) are tracked and
        # persisted on save.
        post = BlogPost.objects.create(info={'title': 'original'})
        post.info.update({'title': 'updated'})
        post.save()
        post.reload()
        self.assertEqual('updated', post.info['title'])

        post.info.setdefault('authors', [])
        post.save()
        post.reload()
        self.assertEqual([], post.info['authors'])

    def test_dictfield_dump_document(self):
        """Ensure a DictField can handle another document's dump."""
        class Doc(Document):
            field = DictField()

        class ToEmbed(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

        class ToEmbedParent(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

            meta = {'allow_inheritance': True}

        class ToEmbedChild(ToEmbedParent):
            pass

        # A document's to_mongo() dump (including '_id') round-trips
        # through a DictField unchanged.
        to_embed_recursive = ToEmbed(id=1).save()
        to_embed = ToEmbed(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
        doc = Doc(field=to_embed.to_mongo().to_dict())
        doc.save()
        self.assertIsInstance(doc.field, dict)
        self.assertEqual(doc.field, {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}})
        # Same thing with a Document with a _cls field
        to_embed_recursive = ToEmbedChild(id=1).save()
        to_embed_child = ToEmbedChild(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
        doc = Doc(field=to_embed_child.to_mongo().to_dict())
        doc.save()
        self.assertIsInstance(doc.field, dict)
        expected = {
            '_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild',
            'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}}
        }
        self.assertEqual(doc.field, expected)

    def test_dictfield_strict(self):
        """Ensure that dict field handles validation if provided a strict field type."""
        class Simple(Document):
            mapping = DictField(field=IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping['someint'] = 1
        e.save()

        # try creating an invalid mapping
        with self.assertRaises(ValidationError):
            e.mapping['somestring'] = "abc"
            e.save()

    def test_dictfield_complex(self):
        """Ensure that the dict field can handle the complex types."""
        class SettingBase(EmbeddedDocument):
            meta = {'allow_inheritance': True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Simple(Document):
            mapping = DictField()

        Simple.drop_collection()

        # Mix embedded documents, plain scalars, nested dicts and lists
        # inside a single DictField.
        e = Simple()
        e.mapping['somestring'] = StringSetting(value='foo')
        e.mapping['someint'] = IntegerSetting(value=42)
        e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!',
                                    'float': 1.001,
                                    'complex': IntegerSetting(value=42),
                                    'list': [IntegerSetting(value=42),
                                             StringSetting(value='foo')]}
        e.save()

        # Embedded documents deserialize back to their concrete classes.
        e2 = Simple.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping['somestring'], StringSetting)
        self.assertIsInstance(e2.mapping['someint'], IntegerSetting)

        # Test querying
        self.assertEqual(
            Simple.objects.filter(mapping__someint__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)

        # Confirm can update
        Simple.objects().update(
            set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)

    def test_push_dict(self):
        """push__ on a ListField(DictField()) appends dicts, including {}."""
        class MyModel(Document):
            events = ListField(DictField())

        doc = MyModel(events=[{'a': 1}]).save()
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {
            '_id': doc.id,
            'events': [{'a': 1}]
        }
        self.assertEqual(raw_doc, expected_raw_doc)

        # An empty dict is a valid pushed element.
        MyModel.objects(id=doc.id).update(push__events={})
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {
            '_id': doc.id,
            'events': [{'a': 1}, {}]
        }
        self.assertEqual(raw_doc, expected_raw_doc)

    def test_ensure_unique_default_instances(self):
        """Ensure that every field has its own unique default instance."""
        class D(Document):
            data = DictField()
            data2 = DictField(default=lambda: {})

        # Mutating one instance's default dict must not leak into a
        # freshly-created instance.
        d1 = D()
        d1.data['foo'] = 'bar'
        d1.data2['foo'] = 'bar'
        d2 = D()
        self.assertEqual(d2.data, {})
        self.assertEqual(d2.data2, {})

    def test_dict_field_invalid_dict_value(self):
        """None/False are not valid values for a required DictField."""
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        test = DictFieldTest(dictionary=None)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

        test = DictFieldTest(dictionary=False)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

    def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
        """Assigning an EmbeddedDocument where a dict is expected raises
        a ValidationError that names the offending field.
        """
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        class Embedded(EmbeddedDocument):
            name = StringField()

        embed = Embedded(name='garbage')
        doc = DictFieldTest(dictionary=embed)
        with self.assertRaises(ValidationError) as ctx_err:
            doc.validate()
        self.assertIn("'dictionary'", str(ctx_err.exception))
        self.assertIn('Only dictionaries may be used in a DictField', str(ctx_err.exception))

    def test_atomic_update_dict_field(self):
        """Ensure that the entire DictField can be atomically updated."""
        class Simple(Document):
            mapping = DictField(field=ListField(IntField(required=True)))

        Simple.drop_collection()

        e = Simple()
        e.mapping['someints'] = [1, 2]
        e.save()
        # set__ replaces the whole dict; the result is still a BaseDict.
        e.update(set__mapping={"ints": [3, 4]})
        e.reload()
        self.assertEqual(BaseDict, type(e.mapping))
        self.assertEqual({"ints": [3, 4]}, e.mapping)

        # try creating an invalid mapping
        with self.assertRaises(ValueError):
            e.update(set__mapping={"somestrings": ["foo", "bar", ]})

    def test_dictfield_with_referencefield_complex_nesting_cases(self):
        """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)"""
        # Relates to Issue #1453
        class Doc(Document):
            s = StringField()

        class Simple(Document):
            mapping0 = DictField(ReferenceField(Doc, dbref=True))
            mapping1 = DictField(ReferenceField(Doc, dbref=False))
            mapping2 = DictField(ListField(ReferenceField(Doc, dbref=True)))
            mapping3 = DictField(ListField(ReferenceField(Doc, dbref=False)))
            mapping4 = DictField(DictField(field=ReferenceField(Doc, dbref=True)))
            mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False)))
            mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True))))
            mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False))))
            mapping8 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=True)))))
            mapping9 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=False)))))

        Doc.drop_collection()
        Simple.drop_collection()

        # Store the same reference at every nesting depth/shape.
        d = Doc(s='aa').save()
        e = Simple()
        e.mapping0['someint'] = e.mapping1['someint'] = d
        e.mapping2['someint'] = e.mapping3['someint'] = [d]
        e.mapping4['someint'] = e.mapping5['someint'] = {'d': d}
        e.mapping6['someint'] = e.mapping7['someint'] = [{'d': d}]
        e.mapping8['someint'] = e.mapping9['someint'] = [{'d': [d]}]
        e.save()

        # Every nesting shape must dereference back to a Doc instance.
        s = Simple.objects.first()
        self.assertIsInstance(s.mapping0['someint'], Doc)
        self.assertIsInstance(s.mapping1['someint'], Doc)
        self.assertIsInstance(s.mapping2['someint'][0], Doc)
        self.assertIsInstance(s.mapping3['someint'][0], Doc)
        self.assertIsInstance(s.mapping4['someint']['d'], Doc)
        self.assertIsInstance(s.mapping5['someint']['d'], Doc)
        self.assertIsInstance(s.mapping6['someint'][0]['d'], Doc)
        self.assertIsInstance(s.mapping7['someint'][0]['d'], Doc)
        self.assertIsInstance(s.mapping8['someint'][0]['d'][0], Doc)
        self.assertIsInstance(s.mapping9['someint'][0]['d'][0], Doc)
| @@ -1,130 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import sys |  | ||||||
| from unittest import SkipTest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestEmailField(MongoDBTestCase):
    def test_generic_behavior(self):
        """Baseline accept/reject behavior of EmailField's default
        validation rules.
        """
        class User(Document):
            email = EmailField()

        user = User(email='ross@example.com')
        user.validate()

        user = User(email='ross@example.co.uk')
        user.validate()

        # Long (64-char) local part with a dot is valid.
        user = User(email=('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S'
                           'aJIazqqWkm7.net'))
        user.validate()

        # New-style long TLDs are accepted.
        user = User(email='new-tld@example.technology')
        user.validate()

        # Trailing dot on the domain is invalid.
        user = User(email='ross@example.com.')
        self.assertRaises(ValidationError, user.validate)

        # unicode domain
        user = User(email=u'user@пример.рф')
        user.validate()

        # invalid unicode domain (no TLD)
        user = User(email=u'user@пример')
        self.assertRaises(ValidationError, user.validate)

        # invalid data type
        user = User(email=123)
        self.assertRaises(ValidationError, user.validate)

    def test_email_field_unicode_user(self):
        """Unicode local parts are opt-in via allow_utf8_user."""
        # Don't run this test on pypy3, which doesn't support unicode regex:
        # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode
        # NOTE(review): the check below keys on sys.version_info == (3, 2),
        # which PyPy3 reported at the time — confirm this still matches
        # the interpreters in CI.
        if sys.version_info[:2] == (3, 2):
            raise SkipTest('unicode email addresses are not supported on PyPy 3')

        class User(Document):
            email = EmailField()

        # unicode user shouldn't validate by default...
        user = User(email=u'Dörte@Sörensen.example.com')
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine with allow_utf8_user set to True
        class User(Document):
            email = EmailField(allow_utf8_user=True)

        user = User(email=u'Dörte@Sörensen.example.com')
        user.validate()

    def test_email_field_domain_whitelist(self):
        """Domains without a TLD are opt-in via domain_whitelist."""
        class User(Document):
            email = EmailField()

        # localhost domain shouldn't validate by default...
        user = User(email='me@localhost')
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine if it's whitelisted
        class User(Document):
            email = EmailField(domain_whitelist=['localhost'])

        user = User(email='me@localhost')
        user.validate()

    def test_email_domain_validation_fails_if_invalid_idn(self):
        """A domain that cannot be IDNA-encoded produces a descriptive
        validation error.
        """
        class User(Document):
            email = EmailField()

        invalid_idn = '.google.com'
        user = User(email='me@%s' % invalid_idn)
        with self.assertRaises(ValidationError) as ctx_err:
            user.validate()
        self.assertIn("domain failed IDN encoding", str(ctx_err.exception))

    def test_email_field_ip_domain(self):
        """Bracketed IP-literal domains are opt-in via allow_ip_domain;
        malformed IPs are always rejected.
        """
        class User(Document):
            email = EmailField()

        valid_ipv4 = 'email@[127.0.0.1]'
        valid_ipv6 = 'email@[2001:dB8::1]'
        invalid_ip = 'email@[324.0.0.1]'

        # IP address as a domain shouldn't validate by default...
        user = User(email=valid_ipv4)
        self.assertRaises(ValidationError, user.validate)

        user = User(email=valid_ipv6)
        self.assertRaises(ValidationError, user.validate)

        user = User(email=invalid_ip)
        self.assertRaises(ValidationError, user.validate)

        # ...but it should be fine with allow_ip_domain set to True
        class User(Document):
            email = EmailField(allow_ip_domain=True)

        user = User(email=valid_ipv4)
        user.validate()

        user = User(email=valid_ipv6)
        user.validate()

        # invalid IP should still fail validation
        user = User(email=invalid_ip)
        self.assertRaises(ValidationError, user.validate)

    def test_email_field_honors_regex(self):
        """A custom regex further restricts which addresses validate."""
        class User(Document):
            email = EmailField(regex=r'\w+@example.com')

        # Fails regex validation
        user = User(email='me@foo.com')
        self.assertRaises(ValidationError, user.validate)

        # Passes regex validation
        user = User(email='me@example.com')
        self.assertIsNone(user.validate())
| @@ -1,344 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from mongoengine import Document, StringField, ValidationError, EmbeddedDocument, EmbeddedDocumentField, \ |  | ||||||
|     InvalidQueryError, LookUpError, IntField, GenericEmbeddedDocumentField, ListField, EmbeddedDocumentListField, \ |  | ||||||
|     ReferenceField |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestEmbeddedDocumentField(MongoDBTestCase):
    """Tests for EmbeddedDocumentField: construction, type validation, and
    querying/projection of embedded attributes (including inheritance).

    Fix: the Python-2-only ``unicode()`` builtin was replaced with ``str()``,
    which behaves identically for these ASCII messages on both Python 2 and 3
    and matches the sibling tests in this suite.
    """

    def test___init___(self):
        class MyDoc(EmbeddedDocument):
            name = StringField()

        field = EmbeddedDocumentField(MyDoc)
        self.assertEqual(field.document_type_obj, MyDoc)

        # The document type may also be given by name, resolved lazily.
        field2 = EmbeddedDocumentField('MyDoc')
        self.assertEqual(field2.document_type_obj, 'MyDoc')

    def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self):
        with self.assertRaises(ValidationError):
            EmbeddedDocumentField(dict)

    def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self):
        # A lazily-resolved name pointing at a Document (not an
        # EmbeddedDocument) must fail when the type is first resolved.
        class MyDoc(Document):
            name = StringField()

        emb = EmbeddedDocumentField('MyDoc')
        with self.assertRaises(ValidationError) as ctx:
            emb.document_type
        self.assertIn('Invalid embedded document class provided to an EmbeddedDocumentField', str(ctx.exception))

    def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self):
        # Relates to #1661
        class MyDoc(Document):
            name = StringField()

        with self.assertRaises(ValidationError):
            class MyFailingDoc(Document):
                emb = EmbeddedDocumentField(MyDoc)

        with self.assertRaises(ValidationError):
            class MyFailingdoc2(Document):
                emb = EmbeddedDocumentField('MyDoc')

    def test_query_embedded_document_attribute(self):
        class AdminSettings(EmbeddedDocument):
            foo1 = StringField()
            foo2 = StringField()

        class Person(Document):
            settings = EmbeddedDocumentField(AdminSettings)
            name = StringField()

        Person.drop_collection()

        p = Person(
            settings=AdminSettings(foo1='bar1', foo2='bar2'),
            name='John',
        ).save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            Person.objects(settings__notexist='bar').first()
        # str() instead of the Python-2-only unicode() builtin.
        self.assertEqual(str(ctx_err.exception), u'Cannot resolve field "notexist"')

        with self.assertRaises(LookUpError):
            Person.objects.only('settings.notexist')

        # Test existing attribute
        self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p.id)
        only_p = Person.objects.only('settings.foo1').first()
        self.assertEqual(only_p.settings.foo1, p.settings.foo1)
        self.assertIsNone(only_p.settings.foo2)
        self.assertIsNone(only_p.name)

        exclude_p = Person.objects.exclude('settings.foo1').first()
        self.assertIsNone(exclude_p.settings.foo1)
        self.assertEqual(exclude_p.settings.foo2, p.settings.foo2)
        self.assertEqual(exclude_p.name, p.name)

    def test_query_embedded_document_attribute_with_inheritance(self):
        class BaseSettings(EmbeddedDocument):
            meta = {'allow_inheritance': True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
            sub_foo = StringField()

        class Person(Document):
            settings = EmbeddedDocumentField(BaseSettings)

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo'))
        p.save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id)
        # str() instead of the Python-2-only unicode() builtin.
        self.assertEqual(str(ctx_err.exception), u'Cannot resolve field "notexist"')

        # Test existing attribute
        self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id)
        self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id)

        # Projection works on subclass fields too, provided _cls is included.
        only_p = Person.objects.only('settings.base_foo', 'settings._cls').first()
        self.assertEqual(only_p.settings.base_foo, 'basefoo')
        self.assertIsNone(only_p.settings.sub_foo)

    def test_query_list_embedded_document_with_inheritance(self):
        class Post(EmbeddedDocument):
            title = StringField(max_length=120, required=True)
            meta = {'allow_inheritance': True}

        class TextPost(Post):
            content = StringField()

        class MoviePost(Post):
            author = StringField()

        class Record(Document):
            posts = ListField(EmbeddedDocumentField(Post))

        record_movie = Record(posts=[MoviePost(author='John', title='foo')]).save()
        record_text = Record(posts=[TextPost(content='a', title='foo')]).save()

        # Queries on subclass-only fields match only matching records.
        records = list(Record.objects(posts__author=record_movie.posts[0].author))
        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].id, record_movie.id)

        records = list(Record.objects(posts__content=record_text.posts[0].content))
        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].id, record_text.id)

        # Base-class fields match across all subclasses.
        self.assertEqual(Record.objects(posts__title='foo').count(), 2)
|  |  | ||||||
|  |  | ||||||
class TestGenericEmbeddedDocumentField(MongoDBTestCase):
    """Tests for GenericEmbeddedDocumentField: heterogeneous embedding,
    ``choices`` validation (incl. inheritance), and attribute querying.

    Fix: the Python-2-only ``unicode()`` builtin was replaced with ``str()``,
    which behaves identically for these ASCII messages on both Python 2 and 3
    and matches the sibling tests in this suite.
    """

    def test_generic_embedded_document(self):
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField()

        Person.drop_collection()

        person = Person(name='Test User')
        person.like = Car(name='Fiat')
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Car)

        # The same generic field can hold a different embedded type.
        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Dish)

    def test_generic_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices."""
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField(choices=(Dish,))

        Person.drop_collection()

        person = Person(name='Test User')
        # A type outside `choices` fails validation...
        person.like = Car(name='Fiat')
        self.assertRaises(ValidationError, person.validate)

        # ...while an allowed type saves and round-trips.
        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Dish)

    def test_generic_list_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices inside
        a list field.
        """
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,)))

        Person.drop_collection()

        person = Person(name='Test User')
        person.likes = [Car(name='Fiat')]
        self.assertRaises(ValidationError, person.validate)

        person.likes = [Dish(food="arroz", number=15)]
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.likes[0], Dish)

    def test_choices_validation_documents(self):
        """
        Ensure fields with document choices validate given a valid choice.
        """
        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(UserComments,))
            )

        # Ensure Validation Passes
        BlogPost(comments=[
            UserComments(author='user2', message='message2'),
        ]).save()

    def test_choices_validation_documents_invalid(self):
        """
        Ensure fields with document choices validate given an invalid choice.
        This should throw a ValidationError exception.
        """
        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class ModeratorComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(UserComments,))
            )

        # Single Entry Failure
        post = BlogPost(comments=[
            ModeratorComments(author='mod1', message='message1'),
        ])
        self.assertRaises(ValidationError, post.save)

        # Mixed Entry Failure: one bad element poisons the whole list.
        post = BlogPost(comments=[
            ModeratorComments(author='mod1', message='message1'),
            UserComments(author='user2', message='message2'),
        ])
        self.assertRaises(ValidationError, post.save)

    def test_choices_validation_documents_inheritance(self):
        """
        Ensure fields with document choices validate given subclass of choice.
        """
        class Comments(EmbeddedDocument):
            meta = {
                'abstract': True
            }
            author = StringField()
            message = StringField()

        class UserComments(Comments):
            pass

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(Comments,))
            )

        # Save Valid EmbeddedDocument Type (a subclass of the choice).
        BlogPost(comments=[
            UserComments(author='user2', message='message2'),
        ]).save()

    def test_query_generic_embedded_document_attribute(self):
        class AdminSettings(EmbeddedDocument):
            foo1 = StringField()

        class NonAdminSettings(EmbeddedDocument):
            foo2 = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(choices=(AdminSettings, NonAdminSettings))

        Person.drop_collection()

        p1 = Person(settings=AdminSettings(foo1='bar1')).save()
        p2 = Person(settings=NonAdminSettings(foo2='bar2')).save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            Person.objects(settings__notexist='bar').first()
        # str() instead of the Python-2-only unicode() builtin.
        self.assertEqual(str(ctx_err.exception), u'Cannot resolve field "notexist"')

        with self.assertRaises(LookUpError):
            Person.objects.only('settings.notexist')

        # Test existing attribute
        self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p1.id)
        self.assertEqual(Person.objects(settings__foo2='bar2').first().id, p2.id)

    def test_query_generic_embedded_document_attribute_with_inheritance(self):
        class BaseSettings(EmbeddedDocument):
            meta = {'allow_inheritance': True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
            sub_foo = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(choices=[BaseSettings])

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo'))
        p.save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id)
        # str() instead of the Python-2-only unicode() builtin.
        self.assertEqual(str(ctx_err.exception), u'Cannot resolve field "notexist"')

        # Test existing attribute
        self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id)
        self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id)
| @@ -1,58 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import six |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestFloatField(MongoDBTestCase):
    """Behavioural tests for FloatField: the __ne query operator and
    validation of bounds, types, and float-representable magnitudes."""

    def test_float_ne_operator(self):
        class TestDocument(Document):
            float_fld = FloatField()

        TestDocument.drop_collection()

        TestDocument(float_fld=None).save()
        TestDocument(float_fld=1).save()

        # Each __ne query excludes exactly one of the two stored documents.
        self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count())

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to float fields.
        """
        class Person(Document):
            height = FloatField(min_value=0.1, max_value=3.5)

        class BigPerson(Document):
            height = FloatField()

        # In-range float is accepted.
        person = Person()
        person.height = 1.89
        person.validate()

        # Numeric strings and out-of-range values are all rejected.
        for bad_height in ('2.0', 0.01, 4.0):
            person.height = bad_height
            self.assertRaises(ValidationError, person.validate)

        person_2 = Person(height='something invalid')
        self.assertRaises(ValidationError, person_2.validate)

        big_person = BigPerson()

        # Every integer type (int, and long on Python 2) coerces cleanly.
        for value, value_type in enumerate(six.integer_types):
            big_person.height = value_type(value)
            big_person.validate()

        big_person.height = 2 ** 500
        big_person.validate()

        big_person.height = 2 ** 100000  # Too big for a float value
        self.assertRaises(ValidationError, big_person.validate)
| @@ -1,42 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestIntField(MongoDBTestCase):
    """Behavioural tests for IntField: bound validation and __ne queries."""

    def test_int_validation(self):
        """Ensure that invalid values cannot be assigned to int fields.
        """
        class Person(Document):
            age = IntField(min_value=0, max_value=110)

        person = Person()

        # Values on and inside the [0, 110] bounds validate.
        for valid_age in (0, 50, 110):
            person.age = valid_age
            person.validate()

        # Out-of-range and non-numeric values are rejected.
        for invalid_age in (-1, 120, 'ten'):
            person.age = invalid_age
            self.assertRaises(ValidationError, person.validate)

    def test_ne_operator(self):
        class TestDocument(Document):
            int_fld = IntField()

        TestDocument.drop_collection()

        TestDocument(int_fld=None).save()
        TestDocument(int_fld=1).save()

        # Each __ne query excludes exactly one of the two stored documents.
        self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count())
| @@ -1,570 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from bson import DBRef, ObjectId |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.base import LazyReference |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestLazyReferenceField(MongoDBTestCase): |  | ||||||
|     def test_lazy_reference_config(self): |  | ||||||
|         # Make sure ReferenceField only accepts a document class or a string |  | ||||||
|         # with a document class name. |  | ||||||
|         self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument) |  | ||||||
|  |  | ||||||
|     def test___repr__(self): |  | ||||||
|         class Animal(Document): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         class Ocurrence(Document): |  | ||||||
|             animal = LazyReferenceField(Animal) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|         Ocurrence.drop_collection() |  | ||||||
|  |  | ||||||
|         animal = Animal() |  | ||||||
|         oc = Ocurrence(animal=animal) |  | ||||||
|         self.assertIn('LazyReference', repr(oc.animal)) |  | ||||||
|  |  | ||||||
|     def test___getattr___unknown_attr_raises_attribute_error(self): |  | ||||||
|         class Animal(Document): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         class Ocurrence(Document): |  | ||||||
|             animal = LazyReferenceField(Animal) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|         Ocurrence.drop_collection() |  | ||||||
|  |  | ||||||
|         animal = Animal().save() |  | ||||||
|         oc = Ocurrence(animal=animal) |  | ||||||
|         with self.assertRaises(AttributeError): |  | ||||||
|             oc.animal.not_exist |  | ||||||
|  |  | ||||||
    def test_lazy_reference_simple(self):
        # End-to-end round trip of a LazyReferenceField, plus the fetch()
        # cache semantics: cached by default, bypassed with force=True.
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        # `fetch` keep cache on referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        # Same object identity, and the pre-update value — i.e. stale by design.
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")
|  |  | ||||||
|     def test_lazy_reference_fetch_invalid_ref(self): |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             tag = StringField() |  | ||||||
|  |  | ||||||
|         class Ocurrence(Document): |  | ||||||
|             person = StringField() |  | ||||||
|             animal = LazyReferenceField(Animal) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|         Ocurrence.drop_collection() |  | ||||||
|  |  | ||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |  | ||||||
|         Ocurrence(person="test", animal=animal).save() |  | ||||||
|         animal.delete() |  | ||||||
|         p = Ocurrence.objects.get() |  | ||||||
|         self.assertIsInstance(p.animal, LazyReference) |  | ||||||
|         with self.assertRaises(DoesNotExist): |  | ||||||
|             p.animal.fetch() |  | ||||||
|  |  | ||||||
    def test_lazy_reference_set(self):
        # A LazyReferenceField accepts many equivalent forms of "reference":
        # the document itself, its pk, a DBRef, or a LazyReference — for both
        # the declared class and a subclass (inheritance enabled below).
        class Animal(Document):
            meta = {'allow_inheritance': True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick='doggo', name='dog').save()
        for ref in (
                animal,
                animal.pk,
                DBRef(animal._get_collection_name(), animal.pk),
                LazyReference(Animal, animal.pk),

                sub_animal,
                sub_animal.pk,
                DBRef(sub_animal._get_collection_name(), sub_animal.pk),
                LazyReference(SubAnimal, sub_animal.pk),
                ):
            # Every form must save, reload as a LazyReference, and be fetchable.
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, LazyReference)
            p.animal.fetch()
|  |  | ||||||
|     def test_lazy_reference_bad_set(self): |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             tag = StringField() |  | ||||||
|  |  | ||||||
|         class Ocurrence(Document): |  | ||||||
|             person = StringField() |  | ||||||
|             animal = LazyReferenceField(Animal) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|         Ocurrence.drop_collection() |  | ||||||
|  |  | ||||||
|         class BadDoc(Document): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |  | ||||||
|         baddoc = BadDoc().save() |  | ||||||
|         for bad in ( |  | ||||||
|                 42, |  | ||||||
|                 'foo', |  | ||||||
|                 baddoc, |  | ||||||
|                 DBRef(baddoc._get_collection_name(), animal.pk), |  | ||||||
|                 LazyReference(BadDoc, animal.pk) |  | ||||||
|                 ): |  | ||||||
|             with self.assertRaises(ValidationError): |  | ||||||
|                 p = Ocurrence(person="test", animal=bad).save() |  | ||||||
|  |  | ||||||
|     def test_lazy_reference_query_conversion(self): |  | ||||||
|         """Ensure that LazyReferenceFields can be queried using objects and values |  | ||||||
|         of the type of the primary key of the referenced object. |  | ||||||
|         """ |  | ||||||
|         class Member(Document): |  | ||||||
|             user_num = IntField(primary_key=True) |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             author = LazyReferenceField(Member, dbref=False) |  | ||||||
|  |  | ||||||
|         Member.drop_collection() |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         m1 = Member(user_num=1) |  | ||||||
|         m1.save() |  | ||||||
|         m2 = Member(user_num=2) |  | ||||||
|         m2.save() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |  | ||||||
|         post2.save() |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |  | ||||||
|         self.assertEqual(post.id, post1.id) |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |  | ||||||
|         self.assertEqual(post.id, post2.id) |  | ||||||
|  |  | ||||||
|         # Same thing by passing a LazyReference instance |  | ||||||
|         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() |  | ||||||
|         self.assertEqual(post.id, post2.id) |  | ||||||
|  |  | ||||||
|     def test_lazy_reference_query_conversion_dbref(self): |  | ||||||
|         """Ensure that LazyReferenceFields can be queried using objects and values |  | ||||||
|         of the type of the primary key of the referenced object. |  | ||||||
|         """ |  | ||||||
|         class Member(Document): |  | ||||||
|             user_num = IntField(primary_key=True) |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             author = LazyReferenceField(Member, dbref=True) |  | ||||||
|  |  | ||||||
|         Member.drop_collection() |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         m1 = Member(user_num=1) |  | ||||||
|         m1.save() |  | ||||||
|         m2 = Member(user_num=2) |  | ||||||
|         m2.save() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |  | ||||||
|         post2.save() |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |  | ||||||
|         self.assertEqual(post.id, post1.id) |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |  | ||||||
|         self.assertEqual(post.id, post2.id) |  | ||||||
|  |  | ||||||
|         # Same thing by passing a LazyReference instance |  | ||||||
|         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() |  | ||||||
|         self.assertEqual(post.id, post2.id) |  | ||||||
|  |  | ||||||
    def test_lazy_reference_passthrough(self):
        # With passthrough=False the proxy exposes only reference metadata
        # (pk); with passthrough=True it forwards item/attribute access to
        # the referenced document's data fields (but never its methods).
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal, passthrough=False)
            animal_passthrough = LazyReferenceField(Animal, passthrough=True)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(animal=animal, animal_passthrough=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        # Non-passthrough: field access fails both as item and as attribute.
        with self.assertRaises(KeyError):
            p.animal['name']
        with self.assertRaises(AttributeError):
            p.animal.name
        self.assertEqual(p.animal.pk, animal.pk)

        # Passthrough: both access styles reach the referenced data.
        self.assertEqual(p.animal_passthrough.name, "Leopard")
        self.assertEqual(p.animal_passthrough['name'], "Leopard")

        # Should not be able to access referenced document's methods
        with self.assertRaises(AttributeError):
            p.animal.save
        with self.assertRaises(KeyError):
            p.animal['save']
|  |  | ||||||
|     def test_lazy_reference_not_set(self): |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             tag = StringField() |  | ||||||
|  |  | ||||||
|         class Ocurrence(Document): |  | ||||||
|             person = StringField() |  | ||||||
|             animal = LazyReferenceField(Animal) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|         Ocurrence.drop_collection() |  | ||||||
|  |  | ||||||
|         Ocurrence(person='foo').save() |  | ||||||
|         p = Ocurrence.objects.get() |  | ||||||
|         self.assertIs(p.animal, None) |  | ||||||
|  |  | ||||||
|     def test_lazy_reference_equality(self): |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             tag = StringField() |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|  |  | ||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |  | ||||||
|         animalref = LazyReference(Animal, animal.pk) |  | ||||||
|         self.assertEqual(animal, animalref) |  | ||||||
|         self.assertEqual(animalref, animal) |  | ||||||
|  |  | ||||||
|         other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90")) |  | ||||||
|         self.assertNotEqual(animal, other_animalref) |  | ||||||
|         self.assertNotEqual(other_animalref, animal) |  | ||||||
|  |  | ||||||
    def test_lazy_reference_embedded(self):
        """LazyReferenceField values coerce to LazyReference inside lists
        and embedded documents, both after a reload and after assigning
        raw pk values.
        """
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(LazyReferenceField(Animal))
            direct = LazyReferenceField(Animal)

        class Ocurrence(Document):
            in_list = ListField(LazyReferenceField(Animal))
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        # NOTE(review): positional Document construction — presumably fills
        # the first declared field (name); confirm against BaseDocument.
        animal1 = Animal('doggo').save()
        animal2 = Animal('cheeta').save()

        def check_fields_type(occ):
            # Every reference slot must hold a LazyReference wrapper.
            self.assertIsInstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
            direct=animal1
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        # Assigning bare ids must also be coerced to LazyReference.
        occ.direct = animal1.id
        occ.in_list = [animal1.id, animal2.id]
        occ.in_embedded.direct = animal1.id
        occ.in_embedded.in_list = [animal1.id, animal2.id]
        check_fields_type(occ)
|  |  | ||||||
|  |  | ||||||
class TestGenericLazyReferenceField(MongoDBTestCase):
    """Tests for GenericLazyReferenceField — the lazy reference variant
    that has no fixed target document class.
    """

    def test_generic_lazy_reference_simple(self):
        """fetch() returns the referenced document and caches it unless
        force=True is passed.
        """
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        # `fetch` keep cache on referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        # Same object, and the stale value proves the cache was used.
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")

    def test_generic_lazy_reference_choices(self):
        """`choices` restricts which document classes may be referenced,
        both on initial save and on later reassignment.
        """
        class Animal(Document):
            name = StringField()

        class Vegetal(Document):
            name = StringField()

        class Mineral(Document):
            name = StringField()

        class Ocurrence(Document):
            living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal])
            thing = GenericLazyReferenceField()

        Animal.drop_collection()
        Vegetal.drop_collection()
        Mineral.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard").save()
        vegetal = Vegetal(name="Oak").save()
        mineral = Mineral(name="Granite").save()

        occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
        occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
        # Mineral is not among the allowed choices for living_thing.
        with self.assertRaises(ValidationError):
            Ocurrence(living_thing=mineral).save()

        occ = Ocurrence.objects.get(living_thing=animal)
        self.assertEqual(occ, occ_animal)
        self.assertIsInstance(occ.thing, LazyReference)
        self.assertIsInstance(occ.living_thing, LazyReference)

        # Reassigning to another allowed class saves fine...
        occ.thing = vegetal
        occ.living_thing = vegetal
        occ.save()

        # ...but a disallowed class fails validation at save time.
        occ.thing = mineral
        occ.living_thing = mineral
        with self.assertRaises(ValidationError):
            occ.save()

    def test_generic_lazy_reference_set(self):
        """The field accepts documents, LazyReference instances and raw
        {'_cls': ..., '_ref': DBRef} dicts, including for subclasses.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick='doggo', name='dog').save()
        for ref in (
                animal,
                LazyReference(Animal, animal.pk),
                {'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)},

                sub_animal,
                LazyReference(SubAnimal, sub_animal.pk),
                {'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)},
                ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, (LazyReference, Document))
            # fetch() must resolve regardless of how the ref was supplied.
            p.animal.fetch()

    def test_generic_lazy_reference_bad_set(self):
        """Values that are not valid references, or that reference a class
        outside `choices`, are rejected at save time.
        """
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField(choices=['Animal'])

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class BadDoc(Document):
            pass

        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (
                42,
                'foo',
                baddoc,
                LazyReference(BadDoc, animal.pk)
                ):
            with self.assertRaises(ValidationError):
                p = Ocurrence(person="test", animal=bad).save()

    def test_generic_lazy_reference_query_conversion(self):
        """Ensure that GenericLazyReferenceFields can be queried using
        referenced documents or LazyReference instances.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = GenericLazyReferenceField()

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_generic_lazy_reference_not_set(self):
        """An unset GenericLazyReferenceField reads back as None."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        Ocurrence(person='foo').save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)

    def test_generic_lazy_reference_accepts_string_instead_of_class(self):
        """The target document class may be given by name (string)."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField('Animal')

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal().save()
        Ocurrence(animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertEqual(p.animal, animal)

    def test_generic_lazy_reference_embedded(self):
        """Generic lazy references coerce to LazyReference inside lists
        and embedded documents, including when assigned as raw ref dicts.
        """
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(GenericLazyReferenceField())
            direct = GenericLazyReferenceField()

        class Ocurrence(Document):
            in_list = ListField(GenericLazyReferenceField())
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        # NOTE(review): positional Document construction — presumably fills
        # the first declared field (name); confirm against BaseDocument.
        animal1 = Animal('doggo').save()
        animal2 = Animal('cheeta').save()

        def check_fields_type(occ):
            # Every reference slot must hold a LazyReference wrapper.
            self.assertIsInstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
            direct=animal1
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        # Raw reference dicts must also be coerced to LazyReference.
        animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)}
        animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)}
        occ.direct = animal1_ref
        occ.in_list = [animal1_ref, animal2_ref]
        occ.in_embedded.direct = animal1_ref
        occ.in_embedded.in_list = [animal1_ref, animal2_ref]
        check_fields_type(occ)
| @@ -1,56 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import six |  | ||||||
|  |  | ||||||
| try: |  | ||||||
|     from bson.int64 import Int64 |  | ||||||
| except ImportError: |  | ||||||
|     Int64 = long |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_db |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestLongField(MongoDBTestCase):
    """Tests for LongField storage format and validation."""

    def test_long_field_is_considered_as_int64(self):
        """
        Tests that long fields are stored as long in mongo, even if long
        value is small enough to be an int.
        """
        class TestLongFieldConsideredAsInt64(Document):
            some_long = LongField()

        saved = TestLongFieldConsideredAsInt64(some_long=42).save()
        raw = get_db().test_long_field_considered_as_int64.find()[0]
        # Raw storage uses BSON Int64 even for small values...
        self.assertIsInstance(raw['some_long'], Int64)
        # ...while the Python-side value is a plain integer type.
        self.assertIsInstance(saved.some_long, six.integer_types)

    def test_long_validation(self):
        """Ensure that invalid values cannot be assigned to long fields.
        """
        class TestDocument(Document):
            value = LongField(min_value=0, max_value=110)

        doc = TestDocument()

        # An in-range value validates cleanly.
        doc.value = 50
        doc.validate()

        # Below minimum, above maximum, and non-numeric all fail.
        for bad_value in (-1, 120, 'ten'):
            doc.value = bad_value
            self.assertRaises(ValidationError, doc.validate)

    def test_long_ne_operator(self):
        """Only documents with a non-null long match __ne=None."""
        class TestDocument(Document):
            long_fld = LongField()

        TestDocument.drop_collection()

        TestDocument(long_fld=None).save()
        TestDocument(long_fld=1).save()

        self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count())
| @@ -1,144 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import datetime |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestMapField(MongoDBTestCase):
    """Tests for MapField — a dict field whose values share one field type."""

    def test_mapfield(self):
        """Ensure that the MapField handles the declared type."""
        class Simple(Document):
            mapping = MapField(IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping['someint'] = 1
        e.save()

        # A value of the wrong type is rejected on save.
        with self.assertRaises(ValidationError):
            e.mapping['somestring'] = "abc"
            e.save()

        # Declaring a MapField without a value type is itself invalid.
        with self.assertRaises(ValidationError):
            class NoDeclaredType(Document):
                mapping = MapField()

    def test_complex_mapfield(self):
        """Ensure that the MapField can handle complex declared types."""

        class SettingBase(EmbeddedDocument):
            meta = {"allow_inheritance": True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Extensible(Document):
            mapping = MapField(EmbeddedDocumentField(SettingBase))

        Extensible.drop_collection()

        e = Extensible()
        e.mapping['somestring'] = StringSetting(value='foo')
        e.mapping['someint'] = IntegerSetting(value=42)
        e.save()

        # Concrete subclass information survives the round-trip.
        e2 = Extensible.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping['somestring'], StringSetting)
        self.assertIsInstance(e2.mapping['someint'], IntegerSetting)

        # A raw int is not an embedded SettingBase document.
        with self.assertRaises(ValidationError):
            e.mapping['someint'] = 123
            e.save()

    def test_embedded_mapfield_db_field(self):
        """Atomic updates through a map key must resolve db_field aliases."""
        class Embedded(EmbeddedDocument):
            number = IntField(default=0, db_field='i')

        class Test(Document):
            my_map = MapField(field=EmbeddedDocumentField(Embedded),
                              db_field='x')

        Test.drop_collection()

        test = Test()
        test.my_map['DICTIONARY_KEY'] = Embedded(number=1)
        test.save()

        Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)

        test = Test.objects.get()
        self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2)
        # Raw document uses the aliased names ('x' and 'i').
        # NOTE(review): self.db is presumably provided by MongoDBTestCase.
        doc = self.db.test.find_one()
        self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)

    def test_mapfield_numerical_index(self):
        """Ensure that MapField accept numeric strings as indexes."""

        class Embedded(EmbeddedDocument):
            name = StringField()

        class Test(Document):
            my_map = MapField(EmbeddedDocumentField(Embedded))

        Test.drop_collection()

        test = Test()
        test.my_map['1'] = Embedded(name='test')
        test.save()
        # Mutating through a numeric-string key must still be savable.
        test.my_map['1'].name = 'test updated'
        test.save()

    def test_map_field_lookup(self):
        """Ensure MapField lookups succeed on Fields without a lookup
        method.
        """

        class Action(EmbeddedDocument):
            operation = StringField()
            object = StringField()

        class Log(Document):
            name = StringField()
            visited = MapField(DateTimeField())
            actions = MapField(EmbeddedDocumentField(Action))

        Log.drop_collection()
        Log(name="wilson", visited={'friends': datetime.datetime.now()},
            actions={'friends': Action(operation='drink', object='beer')}).save()

        # DateTimeField has no custom lookup method; __exists must work.
        self.assertEqual(1, Log.objects(
            visited__friends__exists=True).count())

        # Nested lookups through embedded values must work too.
        self.assertEqual(1, Log.objects(
            actions__friends__operation='drink',
            actions__friends__object='beer').count())

    def test_map_field_unicode(self):
        """Non-ASCII map keys and values must survive the round-trip."""
        class Info(EmbeddedDocument):
            description = StringField()
            value_list = ListField(field=StringField())

        class BlogPost(Document):
            info_dict = MapField(field=EmbeddedDocumentField(Info))

        BlogPost.drop_collection()

        tree = BlogPost(info_dict={
            u"éééé": {
                'description': u"VALUE: éééé"
            }
        })

        tree.save()

        self.assertEqual(
            BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description,
            u"VALUE: éééé"
        )
| @@ -1,219 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from bson import SON, DBRef |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestReferenceField(MongoDBTestCase): |  | ||||||
    def test_reference_validation(self):
        """Ensure that invalid document objects cannot be assigned to
        reference fields.

        Exercises, in order: field construction, referencing an unsaved
        document, referencing the wrong document type, and referencing
        by raw pk.
        """

        class User(Document):
            name = StringField()

        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(User)

        User.drop_collection()
        BlogPost.drop_collection()

        # Make sure ReferenceField only accepts a document class or a string
        # with a document class name.
        self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)

        user = User(name='Test User')

        # Ensure that the referenced object must have been saved
        post1 = BlogPost(content='Chips and gravy taste good.')
        post1.author = user
        self.assertRaises(ValidationError, post1.save)

        # Check that an invalid object type cannot be used
        post2 = BlogPost(content='Chips and chilli taste good.')
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

        # Ensure ObjectIds are accepted as references.
        # NOTE(review): user is not saved yet at this point, so user.pk is
        # presumably None here — confirm this assigns a real ObjectId.
        user_object_id = user.pk
        post3 = BlogPost(content="Chips and curry sauce taste good.")
        post3.author = user_object_id
        post3.save()

        # Make sure referencing a saved document of the right type works
        user.save()
        post1.author = user
        post1.save()

        # Make sure referencing a saved document of the *wrong* type fails
        post2.save()
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)
|  |  | ||||||
|     def test_objectid_reference_fields(self): |  | ||||||
|         """Make sure storing Object ID references works.""" |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             parent = ReferenceField('self') |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|  |  | ||||||
|         p1 = Person(name="John").save() |  | ||||||
|         Person(name="Ross", parent=p1.pk).save() |  | ||||||
|  |  | ||||||
|         p = Person.objects.get(name="Ross") |  | ||||||
|         self.assertEqual(p.parent, p1) |  | ||||||
|  |  | ||||||
|     def test_dbref_reference_fields(self): |  | ||||||
|         """Make sure storing references as bson.dbref.DBRef works.""" |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             parent = ReferenceField('self', dbref=True) |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|  |  | ||||||
|         p1 = Person(name="John").save() |  | ||||||
|         Person(name="Ross", parent=p1).save() |  | ||||||
|  |  | ||||||
|         self.assertEqual( |  | ||||||
|             Person._get_collection().find_one({'name': 'Ross'})['parent'], |  | ||||||
|             DBRef('person', p1.pk) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         p = Person.objects.get(name="Ross") |  | ||||||
|         self.assertEqual(p.parent, p1) |  | ||||||
|  |  | ||||||
|     def test_dbref_to_mongo(self): |  | ||||||
|         """Make sure that calling to_mongo on a ReferenceField which |  | ||||||
|         has dbref=False, but actually actually contains a DBRef returns |  | ||||||
|         an ID of that DBRef. |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             parent = ReferenceField('self', dbref=False) |  | ||||||
|  |  | ||||||
|         p = Person( |  | ||||||
|             name='Steve', |  | ||||||
|             parent=DBRef('person', 'abcdefghijklmnop') |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(p.to_mongo(), SON([ |  | ||||||
|             ('name', u'Steve'), |  | ||||||
|             ('parent', 'abcdefghijklmnop') |  | ||||||
|         ])) |  | ||||||
|  |  | ||||||
|     def test_objectid_reference_fields(self): |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             parent = ReferenceField('self', dbref=False) |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|  |  | ||||||
|         p1 = Person(name="John").save() |  | ||||||
|         Person(name="Ross", parent=p1).save() |  | ||||||
|  |  | ||||||
|         col = Person._get_collection() |  | ||||||
|         data = col.find_one({'name': 'Ross'}) |  | ||||||
|         self.assertEqual(data['parent'], p1.pk) |  | ||||||
|  |  | ||||||
|         p = Person.objects.get(name="Ross") |  | ||||||
|         self.assertEqual(p.parent, p1) |  | ||||||
|  |  | ||||||
|     def test_undefined_reference(self): |  | ||||||
|         """Ensure that ReferenceFields may reference undefined Documents. |  | ||||||
|         """ |  | ||||||
|         class Product(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             company = ReferenceField('Company') |  | ||||||
|  |  | ||||||
|         class Company(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         Product.drop_collection() |  | ||||||
|         Company.drop_collection() |  | ||||||
|  |  | ||||||
|         ten_gen = Company(name='10gen') |  | ||||||
|         ten_gen.save() |  | ||||||
|         mongodb = Product(name='MongoDB', company=ten_gen) |  | ||||||
|         mongodb.save() |  | ||||||
|  |  | ||||||
|         me = Product(name='MongoEngine') |  | ||||||
|         me.save() |  | ||||||
|  |  | ||||||
|         obj = Product.objects(company=ten_gen).first() |  | ||||||
|         self.assertEqual(obj, mongodb) |  | ||||||
|         self.assertEqual(obj.company, ten_gen) |  | ||||||
|  |  | ||||||
|         obj = Product.objects(company=None).first() |  | ||||||
|         self.assertEqual(obj, me) |  | ||||||
|  |  | ||||||
|         obj = Product.objects.get(company=None) |  | ||||||
|         self.assertEqual(obj, me) |  | ||||||
|  |  | ||||||
|     def test_reference_query_conversion(self): |  | ||||||
|         """Ensure that ReferenceFields can be queried using objects and values |  | ||||||
|         of the type of the primary key of the referenced object. |  | ||||||
|         """ |  | ||||||
|         class Member(Document): |  | ||||||
|             user_num = IntField(primary_key=True) |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             author = ReferenceField(Member, dbref=False) |  | ||||||
|  |  | ||||||
|         Member.drop_collection() |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         m1 = Member(user_num=1) |  | ||||||
|         m1.save() |  | ||||||
|         m2 = Member(user_num=2) |  | ||||||
|         m2.save() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |  | ||||||
|         post2.save() |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |  | ||||||
|         self.assertEqual(post.id, post1.id) |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |  | ||||||
|         self.assertEqual(post.id, post2.id) |  | ||||||
|  |  | ||||||
|     def test_reference_query_conversion_dbref(self): |  | ||||||
|         """Ensure that ReferenceFields can be queried using objects and values |  | ||||||
|         of the type of the primary key of the referenced object. |  | ||||||
|         """ |  | ||||||
|         class Member(Document): |  | ||||||
|             user_num = IntField(primary_key=True) |  | ||||||
|  |  | ||||||
|         class BlogPost(Document): |  | ||||||
|             title = StringField() |  | ||||||
|             author = ReferenceField(Member, dbref=True) |  | ||||||
|  |  | ||||||
|         Member.drop_collection() |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         m1 = Member(user_num=1) |  | ||||||
|         m1.save() |  | ||||||
|         m2 = Member(user_num=2) |  | ||||||
|         m2.save() |  | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |  | ||||||
|         post1.save() |  | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |  | ||||||
|         post2.save() |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |  | ||||||
|         self.assertEqual(post.id, post1.id) |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |  | ||||||
|         self.assertEqual(post.id, post2.id) |  | ||||||
| @@ -1,271 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestSequenceField(MongoDBTestCase):
    """Behavioural tests for SequenceField: counter allocation, custom
    sequence names, value decorators, embedded documents, and inheritance.

    Counters live in the 'mongoengine.counters' collection under keys of the
    form '<sequence_name>.<field_name>'.
    """

    def test_sequence_field(self):
        """A SequenceField primary key allocates 1..N and honours
        set_next_value()."""
        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        # list(...) keeps the comparison valid on Python 3, where range()
        # is a lazy sequence, and is a no-op on Python 2.
        self.assertEqual(ids, list(range(1, 11)))

        # Reading documents back must not consume further sequence values.
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 1000)

    def test_sequence_field_get_next_value(self):
        """get_next_value() previews the next counter value and restarts
        at 1 (or '1' with a str decorator) after the counter is dropped."""
        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        self.assertEqual(Person.id.get_next_value(), 11)
        self.db['mongoengine.counters'].drop()

        self.assertEqual(Person.id.get_next_value(), 1)

        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        # With value_decorator=str the previewed value is decorated too.
        self.assertEqual(Person.id.get_next_value(), '11')
        self.db['mongoengine.counters'].drop()

        self.assertEqual(Person.id.get_next_value(), '1')

    def test_sequence_field_sequence_name(self):
        """An explicit sequence_name replaces the collection name in the
        counter key ('jelly.id' instead of 'person.id')."""
        class Person(Document):
            id = SequenceField(primary_key=True, sequence_name='jelly')
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, list(range(1, 11)))

        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 10)

        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 1000)

    def test_multiple_sequence_fields(self):
        """Two SequenceFields on one document keep independent counters."""
        class Person(Document):
            id = SequenceField(primary_key=True)
            counter = SequenceField()
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, list(range(1, 11)))

        counters = [i.counter for i in Person.objects]
        self.assertEqual(counters, list(range(1, 11)))

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        # set_next_value() on one field must not touch the other counter.
        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 1000)

        Person.counter.set_next_value(999)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'})
        self.assertEqual(c['next'], 999)

    def test_sequence_fields_reload(self):
        """reload() keeps the allocated value; clearing the field to None
        triggers allocation of the next value on access."""
        class Animal(Document):
            counter = SequenceField()
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Animal.drop_collection()

        a = Animal(name="Boi").save()

        self.assertEqual(a.counter, 1)
        a.reload()
        self.assertEqual(a.counter, 1)

        # Setting the field to None makes the next read allocate a fresh
        # value from the counter.
        a.counter = None
        self.assertEqual(a.counter, 2)
        a.save()

        self.assertEqual(a.counter, 2)

        a = Animal.objects.first()
        self.assertEqual(a.counter, 2)
        a.reload()
        self.assertEqual(a.counter, 2)

    def test_multiple_sequence_fields_on_docs(self):
        """SequenceFields on different documents use separate counters
        keyed by each document's collection name."""
        class Animal(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Animal.drop_collection()
        Person.drop_collection()

        for x in range(10):
            Animal(name="Animal %s" % x).save()
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, list(range(1, 11)))

        animal_ids = [i.id for i in Animal.objects]
        self.assertEqual(animal_ids, list(range(1, 11)))

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
        self.assertEqual(c['next'], 10)

    def test_sequence_field_value_decorator(self):
        """value_decorator transforms allocated values before assignment
        (here: ints become their string form)."""
        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            p = Person(name="Person %s" % x)
            p.save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        # A list comprehension instead of map(): on Python 3 map() returns
        # an iterator, which would never compare equal to a list.
        self.assertEqual(ids, [str(i) for i in range(1, 11)])

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

    def test_embedded_sequence_field(self):
        """A SequenceField inside an EmbeddedDocument draws from a single
        counter shared by all embedded instances."""
        class Comment(EmbeddedDocument):
            id = SequenceField()
            content = StringField(required=True)

        class Post(Document):
            title = StringField(required=True)
            comments = ListField(EmbeddedDocumentField(Comment))

        self.db['mongoengine.counters'].drop()
        Post.drop_collection()

        Post(title="MongoEngine",
             comments=[Comment(content="NoSQL Rocks"),
                       Comment(content="MongoEngine Rocks")]).save()
        c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'})
        self.assertEqual(c['next'], 2)
        post = Post.objects.first()
        self.assertEqual(1, post.comments[0].id)
        self.assertEqual(2, post.comments[1].id)

    def test_inherited_sequencefield(self):
        """A SequenceField declared on an abstract base shares one counter
        ('base.counter') across all concrete subclasses."""
        class Base(Document):
            name = StringField()
            counter = SequenceField()
            meta = {'abstract': True}

        class Foo(Base):
            pass

        class Bar(Base):
            pass

        bar = Bar(name='Bar')
        bar.save()

        foo = Foo(name='Foo')
        foo.save()

        counter_ids = self.db['mongoengine.counters'].find().distinct('_id')
        self.assertTrue('base.counter' in counter_ids)
        # BUG FIX: the original asserted ('foo.counter' or 'bar.counter'),
        # which short-circuits to 'foo.counter' and never checked
        # 'bar.counter'. Check both names explicitly.
        self.assertFalse('foo.counter' in counter_ids)
        self.assertFalse('bar.counter' in counter_ids)
        self.assertNotEqual(foo.counter, bar.counter)
        self.assertEqual(foo._fields['counter'].owner_document, Base)
        self.assertEqual(bar._fields['counter'].owner_document, Base)

    def test_no_inherited_sequencefield(self):
        """SequenceFields declared directly on sibling subclasses get
        per-class counters ('foo.counter' and 'bar.counter')."""
        class Base(Document):
            name = StringField()
            meta = {'abstract': True}

        class Foo(Base):
            counter = SequenceField()

        class Bar(Base):
            counter = SequenceField()

        bar = Bar(name='Bar')
        bar.save()

        foo = Foo(name='Foo')
        foo.save()

        counter_ids = self.db['mongoengine.counters'].find().distinct('_id')
        self.assertFalse('base.counter' in counter_ids)
        # BUG FIX: the original asserted ('foo.counter' and 'bar.counter'),
        # which evaluates to 'bar.counter' and never checked 'foo.counter'.
        # Check both names explicitly.
        self.assertTrue('foo.counter' in counter_ids)
        self.assertTrue('bar.counter' in counter_ids)
        self.assertEqual(foo.counter, bar.counter)
        self.assertEqual(foo._fields['counter'].owner_document, Foo)
        self.assertEqual(bar._fields['counter'].owner_document, Bar)
| @@ -1,59 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestURLField(MongoDBTestCase):
    """Validation behaviour of URLField: basic URLs, unicode hosts,
    scheme restrictions, and underscores in domain names."""

    def test_validation(self):
        """Ensure that URLFields validate urls properly."""
        class Link(Document):
            url = URLField()

        doc = Link()
        doc.url = 'google'
        self.assertRaises(ValidationError, doc.validate)

        doc.url = 'http://www.google.com:8080'
        doc.validate()

    def test_unicode_url_validation(self):
        """Ensure unicode URLs are validated properly."""
        class Link(Document):
            url = URLField()

        doc = Link()
        doc.url = u'http://привет.com'

        # TODO fix URL validation - this *IS* a valid URL
        # For now we just want to make sure that the error message is correct
        with self.assertRaises(ValidationError) as ctx_err:
            doc.validate()
        expected_msg = (
            u"ValidationError (Link:None) "
            u"(Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com"
            u": ['url'])"
        )
        self.assertEqual(unicode(ctx_err.exception), expected_msg)

    def test_url_scheme_validation(self):
        """Ensure that URLFields validate urls with specific schemes properly.
        """
        class Link(Document):
            url = URLField()

        class SchemeLink(Document):
            url = URLField(schemes=['ws', 'irc'])

        # The default scheme list does not include 'ws'.
        doc = Link()
        doc.url = 'ws://google.com'
        self.assertRaises(ValidationError, doc.validate)

        # A custom scheme list accepts the same URL.
        scheme_doc = SchemeLink()
        scheme_doc.url = 'ws://google.com'
        scheme_doc.validate()

    def test_underscore_allowed_in_domains_names(self):
        class Link(Document):
            url = URLField()

        doc = Link()
        doc.url = 'https://san_leandro-ca.geebo.com'
        doc.validate()
| @@ -1,65 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import uuid |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase, get_as_pymongo |  | ||||||
|  |  | ||||||
|  |  | ||||||
# Module-level fixture document shared by the UUIDField tests below.
class Person(Document):
    # binary=False: UUID values are persisted in their string form.
    api_key = UUIDField(binary=False)
|  |  | ||||||
|  |  | ||||||
class TestUUIDField(MongoDBTestCase):
    """Storage and validation behaviour of UUIDField in both its string
    (binary=False) and binary (binary=True) storage modes."""

    def test_storage(self):
        """With binary=False a UUID is persisted as its string form."""
        uid = uuid.uuid4()
        person = Person(api_key=uid).save()
        self.assertEqual(
            get_as_pymongo(person),
            {'_id': person.id,
             'api_key': str(uid)
             }
            )

    def test_field_string(self):
        """Test UUID fields storing as String
        """
        Person.drop_collection()

        uu = uuid.uuid4()
        Person(api_key=uu).save()
        self.assertEqual(1, Person.objects(api_key=uu).count())
        self.assertEqual(uu, Person.objects.first().api_key)

        person = Person()
        valid = (uuid.uuid4(), uuid.uuid1())
        for api_key in valid:
            person.api_key = api_key
            person.validate()

        # First value has a non-hex digit ('g'); second is one digit short.
        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
                   '9d159858-549b-4975-9f98-dd2f987c113')
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)

    def test_field_binary(self):
        """Test UUID fields storing as Binary object."""
        # BUG FIX: the module-level Person declares UUIDField(binary=False),
        # so this test previously exercised string storage a second time.
        # Use a local document that actually stores UUIDs in binary form.
        class BinaryPerson(Document):
            api_key = UUIDField(binary=True)

        BinaryPerson.drop_collection()

        uu = uuid.uuid4()
        BinaryPerson(api_key=uu).save()
        self.assertEqual(1, BinaryPerson.objects(api_key=uu).count())
        self.assertEqual(uu, BinaryPerson.objects.first().api_key)

        person = BinaryPerson()
        valid = (uuid.uuid4(), uuid.uuid1())
        for api_key in valid:
            person.api_key = api_key
            person.validate()

        # First value has a non-hex digit ('g'); second is one digit short.
        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
                   '9d159858-549b-4975-9f98-dd2f987c113')
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)
| @@ -48,7 +48,6 @@ class PickleSignalsTest(Document): | |||||||
|     def post_delete(self, sender, document, **kwargs): |     def post_delete(self, sender, document, **kwargs): | ||||||
|         pickled = pickle.dumps(document) |         pickled = pickle.dumps(document) | ||||||
|  |  | ||||||
|  |  | ||||||
| signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) | signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) | ||||||
| signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest) | signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest) | ||||||
|  |  | ||||||
|   | |||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user