Compare commits

182 commits:

1f7272d139, f6ba1ad788, 294d59c9bb, 759f72169a, 1f7135be61, 6942f9c1cf,
d9da75d1c0, 3503c98857, 708c3f1e2a, 6f645e8619, bce7ca7ac4, 350465c25d,
5b9c70ae22, 9b30afeca9, c1b202c119, 41cfe5d2ca, 05339e184f, 447127d956,
394334fbea, 9f8cd33d43, f066e28c35, b349a449bb, 1c5898d396, 6802967863,
0462f18680, af6699098f, 6b7e7dc124, 6bae4c6a66, 46da918dbe, bb7e5f17b5,
b9d03114c2, 436b1ce176, 50fb5d83f1, fda672f806, 2bf783b04d, 2f72b23a0d,
85336f9777, 174d964553, cf8677248e, 1e6a3163af, e008919978, 4814066c67,
f17f8b48c2, ab0aec0ac5, b49a641ba5, 2f50051426, 43cc32db40, b4d6f6b947,
71ff533623, e33a5bbef5, 6c0112c2be, 15bbf26b93, 87c97efce0, 6c4aee1479,
73549a9044, 30fdd3e184, c97eb5d63f, 5729c7d5e7, d77b13efcb, c43faca7b9,
892ddd5724, a9de779f33, 1c2f016ba0, 7b4d9140af, c1fc87ff4e, cd5ea5d4e0,
30c01089f5, 89825a2b21, a743b75bb4, f7ebf8dedd, f6220cab3b, 0c5e1c4138,
03fe431f1a, a8e4554fec, e81b09b9aa, c6e846e0ae, 03dcfb5c4b, 3e54da03e2,
c4b3196917, 0d81e7933e, b2a2735034, f865c5de90, 4159369e8b, 170693cf0b,
4e7b5d4af8, 67bf789fcf, f5cf616c2f, 7975f19817, 017602056d, c63f43854b,
5cc71ec2ad, 80e81f8475, 3685c8e015, 99e943c365, 21818e71f5, bcc6d25e21,
7b885ee0d3, c10e808a4f, 54e9be0ed8, 938cdf316a, 27c33911e6, e88f8759e7,
f2992e3165, c71fd1ee3b, fb45b19fdc, c4ea8d4942, 646aa131ef, 0adb40bf92,
17d6014bf1, ff57cd4eaf, 74bd7c3744, cfbb283f85, 74a3c4451b, be3643c962,
f4aa546af8, 67b876a7f4, 94e177c0ef, 1bd83cc9bc, ecda3f4a7d, 8f972a965d,
0f051fc57c, c3f8925f46, 5d0cab2052, 4d7492f682, fc9d99080f, 47ebac0276,
cb3fca03e9, abbbd83729, 1743ab7812, 324e3972a6, 1502dda2ab, f31b2c4a79,
89b9b60e0c, de9ba12779, 9cc4359c04, 67eaf120b9, b8353c4a33, 7013033ae4,
cb8cd03852, f63fb62014, 2e4fb86b86, 5e776a07dd, 81e637e50e, 0971ad0a80,
8267ded7ec, 7f36ea55f5, 72a051f2d3, 51b197888c, cd63865d31, 5be5685a09,
76b2f25d46, 58607d4a7f, c0a5b16a7f, 07442a6f84, 9544b7d968, babbc8bcd6,
5d9ec0b208, 1877cacf9c, 2f4978cfea, b85bb95082, db7f93cff3, 85e271098f,
17001e2f74, c82f4f0d45, 88247a3af9, 158578a406, 8bcbc6d545, ef55e6d476,
295ef3dc1d, e60d56f060, 328e062ae9, 0523c2ea4b, c5c7378c63, 0d4afad342,
eacb614750, 341e1e7a6d, 2f6890c78a, 857cd718df, c9dc441915, a7ca9950fc,
e0dd33e6be, 2e718e1130
.travis.yml (41 lines changed)

@@ -1,6 +1,5 @@
# http://travis-ci.org/#!/MongoEngine/mongoengine
language: python
services: mongodb
python:
- "2.6"
- "2.7"
@@ -8,26 +7,54 @@ python:
- "3.3"
- "3.4"
- "pypy"
- "pypy3"
env:
- PYMONGO=dev DJANGO=dev
- PYMONGO=dev DJANGO=1.6.5
- PYMONGO=dev DJANGO=1.5.8
- PYMONGO=2.7.1 DJANGO=dev
- PYMONGO=2.7.1 DJANGO=1.6.5
- PYMONGO=2.7.1 DJANGO=1.5.8
- PYMONGO=2.7.2 DJANGO=dev
- PYMONGO=2.7.2 DJANGO=1.6.5
- PYMONGO=2.7.2 DJANGO=1.5.8

matrix:
  exclude:
  - python: "2.6"
    env: PYMONGO=dev DJANGO=dev
  - python: "2.6"
    env: PYMONGO=2.7.1 DJANGO=dev
  - python: "2.6"
    env: PYMONGO=2.7.2 DJANGO=dev
  allow_failures:
  - python: "pypy3"
  fast_finish: true

before_install:
- "travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10"
- "echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | sudo tee /etc/apt/sources.list.d/mongodb.list"
- "travis_retry sudo apt-get update"
- "travis_retry sudo apt-get install mongodb-org-server"

install:
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk
- if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
- if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO; true; fi
- pip install Django==$DJANGO
- pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
- python setup.py install
- if [[ $PYMONGO == 'dev' ]]; then travis_retry pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi
- if [[ $PYMONGO != 'dev' ]]; then travis_retry pip install pymongo==$PYMONGO; true; fi
- if [[ $DJANGO == 'dev' ]]; then travis_retry pip install https://www.djangoproject.com/download/1.7c2/tarball/; fi
- if [[ $DJANGO != 'dev' ]]; then travis_retry pip install Django==$DJANGO; fi
- travis_retry pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
- travis_retry pip install coveralls
- travis_retry python setup.py install

script:
- python setup.py test
- travis_retry python setup.py test
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . -w; fi;
- coverage run --source=mongoengine setup.py test
- coverage report -m
- python benchmark.py
after_script:
  coveralls --verbose
notifications:
  irc: "irc.freenode.org#mongoengine"
branches:
AUTHORS (17 lines changed)

@@ -142,7 +142,7 @@ that much better:
* Pete Campton
* Martyn Smith
* Marcelo Anton
* Aleksey Porfirov
* Aleksey Porfirov (https://github.com/lexqt)
* Nicolas Trippar
* Manuel Hermann
* Gustavo Gawryszewski
@@ -196,4 +196,17 @@ that much better:
* Polyrabbit (https://github.com/polyrabbit)
* Sagiv Malihi (https://github.com/sagivmalihi)
* Dmitry Konishchev (https://github.com/KonishchevDmitry)
* Martyn Smith (https://github.com/martynsmith)
* Andrei Zbikowski (https://github.com/b1naryth1ef)
* Ronald van Rij (https://github.com/ronaldvanrij)
* François Schmidts (https://github.com/jaesivsm)
* Eric Plumb (https://github.com/professorplumb)
* Damien Churchill (https://github.com/damoxc)
* Aleksandr Sorokoumov (https://github.com/Gerrrr)
* Clay McClure (https://github.com/claymation)
* Bruno Rocha (https://github.com/rochacbruno)
* Norberto Leite (https://github.com/nleite)
* Bob Cribbs (https://github.com/bocribbz)
* Jay Shirley (https://github.com/jshirley)
* DavidBord (https://github.com/DavidBord)
* Axel Haustant (https://github.com/noirbizarre)
@@ -11,6 +11,10 @@ MongoEngine

.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master
  :target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master

.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.png
   :target: https://landscape.io/github/MongoEngine/mongoengine/master
   :alt: Code Health

About
=====
@@ -29,7 +33,7 @@ setup.py install``.

Dependencies
============
- pymongo>=2.5
- pymongo>=2.7.1
- sphinx (optional - for documentation generation)

Optional Dependencies
@@ -84,6 +84,7 @@ Fields
.. autoclass:: mongoengine.fields.MapField
.. autoclass:: mongoengine.fields.ReferenceField
.. autoclass:: mongoengine.fields.GenericReferenceField
.. autoclass:: mongoengine.fields.CachedReferenceField
.. autoclass:: mongoengine.fields.BinaryField
.. autoclass:: mongoengine.fields.FileField
.. autoclass:: mongoengine.fields.ImageField
@@ -94,6 +95,9 @@ Fields
.. autoclass:: mongoengine.fields.PointField
.. autoclass:: mongoengine.fields.LineStringField
.. autoclass:: mongoengine.fields.PolygonField
.. autoclass:: mongoengine.fields.MultiPointField
.. autoclass:: mongoengine.fields.MultiLineStringField
.. autoclass:: mongoengine.fields.MultiPolygonField
.. autoclass:: mongoengine.fields.GridFSError
.. autoclass:: mongoengine.fields.GridFSProxy
.. autoclass:: mongoengine.fields.ImageGridFsProxy
@@ -5,7 +5,33 @@ Changelog

Changes in 0.9.X - DEV
======================

- Allow specifying the '_cls' as a field for indexes #397
- Stop ensure_indexes running on secondaries unless connection is through mongos #746
- Not overriding default values when loading a subset of fields #399
- Saving a document doesn't create new fields in an existing collection #620
- Added `Queryset.aggregate` wrapper to the aggregation framework #703 (see the sketch after this list)
- Added support for showing original model field names in to_json calls instead of db_field names #697
- Added Queryset.search_text for text index searches #700
- Fixed tests for Django 1.7 #696
- Follow ReferenceFields in EmbeddedDocuments with select_related #690
- Added preliminary support for text indexes #680
- Added `elemMatch` operator as well - `match` is too obscure #653
- Added support for progressive JPEG #486 #548
- Allow strings to be used in index creation #675
- Fixed EmbeddedDoc weakref proxy issue #592
- Fixed nested reference field distinct error #583
- Fixed change tracking on nested MapFields #539
- Dynamic fields in embedded documents now visible to queryset.only() / qs.exclude() #425 #507
- Add authentication_source option to register_connection #178 #464 #573 #580 #590
- Implemented equality between Documents and DBRefs #597
- Fixed ReferenceField inside nested ListFields dereferencing problem #368
- Added the ability to reload specific document fields #100
- Added db_alias support and fixes for custom map/reduce output #586
- post_save signal now has access to delta information about field changes #594 #589
- Don't query with $orderby for qs.get() #600
- Fix id shard key save issue #636
- Fixes issue with recursive embedded document errors #557
- Fix clear_changed_fields() clearing unsaved documents bug #602
- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x.
- Removing support for Python < 2.6.6
- Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664
@@ -22,6 +48,12 @@ Changes in 0.9.X - DEV
- Workaround a dateutil bug #608
- Conditional save for atomic-style operations #511
- Allow dynamic dictionary-style field access #559
- Increase email field length to accommodate new TLDs #726
- index_cls is ignored when deciding to set _cls as index prefix #733
- Make 'db' argument to connection optional #737
- Allow atomic update for the entire `DictField` #742
- Added MultiPointField, MultiLineStringField, MultiPolygonField
- Fix multiple connections aliases being rewritten #748
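For the `Queryset.aggregate` entry above, a minimal sketch of how the wrapper
might be used — the `Order` model and the positional-stage call signature are
illustrative assumptions, not code from this changeset::

    from mongoengine import Document, StringField, IntField

    class Order(Document):
        customer = StringField()
        amount = IntField()

    # Pipeline stages are handed through to PyMongO's aggregation
    # framework and the result is iterated like a cursor.
    totals = list(Order.objects.aggregate(
        {'$group': {'_id': '$customer', 'total': {'$sum': '$amount'}}}))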
Changes in 0.8.7
================
@@ -35,8 +35,8 @@ in ::func:`~mongoengine.connect`
ReplicaSets
===========

MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`
to use them please use a URI style connection and provide the `replicaSet` name in the
MongoEngine supports :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`.
To use them, please use a URI style connection and provide the `replicaSet` name in the
connection kwargs.

Read preferences are supported through the connection or via individual
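As a hedged illustration of the URI-style replica set connection described
above (the host names, ports and the set name ``rs0`` are placeholders)::

    from mongoengine import connect

    # provide the replicaSet name via a URI-style connection
    connect(host='mongodb://host1:27017,host2:27017/mydb?replicaSet=rs0')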
@@ -4,7 +4,7 @@ Defining documents
In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When
working with relational databases, rows are stored in **tables**, which have a
strict **schema** that the rows follow. MongoDB stores documents in
**collections** rather than tables - the principle difference is that no schema
**collections** rather than tables - the principal difference is that no schema
is enforced at a database level.

Defining a document's schema
@@ -91,6 +91,12 @@ are as follows:
* :class:`~mongoengine.fields.StringField`
* :class:`~mongoengine.fields.URLField`
* :class:`~mongoengine.fields.UUIDField`
* :class:`~mongoengine.fields.PointField`
* :class:`~mongoengine.fields.LineStringField`
* :class:`~mongoengine.fields.PolygonField`
* :class:`~mongoengine.fields.MultiPointField`
* :class:`~mongoengine.fields.MultiLineStringField`
* :class:`~mongoengine.fields.MultiPolygonField`

Field arguments
---------------
@@ -459,7 +465,8 @@ by creating a list of index specifications called :attr:`indexes` in the
either be a single field name, a tuple containing multiple field names, or a
dictionary containing a full index definition. A direction may be specified on
fields by prefixing the field name with a **+** (for ascending) or a **-** sign
(for descending). Note that direction only matters on multi-field indexes. ::
(for descending). Note that direction only matters on multi-field indexes.
Text indexes may be specified by prefixing the field name with a **$**. ::

    class Page(Document):
        title = StringField()
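The ``Page`` example above is truncated by the hunk boundary; a fuller version
— a sketch assuming only the ``$`` prefix syntax just described — might read::

    from mongoengine import Document, StringField

    class Page(Document):
        title = StringField()

        meta = {
            # '$title' requests a text index on the title field
            'indexes': ['$title']
        }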
@@ -543,6 +550,9 @@ The following fields will explicitly add a "2dsphere" index:
- :class:`~mongoengine.fields.PointField`
- :class:`~mongoengine.fields.LineStringField`
- :class:`~mongoengine.fields.PolygonField`
- :class:`~mongoengine.fields.MultiPointField`
- :class:`~mongoengine.fields.MultiLineStringField`
- :class:`~mongoengine.fields.MultiPolygonField`

As "2dsphere" indexes can be part of a compound index, you may not want the
automatic index but would prefer a compound index. In this example we turn off
@@ -46,7 +46,7 @@ slightly different manner. First, a new file must be created by calling the
    marmot.photo.write('some_more_image_data')
    marmot.photo.close()

    marmot.photo.save()
    marmot.save()

Deletion
--------
@@ -12,3 +12,4 @@ User Guide
   querying
   gridfs
   signals
   text-indexes
@@ -500,11 +500,13 @@ that you may use with these methods:
* ``dec`` -- decrement a value by a given amount
* ``push`` -- append a value to a list
* ``push_all`` -- append several values to a list
* ``pop`` -- remove the first or last element of a list
* ``pop`` -- remove the first or last element of a list `depending on the value`_ (see the sketch below)
* ``pull`` -- remove a value from a list
* ``pull_all`` -- remove several values from a list
* ``add_to_set`` -- add value to a list only if it's not in the list already

.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/
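As a hedged illustration of the ``pop`` modifier listed above (``BlogPost``
and the ``tags`` list are borrowed from the surrounding examples)::

    >>> # pop__tags=1 removes the last element; pop__tags=-1 the first
    >>> BlogPost.objects(id=post.id).update(pop__tags=1)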
The syntax for atomic updates is similar to the querying syntax, but the
modifier comes before the field, not after it::

@@ -523,6 +525,13 @@ modifier comes before the field, not after it::
    >>> post.tags
    ['database', 'nosql']

.. note::

    If no modifier operator is specified the default will be ``$set``. So the following statements are identical::

        >>> BlogPost.objects(id=post.id).update(title='Example Post')
        >>> BlogPost.objects(id=post.id).update(set__title='Example Post')

.. note::

    In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates
docs/guide/text-indexes.rst (new file, 49 lines)
@@ -0,0 +1,49 @@
===========
Text Search
===========

MongoDB 2.4 and above supports searching documents with text indexes.


Defining a Document with a text index
=====================================
Use the *$* prefix to set a text index. Consider the following declaration::

    class News(Document):
        title = StringField()
        content = StringField()
        is_active = BooleanField()

        meta = {'indexes': [
            {'fields': ['$title', "$content"],
             'default_language': 'english',
             'weight': {'title': 10, 'content': 2}
            }
        ]}


Querying
========

Saving a document::

    News(title="Using mongodb text search",
         content="Testing text search").save()

    News(title="MongoEngine 0.9 released",
         content="Various improvements").save()

Next, start a text search using the :attr:`QuerySet.search_text` method::

    document = News.objects.search_text('testing').first()
    document.title  # may be: "Using mongodb text search"

    document = News.objects.search_text('released').first()
    document.title  # may be: "MongoEngine 0.9 released"


Ordering by text score
======================

::

    objects = News.objects.search_text('mongo').order_by('$text_score')
@@ -7,8 +7,7 @@ __all__ = ("BaseDict", "BaseList")


class BaseDict(dict):
    """A special dict so we can watch any changes
    """
    """A special dict so we can watch any changes"""

    _dereferenced = False
    _instance = None
@@ -23,29 +22,37 @@ class BaseDict(dict):
        self._name = name
        return super(BaseDict, self).__init__(dict_items)

    def __getitem__(self, *args, **kwargs):
        value = super(BaseDict, self).__getitem__(*args, **kwargs)
    def __getitem__(self, key, *args, **kwargs):
        value = super(BaseDict, self).__getitem__(key)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        elif not isinstance(value, BaseDict) and isinstance(value, dict):
            value = BaseDict(value, None, '%s.%s' % (self._name, key))
            super(BaseDict, self).__setitem__(key, value)
            value._instance = self._instance
        elif not isinstance(value, BaseList) and isinstance(value, list):
            value = BaseList(value, None, '%s.%s' % (self._name, key))
            super(BaseDict, self).__setitem__(key, value)
            value._instance = self._instance
        return value

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__setitem__(*args, **kwargs)
    def __setitem__(self, key, value, *args, **kwargs):
        self._mark_as_changed(key)
        return super(BaseDict, self).__setitem__(key, value)

    def __delete__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delete__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delitem__(*args, **kwargs)
    def __delitem__(self, key, *args, **kwargs):
        self._mark_as_changed(key)
        return super(BaseDict, self).__delitem__(key)

    def __delattr__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseDict, self).__delattr__(*args, **kwargs)
    def __delattr__(self, key, *args, **kwargs):
        self._mark_as_changed(key)
        return super(BaseDict, self).__delattr__(key)

    def __getstate__(self):
        self.instance = None
@@ -72,9 +79,12 @@ class BaseDict(dict):
        self._mark_as_changed()
        return super(BaseDict, self).update(*args, **kwargs)

    def _mark_as_changed(self):
    def _mark_as_changed(self, key=None):
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)
            if key:
                self._instance._mark_as_changed('%s.%s' % (self._name, key))
            else:
                self._instance._mark_as_changed(self._name)


class BaseList(list):
@@ -94,21 +104,35 @@ class BaseList(list):
        self._name = name
        return super(BaseList, self).__init__(list_items)

    def __getitem__(self, *args, **kwargs):
        value = super(BaseList, self).__getitem__(*args, **kwargs)
    def __getitem__(self, key, *args, **kwargs):
        value = super(BaseList, self).__getitem__(key)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
            value._instance = self._instance
        elif not isinstance(value, BaseDict) and isinstance(value, dict):
            value = BaseDict(value, None, '%s.%s' % (self._name, key))
            super(BaseList, self).__setitem__(key, value)
            value._instance = self._instance
        elif not isinstance(value, BaseList) and isinstance(value, list):
            value = BaseList(value, None, '%s.%s' % (self._name, key))
            super(BaseList, self).__setitem__(key, value)
            value._instance = self._instance
        return value

    def __setitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).__setitem__(*args, **kwargs)
    def __setitem__(self, key, value, *args, **kwargs):
        if isinstance(key, slice):
            self._mark_as_changed()
        else:
            self._mark_as_changed(key)
        return super(BaseList, self).__setitem__(key, value)

    def __delitem__(self, *args, **kwargs):
        self._mark_as_changed()
        return super(BaseList, self).__delitem__(*args, **kwargs)
    def __delitem__(self, key, *args, **kwargs):
        if isinstance(key, slice):
            self._mark_as_changed()
        else:
            self._mark_as_changed(key)
        return super(BaseList, self).__delitem__(key)

    def __setslice__(self, *args, **kwargs):
        self._mark_as_changed()
@@ -155,13 +179,16 @@ class BaseList(list):
        self._mark_as_changed()
        return super(BaseList, self).sort(*args, **kwargs)

    def _mark_as_changed(self):
    def _mark_as_changed(self, key=None):
        if hasattr(self._instance, '_mark_as_changed'):
            self._instance._mark_as_changed(self._name)
            if key:
                self._instance._mark_as_changed('%s.%s' % (self._name, key))
            else:
                self._instance._mark_as_changed(self._name)


class StrictDict(object):
    __slots__ = ()
    __slots__ = ()
    _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
    _classes = {}
    def __init__(self, **kwargs):
@@ -205,7 +232,7 @@ class StrictDict(object):
        return self.items() == other.items()
    def __neq__(self, other):
        return self.items() != other.items()

    @classmethod
    def create(cls, allowed_keys):
        allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys)
@@ -213,7 +240,9 @@ class StrictDict(object):
        if allowed_keys not in cls._classes:
            class SpecificStrictDict(cls):
                __slots__ = allowed_keys_tuple
            cls._classes[allowed_keys] = SpecificStrictDict
                def __repr__(self):
                    return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k, v) for (k, v) in self.iteritems())
            cls._classes[allowed_keys] = SpecificStrictDict
        return cls._classes[allowed_keys]


@@ -252,4 +281,3 @@ class SemiStrictDict(StrictDict):
        except AttributeError:
            extras_iter = ()
        return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
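The practical effect of the keyed ``_mark_as_changed`` calls above — a sketch
assuming a simple document with a ``DictField`` (the model and database name
are illustrative, not part of this changeset)::

    from mongoengine import Document, DictField, connect

    class Settings(Document):
        prefs = DictField()

    connect('example_db')  # hypothetical database name

    s = Settings(prefs={'theme': 'dark', 'lang': 'en'}).save()
    s.prefs['theme'] = 'light'
    # With per-key tracking only 'prefs.theme' is marked changed, so the
    # next save can $set the nested path instead of rewriting the dict.
    s.save()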
@@ -16,16 +16,17 @@ from mongoengine.errors import (ValidationError, InvalidDocumentError,
from mongoengine.python_support import PY3, txt_type

from mongoengine.base.common import get_document, ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict, SemiStrictDict
from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict, SemiStrictDict
from mongoengine.base.fields import ComplexBaseField

__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')

NON_FIELD_ERRORS = '__all__'


class BaseDocument(object):
    __slots__ = ('_changed_fields', '_initialised', '_created', '_data',
                 '_dynamic_fields', '_auto_id_field', '_db_field_map', '_cls', '__weakref__')
                 '_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__')

    _dynamic = False
    _dynamic_lock = True
@@ -50,25 +51,36 @@ class BaseDocument(object):
        for value in args:
            name = next(field)
            if name in values:
                raise TypeError("Multiple values for keyword argument '" + name + "'")
                raise TypeError(
                    "Multiple values for keyword argument '" + name + "'")
            values[name] = value
        __auto_convert = values.pop("__auto_convert", True)

        # 399: set default values only to fields loaded from DB
        __only_fields = set(values.pop("__only_fields", values))

        signals.pre_init.send(self.__class__, document=self, values=values)

        if self.STRICT and not self._dynamic:
            self._data = StrictDict.create(allowed_keys=self._fields.keys())()
            self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
        else:
            self._data = SemiStrictDict.create(allowed_keys=self._fields.keys())()

            self._data = SemiStrictDict.create(
                allowed_keys=self._fields_ordered)()

        _created = values.pop("_created", True)
        self._data = {}
        self._dynamic_fields = SON()

        # Assign default values to instance
        for key, field in self._fields.iteritems():
            if self._db_field_map.get(key, key) in values:
            if self._db_field_map.get(key, key) in __only_fields:
                continue
            value = getattr(self, key, None)
            setattr(self, key, value)

        if "_cls" not in values:
            self._cls = self._class_name

        # Set passed values after initialisation
        if self._dynamic:
            dynamic_data = {}
@@ -102,6 +114,7 @@ class BaseDocument(object):

        # Flag initialised
        self._initialised = True
        self._created = _created
        signals.post_init.send(self.__class__, document=self)

    def __delattr__(self, *args, **kwargs):
@@ -141,8 +154,8 @@ class BaseDocument(object):
            self__created = True

        if (self._is_document and not self__created and
            name in self._meta.get('shard_key', tuple()) and
            self._data.get(name) != value):
                name in self._meta.get('shard_key', tuple()) and
                self._data.get(name) != value):
            OperationError = _import_class('OperationError')
            msg = "Shard Keys are immutable. Tried to update %s" % name
            raise OperationError(msg)
@@ -150,11 +163,11 @@ class BaseDocument(object):
        try:
            self__initialised = self._initialised
        except AttributeError:
            self__initialised = False
            self__initialised = False
        # Check if the user has created a new instance of a class
        if (self._is_document and self__initialised
            and self__created and name == self._meta['id_field']):
            super(BaseDocument, self).__setattr__('_created', False)
                and self__created and name == self._meta['id_field']):
            super(BaseDocument, self).__setattr__('_created', False)

        super(BaseDocument, self).__setattr__(name, value)

@@ -171,7 +184,7 @@ class BaseDocument(object):
        if isinstance(data["_data"], SON):
            data["_data"] = self.__class__._from_son(data["_data"])._data
        for k in ('_changed_fields', '_initialised', '_created', '_data',
                  '_dynamic_fields'):
                  '_dynamic_fields'):
            if k in data:
                setattr(self, k, data[k])
        if '_fields_ordered' in data:
@@ -229,8 +242,9 @@ class BaseDocument(object):

    def __eq__(self, other):
        if isinstance(other, self.__class__) and hasattr(other, 'id'):
            if self.id == other.id:
                return True
            return self.id == other.id
        if isinstance(other, DBRef):
            return self._get_collection_name() == other.collection and self.id == other.id
        return False

    def __ne__(self, other):
@@ -253,21 +267,43 @@ class BaseDocument(object):
        """
        pass

    def to_mongo(self):
        """Return as SON data ready for use with MongoDB.
    def to_mongo(self, use_db_field=True, fields=[]):
        """
        Return as SON data ready for use with MongoDB.
        """
        data = SON()
        data["_id"] = None
        data['_cls'] = self._class_name
        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        # only root fields ['test1.a', 'test2'] => ['test1', 'test2']
        root_fields = set([f.split('.')[0] for f in fields])

        for field_name in self:
            if root_fields and field_name not in root_fields:
                continue

            value = self._data.get(field_name, None)
            field = self._fields.get(field_name)

            if field is None and self._dynamic:
                field = self._dynamic_fields.get(field_name)

            if value is not None:
                value = field.to_mongo(value)

                if isinstance(field, (EmbeddedDocumentField)):
                    if fields:
                        key = '%s.' % field_name
                        embedded_fields = [
                            i.replace(key, '') for i in fields
                            if i.startswith(key)]

                    else:
                        embedded_fields = []

                    value = field.to_mongo(value, use_db_field=use_db_field,
                                           fields=embedded_fields)
                else:
                    value = field.to_mongo(value)

            # Handle self generating fields
            if value is None and field._auto_gen:
@@ -275,7 +311,10 @@ class BaseDocument(object):
                self._data[field_name] = value

            if value is not None:
                data[field.db_field] = value
                if use_db_field:
                    data[field.db_field] = value
                else:
                    data[field.name] = value

        # If "_id" has not been set, then try and set it
        Document = _import_class("Document")
@@ -288,7 +327,7 @@ class BaseDocument(object):

        # Only add _cls if allow_inheritance is True
        if (not hasattr(self, '_meta') or
            not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
                not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
            data.pop('_cls')

        return data
@@ -310,7 +349,8 @@ class BaseDocument(object):
                  self._data.get(name)) for name in self._fields_ordered]

        EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
        GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField")
        GenericEmbeddedDocumentField = _import_class(
            "GenericEmbeddedDocumentField")

        for field, value in fields:
            if value is not None:
@@ -332,14 +372,18 @@ class BaseDocument(object):
            pk = "None"
            if hasattr(self, 'pk'):
                pk = self.pk
            elif self._instance:
            elif self._instance and hasattr(self._instance, 'pk'):
                pk = self._instance.pk
            message = "ValidationError (%s:%s) " % (self._class_name, pk)
            raise ValidationError(message, errors=errors)

    def to_json(self, *args, **kwargs):
        """Converts a document to JSON"""
        return json_util.dumps(self.to_mongo(), *args, **kwargs)
        """Converts a document to JSON.
        :param use_db_field: Set to True by default but enables the output of the json structure with the field names and not the mongodb store db_names in case of set to False
        """
        use_db_field = kwargs.pop('use_db_field') if kwargs.has_key(
            'use_db_field') else True
        return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)

    @classmethod
    def from_json(cls, json_data):
@@ -373,7 +417,7 @@ class BaseDocument(object):

        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
            not isinstance(value, BaseList)):
                not isinstance(value, BaseList)):
            value = BaseList(value, self, name)
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, self, name)
@@ -385,9 +429,18 @@ class BaseDocument(object):
        """
        if not key:
            return
        key = self._db_field_map.get(key, key)
        if (hasattr(self, '_changed_fields') and
                key not in self._changed_fields):

        if not hasattr(self, '_changed_fields'):
            return

        if '.' in key:
            key, rest = key.split('.', 1)
            key = self._db_field_map.get(key, key)
            key = '%s.%s' % (key, rest)
        else:
            key = self._db_field_map.get(key, key)

        if key not in self._changed_fields:
            self._changed_fields.append(key)

    def _clear_changed_fields(self):
@@ -407,6 +460,8 @@ class BaseDocument(object):
            else:
                data = getattr(data, part, None)
            if hasattr(data, "_changed_fields"):
                if hasattr(data, "_is_document") and data._is_document:
                    continue
                data._changed_fields = []
        self._changed_fields = []

@@ -420,12 +475,17 @@ class BaseDocument(object):

        for index, value in iterator:
            list_key = "%s%s." % (key, index)
            # don't check anything lower if this key is already marked
            # as changed.
            if list_key[:-1] in changed_fields:
                continue
            if hasattr(value, '_get_changed_fields'):
                changed = value._get_changed_fields(inspected)
                changed_fields += ["%s%s" % (list_key, k)
                                   for k in changed if k]
                                   for k in changed if k]
            elif isinstance(value, (list, tuple, dict)):
                self._nestable_types_changed_fields(changed_fields, list_key, value, inspected)
                self._nestable_types_changed_fields(
                    changed_fields, list_key, value, inspected)

    def _get_changed_fields(self, inspected=None):
        """Returns a list of all fields that have explicitly been changed.
@@ -435,6 +495,7 @@ class BaseDocument(object):
        ReferenceField = _import_class("ReferenceField")
        changed_fields = []
        changed_fields += getattr(self, '_changed_fields', [])

        inspected = inspected or set()
        if hasattr(self, 'id') and isinstance(self.id, Hashable):
            if self.id in inspected:
@@ -454,16 +515,17 @@ class BaseDocument(object):
            if isinstance(field, ReferenceField):
                continue
            elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
                  and db_field_name not in changed_fields):
                  and db_field_name not in changed_fields):
                # Find all embedded fields that have been changed
                changed = data._get_changed_fields(inspected)
                changed_fields += ["%s%s" % (key, k) for k in changed if k]
            elif (isinstance(data, (list, tuple, dict)) and
                  db_field_name not in changed_fields):
                if (hasattr(field, 'field') and
                    isinstance(field.field, ReferenceField)):
                        isinstance(field.field, ReferenceField)):
                    continue
                self._nestable_types_changed_fields(changed_fields, key, data, inspected)
                self._nestable_types_changed_fields(
                    changed_fields, key, data, inspected)
        return changed_fields

    def _delta(self):
@@ -487,7 +549,10 @@ class BaseDocument(object):
                    if isinstance(d, (ObjectId, DBRef)):
                        break
                    elif isinstance(d, list) and p.isdigit():
                        d = d[int(p)]
                        try:
                            d = d[int(p)]
                        except IndexError:
                            d = None
                    elif hasattr(d, 'get'):
                        d = d.get(p)
                    new_path.append(p)
@@ -506,7 +571,7 @@ class BaseDocument(object):
            # If we've set a value that ain't the default value dont unset it.
            default = None
            if (self._dynamic and len(parts) and parts[0] in
                self._dynamic_fields):
                    self._dynamic_fields):
                del(set_data[path])
                unset_data[path] = 1
                continue
@@ -552,7 +617,7 @@ class BaseDocument(object):
        return cls._meta.get('collection', None)

    @classmethod
    def _from_son(cls, son, _auto_dereference=True):
    def _from_son(cls, son, _auto_dereference=True, only_fields=[]):
        """Create an instance of a Document (subclass) from a PyMongo SON.
        """

@@ -589,21 +654,24 @@ class BaseDocument(object):
                        default = default()
                    if isinstance(default, BaseDocument):
                        changed_fields.append(field_name)
                    elif not only_fields or field_name in only_fields:
                        changed_fields.append(field_name)

        if errors_dict:
            errors = "\n".join(["%s - %s" % (k, v)
                                for k, v in errors_dict.items()])
                                for k, v in errors_dict.items()])
            msg = ("Invalid data to create a `%s` instance.\n%s"
                   % (cls._class_name, errors))
            raise InvalidDocumentError(msg)

        if cls.STRICT:
            data = dict((k, v) for k,v in data.iteritems() if k in cls._fields)
        obj = cls(__auto_convert=False, **data)
            data = dict((k, v)
                        for k, v in data.iteritems() if k in cls._fields)
        obj = cls(__auto_convert=False, _created=False, __only_fields=only_fields, **data)
        obj._changed_fields = changed_fields
        obj._created = False
        if not _auto_dereference:
            obj._fields = fields

        return obj

    @classmethod
@@ -653,6 +721,9 @@ class BaseDocument(object):
                                   ALLOW_INHERITANCE)
        include_cls = (allow_inheritance and not spec.get('sparse', False) and
                       spec.get('cls', True))

        # 733: don't include cls if index_cls is False unless there is an explicit cls with the index
        include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
        if "cls" in spec:
            spec.pop('cls')
        for key in spec['fields']:
@@ -660,15 +731,18 @@ class BaseDocument(object):
            if isinstance(key, (list, tuple)):
                continue

            # ASCENDING from +,
            # ASCENDING from +
            # DESCENDING from -
            # GEO2D from *
            # TEXT from $
            direction = pymongo.ASCENDING
            if key.startswith("-"):
                direction = pymongo.DESCENDING
            elif key.startswith("*"):
                direction = pymongo.GEO2D
            if key.startswith(("+", "-", "*")):
            elif key.startswith("$"):
                direction = pymongo.TEXT
            if key.startswith(("+", "-", "*", "$")):
                key = key[1:]

            # Use real field name, do it manually because we need field
@@ -679,8 +753,14 @@ class BaseDocument(object):
                fields = []
            else:
                fields = cls._lookup_field(parts)
                parts = [field if field == '_id' else field.db_field
                         for field in fields]
                parts = []
                for field in fields:
                    try:
                        if field != "_id":
                            field = field.db_field
                    except AttributeError:
                        pass
                    parts.append(field)
                key = '.'.join(parts)
            index_list.append((key, direction))

@@ -737,7 +817,7 @@ class BaseDocument(object):

            # Grab any embedded document field unique indexes
            if (field.__class__.__name__ == "EmbeddedDocumentField" and
                field.document_type != cls):
                    field.document_type != cls):
                field_namespace = "%s." % field_name
                doc_cls = field.document_type
                unique_indexes += doc_cls._unique_with_indexes(field_namespace)
@@ -753,7 +833,8 @@ class BaseDocument(object):
        geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
                                "PointField", "LineStringField", "PolygonField"]

        geo_field_types = tuple([_import_class(field) for field in geo_field_type_names])
        geo_field_types = tuple([_import_class(field)
                                 for field in geo_field_type_names])

        for field in cls._fields.values():
            if not isinstance(field, geo_field_types):
@@ -763,13 +844,14 @@ class BaseDocument(object):
                if field_cls in inspected:
                    continue
                if hasattr(field_cls, '_geo_indices'):
                    geo_indices += field_cls._geo_indices(inspected, parent_field=field.db_field)
                    geo_indices += field_cls._geo_indices(
                        inspected, parent_field=field.db_field)
            elif field._geo_index:
                field_name = field.db_field
                if parent_field:
                    field_name = "%s.%s" % (parent_field, field_name)
                geo_indices.append({'fields':
                                    [(field_name, field._geo_index)]})
                                    [(field_name, field._geo_index)]})
        return geo_indices

    @classmethod
@@ -817,8 +899,17 @@ class BaseDocument(object):
                    # Look up subfield on the previous field
                    new_field = field.lookup_member(field_name)
                    if not new_field and isinstance(field, ComplexBaseField):
                        fields.append(field_name)
                        continue
                        if hasattr(field.field, 'document_type') and cls._dynamic \
                                and field.field.document_type._dynamic:
                            DynamicField = _import_class('DynamicField')
                            new_field = DynamicField(db_field=field_name)
                        else:
                            fields.append(field_name)
                            continue
                    elif not new_field and hasattr(field, 'document_type') and cls._dynamic \
                            and field.document_type._dynamic:
                        DynamicField = _import_class('DynamicField')
                        new_field = DynamicField(db_field=field_name)
                    elif not new_field:
                        raise LookUpError('Cannot resolve field "%s"'
                                          % field_name)
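A short sketch of the ``use_db_field`` behaviour added to ``to_json`` above —
the ``Person`` model is illustrative, not part of this changeset::

    from mongoengine import Document, StringField

    class Person(Document):
        name = StringField(db_field='n')

    p = Person(name='Ada')
    p.to_json()                    # keys use db_field:   {"n": "Ada", ...}
    p.to_json(use_db_field=False)  # keys use field name: {"name": "Ada", ...}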
@@ -11,10 +11,12 @@ from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList

__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
__all__ = ("BaseField", "ComplexBaseField",
           "ObjectIdField", "GeoJsonBaseField")


class BaseField(object):

    """A base class for fields in a MongoDB document. Instances of this class
    may be added to subclasses of `Document` to define a document's schema.

@@ -43,7 +45,7 @@ class BaseField(object):
        :param required: If the field is required. Whether it has to have a
            value or not. Defaults to False.
        :param default: (optional) The default value for this field if no value
            has been set (or if the value has been unset). It Can be a
            has been set (or if the value has been unset). It can be a
            callable.
        :param unique: Is the field value unique or not. Defaults to False.
        :param unique_with: (optional) The other field this field should be
@@ -60,6 +62,7 @@ class BaseField(object):
            used when generating model forms from the document model.
        """
        self.db_field = (db_field or name) if not primary_key else '_id'

        if name:
            msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
            warnings.warn(msg, DeprecationWarning)
@@ -105,7 +108,7 @@ class BaseField(object):
        if instance._initialised:
            try:
                if (self.name not in instance._data or
                    instance._data[self.name] != value):
                        instance._data[self.name] != value):
                    instance._mark_as_changed(self.name)
            except:
                # Values cant be compared eg: naive and tz datetimes
@@ -113,7 +116,7 @@ class BaseField(object):
                instance._mark_as_changed(self.name)

        EmbeddedDocument = _import_class('EmbeddedDocument')
        if isinstance(value, EmbeddedDocument) and value._instance is None:
        if isinstance(value, EmbeddedDocument):
            value._instance = weakref.proxy(instance)
        instance._data[self.name] = value

@@ -175,6 +178,7 @@ class BaseField(object):


class ComplexBaseField(BaseField):

    """Handles complex fields, such as lists / dictionaries.

    Allows for nesting of embedded documents inside complex types.
@@ -197,7 +201,7 @@ class ComplexBaseField(BaseField):
        GenericReferenceField = _import_class('GenericReferenceField')
        dereference = (self._auto_dereference and
                       (self.field is None or isinstance(self.field,
                        (GenericReferenceField, ReferenceField))))
                                                         (GenericReferenceField, ReferenceField))))

        _dereference = _import_class("DeReference")()

@@ -212,7 +216,7 @@ class ComplexBaseField(BaseField):

        # Convert lists / values so we can watch for any changes on them
        if (isinstance(value, (list, tuple)) and
            not isinstance(value, BaseList)):
                not isinstance(value, BaseList)):
            value = BaseList(value, instance, self.name)
            instance._data[self.name] = value
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
@@ -220,8 +224,8 @@ class ComplexBaseField(BaseField):
            instance._data[self.name] = value

        if (self._auto_dereference and instance._initialised and
            isinstance(value, (BaseList, BaseDict))
            and not value._dereferenced):
                isinstance(value, (BaseList, BaseDict))
                and not value._dereferenced):
            value = _dereference(
                value, max_depth=1, instance=instance, name=self.name
            )
@@ -384,6 +388,7 @@ class ComplexBaseField(BaseField):


class ObjectIdField(BaseField):

    """A field wrapper around MongoDB's ObjectIds.
    """

@@ -412,6 +417,7 @@ class ObjectIdField(BaseField):


class GeoJsonBaseField(BaseField):

    """A geo json field storing a geojson style object.
    .. versionadded:: 0.8
    """
@@ -435,7 +441,8 @@ class GeoJsonBaseField(BaseField):
        if isinstance(value, dict):
            if set(value.keys()) == set(['type', 'coordinates']):
                if value['type'] != self._type:
                    self.error('%s type must be "%s"' % (self._name, self._type))
                    self.error('%s type must be "%s"' %
                               (self._name, self._type))
                return self.validate(value['coordinates'])
            else:
                self.error('%s can only accept a valid GeoJson dictionary'
@@ -450,7 +457,7 @@ class GeoJsonBaseField(BaseField):
        if error:
            self.error(error)

    def _validate_polygon(self, value):
    def _validate_polygon(self, value, top_level=True):
        if not isinstance(value, (list, tuple)):
            return 'Polygons must contain list of linestrings'

@@ -468,7 +475,10 @@ class GeoJsonBaseField(BaseField):
            if error and error not in errors:
                errors.append(error)
        if errors:
            return "Invalid Polygon:\n%s" % ", ".join(errors)
            if top_level:
                return "Invalid Polygon:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)

    def _validate_linestring(self, value, top_level=True):
        """Validates a linestring"""
@@ -502,6 +512,66 @@ class GeoJsonBaseField(BaseField):
                not isinstance(value[1], (float, int))):
            return "Both values (%s) in point must be float or int" % repr(value)

    def _validate_multipoint(self, value):
        if not isinstance(value, (list, tuple)):
            return 'MultiPoint must be a list of Point'

        # Quick and dirty validator
        try:
            value[0][0]
        except:
            return "Invalid MultiPoint must contain at least one valid point"

        errors = []
        for point in value:
            error = self._validate_point(point)
            if error and error not in errors:
                errors.append(error)

        if errors:
            return "%s" % ", ".join(errors)

    def _validate_multilinestring(self, value, top_level=True):
        if not isinstance(value, (list, tuple)):
            return 'MultiLineString must be a list of LineString'

        # Quick and dirty validator
        try:
            value[0][0][0]
        except:
            return "Invalid MultiLineString must contain at least one valid linestring"

        errors = []
        for linestring in value:
            error = self._validate_linestring(linestring, False)
            if error and error not in errors:
                errors.append(error)

        if errors:
            if top_level:
                return "Invalid MultiLineString:\n%s" % ", ".join(errors)
            else:
                return "%s" % ", ".join(errors)

    def _validate_multipolygon(self, value):
        if not isinstance(value, (list, tuple)):
            return 'MultiPolygon must be a list of Polygon'

        # Quick and dirty validator
        try:
            value[0][0][0][0]
        except:
            return "Invalid MultiPolygon must contain at least one valid Polygon"

        errors = []
        for polygon in value:
            error = self._validate_polygon(polygon, False)
            if error and error not in errors:
                errors.append(error)

        if errors:
            return "Invalid MultiPolygon:\n%s" % ", ".join(errors)

    def to_mongo(self, value):
        if isinstance(value, dict):
            return value
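A hedged usage sketch for the multi-geometry validators added above, using the
new ``MultiPointField`` (the model and coordinates are illustrative)::

    from mongoengine import Document, MultiPointField

    class Route(Document):
        stops = MultiPointField()

    # GeoJSON-style coordinates: a list of [longitude, latitude] pairs
    route = Route(stops=[[-87.6, 41.8], [-122.4, 37.7]])
    route.validate()  # exercises _validate_multipoint shown above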
@@ -16,6 +16,7 @@ __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')


class DocumentMetaclass(type):

    """Metaclass for all documents.
    """

@@ -29,6 +30,7 @@ class DocumentMetaclass(type):
            return super_new(cls, name, bases, attrs)

        attrs['_is_document'] = attrs.get('_is_document', False)
        attrs['_cached_reference_fields'] = []

        # EmbeddedDocuments could have meta data for inheritance
        if 'meta' in attrs:
@@ -45,6 +47,10 @@ class DocumentMetaclass(type):
                meta.merge(base._meta)
        attrs['_meta'] = meta

        if '_meta' in attrs and attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE):
            StringField = _import_class('StringField')
            attrs['_cls'] = StringField()

        # Handle document Fields

        # Merge all fields from subclasses
@@ -90,7 +96,7 @@ class DocumentMetaclass(type):
        # Set _fields and db_field maps
        attrs['_fields'] = doc_fields
        attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
                                      for k, v in doc_fields.iteritems()])
                                       for k, v in doc_fields.iteritems()])
        attrs['_reverse_db_field_map'] = dict(
            (v, k) for k, v in attrs['_db_field_map'].iteritems())

@@ -105,7 +111,7 @@ class DocumentMetaclass(type):
        class_name = [name]
        for base in flattened_bases:
            if (not getattr(base, '_is_base_cls', True) and
                not getattr(base, '_meta', {}).get('abstract', True)):
                    not getattr(base, '_meta', {}).get('abstract', True)):
                # Collate heirarchy for _cls and _subclasses
                class_name.append(base.__name__)

@@ -115,7 +121,7 @@ class DocumentMetaclass(type):
                allow_inheritance = base._meta.get('allow_inheritance',
                                                   ALLOW_INHERITANCE)
                if (allow_inheritance is not True and
                    not base._meta.get('abstract')):
                        not base._meta.get('abstract')):
                    raise ValueError('Document %s may not be subclassed' %
                                     base.__name__)

@@ -141,7 +147,8 @@ class DocumentMetaclass(type):
                    base._subclasses += (_cls,)
                base._types = base._subclasses  # TODO depreciate _types

        Document, EmbeddedDocument, DictField = cls._import_classes()
        (Document, EmbeddedDocument, DictField,
         CachedReferenceField) = cls._import_classes()

        if issubclass(new_class, Document):
            new_class._collection = None
@@ -170,6 +177,20 @@ class DocumentMetaclass(type):
                f = field
                f.owner_document = new_class
                delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
                if isinstance(f, CachedReferenceField):

                    if issubclass(new_class, EmbeddedDocument):
                        raise InvalidDocumentError(
                            "CachedReferenceFields is not allowed in EmbeddedDocuments")
                    if not f.document_type:
                        raise InvalidDocumentError(
                            "Document is not avaiable to sync")

                    if f.auto_sync:
                        f.start_listener()

                    f.document_type._cached_reference_fields.append(f)

                if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
                    delete_rule = getattr(f.field,
                                          'reverse_delete_rule',
@@ -191,7 +212,7 @@ class DocumentMetaclass(type):
                                             field.name, delete_rule)

                if (field.name and hasattr(Document, field.name) and
                    EmbeddedDocument not in new_class.mro()):
                        EmbeddedDocument not in new_class.mro()):
                    msg = ("%s is a document method and not a valid "
                           "field name" % field.name)
                    raise InvalidDocumentError(msg)
@@ -224,10 +245,12 @@ class DocumentMetaclass(type):
        Document = _import_class('Document')
        EmbeddedDocument = _import_class('EmbeddedDocument')
        DictField = _import_class('DictField')
        return (Document, EmbeddedDocument, DictField)
        CachedReferenceField = _import_class('CachedReferenceField')
        return (Document, EmbeddedDocument, DictField, CachedReferenceField)


class TopLevelDocumentMetaclass(DocumentMetaclass):

    """Metaclass for top-level documents (i.e. documents that have their own
    collection in the database.
    """
@@ -275,21 +298,21 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

        # Find the parent document class
        parent_doc_cls = [b for b in flattened_bases
                          if b.__class__ == TopLevelDocumentMetaclass]
                          if b.__class__ == TopLevelDocumentMetaclass]
        parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]

        # Prevent classes setting collection different to their parents
        # If parent wasn't an abstract class
        if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
            and not parent_doc_cls._meta.get('abstract', True)):
            msg = "Trying to set a collection on a subclass (%s)" % name
            warnings.warn(msg, SyntaxWarning)
            del(attrs['_meta']['collection'])
                and not parent_doc_cls._meta.get('abstract', True)):
            msg = "Trying to set a collection on a subclass (%s)" % name
            warnings.warn(msg, SyntaxWarning)
            del(attrs['_meta']['collection'])

        # Ensure abstract documents have abstract bases
        if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
            if (parent_doc_cls and
                not parent_doc_cls._meta.get('abstract', False)):
                    not parent_doc_cls._meta.get('abstract', False)):
                msg = "Abstract document cannot have non-abstract base"
                raise ValueError(msg)
            return super_new(cls, name, bases, attrs)
@@ -306,7 +329,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

            # Set collection in the meta if its callable
            if (getattr(base, '_is_document', False) and
                not base._meta.get('abstract')):
                    not base._meta.get('abstract')):
                collection = meta.get('collection', None)
                if callable(collection):
                    meta['collection'] = collection(base)
@@ -318,7 +341,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
        simple_class = all([b._meta.get('abstract')
                            for b in flattened_bases if hasattr(b, '_meta')])
        if (not simple_class and meta['allow_inheritance'] is False and
            not meta['abstract']):
                not meta['abstract']):
            raise ValueError('Only direct subclasses of Document may set '
                             '"allow_inheritance" to False')

@@ -378,7 +401,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
        for exc in exceptions_to_merge:
            name = exc.__name__
            parents = tuple(getattr(base, name) for base in flattened_bases
                            if hasattr(base, name)) or (exc,)
                            if hasattr(base, name)) or (exc,)
            # Create new exception and set to new_class
            exception = type(name, parents, {'__module__': module})
            setattr(new_class, name, exception)
@@ -387,6 +410,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):


class MetaDict(dict):

    """Custom dictionary for meta classes.
    Handles the merging of set indexes
    """
@@ -401,5 +425,6 @@ class MetaDict(dict):


class BasesTuple(tuple):

    """Special class to handle introspection of bases tuple in __new__"""
    pass
@@ -25,6 +25,7 @@ def _import_class(cls_name):
|
||||
'GenericEmbeddedDocumentField', 'GeoPointField',
|
||||
'PointField', 'LineStringField', 'ListField',
|
||||
'PolygonField', 'ReferenceField', 'StringField',
|
||||
'CachedReferenceField',
|
||||
'ComplexBaseField', 'GeoJsonBaseField')
|
||||
queryset_classes = ('OperationError',)
|
||||
deref_classes = ('DeReference',)
|
||||
|
@@ -1,6 +1,10 @@
import pymongo
from pymongo import MongoClient, MongoReplicaSetClient, uri_parser

try:
    import motor
except ImportError:
    motor = None

__all__ = ['ConnectionError', 'connect', 'register_connection',
           'DEFAULT_CONNECTION_NAME']
@@ -18,9 +22,11 @@ _connections = {}
_dbs = {}


def register_connection(alias, name, host=None, port=None,
                        is_slave=False, read_preference=False, slaves=None,
                        username=None, password=None, **kwargs):
def register_connection(alias, name=None, host=None, port=None,
                        read_preference=False,
                        username=None, password=None, authentication_source=None,
                        async=False,
                        **kwargs):
    """Add a connection.

    :param alias: the name that will be used to refer to this connection
@@ -28,28 +34,24 @@ def register_connection(alias, name, host=None, port=None,
    :param name: the name of the specific database to use
    :param host: the host name of the :program:`mongod` instance to connect to
    :param port: the port that the :program:`mongod` instance is running on
    :param is_slave: whether the connection can act as a slave
      ** Deprecated since pymongo 2.0.1+
    :param read_preference: The read preference for the collection
       ** Added in pymongo 2.1
    :param slaves: a list of aliases of slave connections; each of these must
        be a registered connection that has :attr:`is_slave` set to ``True``
    :param username: username to authenticate with
    :param password: password to authenticate with
    :param authentication_source: database to authenticate against
    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver

    """
    global _connection_settings

    conn_settings = {
        'name': name,
        'name': name or 'test',
        'host': host or 'localhost',
        'port': port or 27017,
        'is_slave': is_slave,
        'slaves': slaves or [],
        'read_preference': read_preference,
        'username': username,
        'password': password,
        'read_preference': read_preference
        'authentication_source': authentication_source,
        'async': async
    }

    # Handle uri style connections
@@ -64,6 +66,10 @@ def register_connection(alias, name, host=None, port=None,
    if "replicaSet" in conn_settings['host']:
        conn_settings['replicaSet'] = True

    # Deprecated parameters that should not be passed on
    kwargs.pop('slaves', None)
    kwargs.pop('is_slave', None)

    conn_settings.update(kwargs)
    _connection_settings[alias] = conn_settings

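The new async flag and authentication_source parameter change how connections are registered and created. A minimal usage sketch follows; it is illustrative only (the alias and database names are invented), and it assumes the optional Motor driver is installed for the async case:

# Hypothetical usage of the extended register_connection() shown above.
from mongoengine.connection import register_connection, get_connection

# Plain synchronous connection, authenticating against the 'admin' database.
register_connection('default', name='app_db',
                    username='app', password='secret',
                    authentication_source='admin')

# Asynchronous connection: with async=True, get_connection() should hand
# back a motor.MotorClient instead of a pymongo.MongoClient.
register_connection('async_db', name='app_db', async=True)
conn = get_connection('async_db')
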
@@ -94,12 +100,20 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
        conn_settings = _connection_settings[alias].copy()

        conn_settings.pop('name', None)
        conn_settings.pop('slaves', None)
        conn_settings.pop('is_slave', None)
        conn_settings.pop('username', None)
        conn_settings.pop('password', None)
        conn_settings.pop('authentication_source', None)
        async = conn_settings.pop('async')

        if async:
            if not motor:
                raise ImproperlyConfigured("Motor library was not found")

            connection_class = motor.MotorClient

        else:
            connection_class = MongoClient

        connection_class = MongoClient
        if 'replicaSet' in conn_settings:
            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
            # Discard port since it can't be used on MongoReplicaSetClient
@@ -107,24 +121,30 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
            # Discard replicaSet if not base string
            if not isinstance(conn_settings['replicaSet'], basestring):
                conn_settings.pop('replicaSet', None)
            connection_class = MongoReplicaSetClient

            if async:
                connection_class = motor.MotorReplicaSetClient
            else:
                connection_class = MongoReplicaSetClient

        try:
            connection = None
            connection_settings_iterator = ((alias, settings.copy()) for alias, settings in _connection_settings.iteritems())
            for alias, connection_settings in connection_settings_iterator:
                # check for shared connections
            connection_settings_iterator = (
                (db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
            for db_alias, connection_settings in connection_settings_iterator:
                connection_settings.pop('name', None)
                connection_settings.pop('slaves', None)
                connection_settings.pop('is_slave', None)
                connection_settings.pop('username', None)
                connection_settings.pop('password', None)
                if conn_settings == connection_settings and _connections.get(alias, None):
                    connection = _connections[alias]
                if conn_settings == connection_settings and _connections.get(db_alias, None):
                    connection = _connections[db_alias]
                    break

            _connections[alias] = connection if connection else connection_class(**conn_settings)
            _connections[alias] = connection if connection else connection_class(
                **conn_settings)
        except Exception, e:
            raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
            raise ConnectionError(
                "Cannot connect to database %s :\n%s" % (alias, e))
    return _connections[alias]


@@ -140,12 +160,13 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
        # Authenticate if necessary
        if conn_settings['username'] and conn_settings['password']:
            db.authenticate(conn_settings['username'],
                            conn_settings['password'])
                            conn_settings['password'],
                            source=conn_settings['authentication_source'])
        _dbs[alias] = db
    return _dbs[alias]


def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
    """Connect to the database specified by the 'db' argument.

    Connection settings may be provided here as well if the database is not

@@ -12,7 +12,7 @@ class DeReference(object):
    def __call__(self, items, max_depth=1, instance=None, name=None):
        """
        Cheaply dereferences the items to a set depth.
        Also handles the convertion of complex data types.
        Also handles the conversion of complex data types.

        :param items: The iterable (dict, list, queryset) to be dereferenced.
        :param max_depth: The maximum depth to recurse to
@@ -36,7 +36,7 @@ class DeReference(object):
        if instance and isinstance(instance, (Document, EmbeddedDocument,
                                              TopLevelDocumentMetaclass)):
            doc_type = instance._fields.get(name)
            if hasattr(doc_type, 'field'):
            while hasattr(doc_type, 'field'):
                doc_type = doc_type.field

            if isinstance(doc_type, ReferenceField):
@@ -51,9 +51,19 @@ class DeReference(object):
                    return items
                elif not field.dbref:
                    if not hasattr(items, 'items'):
                        items = [field.to_python(v)
                                 if not isinstance(v, (DBRef, Document)) else v
                                 for v in items]

                        def _get_items(items):
                            new_items = []
                            for v in items:
                                if isinstance(v, list):
                                    new_items.append(_get_items(v))
                                elif not isinstance(v, (DBRef, Document)):
                                    new_items.append(field.to_python(v))
                                else:
                                    new_items.append(v)
                            return new_items

                        items = _get_items(items)
                    else:
                        items = dict([
                            (k, field.to_python(v))
@@ -85,7 +95,7 @@ class DeReference(object):
        # Recursively find dbreferences
        depth += 1
        for k, item in iterator:
            if isinstance(item, Document):
            if isinstance(item, (Document, EmbeddedDocument)):
                for field_name, field in item._fields.iteritems():
                    v = item._data.get(field_name, None)
                    if isinstance(v, (DBRef)):
@@ -114,11 +124,11 @@ class DeReference(object):
        """Fetch all references and convert to their document objects
        """
        object_map = {}
        for col, dbrefs in self.reference_map.iteritems():
        for collection, dbrefs in self.reference_map.iteritems():
            keys = object_map.keys()
            refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys]))
            if hasattr(col, 'objects'):  # We have a document class for the refs
                references = col.objects.in_bulk(refs)
            if hasattr(collection, 'objects'):  # We have a document class for the refs
                references = collection.objects.in_bulk(refs)
                for key, doc in references.iteritems():
                    object_map[key] = doc
            else:  # Generic reference: use the refs data to convert to document
@@ -126,19 +136,19 @@ class DeReference(object):
                    continue

                if doc_type:
                    references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                    references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
                    for ref in references:
                        doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
                else:
                    references = get_db()[col].find({'_id': {'$in': refs}})
                    references = get_db()[collection].find({'_id': {'$in': refs}})
                    for ref in references:
                        if '_cls' in ref:
                            doc = get_document(ref["_cls"])._from_son(ref)
                        elif doc_type is None:
                            doc = get_document(
                                ''.join(x.capitalize()
                                        for x in col.split('_')))._from_son(ref)
                                        for x in collection.split('_')))._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
@@ -170,7 +180,11 @@ class DeReference(object):
                return self.object_map.get(items['_ref'].id, items)
            elif '_cls' in items:
                doc = get_document(items['_cls'])._from_son(items)
                _cls = doc._data.pop('_cls', None)
                del items['_cls']
                doc._data = self._attach_objects(doc._data, depth, doc, None)
                if _cls is not None:
                    doc._data['_cls'] = _cls
                return doc

        if not hasattr(items, 'items'):
@@ -192,7 +206,7 @@ class DeReference(object):

            if k in self.object_map and not is_list:
                data[k] = self.object_map[k]
            elif isinstance(v, Document):
            elif isinstance(v, (Document, EmbeddedDocument)):
                for field_name, field in v._fields.iteritems():
                    v = data[k]._data.get(field_name, None)
                    if isinstance(v, (DBRef)):
@@ -204,7 +218,8 @@ class DeReference(object):
            elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
                data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
            elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name)
                item_name = '%s.%s' % (name, k) if name else name
                data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
            elif hasattr(v, 'id'):
                data[k] = self.object_map.get(v.id, v)

@@ -1,39 +1,31 @@
#coding: utf-8
from nose.plugins.skip import SkipTest

from mongoengine.python_support import PY3
from unittest import TestCase

from mongoengine import connect

try:
    from django.test import TestCase
    from django.conf import settings
except Exception as err:
    if PY3:
        from unittest import TestCase
        # Dummy value so no error
        class settings:
            MONGO_DATABASE_NAME = 'dummy'
    else:
        raise err
from mongoengine.connection import get_db


class MongoTestCase(TestCase):

    def setUp(self):
        if PY3:
            raise SkipTest('django does not have Python 3 support')

    """
    TestCase class that clears the collections between the tests
    """
    db_name = 'test_%s' % settings.MONGO_DATABASE_NAME

    @property
    def db_name(self):
        from django.conf import settings
        return 'test_%s' % getattr(settings, 'MONGO_DATABASE_NAME', 'dummy')

    def __init__(self, methodName='runtest'):
        self.db = connect(self.db_name).get_db()
        connect(self.db_name)
        self.db = get_db()
        super(MongoTestCase, self).__init__(methodName)

    def _post_teardown(self):
        super(MongoTestCase, self)._post_teardown()
    def dropCollections(self):
        for collection in self.db.collection_names():
            if collection == 'system.indexes':
                continue
            self.db.drop_collection(collection)

    def tearDown(self):
        self.dropCollections()

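The rewritten MongoTestCase above resolves db_name lazily from Django settings and drops collections in tearDown() instead of Django's _post_teardown(). A minimal usage sketch, assuming a Django settings module with MONGO_DATABASE_NAME configured (the Person document is invented for illustration):

# Hypothetical test built on the MongoTestCase above.
from mongoengine import Document, StringField
from mongoengine.django.tests import MongoTestCase


class Person(Document):
    name = StringField()


class PersonTests(MongoTestCase):

    def test_create(self):
        Person(name='Ross').save()
        self.assertEqual(Person.objects.count(), 1)
        # tearDown() drops every non-system collection, so each
        # test method starts from an empty database.
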
@@ -41,6 +41,7 @@ class InvalidCollectionError(Exception):


class EmbeddedDocument(BaseDocument):

    """A :class:`~mongoengine.Document` that isn't stored in its own
    collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
    fields on :class:`~mongoengine.Document`\ s through the
@@ -54,12 +55,12 @@ class EmbeddedDocument(BaseDocument):
    `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
    dictionary.
    """

    __slots__ = ('_instance')

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = DocumentMetaclass
    __metaclass__ = DocumentMetaclass

    def __init__(self, *args, **kwargs):
@@ -69,7 +70,7 @@ class EmbeddedDocument(BaseDocument):

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self.to_mongo() == other.to_mongo()
            return self._data == other._data
        return False

    def __ne__(self, other):
@@ -77,6 +78,7 @@ class EmbeddedDocument(BaseDocument):


class Document(BaseDocument):

    """The base class used for defining the structure and properties of
    collections of documents stored in MongoDB. Inherit from this class, and
    add fields as class attributes to define a document's structure.
@@ -124,14 +126,15 @@ class Document(BaseDocument):

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = TopLevelDocumentMetaclass
    __metaclass__ = TopLevelDocumentMetaclass

    __slots__ = ('__objects')

    def pk():
        """Primary key alias
        """

        def fget(self):
            return getattr(self, self._meta['id_field'])

@@ -140,6 +143,13 @@ class Document(BaseDocument):
        return property(fget, fset)
    pk = pk()

    @property
    def text_score(self):
        """
        Used for text searches
        """
        return self._data.get('text_score')

    @classmethod
    def _get_db(cls):
        """Some Model using other db_alias"""
@@ -165,7 +175,7 @@ class Document(BaseDocument):
            if options.get('max') != max_documents or \
               options.get('size') != max_size:
                msg = (('Cannot create collection "%s" as a capped '
                        'collection as it already exists')
                       % cls._collection)
                raise InvalidCollectionError(msg)
        else:
@@ -282,9 +292,9 @@ class Document(BaseDocument):
                                  upsert=upsert, **write_concern)
                created = is_new_object(last_error)


        if cascade is None:
            cascade = self._meta.get('cascade', False) or cascade_kwargs is not None
            cascade = self._meta.get(
                'cascade', False) or cascade_kwargs is not None

        if cascade:
            kwargs = {
@@ -309,12 +319,12 @@ class Document(BaseDocument):
                raise NotUniqueError(message % unicode(err))
            raise OperationError(message % unicode(err))
        id_field = self._meta['id_field']
        if id_field not in self._meta.get('shard_key', []):
        if created or id_field not in self._meta.get('shard_key', []):
            self[id_field] = self._fields[id_field].to_python(object_id)

        signals.post_save.send(self.__class__, document=self, created=created)
        self._clear_changed_fields()
        self._created = False
        signals.post_save.send(self.__class__, document=self, created=created)
        return self

    def cascade_save(self, *args, **kwargs):
@@ -377,7 +387,8 @@ class Document(BaseDocument):
                del(query["_cls"])
            return self._qs.filter(**query).update_one(**kwargs)
        else:
            raise OperationError('attempt to update a document not yet saved')
            raise OperationError(
                'attempt to update a document not yet saved')

        # Need to add shard key to query, or you get an error
        return self._qs.filter(**self._object_key).update_one(**kwargs)
@@ -396,7 +407,8 @@ class Document(BaseDocument):
        signals.pre_delete.send(self.__class__, document=self)

        try:
            self._qs.filter(**self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
            self._qs.filter(
                **self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
        except pymongo.errors.OperationFailure, err:
            message = u'Could not delete document (%s)' % err.message
            raise OperationError(message)
@@ -463,27 +475,41 @@ class Document(BaseDocument):
        DeReference()([self], max_depth + 1)
        return self

    def reload(self, max_depth=1):
    def reload(self, *fields, **kwargs):
        """Reloads all attributes from the database.

        :param fields: (optional) args list of fields to reload
        :param max_depth: (optional) depth of dereferencing to follow

        .. versionadded:: 0.1.2
        .. versionchanged:: 0.6  Now chainable
        .. versionchanged:: 0.9  Can provide specific fields to reload
        """
        max_depth = 1
        if fields and isinstance(fields[0], int):
            max_depth = fields[0]
            fields = fields[1:]
        elif "max_depth" in kwargs:
            max_depth = kwargs["max_depth"]

        if not self.pk:
            raise self.DoesNotExist("Document does not exist")
        obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
            **self._object_key).limit(1).select_related(max_depth=max_depth)

            **self._object_key).only(*fields).limit(1
                                                    ).select_related(max_depth=max_depth)

        if obj:
            obj = obj[0]
        else:
            raise self.DoesNotExist("Document does not exist")

        for field in self._fields_ordered:
            setattr(self, field, self._reload(field, obj[field]))
            if not fields or field in fields:
                setattr(self, field, self._reload(field, obj[field]))

        self._changed_fields = obj._changed_fields
        self._created = False
        return obj
        return self

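The reworked reload() accepts an optional list of field names while staying backwards compatible with the positional max_depth form. A sketch of the new call styles; the Person document and database alias are illustrative only:

# Hypothetical usage of the field-limited reload() shown above.
from mongoengine import Document, StringField, IntField, connect

connect('example_db')  # assumes a local mongod; illustrative only


class Person(Document):
    name = StringField()
    age = IntField()


person = Person(name='Ross', age=30).save()
Person.objects(pk=person.pk).update_one(set__name='Rachel', set__age=31)

person.reload('name')       # only 'name' is fetched and refreshed
assert person.name == 'Rachel'
assert person.age == 30     # 'age' keeps its stale in-memory value

person.reload(max_depth=2)  # reload everything, dereferencing two levels
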
    def _reload(self, key, value):
        """Used by :meth:`~mongoengine.Document.reload` to ensure the
@@ -514,8 +540,8 @@ class Document(BaseDocument):
        object.
        """
        classes = [get_document(class_name)
                   for class_name in cls._subclasses
                   if class_name != cls.__name__] + [cls]
        documents = [get_document(class_name)
                     for class_name in document_cls._subclasses
                     if class_name != document_cls.__name__] + [document_cls]
@@ -537,7 +563,7 @@ class Document(BaseDocument):

    @classmethod
    def ensure_index(cls, key_or_list, drop_dups=False, background=False,
                     **kwargs):
        """Ensure that the given indexes are in place.

        :param key_or_list: a single index key or a list of index keys (to
@@ -568,7 +594,9 @@ class Document(BaseDocument):
        index_cls = cls._meta.get('index_cls', True)

        collection = cls._get_collection()
        if collection.read_preference > 1:
        # 746: when connection is via mongos, the read preference is not necessarily an indication that
        # this code runs on a secondary
        if not collection.is_mongos and collection.read_preference > 1:
            return

        # determine if an index which we are creating includes
@@ -592,7 +620,7 @@ class Document(BaseDocument):
        # If _cls is being used (for polymorphism), it needs an index,
        # only if another index doesn't begin with _cls
        if (index_cls and not cls_indexed and
                cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
            collection.ensure_index('_cls', background=background,
                                    **index_opts)

@@ -607,24 +635,25 @@ class Document(BaseDocument):

        # get all the base classes, subclasses and siblings
        classes = []

        def get_classes(cls):

            if (cls not in classes and
                    isinstance(cls, TopLevelDocumentMetaclass)):
                classes.append(cls)

            for base_cls in cls.__bases__:
                if (isinstance(base_cls, TopLevelDocumentMetaclass) and
                        base_cls != Document and
                        not base_cls._meta.get('abstract') and
                        base_cls._get_collection().full_name == cls._get_collection().full_name and
                        base_cls not in classes):
                    classes.append(base_cls)
                    get_classes(base_cls)
            for subclass in cls.__subclasses__():
                if (isinstance(base_cls, TopLevelDocumentMetaclass) and
                        subclass._get_collection().full_name == cls._get_collection().full_name and
                        subclass not in classes):
                    classes.append(subclass)
                    get_classes(subclass)

@@ -652,8 +681,8 @@ class Document(BaseDocument):
        if [(u'_id', 1)] not in indexes:
            indexes.append([(u'_id', 1)])
        if (cls._meta.get('index_cls', True) and
                cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
            indexes.append([(u'_cls', 1)])

        return indexes

@@ -664,7 +693,8 @@ class Document(BaseDocument):
        """

        required = cls.list_indexes()
        existing = [info['key'] for info in cls._get_collection().index_information().values()]
        existing = [info['key']
                    for info in cls._get_collection().index_information().values()]
        missing = [index for index in required if index not in existing]
        extra = [index for index in existing if index not in required]

@@ -682,6 +712,7 @@ class Document(BaseDocument):


class DynamicDocument(Document):

    """A Dynamic Document class allowing flexible, expandable and uncontrolled
    schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
    way as an ordinary document but has expando style properties. Any data
@@ -697,7 +728,7 @@ class DynamicDocument(Document):

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = TopLevelDocumentMetaclass
    __metaclass__ = TopLevelDocumentMetaclass

    _dynamic = True
@@ -713,6 +744,7 @@ class DynamicDocument(Document):


class DynamicEmbeddedDocument(EmbeddedDocument):

    """A Dynamic Embedded Document class allowing flexible, expandable and
    uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
    information about dynamic documents.
@@ -720,7 +752,7 @@ class DynamicEmbeddedDocument(EmbeddedDocument):

    # The __metaclass__ attribute is removed by 2to3 when running with Python3
    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
    my_metaclass = DocumentMetaclass
    __metaclass__ = DocumentMetaclass

    _dynamic = True
@@ -739,6 +771,7 @@ class DynamicEmbeddedDocument(EmbeddedDocument):


class MapReduceDocument(object):

    """A document returned from a map/reduce query.

    :param collection: An instance of :class:`~pymongo.Collection`
@@ -769,7 +802,7 @@ class MapReduceDocument(object):
            try:
                self.key = id_field_type(self.key)
            except:
                raise Exception("Could not cast key as %s" % \
                raise Exception("Could not cast key as %s" %
                                id_field_type.__name__)

        if not hasattr(self, "_key_object"):

@@ -34,22 +34,25 @@ except ImportError:
    Image = None
    ImageOps = None

__all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
           'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
           'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
           'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
           'SortedListField', 'DictField', 'MapField', 'ReferenceField',
           'GenericReferenceField', 'BinaryField', 'GridFSError',
           'GridFSProxy', 'FileField', 'ImageGridFsProxy',
           'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
           'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField',
           'GeoJsonBaseField']
__all__ = [
    'StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
    'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
    'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
    'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
    'SortedListField', 'DictField', 'MapField', 'ReferenceField',
    'CachedReferenceField', 'GenericReferenceField', 'BinaryField',
    'GridFSError', 'GridFSProxy', 'FileField', 'ImageGridFsProxy',
    'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
    'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField',
    'MultiPointField', 'MultiLineStringField', 'MultiPolygonField',
    'GeoJsonBaseField']


RECURSIVE_REFERENCE_CONSTANT = 'self'


class StringField(BaseField):

    """A unicode string field.
    """

@@ -109,6 +112,7 @@ class StringField(BaseField):


class URLField(StringField):

    """A field that validates input as a URL.

    .. versionadded:: 0.3
@@ -116,7 +120,8 @@ class URLField(StringField):

    _URL_REGEX = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        # domain...
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
@@ -145,15 +150,19 @@ class URLField(StringField):


class EmailField(StringField):

    """A field that validates input as an e-mail address.

    .. versionadded:: 0.4
    """

    EMAIL_REGEX = re.compile(
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"  # dot-atom
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'  # quoted-string
        r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}$', re.IGNORECASE  # domain
        # dot-atom
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
        # quoted-string
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'
        # domain (max length of an ICANN TLD is 22 characters)
        r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,22}$', re.IGNORECASE
    )

    def validate(self, value):
@@ -163,6 +172,7 @@ class EmailField(StringField):


class IntField(BaseField):

    """A 32-bit integer field.
    """

@@ -197,6 +207,7 @@ class IntField(BaseField):


class LongField(BaseField):

    """A 64-bit integer field.
    """

@@ -231,6 +242,7 @@ class LongField(BaseField):


class FloatField(BaseField):

    """A floating point number field.
    """

@@ -265,6 +277,7 @@ class FloatField(BaseField):


class DecimalField(BaseField):

    """A fixed-point decimal number field.

    .. versionchanged:: 0.8
@@ -311,7 +324,7 @@ class DecimalField(BaseField):
            return value
        return value.quantize(self.precision, rounding=self.rounding)

    def to_mongo(self, value):
    def to_mongo(self, value, use_db_field=True):
        if value is None:
            return value
        if self.force_string:
@@ -338,6 +351,7 @@ class DecimalField(BaseField):


class BooleanField(BaseField):

    """A boolean field type.

    .. versionadded:: 0.1.2
@@ -356,6 +370,7 @@ class BooleanField(BaseField):


class DateTimeField(BaseField):

    """A datetime field.

    Uses the python-dateutil library if available, otherwise falls back to time.strptime
@@ -406,15 +421,15 @@ class DateTimeField(BaseField):
            kwargs = {'microsecond': usecs}
        try:  # Seconds are optional, so try converting seconds first.
            return datetime.datetime(*time.strptime(value,
                                                    '%Y-%m-%d %H:%M:%S')[:6], **kwargs)
        except ValueError:
            try:  # Try without seconds.
                return datetime.datetime(*time.strptime(value,
                                                        '%Y-%m-%d %H:%M')[:5], **kwargs)
            except ValueError:  # Try without hour/minutes/seconds.
                try:
                    return datetime.datetime(*time.strptime(value,
                                                            '%Y-%m-%d')[:3], **kwargs)
                except ValueError:
                    return None

@@ -423,6 +438,7 @@ class DateTimeField(BaseField):


class ComplexDateTimeField(StringField):

    """
    ComplexDateTimeField handles microseconds exactly instead of rounding
    like DateTimeField does.
@@ -525,6 +541,7 @@ class ComplexDateTimeField(StringField):


class EmbeddedDocumentField(BaseField):

    """An embedded document field - with a declared document_type.
    Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
    """
@@ -551,10 +568,11 @@ class EmbeddedDocumentField(BaseField):
            return self.document_type._from_son(value)
        return value

    def to_mongo(self, value):
    def to_mongo(self, value, use_db_field=True, fields=[]):
        if not isinstance(value, self.document_type):
            return value
        return self.document_type.to_mongo(value)
        return self.document_type.to_mongo(value, use_db_field,
                                           fields=fields)

    def validate(self, value, clean=True):
        """Make sure that the document instance is an instance of the
@@ -574,6 +592,7 @@ class EmbeddedDocumentField(BaseField):


class GenericEmbeddedDocumentField(BaseField):

    """A generic embedded document field - allows any
    :class:`~mongoengine.EmbeddedDocument` to be stored.

@@ -601,17 +620,18 @@ class GenericEmbeddedDocumentField(BaseField):

        value.validate(clean=clean)

    def to_mongo(self, document):
    def to_mongo(self, document, use_db_field=True):
        if document is None:
            return None

        data = document.to_mongo()
        data = document.to_mongo(use_db_field)
        if not '_cls' in data:
            data['_cls'] = document._class_name
        return data


class DynamicField(BaseField):

    """A truly dynamic field type capable of handling different and varying
    types of data.

@@ -628,7 +648,7 @@ class DynamicField(BaseField):
            cls = value.__class__
            val = value.to_mongo()
            # If it's a document that's not inherited, add _cls
            if (isinstance(value, Document)):
                val = {"_ref": value.to_dbref(), "_cls": cls.__name__}
            if (isinstance(value, EmbeddedDocument)):
                val['_cls'] = cls.__name__
@@ -675,6 +695,7 @@ class DynamicField(BaseField):


class ListField(ComplexBaseField):

    """A list field that wraps a standard field, allowing multiple instances
    of the field to be used as a list in the database.

@@ -693,21 +714,22 @@ class ListField(ComplexBaseField):
        """Make sure that a list of valid fields is being used.
        """
        if (not isinstance(value, (list, tuple, QuerySet)) or
                isinstance(value, basestring)):
            self.error('Only lists and tuples may be used in a list field')
        super(ListField, self).validate(value)

    def prepare_query_value(self, op, value):
        if self.field:
            if op in ('set', 'unset') and (not isinstance(value, basestring)
                                           and not isinstance(value, BaseDocument)
                                           and hasattr(value, '__iter__')):
                return [self.field.prepare_query_value(op, v) for v in value]
            return self.field.prepare_query_value(op, value)
        return super(ListField, self).prepare_query_value(op, value)


class SortedListField(ListField):

    """A ListField that sorts the contents of its list before writing to
    the database in order to ensure that a sorted list is always
    retrieved.
@@ -739,6 +761,7 @@ class SortedListField(ListField):
                          reverse=self._order_reverse)
        return sorted(value, reverse=self._order_reverse)


def key_not_string(d):
    """Helper function to recursively determine if any key in a dictionary is
    not a string.
@@ -747,6 +770,7 @@ def key_not_string(d):
        if not isinstance(k, basestring) or (isinstance(v, dict) and key_not_string(v)):
            return True


def key_has_dot_or_dollar(d):
    """Helper function to recursively determine if any key in a dictionary
    contains a dot or a dollar sign.
@@ -755,7 +779,9 @@ def key_has_dot_or_dollar(d):
        if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)):
            return True


class DictField(ComplexBaseField):

    """A dictionary field that wraps a standard Python dictionary. This is
    similar to an embedded document, but the structure is not defined.

@@ -801,12 +827,17 @@ class DictField(ComplexBaseField):
            return StringField().prepare_query_value(op, value)

        if hasattr(self.field, 'field'):
            if op in ('set', 'unset') and isinstance(value, dict):
                return dict(
                    (k, self.field.prepare_query_value(op, v))
                    for k, v in value.items())
            return self.field.prepare_query_value(op, value)

        return super(DictField, self).prepare_query_value(op, value)


class MapField(DictField):

    """A field that maps a name to a specified field type. Similar to
    a DictField, except the 'value' of each item must match the specified
    field type.
@@ -822,6 +853,7 @@ class MapField(DictField):


class ReferenceField(BaseField):

    """A reference to a document that will be automatically dereferenced on
    access (lazily).

@@ -932,7 +964,7 @@ class ReferenceField(BaseField):
        """Convert a MongoDB-compatible type to a Python type.
        """
        if (not self.dbref and
                not isinstance(value, (DBRef, Document, EmbeddedDocument))):
            collection = self.document_type._get_collection_name()
            value = DBRef(collection, self.document_type.id.to_python(value))
        return value
@@ -955,7 +987,147 @@ class ReferenceField(BaseField):
        return self.document_type._fields.get(member_name)


class CachedReferenceField(BaseField):

    """
    A reference field that caches a subset of the referenced document's
    fields, to support pseudo-joins.
    .. versionadded:: 0.9
    """

    def __init__(self, document_type, fields=[], auto_sync=True, **kwargs):
        """Initialises the Cached Reference Field.

        :param fields:  A list of fields to be cached in the document
        :param auto_sync: if True, documents are auto updated.
        """
        if not isinstance(document_type, basestring) and \
                not issubclass(document_type, (Document, basestring)):

            self.error('Argument to CachedReferenceField constructor must be a'
                       ' document class or a string')

        self.auto_sync = auto_sync
        self.document_type_obj = document_type
        self.fields = fields
        super(CachedReferenceField, self).__init__(**kwargs)

    def start_listener(self):
        from mongoengine import signals
        signals.post_save.connect(self.on_document_pre_save,
                                  sender=self.document_type)

    def on_document_pre_save(self, sender, document, created, **kwargs):
        if not created:
            update_kwargs = dict(
                ('set__%s__%s' % (self.name, k), v)
                for k, v in document._delta()[0].items()
                if k in self.fields)

            if update_kwargs:
                filter_kwargs = {}
                filter_kwargs[self.name] = document

                self.owner_document.objects(
                    **filter_kwargs).update(**update_kwargs)

    def to_python(self, value):
        if isinstance(value, dict):
            collection = self.document_type._get_collection_name()
            value = DBRef(
                collection, self.document_type.id.to_python(value['_id']))

        return value

    @property
    def document_type(self):
        if isinstance(self.document_type_obj, basestring):
            if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
                self.document_type_obj = self.owner_document
            else:
                self.document_type_obj = get_document(self.document_type_obj)
        return self.document_type_obj

    def __get__(self, instance, owner):
        if instance is None:
            # Document class being used rather than a document object
            return self

        # Get value from document instance if available
        value = instance._data.get(self.name)
        self._auto_dereference = instance._fields[self.name]._auto_dereference
        # Dereference DBRefs
        if self._auto_dereference and isinstance(value, DBRef):
            value = self.document_type._get_db().dereference(value)
            if value is not None:
                instance._data[self.name] = self.document_type._from_son(value)

        return super(CachedReferenceField, self).__get__(instance, owner)

    def to_mongo(self, document):
        id_field_name = self.document_type._meta['id_field']
        id_field = self.document_type._fields[id_field_name]
        doc_type = self.document_type

        if isinstance(document, Document):
            # We need the id from the saved object to create the DBRef
            id_ = document.pk
            if id_ is None:
                self.error('You can only reference documents once they have'
                           ' been saved to the database')
        else:
            self.error('Only accepts a document object')

        value = SON((
            ("_id", id_field.to_mongo(id_)),
        ))

        value.update(dict(document.to_mongo(fields=self.fields)))
        return value

    def prepare_query_value(self, op, value):
        if value is None:
            return None

        if isinstance(value, Document):
            if value.pk is None:
                self.error('You can only reference documents once they have'
                           ' been saved to the database')
            return {'_id': value.pk}

        raise NotImplementedError

    def validate(self, value):

        if not isinstance(value, (self.document_type)):
            self.error("A CachedReferenceField only accepts documents")

        if isinstance(value, Document) and value.id is None:
            self.error('You can only reference documents once they have been '
                       'saved to the database')

    def lookup_member(self, member_name):
        return self.document_type._fields.get(member_name)

    def sync_all(self):
        """
        Sync all cached fields on demand.
        Caution: this operation may be slow.
        """
        update_key = 'set__%s' % self.name

        for doc in self.document_type.objects:
            filter_kwargs = {}
            filter_kwargs[self.name] = doc

            update_kwargs = {}
            update_kwargs[update_key] = doc

            self.owner_document.objects(
                **filter_kwargs).update(**update_kwargs)


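CachedReferenceField stores a chosen subset of the referenced document inside the referencing document, so listings can avoid a second query. A usage sketch under invented models (Author/Book are not from the source):

# Illustrative sketch of the CachedReferenceField introduced above.
from mongoengine import Document, StringField, connect
from mongoengine.fields import CachedReferenceField

connect('example_db')  # assumes a local mongod; illustrative only


class Author(Document):
    name = StringField()


class Book(Document):
    title = StringField()
    # Cache the author's name inside each Book document (a pseudo-join).
    author = CachedReferenceField(Author, fields=['name'])


author = Author(name='Umberto Eco').save()
Book(title='Baudolino', author=author).save()

# The raw Book document stores {'_id': ..., 'name': 'Umberto Eco'} under
# 'author'. With auto_sync=True (the default), saving a changed Author
# fires the post_save listener above, which rewrites the cached copy in
# every referencing Book; sync_all() does the same on demand.
author.name = 'U. Eco'
author.save()
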
class GenericReferenceField(BaseField):

    """A reference to *any* :class:`~mongoengine.document.Document` subclass
    that will be automatically dereferenced on access (lazily).

@@ -974,6 +1146,7 @@ class GenericReferenceField(BaseField):
            return self

        value = instance._data.get(self.name)

        self._auto_dereference = instance._fields[self.name]._auto_dereference
        if self._auto_dereference and isinstance(value, (dict, SON)):
            instance._data[self.name] = self.dereference(value)
@@ -1001,7 +1174,7 @@ class GenericReferenceField(BaseField):
            doc = doc_cls._from_son(doc)
        return doc

    def to_mongo(self, document):
    def to_mongo(self, document, use_db_field=True):
        if document is None:
            return None

@@ -1036,6 +1209,7 @@ class GenericReferenceField(BaseField):


class BinaryField(BaseField):

    """A binary data field.
    """

@@ -1056,7 +1230,7 @@ class BinaryField(BaseField):
        if not isinstance(value, (bin_type, txt_type, Binary)):
            self.error("BinaryField only accepts instances of "
                       "(%s, %s, Binary)" % (
                           bin_type.__name__, txt_type.__name__))

        if self.max_bytes is not None and len(value) > self.max_bytes:
            self.error('Binary value is too long')
@@ -1067,6 +1241,7 @@ class GridFSError(Exception):


class GridFSProxy(object):

    """Proxy object to handle writing and reading of files to and from GridFS

    .. versionadded:: 0.4
@@ -1121,7 +1296,8 @@ class GridFSProxy(object):
        return '<%s: %s>' % (self.__class__.__name__, self.grid_id)

    def __str__(self):
        name = getattr(self.get(), 'filename', self.grid_id) if self.get() else '(no file)'
        name = getattr(
            self.get(), 'filename', self.grid_id) if self.get() else '(no file)'
        return '<%s: %s>' % (self.__class__.__name__, name)

    def __eq__(self, other):
@@ -1135,7 +1311,8 @@ class GridFSProxy(object):
    @property
    def fs(self):
        if not self._fs:
            self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name)
            self._fs = gridfs.GridFS(
                get_db(self.db_alias), self.collection_name)
        return self._fs

    def get(self, id=None):
@@ -1209,6 +1386,7 @@ class GridFSProxy(object):


class FileField(BaseField):

    """A GridFS storage field.

    .. versionadded:: 0.4
@@ -1253,7 +1431,8 @@ class FileField(BaseField):
                pass

            # Create a new proxy object as we don't already have one
            instance._data[key] = self.get_proxy_obj(key=key, instance=instance)
            instance._data[key] = self.get_proxy_obj(
                key=key, instance=instance)
            instance._data[key].put(value)
        else:
            instance._data[key] = value
@@ -1291,11 +1470,13 @@ class FileField(BaseField):


class ImageGridFsProxy(GridFSProxy):

    """
    Proxy for ImageField

    versionadded: 0.6
    """

    def put(self, file_obj, **kwargs):
        """
        Insert an image into the database
@@ -1312,6 +1493,16 @@ class ImageGridFsProxy(GridFSProxy):
        except Exception, e:
            raise ValidationError('Invalid image: %s' % e)

        # Progressive JPEG
        progressive = img.info.get('progressive') or False

        if (kwargs.get('progressive') and
                isinstance(kwargs.get('progressive'), bool) and
                img_format == 'JPEG'):
            progressive = True
        else:
            progressive = False

        if (field.size and (img.size[0] > field.size['width'] or
                            img.size[1] > field.size['height'])):
            size = field.size
@@ -1331,7 +1522,8 @@ class ImageGridFsProxy(GridFSProxy):
            size = field.thumbnail_size

            if size['force']:
                thumbnail = ImageOps.fit(img, (size['width'], size['height']), Image.ANTIALIAS)
                thumbnail = ImageOps.fit(
                    img, (size['width'], size['height']), Image.ANTIALIAS)
            else:
                thumbnail = img.copy()
                thumbnail.thumbnail((size['width'],
@@ -1339,14 +1531,14 @@ class ImageGridFsProxy(GridFSProxy):
                                    Image.ANTIALIAS)

        if thumbnail:
            thumb_id = self._put_thumbnail(thumbnail, img_format)
            thumb_id = self._put_thumbnail(thumbnail, img_format, progressive)
        else:
            thumb_id = None

        w, h = img.size

        io = StringIO()
        img.save(io, img_format)
        img.save(io, img_format, progressive=progressive)
        io.seek(0)

        return super(ImageGridFsProxy, self).put(io,
@@ -1357,18 +1549,18 @@ class ImageGridFsProxy(GridFSProxy):
                                                 **kwargs)

    def delete(self, *args, **kwargs):
        # deletes thumbnail
        out = self.get()
        if out and out.thumbnail_id:
            self.fs.delete(out.thumbnail_id)

        return super(ImageGridFsProxy, self).delete(*args, **kwargs)

    def _put_thumbnail(self, thumbnail, format, **kwargs):
    def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
        w, h = thumbnail.size

        io = StringIO()
        thumbnail.save(io, format)
        thumbnail.save(io, format, progressive=progressive)
        io.seek(0)

        return self.fs.put(io, width=w,
@@ -1417,6 +1609,7 @@ class ImproperlyConfigured(Exception):


class ImageField(FileField):

    """
    An image file storage field.

@@ -1455,6 +1648,7 @@ class ImageField(FileField):


class SequenceField(BaseField):

    """Provides a sequential counter; see:
    http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers

@@ -1524,7 +1718,7 @@ class SequenceField(BaseField):
        data = collection.find_one({"_id": sequence_id})

        if data:
            return self.value_decorator(data['next']+1)
            return self.value_decorator(data['next'] + 1)

        return self.value_decorator(1)

@@ -1554,6 +1748,14 @@ class SequenceField(BaseField):

        return super(SequenceField, self).__set__(instance, value)

    def prepare_query_value(self, op, value):
        """
        This method is overridden in order to convert the query value into the
        required type. We need to do this in order to be able to successfully
        compare query values passed as strings; the base implementation returns
        the value as is.
        """
        return self.value_decorator(value)

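The new prepare_query_value() override means query values are passed through the field's value_decorator (int by default) before comparison. A sketch with an invented model:

# Hypothetical illustration of the SequenceField query coercion above.
from mongoengine import Document, SequenceField, StringField, connect

connect('example_db')  # assumes a local mongod; illustrative only


class Ticket(Document):
    counter = SequenceField()
    subject = StringField()


Ticket(subject='first').save()  # counter is allocated as 1

# Both queries should now match the same document, because the string
# '1' is coerced with value_decorator before being compared:
assert Ticket.objects(counter=1).first() == Ticket.objects(counter='1').first()
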
    def to_python(self, value):
        if value is None:
            value = self.generate()
@@ -1561,6 +1763,7 @@ class SequenceField(BaseField):


class UUIDField(BaseField):

    """A UUID field.

    .. versionadded:: 0.6
@@ -1613,12 +1816,13 @@ class UUIDField(BaseField):


class GeoPointField(BaseField):

    """A list storing a longitude and latitude coordinate.

    .. note:: this represents a generic point in a 2D plane and a legacy way of
        representing a geo point. It admits 2d indexes but not "2dsphere" indexes
        in MongoDB > 2.4 which are more natural for modeling geospatial points.
        See :ref:`geospatial-indexes`

    .. versionadded:: 0.4
    """
@@ -1633,13 +1837,16 @@ class GeoPointField(BaseField):
                       'of (x, y)')

        if not len(value) == 2:
            self.error("Value (%s) must be a two-dimensional point" % repr(value))
            self.error("Value (%s) must be a two-dimensional point" %
                       repr(value))
        elif (not isinstance(value[0], (float, int)) or
                not isinstance(value[1], (float, int))):
            self.error("Both values (%s) in point must be float or int" % repr(value))
            self.error(
                "Both values (%s) in point must be float or int" % repr(value))


class PointField(GeoJsonBaseField):

    """A GeoJSON field storing a longitude and latitude coordinate.

    The data is represented as:
@@ -1659,6 +1866,7 @@ class PointField(GeoJsonBaseField):


class LineStringField(GeoJsonBaseField):

    """A GeoJSON field storing a line of longitude and latitude coordinates.

    The data is represented as:
@@ -1677,6 +1885,7 @@ class LineStringField(GeoJsonBaseField):


class PolygonField(GeoJsonBaseField):

    """A GeoJSON field storing a polygon of longitude and latitude coordinates.

    The data is represented as:
@@ -1695,3 +1904,70 @@ class PolygonField(GeoJsonBaseField):
    .. versionadded:: 0.8
    """
    _type = "Polygon"


class MultiPointField(GeoJsonBaseField):

    """A GeoJSON field storing a list of Points.

    The data is represented as:

    .. code-block:: js

        { "type" : "MultiPoint" ,
          "coordinates" : [[x1, y1], [x2, y2]]}

    You can either pass a dict with the full information or a list
    to set the value.

    Requires mongodb >= 2.6
    .. versionadded:: 0.9
    """
    _type = "MultiPoint"


class MultiLineStringField(GeoJsonBaseField):

    """A GeoJSON field storing a list of LineStrings.

    The data is represented as:

    .. code-block:: js

        { "type" : "MultiLineString" ,
          "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
                           [[x1, y1], [x1, y1] ... [xn, yn]]]}

    You can either pass a dict with the full information or a list of points.

    Requires mongodb >= 2.6
    .. versionadded:: 0.9
    """
    _type = "MultiLineString"


class MultiPolygonField(GeoJsonBaseField):

    """A GeoJSON field storing a list of Polygons.

    The data is represented as:

    .. code-block:: js

        { "type" : "MultiPolygon" ,
          "coordinates" : [[
                [[x1, y1], [x1, y1] ... [xn, yn]],
                [[x1, y1], [x1, y1] ... [xn, yn]]
            ], [
                [[x1, y1], [x1, y1] ... [xn, yn]],
                [[x1, y1], [x1, y1] ... [xn, yn]]
            ]
          ]}

    You can either pass a dict with the full information or a list
    of Polygons.

    Requires mongodb >= 2.6
    .. versionadded:: 0.9
    """
    _type = "MultiPolygon"

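The three new multi-geometry fields round out the GeoJSON support. A sketch of how they might be declared and saved; the Region document and coordinates are invented:

# Illustrative sketch of the MultiPoint/MultiPolygon fields added above.
from mongoengine import Document, connect
from mongoengine.fields import MultiPointField, MultiPolygonField

connect('example_db')  # assumes a local mongod; illustrative only


class Region(Document):
    # Stored as {"type": "MultiPoint", "coordinates": [...]} in MongoDB.
    stations = MultiPointField()
    # A MultiPolygon is a list of polygons, each a list of rings.
    zones = MultiPolygonField()


Region(
    stations=[[-73.97, 40.77], [-73.98, 40.75]],
    zones=[[[[0, 0], [0, 1], [1, 1], [1, 0], [0, 0]]]],
).save()
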
@@ -7,6 +7,7 @@ import pprint
import re
import warnings

from bson import SON
from bson.code import Code
from bson import json_util
import pymongo
@@ -14,6 +15,7 @@ import pymongo.errors
from pymongo.common import validate_read_preference

from mongoengine import signals
from mongoengine.connection import get_db
from mongoengine.context_managers import switch_db
from mongoengine.common import _import_class
from mongoengine.base.common import get_document
@@ -37,6 +39,7 @@ RE_TYPE = type(re.compile(''))


class BaseQuerySet(object):

    """A set of results returned from a query. Wraps a MongoDB cursor,
    providing :class:`~mongoengine.Document` objects as the results.
    """
@@ -62,6 +65,8 @@ class BaseQuerySet(object):
        self._none = False
        self._as_pymongo = False
        self._as_pymongo_coerce = False
        self._search_text = None
        self._include_text_scores = False

        # If inheritance is allowed, only return instances and instances of
        # subclasses of the class being used
@@ -69,12 +74,14 @@ class BaseQuerySet(object):
            if len(self._document._subclasses) == 1:
                self._initial_query = {"_cls": self._document._subclasses[0]}
            else:
                self._initial_query = {"_cls": {"$in": self._document._subclasses}}
                self._initial_query = {
                    "_cls": {"$in": self._document._subclasses}}
            self._loaded_fields = QueryFieldList(always_include=['_cls'])
        self._cursor_obj = None
        self._limit = None
        self._skip = None
        self._hint = -1  # Using -1 as None is a valid value for hint
        self.only_fields = []

    def __call__(self, q_obj=None, class_check=True, slave_okay=False,
                 read_preference=None, **query):
@@ -145,11 +152,13 @@ class BaseQuerySet(object):
            if queryset._scalar:
                return queryset._get_scalar(
                    queryset._document._from_son(queryset._cursor[key],
                        _auto_dereference=self._auto_dereference))
                        _auto_dereference=self._auto_dereference,
                        only_fields=self.only_fields))

            if queryset._as_pymongo:
                return queryset._get_as_pymongo(queryset._cursor[key])
            return queryset._document._from_son(queryset._cursor[key],
                _auto_dereference=self._auto_dereference)
                _auto_dereference=self._auto_dereference, only_fields=self.only_fields)
        raise AttributeError

    def __iter__(self):
@@ -182,6 +191,36 @@ class BaseQuerySet(object):
        """
        return self.__call__(*q_objs, **query)

    def search_text(self, text, language=None, include_text_scores=False):
        """
        Start a text search, using text indexes.
        Requires MongoDB server version 2.6+.

        :param language: The language that determines the list of stop words
            for the search and the rules for the stemmer and tokenizer.
            If not specified, the search uses the default language of the index.
            For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.

        :param include_text_scores: If True, automatically add a text_score attribute to the Document.

        """
        queryset = self.clone()
        if queryset._search_text:
            raise OperationError(
                "It is not possible to use search_text two times.")

        query_kwargs = SON({'$search': text})
        if language:
            query_kwargs['$language'] = language

        queryset._query_obj &= Q(__raw__={'$text': query_kwargs})
        queryset._mongo_query = None
        queryset._cursor_obj = None
        queryset._search_text = text
        queryset._include_text_scores = include_text_scores

        return queryset
    def get(self, *q_objs, **query):
        """Retrieve the matching object raising
        :class:`~mongoengine.queryset.MultipleObjectsReturned` or
@@ -192,7 +231,7 @@ class BaseQuerySet(object):
        .. versionadded:: 0.3
        """
        queryset = self.clone()
        queryset = queryset.limit(2)
        queryset = queryset.order_by().limit(2)
        queryset = queryset.filter(*q_objs, **query)

        try:
@@ -320,10 +359,10 @@ class BaseQuerySet(object):
        try:
            ids = self._collection.insert(raw, **write_concern)
        except pymongo.errors.DuplicateKeyError, err:
            message = 'Could not save document (%s)';
            message = 'Could not save document (%s)'
            raise NotUniqueError(message % unicode(err))
        except pymongo.errors.OperationFailure, err:
            message = 'Could not save document (%s)';
            message = 'Could not save document (%s)'
            if re.match('^E1100[01] duplicate key', unicode(err)):
                # E11000 - duplicate key error index
                # E11001 - duplicate key on update
@@ -366,6 +405,7 @@ class BaseQuerySet(object):
            will force an fsync on the primary server.
        :param _from_doc_delete: True when called from document delete, in which
            case signals have already been triggered, so don't loop.
        :returns: the number of deleted documents
        """
        queryset = self.clone()
        doc = queryset._document
@@ -383,9 +423,11 @@ class BaseQuerySet(object):
            has_delete_signal) and not _from_doc_delete

        if call_document_delete:
            cnt = 0
            for doc in queryset:
                doc.delete(write_concern=write_concern)
                return
                cnt += 1
            return cnt

        delete_rules = doc._meta.get('delete_rules') or {}
        # Check for DENY rules before actually deleting/nullifying any other
@@ -406,7 +448,7 @@ class BaseQuerySet(object):
                    ref_q = document_cls.objects(**{field_name + '__in': self})
                    ref_q_count = ref_q.count()
                    if (doc != document_cls and ref_q_count > 0
                            or (doc == document_cls and ref_q_count > 0)):
                        ref_q.delete(write_concern=write_concern)
                elif rule == NULLIFY:
                    document_cls.objects(**{field_name + '__in': self}).update(
@@ -416,7 +458,8 @@ class BaseQuerySet(object):
                    write_concern=write_concern,
                    **{'pull_all__%s' % field_name: self})

        queryset._collection.remove(queryset._query, write_concern=write_concern)
        result = queryset._collection.remove(queryset._query, write_concern=write_concern)
        return result["n"]

    def update(self, upsert=False, multi=True, write_concern=None,
               full_result=False, **update):
@@ -513,7 +556,8 @@ class BaseQuerySet(object):
            raise OperationError("Conflicting parameters: remove and new")

        if not update and not upsert and not remove:
            raise OperationError("No update parameters, must either update or remove")
            raise OperationError(
                "No update parameters, must either update or remove")

        queryset = self.clone()
        query = queryset._query
@@ -531,10 +575,10 @@ class BaseQuerySet(object):

        if full_response:
            if result["value"] is not None:
                result["value"] = self._document._from_son(result["value"])
                result["value"] = self._document._from_son(result["value"], only_fields=self.only_fields)
        else:
            if result is not None:
                result = self._document._from_son(result)
                result = self._document._from_son(result, only_fields=self.only_fields)

        return result

@@ -569,13 +613,13 @@ class BaseQuerySet(object):
        if self._scalar:
            for doc in docs:
                doc_map[doc['_id']] = self._get_scalar(
                    self._document._from_son(doc))
                    self._document._from_son(doc, only_fields=self.only_fields))
        elif self._as_pymongo:
            for doc in docs:
                doc_map[doc['_id']] = self._get_as_pymongo(doc)
        else:
            for doc in docs:
                doc_map[doc['_id']] = self._document._from_son(doc)
                doc_map[doc['_id']] = self._document._from_son(doc, only_fields=self.only_fields)

        return doc_map

@@ -599,7 +643,7 @@ class BaseQuerySet(object):

        :param alias: The database alias

        .. versionadded:: 0.8
        .. versionadded:: 0.9
        """

        with switch_db(self._document, alias) as cls:
@@ -620,13 +664,15 @@ class BaseQuerySet(object):
        :class:`~mongoengine.queryset.base.BaseQuerySet` into another child class
        """
        if not isinstance(cls, BaseQuerySet):
            raise OperationError('%s is not a subclass of BaseQuerySet' % cls.__name__)
            raise OperationError(
                '%s is not a subclass of BaseQuerySet' % cls.__name__)

        copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj',
                      '_where_clause', '_loaded_fields', '_ordering', '_snapshot',
                      '_timeout', '_class_check', '_slave_okay', '_read_preference',
                      '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
                      '_limit', '_skip', '_hint', '_auto_dereference')
                      '_limit', '_skip', '_hint', '_auto_dereference',
                      '_search_text', '_include_text_scores', 'only_fields')

        for prop in copy_props:
            val = getattr(self, prop)
@@ -712,10 +758,16 @@ class BaseQuerySet(object):
        distinct = self._dereference(queryset._cursor.distinct(field), 1,
                                     name=field, instance=self._document)

        # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
        doc_field = getattr(self._document._fields.get(field), "field", None)
        # We may need to cast to the correct type eg.
        # ListField(EmbeddedDocumentField)
        doc_field = getattr(
            self._document._fields.get(field), "field", None)
        instance = getattr(doc_field, "document_type", False)
        if instance:
            EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
        GenericEmbeddedDocumentField = _import_class(
            'GenericEmbeddedDocumentField')
        if instance and isinstance(doc_field, (EmbeddedDocumentField,
                                               GenericEmbeddedDocumentField)):
            distinct = [instance(**doc) for doc in distinct]
        return distinct

@@ -738,6 +790,7 @@ class BaseQuerySet(object):
        .. versionchanged:: 0.5 - Added subfield support
        """
        fields = dict([(f, QueryFieldList.ONLY) for f in fields])
        self.only_fields = fields.keys()
        return self.fields(True, **fields)
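A sketch of what threading `only_fields` through `_from_son` is for (the `Person` model is illustrative): fields skipped by `only()` should not be re-populated from their declared defaults and silently written back on save.

.. code-block:: python

    class Person(Document):
        name = StringField()
        age = IntField(default=30)

    Person(name="Ross", age=42).save()

    # Only 'name' is loaded; 'age' stays out of the document's data
    p = Person.objects.only('name').first()
    p.name = "Monica"
    p.save()

    # 'age' keeps its stored value (42), not the default (30)
    assert Person.objects.first().age == 42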
    def exclude(self, *fields):
@@ -794,7 +847,8 @@ class BaseQuerySet(object):
        for value, group in itertools.groupby(fields, lambda x: x[1]):
            fields = [field for field, value in group]
            fields = queryset._fields_to_dbfields(fields)
            queryset._loaded_fields += QueryFieldList(fields, value=value, _only_called=_only_called)
            queryset._loaded_fields += QueryFieldList(
                fields, value=value, _only_called=_only_called)

        return queryset

@@ -924,10 +978,35 @@ class BaseQuerySet(object):
    def from_json(self, json_data):
        """Converts json data to unsaved objects"""
        son_data = json_util.loads(json_data)
        return [self._document._from_son(data) for data in son_data]
        return [self._document._from_son(data, only_fields=self.only_fields) for data in son_data]

    def aggregate(self, *pipeline, **kwargs):
        """
        Perform an aggregate function based on your queryset params

        :param pipeline: list of aggregation commands,
            see: http://docs.mongodb.org/manual/core/aggregation-pipeline/

        .. versionadded:: 0.9
        """
        initial_pipeline = []

        if self._query:
            initial_pipeline.append({'$match': self._query})

        if self._ordering:
            initial_pipeline.append({'$sort': dict(self._ordering)})

        if self._limit is not None:
            initial_pipeline.append({'$limit': self._limit})

        if self._skip is not None:
            initial_pipeline.append({'$skip': self._skip})

        pipeline = initial_pipeline + list(pipeline)

        return self._collection.aggregate(pipeline, cursor={}, **kwargs)
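An illustrative call, reusing the `Person` sketch above; the queryset's own filter, ordering, skip and limit are prepended as pipeline stages before the user-supplied ones:

.. code-block:: python

    # The filter becomes an initial $match stage, so $group only sees adults
    cursor = Person.objects(age__gte=18).aggregate(
        {'$group': {'_id': None, 'avg_age': {'$avg': '$age'}}})
    for row in cursor:
        print row  # e.g. {u'_id': None, u'avg_age': 36.0}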
    # JS functionality

    def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None,
                   scope=None):
        """Perform a map/reduce query using the current query spec
@@ -1008,7 +1087,35 @@ class BaseQuerySet(object):
            map_reduce_function = 'inline_map_reduce'
        else:
            map_reduce_function = 'map_reduce'
            mr_args['out'] = output

            if isinstance(output, basestring):
                mr_args['out'] = output

            elif isinstance(output, dict):
                ordered_output = []

                for part in ('replace', 'merge', 'reduce'):
                    value = output.get(part)
                    if value:
                        ordered_output.append((part, value))
                        break
                else:
                    raise OperationError("actionData not specified for output")

                db_alias = output.get('db_alias')
                remaining_args = ['db', 'sharded', 'nonAtomic']

                if db_alias:
                    ordered_output.append(('db', get_db(db_alias).name))
                    del remaining_args[0]

                for part in remaining_args:
                    value = output.get(part)
                    if value:
                        ordered_output.append((part, value))

                mr_args['out'] = SON(ordered_output)

        results = getattr(queryset._collection, map_reduce_function)(
            map_f, reduce_f, **mr_args)
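A hedged sketch of the dict form of `output` this block parses; the `BlogPost` model, collection name and alias are made up, while the keys mirror MongoDB's out-document options:

.. code-block:: python

    map_f = "function () { emit(this.tag, 1); }"
    reduce_f = "function (key, values) { return Array.sum(values); }"

    results = BlogPost.objects.map_reduce(
        map_f, reduce_f,
        output={'merge': 'tag_counts',    # exactly one of replace/merge/reduce
                'db_alias': 'reporting',  # optional, resolved via get_db()
                'nonAtomic': True})       # forwarded as-is to the server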
@@ -1223,7 +1330,7 @@ class BaseQuerySet(object):
        if self._as_pymongo:
            return self._get_as_pymongo(raw_doc)
        doc = self._document._from_son(raw_doc,
            _auto_dereference=self._auto_dereference)
            _auto_dereference=self._auto_dereference, only_fields=self.only_fields)
        if self._scalar:
            return self._get_scalar(doc)

@@ -1258,6 +1365,13 @@ class BaseQuerySet(object):
            cursor_args['slave_okay'] = self._slave_okay
        if self._loaded_fields:
            cursor_args['fields'] = self._loaded_fields.as_dict()

        if self._include_text_scores:
            if 'fields' not in cursor_args:
                cursor_args['fields'] = {}

            cursor_args['fields']['text_score'] = {'$meta': "textScore"}

        return cursor_args

    @property
@@ -1299,8 +1413,11 @@ class BaseQuerySet(object):
    def _query(self):
        if self._mongo_query is None:
            self._mongo_query = self._query_obj.to_query(self._document)
            if self._class_check:
                self._mongo_query.update(self._initial_query)
            if self._class_check and self._initial_query:
                if "_cls" in self._mongo_query:
                    self._mongo_query = {"$and": [self._initial_query, self._mongo_query]}
                else:
                    self._mongo_query.update(self._initial_query)
        return self._mongo_query
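The practical effect of the `$and` merge, sketched with an illustrative inherited pair of documents (the query output shown is approximate):

.. code-block:: python

    class Animal(Document):
        meta = {'allow_inheritance': True}

    class Dog(Animal):
        pass

    # A raw filter that names _cls used to be clobbered by the automatic
    # class check; now both conditions survive under $and:
    Animal.objects(__raw__={'_cls': 'Animal.Dog'})._query
    # roughly: {'$and': [{'_cls': {'$in': ('Animal', 'Animal.Dog')}},
    #                    {'_cls': 'Animal.Dog'}]}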
    @property
@@ -1466,6 +1583,13 @@ class BaseQuerySet(object):
        for key in keys:
            if not key:
                continue

            if key == '$text_score':
                # automatically set to include text scores
                self._include_text_scores = True
                key_list.append(('text_score', {'$meta': "textScore"}))
                continue

            direction = pymongo.ASCENDING
            if key[0] == '-':
                direction = pymongo.DESCENDING
@@ -11,22 +11,22 @@ __all__ = ('query', 'update')


COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
                        'all', 'size', 'exists', 'not')
COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
                        'all', 'size', 'exists', 'not', 'elemMatch')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
                 'within_box', 'within_polygon', 'near', 'near_sphere',
                 'max_distance', 'geo_within', 'geo_within_box',
                 'geo_within_polygon', 'geo_within_center',
                 'geo_within_sphere', 'geo_intersects')
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
                    'istartswith', 'endswith', 'iendswith',
                    'exact', 'iexact')
CUSTOM_OPERATORS = ('match',)
MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
                   STRING_OPERATORS + CUSTOM_OPERATORS)

UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
                    'push_all', 'pull', 'pull_all', 'add_to_set',
                    'set_on_insert')
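For context, `match` (and the new `elemMatch` spelling added above) compiles to `$elemMatch`, which requires all conditions to hold on the same list element; a sketch with illustrative models:

.. code-block:: python

    class Comment(EmbeddedDocument):
        author = StringField()
        votes = IntField()

    class Post(Document):
        comments = ListField(EmbeddedDocumentField(Comment))

    Post(comments=[Comment(author='ross', votes=7),
                   Comment(author='demi', votes=2)]).save()

    # Matches only if a single comment satisfies both conditions at once
    Post.objects(comments__match={'author': 'ross', 'votes': 7})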
def query(_doc_cls=None, _field_operation=False, **query):
@@ -60,14 +60,20 @@ def query(_doc_cls=None, _field_operation=False, **query):
        raise InvalidQueryError(e)
    parts = []

    CachedReferenceField = _import_class('CachedReferenceField')

    cleaned_fields = []
    for field in fields:
        append_field = True
        if isinstance(field, basestring):
            parts.append(field)
            append_field = False
        # is last and CachedReferenceField
        elif isinstance(field, CachedReferenceField) and fields[-1] == field:
            parts.append('%s._id' % field.db_field)
        else:
            parts.append(field.db_field)

        if append_field:
            cleaned_fields.append(field)

@@ -79,13 +85,17 @@ def query(_doc_cls=None, _field_operation=False, **query):
        if op in singular_ops:
            if isinstance(field, basestring):
                if (op in STRING_OPERATORS and
                        isinstance(value, basestring)):
                    StringField = _import_class('StringField')
                    value = StringField.prepare_query_value(op, value)
                else:
                    value = field
            else:
                value = field.prepare_query_value(op, value)

                if isinstance(field, CachedReferenceField) and value:
                    value = value['_id']

        elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
            # 'in', 'nin' and 'all' require a list of values
            value = [field.prepare_query_value(op, v) for v in value]
@@ -95,7 +105,7 @@ def query(_doc_cls=None, _field_operation=False, **query):
        if op in GEO_OPERATORS:
            value = _geo_operator(field, op, value)
        elif op in CUSTOM_OPERATORS:
            if op == 'match':
            if op in ('elem_match', 'match'):
                value = field.prepare_query_value(op, value)
                value = {"$elemMatch": value}
            else:
@@ -117,19 +127,26 @@ def query(_doc_cls=None, _field_operation=False, **query):
            mongo_query[key].update(value)
            # $maxDistance needs to come last - convert to SON
            value_dict = mongo_query[key]
            if ('$maxDistance' in value_dict and '$near' in value_dict and
                    isinstance(value_dict['$near'], dict)):

            if ('$maxDistance' in value_dict and '$near' in value_dict):
                value_son = SON()
                for k, v in value_dict.iteritems():
                    if k == '$maxDistance':
                        continue
                    value_son[k] = v
                if (get_connection().max_wire_version <= 1):
                    value_son['$maxDistance'] = value_dict['$maxDistance']
                if isinstance(value_dict['$near'], dict):
                    for k, v in value_dict.iteritems():
                        if k == '$maxDistance':
                            continue
                        value_son[k] = v
                    if (get_connection().max_wire_version <= 1):
                        value_son['$maxDistance'] = value_dict[
                            '$maxDistance']
                    else:
                        value_son['$near'] = SON(value_son['$near'])
                        value_son['$near'][
                            '$maxDistance'] = value_dict['$maxDistance']
                else:
                    value_son['$near'] = SON(value_son['$near'])
                    value_son['$near']['$maxDistance'] = value_dict['$maxDistance']
                    for k, v in value_dict.iteritems():
                        if k == '$maxDistance':
                            continue
                        value_son[k] = v
                    value_son['$maxDistance'] = value_dict['$maxDistance']

                mongo_query[key] = value_son
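The ordering this enforces matters on the wire; an illustrative query that produces the `$near`/`$maxDistance` pair handled above:

.. code-block:: python

    class Place(Document):
        location = PointField()

    # max_distance merges into the same key as near; the SON shuffle above
    # nests $maxDistance inside $near for newer servers, or appends it last
    # when max_wire_version <= 1.
    Place.objects(location__near=[40.0, 5.0], location__max_distance=1000)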
            else:
@@ -159,6 +176,9 @@ def update(_doc_cls=None, **update):
            mongo_update.update(value)
            continue
        parts = key.split('__')
        # if there is no operator, default to "set"
        if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
            parts.insert(0, 'set')
        # Check for an operator and transform to mongo-style if there is
        op = None
        if parts[0] in UPDATE_OPERATORS:
@@ -256,7 +276,8 @@ def update(_doc_cls=None, **update):
        if ListField in field_classes:
            # Join all fields via dot notation to the last ListField
            # Then process as normal
            last_listField = len(cleaned_fields) - field_classes.index(ListField)
            last_listField = len(
                cleaned_fields) - field_classes.index(ListField)
            key = ".".join(parts[:last_listField])
            parts = parts[last_listField:]
            parts.insert(0, key)

@@ -1 +1 @@
pymongo
pymongo>=2.7.1
2
setup.py
@@ -77,7 +77,7 @@ setup(name='mongoengine',
      long_description=LONG_DESCRIPTION,
      platforms=['any'],
      classifiers=CLASSIFIERS,
      install_requires=['pymongo>=2.7'],
      install_requires=['pymongo>=2.7.1'],
      test_suite='nose.collector',
      **extra_opts
      )
0
tests/async/__init__.py
Normal file
36
tests/async/test_connection.py
Normal file
@@ -0,0 +1,36 @@
from mongoengine import *
import motor
import mongoengine.connection
from mongoengine.connection import get_db, get_connection, ConnectionError

try:
    import unittest2 as unittest
except ImportError:
    import unittest


class ConnectionTest(unittest.TestCase):

    def setUp(self):
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_register_connection(self):
        """
        Ensure that register_connection() works properly with async=True.
        """
        register_connection('asyncdb', 'mongoengineasynctest', async=True)

        self.assertEqual(
            mongoengine.connection._connection_settings['asyncdb']['name'],
            'mongoengineasynctest')

        self.assertTrue(
            mongoengine.connection._connection_settings['asyncdb']['async'])
        conn = get_connection('asyncdb')
        self.assertTrue(isinstance(conn, motor.MotorClient))

        db = get_db('asyncdb')
        self.assertTrue(isinstance(db, motor.MotorDatabase))
        self.assertEqual(db.name, 'mongoengineasynctest')
@@ -36,9 +36,9 @@ class ClassMethodsTest(unittest.TestCase):
    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        self.assertEqual(['age', 'id', 'name'],
        self.assertEqual(['_cls', 'age', 'id', 'name'],
                         sorted(self.Person._fields.keys()))
        self.assertEqual(["IntField", "ObjectIdField", "StringField"],
        self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
                         sorted([x.__class__.__name__ for x in
                                 self.Person._fields.values()]))

@@ -207,22 +207,21 @@ class DeltaTest(unittest.TestCase):
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                         ['embedded_field.list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        ]}, {}))
        }, {}))
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': {
            '_cls': 'Embedded',
            'string_field': 'hello world',
            'int_field': 1,
            'list_field': ['1', 2, {'hello': 'world'}],
            'dict_field': {'hello': 'world'}}
        }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
@@ -253,7 +252,7 @@ class DeltaTest(unittest.TestCase):

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
                         ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

@@ -548,22 +547,21 @@ class DeltaTest(unittest.TestCase):
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['db_embedded_field.db_list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                         ['db_embedded_field.db_list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': {
            '_cls': 'Embedded',
            'db_string_field': 'hello world',
            'db_int_field': 1,
            'db_list_field': ['1', 2, {'hello': 'world'}],
            'db_dict_field': {'hello': 'world'}}]}, {}))
            'db_dict_field': {'hello': 'world'}}}, {}))
        self.assertEqual(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
            'db_embedded_field.db_list_field.2': {
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}
        ]}, {}))
        }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
@@ -594,8 +592,7 @@ class DeltaTest(unittest.TestCase):

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEqual(doc._delta(),
                         ({'db_embedded_field.db_list_field.2.db_list_field':
                             [1, 2, {}]}, {}))
                         ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

@@ -735,5 +732,47 @@ class DeltaTest(unittest.TestCase):
        mydoc._clear_changed_fields()
        self.assertEqual([], mydoc._get_changed_fields())

    def test_referenced_object_changed_attributes(self):
        """Ensures that when you save a new reference to a field, the referenced object isn't altered"""

        class Organization(Document):
            name = StringField()

        class User(Document):
            name = StringField()
            org = ReferenceField('Organization', required=True)

        Organization.drop_collection()
        User.drop_collection()

        org1 = Organization(name='Org 1')
        org1.save()

        org2 = Organization(name='Org 2')
        org2.save()

        user = User(name='Fred', org=org1)
        user.save()

        org1.reload()
        org2.reload()
        user.reload()
        self.assertEqual(org1.name, 'Org 1')
        self.assertEqual(org2.name, 'Org 2')
        self.assertEqual(user.name, 'Fred')

        user.name = 'Harold'
        user.org = org2

        org2.name = 'New Org 2'
        self.assertEqual(org2.name, 'New Org 2')

        user.save()
        org2.save()

        self.assertEqual(org2.name, 'New Org 2')
        org2.reload()
        self.assertEqual(org2.name, 'New Org 2')

if __name__ == '__main__':
    unittest.main()

@@ -292,6 +292,22 @@ class DynamicTest(unittest.TestCase):
        person.save()
        self.assertEqual(Person.objects.first().age, 35)

    def test_dynamic_embedded_works_with_only(self):
        """Ensure custom field names on a dynamic embedded document are found by qs.only()"""

        class Address(DynamicEmbeddedDocument):
            city = StringField()

        class Person(DynamicDocument):
            address = EmbeddedDocumentField(Address)

        Person.drop_collection()

        Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save()

        self.assertEqual(Person.objects.first().address.street_number, '1337')
        self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337')

    def test_dynamic_and_embedded_dict_access(self):
        """Ensure embedded dynamic documents work with dict[] style access"""

@@ -330,6 +346,5 @@ class DynamicTest(unittest.TestCase):
        person.save()
        self.assertEqual(Person.objects.first().age, 35)


if __name__ == '__main__':
    unittest.main()
@@ -175,6 +175,16 @@ class IndexesTest(unittest.TestCase):
        info = A._get_collection().index_information()
        self.assertEqual(len(info.keys()), 2)

        class B(A):
            c = StringField()
            d = StringField()
            meta = {
                'indexes': [{'fields': ['c']}, {'fields': ['d'], 'cls': True}],
                'allow_inheritance': True
            }
        self.assertEqual([('c', 1)], B._meta['index_specs'][1]['fields'])
        self.assertEqual([('_cls', 1), ('d', 1)], B._meta['index_specs'][2]['fields'])

    def test_build_index_spec_is_not_destructive(self):

        class MyDoc(Document):
@@ -727,5 +737,32 @@ class IndexesTest(unittest.TestCase):
            report.to_mongo())
        self.assertEqual(report, Report.objects.get(pk=my_key))

    def test_string_indexes(self):

        class MyDoc(Document):
            provider_ids = DictField()
            meta = {
                "indexes": ["provider_ids.foo", "provider_ids.bar"],
            }

        info = MyDoc.objects._collection.index_information()
        info = [value['key'] for key, value in info.iteritems()]
        self.assertTrue([('provider_ids.foo', 1)] in info)
        self.assertTrue([('provider_ids.bar', 1)] in info)

    def test_text_indexes(self):

        class Book(Document):
            title = DictField()
            meta = {
                "indexes": ["$title"],
            }

        indexes = Book.objects._collection.index_information()
        self.assertTrue("title_text" in indexes)
        key = indexes["title_text"]["key"]
        self.assertTrue(('_fts', 'text') in key)


if __name__ == '__main__':
    unittest.main()
@@ -163,7 +163,7 @@ class InheritanceTest(unittest.TestCase):
        class Employee(Person):
            salary = IntField()

        self.assertEqual(['age', 'id', 'name', 'salary'],
        self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        self.assertEqual(Employee._get_collection_name(),
                         Person._get_collection_name())
@@ -180,7 +180,7 @@ class InheritanceTest(unittest.TestCase):
        class Employee(Person):
            salary = IntField()

        self.assertEqual(['age', 'id', 'name', 'salary'],
        self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
                         sorted(Employee._fields.keys()))
        self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
                         ['_cls', 'name', 'age'])

@@ -353,6 +353,14 @@ class InstanceTest(unittest.TestCase):
        self.assertEqual(person.name, "Test User")
        self.assertEqual(person.age, 20)

        person.reload('age')
        self.assertEqual(person.name, "Test User")
        self.assertEqual(person.age, 21)

        person.reload()
        self.assertEqual(person.name, "Mr Test User")
        self.assertEqual(person.age, 21)

        person.reload()
        self.assertEqual(person.name, "Mr Test User")
        self.assertEqual(person.age, 21)
@@ -398,10 +406,11 @@ class InstanceTest(unittest.TestCase):
        doc.embedded_field.dict_field['woot'] = "woot"

        self.assertEqual(doc._get_changed_fields(), [
            'list_field', 'dict_field', 'embedded_field.list_field',
            'embedded_field.dict_field'])
            'list_field', 'dict_field.woot', 'embedded_field.list_field',
            'embedded_field.dict_field.woot'])
        doc.save()

        self.assertEqual(len(doc.list_field), 4)
        doc = doc.reload(10)
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(len(doc.list_field), 4)
@@ -409,6 +418,16 @@ class InstanceTest(unittest.TestCase):
        self.assertEqual(len(doc.embedded_field.list_field), 4)
        self.assertEqual(len(doc.embedded_field.dict_field), 2)

        doc.list_field.append(1)
        doc.save()
        doc.dict_field['extra'] = 1
        doc = doc.reload(10, 'list_field')
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(len(doc.list_field), 5)
        self.assertEqual(len(doc.dict_field), 3)
        self.assertEqual(len(doc.embedded_field.list_field), 4)
        self.assertEqual(len(doc.embedded_field.dict_field), 2)

    def test_reload_doesnt_exist(self):
        class Foo(Document):
            pass
@@ -443,7 +462,7 @@ class InstanceTest(unittest.TestCase):
        self.assertEqual(person['name'], 'Another User')

        # Length = length(assigned fields + id)
        self.assertEqual(len(person), 3)
        self.assertEqual(len(person), 4)

        self.assertTrue('age' in person)
        person.age = None
@@ -515,9 +534,6 @@ class InstanceTest(unittest.TestCase):

        class Email(EmbeddedDocument):
            email = EmailField()
            def clean(self):
                print "instance:"
                print self._instance

        class Account(Document):
            email = EmbeddedDocumentField(Email)
@@ -835,7 +851,7 @@ class InstanceTest(unittest.TestCase):
                return uuid.UUID(int=i)

        Widget.drop_collection()

        w1 = Widget(toggle=False, save_id=UUID(1))

        # ignore save_condition on new record creation
@@ -893,7 +909,7 @@ class InstanceTest(unittest.TestCase):
        w1.reload()
        self.assertTrue(w1.toggle)
        self.assertEqual(w1.count, 3)

    def test_update(self):
        """Ensure that an existing document is updated instead of being
        overwritten."""
@@ -1058,11 +1074,13 @@ class InstanceTest(unittest.TestCase):

        self.assertRaises(OperationError, update_no_value_raises)

        def update_no_op_raises():
        def update_no_op_should_default_to_set():
            person = self.Person.objects.first()
            person.update(name="Dan")
            person.reload()
            return person.name

        self.assertRaises(InvalidQueryError, update_no_op_raises)
        self.assertEqual("Dan", update_no_op_should_default_to_set())

    def test_update_unique_field(self):
        class Doc(Document):
@@ -2365,6 +2383,8 @@ class InstanceTest(unittest.TestCase):
        log.machine = "Localhost"
        log.save()

        self.assertTrue(log.id is not None)

        log.log = "Saving"
        log.save()

@@ -2388,6 +2408,8 @@ class InstanceTest(unittest.TestCase):
        log.machine = "Localhost"
        log.save()

        self.assertTrue(log.id is not None)

        log.log = "Saving"
        log.save()

@@ -2536,5 +2558,90 @@ class InstanceTest(unittest.TestCase):
        f1.ref  # Dereferences lazily
        self.assertEqual(f1, f2)

    def test_dbref_equality(self):
        class Test2(Document):
            name = StringField()

        class Test3(Document):
            name = StringField()

        class Test(Document):
            name = StringField()
            test2 = ReferenceField('Test2')
            test3 = ReferenceField('Test3')

        Test.drop_collection()
        Test2.drop_collection()
        Test3.drop_collection()

        t2 = Test2(name='a')
        t2.save()

        t3 = Test3(name='x')
        t3.id = t2.id
        t3.save()

        t = Test(name='b', test2=t2, test3=t3)

        f = Test._from_son(t.to_mongo())

        dbref2 = f._data['test2']
        obj2 = f.test2
        self.assertTrue(isinstance(dbref2, DBRef))
        self.assertTrue(isinstance(obj2, Test2))
        self.assertTrue(obj2.id == dbref2.id)
        self.assertTrue(obj2 == dbref2)
        self.assertTrue(dbref2 == obj2)

        dbref3 = f._data['test3']
        obj3 = f.test3
        self.assertTrue(isinstance(dbref3, DBRef))
        self.assertTrue(isinstance(obj3, Test3))
        self.assertTrue(obj3.id == dbref3.id)
        self.assertTrue(obj3 == dbref3)
        self.assertTrue(dbref3 == obj3)

        self.assertTrue(obj2.id == obj3.id)
        self.assertTrue(dbref2.id == dbref3.id)
        self.assertFalse(dbref2 == dbref3)
        self.assertFalse(dbref3 == dbref2)
        self.assertTrue(dbref2 != dbref3)
        self.assertTrue(dbref3 != dbref2)

        self.assertFalse(obj2 == dbref3)
        self.assertFalse(dbref3 == obj2)
        self.assertTrue(obj2 != dbref3)
        self.assertTrue(dbref3 != obj2)

        self.assertFalse(obj3 == dbref2)
        self.assertFalse(dbref2 == obj3)
        self.assertTrue(obj3 != dbref2)
        self.assertTrue(dbref2 != obj3)

    def test_default_values(self):
        class Person(Document):
            created_on = DateTimeField(default=lambda: datetime.utcnow())
            name = StringField()

        p = Person(name='alon')
        p.save()
        orig_created_on = Person.objects().only('created_on')[0].created_on

        p2 = Person.objects().only('name')[0]
        p2.name = 'alon2'
        p2.save()
        p3 = Person.objects().only('created_on')[0]
        self.assertEquals(orig_created_on, p3.created_on)

        class Person(Document):
            created_on = DateTimeField(default=lambda: datetime.utcnow())
            name = StringField()
            height = IntField(default=189)

        p4 = Person.objects()[0]
        p4.save()
        self.assertEquals(p4.height, 189)
        self.assertEquals(Person.objects(height=189).count(), 1)

if __name__ == '__main__':
    unittest.main()
@@ -20,6 +20,28 @@ class TestJson(unittest.TestCase):
    def setUp(self):
        connect(db='mongoenginetest')

    def test_json_names(self):
        """
        Going to test reported issue:
        https://github.com/MongoEngine/mongoengine/issues/654
        where the reporter asks for the ability to perform to_json with the
        original field names and not the abbreviated mongodb document keys
        """
        class Embedded(EmbeddedDocument):
            string = StringField(db_field='s')

        class Doc(Document):
            string = StringField(db_field='s')
            embedded = EmbeddedDocumentField(Embedded, db_field='e')

        doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello"))
        doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':'))

        expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}"""

        self.assertEqual(doc_json, expected_json)

    def test_json_simple(self):

        class Embedded(EmbeddedDocument):

@@ -141,6 +141,30 @@ class ValidatorErrorTest(unittest.TestCase):
        self.assertEqual(e.to_dict(), {
            "e": {'val': 'OK could not be converted to int'}})

    def test_embedded_weakref(self):

        class SubDoc(EmbeddedDocument):
            val = IntField(required=True)

        class Doc(Document):
            e = EmbeddedDocumentField(SubDoc, db_field='eb')

        Doc.drop_collection()

        d1 = Doc()
        d2 = Doc()

        s = SubDoc()

        self.assertRaises(ValidationError, lambda: s.validate())

        d1.e = s
        d2.e = s

        del d1

        self.assertRaises(ValidationError, lambda: d2.validate())


if __name__ == '__main__':
    unittest.main()
@@ -18,6 +18,7 @@ from bson import Binary, DBRef, ObjectId
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.base import _document_registry
from mongoengine.base.datastructures import BaseDict
from mongoengine.errors import NotRegistered
from mongoengine.python_support import PY3, b, bin_type

@@ -47,7 +48,8 @@ class FieldTest(unittest.TestCase):

        # Confirm saving now would store values
        data_to_be_saved = sorted(person.to_mongo().keys())
        self.assertEqual(data_to_be_saved, ['age', 'created', 'name', 'userid'])
        self.assertEqual(
            data_to_be_saved, ['age', 'created', 'name', 'userid'])

        self.assertTrue(person.validate() is None)

@@ -63,7 +65,8 @@ class FieldTest(unittest.TestCase):

        # Confirm introspection changes nothing
        data_to_be_saved = sorted(person.to_mongo().keys())
        self.assertEqual(data_to_be_saved, ['age', 'created', 'name', 'userid'])
        self.assertEqual(
            data_to_be_saved, ['age', 'created', 'name', 'userid'])

    def test_default_values_set_to_None(self):
        """Ensure that default field values are used when creating a document.
@@ -587,7 +590,8 @@ class FieldTest(unittest.TestCase):

        LogEntry.drop_collection()

        # Post UTC - microseconds are rounded (down) nearest millisecond and dropped
        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped
        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999)
        d2 = datetime.datetime(1970, 01, 01, 00, 00, 01)
        log = LogEntry()
@@ -688,7 +692,8 @@ class FieldTest(unittest.TestCase):

        LogEntry.drop_collection()

        # Post UTC - microseconds are rounded (down) nearest millisecond and dropped - with default datetimefields
        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped - with default datetimefields
        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999)
        log = LogEntry()
        log.date = d1
@@ -696,14 +701,16 @@ class FieldTest(unittest.TestCase):
        log.reload()
        self.assertEqual(log.date, d1)

        # Post UTC - microseconds are rounded (down) nearest millisecond - with default datetimefields
        # Post UTC - microseconds are rounded (down) nearest millisecond - with
        # default datetimefields
        d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC dates microseconds below 1000 are dropped - with default datetimefields
        # Pre UTC dates microseconds below 1000 are dropped - with default
        # datetimefields
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
@@ -929,12 +936,16 @@ class FieldTest(unittest.TestCase):
        post.save()

        self.assertEqual(BlogPost.objects.count(), 3)
        self.assertEqual(BlogPost.objects.filter(info__exact='test').count(), 1)
        self.assertEqual(BlogPost.objects.filter(info__0__test='test').count(), 1)
        self.assertEqual(
            BlogPost.objects.filter(info__exact='test').count(), 1)
        self.assertEqual(
            BlogPost.objects.filter(info__0__test='test').count(), 1)

        # Confirm handles non strings or non existing keys
        self.assertEqual(BlogPost.objects.filter(info__0__test__exact='5').count(), 0)
        self.assertEqual(BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
        self.assertEqual(
            BlogPost.objects.filter(info__0__test__exact='5').count(), 0)
        self.assertEqual(
            BlogPost.objects.filter(info__100__test__exact='test').count(), 0)
        BlogPost.drop_collection()

    def test_list_field_passed_in_value(self):
@@ -951,7 +962,6 @@ class FieldTest(unittest.TestCase):
        foo.bars.append(bar)
        self.assertEqual(repr(foo.bars), '[<Bar: Bar object>]')

    def test_list_field_strict(self):
        """Ensure that list field handles validation if provided a strict field type."""

@@ -1082,20 +1092,28 @@ class FieldTest(unittest.TestCase):
        self.assertTrue(isinstance(e2.mapping[1], IntegerSetting))

        # Test querying
        self.assertEqual(Simple.objects.filter(mapping__1__value=42).count(), 1)
        self.assertEqual(Simple.objects.filter(mapping__2__number=1).count(), 1)
        self.assertEqual(Simple.objects.filter(mapping__2__complex__value=42).count(), 1)
        self.assertEqual(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1)
        self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__1__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__2__number=1).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__2__complex__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__2__list__0__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1)

        # Confirm can update
        Simple.objects().update(set__mapping__1=IntegerSetting(value=10))
        self.assertEqual(Simple.objects.filter(mapping__1__value=10).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__1__value=10).count(), 1)

        Simple.objects().update(
            set__mapping__2__list__1=StringSetting(value='Boo'))
        self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0)
        self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0)
        self.assertEqual(
            Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1)

        Simple.drop_collection()
@@ -1141,12 +1159,16 @@ class FieldTest(unittest.TestCase):
        post.save()

        self.assertEqual(BlogPost.objects.count(), 3)
        self.assertEqual(BlogPost.objects.filter(info__title__exact='test').count(), 1)
        self.assertEqual(BlogPost.objects.filter(info__details__test__exact='test').count(), 1)
        self.assertEqual(
            BlogPost.objects.filter(info__title__exact='test').count(), 1)
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact='test').count(), 1)

        # Confirm handles non strings or non existing keys
        self.assertEqual(BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
        self.assertEqual(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
        self.assertEqual(
            BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)

        post = BlogPost.objects.create(info={'title': 'original'})
        post.info.update({'title': 'updated'})
@@ -1207,19 +1229,50 @@ class FieldTest(unittest.TestCase):
        self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting))

        # Test querying
        self.assertEqual(Simple.objects.filter(mapping__someint__value=42).count(), 1)
        self.assertEqual(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
        self.assertEqual(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
        self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
        self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__someint__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)

        # Confirm can update
        Simple.objects().update(
            set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
        self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
        self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)

        Simple.drop_collection()

    def test_atomic_update_dict_field(self):
        """Ensure that the entire DictField can be atomically updated."""

        class Simple(Document):
            mapping = DictField(field=ListField(IntField(required=True)))

        Simple.drop_collection()

        e = Simple()
        e.mapping['someints'] = [1, 2]
        e.save()
        e.update(set__mapping={"ints": [3, 4]})
        e.reload()
        self.assertEqual(BaseDict, type(e.mapping))
        self.assertEqual({"ints": [3, 4]}, e.mapping)

        def create_invalid_mapping():
            e.update(set__mapping={"somestrings": ["foo", "bar", ]})

        self.assertRaises(ValueError, create_invalid_mapping)

        Simple.drop_collection()
@@ -1290,7 +1343,7 @@ class FieldTest(unittest.TestCase):

        class Test(Document):
            my_map = MapField(field=EmbeddedDocumentField(Embedded),
                              db_field='x')

        Test.drop_collection()

@@ -1334,7 +1387,7 @@ class FieldTest(unittest.TestCase):
        Log(name="wilson", visited={'friends': datetime.datetime.now()}).save()

        self.assertEqual(1, Log.objects(
                         visited__friends__exists=True).count())

    def test_embedded_db_field(self):

@@ -1477,6 +1530,375 @@ class FieldTest(unittest.TestCase):
        mongoed = p1.to_mongo()
        self.assertTrue(isinstance(mongoed['parent'], ObjectId))
def test_cached_reference_fields(self):
|
||||
class Animal(Document):
|
||||
name = StringField()
|
||||
tag = StringField()
|
||||
|
||||
class Ocorrence(Document):
|
||||
person = StringField()
|
||||
animal = CachedReferenceField(
|
||||
Animal, fields=['tag'])
|
||||
|
||||
Animal.drop_collection()
|
||||
Ocorrence.drop_collection()
|
||||
|
||||
a = Animal(name="Leopard", tag="heavy")
|
||||
a.save()
|
||||
|
||||
self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal])
|
||||
o = Ocorrence(person="teste", animal=a)
|
||||
o.save()
|
||||
|
||||
p = Ocorrence(person="Wilson")
|
||||
p.save()
|
||||
|
||||
self.assertEqual(Ocorrence.objects(animal=None).count(), 1)
|
||||
|
||||
self.assertEqual(
|
||||
a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk})
|
||||
|
||||
self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
|
||||
|
||||
# counts
|
||||
Ocorrence(person="teste 2").save()
|
||||
Ocorrence(person="teste 3").save()
|
||||
|
||||
count = Ocorrence.objects(animal__tag='heavy').count()
|
||||
self.assertEqual(count, 1)
|
||||
|
||||
ocorrence = Ocorrence.objects(animal__tag='heavy').first()
|
||||
self.assertEqual(ocorrence.person, "teste")
|
||||
self.assertTrue(isinstance(ocorrence.animal, Animal))
|
||||
|
||||
def test_cached_reference_field_decimal(self):
|
||||
class PersonAuto(Document):
|
||||
name = StringField()
|
||||
salary = DecimalField()
|
||||
|
||||
class SocialTest(Document):
|
||||
group = StringField()
|
||||
person = CachedReferenceField(
|
||||
PersonAuto,
|
||||
fields=('salary',))
|
||||
|
||||
PersonAuto.drop_collection()
|
||||
SocialTest.drop_collection()
|
||||
|
||||
p = PersonAuto(name="Alberto", salary=Decimal('7000.00'))
|
||||
p.save()
|
||||
|
||||
s = SocialTest(group="dev", person=p)
|
||||
s.save()
|
||||
|
||||
self.assertEqual(
|
||||
SocialTest.objects._collection.find_one({'person.salary': 7000.00}), {
|
||||
'_id': s.pk,
|
||||
'group': s.group,
|
||||
'person': {
|
||||
'_id': p.pk,
|
||||
'salary': 7000.00
|
||||
}
|
||||
})
|
||||
|
||||
def test_cached_reference_field_reference(self):
|
||||
class Group(Document):
|
||||
name = StringField()
|
||||
|
||||
class Person(Document):
|
||||
name = StringField()
|
||||
group = ReferenceField(Group)
|
||||
|
||||
class SocialData(Document):
|
||||
obs = StringField()
|
||||
tags = ListField(
|
||||
StringField())
|
||||
person = CachedReferenceField(
|
||||
Person,
|
||||
fields=('group',))
|
||||
|
||||
Group.drop_collection()
|
||||
Person.drop_collection()
|
||||
SocialData.drop_collection()
|
||||
|
||||
g1 = Group(name='dev')
|
||||
g1.save()
|
||||
|
||||
g2 = Group(name="designers")
|
||||
g2.save()
|
||||
|
||||
p1 = Person(name="Alberto", group=g1)
|
||||
p1.save()
|
||||
|
||||
p2 = Person(name="Andre", group=g1)
|
||||
p2.save()
|
||||
|
||||
p3 = Person(name="Afro design", group=g2)
|
||||
p3.save()
|
||||
|
||||
s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2'])
|
||||
s1.save()
|
||||
|
||||
s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4'])
|
||||
s2.save()
|
||||
|
||||
self.assertEqual(SocialData.objects._collection.find_one(
|
||||
{'tags': 'tag2'}), {
|
||||
'_id': s1.pk,
|
||||
'obs': 'testing 123',
|
||||
'tags': ['tag1', 'tag2'],
|
||||
'person': {
|
||||
'_id': p1.pk,
|
||||
'group': g1.pk
|
||||
}
|
||||
})
|
||||
|
||||
self.assertEqual(SocialData.objects(person__group=g2).count(), 1)
|
||||
self.assertEqual(SocialData.objects(person__group=g2).first(), s2)
|
||||
|
||||
def test_cached_reference_field_update_all(self):
|
||||
class Person(Document):
|
||||
TYPES = (
|
||||
('pf', "PF"),
|
||||
('pj', "PJ")
|
||||
)
|
||||
name = StringField()
|
||||
tp = StringField(
|
||||
choices=TYPES
|
||||
)
|
||||
|
||||
father = CachedReferenceField('self', fields=('tp',))
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
a1 = Person(name="Wilson Father", tp="pj")
|
||||
a1.save()
|
||||
|
||||
a2 = Person(name='Wilson Junior', tp='pf', father=a1)
|
||||
a2.save()
|
||||
|
||||
self.assertEqual(dict(a2.to_mongo()), {
|
||||
"_id": a2.pk,
|
||||
"name": u"Wilson Junior",
|
||||
"tp": u"pf",
|
||||
"father": {
|
||||
"_id": a1.pk,
|
||||
"tp": u"pj"
|
||||
}
|
||||
})
|
||||
|
||||
self.assertEqual(Person.objects(father=a1)._query, {
|
||||
'father._id': a1.pk
|
||||
})
|
||||
self.assertEqual(Person.objects(father=a1).count(), 1)
|
||||
|
||||
Person.objects.update(set__tp="pf")
|
||||
Person.father.sync_all()
|
||||
|
||||
a2.reload()
|
||||
self.assertEqual(dict(a2.to_mongo()), {
|
||||
"_id": a2.pk,
|
||||
"name": u"Wilson Junior",
|
||||
"tp": u"pf",
|
||||
"father": {
|
||||
"_id": a1.pk,
|
||||
"tp": u"pf"
|
||||
}
|
||||
})
|
||||
|
||||
def test_cached_reference_fields_on_embedded_documents(self):
|
||||
def build():
|
||||
class Test(Document):
|
||||
name = StringField()
|
||||
|
||||
type('WrongEmbeddedDocument', (
|
||||
EmbeddedDocument,), {
|
||||
'test': CachedReferenceField(Test)
|
||||
})
|
||||
|
||||
self.assertRaises(InvalidDocumentError, build)
|
||||
|
||||
def test_cached_reference_auto_sync(self):
|
||||
class Person(Document):
|
||||
TYPES = (
|
||||
('pf', "PF"),
|
||||
('pj', "PJ")
|
||||
)
|
||||
name = StringField()
|
||||
tp = StringField(
|
||||
choices=TYPES
|
||||
)
|
||||
|
||||
father = CachedReferenceField('self', fields=('tp',))
|
||||
|
||||
Person.drop_collection()
|
||||
|
||||
a1 = Person(name="Wilson Father", tp="pj")
|
||||
a1.save()
|
||||
|
||||
a2 = Person(name='Wilson Junior', tp='pf', father=a1)
|
||||
a2.save()
|
||||
|
||||
a1.tp = 'pf'
|
||||
a1.save()
|
||||
|
||||
a2.reload()
|
||||
self.assertEqual(dict(a2.to_mongo()), {
|
||||
'_id': a2.pk,
|
||||
'name': 'Wilson Junior',
|
||||
'tp': 'pf',
|
||||
'father': {
|
||||
'_id': a1.pk,
|
||||
'tp': 'pf'
|
||||
}
|
||||
})
|
||||
|
||||
def test_cached_reference_auto_sync_disabled(self):
|
||||
class Persone(Document):
|
||||
TYPES = (
|
||||
('pf', "PF"),
|
||||
('pj', "PJ")
|
||||
)
|
||||
name = StringField()
|
||||
tp = StringField(
|
||||
choices=TYPES
|
||||
)
|
||||
|
||||
father = CachedReferenceField(
|
||||
'self', fields=('tp',), auto_sync=False)
|
||||
|
||||
Persone.drop_collection()
|
||||
|
||||
a1 = Persone(name="Wilson Father", tp="pj")
|
||||
a1.save()
|
||||
|
||||
a2 = Persone(name='Wilson Junior', tp='pf', father=a1)
|
||||
a2.save()
|
||||
|
||||
a1.tp = 'pf'
|
||||
a1.save()
|
||||
|
||||
self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), {
|
||||
'_id': a2.pk,
|
||||
'name': 'Wilson Junior',
|
||||
'tp': 'pf',
|
||||
'father': {
|
||||
'_id': a1.pk,
|
||||
'tp': 'pj'
|
||||
}
|
||||
})
|
||||
|
    def test_cached_reference_embedded_fields(self):
        class Owner(EmbeddedDocument):
            TPS = (
                ('n', "Normal"),
                ('u', "Urgent")
            )
            name = StringField()
            tp = StringField(
                verbose_name="Type",
                db_field="t",
                choices=TPS)

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(
                Animal, fields=['tag', 'owner.tp'])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy",
                   owner=Owner(tp='u', name="Wilson Júnior"))
        a.save()

        o = Ocorrence(person="teste", animal=a)
        o.save()
        self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), {
            '_id': a.pk,
            'tag': 'heavy',
            'owner': {
                't': 'u'
            }
        })
        self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
        self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u')

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        count = Ocorrence.objects(
            animal__tag='heavy', animal__owner__tp='u').count()
        self.assertEqual(count, 1)

        ocorrence = Ocorrence.objects(
            animal__tag='heavy',
            animal__owner__tp='u').first()
        self.assertEqual(ocorrence.person, "teste")
        self.assertTrue(isinstance(ocorrence.animal, Animal))

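    # Dotted entries may also target list sub-fields of the embedded document;
    # the full list is cached, and a scalar query such as
    # animal__owner__tags='cool' matches any element (normal MongoDB array
    # semantics).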
    def test_cached_reference_embedded_list_fields(self):
        class Owner(EmbeddedDocument):
            name = StringField()
            tags = ListField(StringField())

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(
                Animal, fields=['tag', 'owner.tags'])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(name="Leopard", tag="heavy",
                   owner=Owner(tags=['cool', 'funny'],
                               name="Wilson Júnior"))
        a.save()

        o = Ocorrence(person="teste 2", animal=a)
        o.save()
        self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), {
            '_id': a.pk,
            'tag': 'heavy',
            'owner': {
                'tags': ['cool', 'funny']
            }
        })

        self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy')
        self.assertEqual(o.to_mongo()['animal']['owner']['tags'],
                         ['cool', 'funny'])

        # counts
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        query = Ocorrence.objects(
            animal__tag='heavy', animal__owner__tags='cool')._query
        self.assertEqual(
            query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'})

        ocorrence = Ocorrence.objects(
            animal__tag='heavy',
            animal__owner__tags='cool').first()
        self.assertEqual(ocorrence.person, "teste 2")
        self.assertTrue(isinstance(ocorrence.animal, Animal))

    def test_objectid_reference_fields(self):

        class Person(Document):
@@ -1834,8 +2256,7 @@ class FieldTest(unittest.TestCase):
        Person(name="Wilson Jr").save()

        self.assertEqual(repr(Person.objects(city=None)),
                         "[<Person: Person object>]")

    def test_generic_reference_choices(self):
        """Ensure that a GenericReferenceField can handle choices
@@ -1982,7 +2403,8 @@ class FieldTest(unittest.TestCase):
        attachment_required.blob = Binary(b('\xe6\x00\xc4\xff\x07'))
        attachment_required.validate()

        attachment_size_limit = AttachmentSizeLimit(
            blob=b('\xe6\x00\xc4\xff\x07'))
        self.assertRaises(ValidationError, attachment_size_limit.validate)
        attachment_size_limit.blob = b('\xe6\x00\xc4\xff')
        attachment_size_limit.validate()
@@ -2030,8 +2452,8 @@ class FieldTest(unittest.TestCase):
        """
        class Shirt(Document):
            size = StringField(max_length=3, choices=(
                ('S', 'Small'), ('M', 'Medium'), ('L', 'Large'),
                ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large')))
            style = StringField(max_length=3, choices=(
                ('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S')

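    # choices may be given as (stored value, human-readable label) pairs, as
    # above, or as a flat sequence of values, as in the next test; validation
    # always checks the stored value only.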
@@ -2061,7 +2483,7 @@ class FieldTest(unittest.TestCase):
        """
        class Shirt(Document):
            size = StringField(max_length=3,
                               choices=('S', 'M', 'L', 'XL', 'XXL'))

        Shirt.drop_collection()

@@ -2179,7 +2601,6 @@ class FieldTest(unittest.TestCase):
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 1000)

    def test_sequence_field_get_next_value(self):
        class Person(Document):
            id = SequenceField(primary_key=True)
@@ -2368,7 +2789,6 @@ class FieldTest(unittest.TestCase):
        self.assertEqual(1, post.comments[0].id)
        self.assertEqual(2, post.comments[1].id)

    def test_generic_embedded_document(self):
        class Car(EmbeddedDocument):
            name = StringField()
@@ -2478,7 +2898,7 @@ class FieldTest(unittest.TestCase):
        self.assertTrue('comments' in error.errors)
        self.assertTrue(1 in error.errors['comments'])
        self.assertTrue(isinstance(error.errors['comments'][1]['content'],
                                   ValidationError))

        # ValidationError.schema property
        error_dict = error.to_dict()
@@ -2507,6 +2927,9 @@ class FieldTest(unittest.TestCase):
                                     "aJIazqqWkm7.net"))
        self.assertTrue(user.validate() is None)

        user = User(email="new-tld@example.technology")
        self.assertTrue(user.validate() is None)

        user = User(email='me@localhost')
        self.assertRaises(ValidationError, user.validate)

@@ -2604,13 +3027,36 @@ class FieldTest(unittest.TestCase):
        DictFieldTest.drop_collection()

        test = DictFieldTest(dictionary=None)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

        test = DictFieldTest(dictionary=False)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

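    # With allow_inheritance, documents carry a hidden _cls value holding the
    # full class path (e.g. "Animal.Mammal.Dog"); the _cls__in queries below
    # match against exactly those stored paths.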
    def test_cls_field(self):
        class Animal(Document):
            meta = {'allow_inheritance': True}

        class Fish(Animal):
            pass

        class Mammal(Animal):
            pass

        class Dog(Mammal):
            pass

        class Human(Mammal):
            pass

        Animal.objects.delete()
        Dog().save()
        Fish().save()
        Human().save()
        self.assertEqual(Animal.objects(
            _cls__in=["Animal.Mammal.Dog", "Animal.Fish"]).count(), 2)
        self.assertEqual(Animal.objects(
            _cls__in=["Animal.Fish.Guppy"]).count(), 0)


if __name__ == '__main__':
    unittest.main()

@@ -19,8 +19,8 @@ class GeoFieldTest(unittest.TestCase):
    def _test_for_expected_error(self, Cls, loc, expected):
        try:
            Cls(loc=loc).validate()
            self.fail('Should not validate the location {0}'.format(loc))
        except ValidationError as e:
            self.assertEqual(expected, e.to_dict()['loc'])

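    # _test_for_expected_error asserts both that validation fails and that the
    # error message matches exactly. The geo tests below reuse it for every
    # failure mode: wrong dict shape, wrong GeoJSON "type", points that are
    # not two-dimensional, and non-numeric coordinates.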
    def test_geopoint_validation(self):
@@ -155,6 +155,117 @@ class GeoFieldTest(unittest.TestCase):

        Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()

    def test_multipoint_validation(self):
        class Location(Document):
            loc = MultiPointField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
        expected = 'MultiPointField type must be "MultiPoint"'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MultiPoint", "coordinates": [[1, 2, 3]]}
        expected = "Value ([1, 2, 3]) must be a two-dimensional point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[]]
        expected = "Invalid MultiPoint must contain at least one valid point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[1]], [[1, 2, 3]]]
        for coord in invalid_coords:
            expected = "Value (%s) must be a two-dimensional point" % repr(coord[0])
            self._test_for_expected_error(Location, coord, expected)

        invalid_coords = [[[{}, {}]], [("a", "b")]]
        for coord in invalid_coords:
            expected = "Both values (%s) in point must be float or int" % repr(coord[0])
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[[1, 2]]).validate()
        Location(loc={
            "type": "MultiPoint",
            "coordinates": [
                [1, 2],
                [81.4471435546875, 23.61432859499169]
            ]}).validate()

    def test_multilinestring_validation(self):
        class Location(Document):
            loc = MultiLineStringField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
        expected = 'MultiLineStringField type must be "MultiLineString"'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MultiLineString", "coordinates": [[[1, 2, 3]]]}
        expected = "Invalid MultiLineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [5, "a"]
        expected = "Invalid MultiLineString must contain at least one valid linestring"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[1]]]
        expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[1, 2, 3]]]
        expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
        for coord in invalid_coords:
            expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
            self._test_for_expected_error(Location, coord, expected)

        Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()

    def test_multipolygon_validation(self):
        class Location(Document):
            loc = MultiPolygonField()

        invalid_coords = {"x": 1, "y": 2}
        expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
        expected = 'MultiPolygonField type must be "MultiPolygon"'
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]}
        expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[5, "a"]]]]
        expected = "Invalid MultiPolygon:\nBoth values ([5, 'a']) in point must be float or int"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[]]]]
        expected = "Invalid MultiPolygon must contain at least one valid Polygon"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[1, 2, 3]]]]
        expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
        expected = "Invalid MultiPolygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
        self._test_for_expected_error(Location, invalid_coords, expected)

        invalid_coords = [[[[1, 2], [3, 4]]]]
        expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
        self._test_for_expected_error(Location, invalid_coords, expected)

        Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()

    def test_indexes_geopoint(self):
        """Ensure that indexes are created automatically for GeoPointFields.
        """

File diff suppressed because it is too large
@@ -48,7 +48,7 @@ class ConnectionTest(unittest.TestCase):

        connect('mongoenginetest', alias='testdb2')
        actual_connection = get_connection('testdb2')
        self.assertEqual(expected_connection, actual_connection)

    def test_connect_uri(self):
        """Ensure that the connect() method works properly with uri's
@@ -147,6 +147,18 @@ class ConnectionTest(unittest.TestCase):
        date_doc = DateDoc.objects.first()
        self.assertEqual(d, date_doc.the_date)

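    # Each alias maps to its own client object in the module-level registry
    # mongoengine.connection._connections, so connect() calls with different
    # aliases produce independently configured connections.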
    def test_multiple_connection_settings(self):
        connect('mongoenginetest', alias='t1', host="localhost")

        connect('mongoenginetest2', alias='t2', host="127.0.0.1")

        mongo_connections = mongoengine.connection._connections
        self.assertEqual(len(mongo_connections.items()), 2)
        self.assertTrue('t1' in mongo_connections.keys())
        self.assertTrue('t2' in mongo_connections.keys())
        self.assertEqual(mongo_connections['t1'].host, 'localhost')
        self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')


if __name__ == '__main__':
    unittest.main()

@@ -291,6 +291,30 @@ class FieldTest(unittest.TestCase):
        self.assertEqual(employee.friends, friends)
        self.assertEqual(q, 2)

    def test_list_of_lists_of_references(self):

        class User(Document):
            name = StringField()

        class Post(Document):
            user_lists = ListField(ListField(ReferenceField(User)))

        class SimpleList(Document):
            users = ListField(ReferenceField(User))

        User.drop_collection()
        Post.drop_collection()
        SimpleList.drop_collection()

        u1 = User.objects.create(name='u1')
        u2 = User.objects.create(name='u2')
        u3 = User.objects.create(name='u3')

        SimpleList.objects.create(users=[u1, u2, u3])
        self.assertEqual(SimpleList.objects.all()[0].users, [u1, u2, u3])

        Post.objects.create(user_lists=[[u1, u2], [u3]])
        self.assertEqual(Post.objects.all()[0].user_lists, [[u1, u2], [u3]])

    def test_circular_reference(self):
        """Ensure you can handle circular references
        """
@@ -1195,6 +1219,30 @@ class FieldTest(unittest.TestCase):
        page = Page.objects.first()
        self.assertEqual(page.tags[0], page.posts[0].tags[0])

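    # select_related() bulk-dereferences references, including ReferenceFields
    # reachable through embedded documents. The query_counter below shows the
    # whole playlist resolving in two queries (one for the playlist, one batch
    # fetch for the songs) rather than one query per song.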
    def test_select_related_follows_embedded_referencefields(self):
        class Playlist(Document):
            items = ListField(EmbeddedDocumentField("PlaylistItem"))

        class PlaylistItem(EmbeddedDocument):
            song = ReferenceField("Song")

        class Song(Document):
            title = StringField()

        Playlist.drop_collection()
        Song.drop_collection()

        songs = [Song.objects.create(title="song %d" % i) for i in range(3)]
        items = [PlaylistItem(song=song) for song in songs]
        playlist = Playlist.objects.create(items=items)

        with query_counter() as q:
            self.assertEqual(q, 0)

            playlist = Playlist.objects.first().select_related()
            songs = [item.song for item in playlist.items]

            self.assertEqual(q, 2)


if __name__ == '__main__':
    unittest.main()

@@ -2,11 +2,11 @@ import sys
sys.path[0:0] = [""]
import unittest
from nose.plugins.skip import SkipTest

from mongoengine import *

from mongoengine.django.shortcuts import get_document_or_404

import django
from django.http import Http404
from django.template import Context, Template
from django.conf import settings
@@ -19,6 +19,10 @@ settings.configure(
    AUTHENTICATION_BACKENDS=('mongoengine.django.auth.MongoEngineBackend',)
)

# For Django >= 1.7
if hasattr(django, 'setup'):
    django.setup()

try:
    from django.contrib.auth import authenticate, get_user_model
    from mongoengine.django.auth import User
@@ -32,6 +36,7 @@ except Exception:
    DJ15 = False
from django.contrib.sessions.tests import SessionTestsMixin
from mongoengine.django.sessions import SessionStore, MongoSession
from mongoengine.django.tests import MongoTestCase
from datetime import tzinfo, timedelta
ZERO = timedelta(0)

@@ -293,5 +298,11 @@ class MongoAuthTest(unittest.TestCase):
        db_user = User.objects.get(username='user')
        self.assertEqual(user.id, db_user.id)


class MongoTestCaseTest(MongoTestCase):
    def test_mongo_test_case(self):
        self.db.dummy_collection.insert({'collection': 'will be dropped'})


if __name__ == '__main__':
    unittest.main()

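# In the signal tests below, each handler appends a line to a shared
# signal_output list, and get_signal_output() runs a callable and returns the
# lines recorded while it executed; the assertions compare those lines against
# the expected output. document._created is True for instances built in
# memory and False for instances hydrated from the database.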
@@ -37,7 +37,8 @@ class SignalTests(unittest.TestCase):

        @classmethod
        def post_init(cls, sender, document, **kwargs):
            signal_output.append('post_init signal, %s, document._created = %s' %
                                 (document, document._created))

        @classmethod
        def pre_save(cls, sender, document, **kwargs):
@@ -54,7 +55,9 @@ class SignalTests(unittest.TestCase):

        @classmethod
        def post_save(cls, sender, document, **kwargs):
            dirty_keys = document._delta()[0].keys() + document._delta()[1].keys()
            signal_output.append('post_save signal, %s' % document)
            signal_output.append('post_save dirty keys, %s' % dirty_keys)
            if 'created' in kwargs:
                if kwargs['created']:
                    signal_output.append('Is created')
@@ -191,10 +194,16 @@ class SignalTests(unittest.TestCase):
            a1 = self.Author(name='Bill Shakespeare')
            self.Author.objects.insert([a1], load_bulk=False)

        def load_existing_author():
            a = self.Author(name='Bill Shakespeare')
            a.save()
            self.get_signal_output(lambda: None)  # eliminate signal output
            a1 = self.Author.objects(name='Bill Shakespeare')[0]

        self.assertEqual(self.get_signal_output(create_author), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare, document._created = True",
        ])

        a1 = self.Author(name='Bill Shakespeare')
@@ -203,6 +212,7 @@ class SignalTests(unittest.TestCase):
            "pre_save_post_validation signal, Bill Shakespeare",
            "Is created",
            "post_save signal, Bill Shakespeare",
            "post_save dirty keys, ['name']",
            "Is created"
        ])

@@ -213,6 +223,7 @@ class SignalTests(unittest.TestCase):
            "pre_save_post_validation signal, William Shakespeare",
            "Is updated",
            "post_save signal, William Shakespeare",
            "post_save dirty keys, ['name']",
            "Is updated"
        ])

@@ -221,12 +232,22 @@ class SignalTests(unittest.TestCase):
            'post_delete signal, William Shakespeare',
        ])

        signal_output = self.get_signal_output(load_existing_author)
        # test signal_output lines separately, because of the random ObjectId
        # assigned after the object is loaded
        self.assertEqual(signal_output[0],
                         "pre_init signal, Author")
        self.assertEqual(signal_output[2],
                         "post_init signal, Bill Shakespeare, document._created = False")

        signal_output = self.get_signal_output(bulk_create_author_with_load)

        # The output of this signal is not entirely deterministic. The reloaded
        # object will have an object ID. Hence, we only check part of the output.
        self.assertEqual(signal_output[3],
                         "pre_bulk_insert signal, [<Author: Bill Shakespeare>]")
        self.assertEqual(signal_output[-2:],
                         ["post_bulk_insert signal, [<Author: Bill Shakespeare>]",
                          "Is loaded"])
@@ -234,7 +255,7 @@ class SignalTests(unittest.TestCase):
        self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [
            "pre_init signal, Author",
            "{'name': 'Bill Shakespeare'}",
            "post_init signal, Bill Shakespeare, document._created = True",
            "pre_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "post_bulk_insert signal, [<Author: Bill Shakespeare>]",
            "Not loaded",