Merge pull request #980 from MRigal/fix/various-fixes

Pep8, code clean-up and 0.10.0 changelog finalisation

Commit: 45cb991254

AUTHORS (2 lines changed)
@@ -12,7 +12,7 @@ Laine Herron https://github.com/LaineHerron
 
 CONTRIBUTORS
 
-Dervived from the git logs, inevitably incomplete but all of whom and others
+Derived from the git logs, inevitably incomplete but all of whom and others
 have submitted patches, reported bugs and generally helped make MongoEngine
 that much better:
 
CONTRIBUTING.rst

@@ -29,7 +29,10 @@ Style Guide
 -----------
 
 MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_
-including 4 space indents and 79 character line limits.
+including 4 space indents. When possible we try to stick to 79 character line limits.
+However, screens got bigger and an ORM has a strong focus on readability and
+if it can help, we accept 119 as maximum line length, in a similar way as
+`django does <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_
 
 Testing
 -------
@@ -38,6 +41,10 @@ All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_
 and any pull requests are automatically tested by Travis. Any pull requests
 without tests will take longer to be integrated and might be refused.
 
+You may also submit a simple failing test as a PullRequest if you don't know
+how to fix it, it will be easier for other people to work on it and it may get
+fixed faster.
+
 General Guidelines
 ------------------
 
@@ -48,6 +55,7 @@ General Guidelines
   from the cmd line to run the test suite).
 - Ensure tests pass on every Python and PyMongo versions.
   You can test on these versions locally by executing ``tox``
+- Add enhancements or problematic bug fixes to docs/changelog.rst
 - Add yourself to AUTHORS :)
 
 Documentation
README.rst

@@ -26,7 +26,9 @@ a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html
 
 Installation
 ============
-If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
+We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of
+`pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``.
+You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ and thus
 you can use ``easy_install -U mongoengine``. Otherwise, you can download the
 source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python
 setup.py install``.
@@ -114,7 +116,7 @@ Also use the -s argument if you want to print out whatever or access pdb while t
 
 .. code-block:: shell
 
-    $ python setup.py nosetests --tests tests/test_django.py:QuerySetTest.test_get_document_or_404 -s
+    $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest.test_cls_field -s
 
 Community
 =========
docs/_themes/sphinx_rtd_theme/footer.html (vendored, 2 lines changed)

@@ -2,7 +2,7 @@
 {% if next or prev %}
   <div class="rst-footer-buttons">
     {% if next %}
-      <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a>
+      <a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}">Next <span class="icon icon-circle-arrow-right"></span></a>
     {% endif %}
     {% if prev %}
       <a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>
docs/changelog.rst

@@ -2,9 +2,15 @@
 Changelog
 =========
 
-Changes in 0.9.X - DEV
-======================
+Changes in 0.10.1 - DEV
+=======================
+
+Changes in 0.10.0
+=================
+- Django support was removed and will be available as a separate extension. #958
+- Allow to load undeclared field with meta attribute 'strict': False #957
+- Support for PyMongo 3+ #946
+- Removed get_or_create() deprecated since 0.8.0. #300
 - Improve Document._created status when switch collection and db #1020
 - Queryset update doesn't go through field validation #453
 - Added support for specifying authentication source as option `authSource` in URI. #967
@@ -14,17 +20,14 @@ Changes in 0.9.X - DEV
 - Use sets for populating dbrefs to dereference
 - Fixed unpickled documents replacing the global field's list. #888
 - Fixed storage of microseconds in ComplexDateTimeField and unused separator option. #910
-- Django support was removed and will be available as a separate extension. #958
 - Don't send a "cls" option to ensureIndex (related to https://jira.mongodb.org/browse/SERVER-769)
 - Fix for updating sorting in SortedListField. #978
 - Added __ support to escape field name in fields lookup keywords that match operators names #949
-- Support for PyMongo 3+ #946
 - Fix for issue where FileField deletion did not free space in GridFS.
 - No_dereference() not respected on embedded docs containing reference. #517
 - Document save raise an exception if save_condition fails #1005
 - Fixes some internal _id handling issue. #961
 - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652
-- Removed get_or_create() deprecated since 0.8.0. #300
 - Capped collection multiple of 256. #1011
 - Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods.
 - Fix for delete with write_concern {'w': 0}. #1008
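Two of the 0.10.0 entries above are easier to read with a concrete snippet. The sketch below is not part of the commit; it illustrates the ``'strict': False`` meta option (#957) and the ``authSource`` URI option (#967). Host, database and credentials are placeholders, and a reachable MongoDB instance is assumed.

.. code-block:: python

    from mongoengine import Document, StringField, connect

    # #967: the authentication source can be given as a URI option.
    connect(host='mongodb://user:secret@localhost:27017/mydb?authSource=admin')

    class Person(Document):
        name = StringField()
        # #957: with 'strict': False, documents containing fields that are
        # no longer declared on the class can still be loaded, instead of
        # raising FieldDoesNotExist.
        meta = {'strict': False}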
@@ -15,5 +15,5 @@ The MongoEngine team is looking for help contributing and maintaining a new
 Django extension for MongoEngine! If you have Django experience and would like
 to help contribute to the project, please get in touch on the
 `mailing list <http://groups.google.com/group/mongoengine-users>`_ or by
-simpily contributing on
+simply contributing on
 `GitHub <https://github.com/MongoEngine/django-mongoengine>`_.
@@ -598,7 +598,7 @@ Some variables are made available in the scope of the Javascript function:
 
 The following example demonstrates the intended usage of
 :meth:`~mongoengine.queryset.QuerySet.exec_js` by defining a function that sums
-over a field on a document (this functionality is already available throught
+over a field on a document (this functionality is already available through
 :meth:`~mongoengine.queryset.QuerySet.sum` but is shown here for sake of
 example)::
 
mongoengine/base/datastructures.py

@@ -1,5 +1,6 @@
 import weakref
 import itertools
+
 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
 
@@ -20,7 +21,7 @@ class BaseDict(dict):
         if isinstance(instance, (Document, EmbeddedDocument)):
             self._instance = weakref.proxy(instance)
         self._name = name
-        return super(BaseDict, self).__init__(dict_items)
+        super(BaseDict, self).__init__(dict_items)
 
     def __getitem__(self, key, *args, **kwargs):
         value = super(BaseDict, self).__getitem__(key)
@@ -65,7 +66,7 @@ class BaseDict(dict):
 
     def clear(self, *args, **kwargs):
         self._mark_as_changed()
-        return super(BaseDict, self).clear(*args, **kwargs)
+        return super(BaseDict, self).clear()
 
     def pop(self, *args, **kwargs):
         self._mark_as_changed()
@@ -73,7 +74,7 @@ class BaseDict(dict):
 
     def popitem(self, *args, **kwargs):
         self._mark_as_changed()
-        return super(BaseDict, self).popitem(*args, **kwargs)
+        return super(BaseDict, self).popitem()
 
     def setdefault(self, *args, **kwargs):
         self._mark_as_changed()
@@ -189,7 +190,7 @@ class BaseList(list):
 
     def reverse(self, *args, **kwargs):
         self._mark_as_changed()
-        return super(BaseList, self).reverse(*args, **kwargs)
+        return super(BaseList, self).reverse()
 
     def sort(self, *args, **kwargs):
         self._mark_as_changed()
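All of the ``BaseDict``/``BaseList`` hunks above follow one pattern: every mutating method flags the owning document via ``_mark_as_changed()`` and then delegates to the built-in type, and ``__init__`` now simply calls the parent rather than returning its result (``__init__`` must return ``None``). Below is a standalone sketch of that pattern, not part of the commit, with hypothetical names; it assumes the owner exposes a ``_mark_as_changed(name)`` method.

.. code-block:: python

    import weakref

    class TrackingDict(dict):
        """Minimal sketch of the change-tracking pattern used by BaseDict."""

        def __init__(self, dict_items, instance=None, name=None):
            if instance is not None:
                # weak proxy so the dict does not keep its owner alive
                self._instance = weakref.proxy(instance)
            else:
                self._instance = None
            self._name = name
            super(TrackingDict, self).__init__(dict_items)  # no 'return' here

        def _mark_as_changed(self):
            if self._instance is not None:
                self._instance._mark_as_changed(self._name)

        def __setitem__(self, key, value):
            self._mark_as_changed()
            return super(TrackingDict, self).__setitem__(key, value)

        def clear(self):
            self._mark_as_changed()
            return super(TrackingDict, self).clear()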
@@ -368,25 +369,31 @@ class StrictDict(object):
     __slots__ = ()
     _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
     _classes = {}
+
     def __init__(self, **kwargs):
-        for k,v in kwargs.iteritems():
+        for k, v in kwargs.iteritems():
             setattr(self, k, v)
+
     def __getitem__(self, key):
         key = '_reserved_' + key if key in self._special_fields else key
         try:
             return getattr(self, key)
         except AttributeError:
             raise KeyError(key)
+
     def __setitem__(self, key, value):
         key = '_reserved_' + key if key in self._special_fields else key
         return setattr(self, key, value)
+
     def __contains__(self, key):
         return hasattr(self, key)
+
     def get(self, key, default=None):
         try:
             return self[key]
         except KeyError:
             return default
+
     def pop(self, key, default=None):
         v = self.get(key, default)
         try:
@@ -394,19 +401,29 @@ class StrictDict(object):
         except AttributeError:
             pass
         return v
+
     def iteritems(self):
         for key in self:
             yield key, self[key]
+
     def items(self):
         return [(k, self[k]) for k in iter(self)]
+
+    def iterkeys(self):
+        return iter(self)
+
     def keys(self):
         return list(iter(self))
+
     def __iter__(self):
         return (key for key in self.__slots__ if hasattr(self, key))
+
     def __len__(self):
         return len(list(self.iteritems()))
+
     def __eq__(self, other):
         return self.items() == other.items()
+
     def __neq__(self, other):
         return self.items() != other.items()
 
@@ -417,15 +434,18 @@ class StrictDict(object):
         if allowed_keys not in cls._classes:
             class SpecificStrictDict(cls):
                 __slots__ = allowed_keys_tuple
+
                 def __repr__(self):
-                    return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k,v) for (k,v) in self.iteritems())
+                    return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k) for k in self.iterkeys())
 
             cls._classes[allowed_keys] = SpecificStrictDict
         return cls._classes[allowed_keys]
 
+
 class SemiStrictDict(StrictDict):
-    __slots__ = ('_extras')
+    __slots__ = ('_extras', )
     _classes = {}
+
     def __getattr__(self, attr):
         try:
             super(SemiStrictDict, self).__getattr__(attr)
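For context, ``StrictDict.create`` (shown above) builds and caches one ``__slots__``-based subclass per set of allowed keys, which is how documents avoid a per-instance ``__dict__``. A hypothetical usage sketch, assuming the module path as it stood at the time of this commit:

.. code-block:: python

    from mongoengine.base.datastructures import StrictDict

    PointDict = StrictDict.create(('x', 'y'))  # subclass with slots for x and y
    p = PointDict(x=1, y=2)
    p['x'] = 10          # fine: 'x' has a slot
    try:
        p['z'] = 3       # rejected: no slot for 'z'
    except AttributeError:
        print('unknown key rejected')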
@@ -434,6 +454,7 @@ class SemiStrictDict(StrictDict):
             return self.__getattribute__('_extras')[attr]
         except KeyError as e:
             raise AttributeError(e)
+
     def __setattr__(self, attr, value):
         try:
             super(SemiStrictDict, self).__setattr__(attr, value)
mongoengine/base/document.py

@@ -14,7 +14,6 @@ from mongoengine.common import _import_class
 from mongoengine.errors import (ValidationError, InvalidDocumentError,
                                 LookUpError, FieldDoesNotExist)
 from mongoengine.python_support import PY3, txt_type
-
 from mongoengine.base.common import get_document, ALLOW_INHERITANCE
 from mongoengine.base.datastructures import (
     BaseDict,
@@ -150,7 +149,6 @@ class BaseDocument(object):
         # Handle dynamic data only if an initialised dynamic document
         if self._dynamic and not self._dynamic_lock:
 
-            field = None
             if not hasattr(self, name) and not name.startswith('_'):
                 DynamicField = _import_class("DynamicField")
                 field = DynamicField(db_field=name)
@@ -183,8 +181,8 @@ class BaseDocument(object):
         except AttributeError:
             self__initialised = False
         # Check if the user has created a new instance of a class
-        if (self._is_document and self__initialised
-                and self__created and name == self._meta.get('id_field')):
+        if (self._is_document and self__initialised and
+                self__created and name == self._meta.get('id_field')):
             super(BaseDocument, self).__setattr__('_created', False)
 
         super(BaseDocument, self).__setattr__(name, value)
@@ -328,7 +326,7 @@ class BaseDocument(object):
 
         if value is not None:
 
-            if isinstance(field, (EmbeddedDocumentField)):
+            if isinstance(field, EmbeddedDocumentField):
                 if fields:
                     key = '%s.' % field_name
                     embedded_fields = [
@@ -417,10 +415,11 @@ class BaseDocument(object):
 
     def to_json(self, *args, **kwargs):
         """Converts a document to JSON.
-        :param use_db_field: Set to True by default but enables the output of the json structure with the field names and not the mongodb store db_names in case of set to False
+        :param use_db_field: Set to True by default but enables the output of the json structure with the field names
+            and not the mongodb store db_names in case of set to False
         """
         use_db_field = kwargs.pop('use_db_field', True)
         return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
 
     @classmethod
     def from_json(cls, json_data, created=False):
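The reworded docstring is still terse, so here is a short sketch of what ``use_db_field`` toggles. It is not part of the commit; the document class and its ``db_field`` are hypothetical, and no database connection is needed since ``to_json`` only serializes in-memory data.

.. code-block:: python

    from mongoengine import Document, StringField

    class City(Document):
        name = StringField(db_field='n')   # stored under the short key 'n'

    city = City(name='Paris')
    city.to_json()                     # default use_db_field=True -> roughly '{"n": "Paris"}'
    city.to_json(use_db_field=False)   # Python field names       -> roughly '{"name": "Paris"}'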
@@ -570,7 +569,7 @@ class BaseDocument(object):
                 continue
             elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
                     and db_field_name not in changed_fields):
                 # Find all embedded fields that have been changed
                 changed = data._get_changed_fields(inspected)
                 changed_fields += ["%s%s" % (key, k) for k in changed if k]
             elif (isinstance(data, (list, tuple, dict)) and
@@ -578,7 +577,7 @@ class BaseDocument(object):
                 if (hasattr(field, 'field') and
                         isinstance(field.field, ReferenceField)):
                     continue
-                elif (isinstance(field, SortedListField) and field._ordering):
+                elif isinstance(field, SortedListField) and field._ordering:
                     # if ordering is affected whole list is changed
                     if any(map(lambda d: field._ordering in d._changed_fields, data)):
                         changed_fields.append(db_field_name)
@@ -621,18 +620,18 @@ class BaseDocument(object):
         else:
             set_data = doc
             if '_id' in set_data:
-                del(set_data['_id'])
+                del set_data['_id']
 
         # Determine if any changed items were actually unset.
         for path, value in set_data.items():
             if value or isinstance(value, (numbers.Number, bool)):
                 continue
 
-            # If we've set a value that ain't the default value dont unset it.
+            # If we've set a value that ain't the default value don't unset it.
             default = None
             if (self._dynamic and len(parts) and parts[0] in
                     self._dynamic_fields):
-                del(set_data[path])
+                del set_data[path]
                 unset_data[path] = 1
                 continue
             elif path in self._fields:
@@ -666,7 +665,7 @@ class BaseDocument(object):
                 if default != value:
                     continue
 
-            del(set_data[path])
+            del set_data[path]
             unset_data[path] = 1
         return set_data, unset_data
 
@@ -821,7 +820,6 @@ class BaseDocument(object):
             parts = key.split('.')
             if parts in (['pk'], ['id'], ['_id']):
                 key = '_id'
-                fields = []
             else:
                 fields = cls._lookup_field(parts)
                 parts = []
@@ -981,7 +979,7 @@ class BaseDocument(object):
                 if hasattr(getattr(field, 'field', None), 'lookup_member'):
                     new_field = field.field.lookup_member(field_name)
                 elif cls._dynamic and (isinstance(field, DynamicField) or
                         getattr(getattr(field, 'document_type'), '_dynamic')):
                     new_field = DynamicField(db_field=field_name)
                 else:
                     # Look up subfield on the previous field or raise
mongoengine/base/fields.py

@@ -7,7 +7,6 @@ import pymongo
 
 from mongoengine.common import _import_class
 from mongoengine.errors import ValidationError
-
 from mongoengine.base.common import ALLOW_INHERITANCE
 from mongoengine.base.datastructures import (
     BaseDict, BaseList, EmbeddedDocumentList
@@ -23,7 +22,6 @@ UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push',
 
 
 class BaseField(object):
-
     """A base class for fields in a MongoDB document. Instances of this class
     may be added to subclasses of `Document` to define a document's schema.
 
@@ -114,7 +112,7 @@ class BaseField(object):
         """Descriptor for assigning a value to a field in a document.
         """
 
-        # If setting to None and theres a default
+        # If setting to None and there is a default
         # Then set the value to the default value
         if value is None:
             if self.null:
@@ -212,7 +210,6 @@ class BaseField(object):
 
 
 class ComplexBaseField(BaseField):
-
     """Handles complex fields, such as lists / dictionaries.
 
     Allows for nesting of embedded documents inside complex types.
@@ -262,8 +259,8 @@ class ComplexBaseField(BaseField):
             instance._data[self.name] = value
 
         if (self._auto_dereference and instance._initialised and
-                isinstance(value, (BaseList, BaseDict))
-                and not value._dereferenced):
+                isinstance(value, (BaseList, BaseDict)) and
+                not value._dereferenced):
             value = _dereference(
                 value, max_depth=1, instance=instance, name=self.name
             )
@@ -330,8 +327,8 @@ class ComplexBaseField(BaseField):
                 return GenericReferenceField().to_mongo(value)
             cls = value.__class__
             val = value.to_mongo()
-            # If we its a document thats not inherited add _cls
-            if (isinstance(value, EmbeddedDocument)):
+            # If it's a document that is not inherited add _cls
+            if isinstance(value, EmbeddedDocument):
                 val['_cls'] = cls.__name__
             return val
 
@@ -370,8 +367,8 @@ class ComplexBaseField(BaseField):
             elif hasattr(v, 'to_mongo'):
                 cls = v.__class__
                 val = v.to_mongo()
-                # If we its a document thats not inherited add _cls
-                if (isinstance(v, (Document, EmbeddedDocument))):
+                # If it's a document that is not inherited add _cls
+                if isinstance(v, (Document, EmbeddedDocument)):
                     val['_cls'] = cls.__name__
                 value_dict[k] = val
             else:
@@ -422,7 +419,6 @@ class ComplexBaseField(BaseField):
 
 
 class ObjectIdField(BaseField):
-
     """A field wrapper around MongoDB's ObjectIds.
     """
 
@@ -454,7 +450,6 @@ class ObjectIdField(BaseField):
 
 
 class GeoJsonBaseField(BaseField):
-
     """A geo json field storing a geojson style object.
 
     .. versionadded:: 0.8
mongoengine/base/metaclasses.py

@@ -14,7 +14,6 @@ __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
 
 
 class DocumentMetaclass(type):
-
     """Metaclass for all documents.
     """
 
@@ -111,7 +110,7 @@ class DocumentMetaclass(type):
         for base in flattened_bases:
             if (not getattr(base, '_is_base_cls', True) and
                     not getattr(base, '_meta', {}).get('abstract', True)):
-                # Collate heirarchy for _cls and _subclasses
+                # Collate hierarchy for _cls and _subclasses
                 class_name.append(base.__name__)
 
                 if hasattr(base, '_meta'):
@@ -144,7 +143,7 @@ class DocumentMetaclass(type):
             for base in document_bases:
                 if _cls not in base._subclasses:
                     base._subclasses += (_cls,)
                 base._types = base._subclasses  # TODO depreciate _types
 
         (Document, EmbeddedDocument, DictField,
          CachedReferenceField) = cls._import_classes()
@@ -184,7 +183,7 @@ class DocumentMetaclass(type):
                         "CachedReferenceFields is not allowed in EmbeddedDocuments")
                     if not f.document_type:
                         raise InvalidDocumentError(
-                            "Document is not avaiable to sync")
+                            "Document is not available to sync")
 
                     if f.auto_sync:
                         f.start_listener()
@@ -246,11 +245,10 @@ class DocumentMetaclass(type):
         EmbeddedDocument = _import_class('EmbeddedDocument')
         DictField = _import_class('DictField')
         CachedReferenceField = _import_class('CachedReferenceField')
-        return (Document, EmbeddedDocument, DictField, CachedReferenceField)
+        return Document, EmbeddedDocument, DictField, CachedReferenceField
 
 
 class TopLevelDocumentMetaclass(DocumentMetaclass):
-
     """Metaclass for top-level documents (i.e. documents that have their own
     collection in the database.
     """
@@ -260,7 +258,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
         super_new = super(TopLevelDocumentMetaclass, cls).__new__
 
         # Set default _meta data if base class, otherwise get user defined meta
-        if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
+        if attrs.get('my_metaclass') == TopLevelDocumentMetaclass:
             # defaults
             attrs['_meta'] = {
                 'abstract': True,
@@ -279,7 +277,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
             attrs['_meta'].update(attrs.get('meta', {}))
         else:
             attrs['_meta'] = attrs.get('meta', {})
-            # Explictly set abstract to false unless set
+            # Explicitly set abstract to false unless set
             attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
             attrs['_is_base_cls'] = False
 
@@ -294,7 +292,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
 
         # Clean up top level meta
         if 'meta' in attrs:
-            del(attrs['meta'])
+            del attrs['meta']
 
         # Find the parent document class
         parent_doc_cls = [b for b in flattened_bases
@@ -303,11 +301,11 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
 
         # Prevent classes setting collection different to their parents
         # If parent wasn't an abstract class
-        if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
-                and not parent_doc_cls._meta.get('abstract', True)):
+        if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and
+                not parent_doc_cls._meta.get('abstract', True)):
             msg = "Trying to set a collection on a subclass (%s)" % name
             warnings.warn(msg, SyntaxWarning)
-            del(attrs['_meta']['collection'])
+            del attrs['_meta']['collection']
 
         # Ensure abstract documents have abstract bases
         if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
@@ -410,14 +408,15 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
 
         return new_class
 
-    def get_auto_id_names(self):
+    @classmethod
+    def get_auto_id_names(cls, new_class):
         id_name, id_db_name = ('id', '_id')
-        if id_name not in self._fields and \
-                id_db_name not in (v.db_field for v in self._fields.values()):
+        if id_name not in new_class._fields and \
+                id_db_name not in (v.db_field for v in new_class._fields.values()):
             return id_name, id_db_name
         id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0
-        while id_name in self._fields or \
-                id_db_name in (v.db_field for v in self._fields.values()):
+        while id_name in new_class._fields or \
+                id_db_name in (v.db_field for v in new_class._fields.values()):
            id_name = '{0}_{1}'.format(id_basename, i)
            id_db_name = '{0}_{1}'.format(id_db_basename, i)
            i += 1
@@ -425,7 +424,6 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
 
 
 class MetaDict(dict):
-
     """Custom dictionary for meta classes.
     Handles the merging of set indexes
     """
@@ -440,6 +438,5 @@ class MetaDict(dict):
 
 
 class BasesTuple(tuple):
-
     """Special class to handle introspection of bases tuple in __new__"""
     pass
mongoengine/connection.py

@@ -120,7 +120,8 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
         try:
             connection = None
             # check for shared connections
-            connection_settings_iterator = ((db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
+            connection_settings_iterator = (
+                (db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
             for db_alias, connection_settings in connection_settings_iterator:
                 connection_settings.pop('name', None)
                 connection_settings.pop('username', None)
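The reflowed generator above scans every registered alias looking for settings that can share an existing connection. A minimal sketch of how several aliases get registered in the first place; database names are placeholders and a local mongod is assumed:

.. code-block:: python

    from mongoengine import connect

    connect('app_main')                # registered under the default alias
    connect('app_logs', alias='logs')  # second alias, same host and port
    # get_connection() can then reuse one underlying client for both
    # aliases, since only the database name differs.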
mongoengine/dereference.py

@@ -11,7 +11,6 @@ from document import Document, EmbeddedDocument
 
 
 class DeReference(object):
-
     def __call__(self, items, max_depth=1, instance=None, name=None):
         """
         Cheaply dereferences the items to a set depth.
@@ -49,8 +48,8 @@ class DeReference(object):
 
             if is_list and all([i.__class__ == doc_type for i in items]):
                 return items
-            elif not is_list and all([i.__class__ == doc_type
-                                      for i in items.values()]):
+            elif not is_list and all(
+                    [i.__class__ == doc_type for i in items.values()]):
                 return items
             elif not field.dbref:
                 if not hasattr(items, 'items'):
@@ -101,7 +100,7 @@ class DeReference(object):
             if isinstance(item, (Document, EmbeddedDocument)):
                 for field_name, field in item._fields.iteritems():
                     v = item._data.get(field_name, None)
-                    if isinstance(v, (DBRef)):
+                    if isinstance(v, DBRef):
                         reference_map.setdefault(field.document_type, set()).add(v.id)
                     elif isinstance(v, (dict, SON)) and '_ref' in v:
                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id)
@@ -112,7 +111,7 @@ class DeReference(object):
                     if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                         key = field_cls
                         reference_map.setdefault(key, set()).update(refs)
-            elif isinstance(item, (DBRef)):
+            elif isinstance(item, DBRef):
                 reference_map.setdefault(item.collection, set()).add(item.id)
             elif isinstance(item, (dict, SON)) and '_ref' in item:
                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
@@ -155,7 +154,7 @@ class DeReference(object):
                 elif doc_type is None:
                     doc = get_document(
                         ''.join(x.capitalize()
                                 for x in collection.split('_')))._from_son(ref)
                 else:
                     doc = doc_type._from_son(ref)
                 object_map[(collection, doc.id)] = doc
@@ -220,12 +219,12 @@ class DeReference(object):
                 elif isinstance(v, (Document, EmbeddedDocument)):
                     for field_name, field in v._fields.iteritems():
                         v = data[k]._data.get(field_name, None)
-                        if isinstance(v, (DBRef)):
+                        if isinstance(v, DBRef):
                             data[k]._data[field_name] = self.object_map.get(
                                 (v.collection, v.id), v)
                         elif isinstance(v, (dict, SON)) and '_ref' in v:
                             data[k]._data[field_name] = self.object_map.get(
-                                (v['_ref'].collection , v['_ref'].id), v)
+                                (v['_ref'].collection, v['_ref'].id), v)
                         elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                             item_name = "{0}.{1}.{2}".format(name, k, field_name)
                             data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name)
mongoengine/document.py

@@ -46,7 +46,6 @@ class InvalidCollectionError(Exception):
 
 
 class EmbeddedDocument(BaseDocument):
-
     """A :class:`~mongoengine.Document` that isn't stored in its own
     collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
     fields on :class:`~mongoengine.Document`\ s through the
@@ -61,7 +60,7 @@ class EmbeddedDocument(BaseDocument):
     dictionary.
     """
 
-    __slots__ = ('_instance')
+    __slots__ = ('_instance', )
 
     # The __metaclass__ attribute is removed by 2to3 when running with Python3
     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
@@ -89,7 +88,6 @@ class EmbeddedDocument(BaseDocument):
 
 
 class Document(BaseDocument):
-
     """The base class used for defining the structure and properties of
     collections of documents stored in MongoDB. Inherit from this class, and
     add fields as class attributes to define a document's structure.
@@ -160,7 +158,9 @@ class Document(BaseDocument):
 
         def fset(self, value):
             return setattr(self, self._meta['id_field'], value)
+
         return property(fget, fset)
+
     pk = pk()
 
     @classmethod
@@ -190,7 +190,7 @@ class Document(BaseDocument):
             # options match the specified capped options
             options = cls._collection.options()
             if options.get('max') != max_documents or \
                     options.get('size') != max_size:
                 msg = (('Cannot create collection "%s" as a capped '
                         'collection as it already exists')
                        % cls._collection)
@@ -248,7 +248,7 @@ class Document(BaseDocument):
         return True
 
     def save(self, force_insert=False, validate=True, clean=True,
              write_concern=None, cascade=None, cascade_kwargs=None,
              _refs=None, save_condition=None, **kwargs):
         """Save the :class:`~mongoengine.Document` to the database. If the
         document already exists, it will be updated, otherwise it will be
@@ -455,7 +455,7 @@ class Document(BaseDocument):
         if kwargs.get('upsert', False):
             query = self.to_mongo()
             if "_cls" in query:
-                del(query["_cls"])
+                del query["_cls"]
             return self._qs.filter(**query).update_one(**kwargs)
         else:
             raise OperationError(
@@ -580,8 +580,8 @@ class Document(BaseDocument):
         if not self.pk:
             raise self.DoesNotExist("Document does not exist")
         obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
-            **self._object_key).only(*fields).limit(1
-            ).select_related(max_depth=max_depth)
+            **self._object_key).only(*fields).limit(
+            1).select_related(max_depth=max_depth)
 
         if obj:
             obj = obj[0]
@@ -640,11 +640,11 @@ class Document(BaseDocument):
                    for class_name in document_cls._subclasses
                    if class_name != document_cls.__name__] + [document_cls]
 
-        for cls in classes:
+        for klass in classes:
             for document_cls in documents:
-                delete_rules = cls._meta.get('delete_rules') or {}
+                delete_rules = klass._meta.get('delete_rules') or {}
                 delete_rules[(document_cls, field_name)] = rule
-                cls._meta['delete_rules'] = delete_rules
+                klass._meta['delete_rules'] = delete_rules
 
     @classmethod
     def drop_collection(cls):
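The ``cls`` to ``klass`` rename above is not cosmetic: inside a ``@classmethod``, rebinding ``cls`` as a loop variable silently shadows the class the method was called on. A minimal standalone illustration of the hazard, with hypothetical names:

.. code-block:: python

    class Example(object):
        @classmethod
        def demo(cls, classes):
            for cls in classes:   # rebinds the classmethod's 'cls'
                pass
            return cls            # now the last element of 'classes'!

    print(Example.demo([int, str]))   # -> str, not Example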
@@ -769,7 +769,7 @@ class Document(BaseDocument):
                                  **index_opts)
 
     @classmethod
-    def list_indexes(cls, go_up=True, go_down=True):
+    def list_indexes(cls):
         """ Lists all of the indexes that should be created for given
         collection. It includes all the indexes from super- and sub-classes.
         """
@@ -816,8 +816,8 @@ class Document(BaseDocument):
             return indexes
 
         indexes = []
-        for cls in classes:
-            for index in get_indexes_spec(cls):
+        for klass in classes:
+            for index in get_indexes_spec(klass):
                 if index not in indexes:
                     indexes.append(index)
 
@@ -856,7 +856,6 @@ class Document(BaseDocument):
 
 
 class DynamicDocument(Document):
-
     """A Dynamic Document class allowing flexible, expandable and uncontrolled
     schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
     way as an ordinary document but has expando style properties. Any data
@@ -888,7 +887,6 @@ class DynamicDocument(Document):
 
 
 class DynamicEmbeddedDocument(EmbeddedDocument):
-
     """A Dynamic Embedded Document class allowing flexible, expandable and
     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
     information about dynamic documents.
@@ -915,7 +913,6 @@ class DynamicEmbeddedDocument(EmbeddedDocument):
 
 
 class MapReduceDocument(object):
-
     """A document returned from a map/reduce query.
 
     :param collection: An instance of :class:`~pymongo.Collection`
mongoengine/errors.py

@@ -115,6 +115,7 @@ class ValidationError(AssertionError):
                 else:
                     return unicode(source)
             return errors_dict
+
         if not self.errors:
             return {}
         return build_dict(self.errors)
@@ -125,9 +126,9 @@ class ValidationError(AssertionError):
         def generate_key(value, prefix=''):
             if isinstance(value, list):
                 value = ' '.join([generate_key(k) for k in value])
-            if isinstance(value, dict):
+            elif isinstance(value, dict):
                 value = ' '.join(
                     [generate_key(v, k) for k, v in value.iteritems()])
 
             results = "%s.%s" % (prefix, value) if prefix else value
             return results
|
@ -47,12 +47,10 @@ __all__ = [
|
|||||||
'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField',
|
'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField',
|
||||||
'MultiPolygonField', 'GeoJsonBaseField']
|
'MultiPolygonField', 'GeoJsonBaseField']
|
||||||
|
|
||||||
|
|
||||||
RECURSIVE_REFERENCE_CONSTANT = 'self'
|
RECURSIVE_REFERENCE_CONSTANT = 'self'
|
||||||
|
|
||||||
|
|
||||||
class StringField(BaseField):
|
class StringField(BaseField):
|
||||||
|
|
||||||
"""A unicode string field.
|
"""A unicode string field.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -112,7 +110,6 @@ class StringField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class URLField(StringField):
|
class URLField(StringField):
|
||||||
|
|
||||||
"""A field that validates input as an URL.
|
"""A field that validates input as an URL.
|
||||||
|
|
||||||
.. versionadded:: 0.3
|
.. versionadded:: 0.3
|
||||||
@ -159,7 +156,6 @@ class URLField(StringField):
|
|||||||
|
|
||||||
|
|
||||||
class EmailField(StringField):
|
class EmailField(StringField):
|
||||||
|
|
||||||
"""A field that validates input as an E-Mail-Address.
|
"""A field that validates input as an E-Mail-Address.
|
||||||
|
|
||||||
.. versionadded:: 0.4
|
.. versionadded:: 0.4
|
||||||
@ -181,7 +177,6 @@ class EmailField(StringField):
|
|||||||
|
|
||||||
|
|
||||||
class IntField(BaseField):
|
class IntField(BaseField):
|
||||||
|
|
||||||
"""An 32-bit integer field.
|
"""An 32-bit integer field.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -216,7 +211,6 @@ class IntField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class LongField(BaseField):
|
class LongField(BaseField):
|
||||||
|
|
||||||
"""An 64-bit integer field.
|
"""An 64-bit integer field.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -251,7 +245,6 @@ class LongField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class FloatField(BaseField):
|
class FloatField(BaseField):
|
||||||
|
|
||||||
"""An floating point number field.
|
"""An floating point number field.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -286,7 +279,6 @@ class FloatField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class DecimalField(BaseField):
|
class DecimalField(BaseField):
|
||||||
|
|
||||||
"""A fixed-point decimal number field.
|
"""A fixed-point decimal number field.
|
||||||
|
|
||||||
.. versionchanged:: 0.8
|
.. versionchanged:: 0.8
|
||||||
@ -360,7 +352,6 @@ class DecimalField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class BooleanField(BaseField):
|
class BooleanField(BaseField):
|
||||||
|
|
||||||
"""A boolean field type.
|
"""A boolean field type.
|
||||||
|
|
||||||
.. versionadded:: 0.1.2
|
.. versionadded:: 0.1.2
|
||||||
@ -379,7 +370,6 @@ class BooleanField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class DateTimeField(BaseField):
|
class DateTimeField(BaseField):
|
||||||
|
|
||||||
"""A datetime field.
|
"""A datetime field.
|
||||||
|
|
||||||
Uses the python-dateutil library if available alternatively use time.strptime
|
Uses the python-dateutil library if available alternatively use time.strptime
|
||||||
@ -447,7 +437,6 @@ class DateTimeField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class ComplexDateTimeField(StringField):
|
class ComplexDateTimeField(StringField):
|
||||||
|
|
||||||
"""
|
"""
|
||||||
ComplexDateTimeField handles microseconds exactly instead of rounding
|
ComplexDateTimeField handles microseconds exactly instead of rounding
|
||||||
like DateTimeField does.
|
like DateTimeField does.
|
||||||
@ -531,7 +520,6 @@ class ComplexDateTimeField(StringField):
|
|||||||
|
|
||||||
|
|
||||||
class EmbeddedDocumentField(BaseField):
|
class EmbeddedDocumentField(BaseField):
|
||||||
|
|
||||||
"""An embedded document field - with a declared document_type.
|
"""An embedded document field - with a declared document_type.
|
||||||
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
|
Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
|
||||||
"""
|
"""
|
||||||
@ -585,7 +573,6 @@ class EmbeddedDocumentField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class GenericEmbeddedDocumentField(BaseField):
|
class GenericEmbeddedDocumentField(BaseField):
|
||||||
|
|
||||||
"""A generic embedded document field - allows any
|
"""A generic embedded document field - allows any
|
||||||
:class:`~mongoengine.EmbeddedDocument` to be stored.
|
:class:`~mongoengine.EmbeddedDocument` to be stored.
|
||||||
|
|
||||||
@ -624,7 +611,6 @@ class GenericEmbeddedDocumentField(BaseField):
|
|||||||
|
|
||||||
|
|
||||||
class DynamicField(BaseField):
|
class DynamicField(BaseField):
|
||||||
|
|
||||||
"""A truly dynamic field type capable of handling different and varying
|
"""A truly dynamic field type capable of handling different and varying
|
||||||
types of data.
|
types of data.
|
||||||
|
|
||||||
@@ -641,9 +627,9 @@ class DynamicField(BaseField):
         cls = value.__class__
         val = value.to_mongo()
         # If we its a document thats not inherited add _cls
-        if (isinstance(value, Document)):
+        if isinstance(value, Document):
             val = {"_ref": value.to_dbref(), "_cls": cls.__name__}
-        if (isinstance(value, EmbeddedDocument)):
+        if isinstance(value, EmbeddedDocument):
             val['_cls'] = cls.__name__
         return val
 
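
Review note: the hunk above only drops redundant parentheses around the isinstance tests; behaviour is unchanged. For readers skimming the diff, a minimal sketch of what this to_mongo branch produces (class names are illustrative, not from the patch):

    from mongoengine import Document, DynamicDocument, StringField

    class Tag(Document):          # a saved, referenced document
        name = StringField()

    class Page(DynamicDocument):  # dynamic attributes use DynamicField
        pass

    tag = Tag(name='news').save()
    page = Page(anything=tag).save()
    # the dynamic value should be stored roughly as
    # {"_ref": DBRef('tag', ...), "_cls": "Tag"}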
@@ -678,18 +664,15 @@ class DynamicField(BaseField):
 
     def prepare_query_value(self, op, value):
         if isinstance(value, basestring):
-            from mongoengine.fields import StringField
             return StringField().prepare_query_value(op, value)
         return super(DynamicField, self).prepare_query_value(op, self.to_mongo(value))
 
-
     def validate(self, value, clean=True):
         if hasattr(value, "validate"):
             value.validate(clean=clean)
 
 
 class ListField(ComplexBaseField):
-
     """A list field that wraps a standard field, allowing multiple instances
     of the field to be used as a list in the database.
 
@@ -714,9 +697,10 @@ class ListField(ComplexBaseField):
 
     def prepare_query_value(self, op, value):
         if self.field:
-            if op in ('set', 'unset') and (not isinstance(value, basestring)
-                    and not isinstance(value, BaseDocument)
-                    and hasattr(value, '__iter__')):
+            if op in ('set', 'unset') and (
+                    not isinstance(value, basestring) and
+                    not isinstance(value, BaseDocument) and
+                    hasattr(value, '__iter__')):
                 return [self.field.prepare_query_value(op, v) for v in value]
             return self.field.prepare_query_value(op, value)
         return super(ListField, self).prepare_query_value(op, value)
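
Review note: the rewrap above only moves the `and` operators; the logic is untouched: for `set`/`unset`, any non-string, non-document iterable is prepared element by element through the wrapped field. A hedged usage sketch (model name is illustrative):

    from mongoengine import Document, ListField, IntField

    class Scores(Document):
        values = ListField(IntField())

    doc = Scores(values=[1, 2]).save()
    # each element passes through IntField.prepare_query_value,
    # so string digits should be coerced to ints before the update
    Scores.objects(id=doc.id).update(set__values=["3", 4])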
@@ -734,12 +718,10 @@ class EmbeddedDocumentListField(ListField):
     """
 
-    def __init__(self, document_type, *args, **kwargs):
+    def __init__(self, document_type, **kwargs):
         """
         :param document_type: The type of
          :class:`~mongoengine.EmbeddedDocument` the list will hold.
-        :param args: Arguments passed directly into the parent
-         :class:`~mongoengine.ListField`.
         :param kwargs: Keyword arguments passed directly into the parent
          :class:`~mongoengine.ListField`.
         """
 
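
Review note: after this change EmbeddedDocumentListField forwards only keyword arguments to ListField, so positional extras are now rejected. A minimal sketch of the surviving call style (names illustrative):

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentListField, StringField)

    class Comment(EmbeddedDocument):
        text = StringField()

    class Post(Document):
        # keyword arguments such as default= still pass through to ListField
        comments = EmbeddedDocumentListField(Comment, default=list)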
@@ -749,7 +731,6 @@ class EmbeddedDocumentListField(ListField):
 
 
 class SortedListField(ListField):
-
     """A ListField that sorts the contents of its list before writing to
     the database in order to ensure that a sorted list is always
     retrieved.
@@ -801,7 +782,6 @@ def key_has_dot_or_dollar(d):
 
 
 class DictField(ComplexBaseField):
-
     """A dictionary field that wraps a standard Python dictionary. This is
     similar to an embedded document, but the structure is not defined.
 
@@ -857,7 +837,6 @@ class DictField(ComplexBaseField):
 
 
 class MapField(DictField):
-
     """A field that maps a name to a specified field type. Similar to
     a DictField, except the 'value' of each item must match the specified
     field type.
@@ -873,7 +852,6 @@ class MapField(DictField):
 
 
 class ReferenceField(BaseField):
-
     """A reference to a document that will be automatically dereferenced on
     access (lazily).
 
@@ -995,7 +973,6 @@ class ReferenceField(BaseField):
             super(ReferenceField, self).prepare_query_value(op, value)
         return self.to_mongo(value)
 
-
     def validate(self, value):
 
         if not isinstance(value, (self.document_type, DBRef)):
@@ -1010,7 +987,6 @@ class ReferenceField(BaseField):
 
 
 class CachedReferenceField(BaseField):
-
     """
     A referencefield with cache fields to purpose pseudo-joins
 
@@ -1025,7 +1001,6 @@ class CachedReferenceField(BaseField):
         """
         if not isinstance(document_type, basestring) and \
                 not issubclass(document_type, (Document, basestring)):
-
             self.error('Argument to CachedReferenceField constructor must be a'
                        ' document class or a string')
 
@@ -1036,6 +1011,7 @@ class CachedReferenceField(BaseField):
 
     def start_listener(self):
         from mongoengine import signals
+
         signals.post_save.connect(self.on_document_pre_save,
                                   sender=self.document_type)
 
@@ -1089,7 +1065,6 @@ class CachedReferenceField(BaseField):
     def to_mongo(self, document):
         id_field_name = self.document_type._meta['id_field']
         id_field = self.document_type._fields[id_field_name]
-        doc_tipe = self.document_type
 
         if isinstance(document, Document):
             # We need the id from the saved object to create the DBRef
@@ -1099,6 +1074,7 @@ class CachedReferenceField(BaseField):
                            ' been saved to the database')
         else:
             self.error('Only accept a document object')
+            # TODO: should raise here or will fail next statement
 
         value = SON((
             ("_id", id_field.to_mongo(id_)),
@@ -1121,7 +1097,7 @@ class CachedReferenceField(BaseField):
 
     def validate(self, value):
 
-        if not isinstance(value, (self.document_type)):
+        if not isinstance(value, self.document_type):
             self.error("A CachedReferenceField only accepts documents")
 
         if isinstance(value, Document) and value.id is None:
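
Review note: `doc_tipe` (a misspelled, unused local for doc_type) is simply dropped; to_mongo still builds the cached SON value shown in the context lines. A rough sketch of what a cached reference stores (names illustrative):

    from mongoengine import Document, CachedReferenceField, StringField

    class Author(Document):
        name = StringField()

    class Book(Document):
        # caches the author's name inside each book document
        author = CachedReferenceField(Author, fields=['name'])

    a = Author(name='Ann').save()
    b = Book(author=a).save()
    # stored roughly as: {"author": {"_id": ObjectId(...), "name": "Ann"}}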
@@ -1150,7 +1126,6 @@ class CachedReferenceField(BaseField):
 
 
 class GenericReferenceField(BaseField):
-
     """A reference to *any* :class:`~mongoengine.document.Document` subclass
     that will be automatically dereferenced on access (lazily).
 
@@ -1232,7 +1207,6 @@ class GenericReferenceField(BaseField):
 
 
 class BinaryField(BaseField):
-
     """A binary data field.
     """
 
@@ -1264,7 +1238,6 @@ class GridFSError(Exception):
 
 
 class GridFSProxy(object):
-
     """Proxy object to handle writing and reading of files to and from GridFS
 
     .. versionadded:: 0.4
@@ -1278,12 +1251,12 @@ class GridFSProxy(object):
                  instance=None,
                  db_alias=DEFAULT_CONNECTION_NAME,
                  collection_name='fs'):
-        self.grid_id = grid_id # Store GridFS id for file
+        self.grid_id = grid_id  # Store GridFS id for file
         self.key = key
         self.instance = instance
         self.db_alias = db_alias
         self.collection_name = collection_name
-        self.newfile = None # Used for partial writes
+        self.newfile = None  # Used for partial writes
         self.gridout = None
 
     def __getattr__(self, name):
@@ -1410,7 +1383,6 @@ class GridFSProxy(object):
 
 
 class FileField(BaseField):
-
     """A GridFS storage field.
 
     .. versionadded:: 0.4
@@ -1444,7 +1416,7 @@ class FileField(BaseField):
     def __set__(self, instance, value):
         key = self.name
         if ((hasattr(value, 'read') and not
-             isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
+                isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
             # using "FileField() = file/string" notation
             grid_file = instance._data.get(self.name)
             # If a file already exists, delete it
@@ -1494,7 +1466,6 @@ class FileField(BaseField):
 
 
 class ImageGridFsProxy(GridFSProxy):
-
     """
     Proxy for ImageField
 
@@ -1518,6 +1489,7 @@ class ImageGridFsProxy(GridFSProxy):
             raise ValidationError('Invalid image: %s' % e)
 
         # Progressive JPEG
+        # TODO: fixme, at least unused, at worst bad implementation
         progressive = img.info.get('progressive') or False
 
         if (kwargs.get('progressive') and
@@ -1578,7 +1550,7 @@ class ImageGridFsProxy(GridFSProxy):
         if out and out.thumbnail_id:
             self.fs.delete(out.thumbnail_id)
 
-        return super(ImageGridFsProxy, self).delete(*args, **kwargs)
+        return super(ImageGridFsProxy, self).delete()
 
     def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
         w, h = thumbnail.size
@@ -1633,7 +1605,6 @@ class ImproperlyConfigured(Exception):
 
 
 class ImageField(FileField):
-
     """
     A Image File storage field.
 
@@ -1672,7 +1643,6 @@ class ImageField(FileField):
 
 
 class SequenceField(BaseField):
-
     """Provides a sequential counter see:
     http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
 
@@ -1796,7 +1766,6 @@ class SequenceField(BaseField):
 
 
 class UUIDField(BaseField):
-
     """A UUID field.
 
     .. versionadded:: 0.6
@@ -1843,13 +1812,12 @@ class UUIDField(BaseField):
             if not isinstance(value, basestring):
                 value = str(value)
             try:
-                value = uuid.UUID(value)
+                uuid.UUID(value)
             except Exception, exc:
                 self.error('Could not convert to UUID: %s' % exc)
 
 
 class GeoPointField(BaseField):
-
     """A list storing a longitude and latitude coordinate.
 
     .. note:: this represents a generic point in a 2D plane and a legacy way of
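
Review note: UUIDField.validate now only checks that the string parses as a UUID instead of rebinding `value`; conversion itself stays in to_python. The check boils down to this sketch:

    import uuid

    def looks_like_uuid(value):
        # mirrors the validation above: parse, discard the result
        try:
            uuid.UUID(value)
            return True
        except (ValueError, AttributeError, TypeError):
            return False

    print(looks_like_uuid('12345678-1234-5678-1234-567812345678'))  # True
    print(looks_like_uuid('not-a-uuid'))                            # False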
@@ -1879,7 +1847,6 @@ class GeoPointField(BaseField):
 
 
 class PointField(GeoJsonBaseField):
-
     """A GeoJSON field storing a longitude and latitude coordinate.
 
     The data is represented as:
@@ -1900,7 +1867,6 @@ class PointField(GeoJsonBaseField):
 
 
 class LineStringField(GeoJsonBaseField):
-
     """A GeoJSON field storing a line of longitude and latitude coordinates.
 
     The data is represented as:
@@ -1920,7 +1886,6 @@ class LineStringField(GeoJsonBaseField):
 
 
 class PolygonField(GeoJsonBaseField):
-
     """A GeoJSON field storing a polygon of longitude and latitude coordinates.
 
     The data is represented as:
@@ -1943,7 +1908,6 @@ class PolygonField(GeoJsonBaseField):
 
 
 class MultiPointField(GeoJsonBaseField):
-
     """A GeoJSON field storing a list of Points.
 
     The data is represented as:
@@ -1964,7 +1928,6 @@ class MultiPointField(GeoJsonBaseField):
 
 
 class MultiLineStringField(GeoJsonBaseField):
-
     """A GeoJSON field storing a list of LineStrings.
 
     The data is represented as:
@@ -1985,7 +1948,6 @@ class MultiLineStringField(GeoJsonBaseField):
 
 
 class MultiPolygonField(GeoJsonBaseField):
-
     """A GeoJSON field storing list of Polygons.
 
     The data is represented as:

@@ -14,6 +14,7 @@ PY3 = sys.version_info[0] == 3
 if PY3:
     import codecs
     from io import BytesIO as StringIO
+
     # return s converted to binary. b('test') should be equivalent to b'test'
     def b(s):
         return codecs.latin_1_encode(s)[0]
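
Review note: this helper is the Python 3 branch of the b() compatibility shim; it latin-1-encodes a text string to bytes. A tiny self-contained check:

    import codecs

    def b(s):
        # equivalent to the b() above: 'test' -> b'test'
        return codecs.latin_1_encode(s)[0]

    assert b('test') == b'test'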

@@ -43,7 +43,6 @@ RE_TYPE = type(re.compile(''))
 
 
 class BaseQuerySet(object):
-
     """A set of results returned from a query. Wraps a MongoDB cursor,
     providing :class:`~mongoengine.Document` objects as the results.
     """
@@ -87,8 +86,8 @@ class BaseQuerySet(object):
         self.only_fields = []
         self._max_time_ms = None
 
-    def __call__(self, q_obj=None, class_check=True, slave_okay=False,
-                 read_preference=None, **query):
+    def __call__(self, q_obj=None, class_check=True, read_preference=None,
+                 **query):
         """Filter the selected documents by calling the
         :class:`~mongoengine.queryset.QuerySet` with a query.
 
@@ -98,9 +97,7 @@ class BaseQuerySet(object):
            objects, only the last one will be used
         :param class_check: If set to False bypass class name check when
            querying collection
-        :param slave_okay: if True, allows this query to be run against a
-           replica secondary.
-        :params read_preference: if set, overrides connection-level
+        :param read_preference: if set, overrides connection-level
            read_preference from `ReplicaSetConnection`.
         :param query: Django-style query keyword arguments
         """
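
Review note: slave_okay disappears from the public signature here (PyMongo 3 dropped it); read_preference is the replacement. A hedged usage sketch (the BlogPost model is illustrative):

    from pymongo.read_preferences import ReadPreference

    # route this query to a secondary instead of passing slave_okay=True
    posts = BlogPost.objects(author='ross').read_preference(
        ReadPreference.SECONDARY_PREFERRED)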
@@ -162,8 +159,8 @@ class BaseQuerySet(object):
             if queryset._as_pymongo:
                 return queryset._get_as_pymongo(queryset._cursor[key])
             return queryset._document._from_son(queryset._cursor[key],
-                _auto_dereference=self._auto_dereference,
-                only_fields=self.only_fields)
+                                                _auto_dereference=self._auto_dereference,
+                                                only_fields=self.only_fields)
 
         raise AttributeError
 
@@ -205,7 +202,8 @@ class BaseQuerySet(object):
         :param language: The language that determines the list of stop words
             for the search and the rules for the stemmer and tokenizer.
             If not specified, the search uses the default language of the index.
-            For supported languages, see `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
+            For supported languages, see
+            `Text Search Languages <http://docs.mongodb.org/manual/reference/text-search-languages/#text-search-languages>`.
         """
         queryset = self.clone()
         if queryset._search_text:
@@ -271,7 +269,7 @@ class BaseQuerySet(object):
     def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
         """bulk insert documents
 
-        :param docs_or_doc: a document or list of documents to be inserted
+        :param doc_or_docs: a document or list of documents to be inserted
         :param load_bulk (optional): If True returns the list of document
             instances
         :param write_concern: Extra keyword arguments are passed down to
@@ -406,8 +404,8 @@ class BaseQuerySet(object):
             if rule == CASCADE:
                 ref_q = document_cls.objects(**{field_name + '__in': self})
                 ref_q_count = ref_q.count()
-                if (doc != document_cls and ref_q_count > 0
-                        or (doc == document_cls and ref_q_count > 0)):
+                if (doc != document_cls and ref_q_count > 0 or
+                        (doc == document_cls and ref_q_count > 0)):
                     ref_q.delete(write_concern=write_concern)
             elif rule == NULLIFY:
                 document_cls.objects(**{field_name + '__in': self}).update(
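
Review note: only the line break moves here; the condition is untouched. Worth noting while reading: `(A and C) or (not A and C)` reduces to `C`, so this test is equivalent to `ref_q_count > 0` and could be simplified further. A quick exhaustive check:

    for doc_is_cls in (True, False):
        for count_positive in (True, False):
            original = (not doc_is_cls and count_positive or
                        (doc_is_cls and count_positive))
            assert original == count_positive  # holds in all four cases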
@@ -528,7 +526,7 @@ class BaseQuerySet(object):
         try:
             if IS_PYMONGO_3:
                 if full_response:
-                    msg = ("With PyMongo 3+, it is not possible anymore to get the full response.")
+                    msg = "With PyMongo 3+, it is not possible anymore to get the full response."
                     warnings.warn(msg, DeprecationWarning)
                 if remove:
                     result = queryset._collection.find_one_and_delete(
@@ -597,9 +595,10 @@ class BaseQuerySet(object):
                 doc_map[doc['_id']] = self._get_as_pymongo(doc)
         else:
             for doc in docs:
-                doc_map[doc['_id']] = self._document._from_son(doc,
-                    only_fields=self.only_fields,
-                    _auto_dereference=self._auto_dereference)
+                doc_map[doc['_id']] = self._document._from_son(
+                    doc,
+                    only_fields=self.only_fields,
+                    _auto_dereference=self._auto_dereference)
 
         return doc_map
 
@@ -619,7 +618,8 @@ class BaseQuerySet(object):
         return self
 
     def using(self, alias):
-        """This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database.
+        """This method is for controlling which database the QuerySet will be
+        evaluated against if you are using more than one database.
 
         :param alias: The database alias
 
@@ -830,7 +830,6 @@ class BaseQuerySet(object):
         cleaned_fields = []
         for key, value in kwargs.items():
             parts = key.split('__')
-            op = None
             if parts[0] in operators:
                 op = parts.pop(0)
                 value = {'$' + op: value}
@@ -967,7 +966,7 @@ class BaseQuerySet(object):
         """Instead of returning Document instances, return raw values from
         pymongo.
 
-        :param coerce_type: Field types (if applicable) would be use to
+        :param coerce_types: Field types (if applicable) would be use to
             coerce types.
         """
         queryset = self.clone()
@@ -1259,8 +1258,8 @@ class BaseQuerySet(object):
         the aggregation framework instead of map-reduce.
         """
         result = self._document._get_collection().aggregate([
-            { '$match': self._query },
-            { '$group': { '_id': 'sum', 'total': { '$sum': '$' + field } } }
+            {'$match': self._query},
+            {'$group': {'_id': 'sum', 'total': {'$sum': '$' + field}}}
         ])
         if IS_PYMONGO_3:
             result = list(result)
@@ -1335,8 +1334,8 @@ class BaseQuerySet(object):
         uses the aggregation framework instead of map-reduce.
         """
         result = self._document._get_collection().aggregate([
-            { '$match': self._query },
-            { '$group': { '_id': 'avg', 'total': { '$avg': '$' + field } } }
+            {'$match': self._query},
+            {'$group': {'_id': 'avg', 'total': {'$avg': '$' + field}}}
         ])
         if IS_PYMONGO_3:
             result = list(result)
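
Review note: both hunks only strip the spaces inside the dict literals (PEP8 E201/E202); the pipeline stays a two-stage $match + $group. For readers unfamiliar with it, a minimal PyMongo sketch of the sum case (collection and field names are illustrative):

    from pymongo import MongoClient

    coll = MongoClient().test_db.scores
    result = list(coll.aggregate([
        {'$match': {}},  # stands in for the queryset's filter
        {'$group': {'_id': 'sum', 'total': {'$sum': '$points'}}}
    ]))
    # e.g. [{'_id': 'sum', 'total': 42}] when documents match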
@@ -1616,7 +1615,7 @@ class BaseQuerySet(object):
 
         return frequencies
 
-    def _fields_to_dbfields(self, fields, subdoc=False):
+    def _fields_to_dbfields(self, fields):
         """Translate fields paths to its db equivalents"""
         ret = []
         subclasses = []
@@ -1638,7 +1637,7 @@ class BaseQuerySet(object):
                         ret.append(subfield)
                         found = True
                         break
-                    except LookUpError, e:
+                    except LookUpError:
                         pass
 
                 if not found:

@@ -1,4 +1,3 @@
-
 __all__ = ('QueryFieldList',)
 
 

@@ -61,7 +61,6 @@ class QuerySet(BaseQuerySet):
             data[-1] = "...(remaining elements truncated)..."
         return repr(data)
 
-
     def _iter_results(self):
         """A generator for iterating over the result cache.
 
@@ -74,7 +73,7 @@ class QuerySet(BaseQuerySet):
         upper = len(self._result_cache)
         while pos < upper:
             yield self._result_cache[pos]
-            pos = pos + 1
+            pos += 1
         if not self._has_more:
             raise StopIteration
         if len(self._result_cache) <= pos:
@@ -161,4 +160,4 @@ class QuerySetNoDeRef(QuerySet):
     """Special no_dereference QuerySet"""
 
     def __dereference(items, max_depth=1, instance=None, name=None):
         return items

@@ -11,7 +11,6 @@ from mongoengine.python_support import IS_PYMONGO_3
 
 __all__ = ('query', 'update')
 
-
 COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
                         'all', 'size', 'exists', 'not', 'elemMatch', 'type')
 GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
@@ -27,7 +26,7 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
                    STRING_OPERATORS + CUSTOM_OPERATORS)
 
 
-def query(_doc_cls=None, _field_operation=False, **query):
+def query(_doc_cls=None, **query):
     """Transform a query from Django-style format to Mongo format.
     """
     mongo_query = {}
@@ -45,8 +44,8 @@ def query(_doc_cls=None, _field_operation=False, **query):
         if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
             op = parts.pop()
 
-        #if user escape field name by __
-        if len(parts) > 1 and parts[-1]=="":
+        # Allw to escape operator-like field name by __
+        if len(parts) > 1 and parts[-1] == "":
             parts.pop()
 
         negate = False
@@ -359,6 +358,7 @@ def _infer_geometry(value):
         raise InvalidQueryError("Invalid $geometry dictionary should have "
                                 "type and coordinates keys")
     elif isinstance(value, (list, set)):
+        # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
        try:
            value[0][0][0]
            return {"$geometry": {"type": "Polygon", "coordinates": value}}
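
Review note on the escape branch above: a trailing `__` in a query key lets you filter on a field whose name collides with a query operator. A hedged sketch of how that reads in practice (the models are illustrative, and the exact semantics are inferred from the comment and code, not from this patch's tests):

    from mongoengine import (Document, EmbeddedDocument,
                             EmbeddedDocumentField, IntField)

    class Stats(EmbeddedDocument):
        size = IntField()  # unluckily named like the 'size' operator

    class Item(Document):
        stats = EmbeddedDocumentField(Stats)

    Item(stats=Stats(size=3)).save()
    # without the trailing __, 'size' would be parsed as the size operator;
    # the trailing __ escapes it so it resolves to the stats.size field
    assert Item.objects(stats__size__=3).count() == 1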

@@ -6,6 +6,7 @@ __all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
 signals_available = False
 try:
     from blinker import Namespace
+
     signals_available = True
 except ImportError:
     class Namespace(object):
@@ -27,6 +28,7 @@ except ImportError:
         raise RuntimeError('signalling support is unavailable '
                            'because the blinker library is '
                            'not installed.')
+
     send = lambda *a, **kw: None
     connect = disconnect = has_receivers_for = receivers_for = \
         temporarily_connected_to = _fail

setup.py
@@ -52,12 +52,13 @@ CLASSIFIERS = [
 extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])}
 if sys.version_info[0] == 3:
     extra_opts['use_2to3'] = True
-    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'Pillow>=2.0.0']
+    extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0']
     if "test" in sys.argv or "nosetests" in sys.argv:
         extra_opts['packages'] = find_packages()
         extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]}
 else:
-    extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'Pillow>=2.0.0', 'python-dateutil']
+    # coverage 4 does not support Python 3.2 anymore
+    extra_opts['tests_require'] = ['nose', 'coverage==3.7.1', 'blinker', 'Pillow>=2.0.0', 'python-dateutil']
 
 if sys.version_info[0] == 2 and sys.version_info[1] == 6:
     extra_opts['tests_require'].append('unittest2')

@@ -253,13 +253,13 @@ class DeltaTest(unittest.TestCase):
         self.assertEqual(doc.embedded_field.list_field[2].list_field,
                          [1, 2, {'hello': 'world'}])
 
-        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
+        del doc.embedded_field.list_field[2].list_field[2]['hello']
         self.assertEqual(doc._delta(),
                          ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
         doc.save()
         doc = doc.reload(10)
 
-        del(doc.embedded_field.list_field[2].list_field)
+        del doc.embedded_field.list_field[2].list_field
         self.assertEqual(doc._delta(),
                          ({}, {'embedded_field.list_field.2.list_field': 1}))
 
@@ -593,13 +593,13 @@ class DeltaTest(unittest.TestCase):
         self.assertEqual(doc.embedded_field.list_field[2].list_field,
                          [1, 2, {'hello': 'world'}])
 
-        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
+        del doc.embedded_field.list_field[2].list_field[2]['hello']
         self.assertEqual(doc._delta(),
                          ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1}))
         doc.save()
         doc = doc.reload(10)
 
-        del(doc.embedded_field.list_field[2].list_field)
+        del doc.embedded_field.list_field[2].list_field
         self.assertEqual(doc._delta(), ({},
                          {'db_embedded_field.db_list_field.2.db_list_field': 1}))
 
@@ -615,7 +615,7 @@ class DeltaTest(unittest.TestCase):
             SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
 
         p.doc = 123
-        del(p.doc)
+        del p.doc
         self.assertEqual(p._delta(), (
             SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))
 
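
Review note: `del` is a statement, not a function, so `del(x)` and `del x` compile to the same thing; the parentheses are merely misleading. A two-line illustration:

    d = {'hello': 'world'}
    del d['hello']  # preferred statement form; del(d['hello']) is equivalent
    assert d == {}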

@@ -72,7 +72,7 @@ class DynamicTest(unittest.TestCase):
         obj = collection.find_one()
         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name'])
 
-        del(p.misc)
+        del p.misc
         p.save()
 
         p = self.Person.objects.get()
@@ -340,7 +340,7 @@ class DynamicTest(unittest.TestCase):
         person = Person.objects.first()
         person.attrval = "This works"
 
-        person["phone"] = "555-1212" # but this should too
+        person["phone"] = "555-1212"  # but this should too
 
         # Same thing two levels deep
         person["address"]["city"] = "Lundenne"
@@ -356,7 +356,6 @@ class DynamicTest(unittest.TestCase):
 
         self.assertEqual(Person.objects.first().address.city, "Londinium")
 
-
         person = Person.objects.first()
         person["age"] = 35
         person.save()

@@ -143,7 +143,7 @@ class IndexesTest(unittest.TestCase):
             meta = {
                 'indexes': [
                     {
                         'fields': ('title',),
                     },
                 ],
                 'allow_inheritance': True,

@@ -1897,11 +1897,11 @@ class InstanceTest(unittest.TestCase):
         self.assertEqual(BlogPost.objects.count(), 0)
 
     def test_reverse_delete_rule_cascade_triggers_pre_delete_signal(self):
-        ''' ensure the pre_delete signal is triggered upon a cascading deletion
+        """ ensure the pre_delete signal is triggered upon a cascading deletion
         setup a blog post with content, an author and editor
         delete the author which triggers deletion of blogpost via cascade
         blog post's pre_delete signal alters an editor attribute
-        '''
+        """
         class Editor(self.Person):
             review_queue = IntField(default=0)
 

@@ -1,3 +1,3 @@
 from fields import *
 from file_tests import *
 from geo import *

@@ -946,7 +946,7 @@ class FieldTest(unittest.TestCase):
         BlogPost.drop_collection()
 
     def test_reverse_list_sorting(self):
-        '''Ensure that a reverse sorted list field properly sorts values'''
+        """Ensure that a reverse sorted list field properly sorts values"""
 
         class Category(EmbeddedDocument):
             count = IntField()
@@ -1334,7 +1334,6 @@ class FieldTest(unittest.TestCase):
     def test_atomic_update_dict_field(self):
         """Ensure that the entire DictField can be atomically updated."""
 
-
         class Simple(Document):
             mapping = DictField(field=ListField(IntField(required=True)))
 
@@ -1349,7 +1348,7 @@ class FieldTest(unittest.TestCase):
         self.assertEqual({"ints": [3, 4]}, e.mapping)
 
         def create_invalid_mapping():
-            e.update(set__mapping={"somestrings": ["foo", "bar",]})
+            e.update(set__mapping={"somestrings": ["foo", "bar", ]})
 
         self.assertRaises(ValueError, create_invalid_mapping)
 
@@ -1460,7 +1459,7 @@ class FieldTest(unittest.TestCase):
 
         class Action(EmbeddedDocument):
             operation = StringField()
             object = StringField()
 
         class Log(Document):
             name = StringField()
@@ -3774,7 +3773,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
         class A(Document):
             my_list = ListField(EmbeddedDocumentField(EmbeddedWithUnique))
 
-        a1 = A(my_list=[]).save()
+        A(my_list=[]).save()
         self.assertRaises(NotUniqueError, lambda: A(my_list=[]).save())
 
         class EmbeddedWithSparseUnique(EmbeddedDocument):
@@ -3783,9 +3782,8 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
         class B(Document):
             my_list = ListField(EmbeddedDocumentField(EmbeddedWithSparseUnique))
 
-        b1 = B(my_list=[]).save()
-        b2 = B(my_list=[]).save()
+        B(my_list=[]).save()
+        B(my_list=[]).save()
 
-
     def test_filtered_delete(self):
         """
@@ -3824,6 +3822,7 @@ class EmbeddedDocumentListFieldTestCase(unittest.TestCase):
         and doesn't interfere with the rest of field functionalities.
         """
         custom_data = {'a': 'a_value', 'b': [1, 2]}
+
         class CustomData(Document):
             a_field = IntField()
             c_field = IntField(custom_data=custom_data)

@@ -12,7 +12,7 @@ import gridfs
 from nose.plugins.skip import SkipTest
 from mongoengine import *
 from mongoengine.connection import get_db
-from mongoengine.python_support import PY3, b, StringIO
+from mongoengine.python_support import b, StringIO
 
 try:
     from PIL import Image
@@ -112,7 +112,7 @@ class FileTest(unittest.TestCase):
         result.the_file.delete()
 
         # Ensure deleted file returns None
-        self.assertTrue(result.the_file.read() == None)
+        self.assertTrue(result.the_file.read() is None)
 
     def test_file_fields_stream_after_none(self):
         """Ensure that a file field can be written to after it has been saved as
@@ -138,7 +138,7 @@ class FileTest(unittest.TestCase):
         result = StreamFile.objects.first()
         self.assertTrue(streamfile == result)
         self.assertEqual(result.the_file.read(), text + more_text)
-        #self.assertEqual(result.the_file.content_type, content_type)
+        # self.assertEqual(result.the_file.content_type, content_type)
         result.the_file.seek(0)
         self.assertEqual(result.the_file.tell(), 0)
         self.assertEqual(result.the_file.read(len(text)), text)
@@ -148,7 +148,7 @@ class FileTest(unittest.TestCase):
         result.the_file.delete()
 
         # Ensure deleted file returns None
-        self.assertTrue(result.the_file.read() == None)
+        self.assertTrue(result.the_file.read() is None)
 
     def test_file_fields_set(self):
 
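
Review note: `is None` is the PEP8-mandated identity test (E711); `== None` also invokes custom __eq__ implementations, which identity comparison never does. A tiny demonstration:

    class Weird(object):
        def __eq__(self, other):
            return True  # claims equality with everything, even None

    w = Weird()
    print(w == None)   # True - misleading result via __eq__
    print(w is None)   # False - identity cannot be fooled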

@@ -115,7 +115,7 @@ class GeoFieldTest(unittest.TestCase):
         expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0])
         self._test_for_expected_error(Location, coord, expected)
 
-        Location(loc=[[1, 2], [3, 4], [5, 6], [1,2]]).validate()
+        Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate()
 
     def test_polygon_validation(self):
         class Location(Document):
@@ -226,7 +226,7 @@ class GeoFieldTest(unittest.TestCase):
         expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
         self._test_for_expected_error(Location, coord, expected)
 
-        Location(loc=[[[1, 2], [3, 4], [5, 6], [1,2]]]).validate()
+        Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()
 
     def test_multipolygon_validation(self):
         class Location(Document):

@@ -1,11 +1,14 @@
 import unittest
 from mongoengine.base.datastructures import StrictDict, SemiStrictDict
+
+
 class TestStrictDict(unittest.TestCase):
     def strict_dict_class(self, *args, **kwargs):
         return StrictDict.create(*args, **kwargs)
+
     def setUp(self):
         self.dtype = self.strict_dict_class(("a", "b", "c"))
 
     def test_init(self):
         d = self.dtype(a=1, b=1, c=1)
         self.assertEqual((d.a, d.b, d.c), (1, 1, 1))
@@ -38,8 +41,9 @@ class TestStrictDict(unittest.TestCase):
 
     def test_setattr_raises_on_nonexisting_attr(self):
         d = self.dtype()
+
         def _f():
-            d.x=1
+            d.x = 1
         self.assertRaises(AttributeError, _f)
 
     def test_setattr_getattr_special(self):
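
Review note: these last hunks are pure PEP8 mechanics: two blank lines before a top-level class (E302), a blank line before a nested def, and spaces around `=` (E225). The spacing rule in one line each:

    x=1    # E225: missing whitespace around operator
    x = 1  # PEP8-compliant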