Wilson Júnior
2013-05-16 12:50:47 -03:00
49 changed files with 2087 additions and 612 deletions

View File

@@ -15,7 +15,7 @@ import django
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
list(queryset.__all__) + signals.__all__ + list(errors.__all__))
VERSION = (0, 8, 0, '+')
VERSION = (0, 8, 0, 'RC4')
def get_version():

View File

@@ -3,3 +3,6 @@ from mongoengine.base.datastructures import *
from mongoengine.base.document import *
from mongoengine.base.fields import *
from mongoengine.base.metaclasses import *
# Help with backwards compatibility
from mongoengine.errors import *

View File

@@ -6,6 +6,7 @@ from functools import partial
import pymongo
from bson import json_util
from bson.dbref import DBRef
from bson.son import SON
from mongoengine import signals
from mongoengine.common import _import_class
@@ -228,11 +229,16 @@ class BaseDocument(object):
pass
def to_mongo(self):
"""Return data dictionary ready for use with MongoDB.
"""Return as SON data ready for use with MongoDB.
"""
data = {}
for field_name, field in self._fields.iteritems():
data = SON()
data["_id"] = None
data['_cls'] = self._class_name
for field_name in self:
value = self._data.get(field_name, None)
field = self._fields.get(field_name)
if value is not None:
value = field.to_mongo(value)
@@ -244,19 +250,27 @@ class BaseDocument(object):
if value is not None:
data[field.db_field] = value
# Only add _cls if allow_inheritance is True
if (hasattr(self, '_meta') and
self._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True):
data['_cls'] = self._class_name
# If "_id" has not been set, then try and set it
if data["_id"] is None:
data["_id"] = self._data.get("id", None)
if '_id' in data and data['_id'] is None:
del data['_id']
if data['_id'] is None:
data.pop('_id')
# Only add _cls if allow_inheritance is True
if (not hasattr(self, '_meta') or
not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
data.pop('_cls')
if not self._dynamic:
return data
for name, field in self._dynamic_fields.items():
# Sort dynamic fields by key
dynamic_fields = sorted(self._dynamic_fields.iteritems(),
key=operator.itemgetter(0))
for name, field in dynamic_fields:
data[name] = field.to_mongo(self._data.get(name, None))
return data
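The switch from a plain dict to SON gives to_mongo a deterministic key order: _id and _cls (when kept) lead, declared fields follow, and dynamic fields are appended sorted by key. A minimal sketch of the effect, assuming a hypothetical Page dynamic document:
from mongoengine import DynamicDocument, StringField
class Page(DynamicDocument):
    title = StringField()
page = Page(title='Home', zebra=1, alpha=2)
son = page.to_mongo()
# SON preserves insertion order: declared fields first, then the dynamic
# fields 'alpha' and 'zebra' sorted by key; an unset _id and a disallowed
# _cls are popped before returning
print(son.keys())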
def validate(self, clean=True):
@@ -648,7 +662,8 @@ class BaseDocument(object):
if include_cls and direction is not pymongo.GEO2D:
index_list.insert(0, ('_cls', 1))
spec['fields'] = index_list
if index_list:
spec['fields'] = index_list
if spec.get('sparse', False) and len(spec['fields']) > 1:
raise ValueError(
'Sparse indexes can only have one field in them. '
@@ -690,13 +705,13 @@ class BaseDocument(object):
# Add the new index to the list
fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
for f in unique_fields]
for f in unique_fields]
index = {'fields': fields, 'unique': True, 'sparse': sparse}
unique_indexes.append(index)
# Grab any embedded document field unique indexes
if (field.__class__.__name__ == "EmbeddedDocumentField" and
field.document_type != cls):
field.document_type != cls):
field_namespace = "%s." % field_name
doc_cls = field.document_type
unique_indexes += doc_cls._unique_with_indexes(field_namespace)
@@ -704,26 +719,31 @@ class BaseDocument(object):
return unique_indexes
@classmethod
def _geo_indices(cls, inspected=None):
def _geo_indices(cls, inspected=None, parent_field=None):
inspected = inspected or []
geo_indices = []
inspected.append(cls)
EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
GeoPointField = _import_class("GeoPointField")
geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
"PointField", "LineStringField", "PolygonField"]
geo_field_types = tuple([_import_class(field) for field in geo_field_type_names])
for field in cls._fields.values():
if not isinstance(field, (EmbeddedDocumentField, GeoPointField)):
if not isinstance(field, geo_field_types):
continue
if hasattr(field, 'document_type'):
field_cls = field.document_type
if field_cls in inspected:
continue
if hasattr(field_cls, '_geo_indices'):
geo_indices += field_cls._geo_indices(inspected)
geo_indices += field_cls._geo_indices(inspected, parent_field=field.db_field)
elif field._geo_index:
field_name = field.db_field
if parent_field:
field_name = "%s.%s" % (parent_field, field_name)
geo_indices.append({'fields':
[(field.db_field, pymongo.GEO2D)]})
[(field_name, field._geo_index)]})
return geo_indices
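Passing parent_field down means geo indexes declared inside embedded documents are now registered under their dotted path. A small sketch, assuming hypothetical Venue/Address classes:
from mongoengine import Document, EmbeddedDocument, EmbeddedDocumentField, PointField
class Address(EmbeddedDocument):
    location = PointField()
class Venue(Document):
    address = EmbeddedDocumentField(Address)
# The geo index is now built against the dotted field name
Venue._geo_indices()
# [{'fields': [('address.location', '2dsphere')]}]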
@classmethod

View File

@@ -2,7 +2,8 @@ import operator
import warnings
import weakref
from bson import DBRef, ObjectId
from bson import DBRef, ObjectId, SON
import pymongo
from mongoengine.common import _import_class
from mongoengine.errors import ValidationError
@@ -10,7 +11,7 @@ from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import BaseDict, BaseList
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField")
__all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField")
class BaseField(object):
@@ -81,13 +82,16 @@ class BaseField(object):
def __set__(self, instance, value):
"""Descriptor for assigning a value to a field in a document.
"""
changed = False
if (self.name not in instance._data or
instance._data[self.name] != value):
changed = True
instance._data[self.name] = value
if changed and instance._initialised:
instance._mark_as_changed(self.name)
if instance._initialised:
try:
if (self.name not in instance._data or
instance._data[self.name] != value):
instance._mark_as_changed(self.name)
except:
# Values can't be compared, e.g. naive and tz-aware datetimes,
# so mark the field as changed
instance._mark_as_changed(self.name)
instance._data[self.name] = value
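The try/except exists because the equality check itself can raise: on Python 2 (and Python before 3.3), comparing a naive datetime with a timezone-aware one raises TypeError, so the safest fallback is to mark the field as changed. A small illustration of that failure mode:
import datetime
class UTC(datetime.tzinfo):
    def utcoffset(self, dt):
        return datetime.timedelta(0)
    def tzname(self, dt):
        return "UTC"
    def dst(self, dt):
        return datetime.timedelta(0)
naive = datetime.datetime(2013, 5, 16, 12, 0)
aware = datetime.datetime(2013, 5, 16, 12, 0, tzinfo=UTC())
try:
    naive == aware      # TypeError on Python 2: can't compare naive and aware
except TypeError:
    pass                # __set__ falls back to marking the field as changed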
def error(self, message="", errors=None, field_name=None):
"""Raises a ValidationError.
@@ -183,7 +187,7 @@ class ComplexBaseField(BaseField):
# Convert lists / values so we can watch for any changes on them
if (isinstance(value, (list, tuple)) and
not isinstance(value, BaseList)):
not isinstance(value, BaseList)):
value = BaseList(value, instance, self.name)
instance._data[self.name] = value
elif isinstance(value, dict) and not isinstance(value, BaseDict):
@@ -191,8 +195,8 @@ class ComplexBaseField(BaseField):
instance._data[self.name] = value
if (self._auto_dereference and instance._initialised and
isinstance(value, (BaseList, BaseDict))
and not value._dereferenced):
isinstance(value, (BaseList, BaseDict))
and not value._dereferenced):
value = self._dereference(
value, max_depth=1, instance=instance, name=self.name
)
@@ -228,7 +232,7 @@ class ComplexBaseField(BaseField):
if self.field:
value_dict = dict([(key, self.field.to_python(item))
for key, item in value.items()])
for key, item in value.items()])
else:
value_dict = {}
for k, v in value.items():
@@ -279,7 +283,7 @@ class ComplexBaseField(BaseField):
if self.field:
value_dict = dict([(key, self.field.to_mongo(item))
for key, item in value.iteritems()])
for key, item in value.iteritems()])
else:
value_dict = {}
for k, v in value.iteritems():
@@ -295,7 +299,7 @@ class ComplexBaseField(BaseField):
meta = getattr(v, '_meta', {})
allow_inheritance = (
meta.get('allow_inheritance', ALLOW_INHERITANCE)
== True)
is True)
if not allow_inheritance and not self.field:
value_dict[k] = GenericReferenceField().to_mongo(v)
else:
@@ -393,3 +397,100 @@ class ObjectIdField(BaseField):
ObjectId(unicode(value))
except:
self.error('Invalid Object ID')
class GeoJsonBaseField(BaseField):
"""A geo json field storing a geojson style object.
.. versionadded:: 0.8
"""
_geo_index = pymongo.GEOSPHERE
_type = "GeoBase"
def __init__(self, auto_index=True, *args, **kwargs):
"""
:param auto_index: Automatically create a "2dsphere" index. Defaults
to `True`.
"""
self._name = "%sField" % self._type
if not auto_index:
self._geo_index = False
super(GeoJsonBaseField, self).__init__(*args, **kwargs)
def validate(self, value):
"""Validate the GeoJson object based on its type
"""
if isinstance(value, dict):
if set(value.keys()) == set(['type', 'coordinates']):
if value['type'] != self._type:
self.error('%s type must be "%s"' % (self._name, self._type))
return self.validate(value['coordinates'])
else:
self.error('%s can only accept a valid GeoJson dictionary'
' or lists of (x, y)' % self._name)
return
elif not isinstance(value, (list, tuple)):
self.error('%s can only accept lists of [x, y]' % self._name)
return
validate = getattr(self, "_validate_%s" % self._type.lower())
error = validate(value)
if error:
self.error(error)
def _validate_polygon(self, value):
if not isinstance(value, (list, tuple)):
return 'Polygons must contain list of linestrings'
# Quick and dirty validator
try:
value[0][0][0]
except:
return "Invalid Polygon must contain at least one valid linestring"
errors = []
for val in value:
error = self._validate_linestring(val, False)
if not error and val[0] != val[-1]:
error = 'LineStrings must start and end at the same point'
if error and error not in errors:
errors.append(error)
if errors:
return "Invalid Polygon:\n%s" % ", ".join(errors)
def _validate_linestring(self, value, top_level=True):
"""Validates a linestring"""
if not isinstance(value, (list, tuple)):
return 'LineStrings must contain list of coordinate pairs'
# Quick and dirty validator
try:
value[0][0]
except:
return "Invalid LineString must contain at least one valid point"
errors = []
for val in value:
error = self._validate_point(val)
if error and error not in errors:
errors.append(error)
if errors:
if top_level:
return "Invalid LineString:\n%s" % ", ".join(errors)
else:
return "%s" % ", ".join(errors)
def _validate_point(self, value):
"""Validate each set of coords"""
if not isinstance(value, (list, tuple)):
return 'Points must be a list of coordinate pairs'
elif not len(value) == 2:
return "Value (%s) must be a two-dimensional point" % repr(value)
elif (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
return "Both values (%s) in point must be float or int" % repr(value)
def to_mongo(self, value):
if isinstance(value, dict):
return value
return SON([("type", self._type), ("coordinates", value)])
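The validators nest: a polygon is a list of rings, each ring a list of [x, y] points, and every ring must start and end at the same point. A quick sketch using the PolygonField subclass defined later in this commit:
from mongoengine.fields import PolygonField
from mongoengine.errors import ValidationError
field = PolygonField()
field.validate([[[0, 0], [0, 5], [5, 5], [0, 0]]])    # closed ring, passes
try:
    field.validate([[[0, 0], [0, 5], [5, 5]]])        # ring is not closed
except ValidationError:
    pass    # "LineStrings must start and end at the same point"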

View File

@@ -140,8 +140,31 @@ class DocumentMetaclass(type):
base._subclasses += (_cls,)
base._types = base._subclasses  # TODO: deprecate _types
# Handle delete rules
Document, EmbeddedDocument, DictField = cls._import_classes()
if issubclass(new_class, Document):
new_class._collection = None
# Add class to the _document_registry
_document_registry[new_class._class_name] = new_class
# In Python 2, user-defined method objects have special read-only
# attributes 'im_func' and 'im_self' which contain the function obj
# and class instance object respectively. With Python 3 these special
# attributes have been replaced by __func__ and __self__. The Blinker
# module continues to use im_func and im_self, so the code below
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if PY3:
for key, val in new_class.__dict__.items():
if isinstance(val, classmethod):
f = val.__get__(new_class)
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
f.__dict__.update({'im_func': getattr(f, '__func__')})
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
f.__dict__.update({'im_self': getattr(f, '__self__')})
# Handle delete rules
for field in new_class._fields.itervalues():
f = field
f.owner_document = new_class
@@ -167,33 +190,11 @@ class DocumentMetaclass(type):
field.name, delete_rule)
if (field.name and hasattr(Document, field.name) and
EmbeddedDocument not in new_class.mro()):
EmbeddedDocument not in new_class.mro()):
msg = ("%s is a document method and not a valid "
"field name" % field.name)
raise InvalidDocumentError(msg)
if issubclass(new_class, Document):
new_class._collection = None
# Add class to the _document_registry
_document_registry[new_class._class_name] = new_class
# In Python 2, user-defined method objects have special read-only
# attributes 'im_func' and 'im_self' which contain the function obj
# and class instance object respectively. With Python 3 these special
# attributes have been replaced by __func__ and __self__. The Blinker
# module continues to use im_func and im_self, so the code below
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if PY3:
for key, val in new_class.__dict__.items():
if isinstance(val, classmethod):
f = val.__get__(new_class)
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
f.__dict__.update({'im_func': getattr(f, '__func__')})
if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
f.__dict__.update({'im_self': getattr(f, '__self__')})
return new_class
def add_to_class(self, name, value):

View File

@@ -11,6 +11,7 @@ def _import_class(cls_name):
field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField',
'FileField', 'GenericReferenceField',
'GenericEmbeddedDocumentField', 'GeoPointField',
'PointField', 'LineStringField', 'PolygonField',
'ReferenceField', 'StringField', 'ComplexBaseField')
queryset_classes = ('OperationError',)
deref_classes = ('DeReference',)
@@ -33,4 +34,4 @@ def _import_class(cls_name):
for cls in import_classes:
_class_registry_cache[cls] = getattr(module, cls)
return _class_registry_cache.get(cls_name)
return _class_registry_cache.get(cls_name)

View File

@@ -137,11 +137,12 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
if alias not in _dbs:
conn = get_connection(alias)
conn_settings = _connection_settings[alias]
_dbs[alias] = conn[conn_settings['name']]
db = conn[conn_settings['name']]
# Authenticate if necessary
if conn_settings['username'] and conn_settings['password']:
_dbs[alias].authenticate(conn_settings['username'],
conn_settings['password'])
db.authenticate(conn_settings['username'],
conn_settings['password'])
_dbs[alias] = db
return _dbs[alias]
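The fix authenticates against the database handle before caching it in _dbs, so a failed login no longer leaves a broken cached entry behind. A minimal connection sketch, assuming a local MongoDB with a hypothetical user:
from mongoengine import connect
from mongoengine.connection import get_db
# hypothetical credentials for a local mongod
connect('testdb', username='app_user', password='secret')
db = get_db()    # authenticates against 'testdb' first, then caches the handle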

View File

@@ -1,8 +1,10 @@
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.queryset import OperationError, QuerySet
from mongoengine.queryset import QuerySet
__all__ = ("switch_db", "switch_collection", "no_dereference", "query_counter")
__all__ = ("switch_db", "switch_collection", "no_dereference",
"no_sub_classes", "query_counter")
class switch_db(object):
@@ -130,6 +132,36 @@ class no_dereference(object):
return self.cls
class no_sub_classes(object):
""" no_sub_classes context manager.
Only returns instances of this class and none of its (inherited) subclasses::
with no_sub_classes(Group) as Group:
Group.objects.find()
"""
def __init__(self, cls):
""" Construct the no_sub_classes context manager.
:param cls: the class for which subclass querying is turned off
"""
self.cls = cls
def __enter__(self):
""" change the objects default and _auto_dereference values"""
self.cls._all_subclasses = self.cls._subclasses
self.cls._subclasses = (self.cls,)
return self.cls
def __exit__(self, t, value, traceback):
""" Reset the default and _auto_dereference values"""
self.cls._subclasses = self.cls._all_subclasses
delattr(self.cls, '_all_subclasses')
return self.cls
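A short sketch of the intended usage, assuming hypothetical Group/SecretGroup documents and a local MongoDB:
from mongoengine import Document, StringField, connect
from mongoengine.context_managers import no_sub_classes
class Group(Document):
    name = StringField()
    meta = {'allow_inheritance': True}
class SecretGroup(Group):
    pass
connect('testdb')
with no_sub_classes(Group) as Group:
    # inside the block _subclasses is narrowed, so queries made here are
    # limited to Group itself and exclude SecretGroup documents
    Group.objects()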
class QuerySetNoDeRef(QuerySet):
"""Special no_dereference QuerySet"""
def __dereference(items, max_depth=1, instance=None, name=None):

View File

@@ -1,4 +1,3 @@
from __future__ import with_statement
import warnings
import pymongo
@@ -232,7 +231,6 @@ class Document(BaseDocument):
return not updated
return created
upsert = self._created
update_query = {}
if updates:
@@ -241,7 +239,7 @@ class Document(BaseDocument):
update_query["$unset"] = removals
if updates or removals:
last_error = collection.update(select_dict, update_query,
upsert=upsert, **write_concern)
upsert=True, **write_concern)
created = is_new_object(last_error)
cascade = (self._meta.get('cascade', True)
@@ -523,7 +521,6 @@ class Document(BaseDocument):
# an extra index on _cls, as mongodb will use the existing
# index to service queries against _cls
cls_indexed = False
def includes_cls(fields):
first_field = None
if len(fields):
@@ -548,7 +545,7 @@ class Document(BaseDocument):
# If _cls is being used (for polymorphism), it needs an index,
# but only if no other index already begins with _cls
if (index_cls and not cls_indexed and
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) == True):
cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
collection.ensure_index('_cls', background=background,
**index_opts)
@@ -559,7 +556,7 @@ class DynamicDocument(Document):
way as an ordinary document but has expando style properties. Any data
passed or set against the :class:`~mongoengine.DynamicDocument` that is
not a field is automatically converted into a
:class:`~mongoengine.DynamicField` and data can be attributed to that
:class:`~mongoengine.fields.DynamicField` and data can be attributed to that
field.
.. note::

View File

@@ -8,13 +8,14 @@ import uuid
import warnings
from operator import itemgetter
import pymongo
import gridfs
from bson import Binary, DBRef, SON, ObjectId
from mongoengine.errors import ValidationError
from mongoengine.python_support import (PY3, bin_type, txt_type,
str_types, StringIO)
from base import (BaseField, ComplexBaseField, ObjectIdField,
from base import (BaseField, ComplexBaseField, ObjectIdField, GeoJsonBaseField,
get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument
@@ -33,9 +34,8 @@ __all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
'SortedListField', 'DictField', 'MapField', 'ReferenceField',
'GenericReferenceField', 'BinaryField', 'GridFSError',
'GridFSProxy', 'FileField', 'ImageGridFsProxy',
'ImproperlyConfigured', 'ImageField', 'GeoPointField',
'SequenceField', 'UUIDField']
'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField']
RECURSIVE_REFERENCE_CONSTANT = 'self'
@@ -107,11 +107,11 @@ class URLField(StringField):
"""
_URL_REGEX = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def __init__(self, verify_exists=False, url_regex=None, **kwargs):
@@ -128,8 +128,7 @@ class URLField(StringField):
warnings.warn(
"The URLField verify_exists argument has intractable security "
"and performance issues. Accordingly, it has been deprecated.",
DeprecationWarning
)
DeprecationWarning)
try:
request = urllib2.Request(value)
urllib2.urlopen(request)
@@ -297,8 +296,9 @@ class DecimalField(BaseField):
if value is None:
return value
return decimal.Decimal(value).quantize(self.precision,
rounding=self.rounding)
# Convert to string for python 2.6 before casting to Decimal
value = decimal.Decimal("%s" % value)
return value.quantize(self.precision, rounding=self.rounding)
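The string round-trip matters because decimal.Decimal cannot be built directly from a float until Python 2.7; a brief illustration of the equivalent steps:
import decimal
# On Python 2.6, decimal.Decimal(35.11) raises TypeError, so the field
# converts through a string first
value = decimal.Decimal("%s" % 35.11)
value.quantize(decimal.Decimal(".01"), rounding=decimal.ROUND_HALF_UP)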
def to_mongo(self, value):
if value is None:
@@ -468,7 +468,7 @@ class ComplexDateTimeField(StringField):
def __get__(self, instance, owner):
data = super(ComplexDateTimeField, self).__get__(instance, owner)
if data == None:
if data is None:
return datetime.datetime.now()
if isinstance(data, datetime.datetime):
return data
@@ -657,15 +657,15 @@ class ListField(ComplexBaseField):
"""Make sure that a list of valid fields is being used.
"""
if (not isinstance(value, (list, tuple, QuerySet)) or
isinstance(value, basestring)):
isinstance(value, basestring)):
self.error('Only lists and tuples may be used in a list field')
super(ListField, self).validate(value)
def prepare_query_value(self, op, value):
if self.field:
if op in ('set', 'unset') and (not isinstance(value, basestring)
and not isinstance(value, BaseDocument)
and hasattr(value, '__iter__')):
and not isinstance(value, BaseDocument)
and hasattr(value, '__iter__')):
return [self.field.prepare_query_value(op, v) for v in value]
return self.field.prepare_query_value(op, value)
return super(ListField, self).prepare_query_value(op, value)
@@ -700,7 +700,7 @@ class SortedListField(ListField):
value = super(SortedListField, self).to_mongo(value)
if self._ordering is not None:
return sorted(value, key=itemgetter(self._ordering),
reverse=self._order_reverse)
reverse=self._order_reverse)
return sorted(value, reverse=self._order_reverse)
@@ -781,7 +781,7 @@ class ReferenceField(BaseField):
* NULLIFY - Updates the reference to null.
* CASCADE - Deletes the documents associated with the reference.
* DENY - Prevent the deletion of the reference object.
* PULL - Pull the reference from a :class:`~mongoengine.ListField`
* PULL - Pull the reference from a :class:`~mongoengine.fields.ListField`
of references
Alternative syntax for registering delete rules (useful when implementing
@@ -854,7 +854,7 @@ class ReferenceField(BaseField):
return document.id
return document
elif not self.dbref and isinstance(document, basestring):
return document
return ObjectId(document)
id_field_name = self.document_type._meta['id_field']
id_field = self.document_type._fields[id_field_name]
@@ -879,7 +879,7 @@ class ReferenceField(BaseField):
"""Convert a MongoDB-compatible type to a Python type.
"""
if (not self.dbref and
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
not isinstance(value, (DBRef, Document, EmbeddedDocument))):
collection = self.document_type._get_collection_name()
value = DBRef(collection, self.document_type.id.to_python(value))
return value
@@ -1000,7 +1000,7 @@ class BinaryField(BaseField):
if not isinstance(value, (bin_type, txt_type, Binary)):
self.error("BinaryField only accepts instances of "
"(%s, %s, Binary)" % (
bin_type.__name__, txt_type.__name__))
bin_type.__name__, txt_type.__name__))
if self.max_bytes is not None and len(value) > self.max_bytes:
self.error('Binary value is too long')
@@ -1234,8 +1234,6 @@ class ImageGridFsProxy(GridFSProxy):
Insert an image into the database,
applying field properties (size, thumbnail_size)
"""
if not self.instance:
import ipdb; ipdb.set_trace();
field = self.instance._fields[self.key]
try:
@@ -1307,6 +1305,7 @@ class ImageGridFsProxy(GridFSProxy):
height=h,
format=format,
**kwargs)
@property
def size(self):
"""
@@ -1385,28 +1384,6 @@ class ImageField(FileField):
**kwargs)
class GeoPointField(BaseField):
"""A list storing a latitude and longitude.
.. versionadded:: 0.4
"""
_geo_index = True
def validate(self, value):
"""Make sure that a geo-value is of type (x, y)
"""
if not isinstance(value, (list, tuple)):
self.error('GeoPointField can only accept tuples or lists '
'of (x, y)')
if not len(value) == 2:
self.error('Value must be a two-dimensional point')
if (not isinstance(value[0], (float, int)) and
not isinstance(value[1], (float, int))):
self.error('Both values in point must be float or int')
class SequenceField(BaseField):
"""Provides a sequental counter see:
http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
@@ -1558,3 +1535,83 @@ class UUIDField(BaseField):
value = uuid.UUID(value)
except Exception, exc:
self.error('Could not convert to UUID: %s' % exc)
class GeoPointField(BaseField):
"""A list storing a latitude and longitude.
.. versionadded:: 0.4
"""
_geo_index = pymongo.GEO2D
def validate(self, value):
"""Make sure that a geo-value is of type (x, y)
"""
if not isinstance(value, (list, tuple)):
self.error('GeoPointField can only accept tuples or lists '
'of (x, y)')
if not len(value) == 2:
self.error("Value (%s) must be a two-dimensional point" % repr(value))
elif (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
self.error("Both values (%s) in point must be float or int" % repr(value))
class PointField(GeoJsonBaseField):
"""A geo json field storing a latitude and longitude.
The data is represented as:
.. code-block:: js
{ "type" : "Point" ,
"coordinates" : [x, y]}
You can either pass a dict with the full information or a list
to set the value.
Requires mongodb >= 2.4
.. versionadded:: 0.8
"""
_type = "Point"
class LineStringField(GeoJsonBaseField):
"""A geo json field storing a line of latitude and longitude coordinates.
The data is represented as:
.. code-block:: js
{ "type" : "LineString" ,
"coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]}
You can either pass a dict with the full information or a list of points.
Requires mongodb >= 2.4
.. versionadded:: 0.8
"""
_type = "LineString"
class PolygonField(GeoJsonBaseField):
"""A geo json field storing a polygon of latitude and longitude coordinates.
The data is represented as:
.. code-block:: js
{ "type" : "Polygon" ,
"coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]}
You can either pass a dict with the full information or a list
of LineStrings. The first LineString being the outside and the rest being
holes.
Requires mongodb >= 2.4
.. versionadded:: 0.8
"""
_type = "Polygon"

View File

@@ -26,6 +26,7 @@ __all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL')
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
ITER_CHUNK_SIZE = 100
# Delete rules
DO_NOTHING = 0
@@ -63,16 +64,18 @@ class QuerySet(object):
self._none = False
self._as_pymongo = False
self._as_pymongo_coerce = False
self._result_cache = []
self._has_more = True
self._len = None
# If inheritance is allowed, only return instances and instances of
# subclasses of the class being used
if document._meta.get('allow_inheritance') == True:
if document._meta.get('allow_inheritance') is True:
self._initial_query = {"_cls": {"$in": self._document._subclasses}}
self._loaded_fields = QueryFieldList(always_include=['_cls'])
self._cursor_obj = None
self._limit = None
self._skip = None
self._slice = None
self._hint = -1 # Using -1 as None is a valid value for hint
def __call__(self, q_obj=None, class_check=True, slave_okay=False,
@@ -110,13 +113,60 @@ class QuerySet(object):
queryset._class_check = class_check
return queryset
def __len__(self):
"""Since __len__ is called quite frequently (for example, as part of
list(qs) we populate the result cache and cache the length.
"""
if self._len is not None:
return self._len
if self._has_more:
# populate the cache
list(self._iter_results())
self._len = len(self._result_cache)
return self._len
def __iter__(self):
"""Support iterator protocol"""
queryset = self
if queryset._iter:
queryset = self.clone()
queryset.rewind()
return queryset
"""Iteration utilises a results cache which iterates the cursor
in batches of ``ITER_CHUNK_SIZE``.
If ``self._has_more`` the cursor hasn't been exhausted so cache then
batch. Otherwise iterate the result_cache.
"""
self._iter = True
if self._has_more:
return self._iter_results()
# iterating over the cache.
return iter(self._result_cache)
def _iter_results(self):
"""A generator for iterating over the result cache.
Also populates the cache if there are more possible results to yield.
Raises StopIteration when there are no more results"""
pos = 0
while True:
upper = len(self._result_cache)
while pos < upper:
yield self._result_cache[pos]
pos = pos + 1
if not self._has_more:
raise StopIteration
if len(self._result_cache) <= pos:
self._populate_cache()
def _populate_cache(self):
"""
Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
(until the cursor is exhausted).
"""
if self._has_more:
try:
for i in xrange(ITER_CHUNK_SIZE):
self._result_cache.append(self.next())
except StopIteration:
self._has_more = False
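Taken together, __len__, __iter__, _iter_results and _populate_cache mean a queryset now fetches documents in ITER_CHUNK_SIZE batches and remembers them, so repeat iteration and len() reuse the cache instead of re-running the query. A rough sketch of the observable behaviour, assuming a hypothetical Person document and a local MongoDB:
from mongoengine import Document, StringField, connect
class Person(Document):
    name = StringField()
connect('testdb')
people = Person.objects              # lazy: nothing fetched yet
names = [p.name for p in people]     # fills _result_cache in ITER_CHUNK_SIZE batches
count = len(people)                  # answered from the cache, no extra query
again = list(people)                 # second pass also served from the cache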
def __getitem__(self, key):
"""Support skip and limit using getitem and slicing syntax.
@@ -127,8 +177,10 @@ class QuerySet(object):
if isinstance(key, slice):
try:
queryset._cursor_obj = queryset._cursor[key]
queryset._slice = key
queryset._skip, queryset._limit = key.start, key.stop
queryset._limit
if key.start and key.stop:
queryset._limit = key.stop - key.start
except IndexError, err:
# PyMongo raises an error if key.start == key.stop, catch it,
# bin it, kill it.
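Slicing now records _skip and _limit on the queryset itself rather than only on the underlying cursor, so later operations can replay them. A tiny sketch, reusing the hypothetical Person document from the previous example:
qs = Person.objects[5:15]    # skip the first five results, return at most ten
qs._skip, qs._limit          # (5, 10)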
@@ -156,22 +208,15 @@ class QuerySet(object):
def __repr__(self):
"""Provides the string representation of the QuerySet
.. versionchanged:: 0.6.13 Now doesn't modify the cursor
"""
if self._iter:
return '.. queryset mid-iteration ..'
data = []
for i in xrange(REPR_OUTPUT_SIZE + 1):
try:
data.append(self.next())
except StopIteration:
break
self._populate_cache()
data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
self.rewind()
return repr(data)
# Core functions
@@ -200,7 +245,7 @@ class QuerySet(object):
result = queryset.next()
except StopIteration:
msg = ("%s matching query does not exist."
% queryset._document._class_name)
% queryset._document._class_name)
raise queryset._document.DoesNotExist(msg)
try:
queryset.next()
@@ -351,7 +396,12 @@ class QuerySet(object):
"""
if self._limit == 0:
return 0
return self._cursor.count(with_limit_and_skip=with_limit_and_skip)
if with_limit_and_skip and self._len is not None:
return self._len
count = self._cursor.count(with_limit_and_skip=with_limit_and_skip)
if with_limit_and_skip:
self._len = count
return count
def delete(self, write_concern=None):
"""Delete the documents matched by the query.
@@ -426,7 +476,7 @@ class QuerySet(object):
.. versionadded:: 0.2
"""
if not update:
if not update and not upsert:
raise OperationError("No update parameters, would remove data")
if not write_concern:
@@ -469,7 +519,8 @@ class QuerySet(object):
.. versionadded:: 0.2
"""
return self.update(upsert=upsert, multi=False, write_concern=None, **update)
return self.update(
upsert=upsert, multi=False, write_concern=write_concern, **update)
def with_id(self, object_id):
"""Retrieve the object matching the id provided. Uses `object_id` only
@@ -518,6 +569,15 @@ class QuerySet(object):
queryset._none = True
return queryset
def no_sub_classes(self):
"""
Only return instances of this document and not any of its subclasses
"""
if self._document._meta.get('allow_inheritance') is True:
self._initial_query = {"_cls": self._document._class_name}
return self
def clone(self):
"""Creates a copy of the current
:class:`~mongoengine.queryset.QuerySet`
@@ -536,20 +596,15 @@ class QuerySet(object):
val = getattr(self, prop)
setattr(c, prop, copy.copy(val))
if self._slice:
c._slice = self._slice
if self._cursor_obj:
c._cursor_obj = self._cursor_obj.clone()
if self._slice:
c._cursor[self._slice]
return c
def select_related(self, max_depth=1):
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
a maximum depth in order to cut down the number queries to mongodb.
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
:class:`~bson.object_id.ObjectId` a maximum depth in order to cut down
the number queries to mongodb.
.. versionadded:: 0.5
"""
@@ -570,7 +625,6 @@ class QuerySet(object):
else:
queryset._cursor.limit(n)
queryset._limit = n
# Return self to allow chaining
return queryset
@@ -608,6 +662,9 @@ class QuerySet(object):
:param field: the field to select distinct values from
.. note:: This is a command and won't take ordering or limit into
account.
.. versionadded:: 0.4
.. versionchanged:: 0.5 - Fixed handling references
.. versionchanged:: 0.6 - Improved db_field reference handling
@@ -819,8 +876,7 @@ class QuerySet(object):
def to_json(self):
"""Converts a queryset to JSON"""
queryset = self.clone()
return json_util.dumps(queryset._collection_obj.find(queryset._query))
return json_util.dumps(self.as_pymongo())
def from_json(self, json_data):
"""Converts json data to unsaved objects"""
@@ -912,7 +968,7 @@ class QuerySet(object):
mr_args['out'] = output
results = getattr(queryset._collection, map_reduce_function)(
map_f, reduce_f, **mr_args)
map_f, reduce_f, **mr_args)
if map_reduce_function == 'map_reduce':
results = results.find()
@@ -1049,7 +1105,7 @@ class QuerySet(object):
""")
for result in self.map_reduce(map_func, reduce_func,
finalize_f=finalize_func, output='inline'):
finalize_f=finalize_func, output='inline'):
return result.value
else:
return 0
@@ -1062,11 +1118,11 @@ class QuerySet(object):
.. note::
Can only do direct simple mappings and cannot map across
:class:`~mongoengine.ReferenceField` or
:class:`~mongoengine.GenericReferenceField` for more complex
:class:`~mongoengine.fields.ReferenceField` or
:class:`~mongoengine.fields.GenericReferenceField` for more complex
counting, a manual map reduce call is required.
If the field is a :class:`~mongoengine.ListField`, the items within
If the field is a :class:`~mongoengine.fields.ListField`, the items within
each list will be counted individually.
:param field: the field to use
@@ -1086,20 +1142,18 @@ class QuerySet(object):
def next(self):
"""Wrap the result in a :class:`~mongoengine.Document` object.
"""
self._iter = True
try:
if self._limit == 0 or self._none:
raise StopIteration
if self._scalar:
return self._get_scalar(self._document._from_son(
self._cursor.next()))
if self._as_pymongo:
return self._get_as_pymongo(self._cursor.next())
if self._limit == 0 or self._none:
raise StopIteration
return self._document._from_son(self._cursor.next())
except StopIteration, e:
self.rewind()
raise e
raw_doc = self._cursor.next()
if self._as_pymongo:
return self._get_as_pymongo(raw_doc)
doc = self._document._from_son(raw_doc)
if self._scalar:
return self._get_scalar(doc)
return doc
def rewind(self):
"""Rewind the cursor to its unevaluated state.
@@ -1152,7 +1206,7 @@ class QuerySet(object):
self._cursor_obj.sort(order)
if self._limit is not None:
self._cursor_obj.limit(self._limit - (self._skip or 0))
self._cursor_obj.limit(self._limit)
if self._skip is not None:
self._cursor_obj.skip(self._skip)
@@ -1367,7 +1421,15 @@ class QuerySet(object):
new_data = {}
for key, value in data.iteritems():
new_path = '%s.%s' % (path, key) if path else key
if all_fields or new_path in self.__as_pymongo_fields:
if all_fields:
include_field = True
elif self._loaded_fields.value == QueryFieldList.ONLY:
include_field = new_path in self.__as_pymongo_fields
else:
include_field = new_path not in self.__as_pymongo_fields
if include_field:
new_data[key] = clean(value, path=new_path)
data = new_data
elif isinstance(data, list):
@@ -1412,15 +1474,14 @@ class QuerySet(object):
code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code)
code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub,
code)
code)
return code
# Deprecated
def ensure_index(self, **kwargs):
"""Deprecated use :func:`~Document.ensure_index`"""
msg = ("Doc.objects()._ensure_index() is deprecated. "
"Use Doc.ensure_index() instead.")
"Use Doc.ensure_index() instead.")
warnings.warn(msg, DeprecationWarning)
self._document.__class__.ensure_index(**kwargs)
return self
@@ -1428,6 +1489,6 @@ class QuerySet(object):
def _ensure_indexes(self):
"""Deprecated use :func:`~Document.ensure_indexes`"""
msg = ("Doc.objects()._ensure_indexes() is deprecated. "
"Use Doc.ensure_indexes() instead.")
"Use Doc.ensure_indexes() instead.")
warnings.warn(msg, DeprecationWarning)
self._document.__class__.ensure_indexes()

View File

@@ -1,5 +1,6 @@
from collections import defaultdict
import pymongo
from bson import SON
from mongoengine.common import _import_class
@@ -12,7 +13,9 @@ COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
'all', 'size', 'exists', 'not')
GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
'within_box', 'within_polygon', 'near', 'near_sphere',
'max_distance')
'max_distance', 'geo_within', 'geo_within_box',
'geo_within_polygon', 'geo_within_center',
'geo_within_sphere', 'geo_intersects')
STRING_OPERATORS = ('contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith',
'exact', 'iexact')
@@ -21,7 +24,8 @@ MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
STRING_OPERATORS + CUSTOM_OPERATORS)
UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
'push_all', 'pull', 'pull_all', 'add_to_set')
'push_all', 'pull', 'pull_all', 'add_to_set',
'set_on_insert')
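The new set_on_insert operator maps to MongoDB 2.4's $setOnInsert, which only applies when an upsert actually inserts a new document. A brief sketch, assuming a hypothetical Counter document and a local MongoDB:
from mongoengine import Document, StringField, IntField, connect
class Counter(Document):
    name = StringField()
    value = IntField()
connect('testdb')
# $setOnInsert (MongoDB >= 2.4) only fires if the upsert has to insert
Counter.objects(name='hits').update_one(upsert=True,
                                        inc__value=1,
                                        set_on_insert__name='hits')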
def query(_doc_cls=None, _field_operation=False, **query):
@@ -81,30 +85,14 @@ def query(_doc_cls=None, _field_operation=False, **query):
value = field
else:
value = field.prepare_query_value(op, value)
elif op in ('in', 'nin', 'all', 'near'):
elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
# 'in', 'nin' and 'all' require a list of values
value = [field.prepare_query_value(op, v) for v in value]
# if op and op not in COMPARISON_OPERATORS:
if op:
if op in GEO_OPERATORS:
if op == "within_distance":
value = {'$within': {'$center': value}}
elif op == "within_spherical_distance":
value = {'$within': {'$centerSphere': value}}
elif op == "within_polygon":
value = {'$within': {'$polygon': value}}
elif op == "near":
value = {'$near': value}
elif op == "near_sphere":
value = {'$nearSphere': value}
elif op == 'within_box':
value = {'$within': {'$box': value}}
elif op == "max_distance":
value = {'$maxDistance': value}
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented" % op)
value = _geo_operator(field, op, value)
elif op in CUSTOM_OPERATORS:
if op == 'match':
value = {"$elemMatch": value}
@@ -176,7 +164,9 @@ def update(_doc_cls=None, **update):
if value > 0:
value = -value
elif op == 'add_to_set':
op = op.replace('_to_set', 'ToSet')
op = 'addToSet'
elif op == 'set_on_insert':
op = "setOnInsert"
match = None
if parts[-1] in COMPARISON_OPERATORS:
@@ -250,3 +240,76 @@ def update(_doc_cls=None, **update):
mongo_update[key].update(value)
return mongo_update
def _geo_operator(field, op, value):
"""Helper to return the query for a given geo query"""
if field._geo_index == pymongo.GEO2D:
if op == "within_distance":
value = {'$within': {'$center': value}}
elif op == "within_spherical_distance":
value = {'$within': {'$centerSphere': value}}
elif op == "within_polygon":
value = {'$within': {'$polygon': value}}
elif op == "near":
value = {'$near': value}
elif op == "near_sphere":
value = {'$nearSphere': value}
elif op == 'within_box':
value = {'$within': {'$box': value}}
elif op == "max_distance":
value = {'$maxDistance': value}
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented for a GeoPointField" % op)
else:
if op == "geo_within":
value = {"$geoWithin": _infer_geometry(value)}
elif op == "geo_within_box":
value = {"$geoWithin": {"$box": value}}
elif op == "geo_within_polygon":
value = {"$geoWithin": {"$polygon": value}}
elif op == "geo_within_center":
value = {"$geoWithin": {"$center": value}}
elif op == "geo_within_sphere":
value = {"$geoWithin": {"$centerSphere": value}}
elif op == "geo_intersects":
value = {"$geoIntersects": _infer_geometry(value)}
elif op == "near":
value = {'$near': _infer_geometry(value)}
elif op == "max_distance":
value = {'$maxDistance': value}
else:
raise NotImplementedError("Geo method '%s' has not "
"been implemented for a %s " % (op, field._name))
return value
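The helper keys off the field's index type: a legacy GeoPointField (2d index) keeps the $within-style operators, while the new 2dsphere fields get $geoWithin and $geoIntersects. A rough sketch of both styles, calling the private helper directly (import path assumed to be mongoengine.queryset.transform):
from mongoengine.fields import GeoPointField, PointField
from mongoengine.queryset.transform import _geo_operator
# legacy 2d field keeps the $within-style operators
_geo_operator(GeoPointField(), 'within_distance', [[40.0, 5.0], 0.5])
# {'$within': {'$center': [[40.0, 5.0], 0.5]}}
# 2dsphere fields get the new $geoWithin / $geoIntersects forms
_geo_operator(PointField(), 'geo_intersects', [[40.0, 5.0], [41.0, 6.0]])
# {'$geoIntersects': {'$geometry': {'type': 'LineString',
#                                   'coordinates': [[40.0, 5.0], [41.0, 6.0]]}}}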
def _infer_geometry(value):
"""Helper method that tries to infer the $geometry shape for a given value"""
if isinstance(value, dict):
if "$geometry" in value:
return value
elif 'coordinates' in value and 'type' in value:
return {"$geometry": value}
raise InvalidQueryError("Invalid $geometry dictionary should have "
"type and coordinates keys")
elif isinstance(value, (list, set)):
try:
value[0][0][0]
return {"$geometry": {"type": "Polygon", "coordinates": value}}
except:
pass
try:
value[0][0]
return {"$geometry": {"type": "LineString", "coordinates": value}}
except:
pass
try:
value[0]
return {"$geometry": {"type": "Point", "coordinates": value}}
except:
pass
raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
"or (nested) lists of coordinate(s)")