Compare commits: topic/land...async
33 commits (SHA1):
1f7272d139, f6ba1ad788, 294d59c9bb, 759f72169a, 1f7135be61, 6942f9c1cf,
d9da75d1c0, 3503c98857, 708c3f1e2a, 6f645e8619, bce7ca7ac4, 350465c25d,
5b9c70ae22, 9b30afeca9, c1b202c119, 41cfe5d2ca, 05339e184f, 447127d956,
394334fbea, 9f8cd33d43, f066e28c35, b349a449bb, 1c5898d396, 6802967863,
0462f18680, af6699098f, 6b7e7dc124, 6bae4c6a66, 46da918dbe, bb7e5f17b5,
b9d03114c2, 436b1ce176, 85336f9777
.travis.yml (12 changes)
@@ -15,6 +15,9 @@ env:
- PYMONGO=2.7.1 DJANGO=dev
- PYMONGO=2.7.1 DJANGO=1.6.5
- PYMONGO=2.7.1 DJANGO=1.5.8
- PYMONGO=2.7.2 DJANGO=dev
- PYMONGO=2.7.2 DJANGO=1.6.5
- PYMONGO=2.7.2 DJANGO=1.5.8

matrix:
exclude:
@@ -22,6 +25,10 @@ matrix:
env: PYMONGO=dev DJANGO=dev
- python: "2.6"
env: PYMONGO=2.7.1 DJANGO=dev
- python: "2.6"
env: PYMONGO=2.7.2 DJANGO=dev
allow_failures:
- python: "pypy3"
fast_finish: true

before_install:
@@ -37,12 +44,17 @@ install:
- if [[ $DJANGO == 'dev' ]]; then travis_retry pip install https://www.djangoproject.com/download/1.7c2/tarball/; fi
- if [[ $DJANGO != 'dev' ]]; then travis_retry pip install Django==$DJANGO; fi
- travis_retry pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b
- travis_retry pip install coveralls
- travis_retry python setup.py install

script:
- travis_retry python setup.py test
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . -w; fi;
- coverage run --source=mongoengine setup.py test
- coverage report -m
- python benchmark.py
after_script:
coveralls --verbose
notifications:
irc: "irc.freenode.org#mongoengine"
branches:
AUTHORS (4 changes)
@@ -206,3 +206,7 @@ that much better:
* Clay McClure (https://github.com/claymation)
* Bruno Rocha (https://github.com/rochacbruno)
* Norberto Leite (https://github.com/nleite)
* Bob Cribbs (https://github.com/bocribbz)
* Jay Shirley (https://github.com/jshirley)
* DavidBord (https://github.com/DavidBord)
* Axel Haustant (https://github.com/noirbizarre)
@@ -95,6 +95,9 @@ Fields
.. autoclass:: mongoengine.fields.PointField
.. autoclass:: mongoengine.fields.LineStringField
.. autoclass:: mongoengine.fields.PolygonField
.. autoclass:: mongoengine.fields.MultiPointField
.. autoclass:: mongoengine.fields.MultiLineStringField
.. autoclass:: mongoengine.fields.MultiPolygonField
.. autoclass:: mongoengine.fields.GridFSError
.. autoclass:: mongoengine.fields.GridFSProxy
.. autoclass:: mongoengine.fields.ImageGridFsProxy
@@ -5,6 +5,8 @@ Changelog

Changes in 0.9.X - DEV
======================
- Allow specifying the '_cls' as a field for indexes #397
- Stop ensure_indexes running on a secondaries unless connection is through mongos #746
- Not overriding default values when loading a subset of fields #399
- Saving document doesn't create new fields in existing collection #620
- Added `Queryset.aggregate` wrapper to aggregation framework #703
@@ -46,6 +48,12 @@ Changes in 0.9.X - DEV
- Workaround a dateutil bug #608
- Conditional save for atomic-style operations #511
- Allow dynamic dictionary-style field access #559
- Increase email field length to accommodate new TLDs #726
- index_cls is ignored when deciding to set _cls as index prefix #733
- Make 'db' argument to connection optional #737
- Allow atomic update for the entire `DictField` #742
- Added MultiPointField, MultiLineField, MultiPolygonField
- Fix multiple connections aliases being rewritten #748

Changes in 0.8.7
================
@@ -91,6 +91,12 @@ are as follows:
* :class:`~mongoengine.fields.StringField`
* :class:`~mongoengine.fields.URLField`
* :class:`~mongoengine.fields.UUIDField`
* :class:`~mongoengine.fields.PointField`
* :class:`~mongoengine.fields.LineStringField`
* :class:`~mongoengine.fields.PolygonField`
* :class:`~mongoengine.fields.MultiPointField`
* :class:`~mongoengine.fields.MultiLineStringField`
* :class:`~mongoengine.fields.MultiPolygonField`

Field arguments
---------------
@@ -544,6 +550,9 @@ The following fields will explicitly add a "2dsphere" index:
- :class:`~mongoengine.fields.PointField`
- :class:`~mongoengine.fields.LineStringField`
- :class:`~mongoengine.fields.PolygonField`
- :class:`~mongoengine.fields.MultiPointField`
- :class:`~mongoengine.fields.MultiLineStringField`
- :class:`~mongoengine.fields.MultiPolygonField`

As "2dsphere" indexes can be part of a compound index, you may not want the
automatic index but would prefer a compound index. In this example we turn off
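The guide's example is cut off in this extract. As a rough sketch of the idea (the class and field names below are illustrative, and the PointField auto_index keyword is assumed rather than shown in this diff), turning off the automatic index and declaring a compound "2dsphere" index might look like:

from mongoengine import Document, DateTimeField, PointField

class LogEntry(Document):
    # auto_index=False stops the field from registering its own "2dsphere" index
    location = PointField(auto_index=False)
    datetime = DateTimeField()

    meta = {
        # one compound index covering both the geo field and the timestamp
        'indexes': [[("location", "2dsphere"), ("datetime", 1)]],
    }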
@@ -26,7 +26,7 @@ NON_FIELD_ERRORS = '__all__'

class BaseDocument(object):
__slots__ = ('_changed_fields', '_initialised', '_created', '_data',
'_dynamic_fields', '_auto_id_field', '_db_field_map', '_cls', '__weakref__')
'_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__')

_dynamic = False
_dynamic_lock = True
@@ -78,6 +78,9 @@ class BaseDocument(object):
value = getattr(self, key, None)
setattr(self, key, value)

if "_cls" not in values:
self._cls = self._class_name

# Set passed values after initialisation
if self._dynamic:
dynamic_data = {}
@@ -718,6 +721,9 @@ class BaseDocument(object):
ALLOW_INHERITANCE)
include_cls = (allow_inheritance and not spec.get('sparse', False) and
spec.get('cls', True))

# 733: don't include cls if index_cls is False unless there is an explicit cls with the index
include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
if "cls" in spec:
spec.pop('cls')
for key in spec['fields']:
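The effect of the #733 change above, sketched with an illustrative document (this mirrors the new assertions in the indexes test further down rather than quoting the patch): with index_cls disabled in meta, _cls is only prepended to an index that explicitly asks for it.

from mongoengine import Document, StringField

class Article(Document):
    title = StringField()
    category = StringField()
    meta = {
        'allow_inheritance': True,
        'index_cls': False,
        'indexes': [
            {'fields': ['title']},                  # built as [('title', 1)]
            {'fields': ['category'], 'cls': True},  # built as [('_cls', 1), ('category', 1)]
        ],
    }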
@@ -457,7 +457,7 @@ class GeoJsonBaseField(BaseField):
if error:
self.error(error)

def _validate_polygon(self, value):
def _validate_polygon(self, value, top_level=True):
if not isinstance(value, (list, tuple)):
return 'Polygons must contain list of linestrings'

@@ -475,7 +475,10 @@ class GeoJsonBaseField(BaseField):
if error and error not in errors:
errors.append(error)
if errors:
return "Invalid Polygon:\n%s" % ", ".join(errors)
if top_level:
return "Invalid Polygon:\n%s" % ", ".join(errors)
else:
return "%s" % ", ".join(errors)

def _validate_linestring(self, value, top_level=True):
"""Validates a linestring"""
@@ -509,6 +512,66 @@ class GeoJsonBaseField(BaseField):
not isinstance(value[1], (float, int))):
return "Both values (%s) in point must be float or int" % repr(value)

def _validate_multipoint(self, value):
if not isinstance(value, (list, tuple)):
return 'MultiPoint must be a list of Point'

# Quick and dirty validator
try:
value[0][0]
except:
return "Invalid MultiPoint must contain at least one valid point"

errors = []
for point in value:
error = self._validate_point(point)
if error and error not in errors:
errors.append(error)

if errors:
return "%s" % ", ".join(errors)

def _validate_multilinestring(self, value, top_level=True):
if not isinstance(value, (list, tuple)):
return 'MultiLineString must be a list of LineString'

# Quick and dirty validator
try:
value[0][0][0]
except:
return "Invalid MultiLineString must contain at least one valid linestring"

errors = []
for linestring in value:
error = self._validate_linestring(linestring, False)
if error and error not in errors:
errors.append(error)

if errors:
if top_level:
return "Invalid MultiLineString:\n%s" % ", ".join(errors)
else:
return "%s" % ", ".join(errors)

def _validate_multipolygon(self, value):
if not isinstance(value, (list, tuple)):
return 'MultiPolygon must be a list of Polygon'

# Quick and dirty validator
try:
value[0][0][0][0]
except:
return "Invalid MultiPolygon must contain at least one valid Polygon"

errors = []
for polygon in value:
error = self._validate_polygon(polygon, False)
if error and error not in errors:
errors.append(error)

if errors:
return "Invalid MultiPolygon:\n%s" % ", ".join(errors)

def to_mongo(self, value):
if isinstance(value, dict):
return value
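For orientation, the nesting depth each new validator probes (value[0][0] for MultiPoint, value[0][0][0] for MultiLineString, value[0][0][0][0] for MultiPolygon) corresponds to coordinate lists shaped like the following illustration (these literals are mine, not part of the patch):

# MultiPoint: a list of [x, y] points
multi_point = [[1.0, 2.0], [3.0, 4.0]]

# MultiLineString: a list of linestrings, each a list of points
multi_linestring = [[[1.0, 2.0], [3.0, 4.0]],
                    [[5.0, 6.0], [7.0, 8.0]]]

# MultiPolygon: a list of polygons, each a list of closed rings
multi_polygon = [[[[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [1.0, 2.0]]],
                 [[[7.0, 8.0], [9.0, 10.0], [11.0, 12.0], [7.0, 8.0]]]]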
@@ -47,6 +47,10 @@ class DocumentMetaclass(type):
meta.merge(base._meta)
attrs['_meta'] = meta

if '_meta' in attrs and attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE):
StringField = _import_class('StringField')
attrs['_cls'] = StringField()

# Handle document Fields

# Merge all fields from subclasses
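The practical consequence, sketched here from the updated tests further down (not an excerpt from the patch): once allow_inheritance is enabled, _cls becomes an ordinary StringField, so it shows up in _fields and can be filtered on like any other field.

from mongoengine import Document, StringField, connect

class Animal(Document):
    name = StringField()
    meta = {'allow_inheritance': True}

class Dog(Animal):
    pass

assert '_cls' in Animal._fields          # now listed next to user-defined fields

connect('example_db')                     # hypothetical database; needs a reachable MongoDB
dogs = Animal.objects(_cls='Animal.Dog')  # _cls is directly queryable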
@@ -1,6 +1,10 @@
import pymongo
from pymongo import MongoClient, MongoReplicaSetClient, uri_parser

try:
import motor
except ImportError:
motor = None

__all__ = ['ConnectionError', 'connect', 'register_connection',
'DEFAULT_CONNECTION_NAME']
@@ -18,9 +22,10 @@ _connections = {}
_dbs = {}


def register_connection(alias, name, host=None, port=None,
def register_connection(alias, name=None, host=None, port=None,
read_preference=False,
username=None, password=None, authentication_source=None,
async=False,
**kwargs):
"""Add a connection.

@@ -35,18 +40,18 @@ def register_connection(alias, name, host=None, port=None,
:param password: password to authenticate with
:param authentication_source: database to authenticate against
:param kwargs: allow ad-hoc parameters to be passed into the pymongo driver

"""
global _connection_settings

conn_settings = {
'name': name,
'name': name or 'test',
'host': host or 'localhost',
'port': port or 27017,
'read_preference': read_preference,
'username': username,
'password': password,
'authentication_source': authentication_source
'authentication_source': authentication_source,
'async': async
}

# Handle uri style connections
@@ -98,8 +103,17 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
conn_settings.pop('username', None)
conn_settings.pop('password', None)
conn_settings.pop('authentication_source', None)
async = conn_settings.pop('async')

if async:
if not motor:
raise ImproperlyConfigured("Motor library was not found")

connection_class = motor.MotorClient

else:
connection_class = MongoClient

connection_class = MongoClient
if 'replicaSet' in conn_settings:
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
# Discard port since it can't be used on MongoReplicaSetClient
@@ -107,22 +121,30 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
# Discard replicaSet if not base string
if not isinstance(conn_settings['replicaSet'], basestring):
conn_settings.pop('replicaSet', None)
connection_class = MongoReplicaSetClient

if async:
connection_class = motor.MotorReplicaSetClient
else:
connection_class = MongoReplicaSetClient

try:
connection = None
connection_settings_iterator = ((alias, settings.copy()) for alias, settings in _connection_settings.iteritems())
for alias, connection_settings in connection_settings_iterator:
# check for shared connections
connection_settings_iterator = (
(db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
for db_alias, connection_settings in connection_settings_iterator:
connection_settings.pop('name', None)
connection_settings.pop('username', None)
connection_settings.pop('password', None)
if conn_settings == connection_settings and _connections.get(alias, None):
connection = _connections[alias]
if conn_settings == connection_settings and _connections.get(db_alias, None):
connection = _connections[db_alias]
break

_connections[alias] = connection if connection else connection_class(**conn_settings)
_connections[alias] = connection if connection else connection_class(
**conn_settings)
except Exception, e:
raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
raise ConnectionError(
"Cannot connect to database %s :\n%s" % (alias, e))
return _connections[alias]


@@ -144,7 +166,7 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
return _dbs[alias]


def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
"""Connect to the database specified by the 'db' argument.

Connection settings may be provided here as well if the database is not
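The intended usage of the new async flag, mirroring the tests/async/test_connection.py file added further down (Motor must be installed, and note that async is a reserved word on modern Python 3, so this reflects the Python 2 era API in the patch):

from mongoengine import register_connection
from mongoengine.connection import get_connection, get_db

# async=True makes get_connection() hand back a motor.MotorClient
# instead of a pymongo MongoClient.
register_connection('asyncdb', 'mongoengineasynctest', async=True)

conn = get_connection('asyncdb')  # motor.MotorClient
db = get_db('asyncdb')            # motor.MotorDatabase named 'mongoengineasynctest'

Because connect() forwards its keyword arguments to register_connection(), passing async=True to connect() should have the same effect.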
@@ -180,7 +180,11 @@ class DeReference(object):
return self.object_map.get(items['_ref'].id, items)
elif '_cls' in items:
doc = get_document(items['_cls'])._from_son(items)
_cls = doc._data.pop('_cls', None)
del items['_cls']
doc._data = self._attach_objects(doc._data, depth, doc, None)
if _cls is not None:
doc._data['_cls'] = _cls
return doc

if not hasattr(items, 'items'):
@@ -594,7 +594,9 @@ class Document(BaseDocument):
index_cls = cls._meta.get('index_cls', True)

collection = cls._get_collection()
if collection.read_preference > 1:
# 746: when connection is via mongos, the read preference is not necessarily an indication that
# this code runs on a secondary
if not collection.is_mongos and collection.read_preference > 1:
return

# determine if an index which we are creating includes
@@ -44,6 +44,7 @@ __all__ = [
'GridFSError', 'GridFSProxy', 'FileField', 'ImageGridFsProxy',
'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField',
'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField',
'MultiPointField', 'MultiLineStringField', 'MultiPolygonField',
'GeoJsonBaseField']


@@ -160,8 +161,8 @@ class EmailField(StringField):
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
# quoted-string
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'
# domain
r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}$', re.IGNORECASE
# domain (max length of an ICAAN TLD is 22 characters)
r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,22}$', re.IGNORECASE
)

def validate(self, value):
@@ -826,6 +827,10 @@ class DictField(ComplexBaseField):
return StringField().prepare_query_value(op, value)

if hasattr(self.field, 'field'):
if op in ('set', 'unset') and isinstance(value, dict):
return dict(
(k, self.field.prepare_query_value(op, v))
for k, v in value.items())
return self.field.prepare_query_value(op, value)

return super(DictField, self).prepare_query_value(op, value)
@@ -1899,3 +1904,70 @@ class PolygonField(GeoJsonBaseField):
.. versionadded:: 0.8
"""
_type = "Polygon"


class MultiPointField(GeoJsonBaseField):

"""A GeoJSON field storing a list of Points.

The data is represented as:

.. code-block:: js

{ "type" : "MultiPoint" ,
"coordinates" : [[x1, y1], [x2, y2]]}

You can either pass a dict with the full information or a list
to set the value.

Requires mongodb >= 2.6
.. versionadded:: 0.9
"""
_type = "MultiPoint"


class MultiLineStringField(GeoJsonBaseField):

"""A GeoJSON field storing a list of LineStrings.

The data is represented as:

.. code-block:: js

{ "type" : "MultiLineString" ,
"coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]]}

You can either pass a dict with the full information or a list of points.

Requires mongodb >= 2.6
.. versionadded:: 0.9
"""
_type = "MultiLineString"


class MultiPolygonField(GeoJsonBaseField):

"""A GeoJSON field storing list of Polygons.

The data is represented as:

.. code-block:: js

{ "type" : "Polygon" ,
"coordinates" : [[
[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]
], [
[[x1, y1], [x1, y1] ... [xn, yn]],
[[x1, y1], [x1, y1] ... [xn, yn]]
]
}

You can either pass a dict with the full information or a list
of Polygons.

Requires mongodb >= 2.6
.. versionadded:: 0.9
"""
_type = "MultiPolygon"
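A quick usage sketch for the three new geo fields (my own example, following the docstrings above; MongoDB >= 2.6 is required for the corresponding "2dsphere" queries):

from mongoengine import (Document, MultiPointField,
                         MultiLineStringField, MultiPolygonField)

class Region(Document):
    stations = MultiPointField()
    roads = MultiLineStringField()
    zones = MultiPolygonField()

region = Region(
    stations=[[1.0, 2.0], [3.0, 4.0]],
    roads=[[[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]],
    zones=[[[[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [1.0, 2.0]]]],
)
region.validate()  # each field also accepts the full GeoJSON dict form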
@@ -1413,8 +1413,11 @@ class BaseQuerySet(object):
def _query(self):
if self._mongo_query is None:
self._mongo_query = self._query_obj.to_query(self._document)
if self._class_check:
self._mongo_query.update(self._initial_query)
if self._class_check and self._initial_query:
if "_cls" in self._mongo_query:
self._mongo_query = {"$and": [self._initial_query, self._mongo_query]}
else:
self._mongo_query.update(self._initial_query)
return self._mongo_query

@property
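Why the $and wrapper matters, sketched with hypothetical query shapes (the exact dictionaries below are illustrative, not taken from the patch): when the user query already constrains _cls, a plain dict.update() lets the automatic class-check filter overwrite it.

# Possible shapes of the two parts for Animal.objects(_cls__in=["Animal.Mammal.Dog", "Animal.Fish"]):
initial_query = {'_cls': {'$in': ['Animal', 'Animal.Fish', 'Animal.Mammal', 'Animal.Mammal.Dog']}}
user_query = {'_cls': {'$in': ['Animal.Mammal.Dog', 'Animal.Fish']}}

# Old behaviour: the user's _cls condition is silently replaced.
merged = dict(user_query)
merged.update(initial_query)

# New behaviour: both conditions survive inside $and.
combined = {'$and': [initial_query, user_query]}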
tests/async/__init__.py (new empty file)
tests/async/test_connection.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from mongoengine import *
import motor
import mongoengine.connection
from mongoengine.connection import get_db, get_connection, ConnectionError

try:
import unittest2 as unittest
except ImportError:
import unittest


class ConnectionTest(unittest.TestCase):

def setUp(self):
mongoengine.connection._connection_settings = {}
mongoengine.connection._connections = {}
mongoengine.connection._dbs = {}

def test_register_connection(self):
"""
Ensure that the connect() method works properly.
"""
register_connection('asyncdb', 'mongoengineasynctest', async=True)

self.assertEqual(
mongoengine.connection._connection_settings['asyncdb']['name'],
'mongoengineasynctest')

self.assertTrue(
mongoengine.connection._connection_settings['asyncdb']['async'])
conn = get_connection('asyncdb')
self.assertTrue(isinstance(conn, motor.MotorClient))

db = get_db('asyncdb')
self.assertTrue(isinstance(db, motor.MotorDatabase))
self.assertEqual(db.name, 'mongoengineasynctest')
@@ -36,9 +36,9 @@ class ClassMethodsTest(unittest.TestCase):
def test_definition(self):
"""Ensure that document may be defined using fields.
"""
self.assertEqual(['age', 'id', 'name'],
self.assertEqual(['_cls', 'age', 'id', 'name'],
sorted(self.Person._fields.keys()))
self.assertEqual(["IntField", "ObjectIdField", "StringField"],
self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"],
sorted([x.__class__.__name__ for x in
self.Person._fields.values()]))
@@ -175,6 +175,16 @@ class IndexesTest(unittest.TestCase):
info = A._get_collection().index_information()
self.assertEqual(len(info.keys()), 2)

class B(A):
c = StringField()
d = StringField()
meta = {
'indexes': [{'fields': ['c']}, {'fields': ['d'], 'cls': True}],
'allow_inheritance': True
}
self.assertEqual([('c', 1)], B._meta['index_specs'][1]['fields'])
self.assertEqual([('_cls', 1), ('d', 1)], B._meta['index_specs'][2]['fields'])

def test_build_index_spec_is_not_destructive(self):

class MyDoc(Document):
@@ -163,7 +163,7 @@ class InheritanceTest(unittest.TestCase):
class Employee(Person):
salary = IntField()

self.assertEqual(['age', 'id', 'name', 'salary'],
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
sorted(Employee._fields.keys()))
self.assertEqual(Employee._get_collection_name(),
Person._get_collection_name())
@@ -180,7 +180,7 @@ class InheritanceTest(unittest.TestCase):
class Employee(Person):
salary = IntField()

self.assertEqual(['age', 'id', 'name', 'salary'],
self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'],
sorted(Employee._fields.keys()))
self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(),
['_cls', 'name', 'age'])
@@ -462,7 +462,7 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(person['name'], 'Another User')

# Length = length(assigned fields + id)
self.assertEqual(len(person), 3)
self.assertEqual(len(person), 4)

self.assertTrue('age' in person)
person.age = None
@@ -18,6 +18,7 @@ from bson import Binary, DBRef, ObjectId
from mongoengine import *
from mongoengine.connection import get_db
from mongoengine.base import _document_registry
from mongoengine.base.datastructures import BaseDict
from mongoengine.errors import NotRegistered
from mongoengine.python_support import PY3, b, bin_type

@@ -1251,6 +1252,30 @@ class FieldTest(unittest.TestCase):

Simple.drop_collection()

def test_atomic_update_dict_field(self):
"""Ensure that the entire DictField can be atomically updated."""


class Simple(Document):
mapping = DictField(field=ListField(IntField(required=True)))

Simple.drop_collection()

e = Simple()
e.mapping['someints'] = [1, 2]
e.save()
e.update(set__mapping={"ints": [3, 4]})
e.reload()
self.assertEqual(BaseDict, type(e.mapping))
self.assertEqual({"ints": [3, 4]}, e.mapping)

def create_invalid_mapping():
e.update(set__mapping={"somestrings": ["foo", "bar",]})

self.assertRaises(ValueError, create_invalid_mapping)

Simple.drop_collection()

def test_mapfield(self):
"""Ensure that the MapField handles the declared type."""

@@ -2902,6 +2927,9 @@ class FieldTest(unittest.TestCase):
"aJIazqqWkm7.net"))
self.assertTrue(user.validate() is None)

user = User(email="new-tld@example.technology")
self.assertTrue(user.validate() is None)

user = User(email='me@localhost')
self.assertRaises(ValidationError, user.validate)

@@ -3006,6 +3034,29 @@ class FieldTest(unittest.TestCase):
test.dictionary # Just access to test getter
self.assertRaises(ValidationError, test.validate)

def test_cls_field(self):
class Animal(Document):
meta = {'allow_inheritance': True}

class Fish(Animal):
pass

class Mammal(Animal):
pass

class Dog(Mammal):
pass

class Human(Mammal):
pass

Animal.objects.delete()
Dog().save()
Fish().save()
Human().save()
self.assertEquals(Animal.objects(_cls__in=["Animal.Mammal.Dog", "Animal.Fish"]).count(), 2)
self.assertEquals(Animal.objects(_cls__in=["Animal.Fish.Guppy"]).count(), 0)


if __name__ == '__main__':
unittest.main()
@@ -19,8 +19,8 @@ class GeoFieldTest(unittest.TestCase):
def _test_for_expected_error(self, Cls, loc, expected):
try:
Cls(loc=loc).validate()
self.fail()
except ValidationError, e:
self.fail('Should not validate the location {0}'.format(loc))
except ValidationError as e:
self.assertEqual(expected, e.to_dict()['loc'])

def test_geopoint_validation(self):
@@ -155,6 +155,117 @@ class GeoFieldTest(unittest.TestCase):

Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate()

def test_multipoint_validation(self):
class Location(Document):
loc = MultiPointField()

invalid_coords = {"x": 1, "y": 2}
expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiPointField type must be "MultiPoint"'
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MultiPoint", "coordinates": [[1, 2, 3]]}
expected = "Value ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[]]
expected = "Invalid MultiPoint must contain at least one valid point"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[1]], [[1, 2, 3]]]
for coord in invalid_coords:
expected = "Value (%s) must be a two-dimensional point" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)

invalid_coords = [[[{}, {}]], [("a", "b")]]
for coord in invalid_coords:
expected = "Both values (%s) in point must be float or int" % repr(coord[0])
self._test_for_expected_error(Location, coord, expected)

Location(loc=[[1, 2]]).validate()
Location(loc={
"type": "MultiPoint",
"coordinates": [
[1, 2],
[81.4471435546875, 23.61432859499169]
]}).validate()

def test_multilinestring_validation(self):
class Location(Document):
loc = MultiLineStringField()

invalid_coords = {"x": 1, "y": 2}
expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiLineStringField type must be "MultiLineString"'
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MultiLineString", "coordinates": [[[1, 2, 3]]]}
expected = "Invalid MultiLineString:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [5, "a"]
expected = "Invalid MultiLineString must contain at least one valid linestring"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[1]]]
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[1, 2, 3]]]
expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0])
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
for coord in invalid_coords:
expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0])
self._test_for_expected_error(Location, coord, expected)

Location(loc=[[[1, 2], [3, 4], [5, 6], [1,2]]]).validate()

def test_multipolygon_validation(self):
class Location(Document):
loc = MultiPolygonField()

invalid_coords = {"x": 1, "y": 2}
expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)'
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MadeUp", "coordinates": [[]]}
expected = 'MultiPolygonField type must be "MultiPolygon"'
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]}
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[5, "a"]]]]
expected = "Invalid MultiPolygon:\nBoth values ([5, 'a']) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[]]]]
expected = "Invalid MultiPolygon must contain at least one valid Polygon"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[1, 2, 3]]]]
expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[{}, {}]]], [[("a", "b")]]]
expected = "Invalid MultiPolygon:\nBoth values ([{}, {}]) in point must be float or int, Both values (('a', 'b')) in point must be float or int"
self._test_for_expected_error(Location, invalid_coords, expected)

invalid_coords = [[[[1, 2], [3, 4]]]]
expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point"
self._test_for_expected_error(Location, invalid_coords, expected)

Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate()

def test_indexes_geopoint(self):
"""Ensure that indexes are created automatically for GeoPointFields.
"""
@@ -147,6 +147,18 @@ class ConnectionTest(unittest.TestCase):
date_doc = DateDoc.objects.first()
self.assertEqual(d, date_doc.the_date)

def test_multiple_connection_settings(self):
connect('mongoenginetest', alias='t1', host="localhost")

connect('mongoenginetest2', alias='t2', host="127.0.0.1")

mongo_connections = mongoengine.connection._connections
self.assertEqual(len(mongo_connections.items()), 2)
self.assertTrue('t1' in mongo_connections.keys())
self.assertTrue('t2' in mongo_connections.keys())
self.assertEqual(mongo_connections['t1'].host, 'localhost')
self.assertEqual(mongo_connections['t2'].host, '127.0.0.1')


if __name__ == '__main__':
unittest.main()