Compare commits

...

11 Commits

Author SHA1 Message Date
Stefan Wojcik
df12211c25 don't let the MongoDB URI override connection settings it doesn't explicitly specify 2016-12-03 21:08:18 -05:00
Stefan Wojcik
088c5f49d9 update the changelog 2016-12-03 16:32:14 -05:00
Ollie Ford
d8d98b6143 Support Falsey primary_keys (#1354) 2016-12-03 16:10:05 -05:00
zeez
02fb3b9315 Support for authentication mechanism #905 (#1333) 2016-12-03 16:08:24 -05:00
Francesc Elies
4f87db784e Make the README example easier to replicate (#1382) 2016-12-02 22:05:20 -05:00
Jérôme Lafréchoux
7e6287b925 Merge pull request #1417 from MongoEngine/fix-db-field-in-sum-and-average
Fix BaseQuerySet#sum and BaseQuerySet#average for fields that specify a db_field
2016-12-02 20:53:48 +01:00
Stefan Wojcik
999cdfd997 Fix BaseQuerySet#sum and BaseQuerySet#average for fields that specify a db_field 2016-12-02 11:32:38 -05:00
Jérôme Lafréchoux
8d6cb087c6 Fix changelog 2016-11-29 09:28:13 +01:00
Stefan Wojcik
2b7417c728 add a missing entry to the changelog 2016-11-28 19:33:11 -05:00
Stefan Wójcik
3c455cf1c1 Improve health of this package (#1409)
* added flake8 and flake8-import-order to travis for py27

* fixed a test that fails from time to time depending on an order of a dict

* flake8 tweaks for the entire codebase excluding tests
2016-11-28 19:00:34 -05:00
Stefan Wójcik
5135185e31 Use SVG in README badges 2016-11-28 12:31:50 -05:00
27 changed files with 480 additions and 341 deletions

View File

@@ -1,4 +1,5 @@
language: python
python:
- '2.6'
- '2.7'
@@ -7,35 +8,51 @@ python:
- '3.5'
- pypy
- pypy3
env:
- PYMONGO=2.7
- PYMONGO=2.8
- PYMONGO=3.0
- PYMONGO=dev
matrix:
fast_finish: true
before_install:
- travis_retry sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
- echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' |
sudo tee /etc/apt/sources.list.d/mongodb.list
- travis_retry sudo apt-get update
- travis_retry sudo apt-get install mongodb-org-server
install:
- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev
python-tk
# virtualenv>=14.0.0 has dropped Python 3.2 support
- travis_retry pip install "virtualenv<14.0.0" "tox>=1.9" coveralls
- travis_retry pip install --upgrade pip
- travis_retry pip install coveralls
- travis_retry pip install flake8
- travis_retry pip install tox>=1.9
- travis_retry pip install "virtualenv<14.0.0" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
# Run flake8 for py27
before_script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then tox -e flake8; fi
script:
- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
after_script: coveralls --verbose
notifications:
irc: irc.freenode.org#mongoengine
branches:
only:
- master
- /^v.*$/
deploy:
provider: pypi
user: the_drow

View File

@@ -6,15 +6,15 @@ MongoEngine
:Author: Harry Marr (http://github.com/hmarr)
:Maintainer: Ross Lawley (http://github.com/rozza)
.. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master
:target: http://travis-ci.org/MongoEngine/mongoengine
.. image:: https://travis-ci.org/MongoEngine/mongoengine.svg?branch=master
:target: https://travis-ci.org/MongoEngine/mongoengine
.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master
:target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master
.. image:: https://coveralls.io/repos/github/MongoEngine/mongoengine/badge.svg?branch=master
:target: https://coveralls.io/github/MongoEngine/mongoengine?branch=master
.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.png
:target: https://landscape.io/github/MongoEngine/mongoengine/master
:alt: Code Health
.. image:: https://landscape.io/github/MongoEngine/mongoengine/master/landscape.svg?style=flat
:target: https://landscape.io/github/MongoEngine/mongoengine/master
:alt: Code Health
About
=====
@@ -52,10 +52,14 @@ Some simple examples of what MongoEngine code looks like:
.. code :: python
from mongoengine import *
connect('mydb')
class BlogPost(Document):
title = StringField(required=True, max_length=200)
posted = DateTimeField(default=datetime.datetime.now)
tags = ListField(StringField(max_length=50))
meta = {'allow_inheritance': True}
class TextPost(BlogPost):
content = StringField(required=True)

View File

@@ -4,14 +4,15 @@ Changelog
Changes in 0.10.8
=================
- Fill this in as PRs for v0.10.8 are merged
- Added ability to specify an authentication mechanism (e.g. X.509) #1333
- Added support for falsey primary keys (e.g. doc.pk = 0) #1354
- Fixed BaseQuerySet#sum/average for fields w/ explicit db_field #1417
Changes in 0.10.7
=================
- Dropped Python 3.2 support #1390
- Fixed the bug where dynamic doc has index inside a dict field #1278
- Fixed: ListField minus index assignment does not work #1128
- Fixed not being able to specify `use_db_field=False` on `ListField(EmbeddedDocumentField)` instances
- Fixed cascade delete mixing among collections #1224
- Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206
- Raise `OperationError` when trying to do a `drop_collection` on document with no collection set.
@@ -27,7 +28,8 @@ Changes in 0.10.7
- Added support for pickling QuerySet instances. #1397
- Fixed connecting to a list of hosts #1389
- Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334
- Improvements to the dictionary fields docs # 1383
- Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218
- Improvements to the dictionary fields docs #1383
Changes in 0.10.6
=================
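As a quick illustration of the falsey-primary-key entry above (#1354), here is a minimal sketch; the Person class mirrors the test added further down in this changeset, and the database name is a placeholder.

    from mongoengine import Document, FloatField, IntField, connect

    connect('example-db')  # placeholder database name

    class Person(Document):
        age = IntField(primary_key=True)
        height = FloatField()

    # pk == 0 is falsey, but it is now treated as a real primary key, so
    # save() and update() work instead of behaving as if the document had
    # no pk at all.
    person = Person(age=0, height=1.89)
    person.save()
    person.update(set__height=2.0)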

View File

@@ -1,15 +1,15 @@
import document
from document import *
import fields
from fields import *
import connection
from connection import *
import document
from document import *
import errors
from errors import *
import fields
from fields import *
import queryset
from queryset import *
import signals
from signals import *
from errors import *
import errors
__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
list(queryset.__all__) + signals.__all__ + list(errors.__all__))
@@ -22,4 +22,5 @@ def get_version():
return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
return '.'.join(map(str, VERSION))
__version__ = get_version()

View File

@@ -1,5 +1,5 @@
import weakref
import itertools
import weakref
from mongoengine.common import _import_class
from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
@@ -199,8 +199,9 @@ class BaseList(list):
def _mark_as_changed(self, key=None):
if hasattr(self._instance, '_mark_as_changed'):
if key:
self._instance._mark_as_changed('%s.%s' % (self._name,
key % len(self)))
self._instance._mark_as_changed(
'%s.%s' % (self._name, key % len(self))
)
else:
self._instance._mark_as_changed(self._name)

View File

@@ -1,28 +1,28 @@
import copy
import operator
import numbers
import operator
from collections import Hashable
from functools import partial
import pymongo
from bson import json_util, ObjectId
from bson import ObjectId, json_util
from bson.dbref import DBRef
from bson.son import SON
import pymongo
from mongoengine import signals
from mongoengine.common import _import_class
from mongoengine.errors import (ValidationError, InvalidDocumentError,
LookUpError, FieldDoesNotExist)
from mongoengine.python_support import PY3, txt_type
from mongoengine.base.common import get_document, ALLOW_INHERITANCE
from mongoengine.base.common import ALLOW_INHERITANCE, get_document
from mongoengine.base.datastructures import (
BaseDict,
BaseList,
EmbeddedDocumentList,
StrictDict,
SemiStrictDict
SemiStrictDict,
StrictDict
)
from mongoengine.base.fields import ComplexBaseField
from mongoengine.common import _import_class
from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
LookUpError, ValidationError)
from mongoengine.python_support import PY3, txt_type
__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
@@ -73,7 +73,7 @@ class BaseDocument(object):
# if so raise an Exception.
if not self._dynamic and (self._meta.get('strict', True) or _created):
_undefined_fields = set(values.keys()) - set(
self._fields.keys() + ['id', 'pk', '_cls', '_text_score'])
self._fields.keys() + ['id', 'pk', '_cls', '_text_score'])
if _undefined_fields:
msg = (
"The fields '{0}' do not exist on the document '{1}'"
@@ -310,7 +310,7 @@ class BaseDocument(object):
data = SON()
data["_id"] = None
data['_cls'] = self._class_name
# only root fields ['test1.a', 'test2'] => ['test1', 'test2']
root_fields = set([f.split('.')[0] for f in fields])
@@ -333,11 +333,11 @@ class BaseDocument(object):
i.replace(key, '') for i in fields
if i.startswith(key)]
ex_vars['fields'] = embedded_fields
ex_vars['fields'] = embedded_fields
if 'use_db_field' in f_inputs:
ex_vars['use_db_field'] = use_db_field
value = field.to_mongo(value, **ex_vars)
# Handle self generating fields
@@ -566,8 +566,10 @@ class BaseDocument(object):
continue
if isinstance(field, ReferenceField):
continue
elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
and db_field_name not in changed_fields):
elif (
isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) and
db_field_name not in changed_fields
):
# Find all embedded fields that have been changed
changed = data._get_changed_fields(inspected)
changed_fields += ["%s%s" % (key, k) for k in changed if k]
@@ -608,7 +610,7 @@ class BaseDocument(object):
break
elif isinstance(d, list) and p.lstrip('-').isdigit():
if p[0] == '-':
p = str(len(d)+int(p))
p = str(len(d) + int(p))
try:
d = d[int(p)]
except IndexError:
@@ -644,7 +646,7 @@ class BaseDocument(object):
for p in parts:
if isinstance(d, list) and p.lstrip('-').isdigit():
if p[0] == '-':
p = str(len(d)+int(p))
p = str(len(d) + int(p))
d = d[int(p)]
elif (hasattr(d, '__getattribute__') and
not isinstance(d, dict)):
@@ -775,8 +777,12 @@ class BaseDocument(object):
# Check to see if we need to include _cls
allow_inheritance = cls._meta.get('allow_inheritance',
ALLOW_INHERITANCE)
include_cls = (allow_inheritance and not spec.get('sparse', False) and
spec.get('cls', True) and '_cls' not in spec['fields'])
include_cls = (
allow_inheritance and
not spec.get('sparse', False) and
spec.get('cls', True) and
'_cls' not in spec['fields']
)
# 733: don't include cls if index_cls is False unless there is an explicit cls with the index
include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))

View File

@@ -5,12 +5,12 @@ import weakref
from bson import DBRef, ObjectId, SON
import pymongo
from mongoengine.common import _import_class
from mongoengine.errors import ValidationError
from mongoengine.base.common import ALLOW_INHERITANCE
from mongoengine.base.datastructures import (
BaseDict, BaseList, EmbeddedDocumentList
)
from mongoengine.common import _import_class
from mongoengine.errors import ValidationError
__all__ = ("BaseField", "ComplexBaseField",
"ObjectIdField", "GeoJsonBaseField")
@@ -85,13 +85,13 @@ class BaseField(object):
self.null = null
self.sparse = sparse
self._owner_document = None
# Detect and report conflicts between metadata and base properties.
conflicts = set(dir(self)) & set(kwargs)
if conflicts:
raise TypeError("%s already has attribute(s): %s" % (
self.__class__.__name__, ', '.join(conflicts) ))
self.__class__.__name__, ', '.join(conflicts)))
# Assign metadata to the instance
# This efficient method is available because no __slots__ are defined.
self.__dict__.update(kwargs)
@@ -169,11 +169,11 @@ class BaseField(object):
f_inputs = self.to_mongo.__code__.co_varnames
ex_vars = {}
if 'fields' in f_inputs:
ex_vars['fields'] = fields
ex_vars['fields'] = fields
if 'use_db_field' in f_inputs:
ex_vars['use_db_field'] = use_db_field
return self.to_mongo(value, **ex_vars)
def prepare_query_value(self, op, value):
@@ -206,7 +206,6 @@ class BaseField(object):
elif value not in choice_list:
self.error('Value must be one of %s' % unicode(choice_list))
def _validate(self, value, **kwargs):
# Check the Choices Constraint
if self.choices:

View File

@@ -1,5 +1,7 @@
import warnings
from mongoengine.base.common import ALLOW_INHERITANCE, _document_registry
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
from mongoengine.common import _import_class
from mongoengine.errors import InvalidDocumentError
from mongoengine.python_support import PY3
@@ -7,16 +9,14 @@ from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
MultipleObjectsReturned,
QuerySetManager)
from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE
from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
class DocumentMetaclass(type):
"""Metaclass for all documents.
"""
"""Metaclass for all documents."""
# TODO lower complexity of this method
def __new__(cls, name, bases, attrs):
flattened_bases = cls._get_bases(bases)
super_new = super(DocumentMetaclass, cls).__new__
@@ -162,7 +162,7 @@ class DocumentMetaclass(type):
# copies __func__ into im_func and __self__ into im_self for
# classmethod objects in Document derived classes.
if PY3:
for key, val in new_class.__dict__.items():
for val in new_class.__dict__.values():
if isinstance(val, classmethod):
f = val.__get__(new_class)
if hasattr(f, '__func__') and not hasattr(f, 'im_func'):

View File

@@ -6,6 +6,7 @@ __all__ = ['ConnectionError', 'connect', 'register_connection',
DEFAULT_CONNECTION_NAME = 'default'
if IS_PYMONGO_3:
READ_PREFERENCE = ReadPreference.PRIMARY
else:
@@ -24,7 +25,9 @@ _dbs = {}
def register_connection(alias, name=None, host=None, port=None,
read_preference=READ_PREFERENCE,
username=None, password=None, authentication_source=None,
username=None, password=None,
authentication_source=None,
authentication_mechanism=None,
**kwargs):
"""Add a connection.
@@ -38,6 +41,9 @@ def register_connection(alias, name=None, host=None, port=None,
:param username: username to authenticate with
:param password: password to authenticate with
:param authentication_source: database to authenticate against
:param authentication_mechanism: database authentication mechanisms.
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
:param is_mock: explicitly use mongomock for this connection
(can also be done by using `mongomock://` as db host prefix)
:param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
@@ -53,9 +59,11 @@ def register_connection(alias, name=None, host=None, port=None,
'read_preference': read_preference,
'username': username,
'password': password,
'authentication_source': authentication_source
'authentication_source': authentication_source,
'authentication_mechanism': authentication_mechanism
}
# Handle uri style connections
conn_host = conn_settings['host']
# host can be a list or a string, so if string, force to a list
if isinstance(conn_host, str_types):
@@ -63,25 +71,33 @@ def register_connection(alias, name=None, host=None, port=None,
resolved_hosts = []
for entity in conn_host:
# Handle uri style connections
# Handle Mongomock
if entity.startswith('mongomock://'):
conn_settings['is_mock'] = True
# `mongomock://` is not a valid url prefix and must be replaced by `mongodb://`
resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1))
# Handle URI style connections, only updating connection params which
# were explicitly specified in the URI.
elif '://' in entity:
uri_dict = uri_parser.parse_uri(entity)
resolved_hosts.append(entity)
conn_settings.update({
'name': uri_dict.get('database') or name,
'username': uri_dict.get('username'),
'password': uri_dict.get('password'),
'read_preference': read_preference,
})
if uri_dict.get('database'):
conn_settings['name'] = uri_dict.get('database')
for param in ('read_preference', 'username', 'password'):
if uri_dict.get(param):
conn_settings[param] = uri_dict[param]
uri_options = uri_dict['options']
if 'replicaset' in uri_options:
conn_settings['replicaSet'] = True
if 'authsource' in uri_options:
conn_settings['authentication_source'] = uri_options['authsource']
if 'authmechanism' in uri_options:
conn_settings['authentication_mechanism'] = uri_options['authmechanism']
else:
resolved_hosts.append(entity)
conn_settings['host'] = resolved_hosts
@@ -123,6 +139,7 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
conn_settings.pop('username', None)
conn_settings.pop('password', None)
conn_settings.pop('authentication_source', None)
conn_settings.pop('authentication_mechanism', None)
is_mock = conn_settings.pop('is_mock', None)
if is_mock:
@@ -157,6 +174,7 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
connection_settings.pop('username', None)
connection_settings.pop('password', None)
connection_settings.pop('authentication_source', None)
connection_settings.pop('authentication_mechanism', None)
if conn_settings == connection_settings and _connections.get(db_alias, None):
connection = _connections[db_alias]
break
@@ -176,11 +194,13 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
conn = get_connection(alias)
conn_settings = _connection_settings[alias]
db = conn[conn_settings['name']]
auth_kwargs = {'source': conn_settings['authentication_source']}
if conn_settings['authentication_mechanism'] is not None:
auth_kwargs['mechanism'] = conn_settings['authentication_mechanism']
# Authenticate if necessary
if conn_settings['username'] and conn_settings['password']:
db.authenticate(conn_settings['username'],
conn_settings['password'],
source=conn_settings['authentication_source'])
if conn_settings['username'] and (conn_settings['password'] or
conn_settings['authentication_mechanism'] == 'MONGODB-X509'):
db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs)
_dbs[alias] = db
return _dbs[alias]
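A minimal sketch of how the new authentication_mechanism option added above might be used for X.509 authentication; the host, certificate path, and username are placeholders, and the client certificate and server-side X.509 setup are assumed to exist already.

    from mongoengine import connect

    # Authenticate with X.509: the username is the subject of the client
    # certificate, no password is needed, and the extra ssl_* kwargs are
    # passed straight through to the pymongo driver.
    connect(
        'mydb',
        host='mongodb://mongo.example.com:27017/mydb',
        username='CN=myclient,OU=clients,O=Example',
        authentication_mechanism='MONGODB-X509',
        ssl=True,
        ssl_certfile='/path/to/client.pem',
    )

With df12211c25, keyword arguments are only overridden by settings the URI actually specifies, so a bare mongodb://localhost/ host no longer clobbers an explicitly passed username or password.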

View File

@@ -1,15 +1,14 @@
from bson import DBRef, SON
from mongoengine.python_support import txt_type
from base import (
from .base import (
BaseDict, BaseList, EmbeddedDocumentList,
TopLevelDocumentMetaclass, get_document
)
from fields import (ReferenceField, ListField, DictField, MapField)
from connection import get_db
from queryset import QuerySet
from document import Document, EmbeddedDocument
from .connection import get_db
from .document import Document, EmbeddedDocument
from .fields import DictField, ListField, MapField, ReferenceField
from .python_support import txt_type
from .queryset import QuerySet
class DeReference(object):

View File

@@ -1,28 +1,29 @@
import warnings
import pymongo
import re
import warnings
from pymongo.read_preferences import ReadPreference
from bson.dbref import DBRef
import pymongo
from pymongo.read_preferences import ReadPreference
from mongoengine import signals
from mongoengine.common import _import_class
from mongoengine.base import (
DocumentMetaclass,
TopLevelDocumentMetaclass,
BaseDocument,
BaseDict,
BaseList,
EmbeddedDocumentList,
ALLOW_INHERITANCE,
BaseDict,
BaseDocument,
BaseList,
DocumentMetaclass,
EmbeddedDocumentList,
TopLevelDocumentMetaclass,
get_document
)
from mongoengine.errors import (InvalidQueryError, InvalidDocumentError,
from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.context_managers import switch_collection, switch_db
from mongoengine.errors import (InvalidDocumentError, InvalidQueryError,
SaveConditionError)
from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.queryset import (OperationError, NotUniqueError,
from mongoengine.queryset import (NotUniqueError, OperationError,
QuerySet, transform)
from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
from mongoengine.context_managers import switch_db, switch_collection
__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
'DynamicEmbeddedDocument', 'OperationError',
@@ -332,8 +333,10 @@ class Document(BaseDocument):
# Correct behaviour in 2.X and in 3.0.1+ versions
if not object_id and pymongo.version_tuple == (3, 0):
pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk)
object_id = self._qs.filter(pk=pk_as_mongo_obj).first() and \
self._qs.filter(pk=pk_as_mongo_obj).first().pk
object_id = (
self._qs.filter(pk=pk_as_mongo_obj).first() and
self._qs.filter(pk=pk_as_mongo_obj).first().pk
) # TODO doesn't this make 2 queries?
else:
object_id = doc['_id']
updates, removals = self._delta()
@@ -469,7 +472,7 @@ class Document(BaseDocument):
Raises :class:`OperationError` if called on an object that has not yet
been saved.
"""
if not self.pk:
if self.pk is None:
if kwargs.get('upsert', False):
query = self.to_mongo()
if "_cls" in query:
@@ -501,10 +504,10 @@ class Document(BaseDocument):
signal_kwargs = signal_kwargs or {}
signals.pre_delete.send(self.__class__, document=self, **signal_kwargs)
# Delete FileFields separately
# Delete FileFields separately
FileField = _import_class('FileField')
for name, field in self._fields.iteritems():
if isinstance(field, FileField):
if isinstance(field, FileField):
getattr(self, name).delete()
try:
@@ -601,7 +604,7 @@ class Document(BaseDocument):
elif "max_depth" in kwargs:
max_depth = kwargs["max_depth"]
if not self.pk:
if self.pk is None:
raise self.DoesNotExist("Document does not exist")
obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
**self._object_key).only(*fields).limit(
@@ -652,7 +655,7 @@ class Document(BaseDocument):
def to_dbref(self):
"""Returns an instance of :class:`~bson.dbref.DBRef` useful in
`__raw__` queries."""
if not self.pk:
if self.pk is None:
msg = "Only saved documents can have a valid dbref"
raise OperationError(msg)
return DBRef(self.__class__._get_collection_name(), self.pk)

View File

@@ -8,6 +8,9 @@ import uuid
import warnings
from operator import itemgetter
from bson import Binary, DBRef, ObjectId, SON
import gridfs
import pymongo
import six
try:
@@ -17,22 +20,18 @@ except ImportError:
else:
import dateutil.parser
import pymongo
import gridfs
from bson import Binary, DBRef, SON, ObjectId
try:
from bson.int64 import Int64
except ImportError:
Int64 = long
from mongoengine.errors import ValidationError, DoesNotExist
from mongoengine.python_support import (PY3, bin_type, txt_type,
str_types, StringIO)
from base import (BaseField, ComplexBaseField, ObjectIdField, GeoJsonBaseField,
get_document, BaseDocument)
from queryset import DO_NOTHING, QuerySet
from document import Document, EmbeddedDocument
from connection import get_db, DEFAULT_CONNECTION_NAME
from .base import (BaseDocument, BaseField, ComplexBaseField, GeoJsonBaseField,
ObjectIdField, get_document)
from .connection import DEFAULT_CONNECTION_NAME, get_db
from .document import Document, EmbeddedDocument
from .errors import DoesNotExist, ValidationError
from .python_support import PY3, StringIO, bin_type, str_types, txt_type
from .queryset import DO_NOTHING, QuerySet
try:
from PIL import Image, ImageOps
@@ -1015,11 +1014,10 @@ class ReferenceField(BaseField):
if self.document_type._meta.get('abstract') and \
not isinstance(value, self.document_type):
self.error('%s is not an instance of abstract reference'
' type %s' % (value._class_name,
self.document_type._class_name)
)
self.error(
'%s is not an instance of abstract reference type %s' % (
self.document_type._class_name)
)
def lookup_member(self, member_name):
return self.document_type._fields.get(member_name)
@@ -1126,7 +1124,7 @@ class CachedReferenceField(BaseField):
new_fields = [f for f in self.fields if f in fields]
else:
new_fields = self.fields
value.update(dict(document.to_mongo(use_db_field, fields=new_fields)))
return value

View File

@@ -1,6 +1,6 @@
from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned,
InvalidQueryError, OperationError,
NotUniqueError)
from mongoengine.errors import (DoesNotExist, InvalidQueryError,
MultipleObjectsReturned, NotUniqueError,
OperationError)
from mongoengine.queryset.field_list import *
from mongoengine.queryset.manager import *
from mongoengine.queryset.queryset import *

View File

@@ -7,20 +7,19 @@ import pprint
import re
import warnings
from bson import SON
from bson import SON, json_util
from bson.code import Code
from bson import json_util
import pymongo
import pymongo.errors
from pymongo.common import validate_read_preference
from mongoengine import signals
from mongoengine.base.common import get_document
from mongoengine.common import _import_class
from mongoengine.connection import get_db
from mongoengine.context_managers import switch_db
from mongoengine.common import _import_class
from mongoengine.base.common import get_document
from mongoengine.errors import (OperationError, NotUniqueError,
InvalidQueryError, LookUpError)
from mongoengine.errors import (InvalidQueryError, LookUpError,
NotUniqueError, OperationError)
from mongoengine.python_support import IS_PYMONGO_3
from mongoengine.queryset import transform
from mongoengine.queryset.field_list import QueryFieldList
@@ -155,10 +154,8 @@ class BaseQuerySet(object):
# forse load cursor
# self._cursor
def __getitem__(self, key):
"""Support skip and limit using getitem and slicing syntax.
"""
"""Support skip and limit using getitem and slicing syntax."""
queryset = self.clone()
# Slice provided
@@ -529,8 +526,9 @@ class BaseQuerySet(object):
.. versionadded:: 0.10.2
"""
atomic_update = self.update(multi=False, upsert=True, write_concern=write_concern,
full_result=True, **update)
atomic_update = self.update(multi=False, upsert=True,
write_concern=write_concern,
full_result=True, **update)
if atomic_update['updatedExisting']:
document = self.get()
@@ -1273,9 +1271,10 @@ class BaseQuerySet(object):
:param field: the field to sum over; use dot notation to refer to
embedded document fields
"""
db_field = self._fields_to_dbfields([field]).pop()
pipeline = [
{'$match': self._query},
{'$group': {'_id': 'sum', 'total': {'$sum': '$' + field}}}
{'$group': {'_id': 'sum', 'total': {'$sum': '$' + db_field}}}
]
# if we're performing a sum over a list field, we sum up all the
@@ -1302,9 +1301,10 @@ class BaseQuerySet(object):
:param field: the field to average over; use dot notation to refer to
embedded document fields
"""
db_field = self._fields_to_dbfields([field]).pop()
pipeline = [
{'$match': self._query},
{'$group': {'_id': 'avg', 'total': {'$avg': '$' + field}}}
{'$group': {'_id': 'avg', 'total': {'$avg': '$' + db_field}}}
]
# if we're performing an average over a list field, we average out
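To illustrate the sum/average fix (#1417) made in the two hunks above, a small sketch; the UserVisit model matches the tests added later in this changeset, and the database name is a placeholder.

    from mongoengine import Document, IntField, connect

    connect('example-db')  # placeholder database name

    class UserVisit(Document):
        # stored in MongoDB under the key 'visits', not 'num_visits'
        num_visits = IntField(db_field='visits')

    UserVisit.drop_collection()
    UserVisit.objects.create(num_visits=10)
    UserVisit.objects.create(num_visits=5)

    # The $sum/$avg expressions are now built from the resolved db_field
    # ('$visits') rather than the attribute name, so these aggregate the
    # stored values instead of summing a non-existent key.
    assert UserVisit.objects.sum('num_visits') == 15
    assert UserVisit.objects.average('num_visits') == 7.5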

View File

@@ -1,6 +1,6 @@
from mongoengine.errors import OperationError
from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY,
CASCADE, DENY, PULL)
from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING,
NULLIFY, PULL)
__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
'DENY', 'PULL')

View File

@@ -1,11 +1,11 @@
from collections import defaultdict
import pymongo
from bson import SON
import pymongo
from mongoengine.base.fields import UPDATE_OPERATORS
from mongoengine.connection import get_connection
from mongoengine.common import _import_class
from mongoengine.connection import get_connection
from mongoengine.errors import InvalidQueryError
from mongoengine.python_support import IS_PYMONGO_3
@@ -108,8 +108,11 @@ def query(_doc_cls=None, **kwargs):
elif op in ('match', 'elemMatch'):
ListField = _import_class('ListField')
EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
if (isinstance(value, dict) and isinstance(field, ListField) and
isinstance(field.field, EmbeddedDocumentField)):
if (
isinstance(value, dict) and
isinstance(field, ListField) and
isinstance(field.field, EmbeddedDocumentField)
):
value = query(field.field.document_type, **value)
else:
value = field.prepare_query_value(op, value)

View File

@@ -29,7 +29,7 @@ except ImportError:
'because the blinker library is '
'not installed.')
send = lambda *a, **kw: None
send = lambda *a, **kw: None # noqa
connect = disconnect = has_receivers_for = receivers_for = \
temporarily_connected_to = _fail
del _fail

View File

@@ -1,3 +1,5 @@
nose
pymongo>=2.7.1
six==1.10.0
flake8
flake8-import-order

View File

@@ -5,3 +5,9 @@ cover-erase = 1
cover-branches = 1
cover-package = mongoengine
tests = tests
[flake8]
ignore=E501,F401,F403,F405,I201
exclude=build,dist,docs,venv,.tox,.eggs,tests
max-complexity=42
application-import-names=mongoengine,tests

View File

@@ -1,6 +1,6 @@
import os
import sys
from setuptools import setup, find_packages
from setuptools import find_packages, setup
# Hack to silence atexit traceback in newer python versions
try:
@@ -8,8 +8,10 @@ try:
except ImportError:
pass
DESCRIPTION = 'MongoEngine is a Python Object-Document ' + \
'Mapper for working with MongoDB.'
DESCRIPTION = (
'MongoEngine is a Python Object-Document '
'Mapper for working with MongoDB.'
)
try:
with open('README.rst') as fin:
@@ -23,6 +25,7 @@ def get_version(version_tuple):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
# Dirty hack to get version number from monogengine/__init__.py - we can't
# import it as it depends on PyMongo and PyMongo isn't installed until this
# file is read
@@ -64,21 +67,22 @@ else:
if sys.version_info[0] == 2 and sys.version_info[1] == 6:
extra_opts['tests_require'].append('unittest2')
setup(name='mongoengine',
version=VERSION,
author='Harry Marr',
author_email='harry.marr@{nospam}gmail.com',
maintainer="Ross Lawley",
maintainer_email="ross.lawley@{nospam}gmail.com",
url='http://mongoengine.org/',
download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
license='MIT',
include_package_data=True,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['pymongo>=2.7.1', 'six'],
test_suite='nose.collector',
**extra_opts
setup(
name='mongoengine',
version=VERSION,
author='Harry Marr',
author_email='harry.marr@{nospam}gmail.com',
maintainer="Ross Lawley",
maintainer_email="ross.lawley@{nospam}gmail.com",
url='http://mongoengine.org/',
download_url='https://github.com/MongoEngine/mongoengine/tarball/master',
license='MIT',
include_package_data=True,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['pymongo>=2.7.1', 'six'],
test_suite='nose.collector',
**extra_opts
)

View File

@@ -844,7 +844,12 @@ class IndexesTest(unittest.TestCase):
self.assertEqual({'text': 'OK', '_id': {'term': 'ok', 'name': 'n'}},
report.to_mongo())
self.assertEqual(report, ReportDictField.objects.get(pk=my_key))
# We can't directly call ReportDictField.objects.get(pk=my_key),
# because dicts are unordered, and if the order in MongoDB is
# different than the one in `my_key`, this test will fail.
self.assertEqual(report, ReportDictField.objects.get(pk__name=my_key['name']))
self.assertEqual(report, ReportDictField.objects.get(pk__term=my_key['term']))
def test_string_indexes(self):

View File

@@ -3202,5 +3202,20 @@ class InstanceTest(unittest.TestCase):
self.assertEqual(b._instance, a)
self.assertEqual(idx, 2)
def test_falsey_pk(self):
"""Ensure that we can create and update a document with Falsey PK.
"""
class Person(Document):
age = IntField(primary_key=True)
height = FloatField()
person = Person()
person.age = 0
person.height = 1.89
person.save()
person.update(set__height=2.0)
if __name__ == '__main__':
unittest.main()

View File

@@ -1,28 +1,23 @@
# -*- coding: utf-8 -*-
import sys
sys.path[0:0] = [""]
import datetime
import unittest
import uuid
from bson import DBRef, ObjectId
from nose.plugins.skip import SkipTest
from datetime import datetime, timedelta
import pymongo
from pymongo.errors import ConfigurationError
from pymongo.read_preferences import ReadPreference
from bson import ObjectId, DBRef
from mongoengine import *
from mongoengine.connection import get_connection, get_db
from mongoengine.python_support import PY3, IS_PYMONGO_3
from mongoengine.context_managers import query_counter, switch_db
from mongoengine.queryset import (QuerySet, QuerySetManager,
MultipleObjectsReturned, DoesNotExist,
queryset_manager)
from mongoengine.errors import InvalidQueryError
from mongoengine.python_support import IS_PYMONGO_3, PY3
from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned,
QuerySet, QuerySetManager, queryset_manager)
__all__ = ("QuerySetTest",)
@@ -184,12 +179,14 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(self.Person.objects.count(), 55)
self.assertEqual("Person object", "%s" % self.Person.objects[0])
self.assertEqual(
"[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[1:3])
self.assertEqual(
"[<Person: Person object>, <Person: Person object>]", "%s" % self.Person.objects[51:53])
self.assertEqual("[<Person: Person object>, <Person: Person object>]",
"%s" % self.Person.objects[1:3])
self.assertEqual("[<Person: Person object>, <Person: Person object>]",
"%s" % self.Person.objects[51:53])
# Test only after limit
self.assertEqual(self.Person.objects().limit(2).only('name')[0].age, None)
# Test only after skip
self.assertEqual(self.Person.objects().skip(2).only('name')[0].age, None)
@@ -287,6 +284,9 @@ class QuerySetTest(unittest.TestCase):
blog = Blog.objects(posts__0__comments__0__name='testa').get()
self.assertEqual(blog, blog1)
blog = Blog.objects(posts__0__comments__0__name='testb').get()
self.assertEqual(blog, blog2)
query = Blog.objects(posts__1__comments__1__name='testb')
self.assertEqual(query.count(), 2)
@@ -633,39 +633,39 @@ class QuerySetTest(unittest.TestCase):
self.assertRaises(ValidationError, Doc.objects().update, dt_f="datetime", upsert=True)
self.assertRaises(ValidationError, Doc.objects().update, ed_f__str_f=1, upsert=True)
def test_update_related_models( self ):
class TestPerson( Document ):
def test_update_related_models(self):
class TestPerson(Document):
name = StringField()
class TestOrganization( Document ):
class TestOrganization(Document):
name = StringField()
owner = ReferenceField( TestPerson )
owner = ReferenceField(TestPerson)
TestPerson.drop_collection()
TestOrganization.drop_collection()
p = TestPerson( name='p1' )
p = TestPerson(name='p1')
p.save()
o = TestOrganization( name='o1' )
o = TestOrganization(name='o1')
o.save()
o.owner = p
p.name = 'p2'
self.assertEqual( o._get_changed_fields(), [ 'owner' ] )
self.assertEqual( p._get_changed_fields(), [ 'name' ] )
self.assertEqual(o._get_changed_fields(), ['owner'])
self.assertEqual(p._get_changed_fields(), ['name'])
o.save()
self.assertEqual( o._get_changed_fields(), [] )
self.assertEqual( p._get_changed_fields(), [ 'name' ] ) # Fails; it's empty
self.assertEqual(o._get_changed_fields(), [])
self.assertEqual(p._get_changed_fields(), ['name']) # Fails; it's empty
# This will do NOTHING at all, even though we changed the name
p.save()
p.reload()
self.assertEqual( p.name, 'p2' ) # Fails; it's still `p1`
self.assertEqual(p.name, 'p2') # Fails; it's still `p1`
def test_upsert(self):
self.Person.drop_collection()
@@ -694,7 +694,6 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(30, bobby.age)
self.assertEqual(bob.id, bobby.id)
def test_set_on_insert(self):
self.Person.drop_collection()
@@ -1113,24 +1112,29 @@ class QuerySetTest(unittest.TestCase):
blog_2.save()
blog_3.save()
blog_post_1 = BlogPost(blog=blog_1, title="Blog Post #1",
is_published=True,
published_date=datetime(2010, 1, 5, 0, 0, 0))
blog_post_2 = BlogPost(blog=blog_2, title="Blog Post #2",
is_published=True,
published_date=datetime(2010, 1, 6, 0, 0, 0))
blog_post_3 = BlogPost(blog=blog_3, title="Blog Post #3",
is_published=True,
published_date=datetime(2010, 1, 7, 0, 0, 0))
blog_post_1.save()
blog_post_2.save()
blog_post_3.save()
BlogPost.objects.create(
blog=blog_1,
title="Blog Post #1",
is_published=True,
published_date=datetime.datetime(2010, 1, 5, 0, 0, 0)
)
BlogPost.objects.create(
blog=blog_2,
title="Blog Post #2",
is_published=True,
published_date=datetime.datetime(2010, 1, 6, 0, 0, 0)
)
BlogPost.objects.create(
blog=blog_3,
title="Blog Post #3",
is_published=True,
published_date=datetime.datetime(2010, 1, 7, 0, 0, 0)
)
# find all published blog posts before 2010-01-07
published_posts = BlogPost.published()
published_posts = published_posts.filter(
published_date__lt=datetime(2010, 1, 7, 0, 0, 0))
published_date__lt=datetime.datetime(2010, 1, 7, 0, 0, 0))
self.assertEqual(published_posts.count(), 2)
blog_posts = BlogPost.objects
@@ -1161,16 +1165,18 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
blog_post_1 = BlogPost(title="Blog Post #1",
published_date=datetime(2010, 1, 5, 0, 0, 0))
blog_post_2 = BlogPost(title="Blog Post #2",
published_date=datetime(2010, 1, 6, 0, 0, 0))
blog_post_3 = BlogPost(title="Blog Post #3",
published_date=datetime(2010, 1, 7, 0, 0, 0))
blog_post_1.save()
blog_post_2.save()
blog_post_3.save()
blog_post_1 = BlogPost.objects.create(
title="Blog Post #1",
published_date=datetime.datetime(2010, 1, 5, 0, 0, 0)
)
blog_post_2 = BlogPost.objects.create(
title="Blog Post #2",
published_date=datetime.datetime(2010, 1, 6, 0, 0, 0)
)
blog_post_3 = BlogPost.objects.create(
title="Blog Post #3",
published_date=datetime.datetime(2010, 1, 7, 0, 0, 0)
)
# get the "first" BlogPost using default ordering
# from BlogPost.meta.ordering
@@ -1219,7 +1225,7 @@ class QuerySetTest(unittest.TestCase):
}
BlogPost.objects.create(
title='whatever', published_date=datetime.utcnow())
title='whatever', published_date=datetime.datetime.utcnow())
with db_ops_tracker() as q:
BlogPost.objects.get(title='whatever')
@@ -2082,18 +2088,22 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
blog_post_3 = BlogPost(title="Blog Post #3",
published_date=datetime(2010, 1, 6, 0, 0, 0))
blog_post_2 = BlogPost(title="Blog Post #2",
published_date=datetime(2010, 1, 5, 0, 0, 0))
blog_post_4 = BlogPost(title="Blog Post #4",
published_date=datetime(2010, 1, 7, 0, 0, 0))
blog_post_1 = BlogPost(title="Blog Post #1", published_date=None)
blog_post_3.save()
blog_post_1.save()
blog_post_4.save()
blog_post_2.save()
blog_post_3 = BlogPost.objects.create(
title="Blog Post #3",
published_date=datetime.datetime(2010, 1, 6, 0, 0, 0)
)
blog_post_2 = BlogPost.objects.create(
title="Blog Post #2",
published_date=datetime.datetime(2010, 1, 5, 0, 0, 0)
)
blog_post_4 = BlogPost.objects.create(
title="Blog Post #4",
published_date=datetime.datetime(2010, 1, 7, 0, 0, 0)
)
blog_post_1 = BlogPost.objects.create(
title="Blog Post #1",
published_date=None
)
expected = [blog_post_1, blog_post_2, blog_post_3, blog_post_4]
self.assertSequence(BlogPost.objects.order_by('published_date'),
@@ -2112,16 +2122,18 @@ class QuerySetTest(unittest.TestCase):
BlogPost.drop_collection()
blog_post_1 = BlogPost(title="A",
published_date=datetime(2010, 1, 6, 0, 0, 0))
blog_post_2 = BlogPost(title="B",
published_date=datetime(2010, 1, 6, 0, 0, 0))
blog_post_3 = BlogPost(title="C",
published_date=datetime(2010, 1, 7, 0, 0, 0))
blog_post_2.save()
blog_post_3.save()
blog_post_1.save()
blog_post_1 = BlogPost.objects.create(
title="A",
published_date=datetime.datetime(2010, 1, 6, 0, 0, 0)
)
blog_post_2 = BlogPost.objects.create(
title="B",
published_date=datetime.datetime(2010, 1, 6, 0, 0, 0)
)
blog_post_3 = BlogPost.objects.create(
title="C",
published_date=datetime.datetime(2010, 1, 7, 0, 0, 0)
)
qs = BlogPost.objects.order_by('published_date', 'title')
expected = [blog_post_1, blog_post_2, blog_post_3]
@@ -2425,7 +2437,7 @@ class QuerySetTest(unittest.TestCase):
Link.drop_collection()
now = datetime.utcnow()
now = datetime.datetime.utcnow()
# Note: Test data taken from a custom Reddit homepage on
# Fri, 12 Feb 2010 14:36:00 -0600. Link ordering should
@@ -2434,27 +2446,27 @@ class QuerySetTest(unittest.TestCase):
Link(title="Google Buzz auto-followed a woman's abusive ex ...",
up_votes=1079,
down_votes=553,
submitted=now - timedelta(hours=4)).save()
submitted=now - datetime.timedelta(hours=4)).save()
Link(title="We did it! Barbie is a computer engineer.",
up_votes=481,
down_votes=124,
submitted=now - timedelta(hours=2)).save()
submitted=now - datetime.timedelta(hours=2)).save()
Link(title="This Is A Mosquito Getting Killed By A Laser",
up_votes=1446,
down_votes=530,
submitted=now - timedelta(hours=13)).save()
submitted=now - datetime.timedelta(hours=13)).save()
Link(title="Arabic flashcards land physics student in jail.",
up_votes=215,
down_votes=105,
submitted=now - timedelta(hours=6)).save()
submitted=now - datetime.timedelta(hours=6)).save()
Link(title="The Burger Lab: Presenting, the Flood Burger",
up_votes=48,
down_votes=17,
submitted=now - timedelta(hours=5)).save()
submitted=now - datetime.timedelta(hours=5)).save()
Link(title="How to see polarization with the naked eye",
up_votes=74,
down_votes=13,
submitted=now - timedelta(hours=10)).save()
submitted=now - datetime.timedelta(hours=10)).save()
map_f = """
function() {
@@ -2504,7 +2516,7 @@ class QuerySetTest(unittest.TestCase):
# provide the reddit epoch (used for ranking) as a variable available
# to all phases of the map/reduce operation: map, reduce, and finalize.
reddit_epoch = mktime(datetime(2005, 12, 8, 7, 46, 43).timetuple())
reddit_epoch = mktime(datetime.datetime(2005, 12, 8, 7, 46, 43).timetuple())
scope = {'reddit_epoch': reddit_epoch}
# run a map/reduce operation across all links. ordering is set
@@ -2826,6 +2838,34 @@ class QuerySetTest(unittest.TestCase):
sum([a for a in ages if a >= 50])
)
def test_sum_over_db_field(self):
"""Ensure that a field mapped to a db field with a different name
can be summed over correctly.
"""
class UserVisit(Document):
num_visits = IntField(db_field='visits')
UserVisit.drop_collection()
UserVisit.objects.create(num_visits=10)
UserVisit.objects.create(num_visits=5)
self.assertEqual(UserVisit.objects.sum('num_visits'), 15)
def test_average_over_db_field(self):
"""Ensure that a field mapped to a db field with a different name
can have its average computed correctly.
"""
class UserVisit(Document):
num_visits = IntField(db_field='visits')
UserVisit.drop_collection()
UserVisit.objects.create(num_visits=20)
UserVisit.objects.create(num_visits=10)
self.assertEqual(UserVisit.objects.average('num_visits'), 15)
def test_embedded_average(self):
class Pay(EmbeddedDocument):
value = DecimalField()
@@ -3096,13 +3136,11 @@ class QuerySetTest(unittest.TestCase):
mark_twain = Author(name="Mark Twain")
john_tolkien = Author(name="John Ronald Reuel Tolkien")
book = Book(title="Tom Sawyer", authors=[mark_twain]).save()
book = Book(
title="The Lord of the Rings", authors=[john_tolkien]).save()
book = Book(
title="The Stories", authors=[mark_twain, john_tolkien]).save()
authors = Book.objects.distinct("authors")
Book.objects.create(title="Tom Sawyer", authors=[mark_twain])
Book.objects.create(title="The Lord of the Rings", authors=[john_tolkien])
Book.objects.create(title="The Stories", authors=[mark_twain, john_tolkien])
authors = Book.objects.distinct("authors")
self.assertEqual(authors, [mark_twain, john_tolkien])
def test_distinct_ListField_EmbeddedDocumentField_EmbeddedDocumentField(self):
@@ -3132,17 +3170,14 @@ class QuerySetTest(unittest.TestCase):
mark_twain = Author(name="Mark Twain", country=scotland)
john_tolkien = Author(name="John Ronald Reuel Tolkien", country=tibet)
book = Book(title="Tom Sawyer", authors=[mark_twain]).save()
book = Book(
title="The Lord of the Rings", authors=[john_tolkien]).save()
book = Book(
title="The Stories", authors=[mark_twain, john_tolkien]).save()
country_list = Book.objects.distinct("authors.country")
Book.objects.create(title="Tom Sawyer", authors=[mark_twain])
Book.objects.create(title="The Lord of the Rings", authors=[john_tolkien])
Book.objects.create(title="The Stories", authors=[mark_twain, john_tolkien])
country_list = Book.objects.distinct("authors.country")
self.assertEqual(country_list, [scotland, tibet])
continent_list = Book.objects.distinct("authors.country.continent")
self.assertEqual(continent_list, [europe, asia])
def test_distinct_ListField_ReferenceField(self):
@@ -3174,7 +3209,7 @@ class QuerySetTest(unittest.TestCase):
class BlogPost(Document):
tags = ListField(StringField())
deleted = BooleanField(default=False)
date = DateTimeField(default=datetime.now)
date = DateTimeField(default=datetime.datetime.now)
@queryset_manager
def objects(cls, qryset):
@@ -3997,14 +4032,14 @@ class QuerySetTest(unittest.TestCase):
self.assertEqual(
"A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0])
if PY3:
self.assertEqual(
"['A1', 'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3])
self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by(
self.assertEqual("['A1', 'A2']", "%s" % self.Person.objects.order_by(
'age').scalar('name')[1:3])
self.assertEqual("['A51', 'A52']", "%s" % self.Person.objects.order_by(
'age').scalar('name')[51:53])
else:
self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by(
self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by(
'age').scalar('name')[1:3])
self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by(
self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by(
'age').scalar('name')[51:53])
# with_id and in_bulk
@@ -4013,12 +4048,12 @@ class QuerySetTest(unittest.TestCase):
self.Person.objects.scalar('name').with_id(person.id))
pks = self.Person.objects.order_by('age').scalar('pk')[1:3]
names = self.Person.objects.scalar('name').in_bulk(list(pks)).values()
if PY3:
self.assertEqual("['A1', 'A2']", "%s" % sorted(
self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
expected = "['A1', 'A2']"
else:
self.assertEqual("[u'A1', u'A2']", "%s" % sorted(
self.Person.objects.scalar('name').in_bulk(list(pks)).values()))
expected = "[u'A1', u'A2']"
self.assertEqual(expected, "%s" % sorted(names))
def test_elem_match(self):
class Foo(EmbeddedDocument):
@@ -4115,7 +4150,7 @@ class QuerySetTest(unittest.TestCase):
txt = StringField()
meta = {
'indexes': [ 'txt' ]
'indexes': ['txt']
}
Bar.drop_collection()
@@ -4130,49 +4165,49 @@ class QuerySetTest(unittest.TestCase):
# read_preference as a kwarg
bars = Bar.objects(read_preference=ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._read_preference,
ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
ReadPreference.SECONDARY_PREFERRED)
# read_preference as a query set method
bars = Bar.objects.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._read_preference,
ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
ReadPreference.SECONDARY_PREFERRED)
# read_preference after skip
bars = Bar.objects.skip(1) \
.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._read_preference,
ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
ReadPreference.SECONDARY_PREFERRED)
# read_preference after limit
bars = Bar.objects.limit(1) \
.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._read_preference,
ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
ReadPreference.SECONDARY_PREFERRED)
# read_preference after order_by
bars = Bar.objects.order_by('txt') \
.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._read_preference,
ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
ReadPreference.SECONDARY_PREFERRED)
# read_preference after hint
bars = Bar.objects.hint([('txt', 1)]) \
.read_preference(ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(
bars._read_preference, ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._read_preference,
ReadPreference.SECONDARY_PREFERRED)
self.assertEqual(bars._cursor._Cursor__read_preference,
ReadPreference.SECONDARY_PREFERRED)
ReadPreference.SECONDARY_PREFERRED)
def test_json_simple(self):
@@ -4208,7 +4243,7 @@ class QuerySetTest(unittest.TestCase):
int_field = IntField(default=1)
float_field = FloatField(default=1.1)
boolean_field = BooleanField(default=True)
datetime_field = DateTimeField(default=datetime.now)
datetime_field = DateTimeField(default=datetime.datetime.now)
embedded_document_field = EmbeddedDocumentField(
EmbeddedDoc, default=lambda: EmbeddedDoc())
list_field = ListField(default=lambda: [1, 2, 3])
@@ -4218,7 +4253,7 @@ class QuerySetTest(unittest.TestCase):
Simple, default=lambda: Simple().save())
map_field = MapField(IntField(), default=lambda: {"simple": 1})
decimal_field = DecimalField(default=1.0)
complex_datetime_field = ComplexDateTimeField(default=datetime.now)
complex_datetime_field = ComplexDateTimeField(default=datetime.datetime.now)
url_field = URLField(default="http://mongoengine.org")
dynamic_field = DynamicField(default=1)
generic_reference_field = GenericReferenceField(
@@ -4565,8 +4600,7 @@ class QuerySetTest(unittest.TestCase):
B.drop_collection()
a = A.objects.create(id='custom_id')
b = B.objects.create(a=a)
B.objects.create(a=a)
self.assertEqual(B.objects.count(), 1)
self.assertEqual(B.objects.get(a=a).a, a)

View File

@@ -1,11 +1,7 @@
import sys
sys.path[0:0] = [""]
import unittest
from mongoengine import *
from mongoengine.queryset import Q
from mongoengine.queryset import transform
from mongoengine.queryset import Q, transform
__all__ = ("TransformTest",)
@@ -41,8 +37,8 @@ class TransformTest(unittest.TestCase):
DicDoc.drop_collection()
Doc.drop_collection()
DicDoc().save()
doc = Doc().save()
dic_doc = DicDoc().save()
for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")):
update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc})
@@ -55,7 +51,6 @@ class TransformTest(unittest.TestCase):
update = transform.update(DicDoc, pull__dictField__test=doc)
self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict))
def test_query_field_name(self):
"""Ensure that the correct field name is used when querying.
"""
@@ -156,26 +151,33 @@ class TransformTest(unittest.TestCase):
class Doc(Document):
meta = {'allow_inheritance': False}
raw_query = Doc.objects(__raw__={'deleted': False,
'scraped': 'yes',
'$nor': [{'views.extracted': 'no'},
{'attachments.views.extracted':'no'}]
})._query
raw_query = Doc.objects(__raw__={
'deleted': False,
'scraped': 'yes',
'$nor': [
{'views.extracted': 'no'},
{'attachments.views.extracted': 'no'}
]
})._query
expected = {'deleted': False, 'scraped': 'yes',
'$nor': [{'views.extracted': 'no'},
{'attachments.views.extracted': 'no'}]}
self.assertEqual(expected, raw_query)
self.assertEqual(raw_query, {
'deleted': False,
'scraped': 'yes',
'$nor': [
{'views.extracted': 'no'},
{'attachments.views.extracted': 'no'}
]
})
def test_geojson_PointField(self):
class Location(Document):
loc = PointField()
update = transform.update(Location, set__loc=[1, 2])
self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}})
self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})
update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1,2]})
self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}})
update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1, 2]})
self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1, 2]}}})
def test_geojson_LineStringField(self):
class Location(Document):
@@ -238,5 +240,6 @@ class TransformTest(unittest.TestCase):
events = Event.objects(location__within=box)
self.assertRaises(InvalidQueryError, lambda: events.count())
if __name__ == '__main__':
unittest.main()

View File

@@ -1,14 +1,12 @@
import sys
sys.path[0:0] = [""]
import datetime
import re
import unittest
from bson import ObjectId
from datetime import datetime
from mongoengine import *
from mongoengine.queryset import Q
from mongoengine.errors import InvalidQueryError
from mongoengine.queryset import Q
__all__ = ("QTest",)
@@ -132,12 +130,12 @@ class QTest(unittest.TestCase):
TestDoc(x=10).save()
TestDoc(y=True).save()
self.assertEqual(query,
{'$and': [
{'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
{'$or': [{'x': {'$lt': 100}}, {'y': True}]}
]})
self.assertEqual(query, {
'$and': [
{'$or': [{'x': {'$gt': 0}}, {'x': {'$exists': False}}]},
{'$or': [{'x': {'$lt': 100}}, {'y': True}]}
]
})
self.assertEqual(2, TestDoc.objects(q1 & q2).count())
def test_or_and_or_combination(self):
@@ -157,15 +155,14 @@ class QTest(unittest.TestCase):
q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False)))
query = (q1 | q2).to_query(TestDoc)
self.assertEqual(query,
{'$or': [
self.assertEqual(query, {
'$or': [
{'$and': [{'x': {'$gt': 0}},
{'$or': [{'y': True}, {'y': {'$exists': False}}]}]},
{'$and': [{'x': {'$lt': 100}},
{'$or': [{'y': False}, {'y': {'$exists': False}}]}]}
]}
)
]
})
self.assertEqual(2, TestDoc.objects(q1 | q2).count())
def test_multiple_occurence_in_field(self):
@@ -215,19 +212,19 @@ class QTest(unittest.TestCase):
BlogPost.drop_collection()
post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False)
post1 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 8), published=False)
post1.save()
post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True)
post2 = BlogPost(title='Test 2', publish_date=datetime.datetime(2010, 1, 15), published=True)
post2.save()
post3 = BlogPost(title='Test 3', published=True)
post3.save()
post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8))
post4 = BlogPost(title='Test 4', publish_date=datetime.datetime(2010, 1, 8))
post4.save()
post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15))
post5 = BlogPost(title='Test 1', publish_date=datetime.datetime(2010, 1, 15))
post5.save()
post6 = BlogPost(title='Test 1', published=False)
@@ -250,7 +247,7 @@ class QTest(unittest.TestCase):
self.assertTrue(all(obj.id in posts for obj in published_posts))
# Check Q object combination
date = datetime(2010, 1, 10)
date = datetime.datetime(2010, 1, 10)
q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True))
posts = [post.id for post in q]
@@ -273,8 +270,10 @@ class QTest(unittest.TestCase):
# Test invalid query objs
def wrong_query_objs():
self.Person.objects('user1')
def wrong_query_objs_filter():
self.Person.objects('user1')
self.assertRaises(InvalidQueryError, wrong_query_objs)
self.assertRaises(InvalidQueryError, wrong_query_objs_filter)
@@ -284,7 +283,6 @@ class QTest(unittest.TestCase):
person = self.Person(name='Guido van Rossum')
person.save()
import re
obj = self.Person.objects(Q(name=re.compile('^Gui'))).first()
self.assertEqual(obj, person)
obj = self.Person.objects(Q(name=re.compile('^gui'))).first()

View File

@@ -174,19 +174,9 @@ class ConnectionTest(unittest.TestCase):
c.mongoenginetest.system.users.remove({})
def test_connect_uri_without_db(self):
"""Ensure connect() method works properly with uri's without database_name
"""Ensure connect() method works properly if the URI doesn't
include a database name.
"""
c = connect(db='mongoenginetest', alias='admin')
c.admin.system.users.remove({})
c.mongoenginetest.system.users.remove({})
c.admin.add_user("admin", "password")
c.admin.authenticate("admin", "password")
c.mongoenginetest.add_user("username", "password")
if not IS_PYMONGO_3:
self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost')
connect("mongoenginetest", host='mongodb://localhost/')
conn = get_connection()
@@ -196,8 +186,31 @@ class ConnectionTest(unittest.TestCase):
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'mongoenginetest')
c.admin.system.users.remove({})
c.mongoenginetest.system.users.remove({})
def test_connect_uri_default_db(self):
"""Ensure connect() defaults to the right database name if
the URI and the database_name don't explicitly specify it.
"""
connect(host='mongodb://localhost/')
conn = get_connection()
self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))
db = get_db()
self.assertTrue(isinstance(db, pymongo.database.Database))
self.assertEqual(db.name, 'test')
def test_uri_without_credentials_doesnt_override_conn_settings(self):
"""Ensure connect() uses the username & password params if the URI
doesn't explicitly specify them.
"""
c = connect(host='mongodb://localhost/mongoenginetest',
username='user',
password='pass')
# OperationFailure means that mongoengine attempted authentication
# w/ the provided username/password and failed - that's the desired
# behavior. If the MongoDB URI would override the credentials
self.assertRaises(OperationFailure, get_db)
def test_connect_uri_with_authsource(self):
"""Ensure that the connect() method works well with

10
tox.ini
View File

@@ -1,6 +1,5 @@
[tox]
envlist = {py26,py27,py33,py34,py35,pypy,pypy3}-{mg27,mg28}
#envlist = {py26,py27,py33,py34,pypy,pypy3}-{mg27,mg28,mg30,mgdev}
envlist = {py26,py27,py33,py34,py35,pypy,pypy3}-{mg27,mg28},flake8
[testenv]
commands =
@@ -14,3 +13,10 @@ deps =
setenv =
PYTHON_EGG_CACHE = {envdir}/python-eggs
passenv = windir
[testenv:flake8]
deps =
flake8
flake8-import-order
commands =
flake8