Compare commits
38 Commits
| SHA1 |
| --- |
| af292b0ec2 |
| 1ead7f9b2b |
| 5c91877b69 |
| b1002dd4f9 |
| a4fe091a51 |
| 216217e2c6 |
| 799775b3a7 |
| ae0384df29 |
| 8f57279dc7 |
| e8dbd12f22 |
| ca230d28b4 |
| c96065b187 |
| 2abcf4764d |
| bb0b1e88ef |
| 63c9135184 |
| 7fac0ef961 |
| 5a2e268160 |
| a4e4e8f440 |
| b62ce947a6 |
| 9538662262 |
| 09d7ae4f80 |
| d7ded366c7 |
| 09c77973a0 |
| 22f3c70234 |
| 6527b1386f |
| baabf97acd |
| 97005aca66 |
| 6e8ea50c19 |
| 1fcd706e11 |
| 008bb19b0b |
| 023acab779 |
| 5d120ebca0 |
| f91b89f723 |
| 1181b75e16 |
| 5f00b4f923 |
| 4c31193b82 |
| 17fc9d1886 |
| d7285d43dd |
.install_mongodb_on_travis.sh (deleted, 24 lines)

```diff
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-sudo apt-get remove mongodb-org-server
-sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
-
-if [ "$MONGODB" = "3.4" ]; then
-    sudo apt-key adv --keyserver keyserver.ubuntu.com:80 --recv 0C49F3730359A14518585931BC711F9BA15703C6
-    echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list
-    sudo apt-get update
-    sudo apt-get install mongodb-org-server=3.4.17
-    # service should be started automatically
-elif [ "$MONGODB" = "3.6" ]; then
-    sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2930ADAE8CAF5059EE73BB4B58712A2291FA4AD5
-    echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.6 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.6.list
-    sudo apt-get update
-    sudo apt-get install mongodb-org-server=3.6.12
-    # service should be started automatically
-else
-    echo "Invalid MongoDB version, expected 2.6, 3.0, 3.2, 3.4 or 3.6."
-    exit 1
-fi;
-
-mkdir db
-1>db/logs mongod --dbpath=db &
```
.travis.yml (55 changed lines)
```diff
@@ -16,52 +16,59 @@
-#
-# Reminder: Update README.rst if you change MongoDB versions we test.
-
 language: python
 python:
 - 2.7
 - 3.5
 - 3.6
 - pypy
 
+dist: xenial
+
 env:
-- MONGODB=3.4 PYMONGO=3.x
+  global:
+  - MONGODB_3_4=3.4.17
+  - MONGODB_3_6=3.6.12
+  matrix:
+  - MONGODB=${MONGODB_3_4} PYMONGO=3.x
 
 matrix:
   # Finish the build as soon as one job fails
   fast_finish: true
 
   include:
   - python: 2.7
-    env: MONGODB=3.4 PYMONGO=3.4.x
+    env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x
   - python: 3.6
-    env: MONGODB=3.6 PYMONGO=3.x
+    env: MONGODB=${MONGODB_3_6} PYMONGO=3.x
+  - python: 3.7
+    env: MONGODB=${MONGODB_3_6} PYMONGO=3.x
 
-before_install:
-- bash .install_mongodb_on_travis.sh
-- sleep 20 # https://docs.travis-ci.com/user/database-setup/#mongodb-does-not-immediately-accept-connections
-- mongo --eval 'db.version();'
-
 install:
-- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
-  libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev
-  python-tk
-- travis_retry pip install --upgrade pip
-- travis_retry pip install coveralls
-- travis_retry pip install flake8 flake8-import-order
-- travis_retry pip install "tox"  # tox 3.11.0 has requirement virtualenv>=14.0.0
-- travis_retry pip install "virtualenv"  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
-- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
+# Install Mongo
+- wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz
+- tar xzf mongodb-linux-x86_64-${MONGODB}.tgz
+- ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version
+# Install python dependencies
+- pip install --upgrade pip
+- pip install coveralls
+- pip install flake8 flake8-import-order
+- pip install tox  # tox 3.11.0 has requirement virtualenv>=14.0.0
+- pip install virtualenv  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
+# Install the tox venv
+- tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
 
 # Cache dependencies installed via pip
 cache: pip
 
-# Run flake8 for py27
 before_script:
-- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi
+- mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data
+- ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork
+- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi  # Run flake8 for py27
+- mongo --eval 'db.version();'  # Make sure mongo is awake
 
 script:
 - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage
 
 # For now only submit coveralls for Python v2.7. Python v3.x currently shows
 # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
@@ -95,5 +102,5 @@ deploy:
   on:
     tags: true
     repo: MongoEngine/mongoengine
-    condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4)
+    condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4.17)
     python: 2.7
```
README.rst

```diff
@@ -47,7 +47,7 @@ Dependencies
 All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
 At the very least, you'll need these two packages to use MongoEngine:
 
-- pymongo>=3.5
+- pymongo>=3.4
 - six>=1.10.0
 
 If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
```
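Note (not part of the diff): for context on the two core dependencies listed above, here is a minimal usage sketch with only pymongo and six installed; the database name is illustrative and a mongod on the default localhost port is assumed.

```python
from mongoengine import Document, StringField, connect

connect('readme_sketch')  # illustrative database name


class Greeting(Document):
    text = StringField(required=True)


Greeting(text='hello').save()
print(Greeting.objects.count())  # 1
```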
benchmark.py (deleted, 207 lines)

@@ -1,207 +0,0 @@
```python
#!/usr/bin/env python

"""
Simple benchmark comparing PyMongo and MongoEngine.

Sample run on a mid 2015 MacBook Pro (commit b282511):

Benchmarking...
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo
2.58979988098
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
1.26657605171
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine
8.4351580143
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries without continual assign - MongoEngine
7.20191693306
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True
6.31104588509
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
6.07083487511
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
5.97704291344
----------------------------------------------------------------------------------------------------
Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
5.9111430645
"""

import timeit


def main():
    print("Benchmarking...")

    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
"""

    stmt = """
from pymongo import MongoClient
connection = MongoClient()

db = connection.timeit_test
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']['key' + str(j)] = 'value ' + str(j)

    noddy.save(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - Pymongo""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
from pymongo import MongoClient
from pymongo.write_concern import WriteConcern
connection = MongoClient()

db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0))
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.save(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('timeit_test')
connection.close()

from mongoengine import Document, DictField, connect
connect('timeit_test')

class Noddy(Document):
    fields = DictField()
"""

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    fields = {}
    for j in range(20):
        fields["key"+str(j)] = "value "+str(j)
    noddy.fields = fields
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries without continual assign - MongoEngine""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, validate=False, cascade=True)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(validate=False, write_concern={"w": 0})

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print("-" * 100)
    print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""")
    t = timeit.Timer(stmt=stmt, setup=setup)
    print(t.timeit(1))


if __name__ == "__main__":
    main()
```
benchmarks/test_basic_doc_ops.py (new file, 148 lines)

@@ -0,0 +1,148 @@
```python
from timeit import repeat

import mongoengine
from mongoengine import (BooleanField, Document, EmailField, EmbeddedDocument,
                         EmbeddedDocumentField, IntField, ListField,
                         StringField)

mongoengine.connect(db='mongoengine_benchmark_test')


def timeit(f, n=10000):
    return min(repeat(f, repeat=3, number=n)) / float(n)


def test_basic():
    class Book(Document):
        name = StringField()
        pages = IntField()
        tags = ListField(StringField())
        is_published = BooleanField()
        author_email = EmailField()

    Book.drop_collection()

    def init_book():
        return Book(
            name='Always be closing',
            pages=100,
            tags=['self-help', 'sales'],
            is_published=True,
            author_email='alec@example.com',
        )

    print('Doc initialization: %.3fus' % (timeit(init_book, 1000) * 10**6))

    b = init_book()
    print('Doc getattr: %.3fus' % (timeit(lambda: b.name, 10000) * 10**6))

    print(
        'Doc setattr: %.3fus' % (
            timeit(lambda: setattr(b, 'name', 'New name'), 10000) * 10**6
        )
    )

    print('Doc to mongo: %.3fus' % (timeit(b.to_mongo, 1000) * 10**6))

    print('Doc validation: %.3fus' % (timeit(b.validate, 1000) * 10**6))

    def save_book():
        b._mark_as_changed('name')
        b._mark_as_changed('tags')
        b.save()

    print('Save to database: %.3fus' % (timeit(save_book, 100) * 10**6))

    son = b.to_mongo()
    print(
        'Load from SON: %.3fus' % (
            timeit(lambda: Book._from_son(son), 1000) * 10**6
        )
    )

    print(
        'Load from database: %.3fus' % (
            timeit(lambda: Book.objects[0], 100) * 10**6
        )
    )

    def create_and_delete_book():
        b = init_book()
        b.save()
        b.delete()

    print(
        'Init + save to database + delete: %.3fms' % (
            timeit(create_and_delete_book, 10) * 10**3
        )
    )


def test_big_doc():
    class Contact(EmbeddedDocument):
        name = StringField()
        title = StringField()
        address = StringField()

    class Company(Document):
        name = StringField()
        contacts = ListField(EmbeddedDocumentField(Contact))

    Company.drop_collection()

    def init_company():
        return Company(
            name='MongoDB, Inc.',
            contacts=[
                Contact(
                    name='Contact %d' % x,
                    title='CEO',
                    address='Address %d' % x,
                )
                for x in range(1000)
            ]
        )

    company = init_company()
    print('Big doc to mongo: %.3fms' % (timeit(company.to_mongo, 100) * 10**3))

    print('Big doc validation: %.3fms' % (timeit(company.validate, 1000) * 10**3))

    company.save()

    def save_company():
        company._mark_as_changed('name')
        company._mark_as_changed('contacts')
        company.save()

    print('Save to database: %.3fms' % (timeit(save_company, 100) * 10**3))

    son = company.to_mongo()
    print(
        'Load from SON: %.3fms' % (
            timeit(lambda: Company._from_son(son), 100) * 10**3
        )
    )

    print(
        'Load from database: %.3fms' % (
            timeit(lambda: Company.objects[0], 100) * 10**3
        )
    )

    def create_and_delete_company():
        c = init_company()
        c.save()
        c.delete()

    print(
        'Init + save to database + delete: %.3fms' % (
            timeit(create_and_delete_company, 10) * 10**3
        )
    )


if __name__ == '__main__':
    test_basic()
    print('-' * 100)
    test_big_doc()
```
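Note (not part of the diff): the `timeit` helper above takes the best of three repetitions and divides by the iteration count, yielding seconds per call. A standalone sketch of the same helper with an arbitrary workload:

```python
from timeit import repeat


def timeit(f, n=10000):
    # Best of 3 runs, normalized by the per-run iteration count -> seconds per call.
    return min(repeat(f, repeat=3, number=n)) / float(n)


per_call = timeit(lambda: sum(range(100)), n=1000)
print('%.3fus per call' % (per_call * 10**6))
```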
benchmarks/test_inserts.py (new file, 154 lines)

@@ -0,0 +1,154 @@
```python
import timeit


def main():
    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
"""

    stmt = """
from pymongo import MongoClient
connection = MongoClient()

db = connection.mongoengine_benchmark_test
noddy = db.noddy

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert_one(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('PyMongo: Creating 10000 dictionaries.')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))

    stmt = """
from pymongo import MongoClient, WriteConcern
connection = MongoClient()

db = connection.mongoengine_benchmark_test
noddy = db.noddy.with_options(write_concern=WriteConcern(w=0))

for i in range(10000):
    example = {'fields': {}}
    for j in range(20):
        example['fields']["key"+str(j)] = "value "+str(j)

    noddy.insert_one(example)

myNoddys = noddy.find()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))

    setup = """
from pymongo import MongoClient
connection = MongoClient()
connection.drop_database('mongoengine_benchmark_test')
connection.close()

from mongoengine import Document, DictField, connect
connect("mongoengine_benchmark_test")

class Noddy(Document):
    fields = DictField()
"""

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries.')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    fields = {}
    for j in range(20):
        fields["key"+str(j)] = "value "+str(j)
    noddy.fields = fields
    noddy.save()

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries (using a single field assignment).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0})

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))

    stmt = """
for i in range(10000):
    noddy = Noddy()
    for j in range(20):
        noddy.fields["key"+str(j)] = "value "+str(j)
    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)

myNoddys = Noddy.objects()
[n for n in myNoddys]  # iterate
"""

    print('-' * 100)
    print('MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).')
    t = timeit.Timer(stmt=stmt, setup=setup)
    print('{}s'.format(t.timeit(1)))


if __name__ == "__main__":
    main()
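```

Note (not part of the diff): the `write_concern={"w": 0}` variants above run faster because unacknowledged writes return before the server confirms them. A quick standalone sketch of that behavior; collection and database names are illustrative, and a local mongod is assumed:

```python
from pymongo import MongoClient, WriteConcern

coll = MongoClient().sketch_db.noddy
fire_and_forget = coll.with_options(write_concern=WriteConcern(w=0))

result = fire_and_forget.insert_one({'fields': {'key0': 'value 0'}})
print(result.acknowledged)  # False: the client did not wait for confirmation
```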
docs/changelog.rst

```diff
@@ -1,3 +1,4 @@
+
 =========
 Changelog
 =========
@@ -6,6 +7,17 @@ Development
 ===========
 - (Fill this out as you fix issues and develop your features).
 
+Changes in 0.18.2
+=================
+- Replace some of the deprecated PyMongo v2.x methods with their v3.x equivalents #2097
+- Various code clarity and documentation improvements
+
+Changes in 0.18.1
+=================
+- Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields
+  instead of updating only the modified fields. This bug only occurs when using custom pk #2082
+- Add Python 3.7 in travis #2058
+
 Changes in 0.18.0
 =================
 - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2.
```
docs/guide/defining-documents.rst

```diff
@@ -714,11 +714,16 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. ::
 Shard keys
 ==========
 
-If your collection is sharded, then you need to specify the shard key as a tuple,
-using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`.
-This ensures that the shard key is sent with the query when calling the
-:meth:`~mongoengine.document.Document.save` or
-:meth:`~mongoengine.document.Document.update` method on an existing
+If your collection is sharded by multiple keys, then you can improve shard
+routing (and thus the performance of your application) by specifying the shard
+key, using the :attr:`shard_key` attribute of
+:attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple.
+
+This ensures that the full shard key is sent with the query when calling
+methods such as :meth:`~mongoengine.document.Document.save`,
+:meth:`~mongoengine.document.Document.update`,
+:meth:`~mongoengine.document.Document.modify`, or
+:meth:`~mongoengine.document.Document.delete` on an existing
 :class:`~mongoengine.Document` instance::
 
     class LogEntry(Document):
@@ -728,7 +733,8 @@ This ensures that the shard key is sent with the query when calling the
         data = StringField()
 
         meta = {
-            'shard_key': ('machine', 'timestamp',)
+            'shard_key': ('machine', 'timestamp'),
+            'indexes': ('machine', 'timestamp'),
        }
 
 .. _document-inheritance:
```
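Note (not part of the diff): to make the routing benefit concrete, here is a hedged sketch of what the declared shard key changes about the queries MongoEngine issues. `_object_key` is an internal property printed purely for illustration, the field list follows the guide's LogEntry example, and the exact output shape may differ:

```python
import datetime

from mongoengine import DateTimeField, Document, StringField, connect

connect('sharding_sketch')  # illustrative database name


class LogEntry(Document):
    machine = StringField()
    sensor = StringField()
    timestamp = DateTimeField()
    data = StringField()
    meta = {
        'shard_key': ('machine', 'timestamp'),
        'indexes': ('machine', 'timestamp'),
    }


entry = LogEntry(machine='host-1', timestamp=datetime.datetime(2019, 6, 1),
                 data='...').save()

# With shard_key declared, save()/update()/modify()/delete() route with the
# full shard key rather than the primary key alone:
print(entry._object_key)
# e.g. {'pk': ObjectId('...'), 'machine': 'host-1',
#       'timestamp': datetime.datetime(2019, 6, 1, 0, 0)}
```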
mongoengine/__init__.py

```diff
@@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) +
            list(signals.__all__) + list(errors.__all__))
 
 
-VERSION = (0, 18, 0)
+VERSION = (0, 18, 2)
 
 
 def get_version():
```
mongoengine/base/datastructures.py

```diff
@@ -11,18 +11,20 @@ __all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyRe
 
 
 def mark_as_changed_wrapper(parent_method):
-    """Decorators that ensures _mark_as_changed method gets called"""
+    """Decorator that ensures _mark_as_changed method gets called."""
     def wrapper(self, *args, **kwargs):
-        result = parent_method(self, *args, **kwargs)  # Can't use super() in the decorator
+        # Can't use super() in the decorator.
+        result = parent_method(self, *args, **kwargs)
        self._mark_as_changed()
         return result
     return wrapper
 
 
 def mark_key_as_changed_wrapper(parent_method):
-    """Decorators that ensures _mark_as_changed method gets called with the key argument"""
+    """Decorator that ensures _mark_as_changed method gets called with the key argument"""
     def wrapper(self, key, *args, **kwargs):
-        result = parent_method(self, key, *args, **kwargs)  # Can't use super() in the decorator
+        # Can't use super() in the decorator.
+        result = parent_method(self, key, *args, **kwargs)
         self._mark_as_changed(key)
         return result
     return wrapper
```
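Note (not part of the diff): a generic, self-contained sketch of the wrapping technique these decorators implement; the classes below are illustrative stand-ins, not the actual MongoEngine types. The parent method is passed in explicitly because `super()` is unavailable inside the decorator:

```python
def mark_as_changed_wrapper(parent_method):
    """Wrap an inherited mutator so every call also flags the owner as dirty."""
    def wrapper(self, *args, **kwargs):
        result = parent_method(self, *args, **kwargs)
        self._mark_as_changed()
        return result
    return wrapper


class TrackedList(list):
    def __init__(self, *args):
        super(TrackedList, self).__init__(*args)
        self.changed = False

    def _mark_as_changed(self):
        self.changed = True

    # Wrap the built-in mutator with the change-tracking hook.
    append = mark_as_changed_wrapper(list.append)


items = TrackedList([1, 2])
items.append(3)
print(items.changed)  # True
```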
mongoengine/base/document.py

```diff
@@ -25,6 +25,16 @@ NON_FIELD_ERRORS = '__all__'
 
 
 class BaseDocument(object):
+    # TODO simplify how `_changed_fields` is used.
+    # Currently, handling of `_changed_fields` seems unnecessarily convoluted:
+    # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's
+    #    not setting it to `[]` (or any other value) in `__init__`.
+    # 2. `EmbeddedDocument` sets `_changed_fields` to `[]` it its overloaded
+    #    `__init__`.
+    # 3. `Document` does NOT set `_changed_fields` upon initialization. The
+    #    field is primarily set via `_from_son` or `_clear_changed_fields`,
+    #    though there are also other methods that manipulate it.
+    # 4. The codebase is littered with `hasattr` calls for `_changed_fields`.
     __slots__ = ('_changed_fields', '_initialised', '_created', '_data',
                  '_dynamic_fields', '_auto_id_field', '_db_field_map',
                  '__weakref__')
@@ -35,13 +45,20 @@ class BaseDocument(object):
 
     def __init__(self, *args, **values):
         """
-        Initialise a document or embedded document
+        Initialise a document or an embedded document.
 
-        :param __auto_convert: Try and will cast python objects to Object types
-        :param values: A dictionary of values for the document
+        :param dict values: A dictionary of keys and values for the document.
+            It may contain additional reserved keywords, e.g. "__auto_convert".
+        :param bool __auto_convert: If True, supplied values will be converted
+            to Python-type values via each field's `to_python` method.
+        :param set __only_fields: A set of fields that have been loaded for
+            this document. Empty if all fields have been loaded.
+        :param bool _created: Indicates whether this is a brand new document
+            or whether it's already been persisted before. Defaults to true.
         """
         self._initialised = False
         self._created = True
 
         if args:
             # Combine positional arguments with named arguments.
             # We only want named arguments.
@@ -58,7 +75,6 @@ class BaseDocument(object):
 
         __auto_convert = values.pop('__auto_convert', True)
 
-        # 399: set default values only to fields loaded from DB
         __only_fields = set(values.pop('__only_fields', values))
 
         _created = values.pop('_created', True)
@@ -83,7 +99,9 @@ class BaseDocument(object):
 
         self._dynamic_fields = SON()
 
-        # Assign default values to instance
+        # Assign default values to the instance.
+        # We set default values only for fields loaded from DB. See
+        # https://github.com/mongoengine/mongoengine/issues/399 for more info.
         for key, field in iteritems(self._fields):
             if self._db_field_map.get(key, key) in __only_fields:
                 continue
@@ -125,6 +143,7 @@ class BaseDocument(object):
         # Flag initialised
         self._initialised = True
         self._created = _created
+
         signals.post_init.send(self.__class__, document=self)
 
     def __delattr__(self, *args, **kwargs):
@@ -665,9 +684,7 @@ class BaseDocument(object):
 
     @classmethod
     def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
-        """Create an instance of a Document (subclass) from a PyMongo
-        SON.
-        """
+        """Create an instance of a Document (subclass) from a PyMongo SON."""
         if not only_fields:
             only_fields = []
 
@@ -690,7 +707,6 @@ class BaseDocument(object):
         if class_name != cls._class_name:
             cls = get_document(class_name)
 
-        changed_fields = []
         errors_dict = {}
 
         fields = cls._fields
@@ -720,8 +736,13 @@ class BaseDocument(object):
         if cls.STRICT:
             data = {k: v for k, v in iteritems(data) if k in cls._fields}
 
-        obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
-        obj._changed_fields = changed_fields
+        obj = cls(
+            __auto_convert=False,
+            _created=created,
+            __only_fields=only_fields,
+            **data
+        )
+        obj._changed_fields = []
         if not _auto_dereference:
             obj._fields = fields
 
```
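Note (not part of the diff): a hedged sketch of what the `_from_son` change guarantees, namely that a freshly loaded document starts with no dirty fields. `_changed_fields` is internal and shown only for illustration; a connection to a local mongod is assumed:

```python
from mongoengine import Document, StringField, connect

connect('from_son_sketch')  # illustrative database name


class Item(Document):
    name = StringField()


Item.drop_collection()
Item(name='widget').save()

loaded = Item.objects.first()  # built via _from_son under the hood
print(loaded._changed_fields)  # [] -> a fresh load has nothing to save
loaded.name = 'gadget'
print(loaded._changed_fields)  # ['name']
```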
mongoengine/base/fields.py

```diff
@@ -128,10 +128,9 @@ class BaseField(object):
         return instance._data.get(self.name)
 
     def __set__(self, instance, value):
-        """Descriptor for assigning a value to a field in a document.
-        """
-        # If setting to None and there is a default
-        # Then set the value to the default value
+        """Descriptor for assigning a value to a field in a document."""
+        # If setting to None and there is a default value provided for this
+        # field, then set the value to the default value.
         if value is None:
             if self.null:
                 value = None
@@ -142,12 +141,16 @@ class BaseField(object):
 
         if instance._initialised:
             try:
-                if (self.name not in instance._data or
-                        instance._data[self.name] != value):
+                value_has_changed = (
+                    self.name not in instance._data or
+                    instance._data[self.name] != value
+                )
+                if value_has_changed:
                     instance._mark_as_changed(self.name)
             except Exception:
-                # Values cant be compared eg: naive and tz datetimes
-                # So mark it as changed
+                # Some values can't be compared and throw an error when we
+                # attempt to do so (e.g. tz-naive and tz-aware datetimes).
+                # Mark the field as changed in such cases.
                 instance._mark_as_changed(self.name)
 
         EmbeddedDocument = _import_class('EmbeddedDocument')
@@ -157,6 +160,7 @@ class BaseField(object):
             for v in value:
                 if isinstance(v, EmbeddedDocument):
                     v._instance = weakref.proxy(instance)
+
         instance._data[self.name] = value
 
     def error(self, message='', errors=None, field_name=None):
```
mongoengine/connection.py

```diff
@@ -2,9 +2,17 @@ from pymongo import MongoClient, ReadPreference, uri_parser
 from pymongo.database import _check_name
 import six
 
-__all__ = ['MongoEngineConnectionError', 'connect', 'disconnect', 'disconnect_all',
-           'register_connection', 'DEFAULT_CONNECTION_NAME', 'DEFAULT_DATABASE_NAME',
-           'get_db', 'get_connection']
+__all__ = [
+    'DEFAULT_CONNECTION_NAME',
+    'DEFAULT_DATABASE_NAME',
+    'MongoEngineConnectionError',
+    'connect',
+    'disconnect',
+    'disconnect_all',
+    'get_connection',
+    'get_db',
+    'register_connection',
+]
 
 
 DEFAULT_CONNECTION_NAME = 'default'
@@ -263,17 +271,17 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
     else:
         connection_class = MongoClient
 
-    # Re-use existing connection if one is suitable
+    # Re-use existing connection if one is suitable.
     existing_connection = _find_existing_connection(raw_conn_settings)
 
     # If an existing connection was found, assign it to the new alias
     if existing_connection:
-        _connections[alias] = existing_connection
+        connection = existing_connection
     else:
-        _connections[alias] = _create_connection(alias=alias,
-                                                 connection_class=connection_class,
-                                                 **conn_settings)
-
+        connection = _create_connection(
+            alias=alias,
+            connection_class=connection_class,
+            **conn_settings
+        )
+    _connections[alias] = connection
     return _connections[alias]
 
 
@@ -359,8 +367,11 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
         new_conn_settings = _get_connection_settings(db, **kwargs)
 
         if new_conn_settings != prev_conn_setting:
-            raise MongoEngineConnectionError(
-                'A different connection with alias `%s` was already registered. Use disconnect() first' % alias)
+            err_msg = (
+                u'A different connection with alias `{}` was already '
+                u'registered. Use disconnect() first'
+            ).format(alias)
+            raise MongoEngineConnectionError(err_msg)
     else:
         register_connection(alias, db, **kwargs)
```
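Note (not part of the diff): the behavior exercised by the `connect()` change above, sketched standalone. Database names are illustrative; since `MongoClient` connects lazily, no running server is strictly required to observe the error:

```python
from mongoengine import connect, disconnect
from mongoengine.connection import MongoEngineConnectionError

connect('db_a', alias='default')
try:
    connect('db_b', alias='default')  # same alias, different settings
except MongoEngineConnectionError as exc:
    print(exc)  # A different connection with alias `default` was already registered...

disconnect(alias='default')
connect('db_b', alias='default')      # fine after an explicit disconnect()
```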
mongoengine/document.py

```diff
@@ -182,8 +182,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
 
     @classmethod
     def _get_collection(cls):
-        """Return the corresponding PyMongo collection of this document.
-        Upon the first call, it will ensure that indexes gets created. The returned collection then gets cached
+        """Return the PyMongo collection corresponding to this document.
+
+        Upon first call, this method:
+        1. Initializes a :class:`~pymongo.collection.Collection` corresponding
+           to this document.
+        2. Creates indexes defined in this document's :attr:`meta` dictionary.
+           This happens only if `auto_create_index` is True.
         """
         if not hasattr(cls, '_collection') or cls._collection is None:
             # Get the collection, either capped or regular.
@@ -366,7 +371,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
         if write_concern is None:
             write_concern = {}
 
-        doc_id = self.to_mongo(fields=['id'])
+        doc_id = self.to_mongo(fields=[self._meta['id_field']])
         created = ('_id' not in doc_id or self._created or force_insert)
 
         signals.pre_save_post_validation.send(self.__class__, document=self,
@@ -539,7 +544,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
 
     @property
     def _qs(self):
-        """Return the queryset to use for updating / reloading / deletions."""
+        """Return the default queryset corresponding to this document."""
         if not hasattr(self, '__objects'):
             self.__objects = QuerySet(self, self._get_collection())
         return self.__objects
@@ -547,9 +552,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
     @property
     def _object_key(self):
         """Get the query dict that can be used to fetch this object from
-        the database. Most of the time it's a simple PK lookup, but in
-        case of a sharded collection with a compound shard key, it can
-        contain a more complex query.
+        the database.
+
+        Most of the time the dict is a simple PK lookup, but in case of
+        a sharded collection with a compound shard key, it can contain a more
+        complex query.
         """
         select_dict = {'pk': self.pk}
         shard_key = self.__class__._meta.get('shard_key', tuple())
```
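Note (not part of the diff): a sketch of the `_get_collection()` behavior the new docstring describes. The first collection access creates the indexes declared in `meta` unless `auto_create_index` is disabled; `_get_collection` is internal and names are illustrative, with a local mongod assumed:

```python
from mongoengine import Document, StringField, connect

connect('collection_sketch')  # illustrative database name


class Event(Document):
    kind = StringField()
    meta = {
        'indexes': ['kind'],
        # 'auto_create_index': False,  # opt out if you manage indexes yourself
    }


coll = Event._get_collection()  # first call -> declared indexes get created
print([ix['key'] for ix in coll.list_indexes()])
```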
mongoengine/fields.py

```diff
@@ -10,6 +10,7 @@ from operator import itemgetter
 from bson import Binary, DBRef, ObjectId, SON
 import gridfs
 import pymongo
+from pymongo import ReturnDocument
 import six
 from six import iteritems
 
@@ -1964,10 +1965,12 @@ class SequenceField(BaseField):
         sequence_name = self.get_sequence_name()
         sequence_id = '%s.%s' % (sequence_name, self.name)
         collection = get_db(alias=self.db_alias)[self.collection_name]
-        counter = collection.find_and_modify(query={'_id': sequence_id},
-                                             update={'$inc': {'next': 1}},
-                                             new=True,
-                                             upsert=True)
+
+        counter = collection.find_one_and_update(
+            filter={'_id': sequence_id},
+            update={'$inc': {'next': 1}},
+            return_document=ReturnDocument.AFTER,
+            upsert=True)
         return self.value_decorator(counter['next'])
 
     def set_next_value(self, value):
@@ -1975,10 +1978,11 @@ class SequenceField(BaseField):
         sequence_name = self.get_sequence_name()
         sequence_id = "%s.%s" % (sequence_name, self.name)
         collection = get_db(alias=self.db_alias)[self.collection_name]
-        counter = collection.find_and_modify(query={"_id": sequence_id},
-                                             update={"$set": {"next": value}},
-                                             new=True,
-                                             upsert=True)
+        counter = collection.find_one_and_update(
+            filter={"_id": sequence_id},
+            update={"$set": {"next": value}},
+            return_document=ReturnDocument.AFTER,
+            upsert=True)
         return self.value_decorator(counter['next'])
 
     def get_next_value(self):
```
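Note (not part of the diff): `find_and_modify()` was deprecated in PyMongo 3.0; the replacement shown above is `find_one_and_update()`, where `ReturnDocument.AFTER` stands in for the old `new=True` and returns the post-update document. A standalone sketch of the same atomic counter pattern; collection and id names are illustrative:

```python
from pymongo import MongoClient, ReturnDocument

counters = MongoClient().sketch_db['mongoengine.counters']  # illustrative names

counter = counters.find_one_and_update(
    filter={'_id': 'shopping.item_id'},
    update={'$inc': {'next': 1}},
    return_document=ReturnDocument.AFTER,  # old API: new=True
    upsert=True,
)
print(counter['next'])  # 1 on the first call, then 2, 3, ...
```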
mongoengine/queryset/base.py

```diff
@@ -73,6 +73,7 @@ class BaseQuerySet(object):
             self._initial_query = {
                 '_cls': {'$in': self._document._subclasses}}
             self._loaded_fields = QueryFieldList(always_include=['_cls'])
+
         self._cursor_obj = None
         self._limit = None
         self._skip = None
@@ -480,9 +481,10 @@ class BaseQuerySet(object):
                 write_concern=write_concern,
                 **{'pull_all__%s' % field_name: self})
 
-        result = queryset._collection.remove(queryset._query, **write_concern)
-        if result:
-            return result.get('n')
+        with set_write_concern(queryset._collection, write_concern) as collection:
+            result = collection.delete_many(queryset._query)
+            if result.acknowledged:
+                return result.deleted_count
 
     def update(self, upsert=False, multi=True, write_concern=None,
                full_result=False, **update):
@@ -707,8 +709,9 @@ class BaseQuerySet(object):
         return queryset
 
     def no_sub_classes(self):
-        """
-        Only return instances of this document and not any inherited documents
+        """Filter for only the instances of this specific document.
+
+        Do NOT return any inherited documents.
         """
         if self._document._meta.get('allow_inheritance') is True:
             self._initial_query = {'_cls': self._document._class_name}
@@ -1009,13 +1012,15 @@ class BaseQuerySet(object):
         return queryset
 
     def order_by(self, *keys):
-        """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The
-        order may be specified by prepending each of the keys by a + or a -.
-        Ascending order is assumed. If no keys are passed, existing ordering
-        is cleared instead.
+        """Order the :class:`~mongoengine.queryset.QuerySet` by the given keys.
+
+        The order may be specified by prepending each of the keys by a "+" or
+        a "-". Ascending order is assumed if there's no prefix.
+
+        If no keys are passed, existing ordering is cleared instead.
 
         :param keys: fields to order the query results by; keys may be
-            prefixed with **+** or **-** to determine the ordering direction
+            prefixed with "+" or a "-" to determine the ordering direction.
         """
         queryset = self.clone()
 
```
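Note (not part of the diff): the `delete()` change above swaps the removed PyMongo `remove()` call for `delete_many()`; the `result.acknowledged` guard matters because an unacknowledged write (w=0) carries no `deleted_count`, so the method falls through and returns None. The diff uses MongoEngine's internal `set_write_concern` helper; this standalone sketch gets the same effect through PyMongo's public `with_options` (names illustrative, local mongod assumed):

```python
from pymongo import MongoClient, WriteConcern

coll = MongoClient().sketch_db.people
coll.insert_many([{'name': 'A'}, {'name': 'B'}])

result = coll.delete_many({})  # acknowledged by default
print(result.acknowledged)     # True
print(result.deleted_count)    # 2

unack = coll.with_options(write_concern=WriteConcern(w=0)).delete_many({})
print(unack.acknowledged)      # False -> no deleted_count is available,
                               # hence the return-None behavior above
```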
tests/document/instance.py

```diff
@@ -1,33 +1,30 @@
 # -*- coding: utf-8 -*-
-import bson
 import os
 import pickle
 import unittest
 import uuid
 import warnings
 import weakref
 from datetime import datetime
 
+import bson
 from bson import DBRef, ObjectId
 from pymongo.errors import DuplicateKeyError
+from six import iteritems
 
-from mongoengine.mongodb_support import get_mongodb_version, MONGODB_36, MONGODB_34
-from mongoengine.pymongo_support import list_collection_names
-from tests import fixtures
-from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
-                            PickleDynamicEmbedded, PickleDynamicTest)
-from tests.utils import MongoDBTestCase, get_as_pymongo
-
 from mongoengine import *
-from mongoengine.base import get_document, _document_registry
-from mongoengine.connection import get_db
-from mongoengine.errors import (NotRegistered, InvalidDocumentError,
-                                InvalidQueryError, NotUniqueError,
-                                FieldDoesNotExist, SaveConditionError)
-from mongoengine.queryset import NULLIFY, Q
-from mongoengine.context_managers import switch_db, query_counter
 from mongoengine import signals
+from mongoengine.base import _document_registry, get_document
+from mongoengine.connection import get_db
+from mongoengine.context_managers import query_counter, switch_db
+from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError,
+                                InvalidQueryError, NotRegistered, NotUniqueError, SaveConditionError)
+from mongoengine.mongodb_support import MONGODB_34, MONGODB_36, get_mongodb_version
+from mongoengine.pymongo_support import list_collection_names
+from mongoengine.queryset import NULLIFY, Q
+from tests import fixtures
+from tests.fixtures import (PickleDynamicEmbedded, PickleDynamicTest,
+                            PickleEmbedded, PickleSignalsTest, PickleTest)
+from tests.utils import MongoDBTestCase, get_as_pymongo
 
 TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
                                '../fields/mongoengine.png')
@@ -336,41 +333,36 @@ class InstanceTest(MongoDBTestCase):
         self.assertEqual(User._fields['username'].db_field, '_id')
         self.assertEqual(User._meta['id_field'], 'username')
 
-        # test no primary key field
-        self.assertRaises(ValidationError, User(name='test').save)
-
-        # define a subclass with a different primary key field than the
-        # parent
-        with self.assertRaises(ValueError):
-            class EmailUser(User):
-                email = StringField(primary_key=True)
-
-        class EmailUser(User):
-            email = StringField()
-
-        user = User(username='test', name='test user')
-        user.save()
-
-        user_obj = User.objects.first()
-        self.assertEqual(user_obj.id, 'test')
-        self.assertEqual(user_obj.pk, 'test')
-
-        user_son = User.objects._collection.find_one()
-        self.assertEqual(user_son['_id'], 'test')
-        self.assertNotIn('username', user_son['_id'])
-
-        User.drop_collection()
-
-        user = User(pk='mongo', name='mongo user')
-        user.save()
-
-        user_obj = User.objects.first()
-        self.assertEqual(user_obj.id, 'mongo')
-        self.assertEqual(user_obj.pk, 'mongo')
-
-        user_son = User.objects._collection.find_one()
-        self.assertEqual(user_son['_id'], 'mongo')
-        self.assertNotIn('username', user_son['_id'])
+        User.objects.create(username='test', name='test user')
+        user = User.objects.first()
+        self.assertEqual(user.id, 'test')
+        self.assertEqual(user.pk, 'test')
+        user_dict = User.objects._collection.find_one()
+        self.assertEqual(user_dict['_id'], 'test')
+
+    def test_change_custom_id_field_in_subclass(self):
+        """Subclasses cannot override which field is the primary key."""
+        class User(Document):
+            username = StringField(primary_key=True)
+            name = StringField()
+            meta = {'allow_inheritance': True}
+
+        with self.assertRaises(ValueError) as e:
+            class EmailUser(User):
+                email = StringField(primary_key=True)
+        exc = e.exception
+        self.assertEqual(str(exc), 'Cannot override primary key field')
+
+    def test_custom_id_field_is_required(self):
+        """Ensure the custom primary key field is required."""
+        class User(Document):
+            username = StringField(primary_key=True)
+            name = StringField()
+
+        with self.assertRaises(ValidationError) as e:
+            User(name='test').save()
+        exc = e.exception
+        self.assertTrue("Field is required: ['username']" in str(exc))
 
     def test_document_not_registered(self):
         class Place(Document):
@@ -1260,6 +1252,50 @@ class InstanceTest(MongoDBTestCase):
         self.assertTrue(w1.toggle)
         self.assertEqual(w1.count, 3)
 
+    def test_save_update_selectively(self):
+        class WildBoy(Document):
+            age = IntField()
+            name = StringField()
+
+        WildBoy.drop_collection()
+
+        WildBoy(age=12, name='John').save()
+
+        boy1 = WildBoy.objects().first()
+        boy2 = WildBoy.objects().first()
+
+        boy1.age = 99
+        boy1.save()
+        boy2.name = 'Bob'
+        boy2.save()
+
+        fresh_boy = WildBoy.objects().first()
+        self.assertEqual(fresh_boy.age, 99)
+        self.assertEqual(fresh_boy.name, 'Bob')
+
+    def test_save_update_selectively_with_custom_pk(self):
+        # Prevents regression of #2082
+        class WildBoy(Document):
+            pk_id = StringField(primary_key=True)
+            age = IntField()
+            name = StringField()
+
+        WildBoy.drop_collection()
+
+        WildBoy(pk_id='A', age=12, name='John').save()
+
+        boy1 = WildBoy.objects().first()
+        boy2 = WildBoy.objects().first()
+
+        boy1.age = 99
+        boy1.save()
+        boy2.name = 'Bob'
+        boy2.save()
+
+        fresh_boy = WildBoy.objects().first()
+        self.assertEqual(fresh_boy.age, 99)
+        self.assertEqual(fresh_boy.name, 'Bob')
+
     def test_update(self):
         """Ensure that an existing document is updated instead of be
         overwritten.
@@ -1542,7 +1578,7 @@ class InstanceTest(MongoDBTestCase):
         self.assertEqual(person.age, 21)
         self.assertEqual(person.active, False)
 
-    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop(self):
+    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc(self):
         # Refers to Issue #1685
         class EmbeddedChildModel(EmbeddedDocument):
             id = DictField(primary_key=True)
@@ -1552,9 +1588,11 @@ class InstanceTest(MongoDBTestCase):
                                          EmbeddedChildModel)
 
         emb = EmbeddedChildModel(id={'1': [1]})
-        ParentModel(children=emb)._get_changed_fields()
+        changed_fields = ParentModel(child=emb)._get_changed_fields()
+        self.assertEqual(changed_fields, [])
 
-    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop(self):
+    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc(self):
         # Refers to Issue #1685
         class User(Document):
             id = IntField(primary_key=True)
             name = StringField()
```
tests/queryset/queryset.py

```diff
@@ -7,7 +7,6 @@ from decimal import Decimal
 
 from bson import DBRef, ObjectId
 import pymongo
-from pymongo.errors import ConfigurationError
 from pymongo.read_preferences import ReadPreference
 from pymongo.results import UpdateResult
 import six
@@ -17,7 +16,7 @@ from mongoengine import *
 from mongoengine.connection import get_connection, get_db
 from mongoengine.context_managers import query_counter, switch_db
 from mongoengine.errors import InvalidQueryError
-from mongoengine.mongodb_support import get_mongodb_version, MONGODB_36
+from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version
 from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned,
                                   QuerySet, QuerySetManager, queryset_manager)
 
@@ -832,8 +831,6 @@ class QuerySetTest(unittest.TestCase):
 
     def test_bulk_insert(self):
         """Ensure that bulk insert works"""
-        MONGO_VER = self.mongodb_version
-
         class Comment(EmbeddedDocument):
             name = StringField()
 
@@ -847,10 +844,6 @@ class QuerySetTest(unittest.TestCase):
 
         Blog.drop_collection()
 
-        # get MongoDB version info
-        connection = get_connection()
-        info = connection.test.command('buildInfo')
-
         # Recreates the collection
         self.assertEqual(0, Blog.objects.count())
 
@@ -1864,8 +1857,8 @@ class QuerySetTest(unittest.TestCase):
         self.Person.objects()[:1].delete()
         self.assertEqual(1, BlogPost.objects.count())
 
-    def test_limit_with_write_concern_0(self):
-
+    def test_delete_edge_case_with_write_concern_0_return_None(self):
+        """Return None when write is unacknowledged"""
         p1 = self.Person(name="User Z", age=20).save()
         del_result = p1.delete(w=0)
         self.assertEqual(None, del_result)
@@ -5386,6 +5379,13 @@ class QuerySetTest(unittest.TestCase):
             {'_id': None, 'avg': 29, 'total': 2}
         ])
 
+        data = Person.objects().aggregate({'$match': {'name': 'Isabella Luanna'}})
+        self.assertEqual(list(data), [
+            {u'_id': p1.pk,
+             u'age': 16,
+             u'name': u'Isabella Luanna'}]
+        )
+
     def test_queryset_aggregation_with_skip(self):
         class Person(Document):
             name = StringField()
```