Compare commits

29 Commits (SHA1):

e8dbd12f22
ca230d28b4
c96065b187
2abcf4764d
bb0b1e88ef
63c9135184
7fac0ef961
5a2e268160
a4e4e8f440
b62ce947a6
9538662262
09d7ae4f80
d7ded366c7
09c77973a0
22f3c70234
6527b1386f
baabf97acd
97005aca66
6e8ea50c19
1fcd706e11
008bb19b0b
023acab779
5d120ebca0
f91b89f723
1181b75e16
5f00b4f923
4c31193b82
17fc9d1886
d7285d43dd
.install_mongodb_on_travis.sh (deleted)

@@ -1,24 +0,0 @@
-#!/bin/bash
-
-sudo apt-get remove mongodb-org-server
-sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
-
-if [ "$MONGODB" = "3.4" ]; then
-    sudo apt-key adv --keyserver keyserver.ubuntu.com:80 --recv 0C49F3730359A14518585931BC711F9BA15703C6
-    echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list
-    sudo apt-get update
-    sudo apt-get install mongodb-org-server=3.4.17
-    # service should be started automatically
-elif [ "$MONGODB" = "3.6" ]; then
-    sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2930ADAE8CAF5059EE73BB4B58712A2291FA4AD5
-    echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.6 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.6.list
-    sudo apt-get update
-    sudo apt-get install mongodb-org-server=3.6.12
-    # service should be started automatically
-else
-    echo "Invalid MongoDB version, expected 2.6, 3.0, 3.2, 3.4 or 3.6."
-    exit 1
-fi;
-
-mkdir db
-1>db/logs mongod --dbpath=db &
.travis.yml (53 changed lines)

@@ -16,52 +16,59 @@
 #
 # Reminder: Update README.rst if you change MongoDB versions we test.

-language: python
 
+language: python
 python:
 - 2.7
 - 3.5
 - 3.6
 - pypy

+dist: xenial
+
 env:
-  - MONGODB=3.4 PYMONGO=3.x
+  global:
+    - MONGODB_3_4=3.4.17
+    - MONGODB_3_6=3.6.12
+  matrix:
+    - MONGODB=${MONGODB_3_4} PYMONGO=3.x

 matrix:
   # Finish the build as soon as one job fails
   fast_finish: true

   include:
   - python: 2.7
-    env: MONGODB=3.4 PYMONGO=3.4.x
+    env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x
   - python: 3.6
-    env: MONGODB=3.6 PYMONGO=3.x
+    env: MONGODB=${MONGODB_3_6} PYMONGO=3.x
+  - python: 3.7
+    env: MONGODB=${MONGODB_3_6} PYMONGO=3.x

-before_install:
-- bash .install_mongodb_on_travis.sh
-- sleep 20 # https://docs.travis-ci.com/user/database-setup/#mongodb-does-not-immediately-accept-connections
-- mongo --eval 'db.version();'
-
 install:
-- sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev
-  libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev
-  python-tk
-- travis_retry pip install --upgrade pip
-- travis_retry pip install coveralls
-- travis_retry pip install flake8 flake8-import-order
-- travis_retry pip install "tox" # tox 3.11.0 has requirement virtualenv>=14.0.0
-- travis_retry pip install "virtualenv" # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
-- travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test
+  # Install Mongo
+  - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz
+  - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz
+  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version
+  # Install python dependencies
+  - pip install --upgrade pip
+  - pip install coveralls
+  - pip install flake8 flake8-import-order
+  - pip install tox # tox 3.11.0 has requirement virtualenv>=14.0.0
+  - pip install virtualenv # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32)
+  # Install the tox venv
+  - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test

-# Cache dependencies installed via pip
-cache: pip
-
-# Run flake8 for py27
 before_script:
-- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi
+  - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data
+  - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork
+  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi # Run flake8 for py27
+  - mongo --eval 'db.version();' # Make sure mongo is awake

 script:
 - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage

 # For now only submit coveralls for Python v2.7. Python v3.x currently shows
 # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible
@@ -47,7 +47,7 @@ Dependencies
 All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_.
 At the very least, you'll need these two packages to use MongoEngine:

-- pymongo>=3.5
+- pymongo>=3.4
 - six>=1.10.0

 If you utilize a ``DateTimeField``, you might also use a more flexible date parser:
benchmark.py (deleted, 207 lines)

@@ -1,207 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Simple benchmark comparing PyMongo and MongoEngine.
-
-Sample run on a mid 2015 MacBook Pro (commit b282511):
-
-Benchmarking...
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - Pymongo
-2.58979988098
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - Pymongo write_concern={"w": 0}
-1.26657605171
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - MongoEngine
-8.4351580143
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries without continual assign - MongoEngine
-7.20191693306
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True
-6.31104588509
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True
-6.07083487511
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False
-5.97704291344
-----------------------------------------------------------------------------------------------------
-Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False
-5.9111430645
-"""
-
-import timeit
-
-
-def main():
-    print("Benchmarking...")
-
-    setup = """
-from pymongo import MongoClient
-connection = MongoClient()
-connection.drop_database('timeit_test')
-"""
-
-    stmt = """
-from pymongo import MongoClient
-connection = MongoClient()
-
-db = connection.timeit_test
-noddy = db.noddy
-
-for i in range(10000):
-    example = {'fields': {}}
-    for j in range(20):
-        example['fields']['key' + str(j)] = 'value ' + str(j)
-
-    noddy.save(example)
-
-myNoddys = noddy.find()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("""Creating 10000 dictionaries - Pymongo""")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(t.timeit(1))
-
-    stmt = """
-from pymongo import MongoClient
-from pymongo.write_concern import WriteConcern
-connection = MongoClient()
-
-db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0))
-noddy = db.noddy
-
-for i in range(10000):
-    example = {'fields': {}}
-    for j in range(20):
-        example['fields']["key"+str(j)] = "value "+str(j)
-
-    noddy.save(example)
-
-myNoddys = noddy.find()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(t.timeit(1))
-
-    setup = """
-from pymongo import MongoClient
-connection = MongoClient()
-connection.drop_database('timeit_test')
-connection.close()
-
-from mongoengine import Document, DictField, connect
-connect('timeit_test')
-
-class Noddy(Document):
-    fields = DictField()
-"""
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save()
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("""Creating 10000 dictionaries - MongoEngine""")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(t.timeit(1))
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    fields = {}
-    for j in range(20):
-        fields["key"+str(j)] = "value "+str(j)
-    noddy.fields = fields
-    noddy.save()
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("""Creating 10000 dictionaries without continual assign - MongoEngine""")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(t.timeit(1))
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(write_concern={"w": 0}, cascade=True)
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(t.timeit(1))
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(write_concern={"w": 0}, validate=False, cascade=True)
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(t.timeit(1))
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(validate=False, write_concern={"w": 0})
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(t.timeit(1))
-
-    stmt = """
-for i in range(10000):
-    noddy = Noddy()
-    for j in range(20):
-        noddy.fields["key"+str(j)] = "value "+str(j)
-    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)
-
-myNoddys = Noddy.objects()
-[n for n in myNoddys] # iterate
-"""
-
-    print("-" * 100)
-    print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""")
-    t = timeit.Timer(stmt=stmt, setup=setup)
-    print(t.timeit(1))
-
-
-if __name__ == "__main__":
-    main()
benchmarks/test_basic_doc_ops.py (new file, 148 lines)

@@ -0,0 +1,148 @@
+from timeit import repeat
+
+import mongoengine
+from mongoengine import (BooleanField, Document, EmailField, EmbeddedDocument,
+                         EmbeddedDocumentField, IntField, ListField,
+                         StringField)
+
+mongoengine.connect(db='mongoengine_benchmark_test')
+
+
+def timeit(f, n=10000):
+    return min(repeat(f, repeat=3, number=n)) / float(n)
+
+
+def test_basic():
+    class Book(Document):
+        name = StringField()
+        pages = IntField()
+        tags = ListField(StringField())
+        is_published = BooleanField()
+        author_email = EmailField()
+
+    Book.drop_collection()
+
+    def init_book():
+        return Book(
+            name='Always be closing',
+            pages=100,
+            tags=['self-help', 'sales'],
+            is_published=True,
+            author_email='alec@example.com',
+        )
+
+    print('Doc initialization: %.3fus' % (timeit(init_book, 1000) * 10**6))
+
+    b = init_book()
+    print('Doc getattr: %.3fus' % (timeit(lambda: b.name, 10000) * 10**6))
+
+    print(
+        'Doc setattr: %.3fus' % (
+            timeit(lambda: setattr(b, 'name', 'New name'), 10000) * 10**6
+        )
+    )
+
+    print('Doc to mongo: %.3fus' % (timeit(b.to_mongo, 1000) * 10**6))
+
+    print('Doc validation: %.3fus' % (timeit(b.validate, 1000) * 10**6))
+
+    def save_book():
+        b._mark_as_changed('name')
+        b._mark_as_changed('tags')
+        b.save()
+
+    print('Save to database: %.3fus' % (timeit(save_book, 100) * 10**6))
+
+    son = b.to_mongo()
+    print(
+        'Load from SON: %.3fus' % (
+            timeit(lambda: Book._from_son(son), 1000) * 10**6
+        )
+    )
+
+    print(
+        'Load from database: %.3fus' % (
+            timeit(lambda: Book.objects[0], 100) * 10**6
+        )
+    )
+
+    def create_and_delete_book():
+        b = init_book()
+        b.save()
+        b.delete()
+
+    print(
+        'Init + save to database + delete: %.3fms' % (
+            timeit(create_and_delete_book, 10) * 10**3
+        )
+    )
+
+
+def test_big_doc():
+    class Contact(EmbeddedDocument):
+        name = StringField()
+        title = StringField()
+        address = StringField()
+
+    class Company(Document):
+        name = StringField()
+        contacts = ListField(EmbeddedDocumentField(Contact))
+
+    Company.drop_collection()
+
+    def init_company():
+        return Company(
+            name='MongoDB, Inc.',
+            contacts=[
+                Contact(
+                    name='Contact %d' % x,
+                    title='CEO',
+                    address='Address %d' % x,
+                )
+                for x in range(1000)
+            ]
+        )
+
+    company = init_company()
+    print('Big doc to mongo: %.3fms' % (timeit(company.to_mongo, 100) * 10**3))
+
+    print('Big doc validation: %.3fms' % (timeit(company.validate, 1000) * 10**3))
+
+    company.save()
+
+    def save_company():
+        company._mark_as_changed('name')
+        company._mark_as_changed('contacts')
+        company.save()
+
+    print('Save to database: %.3fms' % (timeit(save_company, 100) * 10**3))
+
+    son = company.to_mongo()
+    print(
+        'Load from SON: %.3fms' % (
+            timeit(lambda: Company._from_son(son), 100) * 10**3
+        )
+    )
+
+    print(
+        'Load from database: %.3fms' % (
+            timeit(lambda: Company.objects[0], 100) * 10**3
+        )
+    )
+
+    def create_and_delete_company():
+        c = init_company()
+        c.save()
+        c.delete()
+
+    print(
+        'Init + save to database + delete: %.3fms' % (
+            timeit(create_and_delete_company, 10) * 10**3
+        )
+    )
+
+
+if __name__ == '__main__':
+    test_basic()
+    print('-' * 100)
+    test_big_doc()
benchmarks/test_inserts.py (new file, 154 lines)

@@ -0,0 +1,154 @@
+import timeit
+
+
+def main():
+    setup = """
+from pymongo import MongoClient
+connection = MongoClient()
+connection.drop_database('mongoengine_benchmark_test')
+"""
+
+    stmt = """
+from pymongo import MongoClient
+connection = MongoClient()
+
+db = connection.mongoengine_benchmark_test
+noddy = db.noddy
+
+for i in range(10000):
+    example = {'fields': {}}
+    for j in range(20):
+        example['fields']["key"+str(j)] = "value "+str(j)
+
+    noddy.insert_one(example)
+
+myNoddys = noddy.find()
+[n for n in myNoddys] # iterate
+"""
+
+    print('-' * 100)
+    print('PyMongo: Creating 10000 dictionaries.')
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print('{}s'.format(t.timeit(1)))
+
+    stmt = """
+from pymongo import MongoClient, WriteConcern
+connection = MongoClient()
+
+db = connection.mongoengine_benchmark_test
+noddy = db.noddy.with_options(write_concern=WriteConcern(w=0))
+
+for i in range(10000):
+    example = {'fields': {}}
+    for j in range(20):
+        example['fields']["key"+str(j)] = "value "+str(j)
+
+    noddy.insert_one(example)
+
+myNoddys = noddy.find()
+[n for n in myNoddys] # iterate
+"""
+
+    print('-' * 100)
+    print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).')
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print('{}s'.format(t.timeit(1)))
+
+    setup = """
+from pymongo import MongoClient
+connection = MongoClient()
+connection.drop_database('mongoengine_benchmark_test')
+connection.close()
+
+from mongoengine import Document, DictField, connect
+connect("mongoengine_benchmark_test")
+
+class Noddy(Document):
+    fields = DictField()
+"""
+
+    stmt = """
+for i in range(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save()
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print('-' * 100)
+    print('MongoEngine: Creating 10000 dictionaries.')
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print('{}s'.format(t.timeit(1)))
+
+    stmt = """
+for i in range(10000):
+    noddy = Noddy()
+    fields = {}
+    for j in range(20):
+        fields["key"+str(j)] = "value "+str(j)
+    noddy.fields = fields
+    noddy.save()
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print('-' * 100)
+    print('MongoEngine: Creating 10000 dictionaries (using a single field assignment).')
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print('{}s'.format(t.timeit(1)))
+
+    stmt = """
+for i in range(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(write_concern={"w": 0})
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print('-' * 100)
+    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).')
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print('{}s'.format(t.timeit(1)))
+
+    stmt = """
+for i in range(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(write_concern={"w": 0}, validate=False)
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print('-' * 100)
+    print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).')
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print('{}s'.format(t.timeit(1)))
+
+    stmt = """
+for i in range(10000):
+    noddy = Noddy()
+    for j in range(20):
+        noddy.fields["key"+str(j)] = "value "+str(j)
+    noddy.save(force_insert=True, write_concern={"w": 0}, validate=False)
+
+myNoddys = Noddy.objects()
+[n for n in myNoddys] # iterate
+"""
+
+    print('-' * 100)
+    print('MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).')
+    t = timeit.Timer(stmt=stmt, setup=setup)
+    print('{}s'.format(t.timeit(1)))
+
+
+if __name__ == "__main__":
+    main()
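Note: both benchmark modules guard their entry points with `if __name__ == "__main__":`, so they can presumably be run directly against a local MongoDB instance (for example `python benchmarks/test_inserts.py`). `test_inserts.py` prints the wall-clock time of a single `timeit` run per scenario, while `test_basic_doc_ops.py` reports the best of three repeats per operation.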
@@ -1,3 +1,4 @@
+
 =========
 Changelog
 =========
@@ -6,6 +7,12 @@ Development
 ===========
 - (Fill this out as you fix issues and develop your features).

+Changes in 0.18.1
+=================
+- Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields
+  instead of updating only the modified fields. This bug only occurs when using custom pk #2082
+- Add Python 3.7 in travis #2058
+
 Changes in 0.18.0
 =================
 - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2.
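A minimal sketch of the selective-update behaviour the 0.18.1 fix restores for documents with a custom primary key; it mirrors the `test_save_update_selectively_with_custom_pk` test added below (the model, field, and database names are illustrative):

from mongoengine import Document, IntField, StringField, connect

connect('mongoengine_test')  # assumes a local MongoDB instance

class WildBoy(Document):
    pk_id = StringField(primary_key=True)  # custom primary key, the case #2082 broke
    age = IntField()
    name = StringField()

WildBoy.drop_collection()
WildBoy(pk_id='A', age=12, name='John').save()

boy1 = WildBoy.objects().first()
boy2 = WildBoy.objects().first()

boy1.age = 99
boy1.save()        # updates only `age`
boy2.name = 'Bob'
boy2.save()        # updates only `name`, leaving the earlier `age` change intact

fresh_boy = WildBoy.objects().first()
assert fresh_boy.age == 99 and fresh_boy.name == 'Bob'

With the 0.18.0 regression, the second `save()` wrote back every field of the stale document, overwriting the earlier change to `age`.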
@@ -23,7 +23,7 @@ __all__ = (list(document.__all__) + list(fields.__all__) +
            list(signals.__all__) + list(errors.__all__))


-VERSION = (0, 18, 0)
+VERSION = (0, 18, 1)


 def get_version():
@@ -25,6 +25,16 @@ NON_FIELD_ERRORS = '__all__'


 class BaseDocument(object):
+    # TODO simplify how `_changed_fields` is used.
+    # Currently, handling of `_changed_fields` seems unnecessarily convoluted:
+    # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's
+    #    not setting it to `[]` (or any other value) in `__init__`.
+    # 2. `EmbeddedDocument` sets `_changed_fields` to `[]` it its overloaded
+    #    `__init__`.
+    # 3. `Document` does NOT set `_changed_fields` upon initialization. The
+    #    field is primarily set via `_from_son` or `_clear_changed_fields`,
+    #    though there are also other methods that manipulate it.
+    # 4. The codebase is littered with `hasattr` calls for `_changed_fields`.
     __slots__ = ('_changed_fields', '_initialised', '_created', '_data',
                  '_dynamic_fields', '_auto_id_field', '_db_field_map',
                  '__weakref__')
@@ -35,13 +45,20 @@ class BaseDocument(object):

     def __init__(self, *args, **values):
         """
-        Initialise a document or embedded document
-        :param __auto_convert: Try and will cast python objects to Object types
-        :param values: A dictionary of values for the document
+        Initialise a document or an embedded document.
+
+        :param dict values: A dictionary of keys and values for the document.
+            It may contain additional reserved keywords, e.g. "__auto_convert".
+        :param bool __auto_convert: If True, supplied values will be converted
+            to Python-type values via each field's `to_python` method.
+        :param set __only_fields: A set of fields that have been loaded for
+            this document. Empty if all fields have been loaded.
+        :param bool _created: Indicates whether this is a brand new document
+            or whether it's already been persisted before. Defaults to true.
         """
         self._initialised = False
         self._created = True

         if args:
             # Combine positional arguments with named arguments.
             # We only want named arguments.
@@ -58,7 +75,6 @@ class BaseDocument(object):

         __auto_convert = values.pop('__auto_convert', True)

-        # 399: set default values only to fields loaded from DB
         __only_fields = set(values.pop('__only_fields', values))

         _created = values.pop('_created', True)
@@ -83,7 +99,9 @@ class BaseDocument(object):

         self._dynamic_fields = SON()

-        # Assign default values to instance
+        # Assign default values to the instance.
+        # We set default values only for fields loaded from DB. See
+        # https://github.com/mongoengine/mongoengine/issues/399 for more info.
         for key, field in iteritems(self._fields):
             if self._db_field_map.get(key, key) in __only_fields:
                 continue
@@ -125,6 +143,7 @@ class BaseDocument(object):
         # Flag initialised
         self._initialised = True
         self._created = _created
+
         signals.post_init.send(self.__class__, document=self)

     def __delattr__(self, *args, **kwargs):
@@ -665,9 +684,7 @@ class BaseDocument(object):

     @classmethod
     def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
-        """Create an instance of a Document (subclass) from a PyMongo
-        SON.
-        """
+        """Create an instance of a Document (subclass) from a PyMongo SON."""
         if not only_fields:
             only_fields = []

@@ -690,7 +707,6 @@ class BaseDocument(object):
         if class_name != cls._class_name:
             cls = get_document(class_name)

-        changed_fields = []
         errors_dict = {}

         fields = cls._fields
@@ -720,8 +736,13 @@ class BaseDocument(object):
         if cls.STRICT:
             data = {k: v for k, v in iteritems(data) if k in cls._fields}

-        obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
-        obj._changed_fields = changed_fields
+        obj = cls(
+            __auto_convert=False,
+            _created=created,
+            __only_fields=only_fields,
+            **data
+        )
+        obj._changed_fields = []
         if not _auto_dereference:
             obj._fields = fields

@@ -2,9 +2,17 @@ from pymongo import MongoClient, ReadPreference, uri_parser
 from pymongo.database import _check_name
 import six

-__all__ = ['MongoEngineConnectionError', 'connect', 'disconnect', 'disconnect_all',
-           'register_connection', 'DEFAULT_CONNECTION_NAME', 'DEFAULT_DATABASE_NAME',
-           'get_db', 'get_connection']
+__all__ = [
+    'DEFAULT_CONNECTION_NAME',
+    'DEFAULT_DATABASE_NAME',
+    'MongoEngineConnectionError',
+    'connect',
+    'disconnect',
+    'disconnect_all',
+    'get_connection',
+    'get_db',
+    'register_connection',
+]


 DEFAULT_CONNECTION_NAME = 'default'
@@ -263,17 +271,17 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
     else:
         connection_class = MongoClient

-    # Re-use existing connection if one is suitable
+    # Re-use existing connection if one is suitable.
     existing_connection = _find_existing_connection(raw_conn_settings)

-    # If an existing connection was found, assign it to the new alias
     if existing_connection:
-        _connections[alias] = existing_connection
+        connection = existing_connection
     else:
-        _connections[alias] = _create_connection(alias=alias,
-                                                 connection_class=connection_class,
-                                                 **conn_settings)
+        connection = _create_connection(
+            alias=alias,
+            connection_class=connection_class,
+            **conn_settings
+        )
+    _connections[alias] = connection
     return _connections[alias]


@@ -359,8 +367,11 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
     new_conn_settings = _get_connection_settings(db, **kwargs)

     if new_conn_settings != prev_conn_setting:
-        raise MongoEngineConnectionError(
-            'A different connection with alias `%s` was already registered. Use disconnect() first' % alias)
+        err_msg = (
+            u'A different connection with alias `{}` was already '
+            u'registered. Use disconnect() first'
+        ).format(alias)
+        raise MongoEngineConnectionError(err_msg)
     else:
         register_connection(alias, db, **kwargs)

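The reworked error path in `connect()` above can be exercised with a short sketch like this (alias and database names are illustrative):

from mongoengine import connect, disconnect
from mongoengine.connection import MongoEngineConnectionError

connect('db_a', alias='default')

try:
    # Same alias but different settings: raises instead of silently re-registering.
    connect('db_b', alias='default')
except MongoEngineConnectionError:
    disconnect(alias='default')   # drop the previous registration first
    connect('db_b', alias='default')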
@@ -182,8 +182,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

     @classmethod
     def _get_collection(cls):
-        """Return the corresponding PyMongo collection of this document.
-        Upon the first call, it will ensure that indexes gets created. The returned collection then gets cached
+        """Return the PyMongo collection corresponding to this document.
+
+        Upon first call, this method:
+        1. Initializes a :class:`~pymongo.collection.Collection` corresponding
+           to this document.
+        2. Creates indexes defined in this document's :attr:`meta` dictionary.
+           This happens only if `auto_create_index` is True.
         """
         if not hasattr(cls, '_collection') or cls._collection is None:
             # Get the collection, either capped or regular.
@@ -366,7 +371,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
         if write_concern is None:
             write_concern = {}

-        doc_id = self.to_mongo(fields=['id'])
+        doc_id = self.to_mongo(fields=[self._meta['id_field']])
         created = ('_id' not in doc_id or self._created or force_insert)

         signals.pre_save_post_validation.send(self.__class__, document=self,
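For context, the `auto_create_index` switch referenced in the new docstring is an ordinary `meta` option; a hypothetical model that opts out of automatic index creation on first collection access:

from mongoengine import DateTimeField, Document

class AuditLog(Document):
    timestamp = DateTimeField()
    meta = {
        'indexes': ['timestamp'],     # indexes can still be declared here...
        'auto_create_index': False,   # ...but _get_collection() will not create them
    }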
@@ -1009,13 +1009,15 @@ class BaseQuerySet(object):
         return queryset

     def order_by(self, *keys):
-        """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The
-        order may be specified by prepending each of the keys by a + or a -.
-        Ascending order is assumed. If no keys are passed, existing ordering
-        is cleared instead.
+        """Order the :class:`~mongoengine.queryset.QuerySet` by the given keys.
+
+        The order may be specified by prepending each of the keys by a "+" or
+        a "-". Ascending order is assumed if there's no prefix.
+
+        If no keys are passed, existing ordering is cleared instead.

         :param keys: fields to order the query results by; keys may be
-            prefixed with **+** or **-** to determine the ordering direction
+            prefixed with "+" or a "-" to determine the ordering direction.
         """
         queryset = self.clone()

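For reference, the prefix convention described in the updated `order_by` docstring looks like this in use (the `BlogPost` model is illustrative):

from mongoengine import DateTimeField, Document, StringField

class BlogPost(Document):
    title = StringField()
    published_date = DateTimeField()

# "-" sorts descending; "+" (or no prefix) sorts ascending.
recent_first = BlogPost.objects.order_by('-published_date', '+title')

# Calling order_by() with no keys clears any previously applied ordering.
unordered = recent_first.order_by()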
@@ -1,33 +1,30 @@
 # -*- coding: utf-8 -*-
-import bson
 import os
 import pickle
 import unittest
 import uuid
-import warnings
 import weakref
 from datetime import datetime

+import bson
 from bson import DBRef, ObjectId
 from pymongo.errors import DuplicateKeyError
 from six import iteritems

-from mongoengine.mongodb_support import get_mongodb_version, MONGODB_36, MONGODB_34
-from mongoengine.pymongo_support import list_collection_names
-from tests import fixtures
-from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
-                            PickleDynamicEmbedded, PickleDynamicTest)
-from tests.utils import MongoDBTestCase, get_as_pymongo
-
 from mongoengine import *
-from mongoengine.base import get_document, _document_registry
-from mongoengine.connection import get_db
-from mongoengine.errors import (NotRegistered, InvalidDocumentError,
-                                InvalidQueryError, NotUniqueError,
-                                FieldDoesNotExist, SaveConditionError)
-from mongoengine.queryset import NULLIFY, Q
-from mongoengine.context_managers import switch_db, query_counter
 from mongoengine import signals
+from mongoengine.base import _document_registry, get_document
+from mongoengine.connection import get_db
+from mongoengine.context_managers import query_counter, switch_db
+from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, \
+    InvalidQueryError, NotRegistered, NotUniqueError, SaveConditionError)
+from mongoengine.mongodb_support import MONGODB_34, MONGODB_36, get_mongodb_version
+from mongoengine.pymongo_support import list_collection_names
+from mongoengine.queryset import NULLIFY, Q
+from tests import fixtures
+from tests.fixtures import (PickleDynamicEmbedded, PickleDynamicTest, \
+    PickleEmbedded, PickleSignalsTest, PickleTest)
+from tests.utils import MongoDBTestCase, get_as_pymongo

 TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__),
                                '../fields/mongoengine.png')
@@ -336,41 +333,36 @@ class InstanceTest(MongoDBTestCase):
         self.assertEqual(User._fields['username'].db_field, '_id')
         self.assertEqual(User._meta['id_field'], 'username')

-        # test no primary key field
-        self.assertRaises(ValidationError, User(name='test').save)
+        User.objects.create(username='test', name='test user')
+        user = User.objects.first()
+        self.assertEqual(user.id, 'test')
+        self.assertEqual(user.pk, 'test')
+        user_dict = User.objects._collection.find_one()
+        self.assertEqual(user_dict['_id'], 'test')

-        # define a subclass with a different primary key field than the
-        # parent
-        with self.assertRaises(ValueError):
+    def test_change_custom_id_field_in_subclass(self):
+        """Subclasses cannot override which field is the primary key."""
+        class User(Document):
+            username = StringField(primary_key=True)
+            name = StringField()
+            meta = {'allow_inheritance': True}
+
+        with self.assertRaises(ValueError) as e:
             class EmailUser(User):
                 email = StringField(primary_key=True)
+        exc = e.exception
+        self.assertEqual(str(exc), 'Cannot override primary key field')

-        class EmailUser(User):
-            email = StringField()
-
-        user = User(username='test', name='test user')
-        user.save()
-
-        user_obj = User.objects.first()
-        self.assertEqual(user_obj.id, 'test')
-        self.assertEqual(user_obj.pk, 'test')
-
-        user_son = User.objects._collection.find_one()
-        self.assertEqual(user_son['_id'], 'test')
-        self.assertNotIn('username', user_son['_id'])
-
-        User.drop_collection()
-
-        user = User(pk='mongo', name='mongo user')
-        user.save()
-
-        user_obj = User.objects.first()
-        self.assertEqual(user_obj.id, 'mongo')
-        self.assertEqual(user_obj.pk, 'mongo')
-
-        user_son = User.objects._collection.find_one()
-        self.assertEqual(user_son['_id'], 'mongo')
-        self.assertNotIn('username', user_son['_id'])
+    def test_custom_id_field_is_required(self):
+        """Ensure the custom primary key field is required."""
+        class User(Document):
+            username = StringField(primary_key=True)
+            name = StringField()
+
+        with self.assertRaises(ValidationError) as e:
+            User(name='test').save()
+        exc = e.exception
+        self.assertTrue("Field is required: ['username']" in str(exc))

     def test_document_not_registered(self):
         class Place(Document):
@@ -1260,6 +1252,50 @@ class InstanceTest(MongoDBTestCase):
         self.assertTrue(w1.toggle)
         self.assertEqual(w1.count, 3)

+    def test_save_update_selectively(self):
+        class WildBoy(Document):
+            age = IntField()
+            name = StringField()
+
+        WildBoy.drop_collection()
+
+        WildBoy(age=12, name='John').save()
+
+        boy1 = WildBoy.objects().first()
+        boy2 = WildBoy.objects().first()
+
+        boy1.age = 99
+        boy1.save()
+        boy2.name = 'Bob'
+        boy2.save()
+
+        fresh_boy = WildBoy.objects().first()
+        self.assertEqual(fresh_boy.age, 99)
+        self.assertEqual(fresh_boy.name, 'Bob')
+
+    def test_save_update_selectively_with_custom_pk(self):
+        # Prevents regression of #2082
+        class WildBoy(Document):
+            pk_id = StringField(primary_key=True)
+            age = IntField()
+            name = StringField()
+
+        WildBoy.drop_collection()
+
+        WildBoy(pk_id='A', age=12, name='John').save()
+
+        boy1 = WildBoy.objects().first()
+        boy2 = WildBoy.objects().first()
+
+        boy1.age = 99
+        boy1.save()
+        boy2.name = 'Bob'
+        boy2.save()
+
+        fresh_boy = WildBoy.objects().first()
+        self.assertEqual(fresh_boy.age, 99)
+        self.assertEqual(fresh_boy.name, 'Bob')
+
     def test_update(self):
         """Ensure that an existing document is updated instead of be
         overwritten.
@@ -1542,7 +1578,7 @@ class InstanceTest(MongoDBTestCase):
         self.assertEqual(person.age, 21)
         self.assertEqual(person.active, False)

-    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop(self):
+    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc(self):
         # Refers to Issue #1685
         class EmbeddedChildModel(EmbeddedDocument):
             id = DictField(primary_key=True)
@@ -1552,9 +1588,11 @@ class InstanceTest(MongoDBTestCase):
                                                 EmbeddedChildModel)

         emb = EmbeddedChildModel(id={'1': [1]})
-        ParentModel(children=emb)._get_changed_fields()
+        changed_fields = ParentModel(child=emb)._get_changed_fields()
+        self.assertEqual(changed_fields, [])

-    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop(self):
+    def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc(self):
+        # Refers to Issue #1685
         class User(Document):
             id = IntField(primary_key=True)
             name = StringField()
@@ -7,7 +7,6 @@ from decimal import Decimal

 from bson import DBRef, ObjectId
 import pymongo
-from pymongo.errors import ConfigurationError
 from pymongo.read_preferences import ReadPreference
 from pymongo.results import UpdateResult
 import six
@@ -17,7 +16,7 @@ from mongoengine import *
 from mongoengine.connection import get_connection, get_db
 from mongoengine.context_managers import query_counter, switch_db
 from mongoengine.errors import InvalidQueryError
-from mongoengine.mongodb_support import get_mongodb_version, MONGODB_36
+from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version
 from mongoengine.queryset import (DoesNotExist, MultipleObjectsReturned,
                                   QuerySet, QuerySetManager, queryset_manager)

@@ -832,8 +831,6 @@ class QuerySetTest(unittest.TestCase):

     def test_bulk_insert(self):
         """Ensure that bulk insert works"""
-        MONGO_VER = self.mongodb_version
-
         class Comment(EmbeddedDocument):
             name = StringField()

@@ -847,10 +844,6 @@ class QuerySetTest(unittest.TestCase):

         Blog.drop_collection()

-        # get MongoDB version info
-        connection = get_connection()
-        info = connection.test.command('buildInfo')
-
         # Recreates the collection
         self.assertEqual(0, Blog.objects.count())

@@ -5386,6 +5379,13 @@ class QuerySetTest(unittest.TestCase):
             {'_id': None, 'avg': 29, 'total': 2}
         ])

+        data = Person.objects().aggregate({'$match': {'name': 'Isabella Luanna'}})
+        self.assertEqual(list(data), [
+            {u'_id': p1.pk,
+             u'age': 16,
+             u'name': u'Isabella Luanna'}]
+        )
+
     def test_queryset_aggregation_with_skip(self):
         class Person(Document):
             name = StringField()