diff --git a/.travis.yml b/.travis.yml index b7c56a02..40736165 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,20 +6,28 @@ python: - "2.7" - "3.2" - "3.3" + - "3.4" + - "pypy" env: - - PYMONGO=dev DJANGO=1.5.1 - - PYMONGO=dev DJANGO=1.4.2 - - PYMONGO=2.5 DJANGO=1.5.1 - - PYMONGO=2.5 DJANGO=1.4.2 + - PYMONGO=dev DJANGO=1.6.5 + - PYMONGO=dev DJANGO=1.5.8 + - PYMONGO=2.7.1 DJANGO=1.6.5 + - PYMONGO=2.7.1 DJANGO=1.5.8 + +matrix: + fast_finish: true + install: - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi - - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install django==$DJANGO --use-mirrors ; true; fi + - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev python-tk - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi - - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi + - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO; true; fi + - pip install Django==$DJANGO + - pip install https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.1.tar.gz#md5=1534bb15cf311f07afaa3aacba1c028b - python setup.py install script: - python setup.py test + - if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then 2to3 . 
-w; fi; + - python benchmark.py notifications: irc: "irc.freenode.org#mongoengine" branches: diff --git a/AUTHORS b/AUTHORS index 7788139e..c6c47d79 100644 --- a/AUTHORS +++ b/AUTHORS @@ -16,8 +16,6 @@ Dervived from the git logs, inevitably incomplete but all of whom and others have submitted patches, reported bugs and generally helped make MongoEngine that much better: - * Harry Marr - * Ross Lawley * blackbrrr * Florian Schlachter * Vincent Driessen @@ -136,7 +134,7 @@ that much better: * Paul Swartz * Sundar Raman * Benoit Louy - * lraucy + * Loic Raucy (https://github.com/lraucy) * hellysmile * Jaepil Jeong * Daniil Sharou @@ -169,4 +167,36 @@ that much better: * Massimo Santini (https://github.com/mapio) * Nigel McNie (https://github.com/nigelmcnie) * ygbourhis (https://github.com/ygbourhis) - * Bob Dickinson (https://github.com/BobDickinson) \ No newline at end of file + * Bob Dickinson (https://github.com/BobDickinson) + * Michael Bartnett (https://github.com/michaelbartnett) + * Alon Horev (https://github.com/alonho) + * Kelvin Hammond (https://github.com/kelvinhammond) + * Jatin Chopra (https://github.com/jatin) + * Paul Uithol (https://github.com/PaulUithol) + * Thom Knowles (https://github.com/fleat) + * Paul (https://github.com/squamous) + * Olivier Cortès (https://github.com/Karmak23) + * crazyzubr (https://github.com/crazyzubr) + * FrankSomething (https://github.com/FrankSomething) + * Alexandr Morozov (https://github.com/LK4D4) + * mishudark (https://github.com/mishudark) + * Joe Friedl (https://github.com/grampajoe) + * Daniel Ward (https://github.com/danielward) + * Aniket Deshpande (https://github.com/anicake) + * rfkrocktk (https://github.com/rfkrocktk) + * Gustavo Andrés Angulo (https://github.com/woakas) + * Dmytro Popovych (https://github.com/drudim) + * Tom (https://github.com/tomprimozic) + * j0hnsmith (https://github.com/j0hnsmith) + * Damien Churchill (https://github.com/damoxc) + * Jonathan Simon Prates (https://github.com/jonathansp) + * 
Thiago Papageorgiou (https://github.com/tmpapageorgiou) + * Omer Katz (https://github.com/thedrow) + * Falcon Dai (https://github.com/falcondai) + * Polyrabbit (https://github.com/polyrabbit) + * Sagiv Malihi (https://github.com/sagivmalihi) + * Dmitry Konishchev (https://github.com/KonishchevDmitry) + * Martyn Smith (https://github.com/martynsmith) + * Andrei Zbikowski (https://github.com/b1naryth1ef) + * Ronald van Rij (https://github.com/ronaldvanrij) + * François Schmidts (https://github.com/jaesivsm) diff --git a/README.rst b/README.rst index ea4b5050..8c3ee26e 100644 --- a/README.rst +++ b/README.rst @@ -8,6 +8,9 @@ MongoEngine .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master :target: http://travis-ci.org/MongoEngine/mongoengine + +.. image:: https://coveralls.io/repos/MongoEngine/mongoengine/badge.png?branch=master + :target: https://coveralls.io/r/MongoEngine/mongoengine?branch=master About ===== @@ -26,9 +29,18 @@ setup.py install``. Dependencies ============ -- pymongo 2.5+ +- pymongo>=2.5 - sphinx (optional - for documentation generation) +Optional Dependencies +--------------------- +- **Django Integration:** Django>=1.4.0 for Python 2.x or PyPy and Django>=1.5.0 for Python 3.x +- **Image Fields**: Pillow>=2.0.0 or PIL (not recommended since MongoEngine is tested with Pillow) +- dateutil>=2.1.0 + +.. note + MongoEngine always runs it's test suite against the latest patch version of each dependecy. e.g.: Django 1.6.5 + Examples ======== Some simple examples of what MongoEngine code looks like:: @@ -92,4 +104,4 @@ Community Contributing ============ -We welcome contributions! see the`Contribution guidelines `_ +We welcome contributions! 
see the `Contribution guidelines `_ diff --git a/benchmark.py b/benchmark.py index 16b2fd47..53ecf32c 100644 --- a/benchmark.py +++ b/benchmark.py @@ -15,7 +15,7 @@ def cprofile_main(): class Noddy(Document): fields = DictField() - for i in xrange(1): + for i in range(1): noddy = Noddy() for j in range(20): noddy.fields["key" + str(j)] = "value " + str(j) @@ -113,6 +113,7 @@ def main(): 4.68946313858 ---------------------------------------------------------------------------------------------------- """ + print("Benchmarking...") setup = """ from pymongo import MongoClient @@ -127,7 +128,7 @@ connection = MongoClient() db = connection.timeit_test noddy = db.noddy -for i in xrange(10000): +for i in range(10000): example = {'fields': {}} for j in range(20): example['fields']["key"+str(j)] = "value "+str(j) @@ -138,10 +139,10 @@ myNoddys = noddy.find() [n for n in myNoddys] # iterate """ - print "-" * 100 - print """Creating 10000 dictionaries - Pymongo""" + print("-" * 100) + print("""Creating 10000 dictionaries - Pymongo""") t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) + print(t.timeit(1)) stmt = """ from pymongo import MongoClient @@ -150,7 +151,7 @@ connection = MongoClient() db = connection.timeit_test noddy = db.noddy -for i in xrange(10000): +for i in range(10000): example = {'fields': {}} for j in range(20): example['fields']["key"+str(j)] = "value "+str(j) @@ -161,10 +162,10 @@ myNoddys = noddy.find() [n for n in myNoddys] # iterate """ - print "-" * 100 - print """Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""" + print("-" * 100) + print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""") t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) + print(t.timeit(1)) setup = """ from pymongo import MongoClient @@ -180,7 +181,7 @@ class Noddy(Document): """ stmt = """ -for i in xrange(10000): +for i in range(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) @@ -190,13 
+191,13 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print "-" * 100 - print """Creating 10000 dictionaries - MongoEngine""" + print("-" * 100) + print("""Creating 10000 dictionaries - MongoEngine""") t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) + print(t.timeit(1)) stmt = """ -for i in xrange(10000): +for i in range(10000): noddy = Noddy() fields = {} for j in range(20): @@ -208,13 +209,13 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print "-" * 100 - print """Creating 10000 dictionaries without continual assign - MongoEngine""" + print("-" * 100) + print("""Creating 10000 dictionaries without continual assign - MongoEngine""") t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) + print(t.timeit(1)) stmt = """ -for i in xrange(10000): +for i in range(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) @@ -224,13 +225,13 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print "-" * 100 - print """Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""" + print("-" * 100) + print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""") t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) + print(t.timeit(1)) stmt = """ -for i in xrange(10000): +for i in range(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) @@ -240,13 +241,13 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print "-" * 100 - print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""" + print("-" * 100) + print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""") t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) + print(t.timeit(1)) stmt = """ -for i in xrange(10000): +for i in range(10000): noddy = Noddy() for j in range(20): 
noddy.fields["key"+str(j)] = "value "+str(j) @@ -256,13 +257,13 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print "-" * 100 - print """Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""" + print("-" * 100) + print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""") t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) + print(t.timeit(1)) stmt = """ -for i in xrange(10000): +for i in range(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) @@ -272,11 +273,11 @@ myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ - print "-" * 100 - print """Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""" + print("-" * 100) + print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""") t = timeit.Timer(stmt=stmt, setup=setup) - print t.timeit(1) + print(t.timeit(1)) if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/docs/_themes/nature/static/nature.css_t b/docs/_themes/nature/static/nature.css_t deleted file mode 100644 index 03b0379d..00000000 --- a/docs/_themes/nature/static/nature.css_t +++ /dev/null @@ -1,229 +0,0 @@ -/** - * Sphinx stylesheet -- default theme - * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - */ - -@import url("basic.css"); - -/* -- page layout ----------------------------------------------------------- */ - -body { - font-family: Arial, sans-serif; - font-size: 100%; - background-color: #111; - color: #555; - margin: 0; - padding: 0; -} - -div.documentwrapper { - float: left; - width: 100%; -} - -div.bodywrapper { - margin: 0 0 0 230px; -} - -hr{ - border: 1px solid #B1B4B6; -} - -div.document { - background-color: #eee; -} - -div.body { - background-color: #ffffff; - color: #3E4349; - padding: 0 30px 30px 30px; - font-size: 0.8em; -} - -div.footer { - color: #555; - width: 100%; 
- padding: 13px 0; - text-align: center; - font-size: 75%; -} - -div.footer a { - color: #444; - text-decoration: underline; -} - -div.related { - background-color: #6BA81E; - line-height: 32px; - color: #fff; - text-shadow: 0px 1px 0 #444; - font-size: 0.80em; -} - -div.related a { - color: #E2F3CC; -} - -div.sphinxsidebar { - font-size: 0.75em; - line-height: 1.5em; -} - -div.sphinxsidebarwrapper{ - padding: 20px 0; -} - -div.sphinxsidebar h3, -div.sphinxsidebar h4 { - font-family: Arial, sans-serif; - color: #222; - font-size: 1.2em; - font-weight: normal; - margin: 0; - padding: 5px 10px; - background-color: #ddd; - text-shadow: 1px 1px 0 white -} - -div.sphinxsidebar h4{ - font-size: 1.1em; -} - -div.sphinxsidebar h3 a { - color: #444; -} - - -div.sphinxsidebar p { - color: #888; - padding: 5px 20px; -} - -div.sphinxsidebar p.topless { -} - -div.sphinxsidebar ul { - margin: 10px 20px; - padding: 0; - color: #000; -} - -div.sphinxsidebar a { - color: #444; -} - -div.sphinxsidebar input { - border: 1px solid #ccc; - font-family: sans-serif; - font-size: 1em; -} - -div.sphinxsidebar input[type=text]{ - margin-left: 20px; -} - -/* -- body styles ----------------------------------------------------------- */ - -a { - color: #005B81; - text-decoration: none; -} - -a:hover { - color: #E32E00; - text-decoration: underline; -} - -div.body h1, -div.body h2, -div.body h3, -div.body h4, -div.body h5, -div.body h6 { - font-family: Arial, sans-serif; - background-color: #BED4EB; - font-weight: normal; - color: #212224; - margin: 30px 0px 10px 0px; - padding: 5px 0 5px 10px; - text-shadow: 0px 1px 0 white -} - -div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } -div.body h2 { font-size: 150%; background-color: #C8D5E3; } -div.body h3 { font-size: 120%; background-color: #D8DEE3; } -div.body h4 { font-size: 110%; background-color: #D8DEE3; } -div.body h5 { font-size: 100%; background-color: #D8DEE3; } -div.body h6 { font-size: 100%; 
background-color: #D8DEE3; } - -a.headerlink { - color: #c60f0f; - font-size: 0.8em; - padding: 0 4px 0 4px; - text-decoration: none; -} - -a.headerlink:hover { - background-color: #c60f0f; - color: white; -} - -div.body p, div.body dd, div.body li { - line-height: 1.5em; -} - -div.admonition p.admonition-title + p { - display: inline; -} - -div.highlight{ - background-color: white; -} - -div.note { - background-color: #eee; - border: 1px solid #ccc; -} - -div.seealso { - background-color: #ffc; - border: 1px solid #ff6; -} - -div.topic { - background-color: #eee; -} - -div.warning { - background-color: #ffe4e4; - border: 1px solid #f66; -} - -p.admonition-title { - display: inline; -} - -p.admonition-title:after { - content: ":"; -} - -pre { - padding: 10px; - background-color: White; - color: #222; - line-height: 1.2em; - border: 1px solid #C6C9CB; - font-size: 1.2em; - margin: 1.5em 0 1.5em 0; - -webkit-box-shadow: 1px 1px 1px #d8d8d8; - -moz-box-shadow: 1px 1px 1px #d8d8d8; -} - -tt { - background-color: #ecf0f3; - color: #222; - padding: 1px 2px; - font-size: 1.2em; - font-family: monospace; -} diff --git a/docs/_themes/nature/static/pygments.css b/docs/_themes/nature/static/pygments.css deleted file mode 100644 index 652b7612..00000000 --- a/docs/_themes/nature/static/pygments.css +++ /dev/null @@ -1,54 +0,0 @@ -.c { color: #999988; font-style: italic } /* Comment */ -.k { font-weight: bold } /* Keyword */ -.o { font-weight: bold } /* Operator */ -.cm { color: #999988; font-style: italic } /* Comment.Multiline */ -.cp { color: #999999; font-weight: bold } /* Comment.preproc */ -.c1 { color: #999988; font-style: italic } /* Comment.Single */ -.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */ -.ge { font-style: italic } /* Generic.Emph */ -.gr { color: #aa0000 } /* Generic.Error */ -.gh { color: #999999 } /* Generic.Heading */ -.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ -.go { color: #111 } /* Generic.Output 
*/ -.gp { color: #555555 } /* Generic.Prompt */ -.gs { font-weight: bold } /* Generic.Strong */ -.gu { color: #aaaaaa } /* Generic.Subheading */ -.gt { color: #aa0000 } /* Generic.Traceback */ -.kc { font-weight: bold } /* Keyword.Constant */ -.kd { font-weight: bold } /* Keyword.Declaration */ -.kp { font-weight: bold } /* Keyword.Pseudo */ -.kr { font-weight: bold } /* Keyword.Reserved */ -.kt { color: #445588; font-weight: bold } /* Keyword.Type */ -.m { color: #009999 } /* Literal.Number */ -.s { color: #bb8844 } /* Literal.String */ -.na { color: #008080 } /* Name.Attribute */ -.nb { color: #999999 } /* Name.Builtin */ -.nc { color: #445588; font-weight: bold } /* Name.Class */ -.no { color: #ff99ff } /* Name.Constant */ -.ni { color: #800080 } /* Name.Entity */ -.ne { color: #990000; font-weight: bold } /* Name.Exception */ -.nf { color: #990000; font-weight: bold } /* Name.Function */ -.nn { color: #555555 } /* Name.Namespace */ -.nt { color: #000080 } /* Name.Tag */ -.nv { color: purple } /* Name.Variable */ -.ow { font-weight: bold } /* Operator.Word */ -.mf { color: #009999 } /* Literal.Number.Float */ -.mh { color: #009999 } /* Literal.Number.Hex */ -.mi { color: #009999 } /* Literal.Number.Integer */ -.mo { color: #009999 } /* Literal.Number.Oct */ -.sb { color: #bb8844 } /* Literal.String.Backtick */ -.sc { color: #bb8844 } /* Literal.String.Char */ -.sd { color: #bb8844 } /* Literal.String.Doc */ -.s2 { color: #bb8844 } /* Literal.String.Double */ -.se { color: #bb8844 } /* Literal.String.Escape */ -.sh { color: #bb8844 } /* Literal.String.Heredoc */ -.si { color: #bb8844 } /* Literal.String.Interpol */ -.sx { color: #bb8844 } /* Literal.String.Other */ -.sr { color: #808000 } /* Literal.String.Regex */ -.s1 { color: #bb8844 } /* Literal.String.Single */ -.ss { color: #bb8844 } /* Literal.String.Symbol */ -.bp { color: #999999 } /* Name.Builtin.Pseudo */ -.vc { color: #ff99ff } /* Name.Variable.Class */ -.vg { color: #ff99ff } /* Name.Variable.Global 
*/ -.vi { color: #ff99ff } /* Name.Variable.Instance */ -.il { color: #009999 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/docs/_themes/nature/theme.conf b/docs/_themes/nature/theme.conf deleted file mode 100644 index 1cc40044..00000000 --- a/docs/_themes/nature/theme.conf +++ /dev/null @@ -1,4 +0,0 @@ -[theme] -inherit = basic -stylesheet = nature.css -pygments_style = tango diff --git a/docs/_themes/sphinx_rtd_theme/__init__.py b/docs/_themes/sphinx_rtd_theme/__init__.py new file mode 100755 index 00000000..1440863d --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/__init__.py @@ -0,0 +1,17 @@ +"""Sphinx ReadTheDocs theme. + +From https://github.com/ryan-roemer/sphinx-bootstrap-theme. + +""" +import os + +VERSION = (0, 1, 5) + +__version__ = ".".join(str(v) for v in VERSION) +__version_full__ = __version__ + + +def get_html_theme_path(): + """Return list of HTML theme paths.""" + cur_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + return cur_dir diff --git a/docs/_themes/sphinx_rtd_theme/breadcrumbs.html b/docs/_themes/sphinx_rtd_theme/breadcrumbs.html new file mode 100755 index 00000000..3e4f359c --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/breadcrumbs.html @@ -0,0 +1,15 @@ + +
+ diff --git a/docs/_themes/sphinx_rtd_theme/footer.html b/docs/_themes/sphinx_rtd_theme/footer.html new file mode 100755 index 00000000..e42d753f --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/footer.html @@ -0,0 +1,30 @@ +
+ {% if next or prev %} + + {% endif %} + +
+ +

+ {%- if show_copyright %} + {%- if hasdoc('copyright') %} + {% trans path=pathto('copyright'), copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} + {%- else %} + {% trans copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %} + {%- endif %} + {%- endif %} + + {%- if last_updated %} + {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} + {%- endif %} +

+ + {% trans %}Sphinx theme provided by Read the Docs{% endtrans %} +
diff --git a/docs/_themes/sphinx_rtd_theme/layout.html b/docs/_themes/sphinx_rtd_theme/layout.html new file mode 100755 index 00000000..febe8eb0 --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/layout.html @@ -0,0 +1,142 @@ +{# TEMPLATE VAR SETTINGS #} +{%- set url_root = pathto('', 1) %} +{%- if url_root == '#' %}{% set url_root = '' %}{% endif %} +{%- if not embedded and docstitle %} + {%- set titlesuffix = " — "|safe + docstitle|e %} +{%- else %} + {%- set titlesuffix = "" %} +{%- endif %} + + + + + + + + {% block htmltitle %} + {{ title|striptags|e }}{{ titlesuffix }} + {% endblock %} + + {# FAVICON #} + {% if favicon %} + + {% endif %} + {# CANONICAL #} + {%- if theme_canonical_url %} + + {%- endif %} + + {# CSS #} + + + {# JS #} + {% if not embedded %} + + + {%- for scriptfile in script_files %} + + {%- endfor %} + + {% if use_opensearch %} + + {% endif %} + + {% endif %} + + {# RTD hosts these file themselves, so just load on non RTD builds #} + {% if not READTHEDOCS %} + + + {% endif %} + + {% for cssfile in css_files %} + + {% endfor %} + + {%- block linktags %} + {%- if hasdoc('about') %} + + {%- endif %} + {%- if hasdoc('genindex') %} + + {%- endif %} + {%- if hasdoc('search') %} + + {%- endif %} + {%- if hasdoc('copyright') %} + + {%- endif %} + + {%- if parents %} + + {%- endif %} + {%- if next %} + + {%- endif %} + {%- if prev %} + + {%- endif %} + {%- endblock %} + {%- block extrahead %} {% endblock %} + + + + + + + +
+ + {# SIDE NAV, TOGGLES ON MOBILE #} + + +
+ + {# MOBILE NAV, TRIGGLES SIDE NAV ON TOGGLE #} + + + + {# PAGE CONTENT #} +
+
+ {% include "breadcrumbs.html" %} + {% block body %}{% endblock %} + {% include "footer.html" %} +
+
+ +
+ +
+ {% include "versions.html" %} + + diff --git a/docs/_themes/sphinx_rtd_theme/layout_old.html b/docs/_themes/sphinx_rtd_theme/layout_old.html new file mode 100755 index 00000000..deb8df2a --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/layout_old.html @@ -0,0 +1,205 @@ +{# + basic/layout.html + ~~~~~~~~~~~~~~~~~ + + Master layout template for Sphinx themes. + + :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +#} +{%- block doctype -%} + +{%- endblock %} +{%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} +{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} +{%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and + (sidebars != []) %} +{%- set url_root = pathto('', 1) %} +{# XXX necessary? #} +{%- if url_root == '#' %}{% set url_root = '' %}{% endif %} +{%- if not embedded and docstitle %} + {%- set titlesuffix = " — "|safe + docstitle|e %} +{%- else %} + {%- set titlesuffix = "" %} +{%- endif %} + +{%- macro relbar() %} + +{%- endmacro %} + +{%- macro sidebar() %} + {%- if render_sidebar %} +
+
+ {%- block sidebarlogo %} + {%- if logo %} + + {%- endif %} + {%- endblock %} + {%- if sidebars != None %} + {#- new style sidebar: explicitly include/exclude templates #} + {%- for sidebartemplate in sidebars %} + {%- include sidebartemplate %} + {%- endfor %} + {%- else %} + {#- old style sidebars: using blocks -- should be deprecated #} + {%- block sidebartoc %} + {%- include "localtoc.html" %} + {%- endblock %} + {%- block sidebarrel %} + {%- include "relations.html" %} + {%- endblock %} + {%- block sidebarsourcelink %} + {%- include "sourcelink.html" %} + {%- endblock %} + {%- if customsidebar %} + {%- include customsidebar %} + {%- endif %} + {%- block sidebarsearch %} + {%- include "searchbox.html" %} + {%- endblock %} + {%- endif %} +
+
+ {%- endif %} +{%- endmacro %} + +{%- macro script() %} + + {%- for scriptfile in script_files %} + + {%- endfor %} +{%- endmacro %} + +{%- macro css() %} + + + {%- for cssfile in css_files %} + + {%- endfor %} +{%- endmacro %} + + + + + {{ metatags }} + {%- block htmltitle %} + {{ title|striptags|e }}{{ titlesuffix }} + {%- endblock %} + {{ css() }} + {%- if not embedded %} + {{ script() }} + {%- if use_opensearch %} + + {%- endif %} + {%- if favicon %} + + {%- endif %} + {%- endif %} +{%- block linktags %} + {%- if hasdoc('about') %} + + {%- endif %} + {%- if hasdoc('genindex') %} + + {%- endif %} + {%- if hasdoc('search') %} + + {%- endif %} + {%- if hasdoc('copyright') %} + + {%- endif %} + + {%- if parents %} + + {%- endif %} + {%- if next %} + + {%- endif %} + {%- if prev %} + + {%- endif %} +{%- endblock %} +{%- block extrahead %} {% endblock %} + + +{%- block header %}{% endblock %} + +{%- block relbar1 %}{{ relbar() }}{% endblock %} + +{%- block content %} + {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %} + +
+ {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} +
+ {% block body %} {% endblock %} +
+ {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} + + {%- block sidebar2 %}{{ sidebar() }}{% endblock %} +
+
+{%- endblock %} + +{%- block relbar2 %}{{ relbar() }}{% endblock %} + +{%- block footer %} + +

asdf asdf asdf asdf 22

+{%- endblock %} + + + diff --git a/docs/_themes/sphinx_rtd_theme/search.html b/docs/_themes/sphinx_rtd_theme/search.html new file mode 100755 index 00000000..d8bbe690 --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/search.html @@ -0,0 +1,50 @@ +{# + basic/search.html + ~~~~~~~~~~~~~~~~~ + + Template for the search page. + + :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +#} +{%- extends "layout.html" %} +{% set title = _('Search') %} +{% set script_files = script_files + ['_static/searchtools.js'] %} +{% block extrahead %} + + {# this is used when loading the search index using $.ajax fails, + such as on Chrome for documents on localhost #} + + {{ super() }} +{% endblock %} +{% block body %} + + + {% if search_performed %} +

{{ _('Search Results') }}

+ {% if not search_results %} +

{{ _('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.') }}

+ {% endif %} + {% endif %} +
+ {% if search_results %} +
    + {% for href, caption, context in search_results %} +
  • + {{ caption }} +

    {{ context|e }}

    +
  • + {% endfor %} +
+ {% endif %} +
+{% endblock %} diff --git a/docs/_themes/sphinx_rtd_theme/searchbox.html b/docs/_themes/sphinx_rtd_theme/searchbox.html new file mode 100755 index 00000000..f62545ea --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/searchbox.html @@ -0,0 +1,5 @@ +
+ + + +
diff --git a/docs/_themes/sphinx_rtd_theme/static/css/badge_only.css b/docs/_themes/sphinx_rtd_theme/static/css/badge_only.css new file mode 100755 index 00000000..7fccc414 --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/static/css/badge_only.css @@ -0,0 +1 @@ +.font-smooth,.icon:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon{display:inline-block;text-decoration:inherit}li .icon{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon{width:0.8em}ul.icons li .icon-large:before,ul.icons li .icon-large:before{vertical-align:baseline}.icon-book:before{content:"\f02d"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions 
.rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}} diff --git a/docs/_themes/sphinx_rtd_theme/static/css/theme.css b/docs/_themes/sphinx_rtd_theme/static/css/theme.css new file mode 100755 index 00000000..a37f8d8c --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/static/css/theme.css @@ -0,0 +1 @@ 
+*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}[hidden]{display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:hover,a:active{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}blockquote{margin:0}dfn{font-style:italic}hr{display:block;height:1px;border:0;border-top:1px solid #ccc;margin:20px 0;padding:0}ins{background:#ff9;color:#000;text-decoration:none}mark{background:#ff0;color:#000;font-style:italic;font-weight:bold}pre,code,.rst-content tt,kbd,samp{font-family:monospace,serif;_font-family:"courier new",monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:before,q:after{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}ul,ol,dl{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure{margin:0}form{margin:0}fieldset{border:0;margin:0;padding:0}label{cursor:pointer}legend{border:0;*margin-left:-7px;padding:0;white-space:normal}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type="checkbox"],input[type="radio"]{box-sizing:border-box;padding:0;*width:13px;*height:13px}input[type="search"]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-
sizing:content-box}input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}textarea{overflow:auto;vertical-align:top;resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:0.2em 0;background:#ccc;color:#000;padding:0.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none !important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{html,body,section{background:none !important}*{box-shadow:none !important;text-shadow:none !important;filter:none !important;-ms-filter:none !important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}}.font-smooth,.icon:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-tag-input-group .wy-tag .wy-tag-remove:before,.rst-content .admonition-title:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 
.headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content dl dt .headerlink:before,.wy-alert,.rst-content .note,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .warning,.rst-content .seealso,.btn,input[type="text"],input[type="password"],input[type="email"],input[type="url"],input[type="date"],input[type="month"],input[type="time"],input[type="datetime"],input[type="datetime-local"],input[type="week"],input[type="number"],input[type="search"],input[type="tel"],input[type="color"],select,textarea,.wy-tag-input-group,.wy-menu-vertical li.on a,.wy-menu-vertical li.current>a,.wy-side-nav-search>a,.wy-side-nav-search .wy-dropdown>a,.wy-nav-top a{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-tag-input-group .wy-tag .wy-tag-remove:before,.rst-content .admonition-title:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content dl dt 
.headerlink:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon,a .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success a .wy-input-context,a .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger a .wy-input-context,a .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning a .wy-input-context,a .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info a .wy-input-context,a .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag a .wy-tag-remove,a .rst-content .admonition-title,.rst-content a .admonition-title,a .rst-content h1 .headerlink,.rst-content h1 a .headerlink,a .rst-content h2 .headerlink,.rst-content h2 a .headerlink,a .rst-content h3 .headerlink,.rst-content h3 a .headerlink,a .rst-content h4 .headerlink,.rst-content h4 a .headerlink,a .rst-content h5 .headerlink,.rst-content h5 a .headerlink,a .rst-content h6 .headerlink,.rst-content h6 a .headerlink,a .rst-content dl dt .headerlink,.rst-content dl dt a .headerlink{display:inline-block;text-decoration:inherit}.icon-large:before{vertical-align:-10%;font-size:1.33333em}.btn .icon,.btn .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success .btn .wy-input-context,.btn .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger .btn .wy-input-context,.btn .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .btn .wy-input-context,.btn .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info .btn .wy-input-context,.btn .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group 
.wy-tag .btn .wy-tag-remove,.btn .rst-content .admonition-title,.rst-content .btn .admonition-title,.btn .rst-content h1 .headerlink,.rst-content h1 .btn .headerlink,.btn .rst-content h2 .headerlink,.rst-content h2 .btn .headerlink,.btn .rst-content h3 .headerlink,.rst-content h3 .btn .headerlink,.btn .rst-content h4 .headerlink,.rst-content h4 .btn .headerlink,.btn .rst-content h5 .headerlink,.rst-content h5 .btn .headerlink,.btn .rst-content h6 .headerlink,.rst-content h6 .btn .headerlink,.btn .rst-content dl dt .headerlink,.rst-content dl dt .btn .headerlink,.nav .icon,.nav .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success .nav .wy-input-context,.nav .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger .nav .wy-input-context,.nav .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .nav .wy-input-context,.nav .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info .nav .wy-input-context,.nav .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag .nav .wy-tag-remove,.nav .rst-content .admonition-title,.rst-content .nav .admonition-title,.nav .rst-content h1 .headerlink,.rst-content h1 .nav .headerlink,.nav .rst-content h2 .headerlink,.rst-content h2 .nav .headerlink,.nav .rst-content h3 .headerlink,.rst-content h3 .nav .headerlink,.nav .rst-content h4 .headerlink,.rst-content h4 .nav .headerlink,.nav .rst-content h5 .headerlink,.rst-content h5 .nav .headerlink,.nav .rst-content h6 .headerlink,.rst-content h6 .nav .headerlink,.nav .rst-content dl dt .headerlink,.rst-content dl dt .nav .headerlink{display:inline}.btn .icon.icon-large,.btn .wy-inline-validate.wy-inline-validate-success .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-success .btn .icon-large.wy-input-context,.btn 
.wy-inline-validate.wy-inline-validate-danger .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-danger .btn .icon-large.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-warning .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-warning .btn .icon-large.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-info .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-info .btn .icon-large.wy-input-context,.btn .wy-tag-input-group .wy-tag .icon-large.wy-tag-remove,.wy-tag-input-group .wy-tag .btn .icon-large.wy-tag-remove,.btn .rst-content .icon-large.admonition-title,.rst-content .btn .icon-large.admonition-title,.btn .rst-content h1 .icon-large.headerlink,.rst-content h1 .btn .icon-large.headerlink,.btn .rst-content h2 .icon-large.headerlink,.rst-content h2 .btn .icon-large.headerlink,.btn .rst-content h3 .icon-large.headerlink,.rst-content h3 .btn .icon-large.headerlink,.btn .rst-content h4 .icon-large.headerlink,.rst-content h4 .btn .icon-large.headerlink,.btn .rst-content h5 .icon-large.headerlink,.rst-content h5 .btn .icon-large.headerlink,.btn .rst-content h6 .icon-large.headerlink,.rst-content h6 .btn .icon-large.headerlink,.btn .rst-content dl dt .icon-large.headerlink,.rst-content dl dt .btn .icon-large.headerlink,.nav .icon.icon-large,.nav .wy-inline-validate.wy-inline-validate-success .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-success .nav .icon-large.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-danger .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-danger .nav .icon-large.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-warning .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-warning .nav .icon-large.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-info .icon-large.wy-input-context,.wy-inline-validate.wy-inline-validate-info .nav .icon-large.wy-input-context,.nav .wy-tag-input-group 
.wy-tag .icon-large.wy-tag-remove,.wy-tag-input-group .wy-tag .nav .icon-large.wy-tag-remove,.nav .rst-content .icon-large.admonition-title,.rst-content .nav .icon-large.admonition-title,.nav .rst-content h1 .icon-large.headerlink,.rst-content h1 .nav .icon-large.headerlink,.nav .rst-content h2 .icon-large.headerlink,.rst-content h2 .nav .icon-large.headerlink,.nav .rst-content h3 .icon-large.headerlink,.rst-content h3 .nav .icon-large.headerlink,.nav .rst-content h4 .icon-large.headerlink,.rst-content h4 .nav .icon-large.headerlink,.nav .rst-content h5 .icon-large.headerlink,.rst-content h5 .nav .icon-large.headerlink,.nav .rst-content h6 .icon-large.headerlink,.rst-content h6 .nav .icon-large.headerlink,.nav .rst-content dl dt .icon-large.headerlink,.rst-content dl dt .nav .icon-large.headerlink{line-height:0.9em}.btn .icon.icon-spin,.btn .wy-inline-validate.wy-inline-validate-success .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-success .btn .icon-spin.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-danger .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-danger .btn .icon-spin.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-warning .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-warning .btn .icon-spin.wy-input-context,.btn .wy-inline-validate.wy-inline-validate-info .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-info .btn .icon-spin.wy-input-context,.btn .wy-tag-input-group .wy-tag .icon-spin.wy-tag-remove,.wy-tag-input-group .wy-tag .btn .icon-spin.wy-tag-remove,.btn .rst-content .icon-spin.admonition-title,.rst-content .btn .icon-spin.admonition-title,.btn .rst-content h1 .icon-spin.headerlink,.rst-content h1 .btn .icon-spin.headerlink,.btn .rst-content h2 .icon-spin.headerlink,.rst-content h2 .btn .icon-spin.headerlink,.btn .rst-content h3 .icon-spin.headerlink,.rst-content h3 .btn .icon-spin.headerlink,.btn .rst-content h4 
.icon-spin.headerlink,.rst-content h4 .btn .icon-spin.headerlink,.btn .rst-content h5 .icon-spin.headerlink,.rst-content h5 .btn .icon-spin.headerlink,.btn .rst-content h6 .icon-spin.headerlink,.rst-content h6 .btn .icon-spin.headerlink,.btn .rst-content dl dt .icon-spin.headerlink,.rst-content dl dt .btn .icon-spin.headerlink,.nav .icon.icon-spin,.nav .wy-inline-validate.wy-inline-validate-success .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-success .nav .icon-spin.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-danger .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-danger .nav .icon-spin.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-warning .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-warning .nav .icon-spin.wy-input-context,.nav .wy-inline-validate.wy-inline-validate-info .icon-spin.wy-input-context,.wy-inline-validate.wy-inline-validate-info .nav .icon-spin.wy-input-context,.nav .wy-tag-input-group .wy-tag .icon-spin.wy-tag-remove,.wy-tag-input-group .wy-tag .nav .icon-spin.wy-tag-remove,.nav .rst-content .icon-spin.admonition-title,.rst-content .nav .icon-spin.admonition-title,.nav .rst-content h1 .icon-spin.headerlink,.rst-content h1 .nav .icon-spin.headerlink,.nav .rst-content h2 .icon-spin.headerlink,.rst-content h2 .nav .icon-spin.headerlink,.nav .rst-content h3 .icon-spin.headerlink,.rst-content h3 .nav .icon-spin.headerlink,.nav .rst-content h4 .icon-spin.headerlink,.rst-content h4 .nav .icon-spin.headerlink,.nav .rst-content h5 .icon-spin.headerlink,.rst-content h5 .nav .icon-spin.headerlink,.nav .rst-content h6 .icon-spin.headerlink,.rst-content h6 .nav .icon-spin.headerlink,.nav .rst-content dl dt .icon-spin.headerlink,.rst-content dl dt .nav .icon-spin.headerlink{display:inline-block}.btn.icon:before,.wy-inline-validate.wy-inline-validate-success .btn.wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger 
.btn.wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .btn.wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .btn.wy-input-context:before,.wy-tag-input-group .wy-tag .btn.wy-tag-remove:before,.rst-content .btn.admonition-title:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content dl dt .btn.headerlink:before{opacity:0.5;-webkit-transition:opacity 0.05s ease-in;-moz-transition:opacity 0.05s ease-in;transition:opacity 0.05s ease-in}.btn.icon:hover:before,.wy-inline-validate.wy-inline-validate-success .btn.wy-input-context:hover:before,.wy-inline-validate.wy-inline-validate-danger .btn.wy-input-context:hover:before,.wy-inline-validate.wy-inline-validate-warning .btn.wy-input-context:hover:before,.wy-inline-validate.wy-inline-validate-info .btn.wy-input-context:hover:before,.wy-tag-input-group .wy-tag .btn.wy-tag-remove:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content dl dt .btn.headerlink:hover:before{opacity:1}.btn-mini .icon:before,.btn-mini .wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .btn-mini .wy-input-context:before,.btn-mini .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger .btn-mini .wy-input-context:before,.btn-mini .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .btn-mini .wy-input-context:before,.btn-mini 
.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .btn-mini .wy-input-context:before,.btn-mini .wy-tag-input-group .wy-tag .wy-tag-remove:before,.wy-tag-input-group .wy-tag .btn-mini .wy-tag-remove:before,.btn-mini .rst-content .admonition-title:before,.rst-content .btn-mini .admonition-title:before,.btn-mini .rst-content h1 .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.btn-mini .rst-content dl dt .headerlink:before,.rst-content dl dt .btn-mini .headerlink:before{font-size:14px;vertical-align:-15%}li .icon,li .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success li .wy-input-context,li .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger li .wy-input-context,li .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning li .wy-input-context,li .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info li .wy-input-context,li .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag li .wy-tag-remove,li .rst-content .admonition-title,.rst-content li .admonition-title,li .rst-content h1 .headerlink,.rst-content h1 li .headerlink,li .rst-content h2 .headerlink,.rst-content h2 li .headerlink,li .rst-content h3 .headerlink,.rst-content h3 li .headerlink,li .rst-content h4 .headerlink,.rst-content h4 li .headerlink,li 
.rst-content h5 .headerlink,.rst-content h5 li .headerlink,li .rst-content h6 .headerlink,.rst-content h6 li .headerlink,li .rst-content dl dt .headerlink,.rst-content dl dt li .headerlink{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon,ul.icons li .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success ul.icons li .wy-input-context,ul.icons li .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger ul.icons li .wy-input-context,ul.icons li .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning ul.icons li .wy-input-context,ul.icons li .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info ul.icons li .wy-input-context,ul.icons li .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag ul.icons li .wy-tag-remove,ul.icons li .rst-content .admonition-title,.rst-content ul.icons li .admonition-title,ul.icons li .rst-content h1 .headerlink,.rst-content h1 ul.icons li .headerlink,ul.icons li .rst-content h2 .headerlink,.rst-content h2 ul.icons li .headerlink,ul.icons li .rst-content h3 .headerlink,.rst-content h3 ul.icons li .headerlink,ul.icons li .rst-content h4 .headerlink,.rst-content h4 ul.icons li .headerlink,ul.icons li .rst-content h5 .headerlink,.rst-content h5 ul.icons li .headerlink,ul.icons li .rst-content h6 .headerlink,.rst-content h6 ul.icons li .headerlink,ul.icons li .rst-content dl dt .headerlink,.rst-content dl dt ul.icons li .headerlink{width:0.8em}ul.icons li .icon-large:before,ul.icons li 
.icon-large:before{vertical-align:baseline}.icon-glass:before{content:"\f000"}.icon-music:before{content:"\f001"}.icon-search:before{content:"\f002"}.icon-envelope-alt:before{content:"\f003"}.icon-heart:before{content:"\f004"}.icon-star:before{content:"\f005"}.icon-star-empty:before{content:"\f006"}.icon-user:before{content:"\f007"}.icon-film:before{content:"\f008"}.icon-th-large:before{content:"\f009"}.icon-th:before{content:"\f00a"}.icon-th-list:before{content:"\f00b"}.icon-ok:before{content:"\f00c"}.icon-remove:before,.wy-tag-input-group .wy-tag .wy-tag-remove:before{content:"\f00d"}.icon-zoom-in:before{content:"\f00e"}.icon-zoom-out:before{content:"\f010"}.icon-power-off:before,.icon-off:before{content:"\f011"}.icon-signal:before{content:"\f012"}.icon-gear:before,.icon-cog:before{content:"\f013"}.icon-trash:before{content:"\f014"}.icon-home:before{content:"\f015"}.icon-file-alt:before{content:"\f016"}.icon-time:before{content:"\f017"}.icon-road:before{content:"\f018"}.icon-download-alt:before{content:"\f019"}.icon-download:before{content:"\f01a"}.icon-upload:before{content:"\f01b"}.icon-inbox:before{content:"\f01c"}.icon-play-circle:before{content:"\f01d"}.icon-rotate-right:before,.icon-repeat:before{content:"\f01e"}.icon-refresh:before{content:"\f021"}.icon-list-alt:before{content:"\f022"}.icon-lock:before{content:"\f023"}.icon-flag:before{content:"\f024"}.icon-headphones:before{content:"\f025"}.icon-volume-off:before{content:"\f026"}.icon-volume-down:before{content:"\f027"}.icon-volume-up:before{content:"\f028"}.icon-qrcode:before{content:"\f029"}.icon-barcode:before{content:"\f02a"}.icon-tag:before{content:"\f02b"}.icon-tags:before{content:"\f02c"}.icon-book:before{content:"\f02d"}.icon-bookmark:before{content:"\f02e"}.icon-print:before{content:"\f02f"}.icon-camera:before{content:"\f030"}.icon-font:before{content:"\f031"}.icon-bold:before{content:"\f032"}.icon-italic:before{content:"\f033"}.icon-text-height:before{content:"\f034"}.icon-text-width:before{conte
nt:"\f035"}.icon-align-left:before{content:"\f036"}.icon-align-center:before{content:"\f037"}.icon-align-right:before{content:"\f038"}.icon-align-justify:before{content:"\f039"}.icon-list:before{content:"\f03a"}.icon-indent-left:before{content:"\f03b"}.icon-indent-right:before{content:"\f03c"}.icon-facetime-video:before{content:"\f03d"}.icon-picture:before{content:"\f03e"}.icon-pencil:before{content:"\f040"}.icon-map-marker:before{content:"\f041"}.icon-adjust:before{content:"\f042"}.icon-tint:before{content:"\f043"}.icon-edit:before{content:"\f044"}.icon-share:before{content:"\f045"}.icon-check:before{content:"\f046"}.icon-move:before{content:"\f047"}.icon-step-backward:before{content:"\f048"}.icon-fast-backward:before{content:"\f049"}.icon-backward:before{content:"\f04a"}.icon-play:before{content:"\f04b"}.icon-pause:before{content:"\f04c"}.icon-stop:before{content:"\f04d"}.icon-forward:before{content:"\f04e"}.icon-fast-forward:before{content:"\f050"}.icon-step-forward:before{content:"\f051"}.icon-eject:before{content:"\f052"}.icon-chevron-left:before{content:"\f053"}.icon-chevron-right:before{content:"\f054"}.icon-plus-sign:before{content:"\f055"}.icon-minus-sign:before{content:"\f056"}.icon-remove-sign:before,.wy-inline-validate.wy-inline-validate-danger 
.wy-input-context:before{content:"\f057"}.icon-ok-sign:before{content:"\f058"}.icon-question-sign:before{content:"\f059"}.icon-info-sign:before{content:"\f05a"}.icon-screenshot:before{content:"\f05b"}.icon-remove-circle:before{content:"\f05c"}.icon-ok-circle:before{content:"\f05d"}.icon-ban-circle:before{content:"\f05e"}.icon-arrow-left:before{content:"\f060"}.icon-arrow-right:before{content:"\f061"}.icon-arrow-up:before{content:"\f062"}.icon-arrow-down:before{content:"\f063"}.icon-mail-forward:before,.icon-share-alt:before{content:"\f064"}.icon-resize-full:before{content:"\f065"}.icon-resize-small:before{content:"\f066"}.icon-plus:before{content:"\f067"}.icon-minus:before{content:"\f068"}.icon-asterisk:before{content:"\f069"}.icon-exclamation-sign:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.rst-content .admonition-title:before{content:"\f06a"}.icon-gift:before{content:"\f06b"}.icon-leaf:before{content:"\f06c"}.icon-fire:before{content:"\f06d"}.icon-eye-open:before{content:"\f06e"}.icon-eye-close:before{content:"\f070"}.icon-warning-sign:before{content:"\f071"}.icon-plane:before{content:"\f072"}.icon-calendar:before{content:"\f073"}.icon-random:before{content:"\f074"}.icon-comment:before{content:"\f075"}.icon-magnet:before{content:"\f076"}.icon-chevron-up:before{content:"\f077"}.icon-chevron-down:before{content:"\f078"}.icon-retweet:before{content:"\f079"}.icon-shopping-cart:before{content:"\f07a"}.icon-folder-close:before{content:"\f07b"}.icon-folder-open:before{content:"\f07c"}.icon-resize-vertical:before{content:"\f07d"}.icon-resize-horizontal:before{content:"\f07e"}.icon-bar-chart:before{content:"\f080"}.icon-twitter-sign:before{content:"\f081"}.icon-facebook-sign:before{content:"\f082"}.icon-camera-retro:before{content:"\f083"}.icon-key:before{content:"\f084"}.icon-gears:before,.icon-cogs:before{content:"\f085"}.icon-comments:before{content:"\f086"}.icon-thu
mbs-up-alt:before{content:"\f087"}.icon-thumbs-down-alt:before{content:"\f088"}.icon-star-half:before{content:"\f089"}.icon-heart-empty:before{content:"\f08a"}.icon-signout:before{content:"\f08b"}.icon-linkedin-sign:before{content:"\f08c"}.icon-pushpin:before{content:"\f08d"}.icon-external-link:before{content:"\f08e"}.icon-signin:before{content:"\f090"}.icon-trophy:before{content:"\f091"}.icon-github-sign:before{content:"\f092"}.icon-upload-alt:before{content:"\f093"}.icon-lemon:before{content:"\f094"}.icon-phone:before{content:"\f095"}.icon-unchecked:before,.icon-check-empty:before{content:"\f096"}.icon-bookmark-empty:before{content:"\f097"}.icon-phone-sign:before{content:"\f098"}.icon-twitter:before{content:"\f099"}.icon-facebook:before{content:"\f09a"}.icon-github:before{content:"\f09b"}.icon-unlock:before{content:"\f09c"}.icon-credit-card:before{content:"\f09d"}.icon-rss:before{content:"\f09e"}.icon-hdd:before{content:"\f0a0"}.icon-bullhorn:before{content:"\f0a1"}.icon-bell:before{content:"\f0a2"}.icon-certificate:before{content:"\f0a3"}.icon-hand-right:before{content:"\f0a4"}.icon-hand-left:before{content:"\f0a5"}.icon-hand-up:before{content:"\f0a6"}.icon-hand-down:before{content:"\f0a7"}.icon-circle-arrow-left:before{content:"\f0a8"}.icon-circle-arrow-right:before{content:"\f0a9"}.icon-circle-arrow-up:before{content:"\f0aa"}.icon-circle-arrow-down:before{content:"\f0ab"}.icon-globe:before{content:"\f0ac"}.icon-wrench:before{content:"\f0ad"}.icon-tasks:before{content:"\f0ae"}.icon-filter:before{content:"\f0b0"}.icon-briefcase:before{content:"\f0b1"}.icon-fullscreen:before{content:"\f0b2"}.icon-group:before{content:"\f0c0"}.icon-link:before{content:"\f0c1"}.icon-cloud:before{content:"\f0c2"}.icon-beaker:before{content:"\f0c3"}.icon-cut:before{content:"\f0c4"}.icon-copy:before{content:"\f0c5"}.icon-paperclip:before,.icon-paper-clip:before{content:"\f0c6"}.icon-save:before{content:"\f0c7"}.icon-sign-blank:before{content:"\f0c8"}.icon-reorder:before{content:"\f0c9"
}.icon-list-ul:before{content:"\f0ca"}.icon-list-ol:before{content:"\f0cb"}.icon-strikethrough:before{content:"\f0cc"}.icon-underline:before{content:"\f0cd"}.icon-table:before{content:"\f0ce"}.icon-magic:before{content:"\f0d0"}.icon-truck:before{content:"\f0d1"}.icon-pinterest:before{content:"\f0d2"}.icon-pinterest-sign:before{content:"\f0d3"}.icon-google-plus-sign:before{content:"\f0d4"}.icon-google-plus:before{content:"\f0d5"}.icon-money:before{content:"\f0d6"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.icon-columns:before{content:"\f0db"}.icon-sort:before{content:"\f0dc"}.icon-sort-down:before{content:"\f0dd"}.icon-sort-up:before{content:"\f0de"}.icon-envelope:before{content:"\f0e0"}.icon-linkedin:before{content:"\f0e1"}.icon-rotate-left:before,.icon-undo:before{content:"\f0e2"}.icon-legal:before{content:"\f0e3"}.icon-dashboard:before{content:"\f0e4"}.icon-comment-alt:before{content:"\f0e5"}.icon-comments-alt:before{content:"\f0e6"}.icon-bolt:before{content:"\f0e7"}.icon-sitemap:before{content:"\f0e8"}.icon-umbrella:before{content:"\f0e9"}.icon-paste:before{content:"\f0ea"}.icon-lightbulb:before{content:"\f0eb"}.icon-exchange:before{content:"\f0ec"}.icon-cloud-download:before{content:"\f0ed"}.icon-cloud-upload:before{content:"\f0ee"}.icon-user-md:before{content:"\f0f0"}.icon-stethoscope:before{content:"\f0f1"}.icon-suitcase:before{content:"\f0f2"}.icon-bell-alt:before{content:"\f0f3"}.icon-coffee:before{content:"\f0f4"}.icon-food:before{content:"\f0f5"}.icon-file-text-alt:before{content:"\f0f6"}.icon-building:before{content:"\f0f7"}.icon-hospital:before{content:"\f0f8"}.icon-ambulance:before{content:"\f0f9"}.icon-medkit:before{content:"\f0fa"}.icon-fighter-jet:before{content:"\f0fb"}.icon-beer:before{content:"\f0fc"}.icon-h-sign:before{content:"\f0fd"}.icon-plus-sign-alt:before{content:"\f0fe"}.icon-double-angle-left:before{content:"\f100"}.icon-dou
ble-angle-right:before{content:"\f101"}.icon-double-angle-up:before{content:"\f102"}.icon-double-angle-down:before{content:"\f103"}.icon-angle-left:before{content:"\f104"}.icon-angle-right:before{content:"\f105"}.icon-angle-up:before{content:"\f106"}.icon-angle-down:before{content:"\f107"}.icon-desktop:before{content:"\f108"}.icon-laptop:before{content:"\f109"}.icon-tablet:before{content:"\f10a"}.icon-mobile-phone:before{content:"\f10b"}.icon-circle-blank:before{content:"\f10c"}.icon-quote-left:before{content:"\f10d"}.icon-quote-right:before{content:"\f10e"}.icon-spinner:before{content:"\f110"}.icon-circle:before{content:"\f111"}.icon-mail-reply:before,.icon-reply:before{content:"\f112"}.icon-github-alt:before{content:"\f113"}.icon-folder-close-alt:before{content:"\f114"}.icon-folder-open-alt:before{content:"\f115"}.icon-expand-alt:before{content:"\f116"}.icon-collapse-alt:before{content:"\f117"}.icon-smile:before{content:"\f118"}.icon-frown:before{content:"\f119"}.icon-meh:before{content:"\f11a"}.icon-gamepad:before{content:"\f11b"}.icon-keyboard:before{content:"\f11c"}.icon-flag-alt:before{content:"\f11d"}.icon-flag-checkered:before{content:"\f11e"}.icon-terminal:before{content:"\f120"}.icon-code:before{content:"\f121"}.icon-reply-all:before{content:"\f122"}.icon-mail-reply-all:before{content:"\f122"}.icon-star-half-full:before,.icon-star-half-empty:before{content:"\f123"}.icon-location-arrow:before{content:"\f124"}.icon-crop:before{content:"\f125"}.icon-code-fork:before{content:"\f126"}.icon-unlink:before{content:"\f127"}.icon-question:before{content:"\f128"}.icon-info:before{content:"\f129"}.icon-exclamation:before{content:"\f12a"}.icon-superscript:before{content:"\f12b"}.icon-subscript:before{content:"\f12c"}.icon-eraser:before{content:"\f12d"}.icon-puzzle-piece:before{content:"\f12e"}.icon-microphone:before{content:"\f130"}.icon-microphone-off:before{content:"\f131"}.icon-shield:before{content:"\f132"}.icon-calendar-empty:before{content:"\f133"}.icon-fire-exti
nguisher:before{content:"\f134"}.icon-rocket:before{content:"\f135"}.icon-maxcdn:before{content:"\f136"}.icon-chevron-sign-left:before{content:"\f137"}.icon-chevron-sign-right:before{content:"\f138"}.icon-chevron-sign-up:before{content:"\f139"}.icon-chevron-sign-down:before{content:"\f13a"}.icon-html5:before{content:"\f13b"}.icon-css3:before{content:"\f13c"}.icon-anchor:before{content:"\f13d"}.icon-unlock-alt:before{content:"\f13e"}.icon-bullseye:before{content:"\f140"}.icon-ellipsis-horizontal:before{content:"\f141"}.icon-ellipsis-vertical:before{content:"\f142"}.icon-rss-sign:before{content:"\f143"}.icon-play-sign:before{content:"\f144"}.icon-ticket:before{content:"\f145"}.icon-minus-sign-alt:before{content:"\f146"}.icon-check-minus:before{content:"\f147"}.icon-level-up:before{content:"\f148"}.icon-level-down:before{content:"\f149"}.icon-check-sign:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:"\f14a"}.icon-edit-sign:before{content:"\f14b"}.icon-external-link-sign:before{content:"\f14c"}.icon-share-sign:before{content:"\f14d"}.icon-compass:before{content:"\f14e"}.icon-collapse:before{content:"\f150"}.icon-collapse-top:before{content:"\f151"}.icon-expand:before{content:"\f152"}.icon-euro:before,.icon-eur:before{content:"\f153"}.icon-gbp:before{content:"\f154"}.icon-dollar:before,.icon-usd:before{content:"\f155"}.icon-rupee:before,.icon-inr:before{content:"\f156"}.icon-yen:before,.icon-jpy:before{content:"\f157"}.icon-renminbi:before,.icon-cny:before{content:"\f158"}.icon-won:before,.icon-krw:before{content:"\f159"}.icon-bitcoin:before,.icon-btc:before{content:"\f15a"}.icon-file:before{content:"\f15b"}.icon-file-text:before{content:"\f15c"}.icon-sort-by-alphabet:before{content:"\f15d"}.icon-sort-by-alphabet-alt:before{content:"\f15e"}.icon-sort-by-attributes:before{content:"\f160"}.icon-sort-by-attributes-alt:before{content:"\f161"}.icon-sort-by-order:before{content:"\f162"}.icon-sort-by-order-alt:before{content:"\f163"}.icon
-thumbs-up:before{content:"\f164"}.icon-thumbs-down:before{content:"\f165"}.icon-youtube-sign:before{content:"\f166"}.icon-youtube:before{content:"\f167"}.icon-xing:before{content:"\f168"}.icon-xing-sign:before{content:"\f169"}.icon-youtube-play:before{content:"\f16a"}.icon-dropbox:before{content:"\f16b"}.icon-stackexchange:before{content:"\f16c"}.icon-instagram:before{content:"\f16d"}.icon-flickr:before{content:"\f16e"}.icon-adn:before{content:"\f170"}.icon-bitbucket:before{content:"\f171"}.icon-bitbucket-sign:before{content:"\f172"}.icon-tumblr:before{content:"\f173"}.icon-tumblr-sign:before{content:"\f174"}.icon-long-arrow-down:before{content:"\f175"}.icon-long-arrow-up:before{content:"\f176"}.icon-long-arrow-left:before{content:"\f177"}.icon-long-arrow-right:before{content:"\f178"}.icon-apple:before{content:"\f179"}.icon-windows:before{content:"\f17a"}.icon-android:before{content:"\f17b"}.icon-linux:before{content:"\f17c"}.icon-dribbble:before{content:"\f17d"}.icon-skype:before{content:"\f17e"}.icon-foursquare:before{content:"\f180"}.icon-trello:before{content:"\f181"}.icon-female:before{content:"\f182"}.icon-male:before{content:"\f183"}.icon-gittip:before{content:"\f184"}.icon-sun:before{content:"\f185"}.icon-moon:before{content:"\f186"}.icon-archive:before{content:"\f187"}.icon-bug:before{content:"\f188"}.icon-vk:before{content:"\f189"}.icon-weibo:before{content:"\f18a"}.icon-renren:before{content:"\f18b"}.wy-alert,.rst-content .note,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .warning,.rst-content .seealso{padding:12px;line-height:24px;margin-bottom:24px}.wy-alert-title,.rst-content .admonition-title{color:#fff;font-weight:bold;display:block;color:#fff;background:transparent;margin:-12px;padding:6px 12px;margin-bottom:12px}.wy-alert.wy-alert-danger,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.attention,.rst-content 
.wy-alert-danger.caution,.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.rst-content .wy-alert-danger.seealso{background:#fdf3f2}.wy-alert.wy-alert-danger .wy-alert-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .danger .wy-alert-title,.rst-content .error .wy-alert-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .danger .admonition-title,.rst-content .error .admonition-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.seealso .admonition-title{background:#f29f97}.wy-alert.wy-alert-warning,.rst-content .wy-alert-warning.note,.rst-content .attention,.rst-content .caution,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.tip,.rst-content .warning,.rst-content .wy-alert-warning.seealso{background:#ffedcc}.wy-alert.wy-alert-warning .wy-alert-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .attention .wy-alert-title,.rst-content .caution 
.wy-alert-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .attention .admonition-title,.rst-content .caution .admonition-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .warning .admonition-title,.rst-content .wy-alert-warning.seealso .admonition-title{background:#f0b37e}.wy-alert.wy-alert-info,.rst-content .note,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content .wy-alert-info.warning,.rst-content .seealso{background:#e7f2fa}.wy-alert.wy-alert-info .wy-alert-title,.rst-content .note .wy-alert-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .seealso .wy-alert-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.rst-content 
.wy-alert.wy-alert-info .admonition-title,.rst-content .note .admonition-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .seealso .admonition-title{background:#6ab0de}.wy-alert.wy-alert-success,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.warning,.rst-content .wy-alert-success.seealso{background:#dbfaf4}.wy-alert.wy-alert-success .wy-alert-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .hint .wy-alert-title,.rst-content .important .wy-alert-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .hint .admonition-title,.rst-content .important .admonition-title,.rst-content .tip .admonition-title,.rst-content 
.wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.seealso .admonition-title{background:#1abc9c}.wy-alert.wy-alert-neutral,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content .wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.rst-content .wy-alert-neutral.seealso{background:#f3f6f6}.wy-alert.wy-alert-neutral strong,.rst-content .wy-alert-neutral.note strong,.rst-content .wy-alert-neutral.attention strong,.rst-content .wy-alert-neutral.caution strong,.rst-content .wy-alert-neutral.danger strong,.rst-content .wy-alert-neutral.error strong,.rst-content .wy-alert-neutral.hint strong,.rst-content .wy-alert-neutral.important strong,.rst-content .wy-alert-neutral.tip strong,.rst-content .wy-alert-neutral.warning strong,.rst-content .wy-alert-neutral.seealso strong{color:#404040}.wy-alert.wy-alert-neutral a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.rst-content .wy-alert-neutral.seealso a{color:#2980b9}.wy-tray-container{position:fixed;top:-50px;left:0;width:100%;-webkit-transition:top 0.2s ease-in;-moz-transition:top 0.2s ease-in;transition:top 0.2s ease-in}.wy-tray-container.on{top:0}.wy-tray-container li{display:none;width:100%;background:#343131;padding:12px 24px;color:#fff;margin-bottom:6px;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,0.1),0px -1px 2px -1px rgba(255,255,255,0.5) inset}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container 
li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container li.wy-tray-item-danger{background:#e74c3c}.btn{display:inline-block;*display:inline;zoom:1;line-height:normal;white-space:nowrap;vertical-align:baseline;text-align:center;cursor:pointer;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;font-size:100%;padding:6px 12px;color:#fff;border:1px solid rgba(0,0,0,0.1);border-bottom:solid 3px rgba(0,0,0,0.1);background-color:#27ae60;text-decoration:none;font-weight:500;box-shadow:0px 1px 2px -1px rgba(255,255,255,0.5) inset;-webkit-transition:all 0.1s linear;-moz-transition:all 0.1s linear;transition:all 0.1s linear;outline-none:false}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;color:#fff;outline:0}.btn:active{border-top:solid 3px rgba(0,0,0,0.1);border-bottom:solid 1px rgba(0,0,0,0.1);box-shadow:0px 1px 2px -1px rgba(0,0,0,0.5) inset}.btn[disabled]{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn-disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn-disabled:hover,.btn-disabled:focus,.btn-disabled:active{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9 !important}.btn-info:hover{background-color:#2e8ece !important}.btn-neutral{background-color:#f3f6f6 !important;color:#404040 !important}.btn-neutral:hover{background-color:#e5ebeb !important;color:#404040}.btn-danger{background-color:#e74c3c 
!important}.btn-danger:hover{background-color:#ea6153 !important}.btn-warning{background-color:#e67e22 !important}.btn-warning:hover{background-color:#e98b39 !important}.btn-invert{background-color:#343131}.btn-invert:hover{background-color:#413d3d !important}.btn-link{background-color:transparent !important;color:#2980b9;border-color:transparent}.btn-link:hover{background-color:transparent !important;color:#409ad5;border-color:transparent}.btn-link:active{background-color:transparent !important;border-color:transparent;border-top:solid 1px transparent;border-bottom:solid 3px transparent}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:before,.wy-btn-group:after{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown:hover .wy-dropdown-menu{display:block}.wy-dropdown .caret:after{font-family:fontawesome-webfont;content:"\f0d7";font-size:70%}.wy-dropdown-menu{position:absolute;top:100%;left:0;display:none;float:left;min-width:100%;background:#fcfcfc;z-index:100;border:solid 1px #cfd7dd;box-shadow:0 5px 5px 0 rgba(0,0,0,0.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:solid 1px #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search input[type="search"]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu 
a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned input,.wy-form-aligned textarea,.wy-form-aligned select,.wy-form-aligned .wy-help-inline,.wy-form-aligned label{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:0.5em 1em 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:0.5em}fieldset{border:0;margin:0;padding:0}legend{display:block;width:100%;border:0;padding:0;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label{display:block;margin:0 0 0.3125em 0;color:#999;font-size:90%}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button{-webkit-appearance:button;cursor:pointer;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer;*overflow:visible}input[type="text"],input[type="password"],input[type="email"],input[type="url"],input[type="date"],input[type="month"],input[type="time"],input[type="datetime"],input[type="datetime-local"],input[type="week"],input[type="number"],input[type="search"],input[type="tel"],input[type="color"]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;box-shadow:inset 0 1px 
3px #ddd;border-radius:0;-webkit-transition:border 0.3s linear;-moz-transition:border 0.3s linear;transition:border 0.3s linear}input[type="datetime-local"]{padding:0.34375em 0.625em}input[disabled]{cursor:default}input[type="checkbox"],input[type="radio"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0;margin-right:0.3125em;*height:13px;*width:13px}input[type="search"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}input[type="text"]:focus,input[type="password"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus{outline:0;outline:thin dotted \9;border-color:#2980b9}input.no-focus:focus{border-color:#ccc !important}input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type="text"][disabled],input[type="password"][disabled],input[type="email"][disabled],input[type="url"][disabled],input[type="date"][disabled],input[type="month"][disabled],input[type="time"][disabled],input[type="datetime"][disabled],input[type="datetime-local"][disabled],input[type="week"][disabled],input[type="number"][disabled],input[type="search"][disabled],input[type="tel"][disabled],input[type="color"][disabled]{cursor:not-allowed;background-color:#f3f6f6;color:#cad2d3}input:focus:invalid,textarea:focus:invalid,select:focus:invalid{color:#e74c3c;border:1px solid 
#e74c3c}input:focus:invalid:focus,textarea:focus:invalid:focus,select:focus:invalid:focus{border-color:#e9322d}input[type="file"]:focus:invalid:focus,input[type="radio"]:focus:invalid:focus,input[type="checkbox"]:focus:invalid:focus{outline-color:#e9322d}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%}select,textarea{padding:0.5em 0.625em;display:inline-block;border:1px solid #ccc;font-size:0.8em;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border 0.3s linear;-moz-transition:border 0.3s linear;transition:border 0.3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#fff;color:#cad2d3;border-color:transparent}.wy-checkbox,.wy-radio{margin:0.5em 0;color:#404040 !important;display:block}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{padding:6px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:solid 1px #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:0.5em 0.625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info 
.wy-input-context{color:#2980b9}.wy-control-group{margin-bottom:24px;*zoom:1}.wy-control-group:before,.wy-control-group:after{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type="text"],.wy-control-group.wy-control-group-error input[type="password"],.wy-control-group.wy-control-group-error input[type="email"],.wy-control-group.wy-control-group-error input[type="url"],.wy-control-group.wy-control-group-error input[type="date"],.wy-control-group.wy-control-group-error input[type="month"],.wy-control-group.wy-control-group-error input[type="time"],.wy-control-group.wy-control-group-error input[type="datetime"],.wy-control-group.wy-control-group-error input[type="datetime-local"],.wy-control-group.wy-control-group-error input[type="week"],.wy-control-group.wy-control-group-error input[type="number"],.wy-control-group.wy-control-group-error input[type="search"],.wy-control-group.wy-control-group-error input[type="tel"],.wy-control-group.wy-control-group-error input[type="color"]{border:solid 2px #e74c3c}.wy-control-group.wy-control-group-error textarea{border:solid 2px #e74c3c}.wy-control-group.fluid-input input[type="text"],.wy-control-group.fluid-input input[type="password"],.wy-control-group.fluid-input input[type="email"],.wy-control-group.fluid-input input[type="url"],.wy-control-group.fluid-input input[type="date"],.wy-control-group.fluid-input input[type="month"],.wy-control-group.fluid-input input[type="time"],.wy-control-group.fluid-input input[type="datetime"],.wy-control-group.fluid-input input[type="datetime-local"],.wy-control-group.fluid-input input[type="week"],.wy-control-group.fluid-input input[type="number"],.wy-control-group.fluid-input input[type="search"],.wy-control-group.fluid-input input[type="tel"],.wy-control-group.fluid-input 
input[type="color"]{width:100%}.wy-form-message-inline{display:inline-block;padding-left:0.3em;color:#666;vertical-align:middle;font-size:90%}.wy-form-message{display:block;color:#ccc;font-size:70%;margin-top:0.3125em;font-style:italic}.wy-tag-input-group{padding:4px 4px 0px 4px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border 0.3s linear;-moz-transition:border 0.3s linear;transition:border 0.3s linear}.wy-tag-input-group .wy-tag{display:inline-block;background-color:rgba(0,0,0,0.1);padding:0.5em 0.625em;border-radius:2px;position:relative;margin-bottom:4px}.wy-tag-input-group .wy-tag .wy-tag-remove{color:#ccc;margin-left:5px}.wy-tag-input-group .wy-tag .wy-tag-remove:hover{color:#e74c3c}.wy-tag-input-group label{margin-left:5px;display:inline-block;margin-bottom:0}.wy-tag-input-group input{border:none;font-size:100%;margin-bottom:4px;box-shadow:none}.wy-form-upload{border:solid 1px #ccc;border-bottom:solid 3px #ccc;background-color:#fff;padding:24px;display:inline-block;text-align:center;cursor:pointer;color:#404040;-webkit-transition:border-color 0.1s ease-in;-moz-transition:border-color 0.1s ease-in;transition:border-color 0.1s ease-in;*zoom:1}.wy-form-upload:before,.wy-form-upload:after{display:table;content:""}.wy-form-upload:after{clear:both}@media screen and (max-width: 480px){.wy-form-upload{width:100%}}.wy-form-upload .image-drop{display:none}.wy-form-upload .image-desktop{display:none}.wy-form-upload .image-loading{display:none}.wy-form-upload .wy-form-upload-icon{display:block;font-size:32px;color:#b3b3b3}.wy-form-upload .image-drop .wy-form-upload-icon{color:#27ae60}.wy-form-upload p{font-size:90%}.wy-form-upload .wy-form-upload-image{float:left;margin-right:24px}@media screen and (max-width: 480px){.wy-form-upload .wy-form-upload-image{width:100%;margin-bottom:24px}}.wy-form-upload 
img{max-width:125px;max-height:125px;opacity:0.9;-webkit-transition:opacity 0.1s ease-in;-moz-transition:opacity 0.1s ease-in;transition:opacity 0.1s ease-in}.wy-form-upload .wy-form-upload-content{float:left}@media screen and (max-width: 480px){.wy-form-upload .wy-form-upload-content{width:100%}}.wy-form-upload:hover{border-color:#b3b3b3;color:#404040}.wy-form-upload:hover .image-desktop{display:block}.wy-form-upload:hover .image-drag{display:none}.wy-form-upload:hover img{opacity:1}.wy-form-upload:active{border-top:solid 3px #ccc;border-bottom:solid 1px #ccc}.wy-form-upload.wy-form-upload-big{width:100%;text-align:center;padding:72px}.wy-form-upload.wy-form-upload-big .wy-form-upload-content{float:none}.wy-form-upload.wy-form-upload-file p{margin-bottom:0}.wy-form-upload.wy-form-upload-file .wy-form-upload-icon{display:inline-block;font-size:inherit}.wy-form-upload.wy-form-upload-drop{background-color:#ddf7e8}.wy-form-upload.wy-form-upload-drop .image-drop{display:block}.wy-form-upload.wy-form-upload-drop .image-desktop{display:none}.wy-form-upload.wy-form-upload-drop .image-drag{display:none}.wy-form-upload.wy-form-upload-loading .image-drag{display:none}.wy-form-upload.wy-form-upload-loading .image-desktop{display:none}.wy-form-upload.wy-form-upload-loading .image-loading{display:block}.wy-form-upload.wy-form-upload-loading .wy-input-prefix{display:none}.wy-form-upload.wy-form-upload-loading 
p{margin-bottom:0}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}.wy-form-gallery-manage{margin-left:-12px;margin-right:-12px}.wy-form-gallery-manage li{float:left;padding:12px;width:20%;cursor:pointer}@media screen and (max-width: 768px){.wy-form-gallery-manage li{width:25%}}@media screen and (max-width: 480px){.wy-form-gallery-manage li{width:50%}}.wy-form-gallery-manage li:active{cursor:move}.wy-form-gallery-manage li>a{padding:12px;background-color:#fff;border:solid 1px #e1e4e5;border-bottom:solid 3px #e1e4e5;display:inline-block;-webkit-transition:all 0.1s ease-in;-moz-transition:all 0.1s ease-in;transition:all 0.1s ease-in}.wy-form-gallery-manage li>a:active{border:solid 1px #ccc;border-top:solid 3px #ccc}.wy-form-gallery-manage img{width:100%;-webkit-transition:all 0.05s 
ease-in;-moz-transition:all 0.05s ease-in;transition:all 0.05s ease-in}li.wy-form-gallery-edit{position:relative;color:#fff;padding:24px;width:100%;display:block;background-color:#343131;border-radius:4px}li.wy-form-gallery-edit .arrow{position:absolute;display:block;top:-50px;left:50%;margin-left:-25px;z-index:500;height:0;width:0;border-color:transparent;border-style:solid;border-width:25px;border-bottom-color:#343131}@media only screen and (max-width: 480px){.wy-form button[type="submit"]{margin:0.7em 0 0}.wy-form input[type="text"],.wy-form input[type="password"],.wy-form input[type="email"],.wy-form input[type="url"],.wy-form input[type="date"],.wy-form input[type="month"],.wy-form input[type="time"],.wy-form input[type="datetime"],.wy-form input[type="datetime-local"],.wy-form input[type="week"],.wy-form input[type="number"],.wy-form input[type="search"],.wy-form input[type="tel"],.wy-form input[type="color"]{margin-bottom:0.3em;display:block}.wy-form label{margin-bottom:0.3em;display:block}.wy-form input[type="password"],.wy-form input[type="email"],.wy-form input[type="url"],.wy-form input[type="date"],.wy-form input[type="month"],.wy-form input[type="time"],.wy-form input[type="datetime"],.wy-form input[type="datetime-local"],.wy-form input[type="week"],.wy-form input[type="number"],.wy-form input[type="search"],.wy-form input[type="tel"],.wy-form input[type="color"]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:0.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-controls{margin:1.5em 0 0 0}.wy-form .wy-help-inline,.wy-form-message-inline,.wy-form-message{display:block;font-size:80%;padding:0.2em 0 0.8em}}@media screen and (max-width: 768px){.tablet-hide{display:none}}@media screen and (max-width: 
480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.wy-grid-one-col{*zoom:1;max-width:68em;margin-left:auto;margin-right:auto;max-width:1066px;margin-top:1.618em}.wy-grid-one-col:before,.wy-grid-one-col:after{display:table;content:""}.wy-grid-one-col:after{clear:both}.wy-grid-one-col section{display:block;float:left;margin-right:2.35765%;width:100%;background:#fcfcfc;padding:1.618em;margin-right:0}.wy-grid-one-col section:last-child{margin-right:0}.wy-grid-index-card{*zoom:1;max-width:68em;margin-left:auto;margin-right:auto;max-width:460px;margin-top:1.618em;background:#fcfcfc;padding:1.618em}.wy-grid-index-card:before,.wy-grid-index-card:after{display:table;content:""}.wy-grid-index-card:after{clear:both}.wy-grid-index-card header,.wy-grid-index-card section,.wy-grid-index-card aside{display:block;float:left;margin-right:2.35765%;width:100%}.wy-grid-index-card header:last-child,.wy-grid-index-card section:last-child,.wy-grid-index-card aside:last-child{margin-right:0}.wy-grid-index-card.twocol{max-width:768px}.wy-grid-index-card.twocol section{display:block;float:left;margin-right:2.35765%;width:48.82117%}.wy-grid-index-card.twocol section:last-child{margin-right:0}.wy-grid-index-card.twocol aside{display:block;float:left;margin-right:2.35765%;width:48.82117%}.wy-grid-index-card.twocol aside:last-child{margin-right:0}.wy-grid-search-filter{*zoom:1;max-width:68em;margin-left:auto;margin-right:auto;margin-bottom:24px}.wy-grid-search-filter:before,.wy-grid-search-filter:after{display:table;content:""}.wy-grid-search-filter:after{clear:both}.wy-grid-search-filter .wy-grid-search-filter-input{display:block;float:left;margin-right:2.35765%;width:74.41059%}.wy-grid-search-filter .wy-grid-search-filter-input:last-child{margin-right:0}.wy-grid-search-filter .wy-grid-search-filter-btn{display:block;float:left;margin-right:2.35765%;width:23.23176%}.wy-grid-search-filter 
.wy-grid-search-filter-btn:last-child{margin-right:0}.wy-table,.rst-content table.docutils,.rst-content table.field-list{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.wy-table caption,.rst-content table.docutils caption,.rst-content table.field-list caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.wy-table td,.rst-content table.docutils td,.rst-content table.field-list td,.wy-table th,.rst-content table.docutils th,.rst-content table.field-list th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.wy-table td:first-child,.rst-content table.docutils td:first-child,.rst-content table.field-list td:first-child,.wy-table th:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list th:first-child{border-left-width:0}.wy-table thead,.rst-content table.docutils thead,.rst-content table.field-list thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.wy-table thead th,.rst-content table.docutils thead th,.rst-content table.field-list thead th{font-weight:bold;border-bottom:solid 2px #e1e4e5}.wy-table td,.rst-content table.docutils td,.rst-content table.field-list td{background-color:transparent;vertical-align:middle}.wy-table td p,.rst-content table.docutils td p,.rst-content table.field-list td p{line-height:18px;margin-bottom:0}.wy-table .wy-table-cell-min,.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min{width:1%;padding-right:0}.wy-table .wy-table-cell-min input[type=checkbox],.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox],.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min 
input[type=checkbox]{margin:0}.wy-table-secondary{color:gray;font-size:90%}.wy-table-tertiary{color:gray;font-size:80%}.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td,.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td{background-color:#f3f6f6}.wy-table-backed{background-color:#f3f6f6}.wy-table-bordered-all,.rst-content table.docutils{border:1px solid #e1e4e5}.wy-table-bordered-all td,.rst-content table.docutils td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.wy-table-bordered-all tbody>tr:last-child td,.rst-content table.docutils tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px 0;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive table{margin-bottom:0 !important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}html{height:100%;overflow-x:hidden}body{font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;font-weight:normal;color:#404040;min-height:100%;overflow-x:hidden;background:#edf0f2}a{color:#2980b9;text-decoration:none}a:hover{color:#3091d1}.link-danger{color:#e74c3c}.link-danger:hover{color:#d62c1a}.text-left{text-align:left}.text-center{text-align:center}.text-right{text-align:right}h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:"Roboto Slab","ff-tisa-web-pro","Georgia",Arial,sans-serif}p{line-height:24px;margin:0;font-size:16px;margin-bottom:24px}h1{font-size:175%}h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}small{font-size:80%}code,.rst-content 
tt{white-space:nowrap;max-width:100%;background:#fff;border:solid 1px #e1e4e5;font-size:75%;padding:0 5px;font-family:"Incosolata","Consolata","Monaco",monospace;color:#e74c3c;overflow-x:auto}code.code-large,.rst-content tt.code-large{font-size:90%}.full-width{width:100%}.wy-plain-list-disc,.rst-content .section ul,.rst-content .toctree-wrapper ul{list-style:disc;line-height:24px;margin-bottom:24px}.wy-plain-list-disc li,.rst-content .section ul li,.rst-content .toctree-wrapper ul li{list-style:disc;margin-left:24px}.wy-plain-list-disc li ul,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li ul{margin-bottom:0}.wy-plain-list-disc li li,.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li{list-style:circle}.wy-plain-list-disc li li li,.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li{list-style:square}.wy-plain-list-decimal,.rst-content .section ol,.rst-content ol.arabic{list-style:decimal;line-height:24px;margin-bottom:24px}.wy-plain-list-decimal li,.rst-content .section ol li,.rst-content ol.arabic li{list-style:decimal;margin-left:24px}.wy-type-large{font-size:120%}.wy-type-normal{font-size:100%}.wy-type-small{font-size:100%}.wy-type-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22 !important}a.wy-text-warning:hover{color:#eb9950 !important}.wy-text-info{color:#2980b9 !important}a.wy-text-info:hover{color:#409ad5 !important}.wy-text-success{color:#27ae60 !important}a.wy-text-success:hover{color:#36d278 !important}.wy-text-danger{color:#e74c3c !important}a.wy-text-danger:hover{color:#ed7669 !important}.wy-text-neutral{color:#404040 !important}a.wy-text-neutral:hover{color:#595959 !important}.codeblock-example{border:1px solid #e1e4e5;border-bottom:none;padding:24px;padding-top:48px;font-weight:500;background:#fff;position:relative}.codeblock-example:after{content:"Example";position:absolute;top:0px;left:0px;background:#9b59b6;color:#fff;padding:6px 
12px}.codeblock-example.prettyprint-example-only{border:1px solid #e1e4e5;margin-bottom:24px}.codeblock,.rst-content .literal-block,div[class^='highlight']{border:1px solid #e1e4e5;padding:0px;overflow-x:auto;background:#fff;margin:1px 0 24px 0}.codeblock div[class^='highlight'],.rst-content .literal-block div[class^='highlight'],div[class^='highlight'] div[class^='highlight']{border:none;background:none;margin:0}div[class^='highlight'] td.code{width:100%}.linenodiv pre{border-right:solid 1px #e6e9ea;margin:0;padding:12px 12px;font-family:"Incosolata","Consolata","Monaco",monospace;font-size:12px;line-height:1.5;color:#d9d9d9}div[class^='highlight'] pre{white-space:pre;margin:0;padding:12px 12px;font-family:"Incosolata","Consolata","Monaco",monospace;font-size:12px;line-height:1.5;display:block;overflow:auto;color:#404040}pre.literal-block{@extends .codeblock;}@media print{.codeblock,.rst-content .literal-block,div[class^='highlight'],div[class^='highlight'] pre{white-space:pre-wrap}}.hll{background-color:#ffc;margin:0 -12px;padding:0 12px;display:block}.c{color:#998;font-style:italic}.err{color:#a61717;background-color:#e3d2d2}.k{font-weight:bold}.o{font-weight:bold}.cm{color:#998;font-style:italic}.cp{color:#999;font-weight:bold}.c1{color:#998;font-style:italic}.cs{color:#999;font-weight:bold;font-style:italic}.gd{color:#000;background-color:#fdd}.gd .x{color:#000;background-color:#faa}.ge{font-style:italic}.gr{color:#a00}.gh{color:#999}.gi{color:#000;background-color:#dfd}.gi 
.x{color:#000;background-color:#afa}.go{color:#888}.gp{color:#555}.gs{font-weight:bold}.gu{color:purple;font-weight:bold}.gt{color:#a00}.kc{font-weight:bold}.kd{font-weight:bold}.kn{font-weight:bold}.kp{font-weight:bold}.kr{font-weight:bold}.kt{color:#458;font-weight:bold}.m{color:#099}.s{color:#d14}.n{color:#333}.na{color:teal}.nb{color:#0086b3}.nc{color:#458;font-weight:bold}.no{color:teal}.ni{color:purple}.ne{color:#900;font-weight:bold}.nf{color:#900;font-weight:bold}.nn{color:#555}.nt{color:navy}.nv{color:teal}.ow{font-weight:bold}.w{color:#bbb}.mf{color:#099}.mh{color:#099}.mi{color:#099}.mo{color:#099}.sb{color:#d14}.sc{color:#d14}.sd{color:#d14}.s2{color:#d14}.se{color:#d14}.sh{color:#d14}.si{color:#d14}.sx{color:#d14}.sr{color:#009926}.s1{color:#d14}.ss{color:#990073}.bp{color:#999}.vc{color:teal}.vg{color:teal}.vi{color:teal}.il{color:#099}.gc{color:#999;background-color:#eaf2f5}.wy-breadcrumbs li{display:inline-block}.wy-breadcrumbs li.wy-breadcrumbs-aside{float:right}.wy-breadcrumbs li a{display:inline-block;padding:5px}.wy-breadcrumbs li a:first-child{padding-left:0}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width: 480px){.wy-breadcrumbs-extra{display:none}.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:before,.wy-menu-horiz:after{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz ul,.wy-menu-horiz li{display:inline-block}.wy-menu-horiz li:hover{background:rgba(255,255,255,0.1)}.wy-menu-horiz li.divide-left{border-left:solid 1px #404040}.wy-menu-horiz li.divide-right{border-right:solid 1px #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical header{height:32px;display:inline-block;line-height:32px;padding:0 
1.618em;display:block;font-weight:bold;text-transform:uppercase;font-size:80%;color:#2980b9;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:solid 1px #404040}.wy-menu-vertical li.divide-bottom{border-bottom:solid 1px #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:gray;border-right:solid 1px #c9c9c9;padding:0.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.wy-menu-vertical li.on a,.wy-menu-vertical li.current>a{color:#404040;padding:0.4045em 1.618em;font-weight:bold;position:relative;background:#fcfcfc;border:none;border-bottom:solid 1px #c9c9c9;border-top:solid 1px #c9c9c9;padding-left:1.618em -4px}.wy-menu-vertical li.on a:hover,.wy-menu-vertical li.current>a:hover{background:#fcfcfc}.wy-menu-vertical li.tocktree-l2.current>a{background:#c9c9c9}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical .local-toc li ul{display:block}.wy-menu-vertical li ul li a{margin-bottom:0;color:#b3b3b3;font-weight:normal}.wy-menu-vertical a{display:inline-block;line-height:18px;padding:0.4045em 1.618em;display:block;position:relative;font-size:90%;color:#b3b3b3}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-side-nav-search{z-index:200;background-color:#2980b9;text-align:center;padding:0.809em;display:block;color:#fcfcfc;margin-bottom:0.809em}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto 0.809em auto;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search>a,.wy-side-nav-search .wy-dropdown>a{color:#fcfcfc;font-size:100%;font-weight:bold;display:inline-block;padding:4px 6px;margin-bottom:0.809em}.wy-side-nav-search>a:hover,.wy-side-nav-search 
.wy-dropdown>a:hover{background:rgba(255,255,255,0.1)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all 0.2s ease-in;-moz-transition:all 0.2s ease-in;transition:all 0.2s ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:left repeat-y #fcfcfc;background-image:url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDoxOERBMTRGRDBFMUUxMUUzODUwMkJCOThDMEVFNURFMCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDoxOERBMTRGRTBFMUUxMUUzODUwMkJCOThDMEVFNURFMCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjE4REExNEZCMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjE4REExNEZDMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+EwrlwAAAAA5JREFUeNpiMDU0BAgwAAE2AJgB9BnaAAAAAElFTkSuQmCC);background-size:300px 
1px}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:absolute;top:0;left:0;width:300px;overflow:hidden;min-height:100%;background:#343131;z-index:200}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:0.4045em 0.809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:before,.wy-nav-top:after{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:bold}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,0.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:#999}footer p{margin-bottom:12px}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:before,.rst-footer-buttons:after{display:table;content:""}.rst-footer-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:solid 1px #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:solid 1px #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:gray;font-size:90%}@media screen and (max-width: 768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width: 1400px){.wy-nav-content-wrap{background:rgba(0,0,0,0.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media 
print{.wy-nav-side{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-success .rst-versions .rst-current-version .wy-input-context,.rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-danger .rst-versions .rst-current-version .wy-input-context,.rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .rst-versions .rst-current-version .wy-input-context,.rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-info .rst-versions .rst-current-version .wy-input-context,.rst-versions .rst-current-version .wy-tag-input-group .wy-tag .wy-tag-remove,.wy-tag-input-group .wy-tag .rst-versions .rst-current-version .wy-tag-remove,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h2 
.headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-content dl dt .rst-versions .rst-current-version .headerlink{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}}.rst-content img{max-width:100%;height:auto !important}.rst-content .section>img{margin-bottom:24px}.rst-content 
blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content .note .last,.rst-content .attention .last,.rst-content .caution .last,.rst-content .danger .last,.rst-content .error .last,.rst-content .hint .last,.rst-content .important .last,.rst-content .tip .last,.rst-content .warning .last,.rst-content .seealso .last{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,0.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent !important;border-color:rgba(0,0,0,0.1) !important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha li{list-style:upper-alpha}.rst-content .section ol p,.rst-content .section ul p{margin-bottom:12px}.rst-content .line-block{margin-left:24px}.rst-content .topic-title{font-weight:bold;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0px 0px 24px 24px}.rst-content .align-left{float:left;margin:0px 24px 24px 0px}.rst-content .align-center{margin:auto;display:block}.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content dl dt .headerlink{display:none;visibility:hidden;font-size:14px}.rst-content h1 .headerlink:after,.rst-content h2 .headerlink:after,.rst-content h3 .headerlink:after,.rst-content h4 .headerlink:after,.rst-content h5 .headerlink:after,.rst-content h6 .headerlink:after,.rst-content dl dt .headerlink:after{visibility:visible;content:"\f0c1";font-family:fontawesome-webfont;display:inline-block}.rst-content h1:hover .headerlink,.rst-content h2:hover .headerlink,.rst-content h3:hover .headerlink,.rst-content h4:hover .headerlink,.rst-content h5:hover .headerlink,.rst-content h6:hover .headerlink,.rst-content dl 
dt:hover .headerlink{display:inline-block}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:solid 1px #e1e4e5}.rst-content .sidebar p,.rst-content .sidebar ul,.rst-content .sidebar dl{font-size:90%}.rst-content .sidebar .last{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:"Roboto Slab","ff-tisa-web-pro","Georgia",Arial,sans-serif;font-weight:bold;background:#e1e4e5;padding:6px 12px;margin:-24px;margin-bottom:24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;display:inline-block;font-weight:bold;padding:0 6px}.rst-content .footnote-reference,.rst-content .citation-reference{vertical-align:super;font-size:90%}.rst-content table.docutils.citation,.rst-content table.docutils.footnote{background:none;border:none;color:#999}.rst-content table.docutils.citation td,.rst-content table.docutils.citation tr,.rst-content table.docutils.footnote td,.rst-content table.docutils.footnote tr{border:none;background-color:transparent !important;white-space:normal}.rst-content table.docutils.citation td.label,.rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}.rst-content table.field-list{border:none}.rst-content table.field-list td{border:none}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left;padding-left:0}.rst-content tt{color:#000}.rst-content tt big,.rst-content tt em{font-size:100% !important;line-height:normal}.rst-content tt .xref,a .rst-content tt{font-weight:bold}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:bold}.rst-content dl p,.rst-content dl table,.rst-content dl ul,.rst-content dl ol{margin-bottom:12px !important}.rst-content dl dd{margin:0 0 12px 24px}.rst-content dl:not(.docutils){margin-bottom:24px}.rst-content dl:not(.docutils) dt{display:inline-block;margin:6px 
0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:solid 3px #6ab0de;padding:6px;position:relative}.rst-content dl:not(.docutils) dt:before{color:#6ab0de}.rst-content dl:not(.docutils) dt .headerlink{color:#404040;font-size:100% !important}.rst-content dl:not(.docutils) dl dt{margin-bottom:6px;border:none;border-left:solid 3px #ccc;background:#f0f0f0;color:gray}.rst-content dl:not(.docutils) dl dt .headerlink{color:#404040;font-size:100% !important}.rst-content dl:not(.docutils) dt:first-child{margin-top:0}.rst-content dl:not(.docutils) tt{font-weight:bold}.rst-content dl:not(.docutils) tt.descname,.rst-content dl:not(.docutils) tt.descclassname{background-color:transparent;border:none;padding:0;font-size:100% !important}.rst-content dl:not(.docutils) tt.descname{font-weight:bold}.rst-content dl:not(.docutils) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:bold}.rst-content dl:not(.docutils) .property{display:inline-block;padding-right:8px}.rst-content .viewcode-link,.rst-content .viewcode-back{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}@media screen and (max-width: 480px){.rst-content .sidebar{width:100%}}span[id*='MathJax-Span']{color:#404040} diff --git a/docs/_themes/sphinx_rtd_theme/static/favicon.ico b/docs/_themes/sphinx_rtd_theme/static/favicon.ico new file mode 100644 index 00000000..6970cfde Binary files /dev/null and b/docs/_themes/sphinx_rtd_theme/static/favicon.ico differ diff --git a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot new file mode 100755 index 00000000..0662cb96 Binary files /dev/null and b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.eot differ diff --git a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg new file mode 100755 
index 00000000..2edb4ec3 --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.svg @@ -0,0 +1,399 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf new file mode 100755 index 00000000..d3659246 Binary files /dev/null and b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.ttf differ diff --git a/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff new file mode 100755 index 00000000..b9bd17e1 Binary files /dev/null and b/docs/_themes/sphinx_rtd_theme/static/font/fontawesome_webfont.woff differ diff --git a/docs/_themes/sphinx_rtd_theme/static/js/theme.js b/docs/_themes/sphinx_rtd_theme/static/js/theme.js new file mode 100755 index 00000000..58e514c0 --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/static/js/theme.js @@ -0,0 +1,16 @@ +$( document ).ready(function() { + // Shift nav in mobile when clicking the menu. 
+ $("[data-toggle='wy-nav-top']").click(function() { + $("[data-toggle='wy-nav-shift']").toggleClass("shift"); + $("[data-toggle='rst-versions']").toggleClass("shift"); + }); + // Close menu when you click a link. + $(".wy-menu-vertical .current ul li a").click(function() { + $("[data-toggle='wy-nav-shift']").removeClass("shift"); + $("[data-toggle='rst-versions']").toggleClass("shift"); + }); + $("[data-toggle='rst-current-version']").click(function() { + $("[data-toggle='rst-versions']").toggleClass("shift-up"); + }); + $("table.docutils:not(.field-list").wrap("
"); +}); diff --git a/docs/_themes/sphinx_rtd_theme/theme.conf b/docs/_themes/sphinx_rtd_theme/theme.conf new file mode 100755 index 00000000..173ca698 --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/theme.conf @@ -0,0 +1,8 @@ +[theme] +inherit = basic +stylesheet = css/theme.css + +[options] +typekit_id = hiw1hhg +analytics_id = +canonical_url = \ No newline at end of file diff --git a/docs/_themes/sphinx_rtd_theme/versions.html b/docs/_themes/sphinx_rtd_theme/versions.html new file mode 100755 index 00000000..93319be8 --- /dev/null +++ b/docs/_themes/sphinx_rtd_theme/versions.html @@ -0,0 +1,37 @@ +{% if READTHEDOCS %} +{# Add rst-badge after rst-versions for small badge style. #} +
+ + Read the Docs + v: {{ current_version }} + + +
+
+
Versions
+ {% for slug, url in versions %} +
{{ slug }}
+ {% endfor %} +
+
+
Downloads
+ {% for type, url in downloads %} +
{{ type }}
+ {% endfor %} +
+
+
On Read the Docs
+
+ Project Home +
+
+ Builds +
+
+
+ Free document hosting provided by Read the Docs. + +
+
+{% endif %} + diff --git a/docs/apireference.rst b/docs/apireference.rst index d0627278..6c42d40a 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -38,18 +38,28 @@ Context Managers ================ .. autoclass:: mongoengine.context_managers.switch_db +.. autoclass:: mongoengine.context_managers.switch_collection .. autoclass:: mongoengine.context_managers.no_dereference .. autoclass:: mongoengine.context_managers.query_counter Querying ======== -.. autoclass:: mongoengine.queryset.QuerySet - :members: +.. automodule:: mongoengine.queryset + :synopsis: Queryset level operations - .. automethod:: mongoengine.queryset.QuerySet.__call__ + .. autoclass:: mongoengine.queryset.QuerySet + :members: + :inherited-members: -.. autofunction:: mongoengine.queryset.queryset_manager + .. automethod:: QuerySet.__call__ + + .. autoclass:: mongoengine.queryset.QuerySetNoCache + :members: + + .. automethod:: mongoengine.queryset.QuerySetNoCache.__call__ + + .. autofunction:: mongoengine.queryset.queryset_manager Fields ====== diff --git a/docs/changelog.rst b/docs/changelog.rst index 0f86a629..13cd1b69 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,13 +3,111 @@ Changelog ========= +Changes in 0.9.X - DEV +====================== + +- Added the ability to reload specific document fields #100 +- Added db_alias support and fixes for custom map/reduce output #586 +- post_save signal now has access to delta information about field changes #594 #589 +- Don't query with $orderby for qs.get() #600 +- Fix id shard key save issue #636 +- Fixes issue with recursive embedded document errors #557 +- Fix clear_changed_fields() clearing unsaved documents bug #602 +- Removing support for Django 1.4.x, pymongo 2.5.x, pymongo 2.6.x. 
+- Removing support for Python < 2.6.6 +- Fixed $maxDistance location for geoJSON $near queries with MongoDB 2.6+ #664 +- QuerySet.modify() method to provide find_and_modify() like behaviour #677 +- Added support for the using() method on a queryset #676 +- PYPY support #673 +- Connection pooling #674 +- Avoid to open all documents from cursors in an if stmt #655 +- Ability to clear the ordering #657 +- Raise NotUniqueError in Document.update() on pymongo.errors.DuplicateKeyError #626 +- Slots - memory improvements #625 +- Fixed incorrectly split a query key when it ends with "_" #619 +- Geo docs updates #613 +- Workaround a dateutil bug #608 +- Conditional save for atomic-style operations #511 +- Allow dynamic dictionary-style field access #559 + +Changes in 0.8.7 +================ +- Calling reload on deleted / nonexistant documents raises DoesNotExist (#538) +- Stop ensure_indexes running on a secondaries (#555) +- Fix circular import issue with django auth (#531) (#545) + +Changes in 0.8.6 +================ +- Fix django auth import (#531) + +Changes in 0.8.5 +================ +- Fix multi level nested fields getting marked as changed (#523) +- Django 1.6 login fix (#522) (#527) +- Django 1.6 session fix (#509) +- EmbeddedDocument._instance is now set when settng the attribute (#506) +- Fixed EmbeddedDocument with ReferenceField equality issue (#502) +- Fixed GenericReferenceField serialization order (#499) +- Fixed count and none bug (#498) +- Fixed bug with .only() and DictField with digit keys (#496) +- Added user_permissions to Django User object (#491, #492) +- Fix updating Geo Location fields (#488) +- Fix handling invalid dict field value (#485) +- Added app_label to MongoUser (#484) +- Use defaults when host and port are passed as None (#483) +- Fixed distinct casting issue with ListField of EmbeddedDocuments (#470) +- Fixed Django 1.6 sessions (#454, #480) + +Changes in 0.8.4 +================ +- Remove database name necessity in uri connection schema 
(#452) +- Fixed "$pull" semantics for nested ListFields (#447) +- Allow fields to be named the same as query operators (#445) +- Updated field filter logic - can now exclude subclass fields (#443) +- Fixed dereference issue with embedded listfield referencefields (#439) +- Fixed slice when using inheritance causing fields to be excluded (#437) +- Fixed ._get_db() attribute after a Document.switch_db() (#441) +- Dynamic Fields store and recompose Embedded Documents / Documents correctly (#449) +- Handle dynamic fieldnames that look like digits (#434) +- Added get_user_document and improve mongo_auth module (#423) +- Added str representation of GridFSProxy (#424) +- Update transform to handle docs erroneously passed to unset (#416) +- Fixed indexing - turn off _cls (#414) +- Fixed dereference threading issue in ComplexField.__get__ (#412) +- Fixed QuerySetNoCache.count() caching (#410) +- Don't follow references in _get_changed_fields (#422, #417) +- Allow args and kwargs to be passed through to_json (#420) + +Changes in 0.8.3 +================ +- Fixed EmbeddedDocuments with `id` also storing `_id` (#402) +- Added get_proxy_object helper to filefields (#391) +- Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) +- Fixed sum and average mapreduce dot notation support (#375, #376, #393) +- Fixed as_pymongo to return the id (#386) +- Document.select_related() now respects `db_alias` (#377) +- Reload uses shard_key if applicable (#384) +- Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) + + **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 + +- Fixed pickling dynamic documents `_dynamic_fields` (#387) +- Fixed ListField setslice and delslice dirty tracking (#390) +- Added Django 1.5 PY3 support (#392) +- Added match ($elemMatch) support for EmbeddedDocuments (#379) +- Fixed weakref being valid after reload (#374) +- Fixed queryset.get() respecting no_dereference 
(#373) +- Added full_result kwarg to update (#380) + + + Changes in 0.8.2 ================ - Added compare_indexes helper (#361) - Fixed cascading saves which weren't turned off as planned (#291) - Fixed Datastructures so instances are a Document or EmbeddedDocument (#363) - Improved cascading saves write performance (#361) -- Fixed amibiguity and differing behaviour regarding field defaults (#349) +- Fixed ambiguity and differing behaviour regarding field defaults (#349) - ImageFields now include PIL error messages if invalid error (#353) - Added lock when calling doc.Delete() for when signals have no sender (#350) - Reload forces read preference to be PRIMARY (#355) diff --git a/docs/conf.py b/docs/conf.py index 40c1f430..cddd35db 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -92,7 +92,7 @@ pygments_style = 'sphinx' # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = 'nature' +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -116,7 +116,7 @@ html_theme_path = ['_themes'] # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +html_favicon = "favicon.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, @@ -200,3 +200,6 @@ latex_documents = [ autoclass_content = 'both' +html_theme_options = dict( + canonical_url='http://docs.mongoengine.org/en/latest/' +) diff --git a/docs/django.rst b/docs/django.rst index da151888..5d2423f2 100644 --- a/docs/django.rst +++ b/docs/django.rst @@ -45,7 +45,7 @@ The :mod:`~mongoengine.django.auth` module also contains a Custom User model ================= Django 1.5 introduced `Custom user Models -` +`_ which can be used as an alternative to the MongoEngine authentication backend. The main advantage of this option is that other components relying on @@ -74,7 +74,7 @@ An additional ``MONGOENGINE_USER_DOCUMENT`` setting enables you to replace the The custom :class:`User` must be a :class:`~mongoengine.Document` class, but otherwise has the same requirements as a standard custom user model, as specified in the `Django Documentation -`. +`_. In particular, the custom class must define :attr:`USERNAME_FIELD` and :attr:`REQUIRED_FIELDS` attributes. @@ -90,10 +90,15 @@ session backend, ensure that your settings module has into your settings module:: SESSION_ENGINE = 'mongoengine.django.sessions' + SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer' Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesn't delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports `mongodb TTL `_. +.. note:: ``SESSION_SERIALIZER`` is only necessary in Django 1.6 as the default + serializer is based around JSON and doesn't know how to convert + ``bson.objectid.ObjectId`` instances to strings. + .. versionadded:: 0.2.1 Storage @@ -128,7 +133,7 @@ appended to the filename until the generated filename doesn't exist. 
The >>> fs.listdir() ([], [u'hello.txt']) -All files will be saved and retrieved in GridFS via the :class::`FileDocument` +All files will be saved and retrieved in GridFS via the :class:`FileDocument` document, allowing easy access to the files without the GridFSStorage backend.:: @@ -137,3 +142,36 @@ backend.:: [] .. versionadded:: 0.4 + +Shortcuts +========= +Inspired by the `Django shortcut get_object_or_404 +`_, +the :func:`~mongoengine.django.shortcuts.get_document_or_404` method returns +a document or raises an Http404 exception if the document does not exist:: + + from mongoengine.django.shortcuts import get_document_or_404 + + admin_user = get_document_or_404(User, username='root') + +The first argument may be a Document or QuerySet object. All other passed arguments +and keyword arguments are used in the query:: + + foo_email = get_document_or_404(User.objects.only('email'), username='foo', is_active=True).email + +.. note:: Like with :func:`get`, a MultipleObjectsReturned will be raised if more than one + object is found. + + +Also inspired by the `Django shortcut get_list_or_404 +`_, +the :func:`~mongoengine.django.shortcuts.get_list_or_404` method returns a list of +documents or raises an Http404 exception if the list is empty:: + + from mongoengine.django.shortcuts import get_list_or_404 + + active_users = get_list_or_404(User, is_active=True) + +The first argument may be a Document or QuerySet object. All other passed +arguments and keyword arguments are used to filter the query. 
+ diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index 854e2c30..dd89938b 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -23,12 +23,15 @@ arguments should be provided:: connect('project1', username='webapp', password='pwd123') -Uri style connections are also supported as long as you include the database -name - just supply the uri as the :attr:`host` to +Uri style connections are also supported - just supply the uri as +the :attr:`host` to :func:`~mongoengine.connect`:: connect('project1', host='mongodb://localhost/database_name') +Note that the database name from the uri has priority over the name +passed to :func:`~mongoengine.connect` + ReplicaSets =========== @@ -97,3 +100,18 @@ access to the same User document across databases:: .. note:: Make sure any aliases have been registered with :func:`~mongoengine.register_connection` before using the context manager. + +There is also a switch collection context manager as well. The +:class:`~mongoengine.context_managers.switch_collection` context manager allows +you to change the collection for a given class allowing quick and easy +access to the same Group document across collection:: + + from mongoengine.context_managers import switch_collection + + class Group(Document): + name = StringField() + + Group(name="test").save() # Saves in the default db + + with switch_collection(Group, 'group2000') as Group: + Group(name="hello Group 2000 collection!").save() # Saves in group2000 collection
Fields ====== @@ -290,6 +290,12 @@ instance of the object to the query:: # Find all pages that both Bob and John have authored Page.objects(authors__all=[bob, john]) + # Remove Bob from the authors for a page. + Page.objects(id='...').update_one(pull__authors=bob) + + # Add John to the authors for a page. + Page.objects(id='...').update_one(push__authors=john) + Dealing with deletion of referred documents ''''''''''''''''''''''''''''''''''''''''''' @@ -442,6 +448,8 @@ The following example shows a :class:`Log` document that will be limited to ip_address = StringField() meta = {'max_documents': 1000, 'max_size': 2000000} +.. _defining-indexes: + Indexes ======= @@ -450,8 +458,8 @@ by creating a list of index specifications called :attr:`indexes` in the :attr:`~mongoengine.Document.meta` dictionary, where an index specification may either be a single field name, a tuple containing multiple field names, or a dictionary containing a full index definition. A direction may be specified on -fields by prefixing the field name with a **+** or a **-** sign. Note that -direction only matters on multi-field indexes. :: +fields by prefixing the field name with a **+** (for ascending) or a **-** sign +(for descending). Note that direction only matters on multi-field indexes. :: class Page(Document): title = StringField() @@ -485,6 +493,35 @@ If a dictionary is passed then the following options are available: Inheritance adds extra fields indices see: :ref:`document-inheritance`.
+Global index default options +---------------------------- + +There are a few top level defaults for all indexes that can be set:: + + class Page(Document): + title = StringField() + rating = StringField() + meta = { + 'index_options': {}, + 'index_background': True, + 'index_drop_dups': True, + 'index_cls': False + } + + +:attr:`index_options` (Optional) + Set any default index options - see the `full options list `_ + +:attr:`index_background` (Optional) + Set the default value for if an index should be indexed in the background + +:attr:`index_drop_dups` (Optional) + Set the default value for if an index should drop duplicates + +:attr:`index_cls` (Optional) + A way to turn off a specific index for _cls. + + Compound Indexes and Indexing sub documents ------------------------------------------- @@ -494,6 +531,8 @@ field name to the index definition. Sometimes its more efficient to index parts of Embedded / dictionary fields, in this case use 'dot' notation to identify the value to index eg: `rank.title` +.. _geospatial-indexes: + Geospatial indexes ------------------ @@ -558,6 +597,11 @@ documentation for more information. A common usecase might be session data:: ] } +.. warning:: TTL indexes happen on the MongoDB server and not in the application + code, therefore no signals will be fired on document deletion. + If you need signals to be fired on deletion, then you must handle the + deletion of Documents in your application code. + Comparing Indexes ----------------- @@ -653,7 +697,6 @@ document.:: .. note:: From 0.8 onwards you must declare :attr:`allow_inheritance` defaults to False, meaning you must set it to True to use inheritance. - Working with existing data -------------------------- As MongoEngine no longer defaults to needing :attr:`_cls` you can quickly and @@ -673,3 +716,25 @@ defining all possible field types. 
If you use :class:`~mongoengine.Document` and the database contains data that isn't defined then that data will be stored in the `document._data` dictionary. + +Abstract classes +================ + +If you want to add some extra functionality to a group of Document classes but +you don't need or want the overhead of inheritance you can use the +:attr:`abstract` attribute of :attr:`~mongoengine.Document.meta`. +This won't turn on :ref:`document-inheritance` but will allow you to keep your +code DRY:: + + class BaseDocument(Document): + meta = { + 'abstract': True, + } + def check_permissions(self): + ... + + class User(BaseDocument): + ... + +Now the User class will have access to the inherited `check_permissions` method +and won't store any of the extra `_cls` information. diff --git a/docs/guide/gridfs.rst b/docs/guide/gridfs.rst index d81bb922..68e7a6d2 100644 --- a/docs/guide/gridfs.rst +++ b/docs/guide/gridfs.rst @@ -20,7 +20,7 @@ a document is created to store details about animals, including a photo:: marmot = Animal(genus='Marmota', family='Sciuridae') - marmot_photo = open('marmot.jpg', 'r') + marmot_photo = open('marmot.jpg', 'rb') marmot.photo.put(marmot_photo, content_type = 'image/jpeg') marmot.save() @@ -46,7 +46,7 @@ slightly different manner. First, a new file must be created by calling the marmot.photo.write('some_more_image_data') marmot.photo.close() - marmot.photo.save() + marmot.save() Deletion -------- @@ -70,5 +70,5 @@ Replacing files Files can be replaced with the :func:`replace` method.
This works just like the :func:`put` method so even metadata can (and should) be replaced:: - another_marmot = open('another_marmot.png', 'r') + another_marmot = open('another_marmot.png', 'rb') marmot.photo.replace(another_marmot, content_type='image/png') diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 1350130e..34996dc6 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -16,7 +16,9 @@ fetch documents from the database:: .. note:: As of MongoEngine 0.8 the querysets utilise a local cache. So iterating - it multiple times will only cause a single query. + it multiple times will only cause a single query. If this is not the + desired behaviour you can call :class:`~mongoengine.QuerySet.no_cache` + (version **0.8.3+**) to return a non-caching queryset. Filtering queries ================= @@ -90,8 +92,8 @@ were added in 0.8 for: :class:`~mongoengine.fields.PointField`, * ``geo_within`` -- Check if a geometry is within a polygon. For ease of use it accepts either a geojson geometry or just the polygon coordinates eg:: - loc.objects(point__geo_with=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) - loc.objects(point__geo_with={"type": "Polygon", + loc.objects(point__geo_within=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) + loc.objects(point__geo_within={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) * ``geo_within_box`` - simplified geo_within searching with a box eg:: @@ -486,8 +488,9 @@ calling it with keyword arguments:: Atomic updates ============== Documents may be updated atomically by using the -:meth:`~mongoengine.queryset.QuerySet.update_one` and -:meth:`~mongoengine.queryset.QuerySet.update` methods on a +:meth:`~mongoengine.queryset.QuerySet.update_one`, +:meth:`~mongoengine.queryset.QuerySet.update` and +:meth:`~mongoengine.queryset.QuerySet.modify` methods on a :meth:`~mongoengine.queryset.QuerySet`.
There are several different "modifiers" that you may use with these methods: @@ -495,14 +498,15 @@ that you may use with these methods: * ``unset`` -- delete a particular value (since MongoDB v1.3+) * ``inc`` -- increment a value by a given amount * ``dec`` -- decrement a value by a given amount -* ``pop`` -- remove the last item from a list * ``push`` -- append a value to a list * ``push_all`` -- append several values to a list -* ``pop`` -- remove the first or last element of a list +* ``pop`` -- remove the first or last element of a list `depending on the value`_ * ``pull`` -- remove a value from a list * ``pull_all`` -- remove several values from a list * ``add_to_set`` -- add value to a list only if its not in the list already +.. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/ + The syntax for atomic updates is similar to the querying syntax, but the modifier comes before the field, not after it:: diff --git a/docs/upgrade.rst b/docs/upgrade.rst index c3d31824..b55fe312 100644 --- a/docs/upgrade.rst +++ b/docs/upgrade.rst @@ -2,12 +2,28 @@ Upgrading ######### +0.8.7 +***** + +Calling reload on deleted / nonexistent documents now raises a DoesNotExist +exception. + + +0.8.2 to 0.8.3 +************** + +Minor change that may impact users: + +DynamicDocument fields are now stored in creation order after any declared +fields. Previously they were stored alphabetically. + + 0.7 to 0.8 ********** There have been numerous backwards breaking changes in 0.8. The reasons for -these are ensure that MongoEngine has sane defaults going forward and -performs the best it can out the box. Where possible there have been +these are to ensure that MongoEngine has sane defaults going forward and that it +performs the best it can out of the box. Where possible there have been FutureWarnings to help get you ready for the change, but that hasn't been possible for the whole of the release.
@@ -61,7 +77,7 @@ inherited classes like so: :: Document Definition ------------------- -The default for inheritance has changed - its now off by default and +The default for inheritance has changed - it is now off by default and :attr:`_cls` will not be stored automatically with the class. So if you extend your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocuments` you will need to declare :attr:`allow_inheritance` in the meta data like so: :: @@ -71,7 +87,7 @@ you will need to declare :attr:`allow_inheritance` in the meta data like so: :: meta = {'allow_inheritance': True} -Previously, if you had data the database that wasn't defined in the Document +Previously, if you had data in the database that wasn't defined in the Document definition, it would set it as an attribute on the document. This is no longer the case and the data is set only in the ``document._data`` dictionary: :: @@ -92,8 +108,8 @@ the case and the data is set only in the ``document._data`` dictionary: :: AttributeError: 'Animal' object has no attribute 'size' The Document class has introduced a reserved function `clean()`, which will be -called before saving the document. If your document class happen to have a method -with the same name, please try rename it. +called before saving the document. If your document class happens to have a method +with the same name, please try to rename it. def clean(self): pass @@ -101,7 +117,7 @@ with the same name, please try rename it. 
ReferenceField -------------- -ReferenceFields now store ObjectId's by default - this is more efficient than +ReferenceFields now store ObjectIds by default - this is more efficient than DBRefs as we already know what Document types they reference:: # Old code @@ -147,7 +163,7 @@ UUIDFields now default to storing binary values:: class Animal(Document): uuid = UUIDField(binary=False) -To migrate all the uuid's you need to touch each object and mark it as dirty +To migrate all the uuids you need to touch each object and mark it as dirty eg:: # Doc definition @@ -165,7 +181,7 @@ eg:: DecimalField ------------ -DecimalField now store floats - previous it was storing strings and that +DecimalFields now store floats - previously it was storing strings and that made it impossible to do comparisons when querying correctly.:: # Old code @@ -176,7 +192,7 @@ made it impossible to do comparisons when querying correctly.:: class Person(Document): balance = DecimalField(force_string=True) -To migrate all the uuid's you need to touch each object and mark it as dirty +To migrate all the DecimalFields you need to touch each object and mark it as dirty eg:: # Doc definition @@ -188,7 +204,7 @@ eg:: p._mark_as_changed('balance') p.save() -.. note:: DecimalField's have also been improved with the addition of precision +.. note:: DecimalFields have also been improved with the addition of precision and rounding. See :class:`~mongoengine.fields.DecimalField` for more information. `An example test migration for DecimalFields is available on github @@ -197,7 +213,7 @@ eg:: Cascading Saves --------------- To improve performance document saves will no longer automatically cascade. 
-Any changes to a Documents references will either have to be saved manually or +Any changes to a Document's references will either have to be saved manually or you will have to explicitly tell it to cascade on save:: # At the class level: @@ -239,7 +255,7 @@ update your code like so: :: # Update example a) assign queryset after a change: mammals = Animal.objects(type="mammal") - carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so fitler can be applied + carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so filter can be applied [m for m in carnivores] # This will return all carnivores # Update example b) chain the queryset: @@ -260,13 +276,13 @@ queryset you should upgrade to use count:: len(Animal.objects(type="mammal")) # New code - Animal.objects(type="mammal").count()) + Animal.objects(type="mammal").count() .only() now inline with .exclude() ---------------------------------- -The behaviour of `.only()` was highly ambious, now it works in the mirror fashion +The behaviour of `.only()` was highly ambiguous, now it works in mirror fashion to `.exclude()`. Chaining `.only()` calls will increase the fields required:: # Old code @@ -430,7 +446,7 @@ main areas of changed are: choices in fields, map_reduce and collection names. Choice options: =============== -Are now expected to be an iterable of tuples, with the first element in each +Are now expected to be an iterable of tuples, with the first element in each tuple being the actual value to be stored. The second element is the human-readable name for the option. 
@@ -452,8 +468,8 @@ such the following have been changed: Default collection naming ========================= -Previously it was just lowercase, its now much more pythonic and readable as -its lowercase and underscores, previously :: +Previously it was just lowercase, it's now much more pythonic and readable as +it's lowercase and underscores, previously :: class MyAceDocument(Document): pass @@ -520,5 +536,5 @@ Alternatively, you can rename your collections eg :: mongodb 1.8 > 2.0 + =================== -Its been reported that indexes may need to be recreated to the newer version of indexes. +It's been reported that indexes may need to be recreated to the newer version of indexes. To do this drop indexes and call ``ensure_indexes`` on each model. diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index 5bd12019..ac33ad2b 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -15,7 +15,7 @@ import django __all__ = (list(document.__all__) + fields.__all__ + connection.__all__ + list(queryset.__all__) + signals.__all__ + list(errors.__all__)) -VERSION = (0, 8, 2) +VERSION = (0, 8, 7) def get_version(): diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index adcd8d04..32a66018 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -1,4 +1,6 @@ import weakref +import functools +import itertools from mongoengine.common import _import_class __all__ = ("BaseDict", "BaseList") @@ -108,6 +110,14 @@ class BaseList(list): self._mark_as_changed() return super(BaseList, self).__delitem__(*args, **kwargs) + def __setslice__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).__setslice__(*args, **kwargs) + + def __delslice__(self, *args, **kwargs): + self._mark_as_changed() + return super(BaseList, self).__delslice__(*args, **kwargs) + def __getstate__(self): self.instance = None self._dereferenced = False @@ -148,3 +158,98 @@ class BaseList(list): def 
_mark_as_changed(self): if hasattr(self._instance, '_mark_as_changed'): self._instance._mark_as_changed(self._name) + + +class StrictDict(object): + __slots__ = () + _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create']) + _classes = {} + def __init__(self, **kwargs): + for k,v in kwargs.iteritems(): + setattr(self, k, v) + def __getitem__(self, key): + key = '_reserved_' + key if key in self._special_fields else key + try: + return getattr(self, key) + except AttributeError: + raise KeyError(key) + def __setitem__(self, key, value): + key = '_reserved_' + key if key in self._special_fields else key + return setattr(self, key, value) + def __contains__(self, key): + return hasattr(self, key) + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + def pop(self, key, default=None): + v = self.get(key, default) + try: + delattr(self, key) + except AttributeError: + pass + return v + def iteritems(self): + for key in self: + yield key, self[key] + def items(self): + return [(k, self[k]) for k in iter(self)] + def keys(self): + return list(iter(self)) + def __iter__(self): + return (key for key in self.__slots__ if hasattr(self, key)) + def __len__(self): + return len(list(self.iteritems())) + def __eq__(self, other): + return self.items() == other.items() + def __neq__(self, other): + return self.items() != other.items() + + @classmethod + def create(cls, allowed_keys): + allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys) + allowed_keys = frozenset(allowed_keys_tuple) + if allowed_keys not in cls._classes: + class SpecificStrictDict(cls): + __slots__ = allowed_keys_tuple + cls._classes[allowed_keys] = SpecificStrictDict + return cls._classes[allowed_keys] + + +class SemiStrictDict(StrictDict): + __slots__ = ('_extras') + _classes = {} + def __getattr__(self, attr): + try: + super(SemiStrictDict, self).__getattr__(attr) + except AttributeError: + try: 
+ return self.__getattribute__('_extras')[attr] + except KeyError as e: + raise AttributeError(e) + def __setattr__(self, attr, value): + try: + super(SemiStrictDict, self).__setattr__(attr, value) + except AttributeError: + try: + self._extras[attr] = value + except AttributeError: + self._extras = {attr: value} + + def __delattr__(self, attr): + try: + super(SemiStrictDict, self).__delattr__(attr) + except AttributeError: + try: + del self._extras[attr] + except KeyError as e: + raise AttributeError(e) + + def __iter__(self): + try: + extras_iter = iter(self.__getattribute__('_extras')) + except AttributeError: + extras_iter = () + return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter) + diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index ca154a29..e77ea080 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -1,10 +1,11 @@ import copy import operator import numbers +from collections import Hashable from functools import partial import pymongo -from bson import json_util +from bson import json_util, ObjectId from bson.dbref import DBRef from bson.son import SON @@ -12,24 +13,23 @@ from mongoengine import signals from mongoengine.common import _import_class from mongoengine.errors import (ValidationError, InvalidDocumentError, LookUpError) -from mongoengine.python_support import (PY3, UNICODE_KWARGS, txt_type, - to_str_keys_recursive) +from mongoengine.python_support import PY3, txt_type from mongoengine.base.common import get_document, ALLOW_INHERITANCE -from mongoengine.base.datastructures import BaseDict, BaseList +from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict, SemiStrictDict from mongoengine.base.fields import ComplexBaseField __all__ = ('BaseDocument', 'NON_FIELD_ERRORS') NON_FIELD_ERRORS = '__all__' - class BaseDocument(object): + __slots__ = ('_changed_fields', '_initialised', '_created', '_data', + '_dynamic_fields', '_auto_id_field', '_db_field_map', 
'_cls', '__weakref__') _dynamic = False - _created = True _dynamic_lock = True - _initialised = False + STRICT = False def __init__(self, *args, **values): """ @@ -38,10 +38,15 @@ class BaseDocument(object): :param __auto_convert: Try and will cast python objects to Object types :param values: A dictionary of values for the document """ + self._initialised = False + self._created = True if args: # Combine positional arguments with named arguments. # We only want named arguments. field = iter(self._fields_ordered) + # If its an automatic id field then skip to the first defined field + if self._auto_id_field: + next(field) for value in args: name = next(field) if name in values: @@ -50,7 +55,12 @@ class BaseDocument(object): __auto_convert = values.pop("__auto_convert", True) signals.pre_init.send(self.__class__, document=self, values=values) - self._data = {} + if self.STRICT and not self._dynamic: + self._data = StrictDict.create(allowed_keys=self._fields.keys())() + else: + self._data = SemiStrictDict.create(allowed_keys=self._fields.keys())() + + self._dynamic_fields = SON() # Assign default values to instance for key, field in self._fields.iteritems(): @@ -61,7 +71,6 @@ class BaseDocument(object): # Set passed values after initialisation if self._dynamic: - self._dynamic_fields = {} dynamic_data = {} for key, value in values.iteritems(): if key in self._fields or key == '_id': @@ -116,6 +125,7 @@ class BaseDocument(object): field = DynamicField(db_field=name) field.name = name self._dynamic_fields[name] = field + self._fields_ordered += (name,) if not name.startswith('_'): value = self.__expand_dynamic_values(name, value) @@ -125,24 +135,33 @@ class BaseDocument(object): self._data[name] = value if hasattr(self, '_changed_fields'): self._mark_as_changed(name) + try: + self__created = self._created + except AttributeError: + self__created = True - if (self._is_document and not self._created and + if (self._is_document and not self__created and name in 
self._meta.get('shard_key', tuple()) and self._data.get(name) != value): OperationError = _import_class('OperationError') msg = "Shard Keys are immutable. Tried to update %s" % name raise OperationError(msg) + try: + self__initialised = self._initialised + except AttributeError: + self__initialised = False # Check if the user has created a new instance of a class - if (self._is_document and self._initialised - and self._created and name == self._meta['id_field']): + if (self._is_document and self__initialised + and self__created and name == self._meta['id_field']): super(BaseDocument, self).__setattr__('_created', False) super(BaseDocument, self).__setattr__(name, value) def __getstate__(self): data = {} - for k in ('_changed_fields', '_initialised', '_created'): + for k in ('_changed_fields', '_initialised', '_created', + '_dynamic_fields', '_fields_ordered'): if hasattr(self, k): data[k] = getattr(self, k) data['_data'] = self.to_mongo() @@ -151,21 +170,24 @@ class BaseDocument(object): def __setstate__(self, data): if isinstance(data["_data"], SON): data["_data"] = self.__class__._from_son(data["_data"])._data - for k in ('_changed_fields', '_initialised', '_created', '_data'): + for k in ('_changed_fields', '_initialised', '_created', '_data', + '_dynamic_fields'): if k in data: setattr(self, k, data[k]) + if '_fields_ordered' in data: + setattr(type(self), '_fields_ordered', data['_fields_ordered']) + dynamic_fields = data.get('_dynamic_fields') or SON() + for k in dynamic_fields.keys(): + setattr(self, k, data["_data"].get(k)) def __iter__(self): - if 'id' in self._fields and 'id' not in self._fields_ordered: - return iter(('id', ) + self._fields_ordered) - return iter(self._fields_ordered) def __getitem__(self, name): """Dictionary-style field access, return a field's value if present. 
""" try: - if name in self._fields: + if name in self._fields_ordered: return getattr(self, name) except AttributeError: pass @@ -175,7 +197,7 @@ class BaseDocument(object): """Dictionary-style field access, set a field's value. """ # Ensure that the field exists before settings its value - if name not in self._fields: + if not self._dynamic and name not in self._fields: raise KeyError(name) return setattr(self, name, value) @@ -241,6 +263,8 @@ class BaseDocument(object): for field_name in self: value = self._data.get(field_name, None) field = self._fields.get(field_name) + if field is None and self._dynamic: + field = self._dynamic_fields.get(field_name) if value is not None: value = field.to_mongo(value) @@ -254,8 +278,10 @@ class BaseDocument(object): data[field.db_field] = value # If "_id" has not been set, then try and set it - if data["_id"] is None: - data["_id"] = self._data.get("id", None) + Document = _import_class("Document") + if isinstance(self, Document): + if data["_id"] is None: + data["_id"] = self._data.get("id", None) if data['_id'] is None: data.pop('_id') @@ -265,15 +291,6 @@ class BaseDocument(object): not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)): data.pop('_cls') - if not self._dynamic: - return data - - # Sort dynamic fields by key - dynamic_fields = sorted(self._dynamic_fields.iteritems(), - key=operator.itemgetter(0)) - for name, field in dynamic_fields: - data[name] = field.to_mongo(self._data.get(name, None)) - return data def validate(self, clean=True): @@ -289,11 +306,8 @@ class BaseDocument(object): errors[NON_FIELD_ERRORS] = error # Get a list of tuples of field names and their current values - fields = [(field, self._data.get(name)) - for name, field in self._fields.items()] - if self._dynamic: - fields += [(field, self._data.get(name)) - for name, field in self._dynamic_fields.items()] + fields = [(self._fields.get(name, self._dynamic_fields.get(name)), + self._data.get(name)) for name in self._fields_ordered] 
EmbeddedDocumentField = _import_class("EmbeddedDocumentField") GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField") @@ -318,14 +332,14 @@ class BaseDocument(object): pk = "None" if hasattr(self, 'pk'): pk = self.pk - elif self._instance: + elif self._instance and hasattr(self._instance, 'pk'): pk = self._instance.pk message = "ValidationError (%s:%s) " % (self._class_name, pk) raise ValidationError(message, errors=errors) - def to_json(self): + def to_json(self, *args, **kwargs): """Converts a document to JSON""" - return json_util.dumps(self.to_mongo()) + return json_util.dumps(self.to_mongo(), *args, **kwargs) @classmethod def from_json(cls, json_data): @@ -377,70 +391,82 @@ class BaseDocument(object): self._changed_fields.append(key) def _clear_changed_fields(self): + """Using get_changed_fields iterate and remove any fields that are + marked as changed""" + for changed in self._get_changed_fields(): + parts = changed.split(".") + data = self + for part in parts: + if isinstance(data, list): + try: + data = data[int(part)] + except IndexError: + data = None + elif isinstance(data, dict): + data = data.get(part, None) + else: + data = getattr(data, part, None) + if hasattr(data, "_changed_fields"): + if hasattr(data, "_is_document") and data._is_document: + continue + data._changed_fields = [] self._changed_fields = [] - EmbeddedDocumentField = _import_class("EmbeddedDocumentField") - for field_name, field in self._fields.iteritems(): - if (isinstance(field, ComplexBaseField) and - isinstance(field.field, EmbeddedDocumentField)): - field_value = getattr(self, field_name, None) - if field_value: - for idx in (field_value if isinstance(field_value, dict) - else xrange(len(field_value))): - field_value[idx]._clear_changed_fields() - elif isinstance(field, EmbeddedDocumentField): - field_value = getattr(self, field_name, None) - if field_value: - field_value._clear_changed_fields() + + def _nestable_types_changed_fields(self, changed_fields, 
key, data, inspected): + # Loop list / dict fields as they contain documents + # Determine the iterator to use + if not hasattr(data, 'items'): + iterator = enumerate(data) + else: + iterator = data.iteritems() + + for index, value in iterator: + list_key = "%s%s." % (key, index) + if hasattr(value, '_get_changed_fields'): + changed = value._get_changed_fields(inspected) + changed_fields += ["%s%s" % (list_key, k) + for k in changed if k] + elif isinstance(value, (list, tuple, dict)): + self._nestable_types_changed_fields(changed_fields, list_key, value, inspected) def _get_changed_fields(self, inspected=None): """Returns a list of all fields that have explicitly been changed. """ EmbeddedDocument = _import_class("EmbeddedDocument") DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument") - _changed_fields = [] - _changed_fields += getattr(self, '_changed_fields', []) - + ReferenceField = _import_class("ReferenceField") + changed_fields = [] + changed_fields += getattr(self, '_changed_fields', []) inspected = inspected or set() - if hasattr(self, 'id'): + if hasattr(self, 'id') and isinstance(self.id, Hashable): if self.id in inspected: - return _changed_fields + return changed_fields inspected.add(self.id) - field_list = self._fields.copy() - if self._dynamic: - field_list.update(self._dynamic_fields) - - for field_name in field_list: - + for field_name in self._fields_ordered: db_field_name = self._db_field_map.get(field_name, field_name) key = '%s.' 
% db_field_name - field = self._data.get(field_name, None) - if hasattr(field, 'id'): - if field.id in inspected: - continue - inspected.add(field.id) + data = self._data.get(field_name, None) + field = self._fields.get(field_name) - if (isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument)) - and db_field_name not in _changed_fields): + if hasattr(data, 'id'): + if data.id in inspected: + continue + inspected.add(data.id) + if isinstance(field, ReferenceField): + continue + elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) + and db_field_name not in changed_fields): # Find all embedded fields that have been changed - changed = field._get_changed_fields(inspected) - _changed_fields += ["%s%s" % (key, k) for k in changed if k] - elif (isinstance(field, (list, tuple, dict)) and - db_field_name not in _changed_fields): - # Loop list / dict fields as they contain documents - # Determine the iterator to use - if not hasattr(field, 'items'): - iterator = enumerate(field) - else: - iterator = field.iteritems() - for index, value in iterator: - if not hasattr(value, '_get_changed_fields'): - continue - list_key = "%s%s." % (key, index) - changed = value._get_changed_fields(inspected) - _changed_fields += ["%s%s" % (list_key, k) - for k in changed if k] - return _changed_fields + changed = data._get_changed_fields(inspected) + changed_fields += ["%s%s" % (key, k) for k in changed if k] + elif (isinstance(data, (list, tuple, dict)) and + db_field_name not in changed_fields): + if (hasattr(field, 'field') and + isinstance(field.field, ReferenceField)): + continue + self._nestable_types_changed_fields(changed_fields, key, data, inspected) + return changed_fields def _delta(self): """Returns the delta (set, unset) of the changes for a document. 
@@ -450,7 +476,6 @@ class BaseDocument(object): doc = self.to_mongo() set_fields = self._get_changed_fields() - set_data = {} unset_data = {} parts = [] if hasattr(self, '_changed_fields'): @@ -461,7 +486,7 @@ class BaseDocument(object): d = doc new_path = [] for p in parts: - if isinstance(d, DBRef): + if isinstance(d, (ObjectId, DBRef)): break elif isinstance(d, list) and p.isdigit(): d = d[int(p)] @@ -537,10 +562,6 @@ class BaseDocument(object): # class if unavailable class_name = son.get('_cls', cls._class_name) data = dict(("%s" % key, value) for key, value in son.iteritems()) - if not UNICODE_KWARGS: - # python 2.6.4 and lower cannot handle unicode keys - # passed to class constructor example: cls(**data) - to_str_keys_recursive(data) # Return correct subclass for document type if class_name != cls._class_name: @@ -578,6 +599,8 @@ class BaseDocument(object): % (cls._class_name, errors)) raise InvalidDocumentError(msg) + if cls.STRICT: + data = dict((k, v) for k,v in data.iteritems() if k in cls._fields) obj = cls(__auto_convert=False, **data) obj._changed_fields = changed_fields obj._created = False @@ -630,8 +653,10 @@ class BaseDocument(object): # Check to see if we need to include _cls allow_inheritance = cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) - include_cls = allow_inheritance and not spec.get('sparse', False) - + include_cls = (allow_inheritance and not spec.get('sparse', False) and + spec.get('cls', True)) + if "cls" in spec: + spec.pop('cls') for key in spec['fields']: # If inherited spec continue if isinstance(key, (list, tuple)): @@ -754,6 +779,9 @@ class BaseDocument(object): """Lookup a field based on its attribute and return a list containing the field's parents and the field. 
""" + + ListField = _import_class("ListField") + if not isinstance(parts, (list, tuple)): parts = [parts] fields = [] @@ -761,7 +789,7 @@ class BaseDocument(object): for field_name in parts: # Handle ListField indexing: - if field_name.isdigit(): + if field_name.isdigit() and isinstance(field, ListField): new_field = field.field fields.append(field_name) continue @@ -812,7 +840,11 @@ class BaseDocument(object): """Dynamically set the display value for a field with choices""" for attr_name, field in self._fields.items(): if field.choices: - setattr(self, + if self._dynamic: + obj = self + else: + obj = type(self) + setattr(obj, 'get_%s_display' % attr_name, partial(self.__get_field_display, field=field)) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index eda9b3c9..04c559e2 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -89,12 +89,7 @@ class BaseField(object): return self # Get value from document instance if available - value = instance._data.get(self.name) - - EmbeddedDocument = _import_class('EmbeddedDocument') - if isinstance(value, EmbeddedDocument) and value._instance is None: - value._instance = weakref.proxy(instance) - return value + return instance._data.get(self.name) def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. @@ -116,6 +111,10 @@ class BaseField(object): # Values cant be compared eg: naive and tz datetimes # So mark it as changed instance._mark_as_changed(self.name) + + EmbeddedDocument = _import_class('EmbeddedDocument') + if isinstance(value, EmbeddedDocument) and value._instance is None: + value._instance = weakref.proxy(instance) instance._data[self.name] = value def error(self, message="", errors=None, field_name=None): @@ -186,7 +185,6 @@ class ComplexBaseField(BaseField): """ field = None - __dereference = False def __get__(self, instance, owner): """Descriptor to automatically dereference references. 
@@ -201,9 +199,11 @@ class ComplexBaseField(BaseField): (self.field is None or isinstance(self.field, (GenericReferenceField, ReferenceField)))) + _dereference = _import_class("DeReference")() + self._auto_dereference = instance._fields[self.name]._auto_dereference - if not self.__dereference and instance._initialised and dereference: - instance._data[self.name] = self._dereference( + if instance._initialised and dereference and instance._data.get(self.name): + instance._data[self.name] = _dereference( instance._data.get(self.name), max_depth=1, instance=instance, name=self.name ) @@ -222,7 +222,7 @@ class ComplexBaseField(BaseField): if (self._auto_dereference and instance._initialised and isinstance(value, (BaseList, BaseDict)) and not value._dereferenced): - value = self._dereference( + value = _dereference( value, max_depth=1, instance=instance, name=self.name ) value._dereferenced = True @@ -382,13 +382,6 @@ class ComplexBaseField(BaseField): owner_document = property(_get_owner_document, _set_owner_document) - @property - def _dereference(self,): - if not self.__dereference: - DeReference = _import_class("DeReference") - self.__dereference = DeReference() # Cached - return self.__dereference - class ObjectIdField(BaseField): """A field wrapper around MongoDB's ObjectIds. 
diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 444d9a25..4b2e8b9b 100644 --- a/mongoengine/base/metaclasses.py +++ b/mongoengine/base/metaclasses.py @@ -91,11 +91,12 @@ class DocumentMetaclass(type): attrs['_fields'] = doc_fields attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k)) for k, v in doc_fields.iteritems()]) + attrs['_reverse_db_field_map'] = dict( + (v, k) for k, v in attrs['_db_field_map'].iteritems()) + attrs['_fields_ordered'] = tuple(i[1] for i in sorted( (v.creation_counter, v.name) for v in doc_fields.itervalues())) - attrs['_reverse_db_field_map'] = dict( - (v, k) for k, v in attrs['_db_field_map'].iteritems()) # # Set document hierarchy @@ -358,12 +359,19 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): new_class.id = field # Set primary key if not defined by the document + new_class._auto_id_field = getattr(parent_doc_cls, + '_auto_id_field', False) if not new_class._meta.get('id_field'): + new_class._auto_id_field = True new_class._meta['id_field'] = 'id' new_class._fields['id'] = ObjectIdField(db_field='_id') new_class._fields['id'].name = 'id' new_class.id = new_class._fields['id'] + # Prepend id field to _fields_ordered + if 'id' in new_class._fields and 'id' not in new_class._fields_ordered: + new_class._fields_ordered = ('id', ) + new_class._fields_ordered + # Merge in exceptions with parent hierarchy exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) module = attrs.get('__module__') diff --git a/mongoengine/common.py b/mongoengine/common.py index 20d51387..daa194b9 100644 --- a/mongoengine/common.py +++ b/mongoengine/common.py @@ -23,8 +23,9 @@ def _import_class(cls_name): field_classes = ('DictField', 'DynamicField', 'EmbeddedDocumentField', 'FileField', 'GenericReferenceField', 'GenericEmbeddedDocumentField', 'GeoPointField', - 'PointField', 'LineStringField', 'PolygonField', - 'ReferenceField', 'StringField', 'ComplexBaseField') + 'PointField', 'LineStringField', 
'ListField', + 'PolygonField', 'ReferenceField', 'StringField', + 'ComplexBaseField', 'GeoJsonBaseField') queryset_classes = ('OperationError',) deref_classes = ('DeReference',) diff --git a/mongoengine/connection.py b/mongoengine/connection.py index abab269f..d3efac62 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -18,7 +18,7 @@ _connections = {} _dbs = {} -def register_connection(alias, name, host='localhost', port=27017, +def register_connection(alias, name, host=None, port=None, is_slave=False, read_preference=False, slaves=None, username=None, password=None, **kwargs): """Add a connection. @@ -43,8 +43,8 @@ def register_connection(alias, name, host='localhost', port=27017, conn_settings = { 'name': name, - 'host': host, - 'port': port, + 'host': host or 'localhost', + 'port': port or 27017, 'is_slave': is_slave, 'slaves': slaves or [], 'username': username, @@ -53,19 +53,15 @@ def register_connection(alias, name, host='localhost', port=27017, } # Handle uri style connections - if "://" in host: - uri_dict = uri_parser.parse_uri(host) - if uri_dict.get('database') is None: - raise ConnectionError("If using URI style connection include "\ - "database name in string") + if "://" in conn_settings['host']: + uri_dict = uri_parser.parse_uri(conn_settings['host']) conn_settings.update({ - 'host': host, - 'name': uri_dict.get('database'), + 'name': uri_dict.get('database') or name, 'username': uri_dict.get('username'), 'password': uri_dict.get('password'), 'read_preference': read_preference, }) - if "replicaSet" in host: + if "replicaSet" in conn_settings['host']: conn_settings['replicaSet'] = True conn_settings.update(kwargs) @@ -97,20 +93,11 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): raise ConnectionError(msg) conn_settings = _connection_settings[alias].copy() - if hasattr(pymongo, 'version_tuple'): # Support for 2.1+ - conn_settings.pop('name', None) - conn_settings.pop('slaves', None) - 
conn_settings.pop('is_slave', None) - conn_settings.pop('username', None) - conn_settings.pop('password', None) - else: - # Get all the slave connections - if 'slaves' in conn_settings: - slaves = [] - for slave_alias in conn_settings['slaves']: - slaves.append(get_connection(slave_alias)) - conn_settings['slaves'] = slaves - conn_settings.pop('read_preference', None) + conn_settings.pop('name', None) + conn_settings.pop('slaves', None) + conn_settings.pop('is_slave', None) + conn_settings.pop('username', None) + conn_settings.pop('password', None) connection_class = MongoClient if 'replicaSet' in conn_settings: @@ -123,7 +110,19 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): connection_class = MongoReplicaSetClient try: - _connections[alias] = connection_class(**conn_settings) + connection = None + connection_settings_iterator = ((alias, settings.copy()) for alias, settings in _connection_settings.iteritems()) + for alias, connection_settings in connection_settings_iterator: + connection_settings.pop('name', None) + connection_settings.pop('slaves', None) + connection_settings.pop('is_slave', None) + connection_settings.pop('username', None) + connection_settings.pop('password', None) + if conn_settings == connection_settings and _connections.get(alias, None): + connection = _connections[alias] + break + + _connections[alias] = connection if connection else connection_class(**conn_settings) except Exception, e: raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) return _connections[alias] diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 13ed1009..cc860066 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -1,6 +1,5 @@ from mongoengine.common import _import_class from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db -from mongoengine.queryset import QuerySet __all__ = ("switch_db", "switch_collection", "no_dereference", @@ -162,12 +161,6 
@@ class no_sub_classes(object): return self.cls -class QuerySetNoDeRef(QuerySet): - """Special no_dereference QuerySet""" - def __dereference(items, max_depth=1, instance=None, name=None): - return items - - class query_counter(object): """ Query_counter context manager to get the number of queries. """ diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index d822e430..074fe09b 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -4,7 +4,7 @@ from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) from fields import (ReferenceField, ListField, DictField, MapField) from connection import get_db from queryset import QuerySet -from document import Document +from document import Document, EmbeddedDocument class DeReference(object): @@ -33,7 +33,8 @@ class DeReference(object): self.max_depth = max_depth doc_type = None - if instance and isinstance(instance, (Document, TopLevelDocumentMetaclass)): + if instance and isinstance(instance, (Document, EmbeddedDocument, + TopLevelDocumentMetaclass)): doc_type = instance._fields.get(name) while hasattr(doc_type, 'field'): doc_type = doc_type.field diff --git a/mongoengine/django/auth.py b/mongoengine/django/auth.py index cff4b743..0a309c4c 100644 --- a/mongoengine/django/auth.py +++ b/mongoengine/django/auth.py @@ -8,6 +8,10 @@ from django.contrib import auth from django.contrib.auth.models import AnonymousUser from django.utils.translation import ugettext_lazy as _ +from .utils import datetime_now + +REDIRECT_FIELD_NAME = 'next' + try: from django.contrib.auth.hashers import check_password, make_password except ImportError: @@ -33,10 +37,6 @@ except ImportError: hash = get_hexdigest(algo, salt, raw_password) return '%s$%s$%s' % (algo, salt, hash) -from .utils import datetime_now - -REDIRECT_FIELD_NAME = 'next' - class ContentType(Document): name = StringField(max_length=100) @@ -230,6 +230,9 @@ class User(Document): date_joined = 
DateTimeField(default=datetime_now, verbose_name=_('date joined')) + user_permissions = ListField(ReferenceField(Permission), verbose_name=_('user permissions'), + help_text=_('Permissions for the user.')) + USERNAME_FIELD = 'username' REQUIRED_FIELDS = ['email'] @@ -378,9 +381,10 @@ class MongoEngineBackend(object): supports_object_permissions = False supports_anonymous_user = False supports_inactive_user = False + _user_doc = False def authenticate(self, username=None, password=None): - user = User.objects(username=username).first() + user = self.user_document.objects(username=username).first() if user: if password and user.check_password(password): backend = auth.get_backends()[0] @@ -389,8 +393,14 @@ class MongoEngineBackend(object): return None def get_user(self, user_id): - return User.objects.with_id(user_id) + return self.user_document.objects.with_id(user_id) + @property + def user_document(self): + if self._user_doc is False: + from .mongo_auth.models import get_user_document + self._user_doc = get_user_document() + return self._user_doc def get_user(userid): """Returns a User object from an id (User.id). Django's equivalent takes diff --git a/mongoengine/django/mongo_auth/models.py b/mongoengine/django/mongo_auth/models.py index 3529d8e1..60965e6d 100644 --- a/mongoengine/django/mongo_auth/models.py +++ b/mongoengine/django/mongo_auth/models.py @@ -1,4 +1,5 @@ from django.conf import settings +from django.contrib.auth.hashers import make_password from django.contrib.auth.models import UserManager from django.core.exceptions import ImproperlyConfigured from django.db import models @@ -6,10 +7,29 @@ from django.utils.importlib import import_module from django.utils.translation import ugettext_lazy as _ +__all__ = ( + 'get_user_document', +) + + MONGOENGINE_USER_DOCUMENT = getattr( settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User') +def get_user_document(): + """Get the user document class used for authentication. 
+ + This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which + defaults to `mongoengine.django.auth.User`. + + """ + + name = MONGOENGINE_USER_DOCUMENT + dot = name.rindex('.') + module = import_module(name[:dot]) + return getattr(module, name[dot + 1:]) + + class MongoUserManager(UserManager): """A User manager wich allows the use of MongoEngine documents in Django. @@ -44,7 +64,7 @@ class MongoUserManager(UserManager): def contribute_to_class(self, model, name): super(MongoUserManager, self).contribute_to_class(model, name) self.dj_model = self.model - self.model = self._get_user_document() + self.model = get_user_document() self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD username = models.CharField(_('username'), max_length=30, unique=True) @@ -55,16 +75,6 @@ class MongoUserManager(UserManager): field = models.CharField(_(name), max_length=30) field.contribute_to_class(self.dj_model, name) - def _get_user_document(self): - try: - name = MONGOENGINE_USER_DOCUMENT - dot = name.rindex('.') - module = import_module(name[:dot]) - return getattr(module, name[dot + 1:]) - except ImportError: - raise ImproperlyConfigured("Error importing %s, please check " - "settings.MONGOENGINE_USER_DOCUMENT" - % name) def get(self, *args, **kwargs): try: @@ -85,5 +95,21 @@ class MongoUserManager(UserManager): class MongoUser(models.Model): + """Dummy user model for Django. + + MongoUser is used to replace Django's UserManager with MongoUserManager. + The actual user document class is mongoengine.django.auth.User or any + other document class specified in MONGOENGINE_USER_DOCUMENT. + + To get the user document class, use `get_user_document()`.
+ + """ + objects = MongoUserManager() + class Meta: + app_label = 'mongo_auth' + + def set_password(self, password): + """Doesn't do anything, but works around the issue with Django 1.6.""" + make_password(password) diff --git a/mongoengine/django/sessions.py b/mongoengine/django/sessions.py index c90807e5..f260951b 100644 --- a/mongoengine/django/sessions.py +++ b/mongoengine/django/sessions.py @@ -1,7 +1,11 @@ +from bson import json_util from django.conf import settings from django.contrib.sessions.backends.base import SessionBase, CreateError from django.core.exceptions import SuspiciousOperation -from django.utils.encoding import force_unicode +try: + from django.utils.encoding import force_unicode +except ImportError: + from django.utils.encoding import force_text as force_unicode from mongoengine.document import Document from mongoengine import fields @@ -52,6 +56,12 @@ class SessionStore(SessionBase): """A MongoEngine-based session store for Django. """ + def _get_session(self, *args, **kwargs): + sess = super(SessionStore, self)._get_session(*args, **kwargs) + if sess.get('_auth_user_id', None): + sess['_auth_user_id'] = str(sess.get('_auth_user_id')) + return sess + def load(self): try: s = MongoSession.objects(session_key=self.session_key, @@ -100,3 +110,15 @@ class SessionStore(SessionBase): return session_key = self.session_key MongoSession.objects(session_key=session_key).delete() + + +class BSONSerializer(object): + """ + Serializer that can handle BSON types (eg ObjectId). 
+ """ + def dumps(self, obj): + return json_util.dumps(obj, separators=(',', ':')).encode('ascii') + + def loads(self, data): + return json_util.loads(data.decode('ascii')) + diff --git a/mongoengine/django/storage.py b/mongoengine/django/storage.py index 341455cd..9df6f9e8 100644 --- a/mongoengine/django/storage.py +++ b/mongoengine/django/storage.py @@ -76,7 +76,7 @@ class GridFSStorage(Storage): """Find the documents in the store with the given name """ docs = self.document.objects - doc = [d for d in docs if getattr(d, self.field).name == name] + doc = [d for d in docs if hasattr(getattr(d, self.field), 'name') and getattr(d, self.field).name == name] if doc: return doc[0] else: diff --git a/mongoengine/document.py b/mongoengine/document.py index a61ed079..67f442ae 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -12,7 +12,9 @@ from mongoengine.common import _import_class from mongoengine.base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, BaseDict, BaseList, ALLOW_INHERITANCE, get_document) -from mongoengine.queryset import OperationError, NotUniqueError, QuerySet +from mongoengine.errors import ValidationError +from mongoengine.queryset import (OperationError, NotUniqueError, + QuerySet, transform) from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME from mongoengine.context_managers import switch_db, switch_collection @@ -53,20 +55,21 @@ class EmbeddedDocument(BaseDocument): dictionary. 
""" + __slots__ = ('_instance') + # The __metaclass__ attribute is removed by 2to3 when running with Python3 # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 my_metaclass = DocumentMetaclass __metaclass__ = DocumentMetaclass - _instance = None - def __init__(self, *args, **kwargs): super(EmbeddedDocument, self).__init__(*args, **kwargs) + self._instance = None self._changed_fields = [] def __eq__(self, other): if isinstance(other, self.__class__): - return self._data == other._data + return self.to_mongo() == other.to_mongo() return False def __ne__(self, other): @@ -124,6 +127,8 @@ class Document(BaseDocument): my_metaclass = TopLevelDocumentMetaclass __metaclass__ = TopLevelDocumentMetaclass + __slots__ = ('__objects' ) + def pk(): """Primary key alias """ @@ -179,7 +184,7 @@ class Document(BaseDocument): def save(self, force_insert=False, validate=True, clean=True, write_concern=None, cascade=None, cascade_kwargs=None, - _refs=None, **kwargs): + _refs=None, save_condition=None, **kwargs): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be created. @@ -202,7 +207,8 @@ class Document(BaseDocument): :param cascade_kwargs: (optional) kwargs dictionary to be passed throw to cascading saves. Implies ``cascade=True``. :param _refs: A list of processed references used in cascading saves - + :param save_condition: only perform save if matching record in db + satisfies condition(s) (e.g., version number) .. versionchanged:: 0.5 In existing documents it only saves changed fields using set / unset. Saves are cascaded and any @@ -216,6 +222,9 @@ class Document(BaseDocument): meta['cascade'] = True. Also you can pass different kwargs to the cascade save using cascade_kwargs which overwrites the existing kwargs with custom values. + .. 
versionchanged:: 0.8.5 + Optional save_condition that only overwrites existing documents + if the condition is satisfied in the current db record. """ signals.pre_save.send(self.__class__, document=self) @@ -229,7 +238,8 @@ class Document(BaseDocument): created = ('_id' not in doc or self._created or force_insert) - signals.pre_save_post_validation.send(self.__class__, document=self, created=created) + signals.pre_save_post_validation.send(self.__class__, document=self, + created=created) try: collection = self._get_collection() @@ -242,7 +252,12 @@ class Document(BaseDocument): object_id = doc['_id'] updates, removals = self._delta() # Need to add shard key to query, or you get an error - select_dict = {'_id': object_id} + if save_condition is not None: + select_dict = transform.query(self.__class__, + **save_condition) + else: + select_dict = {} + select_dict['_id'] = object_id shard_key = self.__class__._meta.get('shard_key', tuple()) for k in shard_key: actual_key = self._db_field_map.get(k, k) @@ -262,8 +277,9 @@ class Document(BaseDocument): if removals: update_query["$unset"] = removals if updates or removals: + upsert = save_condition is None last_error = collection.update(select_dict, update_query, - upsert=True, **write_concern) + upsert=upsert, **write_concern) created = is_new_object(last_error) @@ -281,7 +297,9 @@ class Document(BaseDocument): kwargs.update(cascade_kwargs) kwargs['_refs'] = _refs self.cascade_save(**kwargs) - + except pymongo.errors.DuplicateKeyError, err: + message = u'Tried to save duplicate unique keys (%s)' + raise NotUniqueError(message % unicode(err)) except pymongo.errors.OperationFailure, err: message = 'Could not save document (%s)' if re.match('^E1100[01] duplicate key', unicode(err)): @@ -291,12 +309,12 @@ class Document(BaseDocument): raise NotUniqueError(message % unicode(err)) raise OperationError(message % unicode(err)) id_field = self._meta['id_field'] - if id_field not in self._meta.get('shard_key', []): + if created 
or id_field not in self._meta.get('shard_key', []): self[id_field] = self._fields[id_field].to_python(object_id) + signals.post_save.send(self.__class__, document=self, created=created) self._clear_changed_fields() self._created = False - signals.post_save.send(self.__class__, document=self, created=created) return self def cascade_save(self, *args, **kwargs): @@ -353,7 +371,13 @@ class Document(BaseDocument): been saved. """ if not self.pk: - raise OperationError('attempt to update a document not yet saved') + if kwargs.get('upsert', False): + query = self.to_mongo() + if "_cls" in query: + del(query["_cls"]) + return self._qs.filter(**query).update_one(**kwargs) + else: + raise OperationError('attempt to update a document not yet saved') # Need to add shard key to query, or you get an error return self._qs.filter(**self._object_key).update_one(**kwargs) @@ -395,7 +419,7 @@ class Document(BaseDocument): """ with switch_db(self.__class__, db_alias) as cls: collection = cls._get_collection() - db = cls._get_db + db = cls._get_db() self._get_collection = lambda: collection self._get_db = lambda: db self._collection = collection @@ -435,33 +459,45 @@ class Document(BaseDocument): .. versionadded:: 0.5 """ - import dereference - self._data = dereference.DeReference()(self._data, max_depth) + DeReference = _import_class('DeReference') + DeReference()([self], max_depth + 1) return self - def reload(self, max_depth=1): + def reload(self, *fields, **kwargs): """Reloads all attributes from the database. + :param fields: (optional) args list of fields to reload + :param max_depth: (optional) depth of dereferencing to follow + .. versionadded:: 0.1.2 .. versionchanged:: 0.6 Now chainable + .. 
versionchanged:: 0.9 Can provide specific fields to reload """ - id_field = self._meta['id_field'] + max_depth = 1 + if fields and isinstance(fields[0], int): + max_depth = fields[0] + fields = fields[1:] + elif "max_depth" in kwargs: + max_depth = kwargs["max_depth"] + + if not self.pk: + raise self.DoesNotExist("Document does not exist") obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( - **{id_field: self[id_field]}).limit(1).select_related(max_depth=max_depth) + **self._object_key).only(*fields).limit(1 + ).select_related(max_depth=max_depth) if obj: obj = obj[0] else: - msg = "Reloaded document has been deleted" - raise OperationError(msg) - for field in self._fields: - setattr(self, field, self._reload(field, obj[field])) - if self._dynamic: - for name in self._dynamic_fields.keys(): - setattr(self, name, self._reload(name, obj._data[name])) + raise self.DoesNotExist("Document does not exist") + + for field in self._fields_ordered: + if not fields or field in fields: + setattr(self, field, self._reload(field, obj[field])) + self._changed_fields = obj._changed_fields self._created = False - return obj + return self def _reload(self, key, value): """Used by :meth:`~mongoengine.Document.reload` to ensure the @@ -474,6 +510,7 @@ class Document(BaseDocument): value = [self._reload(key, v) for v in value] value = BaseList(value, self, key) elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)): + value._instance = None value._changed_fields = [] return value @@ -534,6 +571,8 @@ class Document(BaseDocument): def ensure_indexes(cls): """Checks the document meta data and ensures all the indexes exist. + Global defaults can be set in the meta - see :doc:`guide/defining-documents` + .. 
note:: You can disable automatic index creation by setting `auto_create_index` to False in the documents meta data """ @@ -543,6 +582,8 @@ class Document(BaseDocument): index_cls = cls._meta.get('index_cls', True) collection = cls._get_collection() + if collection.read_preference > 1: + return # determine if an index which we are creating includes # _cls as its first field; if so, we can avoid creating diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 451f7aca..abadad65 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -42,7 +42,8 @@ __all__ = ['StringField', 'URLField', 'EmailField', 'IntField', 'LongField', 'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy', 'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField', 'GeoPointField', 'PointField', - 'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField'] + 'LineStringField', 'PolygonField', 'SequenceField', 'UUIDField', + 'GeoJsonBaseField'] RECURSIVE_REFERENCE_CONSTANT = 'self' @@ -152,7 +153,7 @@ class EmailField(StringField): EMAIL_REGEX = re.compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string - r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE # domain + r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}$', re.IGNORECASE # domain ) def validate(self, value): @@ -279,14 +280,14 @@ class DecimalField(BaseField): :param precision: Number of decimal places to store. 
:param rounding: The rounding rule from the python decimal libary: - - decimial.ROUND_CEILING (towards Infinity) - - decimial.ROUND_DOWN (towards zero) - - decimial.ROUND_FLOOR (towards -Infinity) - - decimial.ROUND_HALF_DOWN (to nearest with ties going towards zero) - - decimial.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer) - - decimial.ROUND_HALF_UP (to nearest with ties going away from zero) - - decimial.ROUND_UP (away from zero) - - decimial.ROUND_05UP (away from zero if last digit after rounding towards zero would have been 0 or 5; otherwise towards zero) + - decimal.ROUND_CEILING (towards Infinity) + - decimal.ROUND_DOWN (towards zero) + - decimal.ROUND_FLOOR (towards -Infinity) + - decimal.ROUND_HALF_DOWN (to nearest with ties going towards zero) + - decimal.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer) + - decimal.ROUND_HALF_UP (to nearest with ties going away from zero) + - decimal.ROUND_UP (away from zero) + - decimal.ROUND_05UP (away from zero if last digit after rounding towards zero would have been 0 or 5; otherwise towards zero) Defaults to: ``decimal.ROUND_HALF_UP`` @@ -304,7 +305,10 @@ class DecimalField(BaseField): return value # Convert to string for python 2.6 before casting to Decimal - value = decimal.Decimal("%s" % value) + try: + value = decimal.Decimal("%s" % value) + except decimal.InvalidOperation: + return value return value.quantize(self.precision, rounding=self.rounding) def to_mongo(self, value): @@ -387,7 +391,7 @@ class DateTimeField(BaseField): if dateutil: try: return dateutil.parser.parse(value) - except ValueError: + except (TypeError, ValueError): return None # split usecs, because they are not recognized by strptime. 
@@ -624,7 +628,9 @@ class DynamicField(BaseField): cls = value.__class__ val = value.to_mongo() # If we its a document thats not inherited add _cls - if (isinstance(value, (Document, EmbeddedDocument))): + if (isinstance(value, Document)): + val = {"_ref": value.to_dbref(), "_cls": cls.__name__} + if (isinstance(value, EmbeddedDocument)): val['_cls'] = cls.__name__ return val @@ -645,6 +651,15 @@ class DynamicField(BaseField): value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))] return value + def to_python(self, value): + if isinstance(value, dict) and '_cls' in value: + doc_cls = get_document(value['_cls']) + if '_ref' in value: + value = doc_cls._get_db().dereference(value['_ref']) + return doc_cls._from_son(value) + + return super(DynamicField, self).to_python(value) + def lookup_member(self, member_name): return member_name @@ -724,13 +739,28 @@ class SortedListField(ListField): reverse=self._order_reverse) return sorted(value, reverse=self._order_reverse) +def key_not_string(d): + """ Helper function to recursively determine if any key in a dictionary is + not a string. + """ + for k, v in d.items(): + if not isinstance(k, basestring) or (isinstance(v, dict) and key_not_string(v)): + return True + +def key_has_dot_or_dollar(d): + """ Helper function to recursively determine if any key in a dictionary + contains a dot or a dollar sign. + """ + for k, v in d.items(): + if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)): + return True class DictField(ComplexBaseField): """A dictionary field that wraps a standard Python dictionary. This is similar to an embedded document, but the structure is not defined. .. note:: - Required means it cannot be empty - as the default for ListFields is [] + Required means it cannot be empty - as the default for DictFields is {} .. versionadded:: 0.3 .. 
versionchanged:: 0.5 - Can now handle complex / varying types of data @@ -750,11 +780,11 @@ class DictField(ComplexBaseField): if not isinstance(value, dict): self.error('Only dictionaries may be used in a DictField') - if any(k for k in value.keys() if not isinstance(k, basestring)): + if key_not_string(value): msg = ("Invalid dictionary key - documents must " "have only string keys") self.error(msg) - if any(('.' in k or '$' in k) for k in value.keys()): + if key_has_dot_or_dollar(value): self.error('Invalid dictionary key name - keys may not contain "."' ' or "$" characters') super(DictField, self).validate(value) @@ -769,6 +799,10 @@ class DictField(ComplexBaseField): if op in match_operators and isinstance(value, basestring): return StringField().prepare_query_value(op, value) + + if hasattr(self.field, 'field'): + return self.field.prepare_query_value(op, value) + return super(DictField, self).prepare_query_value(op, value) @@ -989,7 +1023,10 @@ class GenericReferenceField(BaseField): id_ = id_field.to_mongo(id_) collection = document._get_collection_name() ref = DBRef(collection, id_) - return {'_cls': document._class_name, '_ref': ref} + return SON(( + ('_cls', document._class_name), + ('_ref', ref) + )) def prepare_query_value(self, op, value): if value is None: @@ -1083,6 +1120,10 @@ class GridFSProxy(object): def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self.grid_id) + def __str__(self): + name = getattr(self.get(), 'filename', self.grid_id) if self.get() else '(no file)' + return '<%s: %s>' % (self.__class__.__name__, name) + def __eq__(self, other): if isinstance(other, GridFSProxy): return ((self.grid_id == other.grid_id) and @@ -1190,9 +1231,7 @@ class FileField(BaseField): # Check if a file already exists for this model grid_file = instance._data.get(self.name) if not isinstance(grid_file, self.proxy_class): - grid_file = self.proxy_class(key=self.name, instance=instance, - db_alias=self.db_alias, - 
collection_name=self.collection_name) + grid_file = self.get_proxy_obj(key=self.name, instance=instance) instance._data[self.name] = grid_file if not grid_file.key: @@ -1214,15 +1253,23 @@ class FileField(BaseField): pass # Create a new proxy object as we don't already have one - instance._data[key] = self.proxy_class(key=key, instance=instance, - db_alias=self.db_alias, - collection_name=self.collection_name) + instance._data[key] = self.get_proxy_obj(key=key, instance=instance) instance._data[key].put(value) else: instance._data[key] = value instance._mark_as_changed(key) + def get_proxy_obj(self, key, instance, db_alias=None, collection_name=None): + if db_alias is None: + db_alias = self.db_alias + if collection_name is None: + collection_name = self.collection_name + + return self.proxy_class(key=key, instance=instance, + db_alias=db_alias, + collection_name=collection_name) + def to_mongo(self, value): # Store the GridFS file id in MongoDB if isinstance(value, self.proxy_class) and value.grid_id is not None: @@ -1255,6 +1302,9 @@ class ImageGridFsProxy(GridFSProxy): applying field properties (size, thumbnail_size) """ field = self.instance._fields[self.key] + # Handle nested fields + if hasattr(field, 'field') and isinstance(field.field, FileField): + field = field.field try: img = Image.open(file_obj) @@ -1563,7 +1613,12 @@ class UUIDField(BaseField): class GeoPointField(BaseField): - """A list storing a latitude and longitude. + """A list storing a longitude and latitude coordinate. + + .. note:: this represents a generic point in a 2D plane and a legacy way of + representing a geo point. It admits 2d indexes but not "2dsphere" indexes + in MongoDB > 2.4 which are more natural for modeling geospatial points. + See :ref:`geospatial-indexes` .. versionadded:: 0.4 """ @@ -1585,7 +1640,7 @@ class GeoPointField(BaseField): class PointField(GeoJsonBaseField): - """A geo json field storing a latitude and longitude. 
+ """A GeoJSON field storing a longitude and latitude coordinate. The data is represented as: @@ -1604,7 +1659,7 @@ class PointField(GeoJsonBaseField): class LineStringField(GeoJsonBaseField): - """A geo json field storing a line of latitude and longitude coordinates. + """A GeoJSON field storing a line of longitude and latitude coordinates. The data is represented as: @@ -1622,7 +1677,7 @@ class LineStringField(GeoJsonBaseField): class PolygonField(GeoJsonBaseField): - """A geo json field storing a polygon of latitude and longitude coordinates. + """A GeoJSON field storing a polygon of longitude and latitude coordinates. The data is represented as: diff --git a/mongoengine/python_support.py b/mongoengine/python_support.py index 097740eb..2c4df00c 100644 --- a/mongoengine/python_support.py +++ b/mongoengine/python_support.py @@ -3,8 +3,6 @@ import sys PY3 = sys.version_info[0] == 3 -PY25 = sys.version_info[:2] == (2, 5) -UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264 if PY3: import codecs @@ -29,33 +27,3 @@ else: txt_type = unicode str_types = (bin_type, txt_type) - -if PY25: - def product(*args, **kwds): - pools = map(tuple, args) * kwds.get('repeat', 1) - result = [[]] - for pool in pools: - result = [x + [y] for x in result for y in pool] - for prod in result: - yield tuple(prod) - reduce = reduce -else: - from itertools import product - from functools import reduce - - -# For use with Python 2.5 -# converts all keys from unicode to str for d and all nested dictionaries -def to_str_keys_recursive(d): - if isinstance(d, list): - for val in d: - if isinstance(val, (dict, list)): - to_str_keys_recursive(val) - elif isinstance(d, dict): - for key, val in d.items(): - if isinstance(val, (dict, list)): - to_str_keys_recursive(val) - if isinstance(key, unicode): - d[str(key)] = d.pop(key) - else: - raise ValueError("non list/dict parameter not allowed") diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py new file mode 
100644 index 00000000..4fb143bb --- /dev/null +++ b/mongoengine/queryset/base.py @@ -0,0 +1,1624 @@ +from __future__ import absolute_import + +import copy +import itertools +import operator +import pprint +import re +import warnings + +from bson import SON +from bson.code import Code +from bson import json_util +import pymongo +import pymongo.errors +from pymongo.common import validate_read_preference + +from mongoengine import signals +from mongoengine.connection import get_db +from mongoengine.context_managers import switch_db +from mongoengine.common import _import_class +from mongoengine.base.common import get_document +from mongoengine.errors import (OperationError, NotUniqueError, + InvalidQueryError, LookUpError) +from mongoengine.queryset import transform +from mongoengine.queryset.field_list import QueryFieldList +from mongoengine.queryset.visitor import Q, QNode + + +__all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') + +# Delete rules +DO_NOTHING = 0 +NULLIFY = 1 +CASCADE = 2 +DENY = 3 +PULL = 4 + +RE_TYPE = type(re.compile('')) + + +class BaseQuerySet(object): + """A set of results returned from a query. Wraps a MongoDB cursor, + providing :class:`~mongoengine.Document` objects as the results. 
+ """ + __dereference = False + _auto_dereference = True + + def __init__(self, document, collection): + self._document = document + self._collection_obj = collection + self._mongo_query = None + self._query_obj = Q() + self._initial_query = {} + self._where_clause = None + self._loaded_fields = QueryFieldList() + self._ordering = None + self._snapshot = False + self._timeout = True + self._class_check = True + self._slave_okay = False + self._read_preference = None + self._iter = False + self._scalar = [] + self._none = False + self._as_pymongo = False + self._as_pymongo_coerce = False + + # If inheritance is allowed, only return instances and instances of + # subclasses of the class being used + if document._meta.get('allow_inheritance') is True: + if len(self._document._subclasses) == 1: + self._initial_query = {"_cls": self._document._subclasses[0]} + else: + self._initial_query = {"_cls": {"$in": self._document._subclasses}} + self._loaded_fields = QueryFieldList(always_include=['_cls']) + self._cursor_obj = None + self._limit = None + self._skip = None + self._hint = -1 # Using -1 as None is a valid value for hint + + def __call__(self, q_obj=None, class_check=True, slave_okay=False, + read_preference=None, **query): + """Filter the selected documents by calling the + :class:`~mongoengine.queryset.QuerySet` with a query. + + :param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in + the query; the :class:`~mongoengine.queryset.QuerySet` is filtered + multiple times with different :class:`~mongoengine.queryset.Q` + objects, only the last one will be used + :param class_check: If set to False bypass class name check when + querying collection + :param slave_okay: if True, allows this query to be run against a + replica secondary. + :params read_preference: if set, overrides connection-level + read_preference from `ReplicaSetConnection`. 
+ :param query: Django-style query keyword arguments + """ + query = Q(**query) + if q_obj: + # make sure proper query object is passed + if not isinstance(q_obj, QNode): + msg = ("Not a query object: %s. " + "Did you intend to use key=value?" % q_obj) + raise InvalidQueryError(msg) + query &= q_obj + + if read_preference is None: + queryset = self.clone() + else: + # Use the clone provided when setting read_preference + queryset = self.read_preference(read_preference) + + queryset._query_obj &= query + queryset._mongo_query = None + queryset._cursor_obj = None + queryset._class_check = class_check + + return queryset + + def __getitem__(self, key): + """Support skip and limit using getitem and slicing syntax. + """ + queryset = self.clone() + + # Slice provided + if isinstance(key, slice): + try: + queryset._cursor_obj = queryset._cursor[key] + queryset._skip, queryset._limit = key.start, key.stop + if key.start and key.stop: + queryset._limit = key.stop - key.start + except IndexError, err: + # PyMongo raises an error if key.start == key.stop, catch it, + # bin it, kill it. + start = key.start or 0 + if start >= 0 and key.stop >= 0 and key.step is None: + if start == key.stop: + queryset.limit(0) + queryset._skip = key.start + queryset._limit = key.stop - start + return queryset + raise err + # Allow further QuerySet modifications to be performed + return queryset + # Integer index provided + elif isinstance(key, int): + if queryset._scalar: + return queryset._get_scalar( + queryset._document._from_son(queryset._cursor[key], + _auto_dereference=self._auto_dereference)) + if queryset._as_pymongo: + return queryset._get_as_pymongo(queryset._cursor[key]) + return queryset._document._from_son(queryset._cursor[key], + _auto_dereference=self._auto_dereference) + raise AttributeError + + def __iter__(self): + raise NotImplementedError + + def _has_data(self): + """ Retrieves whether cursor has any data. 
""" + + queryset = self.order_by() + return False if queryset.first() is None else True + + def __nonzero__(self): + """ Avoid to open all records in an if stmt in Py2. """ + + return self._has_data() + + def __bool__(self): + """ Avoid to open all records in an if stmt in Py3. """ + + return self._has_data() + + # Core functions + + def all(self): + """Returns all documents.""" + return self.__call__() + + def filter(self, *q_objs, **query): + """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` + """ + return self.__call__(*q_objs, **query) + + def get(self, *q_objs, **query): + """Retrieve the the matching object raising + :class:`~mongoengine.queryset.MultipleObjectsReturned` or + `DocumentName.MultipleObjectsReturned` exception if multiple results + and :class:`~mongoengine.queryset.DoesNotExist` or + `DocumentName.DoesNotExist` if no results are found. + + .. versionadded:: 0.3 + """ + queryset = self.clone() + queryset = queryset.order_by().limit(2) + queryset = queryset.filter(*q_objs, **query) + + try: + result = queryset.next() + except StopIteration: + msg = ("%s matching query does not exist." + % queryset._document._class_name) + raise queryset._document.DoesNotExist(msg) + try: + queryset.next() + except StopIteration: + return result + + queryset.rewind() + message = u'%d items returned, instead of 1' % queryset.count() + raise queryset._document.MultipleObjectsReturned(message) + + def create(self, **kwargs): + """Create new object. Returns the saved object instance. + + .. versionadded:: 0.4 + """ + return self._document(**kwargs).save() + + def get_or_create(self, write_concern=None, auto_save=True, + *q_objs, **query): + """Retrieve unique object or create, if it doesn't exist. Returns a + tuple of ``(object, created)``, where ``object`` is the retrieved or + created object and ``created`` is a boolean specifying whether a new + object was created. 
Raises + :class:`~mongoengine.queryset.MultipleObjectsReturned` or + `DocumentName.MultipleObjectsReturned` if multiple results are found. + A new document will be created if the document doesn't exists; a + dictionary of default values for the new document may be provided as a + keyword argument called :attr:`defaults`. + + .. note:: This requires two separate operations and therefore a + race condition exists. Because there are no transactions in + mongoDB other approaches should be investigated, to ensure you + don't accidently duplicate data when using this method. This is + now scheduled to be removed before 1.0 + + :param write_concern: optional extra keyword arguments used if we + have to create a new document. + Passes any write_concern onto :meth:`~mongoengine.Document.save` + + :param auto_save: if the object is to be saved automatically if + not found. + + .. deprecated:: 0.8 + .. versionchanged:: 0.6 - added `auto_save` + .. versionadded:: 0.3 + """ + msg = ("get_or_create is scheduled to be deprecated. The approach is " + "flawed without transactions. Upserts should be preferred.") + warnings.warn(msg, DeprecationWarning) + + defaults = query.get('defaults', {}) + if 'defaults' in query: + del query['defaults'] + + try: + doc = self.get(*q_objs, **query) + return doc, False + except self._document.DoesNotExist: + query.update(defaults) + doc = self._document(**query) + + if auto_save: + doc.save(write_concern=write_concern) + return doc, True + + def first(self): + """Retrieve the first object matching the query. 
+ """ + queryset = self.clone() + try: + result = queryset[0] + except IndexError: + result = None + return result + + def insert(self, doc_or_docs, load_bulk=True, write_concern=None): + """bulk insert documents + + :param docs_or_doc: a document or list of documents to be inserted + :param load_bulk (optional): If True returns the list of document + instances + :param write_concern: Extra keyword arguments are passed down to + :meth:`~pymongo.collection.Collection.insert` + which will be used as options for the resultant + ``getLastError`` command. For example, + ``insert(..., {w: 2, fsync: True})`` will wait until at least + two servers have recorded the write and will force an fsync on + each server being written to. + + By default returns document instances, set ``load_bulk`` to False to + return just ``ObjectIds`` + + .. versionadded:: 0.5 + """ + Document = _import_class('Document') + + if write_concern is None: + write_concern = {} + + docs = doc_or_docs + return_one = False + if isinstance(docs, Document) or issubclass(docs.__class__, Document): + return_one = True + docs = [docs] + + raw = [] + for doc in docs: + if not isinstance(doc, self._document): + msg = ("Some documents inserted aren't instances of %s" + % str(self._document)) + raise OperationError(msg) + if doc.pk and not doc._created: + msg = "Some documents have ObjectIds use doc.update() instead" + raise OperationError(msg) + raw.append(doc.to_mongo()) + + signals.pre_bulk_insert.send(self._document, documents=docs) + try: + ids = self._collection.insert(raw, **write_concern) + except pymongo.errors.DuplicateKeyError, err: + message = 'Could not save document (%s)'; + raise NotUniqueError(message % unicode(err)) + except pymongo.errors.OperationFailure, err: + message = 'Could not save document (%s)'; + if re.match('^E1100[01] duplicate key', unicode(err)): + # E11000 - duplicate key error index + # E11001 - duplicate key on update + message = u'Tried to save duplicate unique keys (%s)' + 
raise NotUniqueError(message % unicode(err)) + raise OperationError(message % unicode(err)) + + if not load_bulk: + signals.post_bulk_insert.send( + self._document, documents=docs, loaded=False) + return return_one and ids[0] or ids + + documents = self.in_bulk(ids) + results = [] + for obj_id in ids: + results.append(documents.get(obj_id)) + signals.post_bulk_insert.send( + self._document, documents=results, loaded=True) + return return_one and results[0] or results + + def count(self, with_limit_and_skip=True): + """Count the selected elements in the query. + + :param with_limit_and_skip (optional): take any :meth:`limit` or + :meth:`skip` that has been applied to this cursor into account when + getting the count + """ + if self._limit == 0 and with_limit_and_skip or self._none: + return 0 + return self._cursor.count(with_limit_and_skip=with_limit_and_skip) + + def delete(self, write_concern=None, _from_doc_delete=False): + """Delete the documents matched by the query. + + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. + :param _from_doc_delete: True when called from document delete therefore + signals will have been triggered so don't loop. 
+ """ + queryset = self.clone() + doc = queryset._document + + if write_concern is None: + write_concern = {} + + # Handle deletes where skips or limits have been applied or + # there is an untriggered delete signal + has_delete_signal = signals.signals_available and ( + signals.pre_delete.has_receivers_for(self._document) or + signals.post_delete.has_receivers_for(self._document)) + + call_document_delete = (queryset._skip or queryset._limit or + has_delete_signal) and not _from_doc_delete + + if call_document_delete: + for doc in queryset: + doc.delete(write_concern=write_concern) + return + + delete_rules = doc._meta.get('delete_rules') or {} + # Check for DENY rules before actually deleting/nullifying any other + # references + for rule_entry in delete_rules: + document_cls, field_name = rule_entry + rule = doc._meta['delete_rules'][rule_entry] + if rule == DENY and document_cls.objects( + **{field_name + '__in': self}).count() > 0: + msg = ("Could not delete document (%s.%s refers to it)" + % (document_cls.__name__, field_name)) + raise OperationError(msg) + + for rule_entry in delete_rules: + document_cls, field_name = rule_entry + rule = doc._meta['delete_rules'][rule_entry] + if rule == CASCADE: + ref_q = document_cls.objects(**{field_name + '__in': self}) + ref_q_count = ref_q.count() + if (doc != document_cls and ref_q_count > 0 + or (doc == document_cls and ref_q_count > 0)): + ref_q.delete(write_concern=write_concern) + elif rule == NULLIFY: + document_cls.objects(**{field_name + '__in': self}).update( + write_concern=write_concern, **{'unset__%s' % field_name: 1}) + elif rule == PULL: + document_cls.objects(**{field_name + '__in': self}).update( + write_concern=write_concern, + **{'pull_all__%s' % field_name: self}) + + queryset._collection.remove(queryset._query, write_concern=write_concern) + + def update(self, upsert=False, multi=True, write_concern=None, + full_result=False, **update): + """Perform an atomic update on the fields matched by the 
query. + + :param upsert: Any existing document with that "_id" is overwritten. + :param multi: Update multiple documents. + :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. + :param full_result: Return the full result rather than just the number + updated. + :param update: Django-style update keyword arguments + + .. versionadded:: 0.2 + """ + if not update and not upsert: + raise OperationError("No update parameters, would remove data") + + if write_concern is None: + write_concern = {} + + queryset = self.clone() + query = queryset._query + update = transform.update(queryset._document, **update) + + # If doing an atomic upsert on an inheritable class + # then ensure we add _cls to the update operation + if upsert and '_cls' in query: + if '$set' in update: + update["$set"]["_cls"] = queryset._document._class_name + else: + update["$set"] = {"_cls": queryset._document._class_name} + try: + result = queryset._collection.update(query, update, multi=multi, + upsert=upsert, **write_concern) + if full_result: + return result + elif result: + return result['n'] + except pymongo.errors.DuplicateKeyError, err: + raise NotUniqueError(u'Update failed (%s)' % unicode(err)) + except pymongo.errors.OperationFailure, err: + if unicode(err) == u'multi not coded yet': + message = u'update() method requires MongoDB 1.1.3+' + raise OperationError(message) + raise OperationError(u'Update failed (%s)' % unicode(err)) + + def update_one(self, upsert=False, write_concern=None, **update): + """Perform an atomic update on first field matched by the query. + + :param upsert: Any existing document with that "_id" is overwritten. 
+ :param write_concern: Extra keyword arguments are passed down which + will be used as options for the resultant + ``getLastError`` command. For example, + ``save(..., write_concern={w: 2, fsync: True}, ...)`` will + wait until at least two servers have recorded the write and + will force an fsync on the primary server. + :param update: Django-style update keyword arguments + + .. versionadded:: 0.2 + """ + return self.update( + upsert=upsert, multi=False, write_concern=write_concern, **update) + + def modify(self, upsert=False, full_response=False, remove=False, new=False, **update): + """Update and return the updated document. + + Returns either the document before or after modification based on `new` + parameter. If no documents match the query and `upsert` is false, + returns ``None``. If upserting and `new` is false, returns ``None``. + + If the full_response parameter is ``True``, the return value will be + the entire response object from the server, including the 'ok' and + 'lastErrorObject' fields, rather than just the modified document. + This is useful mainly because the 'lastErrorObject' document holds + information about the command's execution. + + :param upsert: insert if document doesn't exist (default ``False``) + :param full_response: return the entire response object from the + server (default ``False``) + :param remove: remove rather than updating (default ``False``) + :param new: return updated rather than original document + (default ``False``) + :param update: Django-style update keyword arguments + + .. 
versionadded:: 0.9 + """ + + if remove and new: + raise OperationError("Conflicting parameters: remove and new") + + if not update and not upsert and not remove: + raise OperationError("No update parameters, must either update or remove") + + queryset = self.clone() + query = queryset._query + update = transform.update(queryset._document, **update) + sort = queryset._ordering + + try: + result = queryset._collection.find_and_modify( + query, update, upsert=upsert, sort=sort, remove=remove, new=new, + full_response=full_response, **self._cursor_args) + except pymongo.errors.DuplicateKeyError, err: + raise NotUniqueError(u"Update failed (%s)" % err) + except pymongo.errors.OperationFailure, err: + raise OperationError(u"Update failed (%s)" % err) + + if full_response: + if result["value"] is not None: + result["value"] = self._document._from_son(result["value"]) + else: + if result is not None: + result = self._document._from_son(result) + + return result + + def with_id(self, object_id): + """Retrieve the object matching the id provided. Uses `object_id` only + and raises InvalidQueryError if a filter has been applied. Returns + `None` if no document exists with that id. + + :param object_id: the value for the id of the document to look up + + .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set + """ + queryset = self.clone() + if not queryset._query_obj.empty: + msg = "Cannot use a filter whilst using `with_id`" + raise InvalidQueryError(msg) + return queryset.filter(pk=object_id).first() + + def in_bulk(self, object_ids): + """Retrieve a set of documents by their ids. + + :param object_ids: a list or tuple of ``ObjectId``\ s + :rtype: dict of ObjectIds as keys and collection-specific + Document subclasses as values. + + .. 
versionadded:: 0.3 + """ + doc_map = {} + + docs = self._collection.find({'_id': {'$in': object_ids}}, + **self._cursor_args) + if self._scalar: + for doc in docs: + doc_map[doc['_id']] = self._get_scalar( + self._document._from_son(doc)) + elif self._as_pymongo: + for doc in docs: + doc_map[doc['_id']] = self._get_as_pymongo(doc) + else: + for doc in docs: + doc_map[doc['_id']] = self._document._from_son(doc) + + return doc_map + + def none(self): + """Helper that just returns a list""" + queryset = self.clone() + queryset._none = True + return queryset + + def no_sub_classes(self): + """ + Only return instances of this document and not any inherited documents + """ + if self._document._meta.get('allow_inheritance') is True: + self._initial_query = {"_cls": self._document._class_name} + + return self + + def using(self, alias): + """This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database. + + :param alias: The database alias + + .. versionadded:: 0.8 + """ + + with switch_db(self._document, alias) as cls: + collection = cls._get_collection() + + return self.clone_into(self.__class__(self._document, collection)) + + def clone(self): + """Creates a copy of the current + :class:`~mongoengine.queryset.QuerySet` + + .. 
versionadded:: 0.5 + """ + return self.clone_into(self.__class__(self._document, self._collection_obj)) + + def clone_into(self, cls): + """Creates a copy of the current + :class:`~mongoengine.queryset.base.BaseQuerySet` into another child class + """ + if not isinstance(cls, BaseQuerySet): + raise OperationError('%s is not a subclass of BaseQuerySet' % cls.__name__) + + copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', + '_where_clause', '_loaded_fields', '_ordering', '_snapshot', + '_timeout', '_class_check', '_slave_okay', '_read_preference', + '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', + '_limit', '_skip', '_hint', '_auto_dereference') + + for prop in copy_props: + val = getattr(self, prop) + setattr(cls, prop, copy.copy(val)) + + if self._cursor_obj: + cls._cursor_obj = self._cursor_obj.clone() + + return cls + + def select_related(self, max_depth=1): + """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or + :class:`~bson.object_id.ObjectId` a maximum depth in order to cut down + the number queries to mongodb. + + .. versionadded:: 0.5 + """ + # Make select related work the same for querysets + max_depth += 1 + queryset = self.clone() + return queryset._dereference(queryset, max_depth=max_depth) + + def limit(self, n): + """Limit the number of returned documents to `n`. This may also be + achieved using array-slicing syntax (e.g. ``User.objects[:5]``). + + :param n: the maximum number of objects to return + """ + queryset = self.clone() + if n == 0: + queryset._cursor.limit(1) + else: + queryset._cursor.limit(n) + queryset._limit = n + # Return self to allow chaining + return queryset + + def skip(self, n): + """Skip `n` documents before returning the results. This may also be + achieved using array-slicing syntax (e.g. ``User.objects[5:]``). 
+ + :param n: the number of objects to skip before returning results + """ + queryset = self.clone() + queryset._cursor.skip(n) + queryset._skip = n + return queryset + + def hint(self, index=None): + """Added 'hint' support, telling Mongo the proper index to use for the + query. + + Judicious use of hints can greatly improve query performance. When + doing a query on multiple fields (at least one of which is indexed) + pass the indexed field as a hint to the query. + + Hinting will not do anything if the corresponding index does not exist. + The last hint applied to this cursor takes precedence over all others. + + .. versionadded:: 0.5 + """ + queryset = self.clone() + queryset._cursor.hint(index) + queryset._hint = index + return queryset + + def distinct(self, field): + """Return a list of distinct values for a given field. + + :param field: the field to select distinct values from + + .. note:: This is a command and won't take ordering or limit into + account. + + .. versionadded:: 0.4 + .. versionchanged:: 0.5 - Fixed handling references + .. versionchanged:: 0.6 - Improved db_field refrence handling + """ + queryset = self.clone() + try: + field = self._fields_to_dbfields([field]).pop() + finally: + distinct = self._dereference(queryset._cursor.distinct(field), 1, + name=field, instance=self._document) + + # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField) + doc_field = getattr(self._document._fields.get(field), "field", None) + instance = getattr(doc_field, "document_type", False) + if instance: + distinct = [instance(**doc) for doc in distinct] + return distinct + + def only(self, *fields): + """Load only a subset of this document's fields. :: + + post = BlogPost.objects(...).only("title", "author.name") + + .. 
note :: `only()` is chainable and will perform a union :: + So with the following it will fetch both: `title` and `author.name`:: + + post = BlogPost.objects.only("title").only("author.name") + + :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any + field filters. + + :param fields: fields to include + + .. versionadded:: 0.3 + .. versionchanged:: 0.5 - Added subfield support + """ + fields = dict([(f, QueryFieldList.ONLY) for f in fields]) + return self.fields(True, **fields) + + def exclude(self, *fields): + """Opposite to .only(), exclude some document's fields. :: + + post = BlogPost.objects(...).exclude("comments") + + .. note :: `exclude()` is chainable and will perform a union :: + So with the following it will exclude both: `title` and `author.name`:: + + post = BlogPost.objects.exclude("title").exclude("author.name") + + :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any + field filters. + + :param fields: fields to exclude + + .. versionadded:: 0.5 + """ + fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) + return self.fields(**fields) + + def fields(self, _only_called=False, **kwargs): + """Manipulate how you load this document's fields. Used by `.only()` + and `.exclude()` to manipulate which fields to retrieve. Fields also + allows for a greater level of control for example: + + Retrieving a Subrange of Array Elements: + + You can use the $slice operator to retrieve a subrange of elements in + an array. For example to get the first 5 comments:: + + post = BlogPost.objects(...).fields(slice__comments=5) + + :param kwargs: A dictionary identifying what to include + + .. 
versionadded:: 0.5 + """ + + # Check for an operator and transform to mongo-style if there is + operators = ["slice"] + cleaned_fields = [] + for key, value in kwargs.items(): + parts = key.split('__') + op = None + if parts[0] in operators: + op = parts.pop(0) + value = {'$' + op: value} + key = '.'.join(parts) + cleaned_fields.append((key, value)) + + fields = sorted(cleaned_fields, key=operator.itemgetter(1)) + queryset = self.clone() + for value, group in itertools.groupby(fields, lambda x: x[1]): + fields = [field for field, value in group] + fields = queryset._fields_to_dbfields(fields) + queryset._loaded_fields += QueryFieldList(fields, value=value, _only_called=_only_called) + + return queryset + + def all_fields(self): + """Include all fields. Reset all previously calls of .only() or + .exclude(). :: + + post = BlogPost.objects.exclude("comments").all_fields() + + .. versionadded:: 0.5 + """ + queryset = self.clone() + queryset._loaded_fields = QueryFieldList( + always_include=queryset._loaded_fields.always_include) + return queryset + + def order_by(self, *keys): + """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The + order may be specified by prepending each of the keys by a + or a -. + Ascending order is assumed. + + :param keys: fields to order the query results by; keys may be + prefixed with **+** or **-** to determine the ordering direction + """ + queryset = self.clone() + queryset._ordering = queryset._get_order_by(keys) + return queryset + + def explain(self, format=False): + """Return an explain plan record for the + :class:`~mongoengine.queryset.QuerySet`\ 's cursor. + + :param format: format the plan before returning it + """ + plan = self._cursor.explain() + if format: + plan = pprint.pformat(plan) + return plan + + def snapshot(self, enabled): + """Enable or disable snapshot mode when querying. 
+ + :param enabled: whether or not snapshot mode is enabled + + ..versionchanged:: 0.5 - made chainable + """ + queryset = self.clone() + queryset._snapshot = enabled + return queryset + + def timeout(self, enabled): + """Enable or disable the default mongod timeout when querying. + + :param enabled: whether or not the timeout is used + + ..versionchanged:: 0.5 - made chainable + """ + queryset = self.clone() + queryset._timeout = enabled + return queryset + + def slave_okay(self, enabled): + """Enable or disable the slave_okay when querying. + + :param enabled: whether or not the slave_okay is enabled + """ + queryset = self.clone() + queryset._slave_okay = enabled + return queryset + + def read_preference(self, read_preference): + """Change the read_preference when querying. + + :param read_preference: override ReplicaSetConnection-level + preference. + """ + validate_read_preference('read_preference', read_preference) + queryset = self.clone() + queryset._read_preference = read_preference + return queryset + + def scalar(self, *fields): + """Instead of returning Document instances, return either a specific + value or a tuple of values in order. + + Can be used along with + :func:`~mongoengine.queryset.QuerySet.no_dereference` to turn off + dereferencing. + + .. note:: This effects all results and can be unset by calling + ``scalar`` without arguments. Calls ``only`` automatically. + + :param fields: One or more fields to return instead of a Document. + """ + queryset = self.clone() + queryset._scalar = list(fields) + + if fields: + queryset = queryset.only(*fields) + else: + queryset = queryset.all_fields() + + return queryset + + def values_list(self, *fields): + """An alias for scalar""" + return self.scalar(*fields) + + def as_pymongo(self, coerce_types=False): + """Instead of returning Document instances, return raw values from + pymongo. + + :param coerce_type: Field types (if applicable) would be use to + coerce types. 
+ """ + queryset = self.clone() + queryset._as_pymongo = True + queryset._as_pymongo_coerce = coerce_types + return queryset + + # JSON Helpers + + def to_json(self, *args, **kwargs): + """Converts a queryset to JSON""" + return json_util.dumps(self.as_pymongo(), *args, **kwargs) + + def from_json(self, json_data): + """Converts json data to unsaved objects""" + son_data = json_util.loads(json_data) + return [self._document._from_son(data) for data in son_data] + + # JS functionality + + def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, + scope=None): + """Perform a map/reduce query using the current query spec + and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, + it must be the last call made, as it does not return a maleable + ``QuerySet``. + + See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce` + and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` + tests in ``tests.queryset.QuerySetTest`` for usage examples. + + :param map_f: map function, as :class:`~bson.code.Code` or string + :param reduce_f: reduce function, as + :class:`~bson.code.Code` or string + :param output: output collection name, if set to 'inline' will try to + use :class:`~pymongo.collection.Collection.inline_map_reduce` + This can also be a dictionary containing output options + see: http://docs.mongodb.org/manual/reference/command/mapReduce/#dbcmd.mapReduce + :param finalize_f: finalize function, an optional function that + performs any post-reduction processing. + :param scope: values to insert into map/reduce global scope. Optional. + :param limit: number of objects from current query to provide + to map/reduce method + + Returns an iterator yielding + :class:`~mongoengine.document.MapReduceDocument`. + + .. note:: + + Map/Reduce changed in server version **>= 1.7.4**. The PyMongo + :meth:`~pymongo.collection.Collection.map_reduce` helper requires + PyMongo version **>= 1.11**. + + .. 
versionchanged:: 0.5 + - removed ``keep_temp`` keyword argument, which was only relevant + for MongoDB server versions older than 1.7.4 + + .. versionadded:: 0.3 + """ + queryset = self.clone() + + MapReduceDocument = _import_class('MapReduceDocument') + + if not hasattr(self._collection, "map_reduce"): + raise NotImplementedError("Requires MongoDB >= 1.7.1") + + map_f_scope = {} + if isinstance(map_f, Code): + map_f_scope = map_f.scope + map_f = unicode(map_f) + map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) + + reduce_f_scope = {} + if isinstance(reduce_f, Code): + reduce_f_scope = reduce_f.scope + reduce_f = unicode(reduce_f) + reduce_f_code = queryset._sub_js_fields(reduce_f) + reduce_f = Code(reduce_f_code, reduce_f_scope) + + mr_args = {'query': queryset._query} + + if finalize_f: + finalize_f_scope = {} + if isinstance(finalize_f, Code): + finalize_f_scope = finalize_f.scope + finalize_f = unicode(finalize_f) + finalize_f_code = queryset._sub_js_fields(finalize_f) + finalize_f = Code(finalize_f_code, finalize_f_scope) + mr_args['finalize'] = finalize_f + + if scope: + mr_args['scope'] = scope + + if limit: + mr_args['limit'] = limit + + if output == 'inline' and not queryset._ordering: + map_reduce_function = 'inline_map_reduce' + else: + map_reduce_function = 'map_reduce' + + if isinstance(output, basestring): + mr_args['out'] = output + + elif isinstance(output, dict): + ordered_output = [] + + for part in ('replace', 'merge', 'reduce'): + value = output.get(part) + if value: + ordered_output.append((part, value)) + break + + else: + raise OperationError("actionData not specified for output") + + db_alias = output.get('db_alias') + remaing_args = ['db', 'sharded', 'nonAtomic'] + + if db_alias: + ordered_output.append(('db', get_db(db_alias).name)) + del remaing_args[0] + + + for part in remaing_args: + value = output.get(part) + if value: + ordered_output.append((part, value)) + + mr_args['out'] = SON(ordered_output) + + results = 
getattr(queryset._collection, map_reduce_function)( + map_f, reduce_f, **mr_args) + + if map_reduce_function == 'map_reduce': + results = results.find() + + if queryset._ordering: + results = results.sort(queryset._ordering) + + for doc in results: + yield MapReduceDocument(queryset._document, queryset._collection, + doc['_id'], doc['value']) + + def exec_js(self, code, *fields, **options): + """Execute a Javascript function on the server. A list of fields may be + provided, which will be translated to their correct names and supplied + as the arguments to the function. A few extra variables are added to + the function's scope: ``collection``, which is the name of the + collection in use; ``query``, which is an object representing the + current query; and ``options``, which is an object containing any + options specified as keyword arguments. + + As fields in MongoEngine may use different names in the database (set + using the :attr:`db_field` keyword argument to a :class:`Field` + constructor), a mechanism exists for replacing MongoEngine field names + with the database field names in Javascript code. When accessing a + field, use square-bracket notation, and prefix the MongoEngine field + name with a tilde (~). 
+ + :param code: a string of Javascript code to execute + :param fields: fields that you will be using in your function, which + will be passed in to your function as arguments + :param options: options that you want available to the function + (accessed in Javascript through the ``options`` object) + """ + queryset = self.clone() + + code = queryset._sub_js_fields(code) + + fields = [queryset._document._translate_field_name(f) for f in fields] + collection = queryset._document._get_collection_name() + + scope = { + 'collection': collection, + 'options': options or {}, + } + + query = queryset._query + if queryset._where_clause: + query['$where'] = queryset._where_clause + + scope['query'] = query + code = Code(code, scope=scope) + + db = queryset._document._get_db() + return db.eval(code, *fields) + + def where(self, where_clause): + """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript + expression). Performs automatic field name substitution like + :meth:`mongoengine.queryset.Queryset.exec_js`. + + .. note:: When using this mode of query, the database will call your + function, or evaluate your predicate clause, for each object + in the collection. + + .. versionadded:: 0.5 + """ + queryset = self.clone() + where_clause = queryset._sub_js_fields(where_clause) + queryset._where_clause = where_clause + return queryset + + def sum(self, field): + """Sum over the values of the specified field. + + :param field: the field to sum over; use dot-notation to refer to + embedded document fields + + .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work + with sharding. 
+ """ + map_func = """ + function() { + var path = '{{~%(field)s}}'.split('.'), + field = this; + + for (p in path) { + if (typeof field != 'undefined') + field = field[path[p]]; + else + break; + } + + if (field && field.constructor == Array) { + field.forEach(function(item) { + emit(1, item||0); + }); + } else if (typeof field != 'undefined') { + emit(1, field||0); + } + } + """ % dict(field=field) + + reduce_func = Code(""" + function(key, values) { + var sum = 0; + for (var i in values) { + sum += values[i]; + } + return sum; + } + """) + + for result in self.map_reduce(map_func, reduce_func, output='inline'): + return result.value + else: + return 0 + + def average(self, field): + """Average over the values of the specified field. + + :param field: the field to average over; use dot-notation to refer to + embedded document fields + + .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work + with sharding. + """ + map_func = """ + function() { + var path = '{{~%(field)s}}'.split('.'), + field = this; + + for (p in path) { + if (typeof field != 'undefined') + field = field[path[p]]; + else + break; + } + + if (field && field.constructor == Array) { + field.forEach(function(item) { + emit(1, {t: item||0, c: 1}); + }); + } else if (typeof field != 'undefined') { + emit(1, {t: field||0, c: 1}); + } + } + """ % dict(field=field) + + reduce_func = Code(""" + function(key, values) { + var out = {t: 0, c: 0}; + for (var i in values) { + var value = values[i]; + out.t += value.t; + out.c += value.c; + } + return out; + } + """) + + finalize_func = Code(""" + function(key, value) { + return value.t / value.c; + } + """) + + for result in self.map_reduce(map_func, reduce_func, + finalize_f=finalize_func, output='inline'): + return result.value + else: + return 0 + + def item_frequencies(self, field, normalize=False, map_reduce=True): + """Returns a dictionary of all items present in a field across + the whole queried set of documents, and their 
corresponding frequency. + This is useful for generating tag clouds, or searching documents. + + .. note:: + + Can only do direct simple mappings and cannot map across + :class:`~mongoengine.fields.ReferenceField` or + :class:`~mongoengine.fields.GenericReferenceField` for more complex + counting a manual map reduce call would is required. + + If the field is a :class:`~mongoengine.fields.ListField`, the items within + each list will be counted individually. + + :param field: the field to use + :param normalize: normalize the results so they add to 1.0 + :param map_reduce: Use map_reduce over exec_js + + .. versionchanged:: 0.5 defaults to map_reduce and can handle embedded + document lookups + """ + if map_reduce: + return self._item_frequencies_map_reduce(field, + normalize=normalize) + return self._item_frequencies_exec_js(field, normalize=normalize) + + # Iterator helpers + + def next(self): + """Wrap the result in a :class:`~mongoengine.Document` object. + """ + if self._limit == 0 or self._none: + raise StopIteration + + raw_doc = self._cursor.next() + if self._as_pymongo: + return self._get_as_pymongo(raw_doc) + doc = self._document._from_son(raw_doc, + _auto_dereference=self._auto_dereference) + if self._scalar: + return self._get_scalar(doc) + + return doc + + def rewind(self): + """Rewind the cursor to its unevaluated state. + + .. versionadded:: 0.3 + """ + self._iter = False + self._cursor.rewind() + + # Properties + + @property + def _collection(self): + """Property that returns the collection object. This allows us to + perform operations only if the collection is accessed. 
+ """ + return self._collection_obj + + @property + def _cursor_args(self): + cursor_args = { + 'snapshot': self._snapshot, + 'timeout': self._timeout + } + if self._read_preference is not None: + cursor_args['read_preference'] = self._read_preference + else: + cursor_args['slave_okay'] = self._slave_okay + if self._loaded_fields: + cursor_args['fields'] = self._loaded_fields.as_dict() + return cursor_args + + @property + def _cursor(self): + if self._cursor_obj is None: + + self._cursor_obj = self._collection.find(self._query, + **self._cursor_args) + # Apply where clauses to cursor + if self._where_clause: + where_clause = self._sub_js_fields(self._where_clause) + self._cursor_obj.where(where_clause) + + if self._ordering: + # Apply query ordering + self._cursor_obj.sort(self._ordering) + elif self._ordering is None and self._document._meta['ordering']: + # Otherwise, apply the ordering from the document model, unless + # it's been explicitly cleared via order_by with no arguments + order = self._get_order_by(self._document._meta['ordering']) + self._cursor_obj.sort(order) + + if self._limit is not None: + self._cursor_obj.limit(self._limit) + + if self._skip is not None: + self._cursor_obj.skip(self._skip) + + if self._hint != -1: + self._cursor_obj.hint(self._hint) + + return self._cursor_obj + + def __deepcopy__(self, memo): + """Essential for chained queries with ReferenceFields involved""" + return self.clone() + + @property + def _query(self): + if self._mongo_query is None: + self._mongo_query = self._query_obj.to_query(self._document) + if self._class_check: + self._mongo_query.update(self._initial_query) + return self._mongo_query + + @property + def _dereference(self): + if not self.__dereference: + self.__dereference = _import_class('DeReference')() + return self.__dereference + + def no_dereference(self): + """Turn off any dereferencing for the results of this queryset. 
+ """ + queryset = self.clone() + queryset._auto_dereference = False + return queryset + + # Helper Functions + + def _item_frequencies_map_reduce(self, field, normalize=False): + map_func = """ + function() { + var path = '{{~%(field)s}}'.split('.'); + var field = this; + + for (p in path) { + if (typeof field != 'undefined') + field = field[path[p]]; + else + break; + } + if (field && field.constructor == Array) { + field.forEach(function(item) { + emit(item, 1); + }); + } else if (typeof field != 'undefined') { + emit(field, 1); + } else { + emit(null, 1); + } + } + """ % dict(field=field) + reduce_func = """ + function(key, values) { + var total = 0; + var valuesSize = values.length; + for (var i=0; i < valuesSize; i++) { + total += parseInt(values[i], 10); + } + return total; + } + """ + values = self.map_reduce(map_func, reduce_func, 'inline') + frequencies = {} + for f in values: + key = f.key + if isinstance(key, float): + if int(key) == key: + key = int(key) + frequencies[key] = int(f.value) + + if normalize: + count = sum(frequencies.values()) + frequencies = dict([(k, float(v) / count) + for k, v in frequencies.items()]) + + return frequencies + + def _item_frequencies_exec_js(self, field, normalize=False): + """Uses exec_js to execute""" + freq_func = """ + function(path) { + var path = path.split('.'); + + var total = 0.0; + db[collection].find(query).forEach(function(doc) { + var field = doc; + for (p in path) { + if (field) + field = field[path[p]]; + else + break; + } + if (field && field.constructor == Array) { + total += field.length; + } else { + total++; + } + }); + + var frequencies = {}; + var types = {}; + var inc = 1.0; + + db[collection].find(query).forEach(function(doc) { + field = doc; + for (p in path) { + if (field) + field = field[path[p]]; + else + break; + } + if (field && field.constructor == Array) { + field.forEach(function(item) { + frequencies[item] = inc + (isNaN(frequencies[item]) ? 
0: frequencies[item]); + }); + } else { + var item = field; + types[item] = item; + frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); + } + }); + return [total, frequencies, types]; + } + """ + total, data, types = self.exec_js(freq_func, field) + values = dict([(types.get(k), int(v)) for k, v in data.iteritems()]) + + if normalize: + values = dict([(k, float(v) / total) for k, v in values.items()]) + + frequencies = {} + for k, v in values.iteritems(): + if isinstance(k, float): + if int(k) == k: + k = int(k) + + frequencies[k] = v + + return frequencies + + def _fields_to_dbfields(self, fields, subdoc=False): + """Translate fields paths to its db equivalents""" + ret = [] + subclasses = [] + document = self._document + if document._meta['allow_inheritance']: + subclasses = [get_document(x) + for x in document._subclasses][1:] + for field in fields: + try: + field = ".".join(f.db_field for f in + document._lookup_field(field.split('.'))) + ret.append(field) + except LookUpError, err: + found = False + for subdoc in subclasses: + try: + subfield = ".".join(f.db_field for f in + subdoc._lookup_field(field.split('.'))) + ret.append(subfield) + found = True + break + except LookUpError, e: + pass + + if not found: + raise err + return ret + + def _get_order_by(self, keys): + """Creates a list of order by fields + """ + key_list = [] + for key in keys: + if not key: + continue + direction = pymongo.ASCENDING + if key[0] == '-': + direction = pymongo.DESCENDING + if key[0] in ('-', '+'): + key = key[1:] + key = key.replace('__', '.') + try: + key = self._document._translate_field_name(key) + except: + pass + key_list.append((key, direction)) + + if self._cursor_obj and key_list: + self._cursor_obj.sort(key_list) + return key_list + + def _get_scalar(self, doc): + + def lookup(obj, name): + chunks = name.split('__') + for chunk in chunks: + obj = getattr(obj, chunk) + return obj + + data = [lookup(doc, n) for n in self._scalar] + if len(data) 
== 1: + return data[0] + + return tuple(data) + + def _get_as_pymongo(self, row): + # Extract which fields paths we should follow if .fields(...) was + # used. If not, handle all fields. + if not getattr(self, '__as_pymongo_fields', None): + self.__as_pymongo_fields = [] + + for field in self._loaded_fields.fields - set(['_cls']): + self.__as_pymongo_fields.append(field) + while '.' in field: + field, _ = field.rsplit('.', 1) + self.__as_pymongo_fields.append(field) + + all_fields = not self.__as_pymongo_fields + + def clean(data, path=None): + path = path or '' + + if isinstance(data, dict): + new_data = {} + for key, value in data.iteritems(): + new_path = '%s.%s' % (path, key) if path else key + + if all_fields: + include_field = True + elif self._loaded_fields.value == QueryFieldList.ONLY: + include_field = new_path in self.__as_pymongo_fields + else: + include_field = new_path not in self.__as_pymongo_fields + + if include_field: + new_data[key] = clean(value, path=new_path) + data = new_data + elif isinstance(data, list): + data = [clean(d, path=path) for d in data] + else: + if self._as_pymongo_coerce: + # If we need to coerce types, we need to determine the + # type of this field and use the corresponding + # .to_python(...) + from mongoengine.fields import EmbeddedDocumentField + + obj = self._document + for chunk in path.split('.'): + obj = getattr(obj, chunk, None) + if obj is None: + break + elif isinstance(obj, EmbeddedDocumentField): + obj = obj.document_type + if obj and data is not None: + data = obj.to_python(data) + return data + + return clean(row) + + def _sub_js_fields(self, code): + """When fields are specified with [~fieldname] syntax, where + *fieldname* is the Python name of a field, *fieldname* will be + substituted for the MongoDB name of the field (specified using the + :attr:`name` keyword argument in a field's constructor). 
+ """ + + def field_sub(match): + # Extract just the field name, and look up the field objects + field_name = match.group(1).split('.') + fields = self._document._lookup_field(field_name) + # Substitute the correct name for the field into the javascript + return u'["%s"]' % fields[-1].db_field + + def field_path_sub(match): + # Extract just the field name, and look up the field objects + field_name = match.group(1).split('.') + fields = self._document._lookup_field(field_name) + # Substitute the correct name for the field into the javascript + return ".".join([f.db_field for f in fields]) + + code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) + code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, + code) + return code + + # Deprecated + def ensure_index(self, **kwargs): + """Deprecated use :func:`Document.ensure_index`""" + msg = ("Doc.objects()._ensure_index() is deprecated. " + "Use Doc.ensure_index() instead.") + warnings.warn(msg, DeprecationWarning) + self._document.__class__.ensure_index(**kwargs) + return self + + def _ensure_indexes(self): + """Deprecated use :func:`~Document.ensure_indexes`""" + msg = ("Doc.objects()._ensure_indexes() is deprecated. 
" + "Use Doc.ensure_indexes() instead.") + warnings.warn(msg, DeprecationWarning) + self._document.__class__.ensure_indexes() diff --git a/mongoengine/queryset/field_list.py b/mongoengine/queryset/field_list.py index 73d3cc24..140a71e0 100644 --- a/mongoengine/queryset/field_list.py +++ b/mongoengine/queryset/field_list.py @@ -55,7 +55,8 @@ class QueryFieldList(object): if self.always_include: if self.value is self.ONLY and self.fields: - self.fields = self.fields.union(self.always_include) + if sorted(self.slice.keys()) != sorted(self.fields): + self.fields = self.fields.union(self.always_include) else: self.fields -= self.always_include diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index d58a13b7..cebfcc50 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -1,137 +1,26 @@ -from __future__ import absolute_import +from mongoengine.errors import OperationError +from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY, + CASCADE, DENY, PULL) -import copy -import itertools -import operator -import pprint -import re -import warnings - -from bson.code import Code -from bson import json_util -import pymongo -from pymongo.common import validate_read_preference - -from mongoengine import signals -from mongoengine.common import _import_class -from mongoengine.errors import (OperationError, NotUniqueError, - InvalidQueryError) - -from mongoengine.queryset import transform -from mongoengine.queryset.field_list import QueryFieldList -from mongoengine.queryset.visitor import Q, QNode - - -__all__ = ('QuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') +__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', + 'DENY', 'PULL') # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 ITER_CHUNK_SIZE = 100 -# Delete rules -DO_NOTHING = 0 -NULLIFY = 1 -CASCADE = 2 -DENY = 3 -PULL = 4 -RE_TYPE = type(re.compile('')) +class 
QuerySet(BaseQuerySet): + """The default queryset, that builds queries and handles a set of results + returned from a query. - -class QuerySet(object): - """A set of results returned from a query. Wraps a MongoDB cursor, - providing :class:`~mongoengine.Document` objects as the results. + Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as + the results. """ - __dereference = False - _auto_dereference = True - def __init__(self, document, collection): - self._document = document - self._collection_obj = collection - self._mongo_query = None - self._query_obj = Q() - self._initial_query = {} - self._where_clause = None - self._loaded_fields = QueryFieldList() - self._ordering = [] - self._snapshot = False - self._timeout = True - self._class_check = True - self._slave_okay = False - self._read_preference = None - self._iter = False - self._scalar = [] - self._none = False - self._as_pymongo = False - self._as_pymongo_coerce = False - self._result_cache = [] - self._has_more = True - self._len = None - - # If inheritance is allowed, only return instances and instances of - # subclasses of the class being used - if document._meta.get('allow_inheritance') is True: - if len(self._document._subclasses) == 1: - self._initial_query = {"_cls": self._document._subclasses[0]} - else: - self._initial_query = {"_cls": {"$in": self._document._subclasses}} - self._loaded_fields = QueryFieldList(always_include=['_cls']) - self._cursor_obj = None - self._limit = None - self._skip = None - self._hint = -1 # Using -1 as None is a valid value for hint - - def __call__(self, q_obj=None, class_check=True, slave_okay=False, - read_preference=None, **query): - """Filter the selected documents by calling the - :class:`~mongoengine.queryset.QuerySet` with a query. 
- - :param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in - the query; the :class:`~mongoengine.queryset.QuerySet` is filtered - multiple times with different :class:`~mongoengine.queryset.Q` - objects, only the last one will be used - :param class_check: If set to False bypass class name check when - querying collection - :param slave_okay: if True, allows this query to be run against a - replica secondary. - :params read_preference: if set, overrides connection-level - read_preference from `ReplicaSetConnection`. - :param query: Django-style query keyword arguments - """ - query = Q(**query) - if q_obj: - # make sure proper query object is passed - if not isinstance(q_obj, QNode): - msg = ("Not a query object: %s. " - "Did you intend to use key=value?" % q_obj) - raise InvalidQueryError(msg) - query &= q_obj - - if read_preference is None: - queryset = self.clone() - else: - # Use the clone provided when setting read_preference - queryset = self.read_preference(read_preference) - - queryset._query_obj &= query - queryset._mongo_query = None - queryset._cursor_obj = None - queryset._class_check = class_check - - return queryset - - def __len__(self): - """Since __len__ is called quite frequently (for example, as part of - list(qs) we populate the result cache and cache the length. - """ - if self._len is not None: - return self._len - if self._has_more: - # populate the cache - list(self._iter_results()) - - self._len = len(self._result_cache) - return self._len + _has_more = True + _len = None + _result_cache = None def __iter__(self): """Iteration utilises a results cache which iterates the cursor @@ -147,11 +36,39 @@ class QuerySet(object): # iterating over the cache. return iter(self._result_cache) + def __len__(self): + """Since __len__ is called quite frequently (for example, as part of + list(qs) we populate the result cache and cache the length. 
+ """ + if self._len is not None: + return self._len + if self._has_more: + # populate the cache + list(self._iter_results()) + + self._len = len(self._result_cache) + return self._len + + def __repr__(self): + """Provides the string representation of the QuerySet + """ + if self._iter: + return '.. queryset mid-iteration ..' + + self._populate_cache() + data = self._result_cache[:REPR_OUTPUT_SIZE + 1] + if len(data) > REPR_OUTPUT_SIZE: + data[-1] = "...(remaining elements truncated)..." + return repr(data) + + def _iter_results(self): """A generator for iterating over the result cache. Also populates the cache if there are more possible results to yield. Raises StopIteration when there are no more results""" + if self._result_cache is None: + self._result_cache = [] pos = 0 while True: upper = len(self._result_cache) @@ -168,6 +85,8 @@ class QuerySet(object): Populates the result cache with ``ITER_CHUNK_SIZE`` more entries (until the cursor is exhausted). """ + if self._result_cache is None: + self._result_cache = [] if self._has_more: try: for i in xrange(ITER_CHUNK_SIZE): @@ -175,224 +94,6 @@ class QuerySet(object): except StopIteration: self._has_more = False - def __getitem__(self, key): - """Support skip and limit using getitem and slicing syntax. - """ - queryset = self.clone() - - # Slice provided - if isinstance(key, slice): - try: - queryset._cursor_obj = queryset._cursor[key] - queryset._skip, queryset._limit = key.start, key.stop - if key.start and key.stop: - queryset._limit = key.stop - key.start - except IndexError, err: - # PyMongo raises an error if key.start == key.stop, catch it, - # bin it, kill it. 
- start = key.start or 0 - if start >= 0 and key.stop >= 0 and key.step is None: - if start == key.stop: - queryset.limit(0) - queryset._skip = key.start - queryset._limit = key.stop - start - return queryset - raise err - # Allow further QuerySet modifications to be performed - return queryset - # Integer index provided - elif isinstance(key, int): - if queryset._scalar: - return queryset._get_scalar( - queryset._document._from_son(queryset._cursor[key], - _auto_dereference=self._auto_dereference)) - if queryset._as_pymongo: - return queryset._get_as_pymongo(queryset._cursor.next()) - return queryset._document._from_son(queryset._cursor[key], - _auto_dereference=self._auto_dereference) - raise AttributeError - - def __repr__(self): - """Provides the string representation of the QuerySet - """ - - if self._iter: - return '.. queryset mid-iteration ..' - - self._populate_cache() - data = self._result_cache[:REPR_OUTPUT_SIZE + 1] - if len(data) > REPR_OUTPUT_SIZE: - data[-1] = "...(remaining elements truncated)..." - return repr(data) - - # Core functions - - def all(self): - """Returns all documents.""" - return self.__call__() - - def filter(self, *q_objs, **query): - """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` - """ - return self.__call__(*q_objs, **query) - - def get(self, *q_objs, **query): - """Retrieve the the matching object raising - :class:`~mongoengine.queryset.MultipleObjectsReturned` or - `DocumentName.MultipleObjectsReturned` exception if multiple results - and :class:`~mongoengine.queryset.DoesNotExist` or - `DocumentName.DoesNotExist` if no results are found. - - .. versionadded:: 0.3 - """ - queryset = self.__call__(*q_objs, **query) - queryset = queryset.limit(2) - try: - result = queryset.next() - except StopIteration: - msg = ("%s matching query does not exist." 
- % queryset._document._class_name) - raise queryset._document.DoesNotExist(msg) - try: - queryset.next() - except StopIteration: - return result - - queryset.rewind() - message = u'%d items returned, instead of 1' % queryset.count() - raise queryset._document.MultipleObjectsReturned(message) - - def create(self, **kwargs): - """Create new object. Returns the saved object instance. - - .. versionadded:: 0.4 - """ - return self._document(**kwargs).save() - - def get_or_create(self, write_concern=None, auto_save=True, - *q_objs, **query): - """Retrieve unique object or create, if it doesn't exist. Returns a - tuple of ``(object, created)``, where ``object`` is the retrieved or - created object and ``created`` is a boolean specifying whether a new - object was created. Raises - :class:`~mongoengine.queryset.MultipleObjectsReturned` or - `DocumentName.MultipleObjectsReturned` if multiple results are found. - A new document will be created if the document doesn't exists; a - dictionary of default values for the new document may be provided as a - keyword argument called :attr:`defaults`. - - .. note:: This requires two separate operations and therefore a - race condition exists. Because there are no transactions in - mongoDB other approaches should be investigated, to ensure you - don't accidently duplicate data when using this method. This is - now scheduled to be removed before 1.0 - - :param write_concern: optional extra keyword arguments used if we - have to create a new document. - Passes any write_concern onto :meth:`~mongoengine.Document.save` - - :param auto_save: if the object is to be saved automatically if - not found. - - .. deprecated:: 0.8 - .. versionchanged:: 0.6 - added `auto_save` - .. versionadded:: 0.3 - """ - msg = ("get_or_create is scheduled to be deprecated. The approach is " - "flawed without transactions. 
Upserts should be preferred.") - warnings.warn(msg, DeprecationWarning) - - defaults = query.get('defaults', {}) - if 'defaults' in query: - del query['defaults'] - - try: - doc = self.get(*q_objs, **query) - return doc, False - except self._document.DoesNotExist: - query.update(defaults) - doc = self._document(**query) - - if auto_save: - doc.save(write_concern=write_concern) - return doc, True - - def first(self): - """Retrieve the first object matching the query. - """ - queryset = self.clone() - try: - result = queryset[0] - except IndexError: - result = None - return result - - def insert(self, doc_or_docs, load_bulk=True, write_concern=None): - """bulk insert documents - - :param docs_or_doc: a document or list of documents to be inserted - :param load_bulk (optional): If True returns the list of document - instances - :param write_concern: Extra keyword arguments are passed down to - :meth:`~pymongo.collection.Collection.insert` - which will be used as options for the resultant - ``getLastError`` command. For example, - ``insert(..., {w: 2, fsync: True})`` will wait until at least - two servers have recorded the write and will force an fsync on - each server being written to. - - By default returns document instances, set ``load_bulk`` to False to - return just ``ObjectIds`` - - .. 
versionadded:: 0.5 - """ - Document = _import_class('Document') - - if write_concern is None: - write_concern = {} - - docs = doc_or_docs - return_one = False - if isinstance(docs, Document) or issubclass(docs.__class__, Document): - return_one = True - docs = [docs] - - raw = [] - for doc in docs: - if not isinstance(doc, self._document): - msg = ("Some documents inserted aren't instances of %s" - % str(self._document)) - raise OperationError(msg) - if doc.pk and not doc._created: - msg = "Some documents have ObjectIds use doc.update() instead" - raise OperationError(msg) - raw.append(doc.to_mongo()) - - signals.pre_bulk_insert.send(self._document, documents=docs) - try: - ids = self._collection.insert(raw, **write_concern) - except pymongo.errors.OperationFailure, err: - message = 'Could not save document (%s)' - if re.match('^E1100[01] duplicate key', unicode(err)): - # E11000 - duplicate key error index - # E11001 - duplicate key on update - message = u'Tried to save duplicate unique keys (%s)' - raise NotUniqueError(message % unicode(err)) - raise OperationError(message % unicode(err)) - - if not load_bulk: - signals.post_bulk_insert.send( - self._document, documents=docs, loaded=False) - return return_one and ids[0] or ids - - documents = self.in_bulk(ids) - results = [] - for obj_id in ids: - results.append(documents.get(obj_id)) - signals.post_bulk_insert.send( - self._document, documents=results, loaded=True) - return return_one and results[0] or results - def count(self, with_limit_and_skip=True): """Count the selected elements in the query. 
@@ -400,1106 +101,64 @@ class QuerySet(object): :meth:`skip` that has been applied to this cursor into account when getting the count """ - if self._limit == 0: - return 0 - if with_limit_and_skip and self._len is not None: - return self._len - count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) - if with_limit_and_skip: - self._len = count - return count + if with_limit_and_skip is False: + return super(QuerySet, self).count(with_limit_and_skip) - def delete(self, write_concern=None, _from_doc_delete=False): - """Delete the documents matched by the query. + if self._len is None: + self._len = super(QuerySet, self).count(with_limit_and_skip) - :param write_concern: Extra keyword arguments are passed down which - will be used as options for the resultant - ``getLastError`` command. For example, - ``save(..., write_concern={w: 2, fsync: True}, ...)`` will - wait until at least two servers have recorded the write and - will force an fsync on the primary server. - :param _from_doc_delete: True when called from document delete therefore - signals will have been triggered so don't loop. + return self._len + + def no_cache(self): + """Convert to a non_caching queryset + + .. 
versionadded:: 0.8.3 Convert to non caching queryset """ - queryset = self.clone() - doc = queryset._document + if self._result_cache is not None: + raise OperationError("QuerySet already cached") + return self.clone_into(QuerySetNoCache(self._document, self._collection)) - if write_concern is None: - write_concern = {} - # Handle deletes where skips or limits have been applied or - # there is an untriggered delete signal - has_delete_signal = signals.signals_available and ( - signals.pre_delete.has_receivers_for(self._document) or - signals.post_delete.has_receivers_for(self._document)) +class QuerySetNoCache(BaseQuerySet): + """A non caching QuerySet""" - call_document_delete = (queryset._skip or queryset._limit or - has_delete_signal) and not _from_doc_delete + def cache(self): + """Convert to a caching queryset - if call_document_delete: - for doc in queryset: - doc.delete(write_concern=write_concern) - return - - delete_rules = doc._meta.get('delete_rules') or {} - # Check for DENY rules before actually deleting/nullifying any other - # references - for rule_entry in delete_rules: - document_cls, field_name = rule_entry - rule = doc._meta['delete_rules'][rule_entry] - if rule == DENY and document_cls.objects( - **{field_name + '__in': self}).count() > 0: - msg = ("Could not delete document (%s.%s refers to it)" - % (document_cls.__name__, field_name)) - raise OperationError(msg) - - for rule_entry in delete_rules: - document_cls, field_name = rule_entry - rule = doc._meta['delete_rules'][rule_entry] - if rule == CASCADE: - ref_q = document_cls.objects(**{field_name + '__in': self}) - ref_q_count = ref_q.count() - if (doc != document_cls and ref_q_count > 0 - or (doc == document_cls and ref_q_count > 0)): - ref_q.delete(write_concern=write_concern) - elif rule == NULLIFY: - document_cls.objects(**{field_name + '__in': self}).update( - write_concern=write_concern, **{'unset__%s' % field_name: 1}) - elif rule == PULL: - document_cls.objects(**{field_name + 
'__in': self}).update( - write_concern=write_concern, - **{'pull_all__%s' % field_name: self}) - - queryset._collection.remove(queryset._query, write_concern=write_concern) - - def update(self, upsert=False, multi=True, write_concern=None, **update): - """Perform an atomic update on the fields matched by the query. - - :param upsert: Any existing document with that "_id" is overwritten. - :param multi: Update multiple documents. - :param write_concern: Extra keyword arguments are passed down which - will be used as options for the resultant - ``getLastError`` command. For example, - ``save(..., write_concern={w: 2, fsync: True}, ...)`` will - wait until at least two servers have recorded the write and - will force an fsync on the primary server. - :param update: Django-style update keyword arguments - - .. versionadded:: 0.2 + .. versionadded:: 0.8.3 Convert to caching queryset """ - if not update and not upsert: - raise OperationError("No update parameters, would remove data") + return self.clone_into(QuerySet(self._document, self._collection)) - if write_concern is None: - write_concern = {} + def __repr__(self): + """Provides the string representation of the QuerySet - queryset = self.clone() - query = queryset._query - update = transform.update(queryset._document, **update) - - # If doing an atomic upsert on an inheritable class - # then ensure we add _cls to the update operation - if upsert and '_cls' in query: - if '$set' in update: - update["$set"]["_cls"] = queryset._document._class_name - else: - update["$set"] = {"_cls": queryset._document._class_name} - - try: - ret = queryset._collection.update(query, update, multi=multi, - upsert=upsert, **write_concern) - if ret is not None and 'n' in ret: - return ret['n'] - except pymongo.errors.OperationFailure, err: - if unicode(err) == u'multi not coded yet': - message = u'update() method requires MongoDB 1.1.3+' - raise OperationError(message) - raise OperationError(u'Update failed (%s)' % unicode(err)) - - def 
update_one(self, upsert=False, write_concern=None, **update): - """Perform an atomic update on first field matched by the query. - - :param upsert: Any existing document with that "_id" is overwritten. - :param write_concern: Extra keyword arguments are passed down which - will be used as options for the resultant - ``getLastError`` command. For example, - ``save(..., write_concern={w: 2, fsync: True}, ...)`` will - wait until at least two servers have recorded the write and - will force an fsync on the primary server. - :param update: Django-style update keyword arguments - - .. versionadded:: 0.2 + .. versionchanged:: 0.6.13 Now doesnt modify the cursor """ - return self.update( - upsert=upsert, multi=False, write_concern=write_concern, **update) + if self._iter: + return '.. queryset mid-iteration ..' - def with_id(self, object_id): - """Retrieve the object matching the id provided. Uses `object_id` only - and raises InvalidQueryError if a filter has been applied. Returns - `None` if no document exists with that id. - - :param object_id: the value for the id of the document to look up - - .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set - """ - queryset = self.clone() - if not queryset._query_obj.empty: - msg = "Cannot use a filter whilst using `with_id`" - raise InvalidQueryError(msg) - return queryset.filter(pk=object_id).first() - - def in_bulk(self, object_ids): - """Retrieve a set of documents by their ids. - - :param object_ids: a list or tuple of ``ObjectId``\ s - :rtype: dict of ObjectIds as keys and collection-specific - Document subclasses as values. - - .. 
versionadded:: 0.3 - """ - doc_map = {} - - docs = self._collection.find({'_id': {'$in': object_ids}}, - **self._cursor_args) - if self._scalar: - for doc in docs: - doc_map[doc['_id']] = self._get_scalar( - self._document._from_son(doc)) - elif self._as_pymongo: - for doc in docs: - doc_map[doc['_id']] = self._get_as_pymongo(doc) - else: - for doc in docs: - doc_map[doc['_id']] = self._document._from_son(doc) - - return doc_map - - def none(self): - """Helper that just returns a list""" - queryset = self.clone() - queryset._none = True - return queryset - - def no_sub_classes(self): - """ - Only return instances of this document and not any inherited documents - """ - if self._document._meta.get('allow_inheritance') is True: - self._initial_query = {"_cls": self._document._class_name} - - return self - - def clone(self): - """Creates a copy of the current - :class:`~mongoengine.queryset.QuerySet` - - .. versionadded:: 0.5 - """ - c = self.__class__(self._document, self._collection_obj) - - copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj', - '_where_clause', '_loaded_fields', '_ordering', '_snapshot', - '_timeout', '_class_check', '_slave_okay', '_read_preference', - '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce', - '_limit', '_skip', '_hint', '_auto_dereference') - - for prop in copy_props: - val = getattr(self, prop) - setattr(c, prop, copy.copy(val)) - - if self._cursor_obj: - c._cursor_obj = self._cursor_obj.clone() - - return c - - def select_related(self, max_depth=1): - """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or - :class:`~bson.object_id.ObjectId` a maximum depth in order to cut down - the number queries to mongodb. - - .. versionadded:: 0.5 - """ - # Make select related work the same for querysets - max_depth += 1 - queryset = self.clone() - return queryset._dereference(queryset, max_depth=max_depth) - - def limit(self, n): - """Limit the number of returned documents to `n`. 
This may also be - achieved using array-slicing syntax (e.g. ``User.objects[:5]``). - - :param n: the maximum number of objects to return - """ - queryset = self.clone() - if n == 0: - queryset._cursor.limit(1) - else: - queryset._cursor.limit(n) - queryset._limit = n - # Return self to allow chaining - return queryset - - def skip(self, n): - """Skip `n` documents before returning the results. This may also be - achieved using array-slicing syntax (e.g. ``User.objects[5:]``). - - :param n: the number of objects to skip before returning results - """ - queryset = self.clone() - queryset._cursor.skip(n) - queryset._skip = n - return queryset - - def hint(self, index=None): - """Added 'hint' support, telling Mongo the proper index to use for the - query. - - Judicious use of hints can greatly improve query performance. When - doing a query on multiple fields (at least one of which is indexed) - pass the indexed field as a hint to the query. - - Hinting will not do anything if the corresponding index does not exist. - The last hint applied to this cursor takes precedence over all others. - - .. versionadded:: 0.5 - """ - queryset = self.clone() - queryset._cursor.hint(index) - queryset._hint = index - return queryset - - def distinct(self, field): - """Return a list of distinct values for a given field. - - :param field: the field to select distinct values from - - .. note:: This is a command and won't take ordering or limit into - account. - - .. versionadded:: 0.4 - .. versionchanged:: 0.5 - Fixed handling references - .. versionchanged:: 0.6 - Improved db_field refrence handling - """ - queryset = self.clone() - try: - field = self._fields_to_dbfields([field]).pop() - finally: - return self._dereference(queryset._cursor.distinct(field), 1, - name=field, instance=self._document) - - def only(self, *fields): - """Load only a subset of this document's fields. :: - - post = BlogPost.objects(...).only("title", "author.name") - - .. 
note :: `only()` is chainable and will perform a union :: - So with the following it will fetch both: `title` and `author.name`:: - - post = BlogPost.objects.only("title").only("author.name") - - :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any - field filters. - - :param fields: fields to include - - .. versionadded:: 0.3 - .. versionchanged:: 0.5 - Added subfield support - """ - fields = dict([(f, QueryFieldList.ONLY) for f in fields]) - return self.fields(True, **fields) - - def exclude(self, *fields): - """Opposite to .only(), exclude some document's fields. :: - - post = BlogPost.objects(...).exclude("comments") - - .. note :: `exclude()` is chainable and will perform a union :: - So with the following it will exclude both: `title` and `author.name`:: - - post = BlogPost.objects.exclude("title").exclude("author.name") - - :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any - field filters. - - :param fields: fields to exclude - - .. versionadded:: 0.5 - """ - fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) - return self.fields(**fields) - - def fields(self, _only_called=False, **kwargs): - """Manipulate how you load this document's fields. Used by `.only()` - and `.exclude()` to manipulate which fields to retrieve. Fields also - allows for a greater level of control for example: - - Retrieving a Subrange of Array Elements: - - You can use the $slice operator to retrieve a subrange of elements in - an array. For example to get the first 5 comments:: - - post = BlogPost.objects(...).fields(slice__comments=5) - - :param kwargs: A dictionary identifying what to include - - .. 
versionadded:: 0.5 - """ - - # Check for an operator and transform to mongo-style if there is - operators = ["slice"] - cleaned_fields = [] - for key, value in kwargs.items(): - parts = key.split('__') - op = None - if parts[0] in operators: - op = parts.pop(0) - value = {'$' + op: value} - key = '.'.join(parts) - cleaned_fields.append((key, value)) - - fields = sorted(cleaned_fields, key=operator.itemgetter(1)) - queryset = self.clone() - for value, group in itertools.groupby(fields, lambda x: x[1]): - fields = [field for field, value in group] - fields = queryset._fields_to_dbfields(fields) - queryset._loaded_fields += QueryFieldList(fields, value=value, _only_called=_only_called) - - return queryset - - def all_fields(self): - """Include all fields. Reset all previously calls of .only() or - .exclude(). :: - - post = BlogPost.objects.exclude("comments").all_fields() - - .. versionadded:: 0.5 - """ - queryset = self.clone() - queryset._loaded_fields = QueryFieldList( - always_include=queryset._loaded_fields.always_include) - return queryset - - def order_by(self, *keys): - """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The - order may be specified by prepending each of the keys by a + or a -. - Ascending order is assumed. - - :param keys: fields to order the query results by; keys may be - prefixed with **+** or **-** to determine the ordering direction - """ - queryset = self.clone() - queryset._ordering = queryset._get_order_by(keys) - return queryset - - def explain(self, format=False): - """Return an explain plan record for the - :class:`~mongoengine.queryset.QuerySet`\ 's cursor. - - :param format: format the plan before returning it - """ - plan = self._cursor.explain() - if format: - plan = pprint.pformat(plan) - return plan - - def snapshot(self, enabled): - """Enable or disable snapshot mode when querying. 
- - :param enabled: whether or not snapshot mode is enabled - - ..versionchanged:: 0.5 - made chainable - """ - queryset = self.clone() - queryset._snapshot = enabled - return queryset - - def timeout(self, enabled): - """Enable or disable the default mongod timeout when querying. - - :param enabled: whether or not the timeout is used - - ..versionchanged:: 0.5 - made chainable - """ - queryset = self.clone() - queryset._timeout = enabled - return queryset - - def slave_okay(self, enabled): - """Enable or disable the slave_okay when querying. - - :param enabled: whether or not the slave_okay is enabled - """ - queryset = self.clone() - queryset._slave_okay = enabled - return queryset - - def read_preference(self, read_preference): - """Change the read_preference when querying. - - :param read_preference: override ReplicaSetConnection-level - preference. - """ - validate_read_preference('read_preference', read_preference) - queryset = self.clone() - queryset._read_preference = read_preference - return queryset - - def scalar(self, *fields): - """Instead of returning Document instances, return either a specific - value or a tuple of values in order. - - Can be used along with - :func:`~mongoengine.queryset.QuerySet.no_dereference` to turn off - dereferencing. - - .. note:: This effects all results and can be unset by calling - ``scalar`` without arguments. Calls ``only`` automatically. - - :param fields: One or more fields to return instead of a Document. - """ - queryset = self.clone() - queryset._scalar = list(fields) - - if fields: - queryset = queryset.only(*fields) - else: - queryset = queryset.all_fields() - - return queryset - - def values_list(self, *fields): - """An alias for scalar""" - return self.scalar(*fields) - - def as_pymongo(self, coerce_types=False): - """Instead of returning Document instances, return raw values from - pymongo. - - :param coerce_type: Field types (if applicable) would be use to - coerce types. 
- """ - queryset = self.clone() - queryset._as_pymongo = True - queryset._as_pymongo_coerce = coerce_types - return queryset - - # JSON Helpers - - def to_json(self): - """Converts a queryset to JSON""" - return json_util.dumps(self.as_pymongo()) - - def from_json(self, json_data): - """Converts json data to unsaved objects""" - son_data = json_util.loads(json_data) - return [self._document._from_son(data) for data in son_data] - - # JS functionality - - def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, - scope=None): - """Perform a map/reduce query using the current query spec - and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, - it must be the last call made, as it does not return a maleable - ``QuerySet``. - - See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce` - and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` - tests in ``tests.queryset.QuerySetTest`` for usage examples. - - :param map_f: map function, as :class:`~bson.code.Code` or string - :param reduce_f: reduce function, as - :class:`~bson.code.Code` or string - :param output: output collection name, if set to 'inline' will try to - use :class:`~pymongo.collection.Collection.inline_map_reduce` - This can also be a dictionary containing output options - see: http://docs.mongodb.org/manual/reference/commands/#mapReduce - :param finalize_f: finalize function, an optional function that - performs any post-reduction processing. - :param scope: values to insert into map/reduce global scope. Optional. - :param limit: number of objects from current query to provide - to map/reduce method - - Returns an iterator yielding - :class:`~mongoengine.document.MapReduceDocument`. - - .. note:: - - Map/Reduce changed in server version **>= 1.7.4**. The PyMongo - :meth:`~pymongo.collection.Collection.map_reduce` helper requires - PyMongo version **>= 1.11**. - - .. 
versionchanged:: 0.5 - - removed ``keep_temp`` keyword argument, which was only relevant - for MongoDB server versions older than 1.7.4 - - .. versionadded:: 0.3 - """ - queryset = self.clone() - - MapReduceDocument = _import_class('MapReduceDocument') - - if not hasattr(self._collection, "map_reduce"): - raise NotImplementedError("Requires MongoDB >= 1.7.1") - - map_f_scope = {} - if isinstance(map_f, Code): - map_f_scope = map_f.scope - map_f = unicode(map_f) - map_f = Code(queryset._sub_js_fields(map_f), map_f_scope) - - reduce_f_scope = {} - if isinstance(reduce_f, Code): - reduce_f_scope = reduce_f.scope - reduce_f = unicode(reduce_f) - reduce_f_code = queryset._sub_js_fields(reduce_f) - reduce_f = Code(reduce_f_code, reduce_f_scope) - - mr_args = {'query': queryset._query} - - if finalize_f: - finalize_f_scope = {} - if isinstance(finalize_f, Code): - finalize_f_scope = finalize_f.scope - finalize_f = unicode(finalize_f) - finalize_f_code = queryset._sub_js_fields(finalize_f) - finalize_f = Code(finalize_f_code, finalize_f_scope) - mr_args['finalize'] = finalize_f - - if scope: - mr_args['scope'] = scope - - if limit: - mr_args['limit'] = limit - - if output == 'inline' and not queryset._ordering: - map_reduce_function = 'inline_map_reduce' - else: - map_reduce_function = 'map_reduce' - mr_args['out'] = output - - results = getattr(queryset._collection, map_reduce_function)( - map_f, reduce_f, **mr_args) - - if map_reduce_function == 'map_reduce': - results = results.find() - - if queryset._ordering: - results = results.sort(queryset._ordering) - - for doc in results: - yield MapReduceDocument(queryset._document, queryset._collection, - doc['_id'], doc['value']) - - def exec_js(self, code, *fields, **options): - """Execute a Javascript function on the server. A list of fields may be - provided, which will be translated to their correct names and supplied - as the arguments to the function. 
A few extra variables are added to - the function's scope: ``collection``, which is the name of the - collection in use; ``query``, which is an object representing the - current query; and ``options``, which is an object containing any - options specified as keyword arguments. - - As fields in MongoEngine may use different names in the database (set - using the :attr:`db_field` keyword argument to a :class:`Field` - constructor), a mechanism exists for replacing MongoEngine field names - with the database field names in Javascript code. When accessing a - field, use square-bracket notation, and prefix the MongoEngine field - name with a tilde (~). - - :param code: a string of Javascript code to execute - :param fields: fields that you will be using in your function, which - will be passed in to your function as arguments - :param options: options that you want available to the function - (accessed in Javascript through the ``options`` object) - """ - queryset = self.clone() - - code = queryset._sub_js_fields(code) - - fields = [queryset._document._translate_field_name(f) for f in fields] - collection = queryset._document._get_collection_name() - - scope = { - 'collection': collection, - 'options': options or {}, - } - - query = queryset._query - if queryset._where_clause: - query['$where'] = queryset._where_clause - - scope['query'] = query - code = Code(code, scope=scope) - - db = queryset._document._get_db() - return db.eval(code, *fields) - - def where(self, where_clause): - """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript - expression). Performs automatic field name substitution like - :meth:`mongoengine.queryset.Queryset.exec_js`. - - .. note:: When using this mode of query, the database will call your - function, or evaluate your predicate clause, for each object - in the collection. - - .. 
versionadded:: 0.5 - """ - queryset = self.clone() - where_clause = queryset._sub_js_fields(where_clause) - queryset._where_clause = where_clause - return queryset - - def sum(self, field): - """Sum over the values of the specified field. - - :param field: the field to sum over; use dot-notation to refer to - embedded document fields - - .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work - with sharding. - """ - map_func = Code(""" - function() { - emit(1, this[field] || 0); - } - """, scope={'field': field}) - - reduce_func = Code(""" - function(key, values) { - var sum = 0; - for (var i in values) { - sum += values[i]; - } - return sum; - } - """) - - for result in self.map_reduce(map_func, reduce_func, output='inline'): - return result.value - else: - return 0 - - def average(self, field): - """Average over the values of the specified field. - - :param field: the field to average over; use dot-notation to refer to - embedded document fields - - .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work - with sharding. - """ - map_func = Code(""" - function() { - if (this.hasOwnProperty(field)) - emit(1, {t: this[field] || 0, c: 1}); - } - """, scope={'field': field}) - - reduce_func = Code(""" - function(key, values) { - var out = {t: 0, c: 0}; - for (var i in values) { - var value = values[i]; - out.t += value.t; - out.c += value.c; - } - return out; - } - """) - - finalize_func = Code(""" - function(key, value) { - return value.t / value.c; - } - """) - - for result in self.map_reduce(map_func, reduce_func, - finalize_f=finalize_func, output='inline'): - return result.value - else: - return 0 - - def item_frequencies(self, field, normalize=False, map_reduce=True): - """Returns a dictionary of all items present in a field across - the whole queried set of documents, and their corresponding frequency. - This is useful for generating tag clouds, or searching documents. - - .. 
note:: - - Can only do direct simple mappings and cannot map across - :class:`~mongoengine.fields.ReferenceField` or - :class:`~mongoengine.fields.GenericReferenceField` for more complex - counting a manual map reduce call would is required. - - If the field is a :class:`~mongoengine.fields.ListField`, the items within - each list will be counted individually. - - :param field: the field to use - :param normalize: normalize the results so they add to 1.0 - :param map_reduce: Use map_reduce over exec_js - - .. versionchanged:: 0.5 defaults to map_reduce and can handle embedded - document lookups - """ - if map_reduce: - return self._item_frequencies_map_reduce(field, - normalize=normalize) - return self._item_frequencies_exec_js(field, normalize=normalize) - - # Iterator helpers - - def next(self): - """Wrap the result in a :class:`~mongoengine.Document` object. - """ - if self._limit == 0 or self._none: - raise StopIteration - - raw_doc = self._cursor.next() - if self._as_pymongo: - return self._get_as_pymongo(raw_doc) - - doc = self._document._from_son(raw_doc) - if self._scalar: - return self._get_scalar(doc) - - return doc - - def rewind(self): - """Rewind the cursor to its unevaluated state. - - .. versionadded:: 0.3 - """ - self._iter = False - self._cursor.rewind() - - # Properties - - @property - def _collection(self): - """Property that returns the collection object. This allows us to - perform operations only if the collection is accessed. 
- """ - return self._collection_obj - - @property - def _cursor_args(self): - cursor_args = { - 'snapshot': self._snapshot, - 'timeout': self._timeout - } - if self._read_preference is not None: - cursor_args['read_preference'] = self._read_preference - else: - cursor_args['slave_okay'] = self._slave_okay - if self._loaded_fields: - cursor_args['fields'] = self._loaded_fields.as_dict() - return cursor_args - - @property - def _cursor(self): - if self._cursor_obj is None: - - self._cursor_obj = self._collection.find(self._query, - **self._cursor_args) - # Apply where clauses to cursor - if self._where_clause: - where_clause = self._sub_js_fields(self._where_clause) - self._cursor_obj.where(where_clause) - - if self._ordering: - # Apply query ordering - self._cursor_obj.sort(self._ordering) - elif self._document._meta['ordering']: - # Otherwise, apply the ordering from the document model - order = self._get_order_by(self._document._meta['ordering']) - self._cursor_obj.sort(order) - - if self._limit is not None: - self._cursor_obj.limit(self._limit) - - if self._skip is not None: - self._cursor_obj.skip(self._skip) - - if self._hint != -1: - self._cursor_obj.hint(self._hint) - - return self._cursor_obj - - def __deepcopy__(self, memo): - """Essential for chained queries with ReferenceFields involved""" - return self.clone() - - @property - def _query(self): - if self._mongo_query is None: - self._mongo_query = self._query_obj.to_query(self._document) - if self._class_check: - self._mongo_query.update(self._initial_query) - return self._mongo_query - - @property - def _dereference(self): - if not self.__dereference: - self.__dereference = _import_class('DeReference')() - return self.__dereference - - def no_dereference(self): - """Turn off any dereferencing for the results of this queryset. 
- """ - queryset = self.clone() - queryset._auto_dereference = False - return queryset - - # Helper Functions - - def _item_frequencies_map_reduce(self, field, normalize=False): - map_func = """ - function() { - var path = '{{~%(field)s}}'.split('.'); - var field = this; - - for (p in path) { - if (typeof field != 'undefined') - field = field[path[p]]; - else - break; - } - if (field && field.constructor == Array) { - field.forEach(function(item) { - emit(item, 1); - }); - } else if (typeof field != 'undefined') { - emit(field, 1); - } else { - emit(null, 1); - } - } - """ % dict(field=field) - reduce_func = """ - function(key, values) { - var total = 0; - var valuesSize = values.length; - for (var i=0; i < valuesSize; i++) { - total += parseInt(values[i], 10); - } - return total; - } - """ - values = self.map_reduce(map_func, reduce_func, 'inline') - frequencies = {} - for f in values: - key = f.key - if isinstance(key, float): - if int(key) == key: - key = int(key) - frequencies[key] = int(f.value) - - if normalize: - count = sum(frequencies.values()) - frequencies = dict([(k, float(v) / count) - for k, v in frequencies.items()]) - - return frequencies - - def _item_frequencies_exec_js(self, field, normalize=False): - """Uses exec_js to execute""" - freq_func = """ - function(path) { - var path = path.split('.'); - - var total = 0.0; - db[collection].find(query).forEach(function(doc) { - var field = doc; - for (p in path) { - if (field) - field = field[path[p]]; - else - break; - } - if (field && field.constructor == Array) { - total += field.length; - } else { - total++; - } - }); - - var frequencies = {}; - var types = {}; - var inc = 1.0; - - db[collection].find(query).forEach(function(doc) { - field = doc; - for (p in path) { - if (field) - field = field[path[p]]; - else - break; - } - if (field && field.constructor == Array) { - field.forEach(function(item) { - frequencies[item] = inc + (isNaN(frequencies[item]) ? 
0: frequencies[item]); - }); - } else { - var item = field; - types[item] = item; - frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); - } - }); - return [total, frequencies, types]; - } - """ - total, data, types = self.exec_js(freq_func, field) - values = dict([(types.get(k), int(v)) for k, v in data.iteritems()]) - - if normalize: - values = dict([(k, float(v) / total) for k, v in values.items()]) - - frequencies = {} - for k, v in values.iteritems(): - if isinstance(k, float): - if int(k) == k: - k = int(k) - - frequencies[k] = v - - return frequencies - - def _fields_to_dbfields(self, fields): - """Translate fields paths to its db equivalents""" - ret = [] - for field in fields: - field = ".".join(f.db_field for f in - self._document._lookup_field(field.split('.'))) - ret.append(field) - return ret - - def _get_order_by(self, keys): - """Creates a list of order by fields - """ - key_list = [] - for key in keys: - if not key: - continue - direction = pymongo.ASCENDING - if key[0] == '-': - direction = pymongo.DESCENDING - if key[0] in ('-', '+'): - key = key[1:] - key = key.replace('__', '.') + data = [] + for i in xrange(REPR_OUTPUT_SIZE + 1): try: - key = self._document._translate_field_name(key) - except: - pass - key_list.append((key, direction)) + data.append(self.next()) + except StopIteration: + break + if len(data) > REPR_OUTPUT_SIZE: + data[-1] = "...(remaining elements truncated)..." 
- if self._cursor_obj: - self._cursor_obj.sort(key_list) - return key_list + self.rewind() + return repr(data) - def _get_scalar(self, doc): + def __iter__(self): + queryset = self + if queryset._iter: + queryset = self.clone() + queryset.rewind() + return queryset - def lookup(obj, name): - chunks = name.split('__') - for chunk in chunks: - obj = getattr(obj, chunk) - return obj - data = [lookup(doc, n) for n in self._scalar] - if len(data) == 1: - return data[0] +class QuerySetNoDeRef(QuerySet): + """Special no_dereference QuerySet""" - return tuple(data) - - def _get_as_pymongo(self, row): - # Extract which fields paths we should follow if .fields(...) was - # used. If not, handle all fields. - if not getattr(self, '__as_pymongo_fields', None): - self.__as_pymongo_fields = [] - for field in self._loaded_fields.fields - set(['_cls', '_id']): - self.__as_pymongo_fields.append(field) - while '.' in field: - field, _ = field.rsplit('.', 1) - self.__as_pymongo_fields.append(field) - - all_fields = not self.__as_pymongo_fields - - def clean(data, path=None): - path = path or '' - - if isinstance(data, dict): - new_data = {} - for key, value in data.iteritems(): - new_path = '%s.%s' % (path, key) if path else key - - if all_fields: - include_field = True - elif self._loaded_fields.value == QueryFieldList.ONLY: - include_field = new_path in self.__as_pymongo_fields - else: - include_field = new_path not in self.__as_pymongo_fields - - if include_field: - new_data[key] = clean(value, path=new_path) - data = new_data - elif isinstance(data, list): - data = [clean(d, path=path) for d in data] - else: - if self._as_pymongo_coerce: - # If we need to coerce types, we need to determine the - # type of this field and use the corresponding - # .to_python(...) 
- from mongoengine.fields import EmbeddedDocumentField - obj = self._document - for chunk in path.split('.'): - obj = getattr(obj, chunk, None) - if obj is None: - break - elif isinstance(obj, EmbeddedDocumentField): - obj = obj.document_type - if obj and data is not None: - data = obj.to_python(data) - return data - return clean(row) - - def _sub_js_fields(self, code): - """When fields are specified with [~fieldname] syntax, where - *fieldname* is the Python name of a field, *fieldname* will be - substituted for the MongoDB name of the field (specified using the - :attr:`name` keyword argument in a field's constructor). - """ - def field_sub(match): - # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') - fields = self._document._lookup_field(field_name) - # Substitute the correct name for the field into the javascript - return u'["%s"]' % fields[-1].db_field - - def field_path_sub(match): - # Extract just the field name, and look up the field objects - field_name = match.group(1).split('.') - fields = self._document._lookup_field(field_name) - # Substitute the correct name for the field into the javascript - return ".".join([f.db_field for f in fields]) - - code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) - code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, - code) - return code - - # Deprecated - def ensure_index(self, **kwargs): - """Deprecated use :func:`Document.ensure_index`""" - msg = ("Doc.objects()._ensure_index() is deprecated. " - "Use Doc.ensure_index() instead.") - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_index(**kwargs) - return self - - def _ensure_indexes(self): - """Deprecated use :func:`~Document.ensure_indexes`""" - msg = ("Doc.objects()._ensure_indexes() is deprecated. 
" - "Use Doc.ensure_indexes() instead.") - warnings.warn(msg, DeprecationWarning) - self._document.__class__.ensure_indexes() + def __dereference(items, max_depth=1, instance=None, name=None): + return items \ No newline at end of file diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 4062fc1e..d72d97a3 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -3,6 +3,7 @@ from collections import defaultdict import pymongo from bson import SON +from mongoengine.connection import get_connection from mongoengine.common import _import_class from mongoengine.errors import InvalidQueryError, LookUpError @@ -38,16 +39,16 @@ def query(_doc_cls=None, _field_operation=False, **query): mongo_query.update(value) continue - parts = key.split('__') + parts = key.rsplit('__') indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] parts = [part for part in parts if not part.isdigit()] # Check for an operator and transform to mongo-style if there is op = None - if parts[-1] in MATCH_OPERATORS: + if len(parts) > 1 and parts[-1] in MATCH_OPERATORS: op = parts.pop() negate = False - if parts[-1] == 'not': + if len(parts) > 1 and parts[-1] == 'not': parts.pop() negate = True @@ -95,6 +96,7 @@ def query(_doc_cls=None, _field_operation=False, **query): value = _geo_operator(field, op, value) elif op in CUSTOM_OPERATORS: if op == 'match': + value = field.prepare_query_value(op, value) value = {"$elemMatch": value} else: NotImplementedError("Custom method '%s' has not " @@ -114,14 +116,26 @@ def query(_doc_cls=None, _field_operation=False, **query): if key in mongo_query and isinstance(mongo_query[key], dict): mongo_query[key].update(value) # $maxDistance needs to come last - convert to SON - if '$maxDistance' in mongo_query[key]: - value_dict = mongo_query[key] + value_dict = mongo_query[key] + if ('$maxDistance' in value_dict and '$near' in value_dict): value_son = SON() - for k, v in value_dict.iteritems(): 
- if k == '$maxDistance': - continue - value_son[k] = v - value_son['$maxDistance'] = value_dict['$maxDistance'] + if isinstance(value_dict['$near'], dict): + for k, v in value_dict.iteritems(): + if k == '$maxDistance': + continue + value_son[k] = v + if (get_connection().max_wire_version <= 1): + value_son['$maxDistance'] = value_dict['$maxDistance'] + else: + value_son['$near'] = SON(value_son['$near']) + value_son['$near']['$maxDistance'] = value_dict['$maxDistance'] + else: + for k, v in value_dict.iteritems(): + if k == '$maxDistance': + continue + value_son[k] = v + value_son['$maxDistance'] = value_dict['$maxDistance'] + mongo_query[key] = value_son else: # Store for manually merging later @@ -181,6 +195,7 @@ def update(_doc_cls=None, **update): parts = [] cleaned_fields = [] + appended_sub_field = False for field in fields: append_field = True if isinstance(field, basestring): @@ -192,21 +207,34 @@ def update(_doc_cls=None, **update): else: parts.append(field.db_field) if append_field: + appended_sub_field = False cleaned_fields.append(field) + if hasattr(field, 'field'): + cleaned_fields.append(field.field) + appended_sub_field = True # Convert value to proper value - field = cleaned_fields[-1] + if appended_sub_field: + field = cleaned_fields[-2] + else: + field = cleaned_fields[-1] + + GeoJsonBaseField = _import_class("GeoJsonBaseField") + if isinstance(field, GeoJsonBaseField): + value = field.to_mongo(value) if op in (None, 'set', 'push', 'pull'): if field.required or value is not None: value = field.prepare_query_value(op, value) elif op in ('pushAll', 'pullAll'): value = [field.prepare_query_value(op, v) for v in value] - elif op == 'addToSet': + elif op in ('addToSet', 'setOnInsert'): if isinstance(value, (list, tuple, set)): value = [field.prepare_query_value(op, v) for v in value] elif field.required or value is not None: value = field.prepare_query_value(op, value) + elif op == "unset": + value = 1 if match: match = '$' + match @@ -220,11 
+248,24 @@ def update(_doc_cls=None, **update): if 'pull' in op and '.' in key: # Dot operators don't work on pull operations - # it uses nested dict syntax + # unless they point to a list field + # Otherwise it uses nested dict syntax if op == 'pullAll': raise InvalidQueryError("pullAll operations only support " "a single field depth") + # Look for the last list field and use dot notation until there + field_classes = [c.__class__ for c in cleaned_fields] + field_classes.reverse() + ListField = _import_class('ListField') + if ListField in field_classes: + # Join all fields via dot notation to the last ListField + # Then process as normal + last_listField = len(cleaned_fields) - field_classes.index(ListField) + key = ".".join(parts[:last_listField]) + parts = parts[last_listField:] + parts.insert(0, key) + parts.reverse() for key in parts: value = {key: value} diff --git a/mongoengine/queryset/visitor.py b/mongoengine/queryset/visitor.py index 41f4ebf8..a39b05f0 100644 --- a/mongoengine/queryset/visitor.py +++ b/mongoengine/queryset/visitor.py @@ -1,8 +1,9 @@ import copy -from mongoengine.errors import InvalidQueryError -from mongoengine.python_support import product, reduce +from itertools import product +from functools import reduce +from mongoengine.errors import InvalidQueryError from mongoengine.queryset import transform __all__ = ('Q',) diff --git a/python-mongoengine.spec b/python-mongoengine.spec index 4eaba4db..eddb488d 100644 --- a/python-mongoengine.spec +++ b/python-mongoengine.spec @@ -5,7 +5,7 @@ %define srcname mongoengine Name: python-%{srcname} -Version: 0.8.2 +Version: 0.8.7 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB diff --git a/setup.py b/setup.py index effb6f11..7270331a 100644 --- a/setup.py +++ b/setup.py @@ -38,27 +38,30 @@ CLASSIFIERS = [ 'Operating System :: OS Independent', 'Programming Language :: Python', "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.6", + 
"Programming Language :: Python :: 2.6.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ] -extra_opts = {} +extra_opts = {"packages": find_packages(exclude=["tests", "tests.*"])} if sys.version_info[0] == 3: extra_opts['use_2to3'] = True - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6'] - extra_opts['packages'] = find_packages(exclude=('tests',)) + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'jinja2==2.6', 'Pillow>=2.0.0', 'django>=1.5.1'] if "test" in sys.argv or "nosetests" in sys.argv: - extra_opts['packages'].append("tests") + extra_opts['packages'] = find_packages() extra_opts['package_data'] = {"tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]} else: - extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'PIL', 'jinja2==2.6', 'python-dateutil'] - extra_opts['packages'] = find_packages(exclude=('tests',)) + extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.4.2', 'Pillow>=2.0.0', 'jinja2>=2.6', 'python-dateutil'] + + if sys.version_info[0] == 2 and sys.version_info[1] == 6: + extra_opts['tests_require'].append('unittest2') setup(name='mongoengine', version=VERSION, @@ -74,7 +77,7 @@ setup(name='mongoengine', long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, - install_requires=['pymongo>=2.5'], + install_requires=['pymongo>=2.7'], test_suite='nose.collector', **extra_opts ) diff --git a/tests/document/delta.py b/tests/document/delta.py index 16ab609b..738dfa78 100644 --- a/tests/document/delta.py +++ 
b/tests/document/delta.py @@ -3,6 +3,7 @@ import sys sys.path[0:0] = [""] import unittest +from bson import SON from mongoengine import * from mongoengine.connection import get_db @@ -312,29 +313,24 @@ class DeltaTest(unittest.TestCase): self.circular_reference_deltas_2(DynamicDocument, Document) self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) - def circular_reference_deltas_2(self, DocClass1, DocClass2): + def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True): class Person(DocClass1): name = StringField() - owns = ListField(ReferenceField('Organization')) - employer = ReferenceField('Organization') + owns = ListField(ReferenceField('Organization', dbref=dbref)) + employer = ReferenceField('Organization', dbref=dbref) class Organization(DocClass2): name = StringField() - owner = ReferenceField('Person') - employees = ListField(ReferenceField('Person')) + owner = ReferenceField('Person', dbref=dbref) + employees = ListField(ReferenceField('Person', dbref=dbref)) Person.drop_collection() Organization.drop_collection() - person = Person(name="owner") - person.save() - - employee = Person(name="employee") - employee.save() - - organization = Organization(name="company") - organization.save() + person = Person(name="owner").save() + employee = Person(name="employee").save() + organization = Organization(name="company").save() person.owns.append(organization) organization.owner = person @@ -354,6 +350,8 @@ class DeltaTest(unittest.TestCase): self.assertEqual(o.owner, p) self.assertEqual(e.employer, o) + return person, organization, employee + def test_delta_db_field(self): self.delta_db_field(Document) self.delta_db_field(DynamicDocument) @@ -613,13 +611,13 @@ class DeltaTest(unittest.TestCase): Person.drop_collection() p = Person(name="James", age=34) - self.assertEqual(p._delta(), ({'age': 34, 'name': 'James', - '_cls': 'Person'}, {})) + self.assertEqual(p._delta(), ( + SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) 
p.doc = 123 del(p.doc) - self.assertEqual(p._delta(), ({'age': 34, 'name': 'James', - '_cls': 'Person'}, {'doc': 1})) + self.assertEqual(p._delta(), ( + SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {})) p = Person() p.name = "Dean" @@ -631,14 +629,14 @@ class DeltaTest(unittest.TestCase): self.assertEqual(p._get_changed_fields(), ['age']) self.assertEqual(p._delta(), ({'age': 24}, {})) - p = self.Person.objects(age=22).get() + p = Person.objects(age=22).get() p.age = 24 self.assertEqual(p.age, 24) self.assertEqual(p._get_changed_fields(), ['age']) self.assertEqual(p._delta(), ({'age': 24}, {})) p.save() - self.assertEqual(1, self.Person.objects(age=24).count()) + self.assertEqual(1, Person.objects(age=24).count()) def test_dynamic_delta(self): @@ -685,6 +683,99 @@ class DeltaTest(unittest.TestCase): self.assertEqual(doc._get_changed_fields(), ['list_field']) self.assertEqual(doc._delta(), ({}, {'list_field': 1})) + def test_delta_with_dbref_true(self): + person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) + employee.name = 'test' + + self.assertEqual(organization._get_changed_fields(), []) + + updates, removals = organization._delta() + self.assertEqual({}, removals) + self.assertEqual({}, updates) + + organization.employees.append(person) + updates, removals = organization._delta() + self.assertEqual({}, removals) + self.assertTrue('employees' in updates) + + def test_delta_with_dbref_false(self): + person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) + employee.name = 'test' + + self.assertEqual(organization._get_changed_fields(), []) + + updates, removals = organization._delta() + self.assertEqual({}, removals) + self.assertEqual({}, updates) + + organization.employees.append(person) + updates, removals = organization._delta() + self.assertEqual({}, removals) + self.assertTrue('employees' in updates) + + def test_nested_nested_fields_mark_as_changed(self): + class 
EmbeddedDoc(EmbeddedDocument): + name = StringField() + + class MyDoc(Document): + subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) + name = StringField() + + MyDoc.drop_collection() + + mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save() + + mydoc = MyDoc.objects.first() + subdoc = mydoc.subs['a']['b'] + subdoc.name = 'bar' + + self.assertEqual(["name"], subdoc._get_changed_fields()) + self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields()) + + mydoc._clear_changed_fields() + self.assertEqual([], mydoc._get_changed_fields()) + + def test_referenced_object_changed_attributes(self): + """Ensures that when you save a new reference to a field, the referenced object isn't altered""" + + class Organization(Document): + name = StringField() + + class User(Document): + name = StringField() + org = ReferenceField('Organization', required=True) + + Organization.drop_collection() + User.drop_collection() + + org1 = Organization(name='Org 1') + org1.save() + + org2 = Organization(name='Org 2') + org2.save() + + user = User(name='Fred', org=org1) + user.save() + + org1.reload() + org2.reload() + user.reload() + self.assertEqual(org1.name, 'Org 1') + self.assertEqual(org2.name, 'Org 2') + self.assertEqual(user.name, 'Fred') + + user.name = 'Harold' + user.org = org2 + + org2.name = 'New Org 2' + self.assertEqual(org2.name, 'New Org 2') + + user.save() + org2.save() + + self.assertEqual(org2.name, 'New Org 2') + org2.reload() + self.assertEqual(org2.name, 'New Org 2') if __name__ == '__main__': unittest.main() diff --git a/tests/document/dynamic.py b/tests/document/dynamic.py index 6263e68c..bf69cb27 100644 --- a/tests/document/dynamic.py +++ b/tests/document/dynamic.py @@ -292,6 +292,44 @@ class DynamicTest(unittest.TestCase): person.save() self.assertEqual(Person.objects.first().age, 35) + def test_dynamic_and_embedded_dict_access(self): + """Ensure embedded dynamic documents work with dict[] style access""" + + class 
Address(EmbeddedDocument): + city = StringField() + + class Person(DynamicDocument): + name = StringField() + + Person.drop_collection() + + Person(name="Ross", address=Address(city="London")).save() + + person = Person.objects.first() + person.attrval = "This works" + + person["phone"] = "555-1212" # but this should too + + # Same thing two levels deep + person["address"]["city"] = "Lundenne" + person.save() + + self.assertEqual(Person.objects.first().address.city, "Lundenne") + + self.assertEqual(Person.objects.first().phone, "555-1212") + + person = Person.objects.first() + person.address = Address(city="Londinium") + person.save() + + self.assertEqual(Person.objects.first().address.city, "Londinium") + + + person = Person.objects.first() + person["age"] = 35 + person.save() + self.assertEqual(Person.objects.first().age, 35) + if __name__ == '__main__': unittest.main() diff --git a/tests/document/indexes.py b/tests/document/indexes.py index 04d56324..cf6122a3 100644 --- a/tests/document/indexes.py +++ b/tests/document/indexes.py @@ -156,6 +156,25 @@ class IndexesTest(unittest.TestCase): self.assertEqual([{'fields': [('_cls', 1), ('title', 1)]}], A._meta['index_specs']) + def test_index_no_cls(self): + """Ensure index specs are inhertited correctly""" + + class A(Document): + title = StringField() + meta = { + 'indexes': [ + {'fields': ('title',), 'cls': False}, + ], + 'allow_inheritance': True, + 'index_cls': False + } + + self.assertEqual([('title', 1)], A._meta['index_specs'][0]['fields']) + A._get_collection().drop_indexes() + A.ensure_indexes() + info = A._get_collection().index_information() + self.assertEqual(len(info.keys()), 2) + def test_build_index_spec_is_not_destructive(self): class MyDoc(Document): @@ -472,7 +491,7 @@ class IndexesTest(unittest.TestCase): def invalid_index_2(): return BlogPost.objects.hint(('tags', 1)) - self.assertRaises(TypeError, invalid_index_2) + self.assertRaises(Exception, invalid_index_2) def test_unique(self): """Ensure 
that uniqueness constraints are applied to fields. diff --git a/tests/document/instance.py b/tests/document/instance.py index 81734aa0..951871b6 100644 --- a/tests/document/instance.py +++ b/tests/document/instance.py @@ -10,11 +10,12 @@ import uuid from datetime import datetime from bson import DBRef -from tests.fixtures import PickleEmbedded, PickleTest, PickleSignalsTest +from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest, + PickleDyanmicEmbedded, PickleDynamicTest) from mongoengine import * from mongoengine.errors import (NotRegistered, InvalidDocumentError, - InvalidQueryError) + InvalidQueryError, NotUniqueError) from mongoengine.queryset import NULLIFY, Q from mongoengine.connection import get_db from mongoengine.base import get_document @@ -56,7 +57,7 @@ class InstanceTest(unittest.TestCase): date = DateTimeField(default=datetime.now) meta = { 'max_documents': 10, - 'max_size': 90000, + 'max_size': 4096, } Log.drop_collection() @@ -74,7 +75,7 @@ class InstanceTest(unittest.TestCase): options = Log.objects._collection.options() self.assertEqual(options['capped'], True) self.assertEqual(options['max'], 10) - self.assertEqual(options['size'], 90000) + self.assertTrue(options['size'] >= 4096) # Check that the document cannot be redefined with different options def recreate_log_document(): @@ -352,6 +353,14 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 20) + person.reload('age') + self.assertEqual(person.name, "Test User") + self.assertEqual(person.age, 21) + + person.reload() + self.assertEqual(person.name, "Mr Test User") + self.assertEqual(person.age, 21) + person.reload() self.assertEqual(person.name, "Mr Test User") self.assertEqual(person.age, 21) @@ -401,6 +410,7 @@ class InstanceTest(unittest.TestCase): 'embedded_field.dict_field']) doc.save() + self.assertEqual(len(doc.list_field), 4) doc = doc.reload(10) self.assertEqual(doc._get_changed_fields(), []) 
self.assertEqual(len(doc.list_field), 4) @@ -408,6 +418,37 @@ class InstanceTest(unittest.TestCase): self.assertEqual(len(doc.embedded_field.list_field), 4) self.assertEqual(len(doc.embedded_field.dict_field), 2) + doc.list_field.append(1) + doc.save() + doc.dict_field['extra'] = 1 + doc = doc.reload(10, 'list_field') + self.assertEqual(doc._get_changed_fields(), []) + self.assertEqual(len(doc.list_field), 5) + self.assertEqual(len(doc.dict_field), 3) + self.assertEqual(len(doc.embedded_field.list_field), 4) + self.assertEqual(len(doc.embedded_field.dict_field), 2) + + def test_reload_doesnt_exist(self): + class Foo(Document): + pass + + f = Foo() + try: + f.reload() + except Foo.DoesNotExist: + pass + except Exception as ex: + self.assertFalse("Threw wrong exception") + + f.save() + f.delete() + try: + f.reload() + except Foo.DoesNotExist: + pass + except Exception as ex: + self.assertFalse("Threw wrong exception") + def test_dictionary_access(self): """Ensure that dictionary-style field access works properly. """ @@ -443,6 +484,13 @@ class InstanceTest(unittest.TestCase): self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(), ['_cls', 'name', 'age', 'salary']) + def test_embedded_document_to_mongo_id(self): + class SubDoc(EmbeddedDocument): + id = StringField(required=True) + + sub_doc = SubDoc(id="abc") + self.assertEqual(sub_doc.to_mongo().keys(), ['id']) + def test_embedded_document(self): """Ensure that embedded documents are set up correctly. 
""" @@ -482,6 +530,23 @@ class InstanceTest(unittest.TestCase): doc = Doc.objects.get() self.assertEqual(doc, doc.embedded_field[0]._instance) + def test_instance_is_set_on_setattr(self): + + class Email(EmbeddedDocument): + email = EmailField() + + class Account(Document): + email = EmbeddedDocumentField(Email) + + Account.drop_collection() + acc = Account() + acc.email = Email(email='test@example.com') + self.assertTrue(hasattr(acc._data["email"], "_instance")) + acc.save() + + acc1 = Account.objects.first() + self.assertTrue(hasattr(acc1._data["email"], "_instance")) + def test_document_clean(self): class TestDocument(Document): status = StringField() @@ -771,6 +836,80 @@ class InstanceTest(unittest.TestCase): p1.reload() self.assertEqual(p1.name, p.parent.name) + def test_save_atomicity_condition(self): + + class Widget(Document): + toggle = BooleanField(default=False) + count = IntField(default=0) + save_id = UUIDField() + + def flip(widget): + widget.toggle = not widget.toggle + widget.count += 1 + + def UUID(i): + return uuid.UUID(int=i) + + Widget.drop_collection() + + w1 = Widget(toggle=False, save_id=UUID(1)) + + # ignore save_condition on new record creation + w1.save(save_condition={'save_id':UUID(42)}) + w1.reload() + self.assertFalse(w1.toggle) + self.assertEqual(w1.save_id, UUID(1)) + self.assertEqual(w1.count, 0) + + # mismatch in save_condition prevents save + flip(w1) + self.assertTrue(w1.toggle) + self.assertEqual(w1.count, 1) + w1.save(save_condition={'save_id':UUID(42)}) + w1.reload() + self.assertFalse(w1.toggle) + self.assertEqual(w1.count, 0) + + # matched save_condition allows save + flip(w1) + self.assertTrue(w1.toggle) + self.assertEqual(w1.count, 1) + w1.save(save_condition={'save_id':UUID(1)}) + w1.reload() + self.assertTrue(w1.toggle) + self.assertEqual(w1.count, 1) + + # save_condition can be used to ensure atomic read & updates + # i.e., prevent interleaved reads and writes from separate contexts + w2 = Widget.objects.get() + 
self.assertEqual(w1, w2) + old_id = w1.save_id + + flip(w1) + w1.save_id = UUID(2) + w1.save(save_condition={'save_id':old_id}) + w1.reload() + self.assertFalse(w1.toggle) + self.assertEqual(w1.count, 2) + flip(w2) + flip(w2) + w2.save(save_condition={'save_id':old_id}) + w2.reload() + self.assertFalse(w2.toggle) + self.assertEqual(w2.count, 2) + + # save_condition uses mongoengine-style operator syntax + flip(w1) + w1.save(save_condition={'count__lt':w1.count}) + w1.reload() + self.assertTrue(w1.toggle) + self.assertEqual(w1.count, 3) + flip(w1) + w1.save(save_condition={'count__gte':w1.count}) + w1.reload() + self.assertTrue(w1.toggle) + self.assertEqual(w1.count, 3) + def test_update(self): """Ensure that an existing document is updated instead of be overwritten.""" @@ -941,6 +1080,16 @@ class InstanceTest(unittest.TestCase): self.assertRaises(InvalidQueryError, update_no_op_raises) + def test_update_unique_field(self): + class Doc(Document): + name = StringField(unique=True) + + doc1 = Doc(name="first").save() + doc2 = Doc(name="second").save() + + self.assertRaises(NotUniqueError, lambda: + doc2.update(set__name=doc1.name)) + def test_embedded_update(self): """ Test update on `EmbeddedDocumentField` fields @@ -1827,6 +1976,29 @@ class InstanceTest(unittest.TestCase): self.assertEqual(pickle_doc.string, "Two") self.assertEqual(pickle_doc.lists, ["1", "2", "3"]) + def test_dynamic_document_pickle(self): + + pickle_doc = PickleDynamicTest(name="test", number=1, string="One", lists=['1', '2']) + pickle_doc.embedded = PickleDyanmicEmbedded(foo="Bar") + pickled_doc = pickle.dumps(pickle_doc) # make sure pickling works even before the doc is saved + + pickle_doc.save() + + pickled_doc = pickle.dumps(pickle_doc) + resurrected = pickle.loads(pickled_doc) + + self.assertEqual(resurrected, pickle_doc) + self.assertEqual(resurrected._fields_ordered, + pickle_doc._fields_ordered) + self.assertEqual(resurrected._dynamic_fields.keys(), + pickle_doc._dynamic_fields.keys()) + 
+ self.assertEqual(resurrected.embedded, pickle_doc.embedded) + self.assertEqual(resurrected.embedded._fields_ordered, + pickle_doc.embedded._fields_ordered) + self.assertEqual(resurrected.embedded._dynamic_fields.keys(), + pickle_doc.embedded._dynamic_fields.keys()) + def test_picklable_on_signals(self): pickle_doc = PickleSignalsTest(number=1, string="One", lists=['1', '2']) pickle_doc.embedded = PickleEmbedded() @@ -2209,6 +2381,8 @@ class InstanceTest(unittest.TestCase): log.machine = "Localhost" log.save() + self.assertTrue(log.id is not None) + log.log = "Saving" log.save() @@ -2232,6 +2406,8 @@ class InstanceTest(unittest.TestCase): log.machine = "Localhost" log.save() + self.assertTrue(log.id is not None) + log.log = "Saving" log.save() @@ -2289,6 +2465,16 @@ class InstanceTest(unittest.TestCase): self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 42) + def test_mixed_creation_dynamic(self): + """Ensure that document may be created using mixed arguments. + """ + class Person(DynamicDocument): + name = StringField() + + person = Person("Test User", age=42) + self.assertEqual(person.name, "Test User") + self.assertEqual(person.age, 42) + def test_bad_mixed_creation(self): """Ensure that document gives correct error when duplicating arguments """ @@ -2329,7 +2515,7 @@ class InstanceTest(unittest.TestCase): for parameter_name, parameter in self.parameters.iteritems(): parameter.expand() - class System(Document): + class NodesSystem(Document): name = StringField(required=True) nodes = MapField(ReferenceField(Node, dbref=False)) @@ -2337,20 +2523,38 @@ class InstanceTest(unittest.TestCase): for node_name, node in self.nodes.iteritems(): node.expand() node.save(*args, **kwargs) - super(System, self).save(*args, **kwargs) + super(NodesSystem, self).save(*args, **kwargs) - System.drop_collection() + NodesSystem.drop_collection() Node.drop_collection() - system = System(name="system") + system = NodesSystem(name="system") system.nodes["node"] = 
Node() system.save() system.nodes["node"].parameters["param"] = Parameter() system.save() - system = System.objects.first() + system = NodesSystem.objects.first() self.assertEqual("UNDEFINED", system.nodes["node"].parameters["param"].macros["test"].value) + def test_embedded_document_equality(self): + + class Test(Document): + field = StringField(required=True) + + class Embedded(EmbeddedDocument): + ref = ReferenceField(Test) + + Test.drop_collection() + test = Test(field='123').save() # has id + + e = Embedded(ref=test) + f1 = Embedded._from_son(e.to_mongo()) + f2 = Embedded._from_son(e.to_mongo()) + + self.assertEqual(f1, f2) + f1.ref # Dereferences lazily + self.assertEqual(f1, f2) if __name__ == '__main__': unittest.main() diff --git a/tests/document/json_serialisation.py b/tests/document/json_serialisation.py index dbc09d83..2b5d9a0c 100644 --- a/tests/document/json_serialisation.py +++ b/tests/document/json_serialisation.py @@ -31,6 +31,10 @@ class TestJson(unittest.TestCase): doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) + doc_json = doc.to_json(sort_keys=True, separators=(',', ':')) + expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}""" + self.assertEqual(doc_json, expected_json) + self.assertEqual(doc, Doc.from_json(doc.to_json())) def test_json_complex(self): diff --git a/tests/fields/fields.py b/tests/fields/fields.py index 3e48a214..c108f37e 100644 --- a/tests/fields/fields.py +++ b/tests/fields/fields.py @@ -384,6 +384,9 @@ class FieldTest(unittest.TestCase): person.height = 4.0 self.assertRaises(ValidationError, person.validate) + person_2 = Person(height='something invalid') + self.assertRaises(ValidationError, person_2.validate) + def test_decimal_validation(self): """Ensure that invalid values cannot be assigned to decimal fields. 
""" @@ -405,6 +408,11 @@ class FieldTest(unittest.TestCase): self.assertRaises(ValidationError, person.validate) person.height = Decimal('4.0') self.assertRaises(ValidationError, person.validate) + person.height = 'something invalid' + self.assertRaises(ValidationError, person.validate) + + person_2 = Person(height='something invalid') + self.assertRaises(ValidationError, person_2.validate) Person.drop_collection() @@ -1018,6 +1026,32 @@ class FieldTest(unittest.TestCase): e.mapping = {} self.assertEqual([], e._changed_fields) + def test_slice_marks_field_as_changed(self): + + class Simple(Document): + widgets = ListField() + + simple = Simple(widgets=[1, 2, 3, 4]).save() + simple.widgets[:3] = [] + self.assertEqual(['widgets'], simple._changed_fields) + simple.save() + + simple = simple.reload() + self.assertEqual(simple.widgets, [4]) + + def test_del_slice_marks_field_as_changed(self): + + class Simple(Document): + widgets = ListField() + + simple = Simple(widgets=[1, 2, 3, 4]).save() + del simple.widgets[:3] + self.assertEqual(['widgets'], simple._changed_fields) + simple.save() + + simple = simple.reload() + self.assertEqual(simple.widgets, [4]) + def test_list_field_complex(self): """Ensure that the list fields can handle the complex types.""" @@ -1083,9 +1117,15 @@ class FieldTest(unittest.TestCase): post.info = {'$title': 'test'} self.assertRaises(ValidationError, post.validate) + post.info = {'nested': {'$title': 'test'}} + self.assertRaises(ValidationError, post.validate) + post.info = {'the.title': 'test'} self.assertRaises(ValidationError, post.validate) + post.info = {'nested': {'the.title': 'test'}} + self.assertRaises(ValidationError, post.validate) + post.info = {1: 'test'} self.assertRaises(ValidationError, post.validate) @@ -1864,6 +1904,37 @@ class FieldTest(unittest.TestCase): Post.drop_collection() User.drop_collection() + def test_generic_reference_list_item_modification(self): + """Ensure that modifications of related documents (through 
generic reference) don't influence on querying + """ + class Post(Document): + title = StringField() + + class User(Document): + username = StringField() + bookmarks = ListField(GenericReferenceField()) + + Post.drop_collection() + User.drop_collection() + + post_1 = Post(title="Behind the Scenes of the Pavement Reunion") + post_1.save() + + user = User(bookmarks=[post_1]) + user.save() + + post_1.title = "Title was modified" + user.username = "New username" + user.save() + + user = User.objects(bookmarks__all=[post_1]).first() + + self.assertNotEqual(user, None) + self.assertEqual(user.bookmarks[0], post_1) + + Post.drop_collection() + User.drop_collection() + def test_binary_fields(self): """Ensure that binary fields can be stored and retrieved. """ @@ -2428,6 +2499,9 @@ class FieldTest(unittest.TestCase): user = User(email="ross@example.com") self.assertTrue(user.validate() is None) + user = User(email="ross@example.co.uk") + self.assertTrue(user.validate() is None) + user = User(email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S" "ucictfqpdkK9iS1zeFw8sg7s7cwAF7suIfUfeyueLpfosjn3" "aJIazqqWkm7.net")) @@ -2436,6 +2510,9 @@ class FieldTest(unittest.TestCase): user = User(email='me@localhost') self.assertRaises(ValidationError, user.validate) + user = User(email="ross@example.com.") + self.assertRaises(ValidationError, user.validate) + def test_email_field_honors_regex(self): class User(Document): email = EmailField(regex=r'\w+@example.com') @@ -2448,6 +2525,92 @@ class FieldTest(unittest.TestCase): user = User(email='me@example.com') self.assertTrue(user.validate() is None) + def test_tuples_as_tuples(self): + """ + Ensure that tuples remain tuples when they are + inside a ComplexBaseField + """ + from mongoengine.base import BaseField + + class EnumField(BaseField): + + def __init__(self, **kwargs): + super(EnumField, self).__init__(**kwargs) + + def to_mongo(self, value): + return value + + def to_python(self, value): + return tuple(value) + + class 
TestDoc(Document): + items = ListField(EnumField()) + + TestDoc.drop_collection() + tuples = [(100, 'Testing')] + doc = TestDoc() + doc.items = tuples + doc.save() + x = TestDoc.objects().get() + self.assertTrue(x is not None) + self.assertTrue(len(x.items) == 1) + self.assertTrue(tuple(x.items[0]) in tuples) + self.assertTrue(x.items[0] in tuples) + + def test_dynamic_fields_class(self): + + class Doc2(Document): + field_1 = StringField(db_field='f') + + class Doc(Document): + my_id = IntField(required=True, unique=True, primary_key=True) + embed_me = DynamicField(db_field='e') + field_x = StringField(db_field='x') + + Doc.drop_collection() + Doc2.drop_collection() + + doc2 = Doc2(field_1="hello") + doc = Doc(my_id=1, embed_me=doc2, field_x="x") + self.assertRaises(OperationError, doc.save) + + doc2.save() + doc.save() + + doc = Doc.objects.get() + self.assertEqual(doc.embed_me.field_1, "hello") + + def test_dynamic_fields_embedded_class(self): + + class Embed(EmbeddedDocument): + field_1 = StringField(db_field='f') + + class Doc(Document): + my_id = IntField(required=True, unique=True, primary_key=True) + embed_me = DynamicField(db_field='e') + field_x = StringField(db_field='x') + + Doc.drop_collection() + + Doc(my_id=1, embed_me=Embed(field_1="hello"), field_x="x").save() + + doc = Doc.objects.get() + self.assertEqual(doc.embed_me.field_1, "hello") + + def test_invalid_dict_value(self): + class DictFieldTest(Document): + dictionary = DictField(required=True) + + DictFieldTest.drop_collection() + + test = DictFieldTest(dictionary=None) + test.dictionary # Just access to test getter + self.assertRaises(ValidationError, test.validate) + + test = DictFieldTest(dictionary=False) + test.dictionary # Just access to test getter + self.assertRaises(ValidationError, test.validate) + if __name__ == '__main__': unittest.main() diff --git a/tests/fields/file_tests.py b/tests/fields/file_tests.py index dfef9eed..7ae53e8a 100644 --- a/tests/fields/file_tests.py +++ 
b/tests/fields/file_tests.py @@ -53,11 +53,12 @@ class FileTest(unittest.TestCase): content_type = 'text/plain' putfile = PutFile() - putfile.the_file.put(text, content_type=content_type) + putfile.the_file.put(text, content_type=content_type, filename="hello") putfile.save() result = PutFile.objects.first() self.assertTrue(putfile == result) + self.assertEqual("%s" % result.the_file, "") self.assertEqual(result.the_file.read(), text) self.assertEqual(result.the_file.content_type, content_type) result.the_file.delete() # Remove file from GridFS @@ -278,7 +279,7 @@ class FileTest(unittest.TestCase): t.image.put(f) self.fail("Should have raised an invalidation error") except ValidationError, e: - self.assertEquals("%s" % e, "Invalid image: cannot identify image file") + self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) t = TestImage() t.image.put(open(TEST_IMAGE_PATH, 'rb')) @@ -455,5 +456,31 @@ class FileTest(unittest.TestCase): self.assertEqual(1, TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count()) + def test_complex_field_filefield(self): + """Ensure you can add meta data to file""" + + class Animal(Document): + genus = StringField() + family = StringField() + photos = ListField(FileField()) + + Animal.drop_collection() + marmot = Animal(genus='Marmota', family='Sciuridae') + + marmot_photo = open(TEST_IMAGE_PATH, 'rb') # Retrieve a photo from disk + + photos_field = marmot._fields['photos'].field + new_proxy = photos_field.get_proxy_obj('photos', marmot) + new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') + marmot_photo.close() + + marmot.photos.append(new_proxy) + marmot.save() + + marmot = Animal.objects.get() + self.assertEqual(marmot.photos[0].content_type, 'image/jpeg') + self.assertEqual(marmot.photos[0].foo, 'bar') + self.assertEqual(marmot.photos[0].get().length, 8313) + if __name__ == '__main__': unittest.main() diff --git a/tests/fields/geo.py b/tests/fields/geo.py index 31ded26a..be29a0b2 
100644 --- a/tests/fields/geo.py +++ b/tests/fields/geo.py @@ -75,6 +75,12 @@ class GeoFieldTest(unittest.TestCase): self._test_for_expected_error(Location, coord, expected) Location(loc=[1, 2]).validate() + Location(loc={ + "type": "Point", + "coordinates": [ + 81.4471435546875, + 23.61432859499169 + ]}).validate() def test_linestring_validation(self): class Location(Document): diff --git a/tests/fixtures.py b/tests/fixtures.py index e2070443..f1344d7c 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -17,6 +17,14 @@ class PickleTest(Document): photo = FileField() +class PickleDyanmicEmbedded(DynamicEmbeddedDocument): + date = DateTimeField(default=datetime.now) + + +class PickleDynamicTest(DynamicDocument): + number = IntField() + + class PickleSignalsTest(Document): number = IntField() string = StringField(choices=(('One', '1'), ('Two', '2'))) diff --git a/tests/queryset/__init__.py b/tests/queryset/__init__.py index 8a93c19f..c36b2684 100644 --- a/tests/queryset/__init__.py +++ b/tests/queryset/__init__.py @@ -3,3 +3,4 @@ from field_list import * from queryset import * from visitor import * from geo import * +from modify import * \ No newline at end of file diff --git a/tests/queryset/field_list.py b/tests/queryset/field_list.py index 2bdfce1f..7d66d263 100644 --- a/tests/queryset/field_list.py +++ b/tests/queryset/field_list.py @@ -162,6 +162,10 @@ class OnlyExcludeAllTest(unittest.TestCase): self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, person.age) + obj = self.Person.objects.only(*('id', 'name',)).get() + self.assertEqual(obj.name, person.name) + self.assertEqual(obj.age, None) + # Check polymorphism still works class Employee(self.Person): salary = IntField(db_field='wage') @@ -395,5 +399,28 @@ class OnlyExcludeAllTest(unittest.TestCase): numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) + + def test_exclude_from_subclasses_docs(self): + + class 
Base(Document): + username = StringField() + + meta = {'allow_inheritance': True} + + class Anon(Base): + anon = BooleanField() + + class User(Base): + password = StringField() + wibble = StringField() + + Base.drop_collection() + User(username="mongodb", password="secret").save() + + user = Base.objects().exclude("password", "wibble").first() + self.assertEqual(user.password, None) + + self.assertRaises(LookUpError, Base.objects.exclude, "made_up") + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/geo.py b/tests/queryset/geo.py index f5648961..5148a48e 100644 --- a/tests/queryset/geo.py +++ b/tests/queryset/geo.py @@ -5,6 +5,8 @@ import unittest from datetime import datetime, timedelta from mongoengine import * +from nose.plugins.skip import SkipTest + __all__ = ("GeoQueriesTest",) @@ -139,6 +141,7 @@ class GeoQueriesTest(unittest.TestCase): def test_spherical_geospatial_operators(self): """Ensure that spherical geospatial queries are working """ + raise SkipTest("https://jira.mongodb.org/browse/SERVER-14039") class Point(Document): location = GeoPointField() @@ -414,5 +417,47 @@ class GeoQueriesTest(unittest.TestCase): roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() self.assertEqual(1, roads) + def test_2dsphere_point_sets_correctly(self): + class Location(Document): + loc = PointField() + + Location.drop_collection() + + Location(loc=[1,2]).save() + loc = Location.objects.as_pymongo()[0] + self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]}) + + Location.objects.update(set__loc=[2,1]) + loc = Location.objects.as_pymongo()[0] + self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]}) + + def test_2dsphere_linestring_sets_correctly(self): + class Location(Document): + line = LineStringField() + + Location.drop_collection() + + Location(line=[[1, 2], [2, 2]]).save() + loc = Location.objects.as_pymongo()[0] + self.assertEqual(loc["line"], {"type": "LineString", "coordinates": 
[[1, 2], [2, 2]]}) + + Location.objects.update(set__line=[[2, 1], [1, 2]]) + loc = Location.objects.as_pymongo()[0] + self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]}) + + def test_geojson_PolygonField(self): + class Location(Document): + poly = PolygonField() + + Location.drop_collection() + + Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save() + loc = Location.objects.as_pymongo()[0] + self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) + + Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]]) + loc = Location.objects.as_pymongo()[0] + self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}) + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/modify.py b/tests/queryset/modify.py new file mode 100644 index 00000000..e0c7d1fe --- /dev/null +++ b/tests/queryset/modify.py @@ -0,0 +1,102 @@ +import sys +sys.path[0:0] = [""] + +import unittest + +from mongoengine import connect, Document, IntField + +__all__ = ("FindAndModifyTest",) + + +class Doc(Document): + id = IntField(primary_key=True) + value = IntField() + + +class FindAndModifyTest(unittest.TestCase): + + def setUp(self): + connect(db="mongoenginetest") + Doc.drop_collection() + + def assertDbEqual(self, docs): + self.assertEqual(list(Doc._collection.find().sort("id")), docs) + + def test_modify(self): + Doc(id=0, value=0).save() + doc = Doc(id=1, value=1).save() + + old_doc = Doc.objects(id=1).modify(set__value=-1) + self.assertEqual(old_doc.to_json(), doc.to_json()) + self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + + def test_modify_with_new(self): + Doc(id=0, value=0).save() + doc = Doc(id=1, value=1).save() + + new_doc = Doc.objects(id=1).modify(set__value=-1, new=True) + doc.value = -1 + self.assertEqual(new_doc.to_json(), doc.to_json()) + self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, 
"value": -1}]) + + def test_modify_not_existing(self): + Doc(id=0, value=0).save() + self.assertEqual(Doc.objects(id=1).modify(set__value=-1), None) + self.assertDbEqual([{"_id": 0, "value": 0}]) + + def test_modify_with_upsert(self): + Doc(id=0, value=0).save() + old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True) + self.assertEqual(old_doc, None) + self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) + + def test_modify_with_upsert_existing(self): + Doc(id=0, value=0).save() + doc = Doc(id=1, value=1).save() + + old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True) + self.assertEqual(old_doc.to_json(), doc.to_json()) + self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + + def test_modify_with_upsert_with_new(self): + Doc(id=0, value=0).save() + new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1) + self.assertEqual(new_doc.to_mongo(), {"_id": 1, "value": 1}) + self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) + + def test_modify_with_remove(self): + Doc(id=0, value=0).save() + doc = Doc(id=1, value=1).save() + + old_doc = Doc.objects(id=1).modify(remove=True) + self.assertEqual(old_doc.to_json(), doc.to_json()) + self.assertDbEqual([{"_id": 0, "value": 0}]) + + def test_find_and_modify_with_remove_not_existing(self): + Doc(id=0, value=0).save() + self.assertEqual(Doc.objects(id=1).modify(remove=True), None) + self.assertDbEqual([{"_id": 0, "value": 0}]) + + def test_modify_with_order_by(self): + Doc(id=0, value=3).save() + Doc(id=1, value=2).save() + Doc(id=2, value=1).save() + doc = Doc(id=3, value=0).save() + + old_doc = Doc.objects().order_by("-id").modify(set__value=-1) + self.assertEqual(old_doc.to_json(), doc.to_json()) + self.assertDbEqual([ + {"_id": 0, "value": 3}, {"_id": 1, "value": 2}, + {"_id": 2, "value": 1}, {"_id": 3, "value": -1}]) + + def test_modify_with_fields(self): + Doc(id=0, value=0).save() + Doc(id=1, value=1).save() + + old_doc = 
Doc.objects(id=1).only("id").modify(set__value=-1) + self.assertEqual(old_doc.to_mongo(), {"_id": 1}) + self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/queryset/queryset.py b/tests/queryset/queryset.py index 21df22c5..62a142f8 100644 --- a/tests/queryset/queryset.py +++ b/tests/queryset/queryset.py @@ -14,9 +14,9 @@ from pymongo.read_preferences import ReadPreference from bson import ObjectId from mongoengine import * -from mongoengine.connection import get_connection +from mongoengine.connection import get_connection, get_db from mongoengine.python_support import PY3 -from mongoengine.context_managers import query_counter +from mongoengine.context_managers import query_counter, switch_db from mongoengine.queryset import (QuerySet, QuerySetManager, MultipleObjectsReturned, DoesNotExist, queryset_manager) @@ -25,17 +25,29 @@ from mongoengine.errors import InvalidQueryError __all__ = ("QuerySetTest",) +class db_ops_tracker(query_counter): + def get_ops(self): + ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}} + return list(self.db.system.profile.find(ignore_query)) + + class QuerySetTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') + connect(db='mongoenginetest2', alias='test2') + + class PersonMeta(EmbeddedDocument): + weight = IntField() class Person(Document): name = StringField() age = IntField() + person_meta = EmbeddedDocumentField(PersonMeta) meta = {'allow_inheritance': True} Person.drop_collection() + self.PersonMeta = PersonMeta self.Person = Person def test_initialisation(self): @@ -536,6 +548,23 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(club.members['John']['gender'], "F") self.assertEqual(club.members['John']['age'], 14) + def test_update_results(self): + self.Person.drop_collection() + + result = self.Person(name="Bob", age=25).update(upsert=True, full_result=True) + 
self.assertTrue(isinstance(result, dict)) + self.assertTrue("upserted" in result) + self.assertFalse(result["updatedExisting"]) + + bob = self.Person.objects.first() + result = bob.update(set__age=30, full_result=True) + self.assertTrue(isinstance(result, dict)) + self.assertTrue(result["updatedExisting"]) + + self.Person(name="Bob", age=20).save() + result = self.Person.objects(name="Bob").update(set__name="bobby", multi=True) + self.assertEqual(result, 2) + def test_upsert(self): self.Person.drop_collection() @@ -628,7 +657,10 @@ class QuerySetTest(unittest.TestCase): blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) Blog.objects.insert(blogs, load_bulk=False) - self.assertEqual(q, 1) # 1 for the insert + if (get_connection().max_wire_version <= 1): + self.assertEqual(q, 1) + else: + self.assertEqual(q, 99) # profiling logs each doc now in the bulk op Blog.drop_collection() Blog.ensure_indexes() @@ -637,7 +669,10 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(q, 0) Blog.objects.insert(blogs) - self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch + if (get_connection().max_wire_version <= 1): + self.assertEqual(q, 2) # 1 for insert, and 1 for in bulk fetch + else: + self.assertEqual(q, 100) # 99 for insert, and 1 for in bulk fetch Blog.drop_collection() @@ -1018,6 +1053,54 @@ class QuerySetTest(unittest.TestCase): expected = [blog_post_1, blog_post_2, blog_post_3] self.assertSequence(qs, expected) + def test_clear_ordering(self): + """ Ensure that the default ordering can be cleared by calling order_by(). 
+ """ + class BlogPost(Document): + title = StringField() + published_date = DateTimeField() + + meta = { + 'ordering': ['-published_date'] + } + + BlogPost.drop_collection() + + with db_ops_tracker() as q: + BlogPost.objects.filter(title='whatever').first() + self.assertEqual(len(q.get_ops()), 1) + self.assertEqual(q.get_ops()[0]['query']['$orderby'], {u'published_date': -1}) + + with db_ops_tracker() as q: + BlogPost.objects.filter(title='whatever').order_by().first() + self.assertEqual(len(q.get_ops()), 1) + print q.get_ops()[0]['query'] + self.assertFalse('$orderby' in q.get_ops()[0]['query']) + + def test_no_ordering_for_get(self): + """ Ensure that Doc.objects.get doesn't use any ordering. + """ + class BlogPost(Document): + title = StringField() + published_date = DateTimeField() + + meta = { + 'ordering': ['-published_date'] + } + + BlogPost.objects.create(title='whatever', published_date=datetime.utcnow()) + + with db_ops_tracker() as q: + BlogPost.objects.get(title='whatever') + self.assertEqual(len(q.get_ops()), 1) + self.assertFalse('$orderby' in q.get_ops()[0]['query']) + + # Ordering should be ignored for .get even if we set it explicitly + with db_ops_tracker() as q: + BlogPost.objects.order_by('-title').get(title='whatever') + self.assertEqual(len(q.get_ops()), 1) + self.assertFalse('$orderby' in q.get_ops()[0]['query']) + def test_find_embedded(self): """Ensure that an embedded document is properly returned from a query. 
""" @@ -1475,9 +1558,6 @@ class QuerySetTest(unittest.TestCase): def test_pull_nested(self): - class User(Document): - name = StringField() - class Collaborator(EmbeddedDocument): user = StringField() @@ -1492,8 +1572,7 @@ class QuerySetTest(unittest.TestCase): Site.drop_collection() c = Collaborator(user='Esteban') - s = Site(name="test", collaborators=[c]) - s.save() + s = Site(name="test", collaborators=[c]).save() Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') self.assertEqual(Site.objects.first().collaborators, []) @@ -1503,6 +1582,71 @@ class QuerySetTest(unittest.TestCase): self.assertRaises(InvalidQueryError, pull_all) + def test_pull_from_nested_embedded(self): + + class User(EmbeddedDocument): + name = StringField() + + def __unicode__(self): + return '%s' % self.name + + class Collaborator(EmbeddedDocument): + helpful = ListField(EmbeddedDocumentField(User)) + unhelpful = ListField(EmbeddedDocumentField(User)) + + class Site(Document): + name = StringField(max_length=75, unique=True, required=True) + collaborators = EmbeddedDocumentField(Collaborator) + + + Site.drop_collection() + + c = User(name='Esteban') + f = User(name='Frank') + s = Site(name="test", collaborators=Collaborator(helpful=[c], unhelpful=[f])).save() + + Site.objects(id=s.id).update_one(pull__collaborators__helpful=c) + self.assertEqual(Site.objects.first().collaborators['helpful'], []) + + Site.objects(id=s.id).update_one(pull__collaborators__unhelpful={'name': 'Frank'}) + self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) + + def pull_all(): + Site.objects(id=s.id).update_one(pull_all__collaborators__helpful__name=['Ross']) + + self.assertRaises(InvalidQueryError, pull_all) + + def test_pull_from_nested_mapfield(self): + + class Collaborator(EmbeddedDocument): + user = StringField() + + def __unicode__(self): + return '%s' % self.user + + class Site(Document): + name = StringField(max_length=75, unique=True, required=True) + collaborators = 
MapField(ListField(EmbeddedDocumentField(Collaborator))) + + + Site.drop_collection() + + c = Collaborator(user='Esteban') + f = Collaborator(user='Frank') + s = Site(name="test", collaborators={'helpful':[c],'unhelpful':[f]}) + s.save() + + Site.objects(id=s.id).update_one(pull__collaborators__helpful__user='Esteban') + self.assertEqual(Site.objects.first().collaborators['helpful'], []) + + Site.objects(id=s.id).update_one(pull__collaborators__unhelpful={'user':'Frank'}) + self.assertEqual(Site.objects.first().collaborators['unhelpful'], []) + + def pull_all(): + Site.objects(id=s.id).update_one(pull_all__collaborators__helpful__user=['Ross']) + + self.assertRaises(InvalidQueryError, pull_all) + def test_update_one_pop_generic_reference(self): class BlogTag(Document): @@ -1596,6 +1740,32 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(message.authors[1].name, "Ross") self.assertEqual(message.authors[2].name, "Adam") + def test_reload_embedded_docs_instance(self): + + class SubDoc(EmbeddedDocument): + val = IntField() + + class Doc(Document): + embedded = EmbeddedDocumentField(SubDoc) + + doc = Doc(embedded=SubDoc(val=0)).save() + doc.reload() + + self.assertEqual(doc.pk, doc.embedded._instance.pk) + + def test_reload_list_embedded_docs_instance(self): + + class SubDoc(EmbeddedDocument): + val = IntField() + + class Doc(Document): + embedded = ListField(EmbeddedDocumentField(SubDoc)) + + doc = Doc(embedded=[SubDoc(val=0)]).save() + doc.reload() + + self.assertEqual(doc.pk, doc.embedded[0]._instance.pk) + def test_order_by(self): """Ensure that QuerySets may be ordered. 
""" @@ -1816,6 +1986,140 @@ class QuerySetTest(unittest.TestCase): BlogPost.drop_collection() + def test_map_reduce_custom_output(self): + """ + Test map/reduce custom output + """ + register_connection('test2', 'mongoenginetest2') + + class Family(Document): + id = IntField( + primary_key=True) + log = StringField() + + class Person(Document): + id = IntField( + primary_key=True) + name = StringField() + age = IntField() + family = ReferenceField(Family) + + Family.drop_collection() + Person.drop_collection() + + # creating first family + f1 = Family(id=1, log="Trav 02 de Julho") + f1.save() + + # persons of first family + Person(id=1, family=f1, name=u"Wilson Jr", age=21).save() + Person(id=2, family=f1, name=u"Wilson Father", age=45).save() + Person(id=3, family=f1, name=u"Eliana Costa", age=40).save() + Person(id=4, family=f1, name=u"Tayza Mariana", age=17).save() + + # creating second family + f2 = Family(id=2, log="Av prof frasc brunno") + f2.save() + + #persons of second family + Person(id=5, family=f2, name="Isabella Luanna", age=16).save() + Person(id=6, family=f2, name="Sandra Mara", age=36).save() + Person(id=7, family=f2, name="Igor Gabriel", age=10).save() + + # creating third family + f3 = Family(id=3, log="Av brazil") + f3.save() + + #persons of thrird family + Person(id=8, family=f3, name="Arthur WA", age=30).save() + Person(id=9, family=f3, name="Paula Leonel", age=25).save() + + # executing join map/reduce + map_person = """ + function () { + emit(this.family, { + totalAge: this.age, + persons: [{ + name: this.name, + age: this.age + }]}); + } + """ + + map_family = """ + function () { + emit(this._id, { + totalAge: 0, + persons: [] + }); + } + """ + + reduce_f = """ + function (key, values) { + var family = {persons: [], totalAge: 0}; + + values.forEach(function(value) { + if (value.persons) { + value.persons.forEach(function (person) { + family.persons.push(person); + family.totalAge += person.age; + }); + } + }); + + return family; + } + """ + 
cursor = Family.objects.map_reduce( + map_f=map_family, + reduce_f=reduce_f, + output={'replace': 'family_map', 'db_alias': 'test2'}) + + # start a map/reduce + cursor.next() + + results = Person.objects.map_reduce( + map_f=map_person, + reduce_f=reduce_f, + output={'reduce': 'family_map', 'db_alias': 'test2'}) + + results = list(results) + collection = get_db('test2').family_map + + self.assertEqual( + collection.find_one({'_id': 1}), { + '_id': 1, + 'value': { + 'persons': [ + {'age': 21, 'name': u'Wilson Jr'}, + {'age': 45, 'name': u'Wilson Father'}, + {'age': 40, 'name': u'Eliana Costa'}, + {'age': 17, 'name': u'Tayza Mariana'}], + 'totalAge': 123} + }) + + self.assertEqual( + collection.find_one({'_id': 2}), { + '_id': 2, + 'value': { + 'persons': [ + {'age': 16, 'name': u'Isabella Luanna'}, + {'age': 36, 'name': u'Sandra Mara'}, + {'age': 10, 'name': u'Igor Gabriel'}], + 'totalAge': 62} + }) + + self.assertEqual( + collection.find_one({'_id': 3}), { + '_id': 3, + 'value': { + 'persons': [ + {'age': 30, 'name': u'Arthur WA'}, + {'age': 25, 'name': u'Paula Leonel'}], + 'totalAge': 55} + }) + def test_map_reduce_finalize(self): """Ensure that map, reduce, and finalize run and introduce "scope" by simulating "hotness" ranking with Reddit algorithm. 
@@ -2165,6 +2469,19 @@ class QuerySetTest(unittest.TestCase): self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.average('age')), avg) + # dot notation + self.Person(name='person meta', person_meta=self.PersonMeta(weight=0)).save() + self.assertAlmostEqual(int(self.Person.objects.average('person_meta.weight')), 0) + + for i, weight in enumerate(ages): + self.Person(name='test meta%i', person_meta=self.PersonMeta(weight=weight)).save() + + self.assertAlmostEqual(int(self.Person.objects.average('person_meta.weight')), avg) + + self.Person(name='test meta none').save() + self.assertEqual(int(self.Person.objects.average('person_meta.weight')), avg) + + def test_sum(self): """Ensure that field can be summed over correctly. """ @@ -2177,6 +2494,153 @@ class QuerySetTest(unittest.TestCase): self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) + for i, age in enumerate(ages): + self.Person(name='test meta%s' % i, person_meta=self.PersonMeta(weight=age)).save() + + self.assertEqual(int(self.Person.objects.sum('person_meta.weight')), sum(ages)) + + self.Person(name='weightless person').save() + self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) + + def test_embedded_average(self): + class Pay(EmbeddedDocument): + value = DecimalField() + + class Doc(Document): + name = StringField() + pay = EmbeddedDocumentField( + Pay) + + Doc.drop_collection() + + Doc(name=u"Wilson Junior", + pay=Pay(value=150)).save() + + Doc(name=u"Isabella Luanna", + pay=Pay(value=530)).save() + + Doc(name=u"Tayza mariana", + pay=Pay(value=165)).save() + + Doc(name=u"Eliana Costa", + pay=Pay(value=115)).save() + + self.assertEqual( + Doc.objects.average('pay.value'), + 240) + + def test_embedded_array_average(self): + class Pay(EmbeddedDocument): + values = ListField(DecimalField()) + + class Doc(Document): + name = StringField() + pay = EmbeddedDocumentField( + Pay) + + Doc.drop_collection() + + 
Doc(name=u"Wilson Junior", + pay=Pay(values=[150, 100])).save() + + Doc(name=u"Isabella Luanna", + pay=Pay(values=[530, 100])).save() + + Doc(name=u"Tayza mariana", + pay=Pay(values=[165, 100])).save() + + Doc(name=u"Eliana Costa", + pay=Pay(values=[115, 100])).save() + + self.assertEqual( + Doc.objects.average('pay.values'), + 170) + + def test_array_average(self): + class Doc(Document): + values = ListField(DecimalField()) + + Doc.drop_collection() + + Doc(values=[150, 100]).save() + Doc(values=[530, 100]).save() + Doc(values=[165, 100]).save() + Doc(values=[115, 100]).save() + + self.assertEqual( + Doc.objects.average('values'), + 170) + + def test_embedded_sum(self): + class Pay(EmbeddedDocument): + value = DecimalField() + + class Doc(Document): + name = StringField() + pay = EmbeddedDocumentField( + Pay) + + Doc.drop_collection() + + Doc(name=u"Wilson Junior", + pay=Pay(value=150)).save() + + Doc(name=u"Isabella Luanna", + pay=Pay(value=530)).save() + + Doc(name=u"Tayza mariana", + pay=Pay(value=165)).save() + + Doc(name=u"Eliana Costa", + pay=Pay(value=115)).save() + + self.assertEqual( + Doc.objects.sum('pay.value'), + 960) + + + def test_embedded_array_sum(self): + class Pay(EmbeddedDocument): + values = ListField(DecimalField()) + + class Doc(Document): + name = StringField() + pay = EmbeddedDocumentField( + Pay) + + Doc.drop_collection() + + Doc(name=u"Wilson Junior", + pay=Pay(values=[150, 100])).save() + + Doc(name=u"Isabella Luanna", + pay=Pay(values=[530, 100])).save() + + Doc(name=u"Tayza mariana", + pay=Pay(values=[165, 100])).save() + + Doc(name=u"Eliana Costa", + pay=Pay(values=[115, 100])).save() + + self.assertEqual( + Doc.objects.sum('pay.values'), + 1360) + + def test_array_sum(self): + class Doc(Document): + values = ListField(DecimalField()) + + Doc.drop_collection() + + Doc(values=[150, 100]).save() + Doc(values=[530, 100]).save() + Doc(values=[165, 100]).save() + Doc(values=[115, 100]).save() + + self.assertEqual( + 
Doc.objects.sum('values'), + 1360) + def test_distinct(self): """Ensure that the QuerySet.distinct method works. """ @@ -2250,6 +2714,27 @@ class QuerySetTest(unittest.TestCase): Product.drop_collection() + def test_distinct_ListField_EmbeddedDocumentField(self): + + class Author(EmbeddedDocument): + name = StringField() + + class Book(Document): + title = StringField() + authors = ListField(EmbeddedDocumentField(Author)) + + Book.drop_collection() + + mark_twain = Author(name="Mark Twain") + john_tolkien = Author(name="John Ronald Reuel Tolkien") + + book = Book(title="Tom Sawyer", authors=[mark_twain]).save() + book = Book(title="The Lord of the Rings", authors=[john_tolkien]).save() + book = Book(title="The Stories", authors=[mark_twain, john_tolkien]).save() + authors = Book.objects.distinct("authors") + + self.assertEqual(authors, [mark_twain, john_tolkien]) + def test_custom_manager(self): """Ensure that custom QuerySetManager instances work as expected. """ @@ -2592,6 +3077,19 @@ class QuerySetTest(unittest.TestCase): self.assertEqual(10, Post.objects.limit(5).skip(5).count(with_limit_and_skip=False)) + def test_count_and_none(self): + """Test count works with None()""" + + class MyDoc(Document): + pass + + MyDoc.drop_collection() + for i in xrange(0, 10): + MyDoc().save() + + self.assertEqual(MyDoc.objects.count(), 10) + self.assertEqual(MyDoc.objects.none().count(), 0) + def test_call_after_limits_set(self): """Ensure that re-filtering after slicing works """ @@ -2654,6 +3152,23 @@ class QuerySetTest(unittest.TestCase): Number.drop_collection() + def test_using(self): + """Ensure that switching databases for a queryset is possible + """ + class Number2(Document): + n = IntField() + + Number2.drop_collection() + with switch_db(Number2, 'test2') as Number2: + Number2.drop_collection() + + for i in range(1, 10): + t = Number2(n=i) + t.switch_db('test2') + t.save() + + self.assertEqual(len(Number2.objects.using('test2')), 9) + def test_unset_reference(self): 
class Comment(Document): text = StringField() @@ -3048,7 +3563,7 @@ class QuerySetTest(unittest.TestCase): class Foo(EmbeddedDocument): shape = StringField() color = StringField() - trick = BooleanField() + thick = BooleanField() meta = {'allow_inheritance': False} class Bar(Document): @@ -3057,17 +3572,20 @@ class QuerySetTest(unittest.TestCase): Bar.drop_collection() - b1 = Bar(foo=[Foo(shape= "square", color ="purple", thick = False), - Foo(shape= "circle", color ="red", thick = True)]) + b1 = Bar(foo=[Foo(shape="square", color="purple", thick=False), + Foo(shape="circle", color="red", thick=True)]) b1.save() - b2 = Bar(foo=[Foo(shape= "square", color ="red", thick = True), - Foo(shape= "circle", color ="purple", thick = False)]) + b2 = Bar(foo=[Foo(shape="square", color="red", thick=True), + Foo(shape="circle", color="purple", thick=False)]) b2.save() ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"})) self.assertEqual([b1], ak) + ak = list(Bar.objects(foo__match=Foo(shape="square", color="purple"))) + self.assertEqual([b1], ak) + def test_upsert_includes_cls(self): """Upserts should include _cls information for inheritable classes """ @@ -3088,6 +3606,24 @@ class QuerySetTest(unittest.TestCase): Test.objects(test='foo').update_one(upsert=True, set__test='foo') self.assertTrue('_cls' in Test._collection.find_one()) + def test_update_upsert_looks_like_a_digit(self): + class MyDoc(DynamicDocument): + pass + MyDoc.drop_collection() + self.assertEqual(1, MyDoc.objects.update_one(upsert=True, inc__47=1)) + self.assertEqual(MyDoc.objects.get()['47'], 1) + + def test_dictfield_key_looks_like_a_digit(self): + """Only should work with DictField even if they have numeric keys.""" + + class MyDoc(Document): + test = DictField() + + MyDoc.drop_collection() + doc = MyDoc(test={'47': 1}) + doc.save() + self.assertEqual(MyDoc.objects.only('test__47').get().test['47'], 1) + def test_read_preference(self): class Bar(Document): pass @@ -3116,7 +3652,7 @@ 
class QuerySetTest(unittest.TestCase): Doc(string="Bye", embedded_field=Embedded(string="Bye")).save() Doc().save() - json_data = Doc.objects.to_json() + json_data = Doc.objects.to_json(sort_keys=True, separators=(',', ':')) doc_objects = list(Doc.objects) self.assertEqual(doc_objects, Doc.objects.from_json(json_data)) @@ -3181,6 +3717,9 @@ class QuerySetTest(unittest.TestCase): User(name="Bob Dole", age=89, price=Decimal('1.11')).save() User(name="Barack Obama", age=51, price=Decimal('2.22')).save() + results = User.objects.only('id', 'name').as_pymongo() + self.assertEqual(sorted(results[0].keys()), sorted(['_id', 'name'])) + users = User.objects.only('name', 'price').as_pymongo() results = list(users) self.assertTrue(isinstance(results[0], dict)) @@ -3241,6 +3780,8 @@ class QuerySetTest(unittest.TestCase): self.assertTrue(isinstance(qs.first().organization, Organization)) self.assertFalse(isinstance(qs.no_dereference().first().organization, Organization)) + self.assertFalse(isinstance(qs.no_dereference().get().organization, + Organization)) self.assertTrue(isinstance(qs.first().organization, Organization)) def test_cached_queryset(self): @@ -3257,7 +3798,13 @@ class QuerySetTest(unittest.TestCase): [x for x in people] self.assertEqual(100, len(people._result_cache)) - self.assertEqual(None, people._len) + + import platform + + if platform.python_implementation() != "PyPy": + # PyPy evaluates __len__ when iterating with list comprehensions while CPython does not. + # This may be a bug in PyPy (PyPy/#1802) but it does not affect the behavior of MongoEngine. 
+ self.assertEqual(None, people._len) self.assertEqual(q, 1) list(people) @@ -3267,6 +3814,27 @@ class QuerySetTest(unittest.TestCase): people.count() # count is cached self.assertEqual(q, 1) + def test_no_cached_queryset(self): + class Person(Document): + name = StringField() + + Person.drop_collection() + for i in xrange(100): + Person(name="No: %s" % i).save() + + with query_counter() as q: + self.assertEqual(q, 0) + people = Person.objects.no_cache() + + [x for x in people] + self.assertEqual(q, 1) + + list(people) + self.assertEqual(q, 2) + + people.count() + self.assertEqual(q, 3) + def test_cache_not_cloned(self): class User(Document): @@ -3288,6 +3856,34 @@ class QuerySetTest(unittest.TestCase): self.assertEqual("%s" % users, "[]") self.assertEqual(1, len(users._result_cache)) + def test_no_cache(self): + """Ensure you can add meta data to file""" + + class Noddy(Document): + fields = DictField() + + Noddy.drop_collection() + for i in xrange(100): + noddy = Noddy() + for j in range(20): + noddy.fields["key"+str(j)] = "value "+str(j) + noddy.save() + + docs = Noddy.objects.no_cache() + + counter = len([1 for i in docs]) + self.assertEqual(counter, 100) + + self.assertEqual(len(list(docs)), 100) + self.assertRaises(TypeError, lambda: len(docs)) + + with query_counter() as q: + self.assertEqual(q, 0) + list(docs) + self.assertEqual(q, 1) + list(docs) + self.assertEqual(q, 2) + def test_nested_queryset_iterator(self): # Try iterating the same queryset twice, nested. 
names = ['Alice', 'Bob', 'Chuck', 'David', 'Eric', 'Francis', 'George'] @@ -3419,6 +4015,128 @@ class QuerySetTest(unittest.TestCase): '_cls': 'Animal.Cat' }) + def test_can_have_field_same_name_as_query_operator(self): + + class Size(Document): + name = StringField() + + class Example(Document): + size = ReferenceField(Size) + + Size.drop_collection() + Example.drop_collection() + + instance_size = Size(name="Large").save() + Example(size=instance_size).save() + + self.assertEqual(Example.objects(size=instance_size).count(), 1) + self.assertEqual(Example.objects(size__in=[instance_size]).count(), 1) + + def test_cursor_in_an_if_stmt(self): + + class Test(Document): + test_field = StringField() + + Test.drop_collection() + queryset = Test.objects + + if queryset: + raise AssertionError('Empty cursor returns True') + + test = Test() + test.test_field = 'test' + test.save() + + queryset = Test.objects + if not test: + raise AssertionError('Cursor has data and returned False') + + queryset.next() + if not queryset: + raise AssertionError('Cursor has data and it must returns True,' + ' even in the last item.') + + def test_bool_performance(self): + + class Person(Document): + name = StringField() + + Person.drop_collection() + for i in xrange(100): + Person(name="No: %s" % i).save() + + with query_counter() as q: + if Person.objects: + pass + + self.assertEqual(q, 1) + op = q.db.system.profile.find({"ns": + {"$ne": "%s.system.indexes" % q.db.name}})[0] + + self.assertEqual(op['nreturned'], 1) + + + def test_bool_with_ordering(self): + + class Person(Document): + name = StringField() + + Person.drop_collection() + Person(name="Test").save() + + qs = Person.objects.order_by('name') + + with query_counter() as q: + + if qs: + pass + + op = q.db.system.profile.find({"ns": + {"$ne": "%s.system.indexes" % q.db.name}})[0] + + self.assertFalse('$orderby' in op['query'], + 'BaseQuerySet cannot use orderby in if stmt') + + with query_counter() as p: + + for x in qs: + pass + + 
op = p.db.system.profile.find({"ns": + {"$ne": "%s.system.indexes" % q.db.name}})[0] + + self.assertTrue('$orderby' in op['query'], + 'BaseQuerySet cannot remove orderby in for loop') + + def test_bool_with_ordering_from_meta_dict(self): + + class Person(Document): + name = StringField() + meta = { + 'ordering': ['name'] + } + + Person.drop_collection() + + Person(name="B").save() + Person(name="C").save() + Person(name="A").save() + + with query_counter() as q: + + if Person.objects: + pass + + op = q.db.system.profile.find({"ns": + {"$ne": "%s.system.indexes" % q.db.name}})[0] + + self.assertFalse('$orderby' in op['query'], + 'BaseQuerySet must remove orderby from meta in boolen test') + + self.assertEqual(Person.objects.first().name, 'A') + self.assertTrue(Person.objects._has_data(), + 'Cursor has data and returned False') + if __name__ == '__main__': unittest.main() diff --git a/tests/queryset/transform.py b/tests/queryset/transform.py index 7886965b..48e01919 100644 --- a/tests/queryset/transform.py +++ b/tests/queryset/transform.py @@ -31,6 +31,31 @@ class TransformTest(unittest.TestCase): self.assertEqual(transform.query(name__exists=True), {'name': {'$exists': True}}) + def test_transform_update(self): + class DicDoc(Document): + dictField = DictField() + + class Doc(Document): + pass + + DicDoc.drop_collection() + Doc.drop_collection() + + doc = Doc().save() + dic_doc = DicDoc().save() + + for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): + update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) + self.assertTrue(isinstance(update[v]["dictField.test"], dict)) + + # Update special cases + update = transform.update(DicDoc, unset__dictField__test=doc) + self.assertEqual(update["$unset"]["dictField.test"], 1) + + update = transform.update(DicDoc, pull__dictField__test=doc) + self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) + + def test_query_field_name(self): """Ensure that the correct 
field name is used when querying. """ @@ -142,6 +167,35 @@ class TransformTest(unittest.TestCase): {'attachments.views.extracted': 'no'}]} self.assertEqual(expected, raw_query) + def test_geojson_PointField(self): + class Location(Document): + loc = PointField() + + update = transform.update(Location, set__loc=[1, 2]) + self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}}) + + update = transform.update(Location, set__loc={"type": "Point", "coordinates": [1,2]}) + self.assertEqual(update, {'$set': {'loc': {"type": "Point", "coordinates": [1,2]}}}) + + def test_geojson_LineStringField(self): + class Location(Document): + line = LineStringField() + + update = transform.update(Location, set__line=[[1, 2], [2, 2]]) + self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}) + + update = transform.update(Location, set__line={"type": "LineString", "coordinates": [[1, 2], [2, 2]]}) + self.assertEqual(update, {'$set': {'line': {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}}}) + + def test_geojson_PolygonField(self): + class Location(Document): + poly = PolygonField() + + update = transform.update(Location, set__poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) + self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}}) + + update = transform.update(Location, set__poly={"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) + self.assertEqual(update, {'$set': {'poly': {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}}}) if __name__ == '__main__': unittest.main() diff --git a/tests/test_connection.py b/tests/test_connection.py index d7926489..a5b1b089 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,6 +1,11 @@ import sys sys.path[0:0] = [""] -import unittest + +try: + import unittest2 as unittest +except ImportError: + import unittest + import datetime import pymongo 
@@ -34,6 +39,17 @@ class ConnectionTest(unittest.TestCase): conn = get_connection('testdb') self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + def test_sharing_connections(self): + """Ensure that connections are shared when the connection settings are exactly the same + """ + connect('mongoenginetest', alias='testdb1') + + expected_connection = get_connection('testdb1') + + connect('mongoenginetest', alias='testdb2') + actual_connection = get_connection('testdb2') + self.assertIs(expected_connection, actual_connection) + def test_connect_uri(self): """Ensure that the connect() method works properly with uri's """ @@ -56,6 +72,35 @@ class ConnectionTest(unittest.TestCase): self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest') + c.admin.system.users.remove({}) + c.mongoenginetest.system.users.remove({}) + + def test_connect_uri_without_db(self): + """Ensure that the connect() method works properly with uri's + without database_name + """ + c = connect(db='mongoenginetest', alias='admin') + c.admin.system.users.remove({}) + c.mongoenginetest.system.users.remove({}) + + c.admin.add_user("admin", "password") + c.admin.authenticate("admin", "password") + c.mongoenginetest.add_user("username", "password") + + self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') + + connect("mongoenginetest", host='mongodb://localhost/') + + conn = get_connection() + self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + + db = get_db() + self.assertTrue(isinstance(db, pymongo.database.Database)) + self.assertEqual(db.name, 'mongoenginetest') + + c.admin.system.users.remove({}) + c.mongoenginetest.system.users.remove({}) + def test_register_connection(self): """Ensure that connections with different aliases may be registered. 
""" @@ -69,6 +114,14 @@ class ConnectionTest(unittest.TestCase): self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest2') + def test_register_connection_defaults(self): + """Ensure that defaults are used when the host and port are None. + """ + register_connection('testdb', 'mongoenginetest', host=None, port=None) + + conn = get_connection('testdb') + self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) + def test_connection_kwargs(self): """Ensure that connection kwargs get passed to pymongo. """ diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py new file mode 100644 index 00000000..c761a41e --- /dev/null +++ b/tests/test_datastructures.py @@ -0,0 +1,107 @@ +import unittest +from mongoengine.base.datastructures import StrictDict, SemiStrictDict + +class TestStrictDict(unittest.TestCase): + def strict_dict_class(self, *args, **kwargs): + return StrictDict.create(*args, **kwargs) + def setUp(self): + self.dtype = self.strict_dict_class(("a", "b", "c")) + def test_init(self): + d = self.dtype(a=1, b=1, c=1) + self.assertEqual((d.a, d.b, d.c), (1, 1, 1)) + + def test_init_fails_on_nonexisting_attrs(self): + self.assertRaises(AttributeError, lambda: self.dtype(a=1, b=2, d=3)) + + def test_eq(self): + d = self.dtype(a=1, b=1, c=1) + dd = self.dtype(a=1, b=1, c=1) + e = self.dtype(a=1, b=1, c=3) + f = self.dtype(a=1, b=1) + g = self.strict_dict_class(("a", "b", "c", "d"))(a=1, b=1, c=1, d=1) + h = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=1) + i = self.strict_dict_class(("a", "c", "b"))(a=1, b=1, c=2) + + self.assertEqual(d, dd) + self.assertNotEqual(d, e) + self.assertNotEqual(d, f) + self.assertNotEqual(d, g) + self.assertNotEqual(f, d) + self.assertEqual(d, h) + self.assertNotEqual(d, i) + + def test_setattr_getattr(self): + d = self.dtype() + d.a = 1 + self.assertEqual(d.a, 1) + self.assertRaises(AttributeError, lambda: d.b) + + def 
test_setattr_raises_on_nonexisting_attr(self): + d = self.dtype() + def _f(): + d.x=1 + self.assertRaises(AttributeError, _f) + + def test_setattr_getattr_special(self): + d = self.strict_dict_class(["items"]) + d.items = 1 + self.assertEqual(d.items, 1) + + def test_get(self): + d = self.dtype(a=1) + self.assertEqual(d.get('a'), 1) + self.assertEqual(d.get('b', 'bla'), 'bla') + + def test_items(self): + d = self.dtype(a=1) + self.assertEqual(d.items(), [('a', 1)]) + d = self.dtype(a=1, b=2) + self.assertEqual(d.items(), [('a', 1), ('b', 2)]) + + def test_mappings_protocol(self): + d = self.dtype(a=1, b=2) + assert dict(d) == {'a': 1, 'b': 2} + assert dict(**d) == {'a': 1, 'b': 2} + + +class TestSemiSrictDict(TestStrictDict): + def strict_dict_class(self, *args, **kwargs): + return SemiStrictDict.create(*args, **kwargs) + + def test_init_fails_on_nonexisting_attrs(self): + # disable irrelevant test + pass + + def test_setattr_raises_on_nonexisting_attr(self): + # disable irrelevant test + pass + + def test_setattr_getattr_nonexisting_attr_succeeds(self): + d = self.dtype() + d.x = 1 + self.assertEqual(d.x, 1) + + def test_init_succeeds_with_nonexisting_attrs(self): + d = self.dtype(a=1, b=1, c=1, x=2) + self.assertEqual((d.a, d.b, d.c, d.x), (1, 1, 1, 2)) + + def test_iter_with_nonexisting_attrs(self): + d = self.dtype(a=1, b=1, c=1, x=2) + self.assertEqual(list(d), ['a', 'b', 'c', 'x']) + + def test_iteritems_with_nonexisting_attrs(self): + d = self.dtype(a=1, b=1, c=1, x=2) + self.assertEqual(list(d.iteritems()), [('a', 1), ('b', 1), ('c', 1), ('x', 2)]) + + def tets_cmp_with_strict_dicts(self): + d = self.dtype(a=1, b=1, c=1) + dd = StrictDict.create(("a", "b", "c"))(a=1, b=1, c=1) + self.assertEqual(d, dd) + + def test_cmp_with_strict_dict_with_nonexisting_attrs(self): + d = self.dtype(a=1, b=1, c=1, x=2) + dd = StrictDict.create(("a", "b", "c", "x"))(a=1, b=1, c=1, x=2) + self.assertEqual(d, dd) + +if __name__ == '__main__': + unittest.main() diff --git 
a/tests/test_dereference.py b/tests/test_dereference.py index a63c7840..dc416007 100644 --- a/tests/test_dereference.py +++ b/tests/test_dereference.py @@ -1145,37 +1145,32 @@ class FieldTest(unittest.TestCase): self.assertEqual(q, 2) - def test_tuples_as_tuples(self): - """ - Ensure that tuples remain tuples when they are - inside a ComplexBaseField - """ - from mongoengine.base import BaseField + def test_objectid_reference_across_databases(self): + # mongoenginetest - Is default connection alias from setUp() + # Register Aliases + register_connection('testdb-1', 'mongoenginetest2') - class EnumField(BaseField): + class User(Document): + name = StringField() + meta = {"db_alias": "testdb-1"} - def __init__(self, **kwargs): - super(EnumField, self).__init__(**kwargs) + class Book(Document): + name = StringField() + author = ReferenceField(User) - def to_mongo(self, value): - return value + # Drops + User.drop_collection() + Book.drop_collection() - def to_python(self, value): - return tuple(value) + user = User(name="Ross").save() + Book(name="MongoEngine for pros", author=user).save() - class TestDoc(Document): - items = ListField(EnumField()) + # Can't use query_counter across databases - so test the _data object + book = Book.objects.first() + self.assertFalse(isinstance(book._data['author'], User)) - TestDoc.drop_collection() - tuples = [(100, 'Testing')] - doc = TestDoc() - doc.items = tuples - doc.save() - x = TestDoc.objects().get() - self.assertTrue(x is not None) - self.assertTrue(len(x.items) == 1) - self.assertTrue(tuple(x.items[0]) in tuples) - self.assertTrue(x.items[0] in tuples) + book.select_related() + self.assertTrue(isinstance(book._data['author'], User)) def test_non_ascii_pk(self): """ @@ -1200,6 +1195,30 @@ class FieldTest(unittest.TestCase): self.assertEqual(2, len([brand for bg in brand_groups for brand in bg.brands])) + def test_dereferencing_embedded_listfield_referencefield(self): + class Tag(Document): + meta = {'collection': 'tags'} + 
name = StringField() + + class Post(EmbeddedDocument): + body = StringField() + tags = ListField(ReferenceField("Tag", dbref=True)) + + class Page(Document): + meta = {'collection': 'pages'} + tags = ListField(ReferenceField("Tag", dbref=True)) + posts = ListField(EmbeddedDocumentField(Post)) + + Tag.drop_collection() + Page.drop_collection() + + tag = Tag(name='test').save() + post = Post(body='test body', tags=[tag]) + Page(tags=[tag], posts=[post]).save() + + page = Page.objects.first() + self.assertEqual(page.tags[0], page.posts[0].tags[0]) + if __name__ == '__main__': unittest.main() diff --git a/tests/test_django.py b/tests/test_django.py index 63e3245a..56a61640 100644 --- a/tests/test_django.py +++ b/tests/test_django.py @@ -2,48 +2,45 @@ import sys sys.path[0:0] = [""] import unittest from nose.plugins.skip import SkipTest -from mongoengine.python_support import PY3 from mongoengine import * + +from mongoengine.django.shortcuts import get_document_or_404 + +from django.http import Http404 +from django.template import Context, Template +from django.conf import settings +from django.core.paginator import Paginator + +settings.configure( + USE_TZ=True, + INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'), + AUTH_USER_MODEL=('mongo_auth.MongoUser'), + AUTHENTICATION_BACKENDS = ('mongoengine.django.auth.MongoEngineBackend',) +) + try: - from mongoengine.django.shortcuts import get_document_or_404 - - from django.http import Http404 - from django.template import Context, Template - from django.conf import settings - from django.core.paginator import Paginator - - settings.configure( - USE_TZ=True, - INSTALLED_APPS=('django.contrib.auth', 'mongoengine.django.mongo_auth'), - AUTH_USER_MODEL=('mongo_auth.MongoUser'), + from django.contrib.auth import authenticate, get_user_model + from mongoengine.django.auth import User + from mongoengine.django.mongo_auth.models import ( + MongoUser, + MongoUserManager, + get_user_document, ) - - try: - from 
django.contrib.auth import authenticate, get_user_model - from mongoengine.django.auth import User - from mongoengine.django.mongo_auth.models import MongoUser, MongoUserManager - DJ15 = True - except Exception: - DJ15 = False - from django.contrib.sessions.tests import SessionTestsMixin - from mongoengine.django.sessions import SessionStore, MongoSession -except Exception, err: - if PY3: - SessionTestsMixin = type # dummy value so no error - SessionStore = None # dummy value so no error - else: - raise err - - + DJ15 = True +except Exception: + DJ15 = False +from django.contrib.sessions.tests import SessionTestsMixin +from mongoengine.django.sessions import SessionStore, MongoSession from datetime import tzinfo, timedelta ZERO = timedelta(0) + class FixedOffset(tzinfo): """Fixed offset in minutes east from UTC.""" def __init__(self, offset, name): - self.__offset = timedelta(minutes = offset) + self.__offset = timedelta(minutes=offset) self.__name = name def utcoffset(self, dt): @@ -70,8 +67,6 @@ def activate_timezone(tz): class QuerySetTest(unittest.TestCase): def setUp(self): - if PY3: - raise SkipTest('django does not have Python 3 support') connect(db='mongoenginetest') class Person(Document): @@ -173,6 +168,8 @@ class QuerySetTest(unittest.TestCase): class Note(Document): text = StringField() + Note.drop_collection() + for i in xrange(1, 101): Note(name="Note: %s" % i).save() @@ -223,8 +220,6 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): backend = SessionStore def setUp(self): - if PY3: - raise SkipTest('django does not have Python 3 support') connect(db='mongoenginetest') MongoSession.drop_collection() super(MongoDBSessionTest, self).setUp() @@ -262,17 +257,18 @@ class MongoAuthTest(unittest.TestCase): } def setUp(self): - if PY3: - raise SkipTest('django does not have Python 3 support') if not DJ15: raise SkipTest('mongo_auth requires Django 1.5') connect(db='mongoenginetest') User.drop_collection() super(MongoAuthTest, self).setUp() - 
def test_user_model(self): + def test_get_user_model(self): self.assertEqual(get_user_model(), MongoUser) + def test_get_user_document(self): + self.assertEqual(get_user_document(), User) + def test_user_manager(self): manager = get_user_model()._default_manager self.assertTrue(isinstance(manager, MongoUserManager)) diff --git a/tests/test_signals.py b/tests/test_signals.py index 50e5e6b8..3d0cbb3e 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -54,7 +54,9 @@ class SignalTests(unittest.TestCase): @classmethod def post_save(cls, sender, document, **kwargs): + dirty_keys = document._delta()[0].keys() + document._delta()[1].keys() signal_output.append('post_save signal, %s' % document) + signal_output.append('post_save dirty keys, %s' % dirty_keys) if 'created' in kwargs: if kwargs['created']: signal_output.append('Is created') @@ -203,6 +205,7 @@ class SignalTests(unittest.TestCase): "pre_save_post_validation signal, Bill Shakespeare", "Is created", "post_save signal, Bill Shakespeare", + "post_save dirty keys, ['name']", "Is created" ]) @@ -213,6 +216,7 @@ class SignalTests(unittest.TestCase): "pre_save_post_validation signal, William Shakespeare", "Is updated", "post_save signal, William Shakespeare", + "post_save dirty keys, ['name']", "Is updated" ])