Improve Fields documentation + remove versionadded/changed as it's not maintained
commit 3926473917
parent 2ec454447f
@@ -2,8 +2,6 @@
GridFS
======

.. versionadded:: 0.4

Writing
-------

@@ -265,8 +265,6 @@ class ComplexBaseField(BaseField):
Allows for nesting of embedded documents inside complex types.
Handles the lazy dereferencing of a queryset by lazily dereferencing all
items in a list / dict rather than one at a time.

.. versionadded:: 0.5
"""

field = None
@@ -521,8 +519,6 @@ class ObjectIdField(BaseField):

class GeoJsonBaseField(BaseField):
"""A geo json field storing a geojson style object.

.. versionadded:: 0.8
"""

_geo_index = pymongo.GEOSPHERE

@@ -74,8 +74,6 @@ def _get_connection_settings(
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
for example maxpoolsize, tz_aware, etc. See the documentation
for pymongo's `MongoClient` for a full list.

.. versionchanged:: 0.10.6 - added mongomock support
"""
conn_settings = {
"name": name or db or DEFAULT_DATABASE_NAME,
@@ -201,8 +199,6 @@ def register_connection(
:param kwargs: ad-hoc parameters to be passed into the pymongo driver,
for example maxpoolsize, tz_aware, etc. See the documentation
for pymongo's `MongoClient` for a full list.

.. versionchanged:: 0.10.6 - added mongomock support
"""
conn_settings = _get_connection_settings(
db=db,
@@ -386,8 +382,6 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):

See the docstring for `register_connection` for more details about all
supported kwargs.

.. versionchanged:: 0.6 - added multiple database support.
"""
if alias in _connections:
prev_conn_setting = _connection_settings[alias]

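For context, a minimal sketch of how `connect` and `register_connection` are typically used; the database names and the "analytics" alias here are invented for illustration:

    from mongoengine import connect, register_connection

    # Default connection, used by documents unless meta["db_alias"] says otherwise
    connect("main_db", host="mongodb://localhost:27017")

    # A second named connection; documents opt in via meta = {"db_alias": "analytics"}
    register_connection("analytics", db="analytics_db", host="mongodb://localhost:27017")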
@@ -1,5 +1,4 @@
import re
import warnings

from bson.dbref import DBRef
import pymongo
@@ -367,15 +366,6 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
meta['cascade'] = True. Also you can pass different kwargs to
the cascade save using cascade_kwargs which overwrites the
existing kwargs with custom values.
.. versionchanged:: 0.8.5
Optional save_condition that only overwrites existing documents
if the condition is satisfied in the current db record.
.. versionchanged:: 0.10
:class:`OperationError` exception raised if save_condition fails.
.. versionchanged:: 0.10.1
:class: save_condition failure now raises a `SaveConditionError`
.. versionchanged:: 0.10.7
Add signal_kwargs argument
"""
signal_kwargs = signal_kwargs or {}
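A hedged sketch of the `save_condition` behaviour described in the docstring above; the `Page` document and its fields are invented for illustration:

    from mongoengine import Document, IntField, StringField
    from mongoengine.errors import SaveConditionError

    class Page(Document):
        title = StringField()
        version = IntField(default=1)

    page = Page(title="draft").save()
    page.title = "published"
    try:
        # Only writes if the stored record still has version == 1
        page.save(save_condition={"version": 1})
    except SaveConditionError:
        pass  # another writer changed the document first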
@@ -714,8 +704,6 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
def select_related(self, max_depth=1):
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
a maximum depth in order to cut down the number queries to mongodb.

.. versionadded:: 0.5
"""
DeReference = _import_class("DeReference")
DeReference()([self], max_depth + 1)
@@ -726,10 +714,6 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):

:param fields: (optional) args list of fields to reload
:param max_depth: (optional) depth of dereferencing to follow

.. versionadded:: 0.1.2
.. versionchanged:: 0.6 Now chainable
.. versionchanged:: 0.9 Can provide specific fields to reload
"""
max_depth = 1
if fields and isinstance(fields[0], int):
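For context, the reload behaviour described above in a short sketch; the `BlogPost` document and field names are placeholders:

    post = BlogPost.objects.first()
    post.reload()                 # refresh every field from the database
    post.reload("title", "tags")  # refresh only the listed fields; chainable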
@@ -1088,8 +1072,6 @@ class MapReduceDocument:
an ``ObjectId`` found in the given ``collection``,
the object can be accessed via the ``object`` property.
:param value: The result(s) for this key.

.. versionadded:: 0.3
"""

def __init__(self, document, collection, key, value):

@@ -36,7 +36,6 @@ from mongoengine.common import _import_class
from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
from mongoengine.document import Document, EmbeddedDocument
from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError
from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version
from mongoengine.queryset import DO_NOTHING
from mongoengine.queryset.base import BaseQuerySet
from mongoengine.queryset.transform import STRING_OPERATORS
@@ -101,6 +100,11 @@ class StringField(BaseField):
"""A unicode string field."""

def __init__(self, regex=None, max_length=None, min_length=None, **kwargs):
"""
:param regex: (optional) A string pattern that will be applied during validation
:param max_length: (optional) A max length that will be applied during validation
:param min_length: (optional) A min length that will be applied during validation
"""
self.regex = re.compile(regex) if regex else None
self.max_length = max_length
self.min_length = min_length
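A small sketch of the validation parameters documented above; the `User` document is hypothetical:

    from mongoengine import Document, StringField

    class User(Document):
        # regex, min_length and max_length are all enforced at validation time
        username = StringField(regex=r"^[a-z0-9_]+$", min_length=3, max_length=30, required=True)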
@@ -158,7 +162,6 @@ class StringField(BaseField):
class URLField(StringField):
"""A field that validates input as an URL.

.. versionadded:: 0.3
"""

_URL_REGEX = LazyRegexCompiler(
@@ -174,6 +177,10 @@ class URLField(StringField):
_URL_SCHEMES = ["http", "https", "ftp", "ftps"]

def __init__(self, url_regex=None, schemes=None, **kwargs):
"""
:param url_regex: (optional) Overwrite the default regex used for validation
:param schemes: (optional) Overwrite the default URL schemes that are allowed
"""
self.url_regex = url_regex or self._URL_REGEX
self.schemes = schemes or self._URL_SCHEMES
super().__init__(**kwargs)
@@ -192,7 +199,6 @@ class URLField(StringField):
class EmailField(StringField):
"""A field that validates input as an email address.

.. versionadded:: 0.4
"""

USER_REGEX = LazyRegexCompiler(
@@ -229,16 +235,10 @@ class EmailField(StringField):
*args,
**kwargs
):
"""Initialize the EmailField.

Args:
domain_whitelist (list) - list of otherwise invalid domain
names which you'd like to support.
allow_utf8_user (bool) - if True, the user part of the email
address can contain UTF8 characters.
False by default.
allow_ip_domain (bool) - if True, the domain part of the email
can be a valid IPv4 or IPv6 address.
"""
:param domain_whitelist: (optional) list of valid domain names applied during validation
:param allow_utf8_user: Allow user part of the email to contain utf8 char
:param allow_ip_domain: Allow domain part of the email to be an IPv4 or IPv6 address
"""
self.domain_whitelist = domain_whitelist or []
self.allow_utf8_user = allow_utf8_user
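For context, a sketch of how these two string subclasses are typically declared; the `Account` document is made up:

    from mongoengine import Document, EmailField, URLField

    class Account(Document):
        email = EmailField(domain_whitelist=["localhost"], allow_ip_domain=True)
        homepage = URLField(schemes=["http", "https"])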
@@ -310,6 +310,10 @@ class IntField(BaseField):
"""32-bit integer field."""

def __init__(self, min_value=None, max_value=None, **kwargs):
"""
:param min_value: (optional) A min value that will be applied during validation
:param max_value: (optional) A max value that will be applied during validation
"""
self.min_value, self.max_value = min_value, max_value
super().__init__(**kwargs)
@@ -343,6 +347,10 @@ class LongField(BaseField):
"""64-bit integer field. (Equivalent to IntField since the support to Python2 was dropped)"""

def __init__(self, min_value=None, max_value=None, **kwargs):
"""
:param min_value: (optional) A min value that will be applied during validation
:param max_value: (optional) A max value that will be applied during validation
"""
self.min_value, self.max_value = min_value, max_value
super().__init__(**kwargs)
@@ -379,6 +387,10 @@ class FloatField(BaseField):
"""Floating point number field."""

def __init__(self, min_value=None, max_value=None, **kwargs):
"""
:param min_value: (optional) A min value that will be applied during validation
:param max_value: (optional) A max value that will be applied during validation
"""
self.min_value, self.max_value = min_value, max_value
super().__init__(**kwargs)
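A minimal sketch of the min/max validation documented for the numeric fields above; the `Product` document is hypothetical:

    from mongoengine import Document, FloatField, IntField

    class Product(Document):
        quantity = IntField(min_value=0)
        rating = FloatField(min_value=0.0, max_value=5.0)

    Product(quantity=-1, rating=2.5).validate()  # raises ValidationError for quantity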
@@ -416,8 +428,6 @@ class DecimalField(BaseField):
"""Fixed-point decimal number field. Stores the value as a float by default unless `force_string` is used.
If using floats, beware of Decimal to float conversion (potential precision loss)

.. versionchanged:: 0.8
.. versionadded:: 0.3
"""

def __init__(
@@ -430,8 +440,8 @@ class DecimalField(BaseField):
**kwargs
):
"""
:param min_value: Validation rule for the minimum acceptable value.
:param max_value: Validation rule for the maximum acceptable value.
:param min_value: (optional) A min value that will be applied during validation
:param max_value: (optional) A max value that will be applied during validation
:param force_string: Store the value as a string (instead of a float).
Be aware that this affects query sorting and operation like lte, gte (as string comparison is applied)
and some query operator won't work (e.g. inc, dec)
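As a sketch of the storage trade-off described above; the `Invoice` document is invented:

    from mongoengine import Document, DecimalField

    class Invoice(Document):
        # stored as a float (the default): fine for sorting, may lose precision
        subtotal = DecimalField(min_value=0)
        # stored as a string: exact, but range queries compare strings
        total = DecimalField(min_value=0, force_string=True)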
@@ -498,10 +508,7 @@ class DecimalField(BaseField):


class BooleanField(BaseField):
"""Boolean field type.

.. versionadded:: 0.1.2
"""
"""Boolean field type."""

def to_python(self, value):
try:
@@ -551,7 +558,8 @@ class DateTimeField(BaseField):

return self._parse_datetime(value)

def _parse_datetime(self, value):
@staticmethod
def _parse_datetime(value):
# Attempt to parse a datetime from a string
value = value.strip()
if not value:
@@ -627,8 +635,6 @@ class ComplexDateTimeField(StringField):
keyword when initializing the field.

Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow)

.. versionadded:: 0.5
"""

def __init__(self, separator=",", **kwargs):
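A short sketch of the note above about defaulting to the current datetime; the `Event` document is illustrative:

    from datetime import datetime
    from mongoengine import ComplexDateTimeField, DateTimeField, Document

    class Event(Document):
        created = DateTimeField(default=datetime.utcnow)  # a callable, evaluated per document
        logged = ComplexDateTimeField(separator=",")      # microsecond-precise string storage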
@@ -975,8 +981,6 @@ class EmbeddedDocumentListField(ListField):
.. note::
The only valid list values are subclasses of
:class:`~mongoengine.EmbeddedDocument`.

.. versionadded:: 0.9
"""

def __init__(self, document_type, **kwargs):
@@ -999,9 +1003,6 @@ class SortedListField(ListField):
save the whole list then other processes trying to save the whole list
as well could overwrite changes. The safest way to append to a list is
to perform a push operation.

.. versionadded:: 0.4
.. versionchanged:: 0.6 - added reverse keyword
"""

_ordering = None
@@ -1058,9 +1059,6 @@ class DictField(ComplexBaseField):

.. note::
Required means it cannot be empty - as the default for DictFields is {}

.. versionadded:: 0.3
.. versionchanged:: 0.5 - Can now handle complex / varying types of data
"""

def __init__(self, field=None, *args, **kwargs):
@@ -1124,8 +1122,6 @@ class MapField(DictField):
"""A field that maps a name to a specified field type. Similar to
a DictField, except the 'value' of each item must match the specified
field type.

.. versionadded:: 0.5
"""

def __init__(self, field=None, *args, **kwargs):
@@ -1173,8 +1169,6 @@ class ReferenceField(BaseField):
org = ReferenceField('Org', reverse_delete_rule=CASCADE)

User.register_delete_rule(Org, 'owner', DENY)

.. versionchanged:: 0.5 added `reverse_delete_rule`
"""

def __init__(
@@ -1309,8 +1303,6 @@ class ReferenceField(BaseField):
class CachedReferenceField(BaseField):
"""
A referencefield with cache fields to purpose pseudo-joins

.. versionadded:: 0.9
"""

def __init__(self, document_type, fields=None, auto_sync=True, **kwargs):
@@ -1485,8 +1477,6 @@ class GenericReferenceField(BaseField):
it.

* You can use the choices param to limit the acceptable Document types

.. versionadded:: 0.3
"""

def __init__(self, *args, **kwargs):
@@ -1692,10 +1682,6 @@ class GridFSError(Exception):

class GridFSProxy:
"""Proxy object to handle writing and reading of files to and from GridFS

.. versionadded:: 0.4
.. versionchanged:: 0.5 - added optional size param to read
.. versionchanged:: 0.6 - added collection name param
"""

_fs = None
@@ -1859,10 +1845,6 @@ class GridFSProxy:

class FileField(BaseField):
"""A GridFS storage field.

.. versionadded:: 0.4
.. versionchanged:: 0.5 added optional size param for read
.. versionchanged:: 0.6 added db_alias for multidb support
"""

proxy_class = GridFSProxy
@@ -1945,11 +1927,7 @@ class FileField(BaseField):


class ImageGridFsProxy(GridFSProxy):
"""
Proxy for ImageField

versionadded: 0.6
"""
"""Proxy for ImageField"""

def put(self, file_obj, **kwargs):
"""
@@ -2083,8 +2061,6 @@ class ImageField(FileField):
:param size: max size to store images, provided as (width, height, force)
if larger, it will be automatically resized (ex: size=(800, 600, True))
:param thumbnail_size: size to generate a thumbnail, provided as (width, height, force)

.. versionadded:: 0.6
"""

proxy_class = ImageGridFsProxy
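For context, a sketch of how the GridFS-backed fields above are typically used; the `Profile` document, file name, and parameter values are invented (ImageField also assumes Pillow is installed):

    from mongoengine import Document, FileField, ImageField

    class Profile(Document):
        resume = FileField()
        photo = ImageField(size=(800, 600, True), thumbnail_size=(160, 160, True))

    profile = Profile()
    with open("resume.pdf", "rb") as fh:
        profile.resume.put(fh, content_type="application/pdf")  # writes through the GridFS proxy
    profile.save()
    data = profile.resume.read()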
@@ -2132,9 +2108,6 @@ class SequenceField(BaseField):
In case the counter is defined in the abstract document, it will be
common to all inherited documents and the default sequence name will
be the class name of the abstract document.

.. versionadded:: 0.5
.. versionchanged:: 0.8 added `value_decorator`
"""

_auto_gen = True
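A brief sketch of the counter behaviour described above; the `Ticket` document is hypothetical:

    from mongoengine import Document, SequenceField, StringField

    class Ticket(Document):
        number = SequenceField()  # auto-increments from a shared counter document
        subject = StringField()

    first = Ticket(subject="a").save()
    second = Ticket(subject="b").save()
    # assuming no concurrent writers, the counter advances by one per save
    assert second.number == first.number + 1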
@@ -2248,8 +2221,6 @@ class SequenceField(BaseField):

class UUIDField(BaseField):
"""A UUID field.

.. versionadded:: 0.6
"""

_binary = None
@@ -2259,9 +2230,6 @@ class UUIDField(BaseField):
Store UUID data in the database

:param binary: if False store as a string.

.. versionchanged:: 0.8.0
.. versionchanged:: 0.6.19
"""
self._binary = binary
super().__init__(**kwargs)
@@ -2306,8 +2274,6 @@ class GeoPointField(BaseField):
representing a geo point. It admits 2d indexes but not "2dsphere" indexes
in MongoDB > 2.4 which are more natural for modeling geospatial points.
See :ref:`geospatial-indexes`

.. versionadded:: 0.4
"""

_geo_index = pymongo.GEO2D
@@ -2339,8 +2305,6 @@ class PointField(GeoJsonBaseField):
to set the value.

Requires mongodb >= 2.4

.. versionadded:: 0.8
"""

_type = "Point"
@@ -2359,8 +2323,6 @@ class LineStringField(GeoJsonBaseField):
You can either pass a dict with the full information or a list of points.

Requires mongodb >= 2.4

.. versionadded:: 0.8
"""

_type = "LineString"
@@ -2382,8 +2344,6 @@ class PolygonField(GeoJsonBaseField):
holes.

Requires mongodb >= 2.4

.. versionadded:: 0.8
"""

_type = "Polygon"
@@ -2403,8 +2363,6 @@ class MultiPointField(GeoJsonBaseField):
to set the value.

Requires mongodb >= 2.6

.. versionadded:: 0.9
"""

_type = "MultiPoint"
@@ -2424,8 +2382,6 @@ class MultiLineStringField(GeoJsonBaseField):
You can either pass a dict with the full information or a list of points.

Requires mongodb >= 2.6

.. versionadded:: 0.9
"""

_type = "MultiLineString"
@@ -2452,8 +2408,6 @@ class MultiPolygonField(GeoJsonBaseField):
of Polygons.

Requires mongodb >= 2.6

.. versionadded:: 0.9
"""

_type = "MultiPolygon"
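For context, a sketch of declaring and querying one of the GeoJSON fields above; the `Place` document and coordinates are illustrative:

    from mongoengine import Document, PointField, StringField

    class Place(Document):
        name = StringField()
        location = PointField()

    Place(name="cafe", location=[2.3522, 48.8566]).save()  # [longitude, latitude]
    nearby = Place.objects(location__near=[2.35, 48.85], location__max_distance=1000)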
@@ -2466,8 +2420,6 @@ class LazyReferenceField(BaseField):
Instead, access will return a :class:`~mongoengine.base.LazyReference` class
instance, allowing access to `pk` or manual dereference by using
``fetch()`` method.

.. versionadded:: 0.15
"""

def __init__(
@@ -2630,8 +2582,6 @@ class GenericLazyReferenceField(GenericReferenceField):
it.

* You can use the choices param to limit the acceptable Document types

.. versionadded:: 0.15
"""

def __init__(self, *args, **kwargs):

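A small sketch of the lazy dereferencing described above; the `Author` and `Book` documents are hypothetical:

    from mongoengine import Document, LazyReferenceField, StringField

    class Author(Document):
        name = StringField()

    class Book(Document):
        author = LazyReferenceField(Author)

    book = Book.objects.first()
    book.author.pk                 # available without hitting the database
    author = book.author.fetch()   # explicit dereference when the full document is needed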
@@ -256,8 +256,6 @@ class BaseQuerySet:
`DocumentName.MultipleObjectsReturned` exception if multiple results
and :class:`~mongoengine.queryset.DoesNotExist` or
`DocumentName.DoesNotExist` if no results are found.

.. versionadded:: 0.3
"""
queryset = self.clone()
queryset = queryset.order_by().limit(2)
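For context, the exception behaviour described above in a short sketch; the `User` document is assumed to be defined elsewhere:

    try:
        user = User.objects.get(email="a@example.com")
    except User.DoesNotExist:
        user = None
    except User.MultipleObjectsReturned:
        raise  # the filter matched more than one document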
@@ -282,8 +280,6 @@ class BaseQuerySet:

def create(self, **kwargs):
"""Create new object. Returns the saved object instance.

.. versionadded:: 0.4
"""
return self._document(**kwargs).save(force_insert=True)
@@ -316,10 +312,6 @@ class BaseQuerySet:

By default returns document instances, set ``load_bulk`` to False to
return just ``ObjectIds``

.. versionadded:: 0.5
.. versionchanged:: 0.10.7
Add signal_kwargs argument
"""
Document = _import_class("Document")
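A sketch of the bulk insert described above; the `Comment` document is hypothetical:

    docs = [Comment(text="first"), Comment(text="second")]
    object_ids = Comment.objects.insert(docs, load_bulk=False)  # skip re-fetching; return ObjectIds only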
@@ -550,8 +542,6 @@ class BaseQuerySet:
:param update: Django-style update keyword arguments

:returns the number of updated documents (unless ``full_result`` is True)

.. versionadded:: 0.2
"""
if not update and not upsert:
raise OperationError("No update parameters, would remove data")
@@ -603,8 +593,6 @@ class BaseQuerySet:
:param update: Django-style update keyword arguments

:returns the new or overwritten document

.. versionadded:: 0.10.2
"""

atomic_update = self.update(
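For context, the Django-style update keywords mentioned above in a short sketch; the `BlogPost` document and its fields are assumed:

    # atomic modifiers are expressed as <operator>__<field>
    BlogPost.objects(slug="intro").update(set__title="Introduction", push__tags="python")
    BlogPost.objects(slug="intro").update_one(inc__views=1, upsert=True)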
@@ -638,7 +626,6 @@ class BaseQuerySet:
:param update: Django-style update keyword arguments
full_result
:returns the number of updated documents (unless ``full_result`` is True)
.. versionadded:: 0.2
"""
return self.update(
upsert=upsert,
@@ -670,8 +657,6 @@ class BaseQuerySet:
:param new: return updated rather than original document
(default ``False``)
:param update: Django-style update keyword arguments

.. versionadded:: 0.9
"""

if remove and new:
@@ -727,8 +712,6 @@ class BaseQuerySet:
`None` if no document exists with that id.

:param object_id: the value for the id of the document to look up

.. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set
"""
queryset = self.clone()
if not queryset._query_obj.empty:
@@ -742,8 +725,6 @@ class BaseQuerySet:
:param object_ids: a list or tuple of ObjectId's
:rtype: dict of ObjectId's as keys and collection-specific
Document subclasses as values.

.. versionadded:: 0.3
"""
doc_map = {}

@@ -785,8 +766,6 @@ class BaseQuerySet:
evaluated against if you are using more than one database.

:param alias: The database alias

.. versionadded:: 0.9
"""

with switch_db(self._document, alias) as cls:
@@ -848,8 +827,6 @@ class BaseQuerySet:
"""Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
:class:`~bson.object_id.ObjectId` a maximum depth in order to cut down
the number queries to mongodb.

.. versionadded:: 0.5
"""
# Make select related work the same for querysets
max_depth += 1
@@ -898,8 +875,6 @@ class BaseQuerySet:

Hinting will not do anything if the corresponding index does not exist.
The last hint applied to this cursor takes precedence over all others.

.. versionadded:: 0.5
"""
queryset = self.clone()
queryset._hint = index
@@ -961,10 +936,6 @@ class BaseQuerySet:

.. note:: This is a command and won't take ordering or limit into
account.

.. versionadded:: 0.4
.. versionchanged:: 0.5 - Fixed handling references
.. versionchanged:: 0.6 - Improved db_field refrence handling
"""
queryset = self.clone()
@@ -1028,9 +999,6 @@ class BaseQuerySet:
field filters.

:param fields: fields to include

.. versionadded:: 0.3
.. versionchanged:: 0.5 - Added subfield support
"""
fields = {f: QueryFieldList.ONLY for f in fields}
return self.fields(True, **fields)
@@ -1049,8 +1017,6 @@ class BaseQuerySet:
field filters.

:param fields: fields to exclude

.. versionadded:: 0.5
"""
fields = {f: QueryFieldList.EXCLUDE for f in fields}
return self.fields(**fields)
@@ -1077,8 +1043,6 @@ class BaseQuerySet:

:param kwargs: A set of keyword arguments identifying what to
include, exclude, or slice.

.. versionadded:: 0.5
"""

# Check for an operator and transform to mongo-style if there is
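For context, a sketch of the field-projection helpers documented above; the `BlogPost` document is assumed:

    BlogPost.objects.only("title", "author")     # load just these fields
    BlogPost.objects.exclude("comments")         # load everything except comments
    BlogPost.objects.fields(slice__comments=5)   # MongoDB $slice projection on a list field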
@@ -1120,8 +1084,6 @@ class BaseQuerySet:
.exclude(). ::

post = BlogPost.objects.exclude('comments').all_fields()

.. versionadded:: 0.5
"""
queryset = self.clone()
queryset._loaded_fields = QueryFieldList(
@@ -1194,9 +1156,6 @@ class BaseQuerySet:
"""Enable or disable snapshot mode when querying.

:param enabled: whether or not snapshot mode is enabled

..versionchanged:: 0.5 - made chainable
.. deprecated:: Ignored with PyMongo 3+
"""
msg = "snapshot is deprecated as it has no impact when using PyMongo 3+."
warnings.warn(msg, DeprecationWarning)
@@ -1208,8 +1167,6 @@ class BaseQuerySet:
"""Enable or disable the default mongod timeout when querying. (no_cursor_timeout option)

:param enabled: whether or not the timeout is used

..versionchanged:: 0.5 - made chainable
"""
queryset = self.clone()
queryset._timeout = enabled
@@ -1308,7 +1265,6 @@ class BaseQuerySet:
parameter will be removed shortly
:param kwargs: (optional) kwargs dictionary to be passed to pymongo's aggregate call
See https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.aggregate
.. versionadded:: 0.9
"""
using_deprecated_interface = isinstance(pipeline, dict) or bool(suppl_pipeline)
user_pipeline = [pipeline] if isinstance(pipeline, dict) else list(pipeline)
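A sketch of passing a pipeline to aggregate as described above; the `BlogPost` document and its fields are hypothetical:

    pipeline = [
        {"$group": {"_id": "$author", "posts": {"$sum": 1}}},
        {"$sort": {"posts": -1}},
    ]
    for row in BlogPost.objects.aggregate(pipeline):
        print(row["_id"], row["posts"])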
@@ -1380,12 +1336,6 @@ class BaseQuerySet:
Map/Reduce changed in server version **>= 1.7.4**. The PyMongo
:meth:`~pymongo.collection.Collection.map_reduce` helper requires
PyMongo version **>= 1.11**.

.. versionchanged:: 0.5
- removed ``keep_temp`` keyword argument, which was only relevant
for MongoDB server versions older than 1.7.4

.. versionadded:: 0.3
"""
queryset = self.clone()
@@ -1522,8 +1472,6 @@ class BaseQuerySet:
.. note:: When using this mode of query, the database will call your
function, or evaluate your predicate clause, for each object
in the collection.

.. versionadded:: 0.5
"""
queryset = self.clone()
where_clause = queryset._sub_js_fields(where_clause)
@@ -1600,9 +1548,6 @@ class BaseQuerySet:
:param field: the field to use
:param normalize: normalize the results so they add to 1.0
:param map_reduce: Use map_reduce over exec_js

.. versionchanged:: 0.5 defaults to map_reduce and can handle embedded
document lookups
"""
if map_reduce:
return self._item_frequencies_map_reduce(field, normalize=normalize)
@@ -1632,8 +1577,6 @@ class BaseQuerySet:

def rewind(self):
"""Rewind the cursor to its unevaluated state.

.. versionadded:: 0.3
"""
self._iter = False
self._cursor.rewind()

@@ -151,8 +151,6 @@ class QuerySet(BaseQuerySet):

def no_cache(self):
"""Convert to a non-caching queryset

.. versionadded:: 0.8.3 Convert to non caching queryset
"""
if self._result_cache is not None:
raise OperationError("QuerySet already cached")
@@ -165,15 +163,11 @@ class QuerySetNoCache(BaseQuerySet):

def cache(self):
"""Convert to a caching queryset

.. versionadded:: 0.8.3 Convert to caching queryset
"""
return self._clone_into(QuerySet(self._document, self._collection))

def __repr__(self):
"""Provides the string representation of the QuerySet

.. versionchanged:: 0.6.13 Now doesnt modify the cursor
"""
if self._iter:
return ".. queryset mid-iteration .."
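For context, a sketch of switching caching behaviour as documented above; the `BlogPost` document is assumed:

    # results of a normal queryset are cached after the first iteration
    qs = BlogPost.objects.no_cache()   # stream results without keeping them in memory
    for post in qs:
        print(post.id)
    cached_again = qs.cache()          # opt back in to result caching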