get rid of six

Bastien Gérard 2020-03-11 23:07:03 +01:00
parent 03e34299f0
commit 8086576677
28 changed files with 118 additions and 156 deletions
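
Everything below applies the same handful of mechanical six-to-Python-3 substitutions, file by file. As a rough summary (mine, not part of the diff):

    # six idiom                                        ->  Python 3 replacement
    # six.iteritems(d) / itervalues(d) / iterkeys(d)   ->  d.items() / d.values() / d.keys()
    # six.next(it), six.moves.range(n)                 ->  next(it), range(n)
    # six.integer_types, six.binary_type               ->  int, bytes
    # six.b("..."), six.u("...")                       ->  "...".encode("latin-1"), plain str literal
    # class C(six.with_metaclass(Meta, Base)): ...     ->  class C(Base, metaclass=Meta): ...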

View File

@@ -50,7 +50,6 @@ All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`
 At the very least, you'll need these two packages to use MongoEngine:
 - pymongo>=3.4
-- six>=1.10.0
 If you utilize a ``DateTimeField``, you might also use a more flexible date parser:

View File

@@ -153,7 +153,7 @@ inherited classes like so: ::
     # 4. Remove indexes
     info = collection.index_information()
-    indexes_to_drop = [key for key, value in info.iteritems()
+    indexes_to_drop = [key for key, value in info.items()
                        if '_types' in dict(value['key'])]
     for index in indexes_to_drop:
         collection.drop_index(index)

View File

@@ -1,7 +1,6 @@
 import weakref
 from bson import DBRef
-from six import iteritems
 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
@@ -360,7 +359,7 @@ class StrictDict(object):
     _classes = {}
     def __init__(self, **kwargs):
-        for k, v in iteritems(kwargs):
+        for k, v in kwargs.items():
             setattr(self, k, v)
     def __getitem__(self, key):
@@ -408,7 +407,7 @@ class StrictDict(object):
         return (key for key in self.__slots__ if hasattr(self, key))
     def __len__(self):
-        return len(list(iteritems(self)))
+        return len(list(self.items()))
     def __eq__(self, other):
         return list(self.items()) == list(other.items())
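
On Python 3, dict.items() already returns a lazy view object, so the six.iteritems() calls removed above can be swapped one-for-one without building intermediate lists. A minimal sketch of the equivalence (toy dict, not the StrictDict class itself):

    d = {"a": 1, "b": 2}
    view = d.items()                 # lazy view, replaces six.iteritems(d)
    assert len(list(view)) == 2      # same trick StrictDict.__len__ now uses
    for k, v in d.items():
        print(k, v)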

View File

@@ -5,8 +5,6 @@ from functools import partial
 from bson import DBRef, ObjectId, SON, json_util
 import pymongo
-import six
-from six import iteritems
 from mongoengine import signals
 from mongoengine.base.common import get_document
@@ -110,7 +108,7 @@ class BaseDocument(object):
         # Assign default values to the instance.
         # We set default values only for fields loaded from DB. See
         # https://github.com/mongoengine/mongoengine/issues/399 for more info.
-        for key, field in iteritems(self._fields):
+        for key, field in self._fields.items():
             if self._db_field_map.get(key, key) in __only_fields:
                 continue
             value = getattr(self, key, None)
@@ -122,14 +120,14 @@ class BaseDocument(object):
         # Set passed values after initialisation
         if self._dynamic:
             dynamic_data = {}
-            for key, value in iteritems(values):
+            for key, value in values.items():
                 if key in self._fields or key == "_id":
                     setattr(self, key, value)
                 else:
                     dynamic_data[key] = value
         else:
             FileField = _import_class("FileField")
-            for key, value in iteritems(values):
+            for key, value in values.items():
                 key = self._reverse_db_field_map.get(key, key)
                 if key in self._fields or key in ("id", "pk", "_cls"):
                     if __auto_convert and value is not None:
@@ -145,7 +143,7 @@ class BaseDocument(object):
         if self._dynamic:
             self._dynamic_lock = False
-            for key, value in iteritems(dynamic_data):
+            for key, value in dynamic_data.items():
                 setattr(self, key, value)
         # Flag initialised
@@ -575,7 +573,7 @@ class BaseDocument(object):
         if not hasattr(data, "items"):
             iterator = enumerate(data)
         else:
-            iterator = iteritems(data)
+            iterator = data.items()
         for index_or_key, value in iterator:
             item_key = "%s%s." % (base_key, index_or_key)
@@ -741,7 +739,7 @@ class BaseDocument(object):
         # Convert SON to a data dict, making sure each key is a string and
         # corresponds to the right db field.
         data = {}
-        for key, value in iteritems(son):
+        for key, value in son.items():
             key = str(key)
             key = cls._db_field_map.get(key, key)
             data[key] = value
@@ -756,7 +754,7 @@ class BaseDocument(object):
         if not _auto_dereference:
             fields = copy.deepcopy(fields)
-        for field_name, field in iteritems(fields):
+        for field_name, field in fields.items():
             field._auto_dereference = _auto_dereference
             if field.db_field in data:
                 value = data[field.db_field]
@@ -781,7 +779,7 @@ class BaseDocument(object):
         # In STRICT documents, remove any keys that aren't in cls._fields
         if cls.STRICT:
-            data = {k: v for k, v in iteritems(data) if k in cls._fields}
+            data = {k: v for k, v in data.items() if k in cls._fields}
         obj = cls(
             __auto_convert=False, _created=created, __only_fields=only_fields, **data

View File

@@ -4,7 +4,6 @@ import weakref
 from bson import DBRef, ObjectId, SON
 import pymongo
-from six import iteritems
 from mongoengine.base.common import UPDATE_OPERATORS
 from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList
@@ -418,11 +417,11 @@ class ComplexBaseField(BaseField):
         if self.field:
             value_dict = {
                 key: self.field._to_mongo_safe_call(item, use_db_field, fields)
-                for key, item in iteritems(value)
+                for key, item in value.items()
             }
         else:
             value_dict = {}
-            for k, v in iteritems(value):
+            for k, v in value.items():
                 if isinstance(v, Document):
                     # We need the id from the saved object to create the DBRef
                     if v.pk is None:
@@ -461,8 +460,8 @@ class ComplexBaseField(BaseField):
         """If field is provided ensure the value is valid."""
         errors = {}
         if self.field:
-            if hasattr(value, "iteritems") or hasattr(value, "items"):
-                sequence = iteritems(value)
+            if hasattr(value, "items"):
+                sequence = value.items()
             else:
                 sequence = enumerate(value)
             for k, v in sequence:
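
The validation hunk keeps the same duck-typing dispatch, minus the Python 2 branch: anything exposing .items() is treated as a mapping, everything else as a sequence. A rough standalone sketch of that idea (hypothetical helper, not the actual ComplexBaseField code):

    def iter_pairs(value):
        # mappings yield (key, item) pairs, sequences yield (index, item) pairs
        if hasattr(value, "items"):
            return value.items()
        return enumerate(value)

    assert list(iter_pairs({"a": 1})) == [("a", 1)]
    assert list(iter_pairs(["x", "y"])) == [(0, "x"), (1, "y")]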

View File

@@ -1,8 +1,6 @@
 import itertools
 import warnings
-from six import iteritems, itervalues
 from mongoengine.base.common import _document_registry
 from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
 from mongoengine.common import _import_class
@@ -68,7 +66,7 @@ class DocumentMetaclass(type):
             # Standard object mixin - merge in any Fields
             if not hasattr(base, "_meta"):
                 base_fields = {}
-                for attr_name, attr_value in iteritems(base.__dict__):
+                for attr_name, attr_value in base.__dict__.items():
                     if not isinstance(attr_value, BaseField):
                         continue
                     attr_value.name = attr_name
@@ -80,7 +78,7 @@ class DocumentMetaclass(type):
         # Discover any document fields
         field_names = {}
-        for attr_name, attr_value in iteritems(attrs):
+        for attr_name, attr_value in attrs.items():
             if not isinstance(attr_value, BaseField):
                 continue
             attr_value.name = attr_name
@@ -110,9 +108,7 @@ class DocumentMetaclass(type):
         attrs["_fields_ordered"] = tuple(
             i[1]
-            for i in sorted(
-                (v.creation_counter, v.name) for v in itervalues(doc_fields)
-            )
+            for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
         )
         #
@@ -190,7 +186,7 @@ class DocumentMetaclass(type):
         # f.__dict__.update({"im_self": getattr(f, "__self__")})
         # Handle delete rules
-        for field in itervalues(new_class._fields):
+        for field in new_class._fields.values():
             f = field
             if f.owner_document is None:
                 f.owner_document = new_class
@@ -399,7 +395,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
         new_class.objects = QuerySetManager()
         # Validate the fields and set primary key if needed
-        for field_name, field in iteritems(new_class._fields):
+        for field_name, field in new_class._fields.items():
             if field.primary_key:
                 # Ensure only one primary key is set
                 current_pk = new_class._meta.get("id_field")
@@ -476,7 +472,7 @@ class MetaDict(dict):
     _merge_options = ("indexes",)
     def merge(self, new_options):
-        for k, v in iteritems(new_options):
+        for k, v in new_options.items():
             if k in self._merge_options:
                 self[k] = self.get(k, []) + v
             else:
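
The _fields_ordered hunk relies on each field instance carrying a creation_counter, so sorting by it recovers declaration order; only the six.itervalues() call changes. A hedged toy reproduction of that sort (FakeField is made up for illustration, not a MongoEngine class):

    class FakeField:
        _counter = 0
        def __init__(self, name):
            FakeField._counter += 1
            self.creation_counter = FakeField._counter
            self.name = name

    doc_fields = {"b": FakeField("b"), "a": FakeField("a")}
    ordered = tuple(
        i[1] for i in sorted((v.creation_counter, v.name) for v in doc_fields.values())
    )
    assert ordered == ("b", "a")  # declaration order, not key order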

View File

@@ -1,7 +1,6 @@
 from contextlib import contextmanager
 from pymongo.write_concern import WriteConcern
-from six import iteritems
 from mongoengine.common import _import_class
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
@@ -123,7 +122,7 @@ class no_dereference(object):
         self.deref_fields = [
             k
-            for k, v in iteritems(self.cls._fields)
+            for k, v in self.cls._fields.items()
             if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField))
         ]

View File

@@ -1,6 +1,4 @@
 from bson import DBRef, SON
-import six
-from six import iteritems
 from mongoengine.base import (
     BaseDict,
@@ -79,7 +77,7 @@ class DeReference(object):
         def _get_items_from_dict(items):
             new_items = {}
-            for k, v in iteritems(items):
+            for k, v in items.items():
                 value = v
                 if isinstance(v, list):
                     value = _get_items_from_list(v)
@@ -120,7 +118,7 @@ class DeReference(object):
         depth += 1
         for item in iterator:
             if isinstance(item, (Document, EmbeddedDocument)):
-                for field_name, field in iteritems(item._fields):
+                for field_name, field in item._fields.items():
                     v = item._data.get(field_name, None)
                     if isinstance(v, LazyReference):
                         # LazyReference inherits DBRef but should not be dereferenced here !
@@ -136,7 +134,7 @@ class DeReference(object):
                            getattr(field, "field", None), "document_type", None
                        )
                        references = self._find_references(v, depth)
-                       for key, refs in iteritems(references):
+                       for key, refs in references.items():
                            if isinstance(
                                field_cls, (Document, TopLevelDocumentMetaclass)
                            ):
@@ -153,7 +151,7 @@ class DeReference(object):
                        )
            elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                references = self._find_references(item, depth - 1)
-               for key, refs in iteritems(references):
+               for key, refs in references.items():
                    reference_map.setdefault(key, set()).update(refs)
        return reference_map
@@ -162,7 +160,7 @@ class DeReference(object):
         """Fetch all references and convert to their document objects
         """
         object_map = {}
-        for collection, dbrefs in iteritems(self.reference_map):
+        for collection, dbrefs in self.reference_map.items():
             # we use getattr instead of hasattr because hasattr swallows any exception under python2
             # so it could hide nasty things without raising exceptions (cfr bug #1688))
@@ -174,7 +172,7 @@ class DeReference(object):
                     dbref for dbref in dbrefs if (col_name, dbref) not in object_map
                 ]
                 references = collection.objects.in_bulk(refs)
-                for key, doc in iteritems(references):
+                for key, doc in references.items():
                     object_map[(col_name, key)] = doc
             else:  # Generic reference: use the refs data to convert to document
                 if isinstance(doc_type, (ListField, DictField, MapField)):
@@ -250,7 +248,7 @@ class DeReference(object):
             data = []
         else:
             is_list = False
-            iterator = iteritems(items)
+            iterator = items.items()
             data = {}
         depth += 1

View File

@@ -4,8 +4,6 @@ import warnings
 from bson.dbref import DBRef
 import pymongo
 from pymongo.read_preferences import ReadPreference
-import six
-from six import iteritems
 from mongoengine import signals
 from mongoengine.base import (
@@ -55,7 +53,7 @@ class InvalidCollectionError(Exception):
     pass
-class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)):
+class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
     """A :class:`~mongoengine.Document` that isn't stored in its own
     collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
     fields on :class:`~mongoengine.Document`\ s through the
@@ -103,7 +101,7 @@ class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
         return data
-class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):
+class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
     """The base class used for defining the structure and properties of
     collections of documents stored in MongoDB. Inherit from this class, and
     add fields as class attributes to define a document's structure.
@@ -632,7 +630,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
         # Delete FileFields separately
         FileField = _import_class("FileField")
-        for name, field in iteritems(self._fields):
+        for name, field in self._fields.items():
             if isinstance(field, FileField):
                 getattr(self, name).delete()
@@ -1029,7 +1027,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
         return {"missing": missing, "extra": extra}
-class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)):
+class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass):
     """A Dynamic Document class allowing flexible, expandable and uncontrolled
     schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
     way as an ordinary document but has expanded style properties. Any data
@@ -1060,7 +1058,7 @@ class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass):
         super(DynamicDocument, self).__delattr__(*args, **kwargs)
-class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)):
+class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass):
     """A Dynamic Embedded Document class allowing flexible, expandable and
     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
     information about dynamic documents.
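
six.with_metaclass exists only to spell the metaclass in a way both Python 2 and 3 accept; with Python 2 dropped, the class statement can take metaclass= directly, which is what the Document classes above now do. A minimal illustration with a toy metaclass (names here are made up, not MongoEngine's):

    class Meta(type):
        def __new__(mcs, name, bases, attrs):
            attrs.setdefault("created_by_meta", True)
            return super().__new__(mcs, name, bases, attrs)

    # old, six-based spelling:  class Base(six.with_metaclass(Meta, object)): ...
    # Python 3 only spelling used in the diff:
    class Base(object, metaclass=Meta):
        pass

    assert Base.created_by_meta is True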

View File

@@ -1,6 +1,5 @@
 from collections import defaultdict
-from six import iteritems
 __all__ = (
     "NotRegistered",
@@ -125,7 +124,7 @@ class ValidationError(AssertionError):
         def build_dict(source):
             errors_dict = {}
             if isinstance(source, dict):
-                for field_name, error in iteritems(source):
+                for field_name, error in source.items():
                     errors_dict[field_name] = build_dict(error)
             elif isinstance(source, ValidationError) and source.errors:
                 return build_dict(source.errors)
@@ -146,15 +145,15 @@ class ValidationError(AssertionError):
             if isinstance(value, list):
                 value = " ".join([generate_key(k) for k in value])
             elif isinstance(value, dict):
-                value = " ".join([generate_key(v, k) for k, v in iteritems(value)])
+                value = " ".join([generate_key(v, k) for k, v in value.items()])
             results = "%s.%s" % (prefix, value) if prefix else value
             return results
         error_dict = defaultdict(list)
-        for k, v in iteritems(self.to_dict()):
+        for k, v in self.to_dict().items():
             error_dict[generate_key(v)].append(k)
-        return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)])
+        return " ".join(["%s: %s" % (k, v) for k, v in error_dict.items()])
 class DeprecatedError(Exception):

View File

@@ -13,8 +13,6 @@ from bson.int64 import Int64
 import gridfs
 import pymongo
 from pymongo import ReturnDocument
-import six
-from six import iteritems
 try:
     import dateutil
@@ -205,7 +203,7 @@ class EmailField(StringField):
     )
     UTF8_USER_REGEX = LazyRegexCompiler(
-        six.u(
+        (
            # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to
            # include `UTF8-non-ascii`.
            r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+)*\Z"
@@ -387,7 +385,7 @@ class FloatField(BaseField):
         return value
     def validate(self, value):
-        if isinstance(value, six.integer_types):
+        if isinstance(value, int):
             try:
                 value = float(value)
             except OverflowError:
@@ -868,12 +866,12 @@ class DynamicField(BaseField):
             value = {k: v for k, v in enumerate(value)}
         data = {}
-        for k, v in iteritems(value):
+        for k, v in value.items():
             data[k] = self.to_mongo(v, use_db_field, fields)
         value = data
         if is_list:  # Convert back to a list
-            value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))]
+            value = [v for k, v in sorted(data.items(), key=itemgetter(0))]
         return value
     def to_python(self, value):
@@ -1607,10 +1605,10 @@ class BinaryField(BaseField):
         return Binary(value)
     def validate(self, value):
-        if not isinstance(value, (six.binary_type, Binary)):
+        if not isinstance(value, (bytes, Binary)):
             self.error(
                 "BinaryField only accepts instances of "
-                "(%s, %s, Binary)" % (six.binary_type.__name__, Binary.__name__)
+                "(%s, %s, Binary)" % (bytes.__name__, Binary.__name__)
             )
         if self.max_bytes is not None and len(value) > self.max_bytes:
@@ -1829,7 +1827,7 @@ class FileField(BaseField):
         key = self.name
         if (
             hasattr(value, "read") and not isinstance(value, GridFSProxy)
-        ) or isinstance(value, (six.binary_type, str)):
+        ) or isinstance(value, (bytes, str)):
             # using "FileField() = file/string" notation
             grid_file = instance._data.get(self.name)
             # If a file already exists, delete it
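
On Python 3, six.binary_type is simply bytes and six.integer_types collapses to int (long is gone), so the isinstance checks above can name the builtins directly. A small hedged sketch of the same kind of check (toy validator, not the real BinaryField):

    from bson.binary import Binary

    def validate_blob(value):
        # bytes replaces six.binary_type; Binary is pymongo's BSON wrapper type
        if not isinstance(value, (bytes, Binary)):
            raise TypeError(
                "expected (%s, %s), got %r" % (bytes.__name__, Binary.__name__, type(value))
            )

    validate_blob(b"\xe6\x00\xc4\xff\x07")  # ok
    validate_blob(Binary(b"\x00\x01"))      # ok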

View File

@@ -9,8 +9,6 @@ import pymongo
 import pymongo.errors
 from pymongo.collection import ReturnDocument
 from pymongo.common import validate_read_preference
-import six
-from six import iteritems
 from mongoengine import signals
 from mongoengine.base import get_document
@@ -252,12 +250,12 @@ class BaseQuerySet(object):
         queryset = queryset.filter(*q_objs, **query)
         try:
-            result = six.next(queryset)
+            result = next(queryset)
         except StopIteration:
             msg = "%s matching query does not exist." % queryset._document._class_name
             raise queryset._document.DoesNotExist(msg)
         try:
-            six.next(queryset)
+            next(queryset)
         except StopIteration:
             return result
@@ -1567,7 +1565,7 @@ class BaseQuerySet(object):
         if self._limit == 0 or self._none:
             raise StopIteration
-        raw_doc = six.next(self._cursor)
+        raw_doc = next(self._cursor)
         if self._as_pymongo:
             return raw_doc
@@ -1812,13 +1810,13 @@ class BaseQuerySet(object):
         }
         """
         total, data, types = self.exec_js(freq_func, field)
-        values = {types.get(k): int(v) for k, v in iteritems(data)}
+        values = {types.get(k): int(v) for k, v in data.items()}
         if normalize:
             values = {k: float(v) / total for k, v in values.items()}
         frequencies = {}
-        for k, v in iteritems(values):
+        for k, v in values.items():
             if isinstance(k, float):
                 if int(k) == k:
                     k = int(k)
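
six.next(it) only papered over Python 2 iterators exposing .next(); the builtin next() does the same job on Python 3, including raising StopIteration when the iterator is exhausted. A quick sketch of the "exactly one result" pattern used in get() above (toy iterator, not a real queryset):

    def exactly_one(iterator):
        try:
            result = next(iterator)  # empty iterator -> StopIteration
        except StopIteration:
            raise LookupError("matching item does not exist")
        try:
            next(iterator)  # a second item means the match is ambiguous
        except StopIteration:
            return result
        raise LookupError("multiple items returned")

    assert exactly_one(iter([42])) == 42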

View File

@@ -1,5 +1,3 @@
-import six
 from mongoengine.errors import OperationError
 from mongoengine.queryset.base import (
     BaseQuerySet,
@@ -127,8 +125,8 @@ class QuerySet(BaseQuerySet):
         # Pull in ITER_CHUNK_SIZE docs from the database and store them in
         # the result cache.
         try:
-            for _ in six.moves.range(ITER_CHUNK_SIZE):
-                self._result_cache.append(six.next(self))
+            for _ in range(ITER_CHUNK_SIZE):
+                self._result_cache.append(next(self))
         except StopIteration:
             # Getting this exception means there are no more docs in the
             # db cursor. Set _has_more to False so that we can use that
@@ -180,9 +178,9 @@ class QuerySetNoCache(BaseQuerySet):
             return ".. queryset mid-iteration .."
         data = []
-        for _ in six.moves.range(REPR_OUTPUT_SIZE + 1):
+        for _ in range(REPR_OUTPUT_SIZE + 1):
             try:
-                data.append(six.next(self))
+                data.append(next(self))
             except StopIteration:
                 break
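
Same substitution in the result-cache loop: six.moves.range and six.next become the builtins. A hedged sketch of that chunked caching pattern (hypothetical fill_cache helper, not QuerySet code):

    def fill_cache(cache, iterator, chunk_size=100):
        # pull at most chunk_size items; stop early once the iterator is exhausted
        try:
            for _ in range(chunk_size):
                cache.append(next(iterator))
        except StopIteration:
            return False  # nothing left behind the cursor
        return True       # there may be more to fetch

    cache = []
    fill_cache(cache, iter([1, 2, 3]), chunk_size=2)
    assert cache == [1, 2]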

View File

@@ -3,8 +3,6 @@ from collections import defaultdict
 from bson import ObjectId, SON
 from bson.dbref import DBRef
 import pymongo
-import six
-from six import iteritems
 from mongoengine.base import UPDATE_OPERATORS
 from mongoengine.common import _import_class
@@ -180,7 +178,7 @@ def query(_doc_cls=None, **kwargs):
                 "$near" in value_dict or "$nearSphere" in value_dict
             ):
                 value_son = SON()
-                for k, v in iteritems(value_dict):
+                for k, v in value_dict.items():
                     if k == "$maxDistance" or k == "$minDistance":
                         continue
                     value_son[k] = v

View File

@@ -1,4 +1,3 @@
 pymongo>=3.4
-six==1.10.0
 Sphinx==1.5.5
 sphinx-rtd-theme==0.2.4

View File

@@ -145,7 +145,7 @@ setup(
     platforms=["any"],
     classifiers=CLASSIFIERS,
     python_requires=">=3.5",
-    install_requires=["pymongo>=3.4, <4.0", "six>=1.10.0"],
+    install_requires=["pymongo>=3.4, <4.0"],
     cmdclass={"test": PyTest},
     **extra_opts
 )

View File

@@ -5,7 +5,6 @@ from datetime import datetime
 from pymongo.collation import Collation
 from pymongo.errors import OperationFailure
 import pytest
-from six import iteritems
 from mongoengine import *
 from mongoengine.connection import get_db
@@ -59,7 +58,7 @@ class TestIndexes(unittest.TestCase):
         info = BlogPost.objects._collection.index_information()
         # _id, '-date', 'tags', ('cat', 'date')
         assert len(info) == 4
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         for expected in expected_specs:
             assert expected["fields"] in info
@@ -87,7 +86,7 @@ class TestIndexes(unittest.TestCase):
         # the indices on -date and tags will both contain
         # _cls as first element in the key
         assert len(info) == 4
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         for expected in expected_specs:
             assert expected["fields"] in info
@@ -102,7 +101,7 @@ class TestIndexes(unittest.TestCase):
         ExtendedBlogPost.ensure_indexes()
         info = ExtendedBlogPost.objects._collection.index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         for expected in expected_specs:
             assert expected["fields"] in info
@@ -192,7 +191,7 @@ class TestIndexes(unittest.TestCase):
         # Indexes are lazy so use list() to perform query
         list(Person.objects)
         info = Person.objects._collection.index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         assert [("rank.title", 1)] in info
     def test_explicit_geo2d_index(self):
@@ -207,7 +206,7 @@ class TestIndexes(unittest.TestCase):
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         assert [("location.point", "2d")] in info
     def test_explicit_geo2d_index_embedded(self):
@@ -227,7 +226,7 @@ class TestIndexes(unittest.TestCase):
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         assert [("current.location.point", "2d")] in info
     def test_explicit_geosphere_index(self):
@@ -244,7 +243,7 @@ class TestIndexes(unittest.TestCase):
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         assert [("location.point", "2dsphere")] in info
     def test_explicit_geohaystack_index(self):
@@ -266,7 +265,7 @@ class TestIndexes(unittest.TestCase):
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         assert [("location.point", "geoHaystack")] in info
     def test_create_geohaystack_index(self):
@@ -279,7 +278,7 @@ class TestIndexes(unittest.TestCase):
         Place.create_index({"fields": (")location.point", "name")}, bucketSize=10)
         info = Place._get_collection().index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         assert [("location.point", "geoHaystack"), ("name", 1)] in info
     def test_dictionary_indexes(self):
@@ -308,7 +307,7 @@ class TestIndexes(unittest.TestCase):
         info = BlogPost.objects._collection.index_information()
         info = [
             (value["key"], value.get("unique", False), value.get("sparse", False))
-            for key, value in iteritems(info)
+            for key, value in info.items()
         ]
         assert ([("addDate", -1)], True, True) in info
@@ -901,7 +900,7 @@ class TestIndexes(unittest.TestCase):
             self.fail("Unbound local error at index + pk definition")
         info = BlogPost.objects._collection.index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         index_item = [("_id", 1), ("comments.comment_id", 1)]
         assert index_item in info
@@ -942,7 +941,7 @@ class TestIndexes(unittest.TestCase):
             meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]}
         info = MyDoc.objects._collection.index_information()
-        info = [value["key"] for key, value in iteritems(info)]
+        info = [value["key"] for key, value in info.items()]
         assert [("provider_ids.foo", 1)] in info
         assert [("provider_ids.bar", 1)] in info
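
For context on these test hunks: pymongo's Collection.index_information() returns a plain dict keyed by index name, each value carrying a "key" entry with (field, direction) pairs, so iterating it with .items() is all that changes. A self-contained sketch of that shape (values abridged and assumed, not copied from a live server):

    info = {
        "_id_": {"key": [("_id", 1)], "v": 2},
        "addDate_-1": {"key": [("addDate", -1)], "unique": True, "sparse": True},
    }
    index_keys = [value["key"] for key, value in info.items()]
    assert [("addDate", -1)] in index_keys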

View File

@@ -3,7 +3,6 @@ import unittest
 import warnings
 import pytest
-from six import iteritems
 from mongoengine import (
     BooleanField,
@@ -550,7 +549,7 @@ class TestInheritance(MongoDBTestCase):
         class Human(Mammal):
             pass
-        for k, v in iteritems(defaults):
+        for k, v in defaults.items():
             for cls in [Animal, Fish, Guppy]:
                 assert cls._meta[k] == v

View File

@@ -10,7 +10,6 @@ import bson
 from bson import DBRef, ObjectId
 from pymongo.errors import DuplicateKeyError
 import pytest
-from six import iteritems
 from mongoengine import *
 from mongoengine import signals
@@ -3274,7 +3273,7 @@ class TestDocumentInstance(MongoDBTestCase):
            def expand(self):
                self.flattened_parameter = {}
-               for parameter_name, parameter in iteritems(self.parameters):
+               for parameter_name, parameter in self.parameters.items():
                    parameter.expand()
        class NodesSystem(Document):
@@ -3282,7 +3281,7 @@ class TestDocumentInstance(MongoDBTestCase):
            nodes = MapField(ReferenceField(Node, dbref=False))
            def save(self, *args, **kwargs):
-               for node_name, node in iteritems(self.nodes):
+               for node_name, node in self.nodes.items():
                    node.expand()
                    node.save(*args, **kwargs)
                super(NodesSystem, self).save(*args, **kwargs)

View File

@@ -3,13 +3,12 @@ import uuid
 from bson import Binary
 import pytest
-import six
 from mongoengine import *
 from tests.utils import MongoDBTestCase
-BIN_VALUE = six.b(
-    "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5"
+BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode(
+    "latin-1"
 )
@@ -22,7 +21,7 @@ class TestBinaryField(MongoDBTestCase):
             content_type = StringField()
             blob = BinaryField()
-        BLOB = six.b("\xe6\x00\xc4\xff\x07")
+        BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1")
         MIME_TYPE = "application/octet-stream"
         Attachment.drop_collection()
@@ -32,7 +31,7 @@ class TestBinaryField(MongoDBTestCase):
         attachment_1 = Attachment.objects().first()
         assert MIME_TYPE == attachment_1.content_type
-        assert BLOB == six.binary_type(attachment_1.blob)
+        assert BLOB == bytes(attachment_1.blob)
     def test_validation_succeeds(self):
         """Ensure that valid values can be assigned to binary fields.
@@ -47,11 +46,11 @@ class TestBinaryField(MongoDBTestCase):
         attachment_required = AttachmentRequired()
         with pytest.raises(ValidationError):
             attachment_required.validate()
-        attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07"))
+        attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1"))
         attachment_required.validate()
-        _5_BYTES = six.b("\xe6\x00\xc4\xff\x07")
-        _4_BYTES = six.b("\xe6\x00\xc4\xff")
+        _5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1")
+        _4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1")
         with pytest.raises(ValidationError):
             AttachmentSizeLimit(blob=_5_BYTES).validate()
         AttachmentSizeLimit(blob=_4_BYTES).validate()
@@ -133,7 +132,7 @@ class TestBinaryField(MongoDBTestCase):
         MyDocument.drop_collection()
-        bin_data = six.b("\xe6\x00\xc4\xff\x07")
+        bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1")
         doc = MyDocument(bin_field=bin_data).save()
         n_updated = MyDocument.objects(bin_field=bin_data).update_one(
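
six.b("...") on Python 2 reinterpreted the literal's code points as raw bytes, which "...".encode("latin-1") reproduces exactly on Python 3, since latin-1 maps code points 0-255 one-to-one to byte values; a plain b"..." literal would also work for these fixtures. A quick check of the equivalence:

    blob = "\xe6\x00\xc4\xff\x07".encode("latin-1")
    assert blob == b"\xe6\x00\xc4\xff\x07"
    assert isinstance(blob, bytes) and len(blob) == 5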

View File

@@ -2,7 +2,6 @@
 import datetime
 import pytest
-import six
 try:
     import dateutil

View File

@@ -2,7 +2,6 @@
 import datetime as dt
 import pytest
-import six
 try:
     import dateutil

View File

@@ -7,7 +7,6 @@ from io import BytesIO
 import gridfs
 import pytest
-import six
 from mongoengine import *
 from mongoengine.connection import get_db
@@ -58,7 +57,7 @@ class TestFileField(MongoDBTestCase):
         PutFile.drop_collection()
-        text = six.b("Hello, World!")
+        text = "Hello, World!".encode("latin-1")
         content_type = "text/plain"
         putfile = PutFile()
@@ -101,8 +100,8 @@ class TestFileField(MongoDBTestCase):
         StreamFile.drop_collection()
-        text = six.b("Hello, World!")
-        more_text = six.b("Foo Bar")
+        text = "Hello, World!".encode("latin-1")
+        more_text = "Foo Bar".encode("latin-1")
         content_type = "text/plain"
         streamfile = StreamFile()
@@ -137,8 +136,8 @@ class TestFileField(MongoDBTestCase):
         StreamFile.drop_collection()
-        text = six.b("Hello, World!")
-        more_text = six.b("Foo Bar")
+        text = "Hello, World!".encode("latin-1")
+        more_text = "Foo Bar".encode("latin-1")
         streamfile = StreamFile()
         streamfile.save()
@@ -167,8 +166,8 @@ class TestFileField(MongoDBTestCase):
         class SetFile(Document):
             the_file = FileField()
-        text = six.b("Hello, World!")
-        more_text = six.b("Foo Bar")
+        text = "Hello, World!".encode("latin-1")
+        more_text = "Foo Bar".encode("latin-1")
         SetFile.drop_collection()
@@ -196,7 +195,7 @@ class TestFileField(MongoDBTestCase):
         GridDocument.drop_collection()
         with tempfile.TemporaryFile() as f:
-            f.write(six.b("Hello World!"))
+            f.write("Hello World!".encode("latin-1"))
             f.flush()
         # Test without default
@@ -213,7 +212,7 @@ class TestFileField(MongoDBTestCase):
         assert doc_b.the_file.grid_id == doc_c.the_file.grid_id
         # Test with default
-        doc_d = GridDocument(the_file=six.b(""))
+        doc_d = GridDocument(the_file="".encode("latin-1"))
         doc_d.save()
         doc_e = GridDocument.objects.with_id(doc_d.id)
@@ -240,7 +239,7 @@ class TestFileField(MongoDBTestCase):
         # First instance
         test_file = TestFile()
         test_file.name = "Hello, World!"
-        test_file.the_file.put(six.b("Hello, World!"))
+        test_file.the_file.put("Hello, World!".encode("latin-1"))
         test_file.save()
         # Second instance
@@ -297,7 +296,9 @@ class TestFileField(MongoDBTestCase):
         test_file = TestFile()
         assert not bool(test_file.the_file)
-        test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain")
+        test_file.the_file.put(
+            "Hello, World!".encode("latin-1"), content_type="text/plain"
+        )
         test_file.save()
         assert bool(test_file.the_file)
@@ -319,7 +320,7 @@ class TestFileField(MongoDBTestCase):
         class TestFile(Document):
             the_file = FileField()
-        text = six.b("Hello, World!")
+        text = "Hello, World!".encode("latin-1")
         content_type = "text/plain"
         testfile = TestFile()
@@ -363,7 +364,7 @@ class TestFileField(MongoDBTestCase):
         testfile.the_file.put(text, content_type=content_type, filename="hello")
         testfile.save()
-        text = six.b("Bonjour, World!")
+        text = "Bonjour, World!".encode("latin-1")
         testfile.the_file.replace(text, content_type=content_type, filename="hello")
         testfile.save()
@@ -387,7 +388,7 @@ class TestFileField(MongoDBTestCase):
         TestImage.drop_collection()
         with tempfile.TemporaryFile() as f:
-            f.write(six.b("Hello World!"))
+            f.write("Hello World!".encode("latin-1"))
             f.flush()
         t = TestImage()
@@ -503,21 +504,21 @@ class TestFileField(MongoDBTestCase):
         # First instance
         test_file = TestFile()
         test_file.name = "Hello, World!"
-        test_file.the_file.put(six.b("Hello, World!"), name="hello.txt")
+        test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt")
         test_file.save()
         data = get_db("test_files").macumba.files.find_one()
         assert data.get("name") == "hello.txt"
         test_file = TestFile.objects.first()
-        assert test_file.the_file.read() == six.b("Hello, World!")
+        assert test_file.the_file.read() == "Hello, World!".encode("latin-1")
         test_file = TestFile.objects.first()
-        test_file.the_file = six.b("HELLO, WORLD!")
+        test_file.the_file = "Hello, World!".encode("latin-1")
         test_file.save()
         test_file = TestFile.objects.first()
-        assert test_file.the_file.read() == six.b("HELLO, WORLD!")
+        assert test_file.the_file.read() == "Hello, World!".encode("latin-1")
     def test_copyable(self):
         class PutFile(Document):
@@ -525,7 +526,7 @@ class TestFileField(MongoDBTestCase):
         PutFile.drop_collection()
-        text = six.b("Hello, World!")
+        text = "Hello, World!".encode("latin-1")
         content_type = "text/plain"
         putfile = PutFile()

View File

@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 import pytest
-import six
 from mongoengine import *
@@ -52,8 +51,7 @@ class TestFloatField(MongoDBTestCase):
         big_person = BigPerson()
-        for value, value_type in enumerate(six.integer_types):
-            big_person.height = value_type(value)
-            big_person.validate()
+        big_person.height = int(0)
+        big_person.validate()
         big_person.height = 2 ** 500

View File

@@ -1,7 +1,5 @@
-# -*- coding: utf-8 -*-
-import pytest
 from bson.int64 import Int64
-import six
+import pytest
 from mongoengine import *
 from mongoengine.connection import get_db
@@ -24,7 +22,7 @@ class TestLongField(MongoDBTestCase):
         assert isinstance(
             db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64
         )
-        assert isinstance(doc.some_long, six.integer_types)
+        assert isinstance(doc.some_long, int)
     def test_long_validation(self):
         """Ensure that invalid values cannot be assigned to long fields.

View File

@@ -10,8 +10,6 @@ import pymongo
 from pymongo.read_preferences import ReadPreference
 from pymongo.results import UpdateResult
 import pytest
-import six
-from six import iteritems
 from mongoengine import *
 from mongoengine.connection import get_db
@@ -4093,7 +4091,7 @@ class TestQueryset(unittest.TestCase):
         info = Comment.objects._collection.index_information()
         info = [
             (value["key"], value.get("unique", False), value.get("sparse", False))
-            for key, value in iteritems(info)
+            for key, value in info.items()
         ]
         assert ([("_cls", 1), ("message", 1)], False, False) in info

View File

@@ -1,7 +1,6 @@
 import unittest
 import pytest
-from six import iterkeys
 from mongoengine import Document
 from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict
@@ -372,7 +371,7 @@ class TestStrictDict(unittest.TestCase):
     def test_iterkeys(self):
         d = self.dtype(a=1)
-        assert list(iterkeys(d)) == ["a"]
+        assert list(d.keys()) == ["a"]
     def test_len(self):
         d = self.dtype(a=1)

View File

@@ -2,10 +2,8 @@
 import unittest
 from bson import DBRef, ObjectId
-from six import iteritems
 from mongoengine import *
-from mongoengine.connection import get_db
 from mongoengine.context_managers import query_counter
@@ -739,7 +737,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 2
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, User)
         # Document select_related
@@ -752,7 +750,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 2
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, User)
         # Queryset select_related
@@ -766,7 +764,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
            assert q == 2
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, User)
         User.drop_collection()
@@ -820,7 +818,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
         # Document select_related
@@ -836,7 +834,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
         # Queryset select_related
@@ -853,7 +851,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
         Group.objects.delete()
@@ -910,7 +908,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 2
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, UserA)
         # Document select_related
@@ -926,7 +924,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 2
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, UserA)
         # Queryset select_related
@@ -943,7 +941,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 2
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert isinstance(m, UserA)
         UserA.drop_collection()
@@ -997,7 +995,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
         # Document select_related
@@ -1013,7 +1011,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
         # Queryset select_related
@@ -1030,7 +1028,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             assert q == 4
-            for k, m in iteritems(group_obj.members):
+            for k, m in group_obj.members.items():
                 assert "User" in m.__class__.__name__
         Group.objects.delete()