get rid of six
		| @@ -50,7 +50,6 @@ All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>` | ||||
| At the very least, you'll need these two packages to use MongoEngine: | ||||
|  | ||||
| - pymongo>=3.4 | ||||
| - six>=1.10.0 | ||||
|  | ||||
| If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||
|  | ||||
|   | ||||
| @@ -153,7 +153,7 @@ inherited classes like so: :: | ||||
|  | ||||
|     # 4. Remove indexes | ||||
|     info = collection.index_information() | ||||
|     indexes_to_drop = [key for key, value in info.iteritems() | ||||
|     indexes_to_drop = [key for key, value in info.items() | ||||
|                        if '_types' in dict(value['key'])] | ||||
|     for index in indexes_to_drop: | ||||
|         collection.drop_index(index) | ||||
|   | ||||
| @@ -1,7 +1,6 @@ | ||||
| import weakref | ||||
|  | ||||
| from bson import DBRef | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||
| @@ -360,7 +359,7 @@ class StrictDict(object): | ||||
|     _classes = {} | ||||
|  | ||||
|     def __init__(self, **kwargs): | ||||
|         for k, v in iteritems(kwargs): | ||||
|         for k, v in kwargs.items(): | ||||
|             setattr(self, k, v) | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
| @@ -408,7 +407,7 @@ class StrictDict(object): | ||||
|         return (key for key in self.__slots__ if hasattr(self, key)) | ||||
|  | ||||
|     def __len__(self): | ||||
|         return len(list(iteritems(self))) | ||||
|         return len(list(self.items())) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         return list(self.items()) == list(other.items()) | ||||
|   | ||||
| @@ -5,8 +5,6 @@ from functools import partial | ||||
|  | ||||
| from bson import DBRef, ObjectId, SON, json_util | ||||
| import pymongo | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.base.common import get_document | ||||
| @@ -110,7 +108,7 @@ class BaseDocument(object): | ||||
|         # Assign default values to the instance. | ||||
|         # We set default values only for fields loaded from DB. See | ||||
|         # https://github.com/mongoengine/mongoengine/issues/399 for more info. | ||||
|         for key, field in iteritems(self._fields): | ||||
|         for key, field in self._fields.items(): | ||||
|             if self._db_field_map.get(key, key) in __only_fields: | ||||
|                 continue | ||||
|             value = getattr(self, key, None) | ||||
| @@ -122,14 +120,14 @@ class BaseDocument(object): | ||||
|         # Set passed values after initialisation | ||||
|         if self._dynamic: | ||||
|             dynamic_data = {} | ||||
|             for key, value in iteritems(values): | ||||
|             for key, value in values.items(): | ||||
|                 if key in self._fields or key == "_id": | ||||
|                     setattr(self, key, value) | ||||
|                 else: | ||||
|                     dynamic_data[key] = value | ||||
|         else: | ||||
|             FileField = _import_class("FileField") | ||||
|             for key, value in iteritems(values): | ||||
|             for key, value in values.items(): | ||||
|                 key = self._reverse_db_field_map.get(key, key) | ||||
|                 if key in self._fields or key in ("id", "pk", "_cls"): | ||||
|                     if __auto_convert and value is not None: | ||||
| @@ -145,7 +143,7 @@ class BaseDocument(object): | ||||
|  | ||||
|         if self._dynamic: | ||||
|             self._dynamic_lock = False | ||||
|             for key, value in iteritems(dynamic_data): | ||||
|             for key, value in dynamic_data.items(): | ||||
|                 setattr(self, key, value) | ||||
|  | ||||
|         # Flag initialised | ||||
| @@ -575,7 +573,7 @@ class BaseDocument(object): | ||||
|         if not hasattr(data, "items"): | ||||
|             iterator = enumerate(data) | ||||
|         else: | ||||
|             iterator = iteritems(data) | ||||
|             iterator = data.items() | ||||
|  | ||||
|         for index_or_key, value in iterator: | ||||
|             item_key = "%s%s." % (base_key, index_or_key) | ||||
| @@ -741,7 +739,7 @@ class BaseDocument(object): | ||||
|         # Convert SON to a data dict, making sure each key is a string and | ||||
|         # corresponds to the right db field. | ||||
|         data = {} | ||||
|         for key, value in iteritems(son): | ||||
|         for key, value in son.items(): | ||||
|             key = str(key) | ||||
|             key = cls._db_field_map.get(key, key) | ||||
|             data[key] = value | ||||
| @@ -756,7 +754,7 @@ class BaseDocument(object): | ||||
|         if not _auto_dereference: | ||||
|             fields = copy.deepcopy(fields) | ||||
|  | ||||
|         for field_name, field in iteritems(fields): | ||||
|         for field_name, field in fields.items(): | ||||
|             field._auto_dereference = _auto_dereference | ||||
|             if field.db_field in data: | ||||
|                 value = data[field.db_field] | ||||
| @@ -781,7 +779,7 @@ class BaseDocument(object): | ||||
|  | ||||
|         # In STRICT documents, remove any keys that aren't in cls._fields | ||||
|         if cls.STRICT: | ||||
|             data = {k: v for k, v in iteritems(data) if k in cls._fields} | ||||
|             data = {k: v for k, v in data.items() if k in cls._fields} | ||||
|  | ||||
|         obj = cls( | ||||
|             __auto_convert=False, _created=created, __only_fields=only_fields, **data | ||||
|   | ||||
| @@ -4,7 +4,6 @@ import weakref | ||||
|  | ||||
| from bson import DBRef, ObjectId, SON | ||||
| import pymongo | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base.common import UPDATE_OPERATORS | ||||
| from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList | ||||
| @@ -418,11 +417,11 @@ class ComplexBaseField(BaseField): | ||||
|         if self.field: | ||||
|             value_dict = { | ||||
|                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||
|                 for key, item in iteritems(value) | ||||
|                 for key, item in value.items() | ||||
|             } | ||||
|         else: | ||||
|             value_dict = {} | ||||
|             for k, v in iteritems(value): | ||||
|             for k, v in value.items(): | ||||
|                 if isinstance(v, Document): | ||||
|                     # We need the id from the saved object to create the DBRef | ||||
|                     if v.pk is None: | ||||
| @@ -461,8 +460,8 @@ class ComplexBaseField(BaseField): | ||||
|         """If field is provided ensure the value is valid.""" | ||||
|         errors = {} | ||||
|         if self.field: | ||||
|             if hasattr(value, "iteritems") or hasattr(value, "items"): | ||||
|                 sequence = iteritems(value) | ||||
|             if hasattr(value, "items"): | ||||
|                 sequence = value.items() | ||||
|             else: | ||||
|                 sequence = enumerate(value) | ||||
|             for k, v in sequence: | ||||
|   | ||||
| @@ -1,8 +1,6 @@ | ||||
| import itertools | ||||
| import warnings | ||||
|  | ||||
| from six import iteritems, itervalues | ||||
|  | ||||
| from mongoengine.base.common import _document_registry | ||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||
| from mongoengine.common import _import_class | ||||
| @@ -68,7 +66,7 @@ class DocumentMetaclass(type): | ||||
|             # Standard object mixin - merge in any Fields | ||||
|             if not hasattr(base, "_meta"): | ||||
|                 base_fields = {} | ||||
|                 for attr_name, attr_value in iteritems(base.__dict__): | ||||
|                 for attr_name, attr_value in base.__dict__.items(): | ||||
|                     if not isinstance(attr_value, BaseField): | ||||
|                         continue | ||||
|                     attr_value.name = attr_name | ||||
| @@ -80,7 +78,7 @@ class DocumentMetaclass(type): | ||||
|  | ||||
|         # Discover any document fields | ||||
|         field_names = {} | ||||
|         for attr_name, attr_value in iteritems(attrs): | ||||
|         for attr_name, attr_value in attrs.items(): | ||||
|             if not isinstance(attr_value, BaseField): | ||||
|                 continue | ||||
|             attr_value.name = attr_name | ||||
| @@ -110,9 +108,7 @@ class DocumentMetaclass(type): | ||||
|  | ||||
|         attrs["_fields_ordered"] = tuple( | ||||
|             i[1] | ||||
|             for i in sorted( | ||||
|                 (v.creation_counter, v.name) for v in itervalues(doc_fields) | ||||
|             ) | ||||
|             for i in sorted((v.creation_counter, v.name) for v in doc_fields.values()) | ||||
|         ) | ||||
|  | ||||
|         # | ||||
| @@ -190,7 +186,7 @@ class DocumentMetaclass(type): | ||||
|         #             f.__dict__.update({"im_self": getattr(f, "__self__")}) | ||||
|  | ||||
|         # Handle delete rules | ||||
|         for field in itervalues(new_class._fields): | ||||
|         for field in new_class._fields.values(): | ||||
|             f = field | ||||
|             if f.owner_document is None: | ||||
|                 f.owner_document = new_class | ||||
| @@ -399,7 +395,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|             new_class.objects = QuerySetManager() | ||||
|  | ||||
|         # Validate the fields and set primary key if needed | ||||
|         for field_name, field in iteritems(new_class._fields): | ||||
|         for field_name, field in new_class._fields.items(): | ||||
|             if field.primary_key: | ||||
|                 # Ensure only one primary key is set | ||||
|                 current_pk = new_class._meta.get("id_field") | ||||
| @@ -476,7 +472,7 @@ class MetaDict(dict): | ||||
|     _merge_options = ("indexes",) | ||||
|  | ||||
|     def merge(self, new_options): | ||||
|         for k, v in iteritems(new_options): | ||||
|         for k, v in new_options.items(): | ||||
|             if k in self._merge_options: | ||||
|                 self[k] = self.get(k, []) + v | ||||
|             else: | ||||
|   | ||||
| @@ -1,7 +1,6 @@ | ||||
| from contextlib import contextmanager | ||||
|  | ||||
| from pymongo.write_concern import WriteConcern | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| @@ -123,7 +122,7 @@ class no_dereference(object): | ||||
|  | ||||
|         self.deref_fields = [ | ||||
|             k | ||||
|             for k, v in iteritems(self.cls._fields) | ||||
|             for k, v in self.cls._fields.items() | ||||
|             if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) | ||||
|         ] | ||||
|  | ||||
|   | ||||
| @@ -1,6 +1,4 @@ | ||||
| from bson import DBRef, SON | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base import ( | ||||
|     BaseDict, | ||||
| @@ -79,7 +77,7 @@ class DeReference(object): | ||||
|  | ||||
|                     def _get_items_from_dict(items): | ||||
|                         new_items = {} | ||||
|                         for k, v in iteritems(items): | ||||
|                         for k, v in items.items(): | ||||
|                             value = v | ||||
|                             if isinstance(v, list): | ||||
|                                 value = _get_items_from_list(v) | ||||
| @@ -120,7 +118,7 @@ class DeReference(object): | ||||
|         depth += 1 | ||||
|         for item in iterator: | ||||
|             if isinstance(item, (Document, EmbeddedDocument)): | ||||
|                 for field_name, field in iteritems(item._fields): | ||||
|                 for field_name, field in item._fields.items(): | ||||
|                     v = item._data.get(field_name, None) | ||||
|                     if isinstance(v, LazyReference): | ||||
|                         # LazyReference inherits DBRef but should not be dereferenced here ! | ||||
| @@ -136,7 +134,7 @@ class DeReference(object): | ||||
|                             getattr(field, "field", None), "document_type", None | ||||
|                         ) | ||||
|                         references = self._find_references(v, depth) | ||||
|                         for key, refs in iteritems(references): | ||||
|                         for key, refs in references.items(): | ||||
|                             if isinstance( | ||||
|                                 field_cls, (Document, TopLevelDocumentMetaclass) | ||||
|                             ): | ||||
| @@ -153,7 +151,7 @@ class DeReference(object): | ||||
|                 ) | ||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||
|                 references = self._find_references(item, depth - 1) | ||||
|                 for key, refs in iteritems(references): | ||||
|                 for key, refs in references.items(): | ||||
|                     reference_map.setdefault(key, set()).update(refs) | ||||
|  | ||||
|         return reference_map | ||||
| @@ -162,7 +160,7 @@ class DeReference(object): | ||||
|         """Fetch all references and convert to their document objects | ||||
|         """ | ||||
|         object_map = {} | ||||
|         for collection, dbrefs in iteritems(self.reference_map): | ||||
|         for collection, dbrefs in self.reference_map.items(): | ||||
|  | ||||
|             # we use getattr instead of hasattr because hasattr swallows any exception under python2 | ||||
|             # so it could hide nasty things without raising exceptions (cfr bug #1688)) | ||||
| @@ -174,7 +172,7 @@ class DeReference(object): | ||||
|                     dbref for dbref in dbrefs if (col_name, dbref) not in object_map | ||||
|                 ] | ||||
|                 references = collection.objects.in_bulk(refs) | ||||
|                 for key, doc in iteritems(references): | ||||
|                 for key, doc in references.items(): | ||||
|                     object_map[(col_name, key)] = doc | ||||
|             else:  # Generic reference: use the refs data to convert to document | ||||
|                 if isinstance(doc_type, (ListField, DictField, MapField)): | ||||
| @@ -250,7 +248,7 @@ class DeReference(object): | ||||
|             data = [] | ||||
|         else: | ||||
|             is_list = False | ||||
|             iterator = iteritems(items) | ||||
|             iterator = items.items() | ||||
|             data = {} | ||||
|  | ||||
|         depth += 1 | ||||
|   | ||||
| @@ -4,8 +4,6 @@ import warnings | ||||
| from bson.dbref import DBRef | ||||
| import pymongo | ||||
| from pymongo.read_preferences import ReadPreference | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.base import ( | ||||
| @@ -55,7 +53,7 @@ class InvalidCollectionError(Exception): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | ||||
| class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): | ||||
|     """A :class:`~mongoengine.Document` that isn't stored in its own | ||||
|     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as | ||||
|     fields on :class:`~mongoengine.Document`\ s through the | ||||
| @@ -103,7 +101,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | ||||
|         return data | ||||
|  | ||||
|  | ||||
| class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
| class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | ||||
|     """The base class used for defining the structure and properties of | ||||
|     collections of documents stored in MongoDB. Inherit from this class, and | ||||
|     add fields as class attributes to define a document's structure. | ||||
| @@ -632,7 +630,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|  | ||||
|         # Delete FileFields separately | ||||
|         FileField = _import_class("FileField") | ||||
|         for name, field in iteritems(self._fields): | ||||
|         for name, field in self._fields.items(): | ||||
|             if isinstance(field, FileField): | ||||
|                 getattr(self, name).delete() | ||||
|  | ||||
| @@ -1029,7 +1027,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||
|         return {"missing": missing, "extra": extra} | ||||
|  | ||||
|  | ||||
| class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | ||||
| class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): | ||||
|     """A Dynamic Document class allowing flexible, expandable and uncontrolled | ||||
|     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same | ||||
|     way as an ordinary document but has expanded style properties.  Any data | ||||
| @@ -1060,7 +1058,7 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | ||||
|             super(DynamicDocument, self).__delattr__(*args, **kwargs) | ||||
|  | ||||
|  | ||||
| class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): | ||||
| class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): | ||||
|     """A Dynamic Embedded Document class allowing flexible, expandable and | ||||
|     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more | ||||
|     information about dynamic documents. | ||||
|   | ||||
| @@ -1,6 +1,5 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| from six import iteritems | ||||
|  | ||||
| __all__ = ( | ||||
|     "NotRegistered", | ||||
| @@ -125,7 +124,7 @@ class ValidationError(AssertionError): | ||||
|         def build_dict(source): | ||||
|             errors_dict = {} | ||||
|             if isinstance(source, dict): | ||||
|                 for field_name, error in iteritems(source): | ||||
|                 for field_name, error in source.items(): | ||||
|                     errors_dict[field_name] = build_dict(error) | ||||
|             elif isinstance(source, ValidationError) and source.errors: | ||||
|                 return build_dict(source.errors) | ||||
| @@ -146,15 +145,15 @@ class ValidationError(AssertionError): | ||||
|             if isinstance(value, list): | ||||
|                 value = " ".join([generate_key(k) for k in value]) | ||||
|             elif isinstance(value, dict): | ||||
|                 value = " ".join([generate_key(v, k) for k, v in iteritems(value)]) | ||||
|                 value = " ".join([generate_key(v, k) for k, v in value.items()]) | ||||
|  | ||||
|             results = "%s.%s" % (prefix, value) if prefix else value | ||||
|             return results | ||||
|  | ||||
|         error_dict = defaultdict(list) | ||||
|         for k, v in iteritems(self.to_dict()): | ||||
|         for k, v in self.to_dict().items(): | ||||
|             error_dict[generate_key(v)].append(k) | ||||
|         return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)]) | ||||
|         return " ".join(["%s: %s" % (k, v) for k, v in error_dict.items()]) | ||||
|  | ||||
|  | ||||
| class DeprecatedError(Exception): | ||||
|   | ||||
| @@ -13,8 +13,6 @@ from bson.int64 import Int64 | ||||
| import gridfs | ||||
| import pymongo | ||||
| from pymongo import ReturnDocument | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| try: | ||||
|     import dateutil | ||||
| @@ -205,7 +203,7 @@ class EmailField(StringField): | ||||
|     ) | ||||
|  | ||||
|     UTF8_USER_REGEX = LazyRegexCompiler( | ||||
|         six.u( | ||||
|         ( | ||||
|             # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to | ||||
|             # include `UTF8-non-ascii`. | ||||
|             r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+)*\Z" | ||||
| @@ -387,7 +385,7 @@ class FloatField(BaseField): | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if isinstance(value, six.integer_types): | ||||
|         if isinstance(value, int): | ||||
|             try: | ||||
|                 value = float(value) | ||||
|             except OverflowError: | ||||
| @@ -868,12 +866,12 @@ class DynamicField(BaseField): | ||||
|             value = {k: v for k, v in enumerate(value)} | ||||
|  | ||||
|         data = {} | ||||
|         for k, v in iteritems(value): | ||||
|         for k, v in value.items(): | ||||
|             data[k] = self.to_mongo(v, use_db_field, fields) | ||||
|  | ||||
|         value = data | ||||
|         if is_list:  # Convert back to a list | ||||
|             value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))] | ||||
|             value = [v for k, v in sorted(data.items(), key=itemgetter(0))] | ||||
|         return value | ||||
|  | ||||
|     def to_python(self, value): | ||||
| @@ -1607,10 +1605,10 @@ class BinaryField(BaseField): | ||||
|         return Binary(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, (six.binary_type, Binary)): | ||||
|         if not isinstance(value, (bytes, Binary)): | ||||
|             self.error( | ||||
|                 "BinaryField only accepts instances of " | ||||
|                 "(%s, %s, Binary)" % (six.binary_type.__name__, Binary.__name__) | ||||
|                 "(%s, %s, Binary)" % (bytes.__name__, Binary.__name__) | ||||
|             ) | ||||
|  | ||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||
| @@ -1829,7 +1827,7 @@ class FileField(BaseField): | ||||
|         key = self.name | ||||
|         if ( | ||||
|             hasattr(value, "read") and not isinstance(value, GridFSProxy) | ||||
|         ) or isinstance(value, (six.binary_type, str)): | ||||
|         ) or isinstance(value, (bytes, str)): | ||||
|             # using "FileField() = file/string" notation | ||||
|             grid_file = instance._data.get(self.name) | ||||
|             # If a file already exists, delete it | ||||
|   | ||||
| @@ -9,8 +9,6 @@ import pymongo | ||||
| import pymongo.errors | ||||
| from pymongo.collection import ReturnDocument | ||||
| from pymongo.common import validate_read_preference | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import signals | ||||
| from mongoengine.base import get_document | ||||
| @@ -252,12 +250,12 @@ class BaseQuerySet(object): | ||||
|         queryset = queryset.filter(*q_objs, **query) | ||||
|  | ||||
|         try: | ||||
|             result = six.next(queryset) | ||||
|             result = next(queryset) | ||||
|         except StopIteration: | ||||
|             msg = "%s matching query does not exist." % queryset._document._class_name | ||||
|             raise queryset._document.DoesNotExist(msg) | ||||
|         try: | ||||
|             six.next(queryset) | ||||
|             next(queryset) | ||||
|         except StopIteration: | ||||
|             return result | ||||
|  | ||||
| @@ -1567,7 +1565,7 @@ class BaseQuerySet(object): | ||||
|         if self._limit == 0 or self._none: | ||||
|             raise StopIteration | ||||
|  | ||||
|         raw_doc = six.next(self._cursor) | ||||
|         raw_doc = next(self._cursor) | ||||
|  | ||||
|         if self._as_pymongo: | ||||
|             return raw_doc | ||||
| @@ -1812,13 +1810,13 @@ class BaseQuerySet(object): | ||||
|             } | ||||
|         """ | ||||
|         total, data, types = self.exec_js(freq_func, field) | ||||
|         values = {types.get(k): int(v) for k, v in iteritems(data)} | ||||
|         values = {types.get(k): int(v) for k, v in data.items()} | ||||
|  | ||||
|         if normalize: | ||||
|             values = {k: float(v) / total for k, v in values.items()} | ||||
|  | ||||
|         frequencies = {} | ||||
|         for k, v in iteritems(values): | ||||
|         for k, v in values.items(): | ||||
|             if isinstance(k, float): | ||||
|                 if int(k) == k: | ||||
|                     k = int(k) | ||||
|   | ||||
| @@ -1,5 +1,3 @@ | ||||
| import six | ||||
|  | ||||
| from mongoengine.errors import OperationError | ||||
| from mongoengine.queryset.base import ( | ||||
|     BaseQuerySet, | ||||
| @@ -127,8 +125,8 @@ class QuerySet(BaseQuerySet): | ||||
|         # Pull in ITER_CHUNK_SIZE docs from the database and store them in | ||||
|         # the result cache. | ||||
|         try: | ||||
|             for _ in six.moves.range(ITER_CHUNK_SIZE): | ||||
|                 self._result_cache.append(six.next(self)) | ||||
|             for _ in range(ITER_CHUNK_SIZE): | ||||
|                 self._result_cache.append(next(self)) | ||||
|         except StopIteration: | ||||
|             # Getting this exception means there are no more docs in the | ||||
|             # db cursor. Set _has_more to False so that we can use that | ||||
| @@ -180,9 +178,9 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|             return ".. queryset mid-iteration .." | ||||
|  | ||||
|         data = [] | ||||
|         for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): | ||||
|         for _ in range(REPR_OUTPUT_SIZE + 1): | ||||
|             try: | ||||
|                 data.append(six.next(self)) | ||||
|                 data.append(next(self)) | ||||
|             except StopIteration: | ||||
|                 break | ||||
|  | ||||
|   | ||||
| @@ -3,8 +3,6 @@ from collections import defaultdict | ||||
| from bson import ObjectId, SON | ||||
| from bson.dbref import DBRef | ||||
| import pymongo | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base import UPDATE_OPERATORS | ||||
| from mongoengine.common import _import_class | ||||
| @@ -180,7 +178,7 @@ def query(_doc_cls=None, **kwargs): | ||||
|                     "$near" in value_dict or "$nearSphere" in value_dict | ||||
|                 ): | ||||
|                     value_son = SON() | ||||
|                     for k, v in iteritems(value_dict): | ||||
|                     for k, v in value_dict.items(): | ||||
|                         if k == "$maxDistance" or k == "$minDistance": | ||||
|                             continue | ||||
|                         value_son[k] = v | ||||
|   | ||||
| @@ -1,4 +1,3 @@ | ||||
| pymongo>=3.4 | ||||
| six==1.10.0 | ||||
| Sphinx==1.5.5 | ||||
| sphinx-rtd-theme==0.2.4 | ||||
|   | ||||
setup.py
							| @@ -145,7 +145,7 @@ setup( | ||||
|     platforms=["any"], | ||||
|     classifiers=CLASSIFIERS, | ||||
|     python_requires=">=3.5", | ||||
|     install_requires=["pymongo>=3.4, <4.0", "six>=1.10.0"], | ||||
|     install_requires=["pymongo>=3.4, <4.0"], | ||||
|     cmdclass={"test": PyTest}, | ||||
|     **extra_opts | ||||
| ) | ||||
|   | ||||
| @@ -5,7 +5,6 @@ from datetime import datetime | ||||
| from pymongo.collation import Collation | ||||
| from pymongo.errors import OperationFailure | ||||
| import pytest | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| @@ -59,7 +58,7 @@ class TestIndexes(unittest.TestCase): | ||||
|         info = BlogPost.objects._collection.index_information() | ||||
|         # _id, '-date', 'tags', ('cat', 'date') | ||||
|         assert len(info) == 4 | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         for expected in expected_specs: | ||||
|             assert expected["fields"] in info | ||||
|  | ||||
| @@ -87,7 +86,7 @@ class TestIndexes(unittest.TestCase): | ||||
|         # the indices on -date and tags will both contain | ||||
|         # _cls as first element in the key | ||||
|         assert len(info) == 4 | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         for expected in expected_specs: | ||||
|             assert expected["fields"] in info | ||||
|  | ||||
| @@ -102,7 +101,7 @@ class TestIndexes(unittest.TestCase): | ||||
|  | ||||
|         ExtendedBlogPost.ensure_indexes() | ||||
|         info = ExtendedBlogPost.objects._collection.index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         for expected in expected_specs: | ||||
|             assert expected["fields"] in info | ||||
|  | ||||
| @@ -192,7 +191,7 @@ class TestIndexes(unittest.TestCase): | ||||
|         # Indexes are lazy so use list() to perform query | ||||
|         list(Person.objects) | ||||
|         info = Person.objects._collection.index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         assert [("rank.title", 1)] in info | ||||
|  | ||||
|     def test_explicit_geo2d_index(self): | ||||
| @@ -207,7 +206,7 @@ class TestIndexes(unittest.TestCase): | ||||
|  | ||||
|         Place.ensure_indexes() | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         assert [("location.point", "2d")] in info | ||||
|  | ||||
|     def test_explicit_geo2d_index_embedded(self): | ||||
| @@ -227,7 +226,7 @@ class TestIndexes(unittest.TestCase): | ||||
|  | ||||
|         Place.ensure_indexes() | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         assert [("current.location.point", "2d")] in info | ||||
|  | ||||
|     def test_explicit_geosphere_index(self): | ||||
| @@ -244,7 +243,7 @@ class TestIndexes(unittest.TestCase): | ||||
|  | ||||
|         Place.ensure_indexes() | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         assert [("location.point", "2dsphere")] in info | ||||
|  | ||||
|     def test_explicit_geohaystack_index(self): | ||||
| @@ -266,7 +265,7 @@ class TestIndexes(unittest.TestCase): | ||||
|  | ||||
|         Place.ensure_indexes() | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         assert [("location.point", "geoHaystack")] in info | ||||
|  | ||||
|     def test_create_geohaystack_index(self): | ||||
| @@ -279,7 +278,7 @@ class TestIndexes(unittest.TestCase): | ||||
|  | ||||
|         Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) | ||||
|         info = Place._get_collection().index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         assert [("location.point", "geoHaystack"), ("name", 1)] in info | ||||
|  | ||||
|     def test_dictionary_indexes(self): | ||||
| @@ -308,7 +307,7 @@ class TestIndexes(unittest.TestCase): | ||||
|         info = BlogPost.objects._collection.index_information() | ||||
|         info = [ | ||||
|             (value["key"], value.get("unique", False), value.get("sparse", False)) | ||||
|             for key, value in iteritems(info) | ||||
|             for key, value in info.items() | ||||
|         ] | ||||
|         assert ([("addDate", -1)], True, True) in info | ||||
|  | ||||
| @@ -901,7 +900,7 @@ class TestIndexes(unittest.TestCase): | ||||
|             self.fail("Unbound local error at index + pk definition") | ||||
|  | ||||
|         info = BlogPost.objects._collection.index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         index_item = [("_id", 1), ("comments.comment_id", 1)] | ||||
|         assert index_item in info | ||||
|  | ||||
| @@ -942,7 +941,7 @@ class TestIndexes(unittest.TestCase): | ||||
|             meta = {"indexes": ["provider_ids.foo", "provider_ids.bar"]} | ||||
|  | ||||
|         info = MyDoc.objects._collection.index_information() | ||||
|         info = [value["key"] for key, value in iteritems(info)] | ||||
|         info = [value["key"] for key, value in info.items()] | ||||
|         assert [("provider_ids.foo", 1)] in info | ||||
|         assert [("provider_ids.bar", 1)] in info | ||||
|  | ||||
|   | ||||
| @@ -3,7 +3,6 @@ import unittest | ||||
| import warnings | ||||
|  | ||||
| import pytest | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import ( | ||||
|     BooleanField, | ||||
| @@ -550,7 +549,7 @@ class TestInheritance(MongoDBTestCase): | ||||
|         class Human(Mammal): | ||||
|             pass | ||||
|  | ||||
|         for k, v in iteritems(defaults): | ||||
|         for k, v in defaults.items(): | ||||
|             for cls in [Animal, Fish, Guppy]: | ||||
|                 assert cls._meta[k] == v | ||||
|  | ||||
|   | ||||
| @@ -10,7 +10,6 @@ import bson | ||||
| from bson import DBRef, ObjectId | ||||
| from pymongo.errors import DuplicateKeyError | ||||
| import pytest | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine import signals | ||||
| @@ -3274,7 +3273,7 @@ class TestDocumentInstance(MongoDBTestCase): | ||||
|  | ||||
|             def expand(self): | ||||
|                 self.flattened_parameter = {} | ||||
|                 for parameter_name, parameter in iteritems(self.parameters): | ||||
|                 for parameter_name, parameter in self.parameters.items(): | ||||
|                     parameter.expand() | ||||
|  | ||||
|         class NodesSystem(Document): | ||||
| @@ -3282,7 +3281,7 @@ class TestDocumentInstance(MongoDBTestCase): | ||||
|             nodes = MapField(ReferenceField(Node, dbref=False)) | ||||
|  | ||||
|             def save(self, *args, **kwargs): | ||||
|                 for node_name, node in iteritems(self.nodes): | ||||
|                 for node_name, node in self.nodes.items(): | ||||
|                     node.expand() | ||||
|                     node.save(*args, **kwargs) | ||||
|                 super(NodesSystem, self).save(*args, **kwargs) | ||||
|   | ||||
| @@ -3,13 +3,12 @@ import uuid | ||||
|  | ||||
| from bson import Binary | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
| BIN_VALUE = six.b( | ||||
|     "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5" | ||||
| BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode( | ||||
|     "latin-1" | ||||
| ) | ||||
|  | ||||
|  | ||||
| @@ -22,7 +21,7 @@ class TestBinaryField(MongoDBTestCase): | ||||
|             content_type = StringField() | ||||
|             blob = BinaryField() | ||||
|  | ||||
|         BLOB = six.b("\xe6\x00\xc4\xff\x07") | ||||
|         BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1") | ||||
|         MIME_TYPE = "application/octet-stream" | ||||
|  | ||||
|         Attachment.drop_collection() | ||||
| @@ -32,7 +31,7 @@ class TestBinaryField(MongoDBTestCase): | ||||
|  | ||||
|         attachment_1 = Attachment.objects().first() | ||||
|         assert MIME_TYPE == attachment_1.content_type | ||||
|         assert BLOB == six.binary_type(attachment_1.blob) | ||||
|         assert BLOB == bytes(attachment_1.blob) | ||||
|  | ||||
|     def test_validation_succeeds(self): | ||||
|         """Ensure that valid values can be assigned to binary fields. | ||||
| @@ -47,11 +46,11 @@ class TestBinaryField(MongoDBTestCase): | ||||
|         attachment_required = AttachmentRequired() | ||||
|         with pytest.raises(ValidationError): | ||||
|             attachment_required.validate() | ||||
|         attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07")) | ||||
|         attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1")) | ||||
|         attachment_required.validate() | ||||
|  | ||||
|         _5_BYTES = six.b("\xe6\x00\xc4\xff\x07") | ||||
|         _4_BYTES = six.b("\xe6\x00\xc4\xff") | ||||
|         _5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1") | ||||
|         _4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1") | ||||
|         with pytest.raises(ValidationError): | ||||
|             AttachmentSizeLimit(blob=_5_BYTES).validate() | ||||
|         AttachmentSizeLimit(blob=_4_BYTES).validate() | ||||
| @@ -133,7 +132,7 @@ class TestBinaryField(MongoDBTestCase): | ||||
|  | ||||
|         MyDocument.drop_collection() | ||||
|  | ||||
|         bin_data = six.b("\xe6\x00\xc4\xff\x07") | ||||
|         bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1") | ||||
|         doc = MyDocument(bin_field=bin_data).save() | ||||
|  | ||||
|         n_updated = MyDocument.objects(bin_field=bin_data).update_one( | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
| import datetime | ||||
|  | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| try: | ||||
|     import dateutil | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
| import datetime as dt | ||||
|  | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| try: | ||||
|     import dateutil | ||||
|   | ||||
| @@ -7,7 +7,6 @@ from io import BytesIO | ||||
|  | ||||
| import gridfs | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| @@ -58,7 +57,7 @@ class TestFileField(MongoDBTestCase): | ||||
|  | ||||
|         PutFile.drop_collection() | ||||
|  | ||||
|         text = six.b("Hello, World!") | ||||
|         text = "Hello, World!".encode("latin-1") | ||||
|         content_type = "text/plain" | ||||
|  | ||||
|         putfile = PutFile() | ||||
| @@ -101,8 +100,8 @@ class TestFileField(MongoDBTestCase): | ||||
|  | ||||
|         StreamFile.drop_collection() | ||||
|  | ||||
|         text = six.b("Hello, World!") | ||||
|         more_text = six.b("Foo Bar") | ||||
|         text = "Hello, World!".encode("latin-1") | ||||
|         more_text = "Foo Bar".encode("latin-1") | ||||
|         content_type = "text/plain" | ||||
|  | ||||
|         streamfile = StreamFile() | ||||
| @@ -137,8 +136,8 @@ class TestFileField(MongoDBTestCase): | ||||
|  | ||||
|         StreamFile.drop_collection() | ||||
|  | ||||
|         text = six.b("Hello, World!") | ||||
|         more_text = six.b("Foo Bar") | ||||
|         text = "Hello, World!".encode("latin-1") | ||||
|         more_text = "Foo Bar".encode("latin-1") | ||||
|  | ||||
|         streamfile = StreamFile() | ||||
|         streamfile.save() | ||||
| @@ -167,8 +166,8 @@ class TestFileField(MongoDBTestCase): | ||||
|         class SetFile(Document): | ||||
|             the_file = FileField() | ||||
|  | ||||
|         text = six.b("Hello, World!") | ||||
|         more_text = six.b("Foo Bar") | ||||
|         text = "Hello, World!".encode("latin-1") | ||||
|         more_text = "Foo Bar".encode("latin-1") | ||||
|  | ||||
|         SetFile.drop_collection() | ||||
|  | ||||
| @@ -196,7 +195,7 @@ class TestFileField(MongoDBTestCase): | ||||
|         GridDocument.drop_collection() | ||||
|  | ||||
|         with tempfile.TemporaryFile() as f: | ||||
|             f.write(six.b("Hello World!")) | ||||
|             f.write("Hello World!".encode("latin-1")) | ||||
|             f.flush() | ||||
|  | ||||
|             # Test without default | ||||
| @@ -213,7 +212,7 @@ class TestFileField(MongoDBTestCase): | ||||
|             assert doc_b.the_file.grid_id == doc_c.the_file.grid_id | ||||
|  | ||||
|             # Test with default | ||||
|             doc_d = GridDocument(the_file=six.b("")) | ||||
|             doc_d = GridDocument(the_file="".encode("latin-1")) | ||||
|             doc_d.save() | ||||
|  | ||||
|             doc_e = GridDocument.objects.with_id(doc_d.id) | ||||
| @@ -240,7 +239,7 @@ class TestFileField(MongoDBTestCase): | ||||
|         # First instance | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put(six.b("Hello, World!")) | ||||
|         test_file.the_file.put("Hello, World!".encode("latin-1")) | ||||
|         test_file.save() | ||||
|  | ||||
|         # Second instance | ||||
| @@ -297,7 +296,9 @@ class TestFileField(MongoDBTestCase): | ||||
|  | ||||
|         test_file = TestFile() | ||||
|         assert not bool(test_file.the_file) | ||||
|         test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain") | ||||
|         test_file.the_file.put( | ||||
|             "Hello, World!".encode("latin-1"), content_type="text/plain" | ||||
|         ) | ||||
|         test_file.save() | ||||
|         assert bool(test_file.the_file) | ||||
|  | ||||
| @@ -319,7 +320,7 @@ class TestFileField(MongoDBTestCase): | ||||
|         class TestFile(Document): | ||||
|             the_file = FileField() | ||||
|  | ||||
|         text = six.b("Hello, World!") | ||||
|         text = "Hello, World!".encode("latin-1") | ||||
|         content_type = "text/plain" | ||||
|  | ||||
|         testfile = TestFile() | ||||
| @@ -363,7 +364,7 @@ class TestFileField(MongoDBTestCase): | ||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
|  | ||||
|         text = six.b("Bonjour, World!") | ||||
|         text = "Bonjour, World!".encode("latin-1") | ||||
|         testfile.the_file.replace(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
|  | ||||
| @@ -387,7 +388,7 @@ class TestFileField(MongoDBTestCase): | ||||
|         TestImage.drop_collection() | ||||
|  | ||||
|         with tempfile.TemporaryFile() as f: | ||||
|             f.write(six.b("Hello World!")) | ||||
|             f.write("Hello World!".encode("latin-1")) | ||||
|             f.flush() | ||||
|  | ||||
|             t = TestImage() | ||||
| @@ -503,21 +504,21 @@ class TestFileField(MongoDBTestCase): | ||||
|         # First instance | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put(six.b("Hello, World!"), name="hello.txt") | ||||
|         test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt") | ||||
|         test_file.save() | ||||
|  | ||||
|         data = get_db("test_files").macumba.files.find_one() | ||||
|         assert data.get("name") == "hello.txt" | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         assert test_file.the_file.read() == six.b("Hello, World!") | ||||
|         assert test_file.the_file.read() == "Hello, World!".encode("latin-1") | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         test_file.the_file = six.b("HELLO, WORLD!") | ||||
|         test_file.the_file = "Hello, World!".encode("latin-1") | ||||
|         test_file.save() | ||||
|  | ||||
|         test_file = TestFile.objects.first() | ||||
|         assert test_file.the_file.read() == six.b("HELLO, WORLD!") | ||||
|         assert test_file.the_file.read() == "Hello, World!".encode("latin-1") | ||||
|  | ||||
|     def test_copyable(self): | ||||
|         class PutFile(Document): | ||||
| @@ -525,7 +526,7 @@ class TestFileField(MongoDBTestCase): | ||||
|  | ||||
|         PutFile.drop_collection() | ||||
|  | ||||
|         text = six.b("Hello, World!") | ||||
|         text = "Hello, World!".encode("latin-1") | ||||
|         content_type = "text/plain" | ||||
|  | ||||
|         putfile = PutFile() | ||||
|   | ||||
| @@ -1,6 +1,5 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| @@ -52,8 +51,7 @@ class TestFloatField(MongoDBTestCase): | ||||
|  | ||||
|         big_person = BigPerson() | ||||
|  | ||||
|         for value, value_type in enumerate(six.integer_types): | ||||
|             big_person.height = value_type(value) | ||||
|         big_person.height = int(0) | ||||
|         big_person.validate() | ||||
|  | ||||
|         big_person.height = 2 ** 500 | ||||
|   | ||||
| @@ -1,7 +1,5 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
| from bson.int64 import Int64 | ||||
| import six | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| @@ -24,7 +22,7 @@ class TestLongField(MongoDBTestCase): | ||||
|         assert isinstance( | ||||
|             db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64 | ||||
|         ) | ||||
|         assert isinstance(doc.some_long, six.integer_types) | ||||
|         assert isinstance(doc.some_long, int) | ||||
|  | ||||
|     def test_long_validation(self): | ||||
|         """Ensure that invalid values cannot be assigned to long fields. | ||||
|   | ||||
| @@ -10,8 +10,6 @@ import pymongo | ||||
| from pymongo.read_preferences import ReadPreference | ||||
| from pymongo.results import UpdateResult | ||||
| import pytest | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| @@ -4093,7 +4091,7 @@ class TestQueryset(unittest.TestCase): | ||||
|         info = Comment.objects._collection.index_information() | ||||
|         info = [ | ||||
|             (value["key"], value.get("unique", False), value.get("sparse", False)) | ||||
|             for key, value in iteritems(info) | ||||
|             for key, value in info.items() | ||||
|         ] | ||||
|         assert ([("_cls", 1), ("message", 1)], False, False) in info | ||||
|  | ||||
|   | ||||
| @@ -1,7 +1,6 @@ | ||||
| import unittest | ||||
|  | ||||
| import pytest | ||||
| from six import iterkeys | ||||
|  | ||||
| from mongoengine import Document | ||||
| from mongoengine.base.datastructures import BaseDict, BaseList, StrictDict | ||||
| @@ -372,7 +371,7 @@ class TestStrictDict(unittest.TestCase): | ||||
|  | ||||
|     def test_iterkeys(self): | ||||
|         d = self.dtype(a=1) | ||||
|         assert list(iterkeys(d)) == ["a"] | ||||
|         assert list(d.keys()) == ["a"] | ||||
|  | ||||
|     def test_len(self): | ||||
|         d = self.dtype(a=1) | ||||
|   | ||||
| @@ -2,10 +2,8 @@ | ||||
| import unittest | ||||
|  | ||||
| from bson import DBRef, ObjectId | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.context_managers import query_counter | ||||
|  | ||||
|  | ||||
| @@ -739,7 +737,7 @@ class FieldTest(unittest.TestCase): | ||||
|             [m for m in group_obj.members] | ||||
|             assert q == 2 | ||||
|  | ||||
|             for k, m in iteritems(group_obj.members): | ||||
|             for k, m in group_obj.members.items(): | ||||
|                 assert isinstance(m, User) | ||||
|  | ||||
|         # Document select_related | ||||
| @@ -752,7 +750,7 @@ class FieldTest(unittest.TestCase): | ||||
|             [m for m in group_obj.members] | ||||
|             assert q == 2 | ||||
|  | ||||
|             for k, m in iteritems(group_obj.members): | ||||
|             for k, m in group_obj.members.items(): | ||||
|                 assert isinstance(m, User) | ||||
|  | ||||
|         # Queryset select_related | ||||
| @@ -766,7 +764,7 @@ class FieldTest(unittest.TestCase): | ||||
|                 [m for m in group_obj.members] | ||||
|                 assert q == 2 | ||||
|  | ||||
|                 for k, m in iteritems(group_obj.members): | ||||
|                 for k, m in group_obj.members.items(): | ||||
|                     assert isinstance(m, User) | ||||
|  | ||||
|         User.drop_collection() | ||||
| @@ -820,7 +818,7 @@ class FieldTest(unittest.TestCase): | ||||
|             [m for m in group_obj.members] | ||||
|             assert q == 4 | ||||
|  | ||||
|             for k, m in iteritems(group_obj.members): | ||||
|             for k, m in group_obj.members.items(): | ||||
|                 assert "User" in m.__class__.__name__ | ||||
|  | ||||
|         # Document select_related | ||||
| @@ -836,7 +834,7 @@ class FieldTest(unittest.TestCase): | ||||
|             [m for m in group_obj.members] | ||||
|             assert q == 4 | ||||
|  | ||||
|             for k, m in iteritems(group_obj.members): | ||||
|             for k, m in group_obj.members.items(): | ||||
|                 assert "User" in m.__class__.__name__ | ||||
|  | ||||
|         # Queryset select_related | ||||
| @@ -853,7 +851,7 @@ class FieldTest(unittest.TestCase): | ||||
|                 [m for m in group_obj.members] | ||||
|                 assert q == 4 | ||||
|  | ||||
|                 for k, m in iteritems(group_obj.members): | ||||
|                 for k, m in group_obj.members.items(): | ||||
|                     assert "User" in m.__class__.__name__ | ||||
|  | ||||
|         Group.objects.delete() | ||||
| @@ -910,7 +908,7 @@ class FieldTest(unittest.TestCase): | ||||
|             [m for m in group_obj.members] | ||||
|             assert q == 2 | ||||
|  | ||||
|             for k, m in iteritems(group_obj.members): | ||||
|             for k, m in group_obj.members.items(): | ||||
|                 assert isinstance(m, UserA) | ||||
|  | ||||
|         # Document select_related | ||||
| @@ -926,7 +924,7 @@ class FieldTest(unittest.TestCase): | ||||
|             [m for m in group_obj.members] | ||||
|             assert q == 2 | ||||
|  | ||||
|             for k, m in iteritems(group_obj.members): | ||||
|             for k, m in group_obj.members.items(): | ||||
|                 assert isinstance(m, UserA) | ||||
|  | ||||
|         # Queryset select_related | ||||
| @@ -943,7 +941,7 @@ class FieldTest(unittest.TestCase): | ||||
|                 [m for m in group_obj.members] | ||||
|                 assert q == 2 | ||||
|  | ||||
|                 for k, m in iteritems(group_obj.members): | ||||
|                 for k, m in group_obj.members.items(): | ||||
|                     assert isinstance(m, UserA) | ||||
|  | ||||
|         UserA.drop_collection() | ||||
| @@ -997,7 +995,7 @@ class FieldTest(unittest.TestCase): | ||||
|             [m for m in group_obj.members] | ||||
|             assert q == 4 | ||||
|  | ||||
|             for k, m in iteritems(group_obj.members): | ||||
|             for k, m in group_obj.members.items(): | ||||
|                 assert "User" in m.__class__.__name__ | ||||
|  | ||||
|         # Document select_related | ||||
| @@ -1013,7 +1011,7 @@ class FieldTest(unittest.TestCase): | ||||
|             [m for m in group_obj.members] | ||||
|             assert q == 4 | ||||
|  | ||||
|             for k, m in iteritems(group_obj.members): | ||||
|             for k, m in group_obj.members.items(): | ||||
|                 assert "User" in m.__class__.__name__ | ||||
|  | ||||
|         # Queryset select_related | ||||
| @@ -1030,7 +1028,7 @@ class FieldTest(unittest.TestCase): | ||||
|                 [m for m in group_obj.members] | ||||
|                 assert q == 4 | ||||
|  | ||||
|                 for k, m in iteritems(group_obj.members): | ||||
|                 for k, m in group_obj.members.items(): | ||||
|                     assert "User" in m.__class__.__name__ | ||||
|  | ||||
|         Group.objects.delete() | ||||
|   | ||||