Merge pull request #2004 from bagerard/fix_iteritems_itervalues_23_compat

Refactored iteritems/itervalues usage to improve Python 2/3 compatibility.
erdenezul 2019-02-23 22:46:21 +08:00 committed by GitHub
commit d65861cdf7
16 changed files with 87 additions and 71 deletions
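
For context, the change applied throughout the diff below is mechanical: direct dict.iteritems() / dict.itervalues() calls, which exist only on Python 2, are replaced with the six helpers, which dispatch to iteritems()/itervalues() on Python 2 and to items()/values() on Python 3. A minimal sketch of the pattern, using made-up field data rather than anything from the patch:

    import six

    fields = {'name': 'StringField', 'age': 'IntField'}

    # fields.iteritems() raises AttributeError on Python 3, where dicts
    # only have items()/values(); the six helpers work on both interpreters.
    for key, value in six.iteritems(fields):
        print(key, value)

    for value in six.itervalues(fields):
        print(value)

The patch imports the helpers directly (from six import iteritems, itervalues), so the call sites stay close to their original spelling.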

View File

@@ -2,6 +2,7 @@ import weakref
 from bson import DBRef
 import six
+from six import iteritems

 from mongoengine.common import _import_class
 from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
@@ -363,7 +364,7 @@ class StrictDict(object):
     _classes = {}

     def __init__(self, **kwargs):
-        for k, v in kwargs.iteritems():
+        for k, v in iteritems(kwargs):
             setattr(self, k, v)

     def __getitem__(self, key):
@@ -411,7 +412,7 @@ class StrictDict(object):
         return (key for key in self.__slots__ if hasattr(self, key))

     def __len__(self):
-        return len(list(self.iteritems()))
+        return len(list(iteritems(self)))

     def __eq__(self, other):
         return self.items() == other.items()

View File

@@ -5,6 +5,7 @@ from functools import partial
 from bson import DBRef, ObjectId, SON, json_util
 import pymongo
 import six
+from six import iteritems

 from mongoengine import signals
 from mongoengine.base.common import get_document
@@ -83,7 +84,7 @@ class BaseDocument(object):
         self._dynamic_fields = SON()

         # Assign default values to instance
-        for key, field in self._fields.iteritems():
+        for key, field in iteritems(self._fields):
             if self._db_field_map.get(key, key) in __only_fields:
                 continue
             value = getattr(self, key, None)
@@ -95,14 +96,14 @@ class BaseDocument(object):
         # Set passed values after initialisation
         if self._dynamic:
             dynamic_data = {}
-            for key, value in values.iteritems():
+            for key, value in iteritems(values):
                 if key in self._fields or key == '_id':
                     setattr(self, key, value)
                 else:
                     dynamic_data[key] = value
         else:
             FileField = _import_class('FileField')
-            for key, value in values.iteritems():
+            for key, value in iteritems(values):
                 key = self._reverse_db_field_map.get(key, key)
                 if key in self._fields or key in ('id', 'pk', '_cls'):
                     if __auto_convert and value is not None:
@@ -118,7 +119,7 @@ class BaseDocument(object):
         if self._dynamic:
             self._dynamic_lock = False
-            for key, value in dynamic_data.iteritems():
+            for key, value in iteritems(dynamic_data):
                 setattr(self, key, value)

         # Flag initialised
@@ -513,7 +514,7 @@ class BaseDocument(object):
         if not hasattr(data, 'items'):
             iterator = enumerate(data)
         else:
-            iterator = data.iteritems()
+            iterator = iteritems(data)

         for index_or_key, value in iterator:
             item_key = '%s%s.' % (base_key, index_or_key)
@@ -678,7 +679,7 @@ class BaseDocument(object):
         # Convert SON to a data dict, making sure each key is a string and
         # corresponds to the right db field.
         data = {}
-        for key, value in son.iteritems():
+        for key, value in iteritems(son):
             key = str(key)
             key = cls._db_field_map.get(key, key)
             data[key] = value
@@ -694,7 +695,7 @@ class BaseDocument(object):
         if not _auto_dereference:
             fields = copy.deepcopy(fields)

-        for field_name, field in fields.iteritems():
+        for field_name, field in iteritems(fields):
             field._auto_dereference = _auto_dereference
             if field.db_field in data:
                 value = data[field.db_field]
@@ -715,7 +716,7 @@ class BaseDocument(object):
         # In STRICT documents, remove any keys that aren't in cls._fields
         if cls.STRICT:
-            data = {k: v for k, v in data.iteritems() if k in cls._fields}
+            data = {k: v for k, v in iteritems(data) if k in cls._fields}

         obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
         obj._changed_fields = changed_fields

View File

@@ -5,6 +5,7 @@ import weakref
 from bson import DBRef, ObjectId, SON
 import pymongo
 import six
+from six import iteritems

 from mongoengine.base.common import UPDATE_OPERATORS
 from mongoengine.base.datastructures import (BaseDict, BaseList,
@@ -382,11 +383,11 @@ class ComplexBaseField(BaseField):
         if self.field:
             value_dict = {
                 key: self.field._to_mongo_safe_call(item, use_db_field, fields)
-                for key, item in value.iteritems()
+                for key, item in iteritems(value)
             }
         else:
             value_dict = {}
-            for k, v in value.iteritems():
+            for k, v in iteritems(value):
                 if isinstance(v, Document):
                     # We need the id from the saved object to create the DBRef
                     if v.pk is None:
@@ -423,7 +424,7 @@ class ComplexBaseField(BaseField):
         errors = {}
         if self.field:
             if hasattr(value, 'iteritems') or hasattr(value, 'items'):
-                sequence = value.iteritems()
+                sequence = iteritems(value)
             else:
                 sequence = enumerate(value)
             for k, v in sequence:

View File

@@ -1,6 +1,7 @@
 import warnings

 import six
+from six import iteritems, itervalues

 from mongoengine.base.common import _document_registry
 from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
@@ -62,7 +63,7 @@ class DocumentMetaclass(type):
             # Standard object mixin - merge in any Fields
             if not hasattr(base, '_meta'):
                 base_fields = {}
-                for attr_name, attr_value in base.__dict__.iteritems():
+                for attr_name, attr_value in iteritems(base.__dict__):
                     if not isinstance(attr_value, BaseField):
                         continue
                     attr_value.name = attr_name
@@ -74,7 +75,7 @@ class DocumentMetaclass(type):
         # Discover any document fields
         field_names = {}
-        for attr_name, attr_value in attrs.iteritems():
+        for attr_name, attr_value in iteritems(attrs):
             if not isinstance(attr_value, BaseField):
                 continue
             attr_value.name = attr_name
@@ -103,7 +104,7 @@ class DocumentMetaclass(type):
         attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
                                          (v.creation_counter, v.name)
-                                         for v in doc_fields.itervalues()))
+                                         for v in itervalues(doc_fields)))

         #
         # Set document hierarchy
@@ -173,7 +174,7 @@ class DocumentMetaclass(type):
                 f.__dict__.update({'im_self': getattr(f, '__self__')})

         # Handle delete rules
-        for field in new_class._fields.itervalues():
+        for field in itervalues(new_class._fields):
             f = field
             if f.owner_document is None:
                 f.owner_document = new_class
@@ -375,7 +376,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):
             new_class.objects = QuerySetManager()

         # Validate the fields and set primary key if needed
-        for field_name, field in new_class._fields.iteritems():
+        for field_name, field in iteritems(new_class._fields):
             if field.primary_key:
                 # Ensure only one primary key is set
                 current_pk = new_class._meta.get('id_field')
@@ -438,7 +439,7 @@ class MetaDict(dict):
     _merge_options = ('indexes',)

     def merge(self, new_options):
-        for k, v in new_options.iteritems():
+        for k, v in iteritems(new_options):
             if k in self._merge_options:
                 self[k] = self.get(k, []) + v
             else:

View File

@@ -1,9 +1,11 @@
 from contextlib import contextmanager

 from pymongo.write_concern import WriteConcern
+from six import iteritems

 from mongoengine.common import _import_class
 from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db

 __all__ = ('switch_db', 'switch_collection', 'no_dereference',
            'no_sub_classes', 'query_counter', 'set_write_concern')
@@ -112,7 +114,7 @@ class no_dereference(object):
         GenericReferenceField = _import_class('GenericReferenceField')
         ComplexBaseField = _import_class('ComplexBaseField')

-        self.deref_fields = [k for k, v in self.cls._fields.iteritems()
+        self.deref_fields = [k for k, v in iteritems(self.cls._fields)
                              if isinstance(v, (ReferenceField,
                                                GenericReferenceField,
                                                ComplexBaseField))]

View File

@@ -1,5 +1,6 @@
 from bson import DBRef, SON
 import six
+from six import iteritems

 from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList,
                               TopLevelDocumentMetaclass, get_document)
@@ -71,7 +72,7 @@ class DeReference(object):
         def _get_items_from_dict(items):
             new_items = {}
-            for k, v in items.iteritems():
+            for k, v in iteritems(items):
                 value = v
                 if isinstance(v, list):
                     value = _get_items_from_list(v)
@@ -112,7 +113,7 @@ class DeReference(object):
         depth += 1
         for item in iterator:
             if isinstance(item, (Document, EmbeddedDocument)):
-                for field_name, field in item._fields.iteritems():
+                for field_name, field in iteritems(item._fields):
                     v = item._data.get(field_name, None)
                     if isinstance(v, LazyReference):
                         # LazyReference inherits DBRef but should not be dereferenced here !
@@ -124,7 +125,7 @@ class DeReference(object):
                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                         references = self._find_references(v, depth)
-                        for key, refs in references.iteritems():
+                        for key, refs in iteritems(references):
                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                                 key = field_cls
                             reference_map.setdefault(key, set()).update(refs)
@@ -137,7 +138,7 @@ class DeReference(object):
                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id)
             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                 references = self._find_references(item, depth - 1)
-                for key, refs in references.iteritems():
+                for key, refs in iteritems(references):
                     reference_map.setdefault(key, set()).update(refs)

         return reference_map
@@ -146,7 +147,7 @@ class DeReference(object):
         """Fetch all references and convert to their document objects
         """
         object_map = {}
-        for collection, dbrefs in self.reference_map.iteritems():
+        for collection, dbrefs in iteritems(self.reference_map):

             # we use getattr instead of hasattr because hasattr swallows any exception under python2
             # so it could hide nasty things without raising exceptions (cfr bug #1688))
@@ -157,7 +158,7 @@ class DeReference(object):
                 refs = [dbref for dbref in dbrefs
                         if (col_name, dbref) not in object_map]
                 references = collection.objects.in_bulk(refs)
-                for key, doc in references.iteritems():
+                for key, doc in iteritems(references):
                     object_map[(col_name, key)] = doc
             else:  # Generic reference: use the refs data to convert to document
                 if isinstance(doc_type, (ListField, DictField, MapField)):
@@ -229,7 +230,7 @@ class DeReference(object):
             data = []
         else:
             is_list = False
-            iterator = items.iteritems()
+            iterator = iteritems(items)
             data = {}

         depth += 1

View File

@@ -5,6 +5,7 @@ from bson.dbref import DBRef
 import pymongo
 from pymongo.read_preferences import ReadPreference
 import six
+from six import iteritems

 from mongoengine import signals
 from mongoengine.base import (BaseDict, BaseDocument, BaseList,
@@ -613,7 +614,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)):

         # Delete FileFields separately
         FileField = _import_class('FileField')
-        for name, field in self._fields.iteritems():
+        for name, field in iteritems(self._fields):
             if isinstance(field, FileField):
                 getattr(self, name).delete()

View File

@@ -1,6 +1,7 @@
 from collections import defaultdict

 import six
+from six import iteritems

 __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
@@ -113,7 +114,7 @@ class ValidationError(AssertionError):
                 return errors_dict

             if isinstance(source, dict):
-                for field_name, error in source.iteritems():
+                for field_name, error in iteritems(source):
                     errors_dict[field_name] = build_dict(error)
             elif isinstance(source, ValidationError) and source.errors:
                 return build_dict(source.errors)
@@ -135,12 +136,12 @@ class ValidationError(AssertionError):
                 value = ' '.join([generate_key(k) for k in value])
             elif isinstance(value, dict):
                 value = ' '.join(
-                    [generate_key(v, k) for k, v in value.iteritems()])
+                    [generate_key(v, k) for k, v in iteritems(value)])

             results = '%s.%s' % (prefix, value) if prefix else value
             return results

         error_dict = defaultdict(list)
-        for k, v in self.to_dict().iteritems():
+        for k, v in iteritems(self.to_dict()):
             error_dict[generate_key(v)].append(k)
-        return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()])
+        return ' '.join(['%s: %s' % (k, v) for k, v in iteritems(error_dict)])

View File

@@ -11,6 +11,7 @@ from bson import Binary, DBRef, ObjectId, SON
 import gridfs
 import pymongo
 import six
+from six import iteritems

 try:
     import dateutil
@@ -794,12 +795,12 @@ class DynamicField(BaseField):
             value = {k: v for k, v in enumerate(value)}

         data = {}
-        for k, v in value.iteritems():
+        for k, v in iteritems(value):
             data[k] = self.to_mongo(v, use_db_field, fields)

         value = data
         if is_list:  # Convert back to a list
-            value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))]
+            value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))]

         return value

     def to_python(self, value):

View File

@@ -12,6 +12,7 @@ import pymongo
 import pymongo.errors
 from pymongo.common import validate_read_preference
 import six
+from six import iteritems

 from mongoengine import signals
 from mongoengine.base import get_document
@@ -1739,13 +1740,13 @@ class BaseQuerySet(object):
             }
         """
         total, data, types = self.exec_js(freq_func, field)
-        values = {types.get(k): int(v) for k, v in data.iteritems()}
+        values = {types.get(k): int(v) for k, v in iteritems(data)}

         if normalize:
             values = {k: float(v) / total for k, v in values.items()}

         frequencies = {}
-        for k, v in values.iteritems():
+        for k, v in iteritems(values):
             if isinstance(k, float):
                 if int(k) == k:
                     k = int(k)

View File

@@ -4,6 +4,7 @@ from bson import ObjectId, SON
 from bson.dbref import DBRef
 import pymongo
 import six
+from six import iteritems

 from mongoengine.base import UPDATE_OPERATORS
 from mongoengine.common import _import_class
@@ -154,7 +155,7 @@ def query(_doc_cls=None, **kwargs):
            if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \
                    ('$near' in value_dict or '$nearSphere' in value_dict):
                value_son = SON()
-                for k, v in value_dict.iteritems():
+                for k, v in iteritems(value_dict):
                    if k == '$maxDistance' or k == '$minDistance':
                        continue
                    value_son[k] = v

View File

@@ -5,6 +5,7 @@ from datetime import datetime
 from nose.plugins.skip import SkipTest
 from pymongo.errors import OperationFailure
 import pymongo
+from six import iteritems

 from mongoengine import *
 from mongoengine.connection import get_db
@@ -68,7 +69,7 @@ class IndexesTest(unittest.TestCase):
         info = BlogPost.objects._collection.index_information()
         # _id, '-date', 'tags', ('cat', 'date')
         self.assertEqual(len(info), 4)
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         for expected in expected_specs:
             self.assertIn(expected['fields'], info)
@@ -100,7 +101,7 @@ class IndexesTest(unittest.TestCase):
         # the indices on -date and tags will both contain
         # _cls as first element in the key
         self.assertEqual(len(info), 4)
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         for expected in expected_specs:
             self.assertIn(expected['fields'], info)
@@ -115,7 +116,7 @@ class IndexesTest(unittest.TestCase):
         ExtendedBlogPost.ensure_indexes()
         info = ExtendedBlogPost.objects._collection.index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         for expected in expected_specs:
             self.assertIn(expected['fields'], info)
@@ -225,7 +226,7 @@ class IndexesTest(unittest.TestCase):
         # Indexes are lazy so use list() to perform query
         list(Person.objects)
         info = Person.objects._collection.index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         self.assertIn([('rank.title', 1)], info)

     def test_explicit_geo2d_index(self):
@@ -245,7 +246,7 @@ class IndexesTest(unittest.TestCase):
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         self.assertIn([('location.point', '2d')], info)

     def test_explicit_geo2d_index_embedded(self):
@@ -268,7 +269,7 @@ class IndexesTest(unittest.TestCase):
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         self.assertIn([('current.location.point', '2d')], info)

     def test_explicit_geosphere_index(self):
@@ -288,7 +289,7 @@ class IndexesTest(unittest.TestCase):
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         self.assertIn([('location.point', '2dsphere')], info)

     def test_explicit_geohaystack_index(self):
@@ -310,7 +311,7 @@ class IndexesTest(unittest.TestCase):
         Place.ensure_indexes()
         info = Place._get_collection().index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         self.assertIn([('location.point', 'geoHaystack')], info)

     def test_create_geohaystack_index(self):
@@ -322,7 +323,7 @@ class IndexesTest(unittest.TestCase):
         Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10)
         info = Place._get_collection().index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         self.assertIn([('location.point', 'geoHaystack'), ('name', 1)], info)

     def test_dictionary_indexes(self):
@@ -355,7 +356,7 @@ class IndexesTest(unittest.TestCase):
         info = [(value['key'],
                  value.get('unique', False),
                  value.get('sparse', False))
-                for key, value in info.iteritems()]
+                for key, value in iteritems(info)]
         self.assertIn(([('addDate', -1)], True, True), info)

         BlogPost.drop_collection()
@@ -576,7 +577,7 @@ class IndexesTest(unittest.TestCase):
             else:
                 self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10)

-        self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME ).count(), 10)
+        self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10)

         with self.assertRaises(Exception):
             BlogPost.objects.hint(('tags', 1)).next()
@@ -806,7 +807,7 @@ class IndexesTest(unittest.TestCase):
             self.fail('Unbound local error at index + pk definition')

         info = BlogPost.objects._collection.index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         index_item = [('_id', 1), ('comments.comment_id', 1)]
         self.assertIn(index_item, info)
@@ -854,7 +855,7 @@ class IndexesTest(unittest.TestCase):
         }

         info = MyDoc.objects._collection.index_information()
-        info = [value['key'] for key, value in info.iteritems()]
+        info = [value['key'] for key, value in iteritems(info)]
         self.assertIn([('provider_ids.foo', 1)], info)
         self.assertIn([('provider_ids.bar', 1)], info)
@@ -936,7 +937,6 @@ class IndexesTest(unittest.TestCase):
         # Drop the temporary database at the end
         connection.drop_database('tempdatabase')

-
     def test_index_dont_send_cls_option(self):
         """
         Ensure that 'cls' option is not sent through ensureIndex. We shouldn't

View File

@@ -2,6 +2,8 @@
 import unittest
 import warnings

+from six import iteritems
+
 from mongoengine import (BooleanField, Document, EmbeddedDocument,
                          EmbeddedDocumentField, GenericReferenceField,
                          IntField, ReferenceField, StringField, connect)
@@ -485,7 +487,7 @@ class InheritanceTest(unittest.TestCase):
             meta = {'abstract': True}

         class Human(Mammal): pass

-        for k, v in defaults.iteritems():
+        for k, v in iteritems(defaults):
             for cls in [Animal, Fish, Guppy]:
                 self.assertEqual(cls._meta[k], v)

View File

@@ -4,13 +4,13 @@ import os
 import pickle
 import unittest
 import uuid
+import warnings
 import weakref
 from datetime import datetime
-import warnings

 from bson import DBRef, ObjectId
 from pymongo.errors import DuplicateKeyError
+from six import iteritems

 from tests import fixtures
 from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest,
@@ -3060,7 +3060,7 @@ class InstanceTest(MongoDBTestCase):

             def expand(self):
                 self.flattened_parameter = {}
-                for parameter_name, parameter in self.parameters.iteritems():
+                for parameter_name, parameter in iteritems(self.parameters):
                     parameter.expand()

         class NodesSystem(Document):
@@ -3068,7 +3068,7 @@ class InstanceTest(MongoDBTestCase):
             nodes = MapField(ReferenceField(Node, dbref=False))

             def save(self, *args, **kwargs):
-                for node_name, node in self.nodes.iteritems():
+                for node_name, node in iteritems(self.nodes):
                     node.expand()
                     node.save(*args, **kwargs)
                 super(NodesSystem, self).save(*args, **kwargs)

View File

@@ -12,6 +12,7 @@ from pymongo.errors import ConfigurationError
 from pymongo.read_preferences import ReadPreference
 from pymongo.results import UpdateResult
 import six
+from six import iteritems

 from mongoengine import *
 from mongoengine.connection import get_connection, get_db
@@ -4039,7 +4040,7 @@ class QuerySetTest(unittest.TestCase):
         info = [(value['key'],
                  value.get('unique', False),
                  value.get('sparse', False))
-                for key, value in info.iteritems()]
+                for key, value in iteritems(info)]
         self.assertIn(([('_cls', 1), ('message', 1)], False, False), info)

     def test_where(self):

View File

@@ -2,6 +2,7 @@
 import unittest

 from bson import DBRef, ObjectId
+from six import iteritems

 from mongoengine import *
 from mongoengine.connection import get_db
@@ -632,7 +633,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
+            for k, m in iteritems(group_obj.members):
                 self.assertIsInstance(m, User)

         # Document select_related
@@ -645,7 +646,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
+            for k, m in iteritems(group_obj.members):
                 self.assertIsInstance(m, User)

         # Queryset select_related
@@ -659,7 +660,7 @@ class FieldTest(unittest.TestCase):
                 [m for m in group_obj.members]
                 self.assertEqual(q, 2)

-                for k, m in group_obj.members.iteritems():
+                for k, m in iteritems(group_obj.members):
                     self.assertIsInstance(m, User)

         User.drop_collection()
@@ -714,7 +715,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
+            for k, m in iteritems(group_obj.members):
                 self.assertIn('User', m.__class__.__name__)

         # Document select_related
@@ -730,7 +731,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
+            for k, m in iteritems(group_obj.members):
                 self.assertIn('User', m.__class__.__name__)

         # Queryset select_related
@@ -747,7 +748,7 @@ class FieldTest(unittest.TestCase):
                 [m for m in group_obj.members]
                 self.assertEqual(q, 4)

-                for k, m in group_obj.members.iteritems():
+                for k, m in iteritems(group_obj.members):
                     self.assertIn('User', m.__class__.__name__)

         Group.objects.delete()
@@ -805,7 +806,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
+            for k, m in iteritems(group_obj.members):
                 self.assertIsInstance(m, UserA)

         # Document select_related
@@ -821,7 +822,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 2)

-            for k, m in group_obj.members.iteritems():
+            for k, m in iteritems(group_obj.members):
                 self.assertIsInstance(m, UserA)

         # Queryset select_related
@@ -838,7 +839,7 @@ class FieldTest(unittest.TestCase):
                 [m for m in group_obj.members]
                 self.assertEqual(q, 2)

-                for k, m in group_obj.members.iteritems():
+                for k, m in iteritems(group_obj.members):
                     self.assertIsInstance(m, UserA)

         UserA.drop_collection()
@@ -893,7 +894,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
+            for k, m in iteritems(group_obj.members):
                 self.assertIn('User', m.__class__.__name__)

         # Document select_related
@@ -909,7 +910,7 @@ class FieldTest(unittest.TestCase):
             [m for m in group_obj.members]
             self.assertEqual(q, 4)

-            for k, m in group_obj.members.iteritems():
+            for k, m in iteritems(group_obj.members):
                 self.assertIn('User', m.__class__.__name__)

         # Queryset select_related
@@ -926,7 +927,7 @@ class FieldTest(unittest.TestCase):
                [m for m in group_obj.members]
                self.assertEqual(q, 4)

-                for k, m in group_obj.members.iteritems():
+                for k, m in iteritems(group_obj.members):
                    self.assertIn('User', m.__class__.__name__)

         Group.objects.delete()
@@ -1064,7 +1065,6 @@ class FieldTest(unittest.TestCase):
         self.assertEqual(msg.author, user)
         self.assertEqual(msg.author.name, 'new-name')

-
     def test_list_lookup_not_checked_in_map(self):
         """Ensure we dereference list data correctly
         """
@@ -1286,5 +1286,6 @@ class FieldTest(unittest.TestCase):
             self.assertEqual(q, 2)

+
 if __name__ == '__main__':
     unittest.main()