autoformat with pyupgrade
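This commit applies pyupgrade across the package: str.format() calls whose arguments can be inlined are rewritten as f-strings, and multi-line signatures and calls gain a trailing comma after the final **kwargs-style argument. pyupgrade is typically invoked per file, e.g. pyupgrade --py36-plus <paths>; the exact invocation is not recorded here. A minimal, illustrative sketch of the f-string rewrite (made-up names, not taken from the hunks below):

    name = "world"
    old = "Hello, {}!".format(name)  # pre-pyupgrade spelling
    new = f"Hello, {name}!"          # the equivalent f-string form
    assert old == new                # both produce the same string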
@@ -67,11 +67,11 @@ class BaseDict(dict):
         if isinstance(value, EmbeddedDocument) and value._instance is None:
             value._instance = self._instance
         elif isinstance(value, dict) and not isinstance(value, BaseDict):
-            value = BaseDict(value, None, "{}.{}".format(self._name, key))
+            value = BaseDict(value, None, f"{self._name}.{key}")
             super().__setitem__(key, value)
             value._instance = self._instance
         elif isinstance(value, list) and not isinstance(value, BaseList):
-            value = BaseList(value, None, "{}.{}".format(self._name, key))
+            value = BaseList(value, None, f"{self._name}.{key}")
             super().__setitem__(key, value)
             value._instance = self._instance
         return value
@@ -97,7 +97,7 @@ class BaseDict(dict):
     def _mark_as_changed(self, key=None):
         if hasattr(self._instance, "_mark_as_changed"):
             if key:
-                self._instance._mark_as_changed("{}.{}".format(self._name, key))
+                self._instance._mark_as_changed(f"{self._name}.{key}")
             else:
                 self._instance._mark_as_changed(self._name)

@@ -133,12 +133,12 @@ class BaseList(list):
             value._instance = self._instance
         elif isinstance(value, dict) and not isinstance(value, BaseDict):
             # Replace dict by BaseDict
-            value = BaseDict(value, None, "{}.{}".format(self._name, key))
+            value = BaseDict(value, None, f"{self._name}.{key}")
             super().__setitem__(key, value)
             value._instance = self._instance
         elif isinstance(value, list) and not isinstance(value, BaseList):
             # Replace list by BaseList
-            value = BaseList(value, None, "{}.{}".format(self._name, key))
+            value = BaseList(value, None, f"{self._name}.{key}")
             super().__setitem__(key, value)
             value._instance = self._instance
         return value
@@ -429,7 +429,7 @@ class StrictDict:

                 def __repr__(self):
                     return "{%s}" % ", ".join(
-                        '"{!s}": {!r}'.format(k, v) for k, v in self.items()
+                        f'"{k!s}": {v!r}' for k, v in self.items()
                     )

             cls._classes[allowed_keys] = SpecificStrictDict
@@ -472,4 +472,4 @@ class LazyReference(DBRef):
             raise AttributeError()

     def __repr__(self):
-        return "<LazyReference({}, {!r})>".format(self.document_type, self.pk)
+        return f"<LazyReference({self.document_type}, {self.pk!r})>"
@@ -275,7 +275,7 @@ class BaseDocument:
         except (UnicodeEncodeError, UnicodeDecodeError):
             u = "[Bad Unicode data]"
         repr_type = str if u is None else type(u)
-        return repr_type("<{}: {}>".format(self.__class__.__name__, u))
+        return repr_type(f"<{self.__class__.__name__}: {u}>")

     def __str__(self):
         # TODO this could be simpler?
@@ -431,7 +431,7 @@ class BaseDocument:
                 pk = self.pk
             elif self._instance and hasattr(self._instance, "pk"):
                 pk = self._instance.pk
-            message = "ValidationError ({}:{}) ".format(self._class_name, pk)
+            message = f"ValidationError ({self._class_name}:{pk}) "
             raise ValidationError(message, errors=errors)

     def to_json(self, *args, **kwargs):
@@ -504,7 +504,7 @@ class BaseDocument:
         if "." in key:
             key, rest = key.split(".", 1)
             key = self._db_field_map.get(key, key)
-            key = "{}.{}".format(key, rest)
+            key = f"{key}.{rest}"
         else:
             key = self._db_field_map.get(key, key)

@@ -600,7 +600,7 @@ class BaseDocument:
             iterator = data.items()

         for index_or_key, value in iterator:
-            item_key = "{}{}.".format(base_key, index_or_key)
+            item_key = f"{base_key}{index_or_key}."
             # don't check anything lower if this key is already marked
             # as changed.
             if item_key[:-1] in changed_fields:
@@ -608,7 +608,7 @@ class BaseDocument:

             if hasattr(value, "_get_changed_fields"):
                 changed = value._get_changed_fields()
-                changed_fields += ["{}{}".format(item_key, k) for k in changed if k]
+                changed_fields += [f"{item_key}{k}" for k in changed if k]
             elif isinstance(value, (list, tuple, dict)):
                 BaseDocument._nestable_types_changed_fields(
                     changed_fields, item_key, value
@@ -640,7 +640,7 @@ class BaseDocument:
             if isinstance(data, EmbeddedDocument):
                 # Find all embedded fields that have been changed
                 changed = data._get_changed_fields()
-                changed_fields += ["{}{}".format(key, k) for k in changed if k]
+                changed_fields += [f"{key}{k}" for k in changed if k]
             elif isinstance(data, (list, tuple, dict)):
                 if hasattr(field, "field") and isinstance(
                     field.field, (ReferenceField, GenericReferenceField)
@@ -792,9 +792,7 @@ class BaseDocument:
                     errors_dict[field_name] = e

         if errors_dict:
-            errors = "\n".join(
-                ["Field '{}' - {}".format(k, v) for k, v in errors_dict.items()]
-            )
+            errors = "\n".join([f"Field '{k}' - {v}" for k, v in errors_dict.items()])
             msg = "Invalid data to create a `{}` instance.\n{}".format(
                 cls._class_name,
                 errors,
@@ -965,10 +963,7 @@ class BaseDocument:
                 unique_fields += unique_with

                 # Add the new index to the list
-                fields = [
-                    ("{}{}".format(namespace, f), pymongo.ASCENDING)
-                    for f in unique_fields
-                ]
+                fields = [(f"{namespace}{f}", pymongo.ASCENDING) for f in unique_fields]
                 index = {"fields": fields, "unique": True, "sparse": sparse}
                 unique_indexes.append(index)

@@ -1024,7 +1019,7 @@ class BaseDocument:
             elif field._geo_index:
                 field_name = field.db_field
                 if parent_field:
-                    field_name = "{}.{}".format(parent_field, field_name)
+                    field_name = f"{parent_field}.{field_name}"
                 geo_indices.append({"fields": [(field_name, field._geo_index)]})

         return geo_indices
@@ -40,7 +40,7 @@ class BaseField:
         choices=None,
         null=False,
         sparse=False,
-        **kwargs
+        **kwargs,
     ):
         """
         :param db_field: The database field to store this field in
@@ -465,9 +465,7 @@ class ComplexBaseField(BaseField):

             if errors:
                 field_class = self.field.__class__.__name__
-                self.error(
-                    "Invalid {} item ({})".format(field_class, value), errors=errors
-                )
+                self.error(f"Invalid {field_class} item ({value})", errors=errors)
         # Don't allow empty values if required
         if self.required and not value:
             self.error("Field is required and cannot be empty")
@@ -537,7 +535,7 @@ class GeoJsonBaseField(BaseField):
         if isinstance(value, dict):
             if set(value.keys()) == {"type", "coordinates"}:
                 if value["type"] != self._type:
-                    self.error('{} type must be "{}"'.format(self._name, self._type))
+                    self.error(f'{self._name} type must be "{self._type}"')
                 return self.validate(value["coordinates"])
             else:
                 self.error(
@@ -439,8 +439,8 @@ class TopLevelDocumentMetaclass(DocumentMetaclass):

         id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0)
         for i in itertools.count():
-            id_name = "{}_{}".format(id_basename, i)
-            id_db_name = "{}_{}".format(id_db_basename, i)
+            id_name = f"{id_basename}_{i}"
+            id_db_name = f"{id_db_basename}_{i}"
             if id_name not in existing_fields and id_db_name not in existing_db_fields:
                 return id_name, id_db_name
@@ -54,7 +54,7 @@ def _get_connection_settings(
     password=None,
     authentication_source=None,
     authentication_mechanism=None,
-    **kwargs
+    **kwargs,
 ):
     """Get the connection settings as a dict

@@ -177,7 +177,7 @@ def register_connection(
     password=None,
     authentication_source=None,
     authentication_mechanism=None,
-    **kwargs
+    **kwargs,
 ):
     """Register the connection settings.

@@ -210,7 +210,7 @@ def register_connection(
         password=password,
         authentication_source=authentication_source,
         authentication_mechanism=authentication_mechanism,
-        **kwargs
+        **kwargs,
     )
     _connection_settings[alias] = conn_settings

@@ -313,7 +313,7 @@ def _create_connection(alias, connection_class, **connection_settings):
     try:
         return connection_class(**connection_settings)
     except Exception as e:
-        raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e))
+        raise ConnectionFailure(f"Cannot connect to database {alias} :\n{e}")


 def _find_existing_connection(connection_settings):
@@ -271,12 +271,12 @@ class DeReference:
                             (v["_ref"].collection, v["_ref"].id), v
                         )
                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
-                        item_name = "{}.{}.{}".format(name, k, field_name)
+                        item_name = f"{name}.{k}.{field_name}"
                         data[k]._data[field_name] = self._attach_objects(
                             v, depth, instance=instance, name=item_name
                         )
             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
-                item_name = "{}.{}".format(name, k) if name else name
+                item_name = f"{name}.{k}" if name else name
                 data[k] = self._attach_objects(
                     v, depth - 1, instance=instance, name=item_name
                 )
@@ -94,7 +94,7 @@ class ValidationError(AssertionError):
         return str(self.message)

     def __repr__(self):
-        return "{}({},)".format(self.__class__.__name__, self.message)
+        return f"{self.__class__.__name__}({self.message},)"

     def __getattribute__(self, name):
         message = super().__getattribute__(name)
@@ -102,7 +102,7 @@ class ValidationError(AssertionError):
             if self.field_name:
                 message = "%s" % message
             if self.errors:
-                message = "{}({})".format(message, self._format_errors())
+                message = f"{message}({self._format_errors()})"
         return message

     def _get_message(self):
@@ -147,13 +147,13 @@ class ValidationError(AssertionError):
             elif isinstance(value, dict):
                 value = " ".join([generate_key(v, k) for k, v in value.items()])

-            results = "{}.{}".format(prefix, value) if prefix else value
+            results = f"{prefix}.{value}" if prefix else value
             return results

         error_dict = defaultdict(list)
         for k, v in self.to_dict().items():
             error_dict[generate_key(v)].append(k)
-        return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()])
+        return " ".join([f"{k}: {v}" for k, v in error_dict.items()])


 class DeprecatedError(Exception):
@@ -189,11 +189,11 @@ class URLField(StringField):
         # Check first if the scheme is valid
         scheme = value.split("://")[0].lower()
         if scheme not in self.schemes:
-            self.error("Invalid scheme {} in URL: {}".format(scheme, value))
+            self.error(f"Invalid scheme {scheme} in URL: {value}")

         # Then check full URL
         if not self.url_regex.match(value):
-            self.error("Invalid URL: {}".format(value))
+            self.error(f"Invalid URL: {value}")


 class EmailField(StringField):
@@ -233,7 +233,7 @@ class EmailField(StringField):
         allow_utf8_user=False,
         allow_ip_domain=False,
         *args,
-        **kwargs
+        **kwargs,
     ):
         """
         :param domain_whitelist: (optional) list of valid domain names applied during validation
@@ -440,7 +440,7 @@ class DecimalField(BaseField):
         force_string=False,
         precision=2,
         rounding=decimal.ROUND_HALF_UP,
-        **kwargs
+        **kwargs,
     ):
         """
         :param min_value: (optional) A min value that will be applied during validation
@@ -1337,7 +1337,7 @@ class CachedReferenceField(BaseField):
             return None

         update_kwargs = {
-            "set__{}__{}".format(self.name, key): val
+            f"set__{self.name}__{key}": val
             for key, val in document._delta()[0].items()
             if key in self.fields
         }
@@ -1739,12 +1739,12 @@ class GridFSProxy:
         return self.__copy__()

     def __repr__(self):
-        return "<{}: {}>".format(self.__class__.__name__, self.grid_id)
+        return f"<{self.__class__.__name__}: {self.grid_id}>"

     def __str__(self):
         gridout = self.get()
         filename = getattr(gridout, "filename") if gridout else "<no file>"
-        return "<{}: {} ({})>".format(self.__class__.__name__, filename, self.grid_id)
+        return f"<{self.__class__.__name__}: {filename} ({self.grid_id})>"

     def __eq__(self, other):
         if isinstance(other, GridFSProxy):
@@ -2120,7 +2120,7 @@ class SequenceField(BaseField):
         sequence_name=None,
         value_decorator=None,
         *args,
-        **kwargs
+        **kwargs,
     ):
         self.collection_name = collection_name or self.COLLECTION_NAME
         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
@@ -2135,7 +2135,7 @@ class SequenceField(BaseField):
         Generate and Increment the counter
         """
         sequence_name = self.get_sequence_name()
-        sequence_id = "{}.{}".format(sequence_name, self.name)
+        sequence_id = f"{sequence_name}.{self.name}"
         collection = get_db(alias=self.db_alias)[self.collection_name]

         counter = collection.find_one_and_update(
@@ -2149,7 +2149,7 @@ class SequenceField(BaseField):
     def set_next_value(self, value):
         """Helper method to set the next sequence value"""
         sequence_name = self.get_sequence_name()
-        sequence_id = "{}.{}".format(sequence_name, self.name)
+        sequence_id = f"{sequence_name}.{self.name}"
         collection = get_db(alias=self.db_alias)[self.collection_name]
         counter = collection.find_one_and_update(
             filter={"_id": sequence_id},
@@ -2166,7 +2166,7 @@ class SequenceField(BaseField):
         as it is only fixed on set.
         """
         sequence_name = self.get_sequence_name()
-        sequence_id = "{}.{}".format(sequence_name, self.name)
+        sequence_id = f"{sequence_name}.{self.name}"
         collection = get_db(alias=self.db_alias)[self.collection_name]
         data = collection.find_one({"_id": sequence_id})

@@ -2427,7 +2427,7 @@ class LazyReferenceField(BaseField):
         passthrough=False,
         dbref=False,
         reverse_delete_rule=DO_NOTHING,
-        **kwargs
+        **kwargs,
     ):
         """Initialises the Reference Field.
@@ -422,7 +422,7 @@ class BaseQuerySet:
         count = count_documents(
             collection=self._cursor.collection,
             filter=self._cursor._Cursor__spec,
-            **kwargs
+            **kwargs,
         )

         self._cursor_obj = None
@@ -526,7 +526,7 @@ class BaseQuerySet:
         write_concern=None,
         read_concern=None,
         full_result=False,
-        **update
+        **update,
    ):
         """Perform an atomic update on the fields matched by the query.

@@ -603,7 +603,7 @@ class BaseQuerySet:
             write_concern=write_concern,
             read_concern=read_concern,
             full_result=True,
-            **update
+            **update,
         )

         if atomic_update.raw_result["updatedExisting"]:
@@ -634,7 +634,7 @@ class BaseQuerySet:
             multi=False,
             write_concern=write_concern,
             full_result=full_result,
-            **update
+            **update,
         )

     def modify(
@@ -692,7 +692,7 @@ class BaseQuerySet:
                 upsert=upsert,
                 sort=sort,
                 return_document=return_doc,
-                **self._cursor_args
+                **self._cursor_args,
             )
         except pymongo.errors.DuplicateKeyError as err:
             raise NotUniqueError("Update failed (%s)" % err)
@@ -1194,7 +1194,7 @@ class BaseQuerySet:
             preference.
         """
         if read_concern is not None and not isinstance(read_concern, Mapping):
-            raise TypeError("%r is not a valid read concern." % (read_concern,))
+            raise TypeError(f"{read_concern!r} is not a valid read concern.")

         queryset = self.clone()
         queryset._read_concern = (