16 changes: 6 additions & 10 deletions python/tvm/relay/frontend/caffe.py
@@ -133,7 +133,7 @@ def convert_eltwise(self, op):
out = _op.maximum(out, extra_expr)
else:
raise tvm.error.OpNotImplemented(
"eltwise_type {} is not supported for frontend Caffe.".format(eltwise_type)
f"eltwise_type {eltwise_type} is not supported for frontend Caffe."
)

return out
@@ -351,7 +351,7 @@ def convert_conv(self, op):
weight_value = np.asarray(weight.data, np.float32)
weight_value = np.reshape(weight_value, weight_shape)
else:
raise Exception("No weight value of layer {} in caffemodel".format(op.name))
raise Exception(f"No weight value of layer {op.name} in caffemodel")

weight_expr = self.exp_tab.new_const(weight_value, dtype="float32")
in_expr = self.exp_tab.get_expr(inputs[0])
@@ -416,9 +416,7 @@ def convert_pooling(self, op):
out = _op.nn.avg_pool2d(in_expr, **params)
else:
raise tvm.error.OpNotImplemented(
"Operator {} is not supported for frontend Caffe.".format(
pool_type_dict[pool_type] + " pool"
)
f"Operator {pool_type_dict[pool_type]} pool is not supported for frontend Caffe."
)

return out
@@ -465,7 +463,7 @@ def convert_innerproduct(self, op):
weight_value = np.reshape(weight_value, (params["num_output"], -1))
weight_shape = weight_value.shape
else:
raise Exception("No weight value of layer {} in caffemodel".format(op.name))
raise Exception(f"No weight value of layer {op.name} in caffemodel")

weight_expr = self.exp_tab.new_const(weight_value, dtype="float32")

@@ -548,9 +546,7 @@ def convert_deconv(self, op):
# weight shape is in relay's IOHW format rn, we need it to be OIHW
weight_value = np.transpose(weight_value, [1, 0, 2, 3])
else:
raise tvm.error.OpAttributeRequired(
"No weight value of layer {} in caffemodel".format(op.name)
)
raise tvm.error.OpAttributeRequired(f"No weight value of layer {op.name} in caffemodel")

weight_expr = self.exp_tab.new_const(weight_value, dtype="float32")
in_expr = self.exp_tab.get_expr(inputs[0])
@@ -670,7 +666,7 @@ def convert_reduction(self, op):
out = _op.sum(in_expr, axis=axis)
else:
raise tvm.error.OpAttributeInvalid(
"reduction method:{} is invalid in Caffe frontend.".format(method)
f"reduction method:{method} is invalid in Caffe frontend."
)

if float(coeff) != 1.0:
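Every caffe.py hunk above follows the same mechanical pattern: a `str.format()` call is replaced by an equivalent f-string, with the rendered message unchanged. A minimal sketch of the equivalence, reusing the eltwise message from the first hunk (the variable value is invented for illustration):

```python
# Sketch only: the value of eltwise_type is a made-up example.
eltwise_type = "SUM"

old_msg = "eltwise_type {} is not supported for frontend Caffe.".format(eltwise_type)
new_msg = f"eltwise_type {eltwise_type} is not supported for frontend Caffe."

# Both styles render the same text, so the change is behavior-preserving.
assert old_msg == new_msg
```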
24 changes: 10 additions & 14 deletions python/tvm/relay/frontend/caffe2.py
@@ -36,7 +36,7 @@ def _impl(attr):
if len(kernel) == 2:
return prefix + "2d" + surfix
raise tvm.error.OpAttributeUnImplemented(
"Non-2D kernels are not supported for operator {}2d".format(prefix)
f"Non-2D kernels are not supported for operator {prefix}2d"
)

return _impl
@@ -122,7 +122,7 @@ def get_converter(cls):
if hasattr(cls, "_impl"):
return getattr(cls, "_impl")
raise tvm.error.OpNotImplemented(
"Operator {} is not supported in frontend Caffe2.".format(cls.__name__)
f"Operator {cls.__name__} is not supported in frontend Caffe2."
)


@@ -151,7 +151,7 @@ class Elemwise(Caffe2OpConverter):

@classmethod
def _impl(cls, inputs, args, params):
assert len(inputs) == 2, "Math op take 2 inputs, {} given".format(len(inputs))
assert len(inputs) == 2, f"Math op take 2 inputs, {len(inputs)} given"
op_name = cls.name
conv_ops = ["conv2d", "conv2d_transpose"]
if args.get("broadcast", 0) and any(x in str(inputs[0]) for x in conv_ops):
@@ -282,14 +282,12 @@ def _get_axis_from_order_str(order):
if order == "NHWC":
return 3
raise tvm.error.OpAttributeUnImplemented(
"Order {} is not supported in operator Concat.".format(order)
f"Order {order} is not supported in operator Concat."
)

return AttrCvt(
op_name="concatenate",
transforms={
"order": ("axis", (1), _get_axis_from_order_str),
},
transforms={"order": ("axis", (1), _get_axis_from_order_str)},
excludes=["add_axis"],
)((inputs,), args, params)

@@ -498,9 +496,7 @@ def _get_node(self, blob):
if blob in self._nodes:
return self._nodes[blob]

assert blob not in self._visited_nodes, "Cyclic dependency in the graph (in {})".format(
blob
)
assert blob not in self._visited_nodes, f"Cyclic dependency in the graph (in {blob})"
self._visited_nodes.add(blob)

self._process_op(self._ops[blob])
@@ -531,12 +527,12 @@ def _parse_arg(self, arg):
args[a.name] = tuple(getattr(a, f))
for f in ["n"]:
if a.HasField(f):
raise NotImplementedError("Field {} is not supported in relay.".format(f))
raise NotImplementedError(f"Field {f} is not supported in relay.")
for f in ["nets"]:
if list(getattr(a, f)):
raise NotImplementedError("Field {} is not supported in relay.".format(f))
raise NotImplementedError(f"Field {f} is not supported in relay.")
if a.name not in args:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
raise ValueError(f"Cannot parse attribute: \n{a}\n.")
return args

def _convert_operator(self, op_type, inputs, args, identity_list=None, convert_map=None):
@@ -573,7 +569,7 @@ def _convert_operator(self, op_type, inputs, args, identity_list=None, convert_map=None):
func = convert_map[op_type](inputs, args, self._params)
else:
raise tvm.error.OpNotImplemented(
"Operator {} is not supported in frontend Caffe2.".format(op_type)
f"Operator {op_type} is not supported in frontend Caffe2."
)
return func

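Several caffe2.py hunks do slightly more than a one-line swap: once the trailing `.format(...)` call is gone, a message that had been wrapped across lines fits back on a single line, as in `_get_node` above. A hedged sketch of that shape (the blob name and the visited set are invented for illustration):

```python
# Illustrative stand-ins; the real code tracks blobs while walking a Caffe2 graph.
blob = "conv1_w"
visited_nodes = set()

# The old form needed a line break inside the .format() call; the f-string
# version expresses the same check and message in one statement.
assert blob not in visited_nodes, f"Cyclic dependency in the graph (in {blob})"
```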
28 changes: 14 additions & 14 deletions python/tvm/relay/frontend/common.py
@@ -99,7 +99,7 @@ def get_float(self, key, default=RequiredAttr()):
if key in self.attrs:
return float(self.attrs[key])
if isinstance(default, RequiredAttr):
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return default

def get_int(self, key, default=RequiredAttr()):
@@ -123,7 +123,7 @@ def get_int(self, key, default=RequiredAttr()):
return None
return int(val)
if isinstance(default, RequiredAttr):
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return default

def get_str(self, key, default=RequiredAttr()):
@@ -144,7 +144,7 @@ def get_str(self, key, default=RequiredAttr()):
if key in self.attrs:
return self.attrs[key]
if isinstance(default, RequiredAttr):
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return default

def get_int_tuple(self, key, default=RequiredAttr()):
@@ -170,7 +170,7 @@ def get_int_tuple(self, key, default=RequiredAttr()):
if x
)
if isinstance(default, RequiredAttr):
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return default

def get_float_tuple(self, key, default=RequiredAttr()):
@@ -193,7 +193,7 @@ def get_float_tuple(self, key, default=RequiredAttr()):
tshape = self.attrs[key]
return tuple(float(x.strip()) for x in tshape.strip("()[]").split(","))
if isinstance(default, RequiredAttr):
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return default

def get_tuple_tuple_int(self, key, default=RequiredAttr()):
@@ -222,7 +222,7 @@ def get_tuple_tuple_int(self, key, default=RequiredAttr()):
return tuple(seq)

if isinstance(default, RequiredAttr):
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return default

def get_int_list(self, key, default=RequiredAttr()):
@@ -244,7 +244,7 @@ def get_int_list(self, key, default=RequiredAttr()):
tshape = self.attrs[key]
return tuple(int(x.strip()) for x in tshape.strip("[]()").split(","))
if isinstance(default, RequiredAttr):
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return default

def get_bool(self, key, default=RequiredAttr()):
@@ -266,7 +266,7 @@ def get_bool(self, key, default=RequiredAttr()):
val = self.attrs[key]
return val.strip().lower() in ["true", "1", "t", "y", "yes"]
if isinstance(default, RequiredAttr):
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return default


@@ -292,7 +292,7 @@ def get_relay_op(op_name):
if op is not None:
break
if not op:
raise tvm.error.OpNotImplemented("Unable to map op_name {} to relay".format(op_name))
raise tvm.error.OpNotImplemented(f"Unable to map op_name {op_name} to relay")
return op


@@ -307,7 +307,7 @@ def __init__(self):

def new_const(self, value, shape=None, dtype="float32", source_name=None):
"""Construct a new var expr and add to exprs dictionary"""
name = "_param_%d" % (self.const_ctr)
name = f"_param_{self.const_ctr}"
if hasattr(value, "shape"):
shape = value.shape
self.const_ctr += 1
@@ -412,7 +412,7 @@ def __call__(self, inputs, attrs, *args):
if self._custom_check:
func, msg = self._custom_check
if not func(attrs):
raise RuntimeError("Check failed: {}".format(msg))
raise RuntimeError(f"Check failed: {msg}")
# get new op_name
if isinstance(self._op_name, str):
op_name = self._op_name
@@ -465,7 +465,7 @@ def _parse_default(self, target):
else:
k = None # should raise
if not isinstance(k, str):
msg = "{} is not a valid target, (name, default) expected.".format(target)
msg = f"{target} is not a valid target, (name, default) expected."
raise ValueError(msg)
return k, v, t

@@ -479,7 +479,7 @@ def _required_attr(self, attr, key):
"""Wrapper for getting required attributes."""
assert isinstance(attr, dict)
if key not in attr:
raise AttributeError("Required attribute {} not found.".format(key))
raise AttributeError(f"Required attribute {key} not found.")
return attr[key]


@@ -1035,7 +1035,7 @@ def ensure_scalar_shape(x):
return x

num_elem = np.prod(x_shape)
assert num_elem == 1, "Cannot squeeze tensor shape {} to scalar form.".format(x_shape)
assert num_elem == 1, f"Cannot squeeze tensor shape {x_shape} to scalar form."

return _op.squeeze(x)

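The common.py hunks all touch the same idiom: each `get_*` helper takes a `RequiredAttr()` sentinel as its default and raises with an f-string naming the missing key. A simplified, self-contained sketch of that idiom follows; it is an illustrative stand-in, not the actual class in `python/tvm/relay/frontend/common.py`:

```python
class RequiredAttr:
    """Sentinel type: marks an attribute that has no usable default."""


class AttrsDict:
    """Simplified stand-in for the attribute dictionary used by the frontends."""

    def __init__(self, attrs):
        self.attrs = attrs

    def get_float(self, key, default=RequiredAttr()):
        """Return attrs[key] as float, fall back to default, or raise if required."""
        if key in self.attrs:
            return float(self.attrs[key])
        if isinstance(default, RequiredAttr):
            raise AttributeError(f"Required attribute {key} not found.")
        return default


attrs = AttrsDict({"epsilon": "1e-5"})
print(attrs.get_float("epsilon"))        # 1e-05
print(attrs.get_float("momentum", 0.9))  # 0.9, the explicit default wins
# attrs.get_float("gamma") would raise: AttributeError: Required attribute gamma not found.
```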
22 changes: 11 additions & 11 deletions python/tvm/relay/frontend/coreml.py
@@ -183,7 +183,7 @@ def _ActivationParams(op, inexpr, etab):
beta_expr = etab.new_const(beta)
return _op.multiply(_op.log(_op.add(_op.exp(inexpr), beta_expr)), alpha_expr)
raise tvm.error.OpNotImplemented(
"Operator {} is not supported in frontend CoreML.".format(whichActivation)
f"Operator {whichActivation} is not supported in frontend CoreML."
)


@@ -231,9 +231,9 @@ def _PoolingLayerParams(op, inexpr, etab):
params["padding"] = padding
params["ceil_mode"] = True
else:
msg = "PoolingPaddingType {} is not supported in operator Pooling."
op_name = op.WhichOneof("PoolingPaddingType")
raise tvm.error.OpAttributeUnImplemented(msg.format(op_name))
msg = f"PoolingPaddingType {op_name} is not supported in operator Pooling."
raise tvm.error.OpAttributeUnImplemented(msg)

if op.type == 0:
return _op.nn.max_pool2d(inexpr, **params)
@@ -302,7 +302,7 @@ def _PaddingLayerParams(op, inexpr, etab):
constant = op.constant
if constant.value != 0:
raise tvm.error.OpAttributeUnImplemented(
"{} is not supported in operator Padding.".format(constant.value)
f"{constant.value} is not supported in operator Padding."
)
pad_t = op.paddingAmounts.borderAmounts[0].startEdgeSize
pad_l = op.paddingAmounts.borderAmounts[1].startEdgeSize
@@ -391,8 +391,8 @@ def _UnaryFunctionLayerParams(op, inexpr, etab):
alpha = _expr.const(op.alpha)
return _op.maximum(inexpr, alpha)
else:
msg = "Unary Op type value {} is not supported in frontend CoreML."
raise tvm.error.OpAttributeUnImplemented(msg.format(op_type))
msg = f"Unary Op type value {op_type} is not supported in frontend CoreML."
raise tvm.error.OpAttributeUnImplemented(msg)


def _ReduceLayerParams(op, inexpr, etab):
@@ -408,8 +408,8 @@ def _ReduceLayerParams(op, inexpr, etab):
elif axis == op.W:
axis = -1
else:
msg = "Reduce axis value {} is not supported in frontend CoreML."
raise tvm.error.OpAttributeUnImplemented(msg.format(axis))
msg = f"Reduce axis value {axis} is not supported in frontend CoreML."
raise tvm.error.OpAttributeUnImplemented(msg)

mode = op.mode
if mode == op.SUM:
@@ -425,8 +425,8 @@ def _ReduceLayerParams(op, inexpr, etab):
elif mode == op.ARGMAX:
return _op.argmax(inexpr, axis=axis, keepdims=True)
else:
msg = "Reduce mode value {} is not supported in frontend CoreML."
raise tvm.error.OpAttributeUnImplemented(msg.format(mode))
msg = f"Reduce mode value {mode} is not supported in frontend CoreML."
raise tvm.error.OpAttributeUnImplemented(msg)


def _ReshapeLayerParams(op, inexpr, etab):
@@ -511,7 +511,7 @@ def coreml_op_to_relay(op, inname, outnames, etab):
classname = type(op).__name__
if classname not in _convert_map:
raise tvm.error.OpNotImplemented(
"Operator {} is not supported in frontend CoreML.".format(classname)
f"Operator {classname} is not supported in frontend CoreML."
)
if isinstance(inname, _base.string_types):
insym = etab.get_expr(inname)
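One coreml.py hunk, in `_PoolingLayerParams`, is more than a textual substitution: `op_name = op.WhichOneof(...)` now has to run before the message is built, because an f-string interpolates eagerly at the line where it executes, while the old `.format()` call deferred substitution. A small sketch of that ordering difference (the padding-type value is invented):

```python
def describe_old(op_name):
    # The template can be created before op_name is even known...
    msg = "PoolingPaddingType {} is not supported in operator Pooling."
    # ...because substitution only happens at the .format() call.
    return msg.format(op_name)


def describe_new(op_name):
    # The f-string needs op_name to be bound before this line runs,
    # which is why the patch moves the WhichOneof() lookup up.
    msg = f"PoolingPaddingType {op_name} is not supported in operator Pooling."
    return msg


assert describe_old("SamePadding") == describe_new("SamePadding")
```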
9 changes: 4 additions & 5 deletions python/tvm/relay/frontend/darknet.py
@@ -34,8 +34,7 @@

def _darknet_not_support(attr, op="relay"):
"""Raise error if any operation is not supported."""
err = "{} is not supported in {}.".format(attr, op)
raise NotImplementedError(err)
raise NotImplementedError(f"{attr} is not supported in {op}.")


def _get_params_prefix(opname, layer_num):
@@ -51,7 +50,7 @@ def _get_params_name(prefix, item):
def _get_param_var(params, prefix, item):
name = _get_params_name(prefix, item)
if name not in params:
raise AttributeError("{} not found in params dict.".format(name))
raise AttributeError(f"{name} not found in params dict.")
return new_var(name, shape=params[name].shape, dtype=params[name].dtype)


@@ -688,7 +687,7 @@ def _get_darknet_attrs(self, layer, layer_num):
pass

else:
err = "Darknet layer type {} is not supported in relay.".format(layer_type)
err = f"Darknet layer type {layer_type} is not supported in relay."
raise NotImplementedError(err)

return attr
@@ -743,7 +742,7 @@ def _get_opname(self, layer):

def _new_rnn_state_var(self, state=None, name="rnn"):
"""Returs a symbol for state"""
sym_name = name + "%d_state" % self._state_ctr[name]
sym_name = name + f"{self._state_ctr[name]}_state"
self._state_ctr[name] += 1
return new_var(sym_name, shape=state.shape, dtype=str(state.dtype))

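The darknet.py hunks (like `new_const` in common.py) also convert printf-style `%` formatting, which maps onto f-strings the same way. A minimal sketch using the RNN state name from `_new_rnn_state_var`, with an invented counter value:

```python
# Illustrative values; the real counter lives on the frontend's graph object.
name = "rnn"
state_ctr = {"rnn": 2}

old_sym_name = name + "%d_state" % state_ctr[name]   # %-formatting binds before +
new_sym_name = name + f"{state_ctr[name]}_state"

assert old_sym_name == new_sym_name == "rnn2_state"
```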