nnvm/tests/python/frontend/tensorflow/test_forward.py (2 additions, 2 deletions)
@@ -537,7 +537,7 @@ def _test_split(in_shape, axis, num_or_size_splits, dtype):
num_split = len(num_or_size_splits) if isinstance(num_or_size_splits, list) else num_or_size_splits
tf.split(in_data, num_or_size_splits, axis=axis)

- compare_tf_with_tvm([np_data], ['in_data:0'], [f'split:{n}' for n in range(num_split)])
+ compare_tf_with_tvm([np_data], ['in_data:0'], ['split:{0}'.format(n) for n in range(num_split)])

# and now test together with concat
tf.reset_default_graph()
@@ -586,7 +586,7 @@ def _test_unstack(ip_shape, axis, dtype):
in_data = tf.placeholder(dtype, ip_shape, name="in_data")
tf.unstack(in_data, axis=axis)

- compare_tf_with_tvm([np_data], ['in_data:0'], [f'unstack:{n}' for n in range(ip_shape[axis])])
+ compare_tf_with_tvm([np_data], ['in_data:0'], ['unstack:{0}'.format(n) for n in range(ip_shape[axis])])

tf.reset_default_graph()
in_data = tf.placeholder(dtype, ip_shape, name="in_data")
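The change in this file, and throughout the rest of the diff, is a mechanical rewrite of f-strings into equivalent str.format calls; f-strings are a syntax error on Python versions older than 3.6. A minimal illustrative sketch of the equivalence (not part of the changed files; the values are made up):

# Illustrative only: both spellings build the same output names,
# but only the str.format form parses on Python < 3.6.
num_split = 3
names = ['split:{0}'.format(n) for n in range(num_split)]
assert names == ['split:0', 'split:1', 'split:2']
# Removed f-string form, Python 3.6+ only:
#   [f'split:{n}' for n in range(num_split)]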
python/tvm/relay/_parser.py (9 additions, 8 deletions)
@@ -215,7 +215,7 @@ def mk_var(self, name: str, typ: ty.Type = None):
def mk_global_var(self, name: str) -> expr.GlobalVar:
"""Create a new GlobalVar and add it to the GlobalVar scope."""
if name in self.global_vars:
- raise ParseError(f"duplicate global var \"{name}\"")
+ raise ParseError("duplicate global var \"{0}\"".format(name))
var = expr.GlobalVar(name)
self.global_vars[name] = var
return var
@@ -252,14 +252,15 @@ def _check_existing_typ_expr(self, name, new_expr):
new_typ_name = self._type_expr_name(new_expr)
existing_typ_name = self._type_expr_name(self.global_type_vars[name])
raise ParseError(
f"{new_typ_name} `{name}` conflicts with existing {existing_typ_name}")
"{0} `{1}` conflicts with existing {2}".format(new_typ_name,\
name, existing_typ_name))

def _type_expr_name(self, e):
if isinstance(e, adt.Constructor):
return f"`{e.belong_to.var.name}` ADT constructor"
return "`{0}` ADT constructor".format(e.belong_to.var.name)
elif isinstance(e, ty.GlobalTypeVar):
if e.kind == ty.Kind.AdtHandle:
return f"ADT definition"
return "ADT definition"
return "function definition"

def visitProjection(self, ctx):
@@ -282,7 +283,7 @@ def visitTerminal(self, node) -> Union[expr.Expr, int, float]:
raise ParseError("unrecognized BOOL_LIT: `{}`".format(node_text))
if node_type == RelayLexer.QUOTED_STRING:
return literal_eval(node_text)
- raise ParseError(f"unhandled terminal \"{node_text}\" of type `{node_type}`")
+ raise ParseError("unhandled terminal \"{0}\" of type `{1}`".format(node_text, node_type))

def visitGeneralIdent(self, ctx):
name = ctx.getText()
@@ -310,14 +311,14 @@ def visitGlobalVar(self, ctx):
var_name = ctx.CNAME().getText()
global_var = self.global_vars.get(var_name, None)
if global_var is None:
- raise ParseError(f"unbound global var `{var_name}`")
+ raise ParseError("unbound global var `{0}`".format(var_name))
return global_var

def visitLocalVar(self, ctx):
var_name = ctx.CNAME().getText()
local_var = lookup(self.var_scopes, var_name)
if local_var is None:
- raise ParseError(f"unbound local var `{var_name}`")
+ raise ParseError("unbound local var `{0}`".format(var_name))
return local_var

def visitGraphVar(self, ctx):
@@ -557,7 +558,7 @@ def visitMatch(self, ctx: RelayParser.MatchContext):
elif match_type == "match?":
complete_match = False
else:
- raise RuntimeError(f"unknown match type {match_type}")
+ raise RuntimeError("unknown match type {0}".format(match_type))

match_data = self.visit(ctx.expr())
match_clauses = ctx.matchClauseList()
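One detail of the _check_existing_typ_expr change: the trailing backslash after "format(new_typ_name,\" is optional, since the expression already sits inside the parentheses of the ParseError(...) call and continues implicitly. A small illustrative sketch of the same message construction (sample values are made up):

# Illustrative only: positional str.format with several arguments, as in the
# new ParseError messages; the open parenthesis already allows the line break.
new_typ_name, name, existing_typ_name = "ADT definition", "opt", "function definition"
msg = "{0} `{1}` conflicts with existing {2}".format(
    new_typ_name, name, existing_typ_name)
assert msg == "ADT definition `opt` conflicts with existing function definition"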
python/tvm/relay/memory_alloc.py (9 additions, 9 deletions)
@@ -50,7 +50,7 @@ def _unpack(typ, out):
for field_ty in typ.fields:
_unpack(field_ty, out)
else:
- raise Exception(f"unsupported Relay type: {typ}")
+ raise Exception("unsupported Relay type: {0}".format(typ))

output = []
_unpack(self.typ, output)
@@ -67,7 +67,7 @@ def _pack(value, typ, out):
_pack(value[i], field_ty, tuple_out)
out.append(expr.Tuple(tuple_out))
else:
- raise Exception(f"unsupported Relay type: {typ}")
+ raise Exception("unsupported Relay type: {0}".format(typ))

if len(seq) == 1:
return seq[0]
@@ -144,11 +144,11 @@ def make_static_allocation(self, scope, tensor_type, i):
size = self.compute_storage(tensor_type)
alignment = self.compute_alignment(tensor_type.dtype)
dtype = tensor_type.dtype
- sto = scope.let(f"storage_{i}", self.alloc_storage(
+ sto = scope.let("storage_{0}".format(i), self.alloc_storage(
size, alignment, dtype))
# TODO(@jroesch): There is a bug with typing based on the constant shape.
tensor = self.alloc_tensor(sto, shape, dtype, tensor_type.shape)
- return scope.let(f"tensor_{i}", tensor)
+ return scope.let("tensor_{0}".format(i), tensor)

def visit_let(self, let):
scope = ScopeBuilder()
@@ -192,13 +192,13 @@ def visit_call(self, call):
if state == 2:
sh_of = self.visit(self.shape_of(arg))
shape_func_ins.append(
- scope.let(f"in_shape_{i}", sh_of))
+ scope.let("in_shape_{0}".format(i), sh_of))
is_inputs.append(0)
# Pass Inputs
elif state == 1:
new_arg = self.visit(arg)
shape_func_ins.append(
- scope.let(f"in_shape_{i}", new_arg))
+ scope.let("in_shape_{0}".format(i), new_arg))
is_inputs.append(1)
# TODO(@jroesch): handle 3rd case
else:
@@ -208,7 +208,7 @@ def visit_call(self, call):
for i, out in enumerate(cfunc.outputs):
tt = ty.TensorType(out.shape, out.dtype)
alloc = self.make_static_allocation(scope, tt, i)
- alloc = scope.let(f"shape_func_out_{i}", alloc)
+ alloc = scope.let("shape_func_out_{0}".format(i), alloc)
out_shapes.append(alloc)

shape_call = self.shape_func(
@@ -226,7 +226,7 @@ def visit_call(self, call):
size = self.compute_storage_in_relay(
out_shape, out_type.dtype)
alignment = self.compute_alignment(out_type.dtype)
- sto = scope.let(f"storage_{i}", self.alloc_storage(
+ sto = scope.let("storage_{i}".format(i=i), self.alloc_storage(
size, alignment, out_type.dtype))
storages.append(sto)

@@ -238,7 +238,7 @@ def visit_call(self, call):
out_shape,
out_type.dtype,
out_type.shape)
- alloc = scope.let(f"out_{i}", alloc)
+ alloc = scope.let("out_{i}".format(i=i), alloc)
outs.append(alloc)

invoke = self.invoke_tvm(call.op, ins, expr.Tuple(outs))
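This file ends up mixing two equivalent str.format spellings: positional ("storage_{0}".format(i)) earlier in the file and keyword ("storage_{i}".format(i=i), "out_{i}".format(i=i)) here. Both produce the same let-binding names; a tiny illustrative check, not part of the diff:

# Illustrative only: positional and keyword str.format are interchangeable here.
i = 2
assert "storage_{0}".format(i) == "storage_{i}".format(i=i) == "storage_2"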
rust/frontend/README.md (1 addition, 1 deletion)
@@ -128,7 +128,7 @@ from tvm.contrib import cc

def test_add(target_dir):
if not tvm.module.enabled("cuda"):
print(f"skip {__file__} because cuda is not enabled...")
print("skip {__file__} because cuda is not enabled...".format(__file__=__file__))
return
n = tvm.var("n")
A = tvm.placeholder((n,), name='A')