diff --git a/python/tvm/relay/frontend/tensorflow.py b/python/tvm/relay/frontend/tensorflow.py
index 5a17d5f7b8f0..cd4e36e28e2a 100644
--- a/python/tvm/relay/frontend/tensorflow.py
+++ b/python/tvm/relay/frontend/tensorflow.py
@@ -306,8 +306,10 @@ def _impl(inputs, attr, params):
         use_bias = len(inputs) == 3
         channel_axis = 1 if attr['data_format'] == "NCHW" else 3
 
+        # Ignore the new attributes from TF2.0, for now.
         out = AttrCvt(
             op_name=_dimension_picker('conv'),
+            ignores=['explicit_paddings'],
             transforms={
                 'kernel_shape': 'kernel_size',
                 'data_format': 'data_layout',
@@ -404,8 +406,9 @@ def _impl(inputs, attr, params):
         # NHWC
         attr['layout'] = 'NHWC'
 
+        # Ignore the new attributes from TF2.0, for now.
         return AttrCvt(op_name="resize",
-                       ignores=['Tdim'],
+                       ignores=['Tdim', 'half_pixel_centers'],
                        extras={'method': "bilinear"})(inputs, attr)
     return _impl
 
diff --git a/python/tvm/relay/testing/tf.py b/python/tvm/relay/testing/tf.py
index a56e6fe1782d..79d0d8257953 100644
--- a/python/tvm/relay/testing/tf.py
+++ b/python/tvm/relay/testing/tf.py
@@ -80,7 +80,7 @@ def AddShapesToGraphDef(session, out_node):
 
     """
 
-    graph_def = tf.graph_util.convert_variables_to_constants(
+    graph_def = tf.compat.v1.graph_util.convert_variables_to_constants(
         session,
         session.graph.as_graph_def(add_shapes=True),
         [out_node],
@@ -112,13 +112,13 @@ def load(self, label_lookup_path, uid_lookup_path):
             dict from integer node ID to human-readable string.
 
         """
-        if not tf.gfile.Exists(uid_lookup_path):
+        if not tf.compat.v1.io.gfile.exists(uid_lookup_path):
             tf.logging.fatal('File does not exist %s', uid_lookup_path)
-        if not tf.gfile.Exists(label_lookup_path):
+        if not tf.compat.v1.io.gfile.exists(label_lookup_path):
             tf.logging.fatal('File does not exist %s', label_lookup_path)
 
         # Loads mapping from string UID to human-readable string
-        proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
+        proto_as_ascii_lines = tf.compat.v1.gfile.GFile(uid_lookup_path).readlines()
         uid_to_human = {}
         p = re.compile(r'[n\d]*[ \S,]*')
         for line in proto_as_ascii_lines:
@@ -129,7 +129,7 @@ def load(self, label_lookup_path, uid_lookup_path):
 
         # Loads mapping from string UID to integer node ID.
         node_id_to_uid = {}
-        proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
+        proto_as_ascii = tf.compat.v1.gfile.GFile(label_lookup_path).readlines()
         for line in proto_as_ascii:
             if line.startswith('  target_class:'):
                 target_class = int(line.split(': ')[1])
@@ -209,7 +209,7 @@ def get_workload(model_path, model_sub_path=None):
         path_model = download_testdata(model_url, model_path, module='tf')
 
     # Creates graph from saved graph_def.pb.
-    with tf.gfile.FastGFile(path_model, 'rb') as f:
+    with tf.compat.v1.gfile.FastGFile(path_model, 'rb') as f:
         graph_def = tf.GraphDef()
         graph_def.ParseFromString(f.read())
         graph = tf.import_graph_def(graph_def, name='')
@@ -299,7 +299,7 @@ def _create_ptb_vocabulary(data_dir):
     file_name = 'ptb.train.txt'
     def _read_words(filename):
         """Read the data for creating vocabulary"""
-        with tf.gfile.GFile(filename, "r") as f:
+        with tf.compat.v1.gfile.GFile(filename, "r") as f:
             return f.read().encode("utf-8").decode("utf-8").replace("\n", "").split()
 
     def _build_vocab(filename):
diff --git a/tutorials/frontend/from_tensorflow.py b/tutorials/frontend/from_tensorflow.py
index 34865f021230..2c109cbaf907 100644
--- a/tutorials/frontend/from_tensorflow.py
+++ b/tutorials/frontend/from_tensorflow.py
@@ -89,14 +89,14 @@
 # ------------
 # Creates tensorflow graph definition from protobuf file.
 
-with tf.gfile.FastGFile(model_path, 'rb') as f:
-    graph_def = tf.GraphDef()
+with tf.compat.v1.gfile.GFile(model_path, 'rb') as f:
+    graph_def = tf.compat.v1.GraphDef()
     graph_def.ParseFromString(f.read())
     graph = tf.import_graph_def(graph_def, name='')
     # Call the utility to import the graph definition into default graph.
     graph_def = tf_testing.ProcessGraphDefParam(graph_def)
     # Add shapes to the graph.
-    with tf.Session() as sess:
+    with tf.compat.v1.Session() as sess:
         graph_def = tf_testing.AddShapesToGraphDef(sess, 'softmax')
 
 ######################################################################
@@ -187,8 +187,8 @@
 def create_graph():
     """Creates a graph from saved GraphDef file and returns a saver."""
     # Creates graph from saved graph_def.pb.
-    with tf.gfile.FastGFile(model_path, 'rb') as f:
-        graph_def = tf.GraphDef()
+    with tf.compat.v1.gfile.GFile(model_path, 'rb') as f:
+        graph_def = tf.compat.v1.GraphDef()
         graph_def.ParseFromString(f.read())
         graph = tf.import_graph_def(graph_def, name='')
         # Call the utility to import the graph definition into default graph.
@@ -206,14 +206,14 @@ def run_inference_on_image(image):
     -------
         Nothing
     """
-    if not tf.gfile.Exists(image):
+    if not tf.compat.v1.io.gfile.exists(image):
         tf.logging.fatal('File does not exist %s', image)
-    image_data = tf.gfile.FastGFile(image, 'rb').read()
+    image_data = tf.compat.v1.gfile.GFile(image, 'rb').read()
 
     # Creates graph from saved GraphDef.
     create_graph()
 
-    with tf.Session() as sess:
+    with tf.compat.v1.Session() as sess:
         softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
         predictions = sess.run(softmax_tensor,
                                {'DecodeJpeg/contents:0': image_data})
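
For reference, below is a minimal, self-contained sketch (not part of the diff) of the tf.compat.v1 loading pattern the changes above converge on; it should behave the same on TF 1.14+ and TF 2.x. The file name 'model.pb' and the tensor names 'input:0' / 'softmax:0' are hypothetical placeholders, not values taken from this patch.

# Sketch only: load a frozen GraphDef through the tf.compat.v1 API so the
# same code runs under both TF 1.x and TF 2.x. 'model.pb', 'input:0' and
# 'softmax:0' are placeholders for your own frozen graph and node names.
import numpy as np
import tensorflow as tf

model_path = 'model.pb'  # hypothetical frozen-graph file

# TF1-style tf.gfile.FastGFile / tf.GraphDef become tf.compat.v1.*
with tf.compat.v1.gfile.GFile(model_path, 'rb') as f:
    graph_def = tf.compat.v1.GraphDef()
    graph_def.ParseFromString(f.read())

# Import into an explicit tf.Graph so the pattern is independent of whether
# eager execution is enabled (the TF 2.x default).
graph = tf.Graph()
with graph.as_default():
    tf.compat.v1.import_graph_def(graph_def, name='')

# Graph-mode execution still goes through tf.compat.v1.Session.
with tf.compat.v1.Session(graph=graph) as sess:
    out = sess.graph.get_tensor_by_name('softmax:0')
    feed = {'input:0': np.zeros((1, 299, 299, 3), dtype='float32')}
    print(sess.run(out, feed))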