From 94fa3e0390f7b7765d35961a508f5cd141cc7f10 Mon Sep 17 00:00:00 2001 From: Duo <50307526+iProzd@users.noreply.github.com> Date: Sat, 16 Sep 2023 02:26:09 +0800 Subject: [PATCH 1/2] support type embedding changing for frozen .pb --- deepmd/infer/deep_eval.py | 43 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/deepmd/infer/deep_eval.py b/deepmd/infer/deep_eval.py index 3f5dede1ad..1380be6f49 100644 --- a/deepmd/infer/deep_eval.py +++ b/deepmd/infer/deep_eval.py @@ -64,6 +64,7 @@ def __init__( input_map=input_map, ) self.load_prefix = load_prefix + self.model_file = model_file # graph_compatable should be called after graph and prefix are set if not self._graph_compatable(): @@ -360,3 +361,45 @@ def eval_typeebd(self) -> np.ndarray: t_typeebd = self._get_tensor("t_typeebd:0") [typeebd] = run_sess(self.sess, [t_typeebd], feed_dict={}) return typeebd + + def update_typeebd( + self, new_typeebd: np.ndarray, save_path: str + ): + """Change the type embedding of this model and then save to a new one. + + Parameters + ---------- + new_typeebd + The new type embedding to replace the old one. + save_path + The output file to save the new model. 
+ + Examples + -------- + Change the type embedding of `graph.pb` and save the new model in `graph_new.pb`: + + >>> from deepmd.infer import DeepPotential + >>> dp = DeepPotential('graph.pb') + >>> new_tebd = dp.eval_typeebd() # or some np.ndarray that has the same shape as new_tebd + >>> dp.update_typeebd(new_tebd, 'graph_new.pb') + """ + with tf.gfile.GFile(self.model_file, "rb") as f: + graph_def = tf.GraphDef() + graph_def.ParseFromString(f.read()) + for node in graph_def.node: + if node.name in ['type_embed_net/matrix_1']: + t_matrix = node.attr["value"].tensor + old_typeebd = tf.make_ndarray(t_matrix) + required_shape = (old_typeebd.shape[0] + 1, old_typeebd.shape[1]) + assert required_shape == new_typeebd.shape, \ + f"The input type embedding should has shape {required_shape}, but got {new_typeebd.shape} instead!" + new_typeebd = new_typeebd[:-1].astype(old_typeebd.dtype) + new_typeebd_tensor_pb = tf.make_tensor_proto(new_typeebd) + node.attr["value"].tensor.CopyFrom(new_typeebd_tensor_pb) + elif node.name in ['type_embed_net/bias_1']: + t_bias = node.attr["value"].tensor + old_bias = tf.make_ndarray(t_bias) + new_bias_tensor_pb = tf.make_tensor_proto(np.zeros_like(old_bias)) + node.attr["value"].tensor.CopyFrom(new_bias_tensor_pb) + with tf.gfile.GFile(save_path, "wb") as f: + f.write(graph_def.SerializeToString()) From 08556fe270e5e503686bbcd627e5a6945380718a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Sep 2023 18:27:03 +0000 Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- deepmd/infer/deep_eval.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/deepmd/infer/deep_eval.py b/deepmd/infer/deep_eval.py index 1380be6f49..0724686ba3 100644 --- a/deepmd/infer/deep_eval.py +++ b/deepmd/infer/deep_eval.py @@ -362,9 +362,7 @@ def eval_typeebd(self) -> np.ndarray: [typeebd] =
run_sess(self.sess, [t_typeebd], feed_dict={}) return typeebd - def update_typeebd( - self, new_typeebd: np.ndarray, save_path: str - ): + def update_typeebd(self, new_typeebd: np.ndarray, save_path: str): """Change the type embedding of this model and then save to a new one. Parameters @@ -387,16 +385,17 @@ def update_typeebd( graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) for node in graph_def.node: - if node.name in ['type_embed_net/matrix_1']: + if node.name in ["type_embed_net/matrix_1"]: t_matrix = node.attr["value"].tensor old_typeebd = tf.make_ndarray(t_matrix) required_shape = (old_typeebd.shape[0] + 1, old_typeebd.shape[1]) - assert required_shape == new_typeebd.shape, \ - f"The input type embedding should has shape {required_shape}, but got {new_typeebd.shape} instead!" + assert ( + required_shape == new_typeebd.shape + ), f"The input type embedding should have shape {required_shape}, but got {new_typeebd.shape} instead!" new_typeebd = new_typeebd[:-1].astype(old_typeebd.dtype) new_typeebd_tensor_pb = tf.make_tensor_proto(new_typeebd) node.attr["value"].tensor.CopyFrom(new_typeebd_tensor_pb) - elif node.name in ['type_embed_net/bias_1']: + elif node.name in ["type_embed_net/bias_1"]: t_bias = node.attr["value"].tensor old_bias = tf.make_ndarray(t_bias) new_bias_tensor_pb = tf.make_tensor_proto(np.zeros_like(old_bias))