Skip to content

Commit

Permalink
refine code
Browse files Browse the repository at this point in the history
  • Loading branch information
HydrogenSulfate committed Nov 26, 2023
1 parent 0af71a0 commit 4689924
Show file tree
Hide file tree
Showing 3 changed files with 0 additions and 91 deletions.
14 changes: 0 additions & 14 deletions deepmd/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,20 +364,6 @@ def get_module(module_name: str) -> "ModuleType":
import paddle_deepmd_lib
from paddle.utils import cpp_extension

# module = cpp_extension.load(
# name="paddle_custom_ops",
# sources=[
# "/workspace/hesensen/deepmd-kit/source/op/paddle/neighbor_stat.cc",
# "/workspace/hesensen/deepmd-kit/source/op/paddle/prod_env_mat.cc",
# ],
# extra_include_paths=[
# "/workspace/hesensen/deepmd-kit/source/lib/include/",
# "/usr/local/cuda/lib64/",
# "/workspace/hesensen/deepmd-kit/source/op/paddle/cub/",
# "/usr/local/cuda/include/"
# ],
# verbose=True,
# )
module = paddle_deepmd_lib

except tf.errors.NotFoundError as e:
Expand Down
9 changes: 0 additions & 9 deletions deepmd/utils/neighbor_stat.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,6 @@
from deepmd.env import default_tf_session_config
from deepmd.env import op_module
from deepmd.env import tf

# from paddle.utils import cpp_extension
# op_module = cpp_extension.load(
# name="custom_op_paddle2",
# sources=["/workspace/hesensen/deepmd-kit/source/op/paddle/neighbor_stat.cc"],
# extra_include_paths=["/workspace/hesensen/deepmd-kit/source/lib/include/","/usr/local/cuda/targets/x86_64-linux/include/", "/workspace/hesensen/deepmd-kit/source/op"],
# # extra_library_paths=["../build/lib/", "/usr/local/cuda/lib64"],
# verbose=True,
# )
from deepmd.utils.data_system import DeepmdDataSystem
from deepmd.utils.parallel_op import ParallelOp

Expand Down
68 changes: 0 additions & 68 deletions deepmd/utils/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -430,8 +430,6 @@ def __init__(
),
)
)
# print(outputs_size[ii-1], precision, False, trainable, outputs_size[ii]+outputs_size[ii-1])
# exit()
bias.append(
self.create_parameter(
shape=[1, outputs_size[ii]],
Expand Down Expand Up @@ -460,91 +458,25 @@ def __init__(
self.idt = paddle.nn.ParameterList(idt)

def forward(self, xx):
    """Forward pass of the embedding network.

    Applies a stack of tanh-activated linear layers with ResNet-style
    skip connections. The first layer's output replaces the input; every
    subsequent layer doubles the width, so its residual is formed by
    concatenating the running tensor with itself before adding the
    hidden output. This generalizes the previous manually-unrolled
    version (hard-coded widths 25 / 50 / 100) to any number of layers
    whose widths follow the same doubling pattern, and drops the dead
    commented-out debug code.

    Parameters
    ----------
    xx : paddle.Tensor
        Input tensor; its last dimension must match the input size of
        ``self.weight[0]``.

    Returns
    -------
    paddle.Tensor
        Tensor of shape ``[-1, W]`` where ``W`` is the width of the
        final layer (100 in the original 25/50/100 configuration).
    """
    for layer, (weight, bias) in enumerate(zip(self.weight, self.bias)):
        # Output width of this layer; bias is created with shape
        # [1, outputs_size[ii]] in __init__, so its second dim is the
        # width — replaces the previously hard-coded 25/50/100.
        width = bias.shape[1]
        hidden = nn.functional.tanh(
            nn.functional.linear(xx, weight, bias)
        ).reshape([-1, width])
        if layer == 0:
            # First layer: no residual, hidden simply replaces the input.
            xx = hidden
        else:
            # NOTE(review): assumes each later layer doubles the width
            # (as in the 25->50->100 setup this replaces); the residual
            # is the running tensor concatenated with itself.
            xx = paddle.concat([xx, xx], axis=1) + hidden
    return xx

0 comments on commit 4689924

Please sign in to comment.