
Commit

fix
xusuyong committed Dec 13, 2023
1 parent af971cb commit 0cb4fde
Showing 6 changed files with 17 additions and 66 deletions.
4 changes: 2 additions & 2 deletions deepmd/entrypoints/test.py
@@ -890,12 +890,12 @@ def test_dipole(
atomic=atomic,
must=True,
high_prec=False,
type_sel=dp.tselt,
type_sel=dp.get_sel_type(),
)
test_data = data.get_test()
dipole, numb_test, atype = run_test(dp, test_data, numb_test, data)

sel_type = dp.tselt
sel_type = dp.get_sel_type()
sel_natoms = 0
for ii in sel_type:
sel_natoms += sum(atype == ii)
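
For context, the loop in this hunk counts how many atoms carry one of the selected types. A minimal standalone sketch of the same counting, with made-up atype data and a placeholder value for what dp.get_sel_type() returns:

import numpy as np

atype = np.array([0, 0, 1, 1, 1, 0])  # per-atom type indices, illustrative
sel_type = [0]                        # placeholder for dp.get_sel_type()
sel_natoms = 0
for ii in sel_type:
    sel_natoms += sum(atype == ii)
print(sel_natoms)  # prints 3
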
66 changes: 11 additions & 55 deletions deepmd/fit/dipole.py
@@ -67,7 +67,7 @@ def __init__(
) -> None:
super().__init__(name_scope="DipoleFittingSeA")
"""Constructor."""
self.ntypes = descrpt.get_ntypes() # 2
self.ntypes = descrpt.get_ntypes()
self.dim_descrpt = descrpt.get_dim_out()
self.n_neuron = neuron
self.resnet_dt = resnet_dt
@@ -170,72 +170,32 @@ def _build_lower(
[0, start_index, 0],
[rot_mat.shape[0], start_index + natoms, rot_mat.shape[2]],
)
# paddle.slice(rot_mat, [0, start_index, 0], [-1, natoms, -1])
rot_mat_i = paddle.reshape(rot_mat_i, [-1, self.dim_rot_mat_1, 3])
layer = inputs_i
for ii in range(0, len(self.n_neuron)):
if ii >= 1 and self.n_neuron[ii] == self.n_neuron[ii - 1]:
layer += self.one_layers[type_i][ii](layer)
else:
layer = self.one_layers[type_i][ii](layer)
# if ii >= 1 and self.n_neuron[ii] == self.n_neuron[ii - 1]:
# layer += one_layer(
# layer,
# self.n_neuron[ii],
# name="layer_" + str(ii) + suffix,
# reuse=reuse,
# seed=self.seed,
# use_timestep=self.resnet_dt,
# activation_fn=self.fitting_activation_fn,
# precision=self.fitting_precision,
# uniform_seed=self.uniform_seed,
# initial_variables=self.fitting_net_variables,
# mixed_prec=self.mixed_prec,
# )
# else:
# layer = one_layer(
# layer,
# self.n_neuron[ii],
# name="layer_" + str(ii) + suffix,
# reuse=reuse,
# seed=self.seed,
# activation_fn=self.fitting_activation_fn,
# precision=self.fitting_precision,
# uniform_seed=self.uniform_seed,
# initial_variables=self.fitting_net_variables,
# mixed_prec=self.mixed_prec,
# )

if (not self.uniform_seed) and (self.seed is not None):
self.seed += self.seed_shift

# (nframes x natoms) x naxis
final_layer = self.final_layers[type_i](
layer,
)
# # (nframes x natoms) x naxis
# final_layer = one_layer(
# layer,
# self.dim_rot_mat_1,
# activation_fn=None,
# name="final_layer" + suffix,
# reuse=reuse,
# seed=self.seed,
# precision=self.fitting_precision,
# uniform_seed=self.uniform_seed,
# initial_variables=self.fitting_net_variables,
# mixed_prec=self.mixed_prec,
# final_layer=True,
# )

if (not self.uniform_seed) and (self.seed is not None):
self.seed += self.seed_shift
# (nframes x natoms) x 1 * naxis
final_layer = paddle.reshape(
final_layer, [paddle.shape(inputs)[0] * natoms, 1, self.dim_rot_mat_1]
) # natoms=64, self.dim_rot_mat_1=100
)
# (nframes x natoms) x 1 x 3(coord)
final_layer = paddle.matmul(final_layer, rot_mat_i)
# nframes x natoms x 3
final_layer = paddle.reshape(final_layer, [paddle.shape(inputs)[0], natoms, 3])
return final_layer # [1, 64, 3]
return final_layer

def forward(
self,
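
The surviving part of this hunk keeps the reshape, matmul, reshape chain. A shape-check sketch of that contraction with illustrative sizes (the nframes, natoms and dim_rot_mat_1 values are placeholders):

import paddle

nframes, natoms, dim_rot_mat_1 = 2, 4, 8               # illustrative sizes
final_layer = paddle.randn([nframes * natoms, 1, dim_rot_mat_1])
rot_mat_i = paddle.randn([nframes * natoms, dim_rot_mat_1, 3])
out = paddle.matmul(final_layer, rot_mat_i)            # (nframes x natoms) x 1 x 3
out = paddle.reshape(out, [nframes, natoms, 3])        # nframes x natoms x 3
print(out.shape)                                       # [2, 4, 3]
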
@@ -282,7 +242,7 @@ def forward(

if type_embedding is not None:
nloc_mask = paddle.reshape(
paddle.tile(paddle.repeat(self.sel_mask, natoms[2:]), [nframes]),
paddle.tile(paddle.repeat_interleave(self.sel_mask, natoms[2:]), [nframes]),
[nframes, -1],
)
atype_nall = paddle.reshape(atype, [-1, natoms[1]])
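
paddle.repeat_interleave expands each entry of the per-type selection mask by the corresponding atom count. A small sketch with illustrative values (an integer mask stands in for self.sel_mask):

import paddle

sel_mask = paddle.to_tensor([1, 0], dtype="int32")   # type 0 selected, type 1 not; illustrative
counts = paddle.to_tensor([3, 2], dtype="int32")     # atoms per type, illustrative
per_atom = paddle.repeat_interleave(sel_mask, counts)
print(per_atom.numpy())                              # [1 1 1 0 0]
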
@@ -293,7 +253,7 @@ def forward
self.nloc_masked = paddle.shape(
paddle.reshape(self.atype_nloc_masked, [nframes, -1])
)[1]
atype_embed = paddle.nn.embedding_lookup(
atype_embed = nn.embedding_lookup(
type_embedding, self.atype_nloc_masked
)
else:
@@ -304,10 +264,10 @@ def forward
if atype_embed is None:
count = 0
outs_list = []
for type_i in range(self.ntypes): # 2
for type_i in range(self.ntypes):
if type_i not in self.sel_type:
start_index += natoms[2 + type_i]
continue  # sel_type is 0, so the loop runs only once
continue
final_layer = self._build_lower(
start_index,
natoms[2 + type_i],
@@ -321,8 +281,7 @@ def forward
# concat the results
outs_list.append(final_layer)
count += 1

outs = paddle.concat(outs_list, axis=1) # [1, 64, 3]
outs = paddle.concat(outs_list, axis=1)
else:
inputs = paddle.reshape(
paddle.reshape(inputs, [nframes, natoms[0], self.dim_descrpt])[
@@ -349,13 +308,10 @@ def forward
final_layer = self._build_lower(
0, self.nloc_masked, inputs, rot_mat, suffix=suffix, reuse=reuse
)

# nframes x natoms x 3
outs = paddle.reshape(final_layer, [nframes, self.nloc_masked, 3])

# paddle.summary.histogram("fitting_net_output", outs)
return paddle.reshape(outs, [-1])
# return tf.reshape(outs, [tf.shape(inputs)[0] * natoms[0] * 3 // 3])

def init_variables(
self,
5 changes: 2 additions & 3 deletions deepmd/infer/deep_eval.py
@@ -81,7 +81,7 @@ def __init__(
auto_batch_size: Union[bool, int, AutoBatchSize] = False,
):
jdata = j_loader(
"input.json" if os.path.isfile("input.json") else "dipole_input.json"
"input.json" if os.path.exists("input.json") else "dipole_input.json"
)
remove_comment_in_json(jdata)
model_param = j_must_have(jdata, "model")
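
The change above only relaxes the check from isfile to exists when picking the config file. The same fallback in isolation (file names taken from the diff; run it from a directory that holds either file):

import os

config_path = "input.json" if os.path.exists("input.json") else "dipole_input.json"
print(config_path)
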
@@ -150,7 +150,7 @@ def __init__(
fitting_param.pop("type", None)
fitting = dipole.DipoleFittingSeA(**fitting_param)
else:
pass
raise NotImplementedError()
else:
self.fitting_dict = {}
self.fitting_type_dict = {}
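
Both this file and deepmd/train/trainer.py replace the silent pass with an explicit error. A minimal sketch of that pattern (the helper name and message are illustrative):

def build_fitting(fitting_type: str):
    # Only the dipole fitting net is wired up here; anything else fails loudly.
    if fitting_type == "dipole":
        return object()  # stand-in for dipole.DipoleFittingSeA(**fitting_param)
    raise NotImplementedError(f"fitting type {fitting_type!r} is not supported")
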
@@ -361,7 +361,6 @@ def __init__(
@property
@lru_cache(maxsize=None)
def model_type(self) -> str:

return self.model.model_type
"""Get type of model.
1 change: 0 additions & 1 deletion deepmd/loss/tensor.py
@@ -174,7 +174,6 @@ def eval(self, model, batch_data, natoms):
)

l2_l, l2_more = self.compute_loss(
# 0.0, natoms, model_dict, batch_data
0.0,
model_inputs["natoms_vec"],
model_pred,
4 changes: 1 addition & 3 deletions deepmd/model/tensor.py
@@ -154,9 +154,7 @@ def forward(

rot_mat = self.descrpt.get_rot_mat()
rot_mat = paddle.clone(rot_mat, name="o_rot_mat" + suffix)
# rot_mat = paddle.fluid.layers.assign(rot_mat, name="o_rot_mat" + suffix)
# rot_mat = paddle.tensor.clone(rot_mat, name="o_rot_mat" + suffix)


output = self.fitting(
dout, rot_mat, natoms, input_dict, reuse=reuse, suffix=suffix
)
3 changes: 1 addition & 2 deletions deepmd/train/trainer.py
@@ -142,7 +142,7 @@ def _init_param(self, jdata):
fitting_param.pop("type")
self.fitting = dipole.DipoleFittingSeA(**fitting_param)
else:
pass
raise NotImplementedError
else:
self.fitting_dict = {}
self.fitting_type_dict = {}
@@ -804,7 +804,6 @@ def train(self, train_data=None, valid_data=None, stop_batch: int = 10):
cur_batch = self.global_step
is_first_step = True
self.cur_batch = cur_batch

self.optimizer = paddle.optimizer.Adam(
learning_rate=self.learning_rate, parameters=self.model.parameters()
)
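
The optimizer construction in this hunk, shown standalone with a toy model in place of self.model and an illustrative learning rate:

import paddle

model = paddle.nn.Linear(4, 3)                       # stand-in for self.model
optimizer = paddle.optimizer.Adam(
    learning_rate=1e-3,                              # illustrative; the trainer passes its own schedule
    parameters=model.parameters(),
)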

