Commit

remove old code cuz merge
HydrogenSulfate committed Nov 28, 2023
1 parent 9fc9a67 commit 0439995
Showing 5 changed files with 3 additions and 78 deletions.
3 changes: 0 additions & 3 deletions deepmd/descriptor/se_a.py
@@ -444,9 +444,6 @@ def merge_input_stats(self, stat_dict):
         self.davg = np.array(all_davg)
         self.dstd = np.array(all_dstd)
 
-        self.t_avg = paddle.to_tensor(self.davg, dtype=GLOBAL_NP_FLOAT_PRECISION)
-        self.t_std = paddle.to_tensor(self.dstd, dtype=GLOBAL_NP_FLOAT_PRECISION)
-
     def enable_compression(
         self,
         min_nbor_dist: float,
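
The two deleted assignments were the usual numpy-to-Paddle conversion of the merged descriptor statistics, dropped here as leftovers from the merge. A minimal, self-contained sketch of that conversion pattern (the dtype constant and array shapes are assumptions for illustration, not taken from the diff):

import numpy as np
import paddle

GLOBAL_NP_FLOAT_PRECISION = np.float64  # assumed stand-in for deepmd.env's constant

davg = np.zeros([2, 4], dtype=GLOBAL_NP_FLOAT_PRECISION)  # hypothetical avg stats
dstd = np.ones([2, 4], dtype=GLOBAL_NP_FLOAT_PRECISION)   # hypothetical std stats

# numpy statistics -> paddle tensors, as the removed lines did
t_avg = paddle.to_tensor(davg, dtype=GLOBAL_NP_FLOAT_PRECISION)
t_std = paddle.to_tensor(dstd, dtype=GLOBAL_NP_FLOAT_PRECISION)
print(t_avg.dtype, t_std.shape)  # paddle.float64 [2, 4]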
65 changes: 0 additions & 65 deletions deepmd/fit/ener.py
@@ -191,71 +191,6 @@ def __init__(
         self.aparam_std = None
         self.aparam_inv_std = None
 
-        emenets = []
-        for type_i in range(self.ntypes):
-            layers = []
-            for ii in range(0, len(self.n_neuron)):
-                if ii >= 1 and self.n_neuron[ii] == self.n_neuron[ii - 1]:
-                    layers.append(
-                        OneLayer(
-                            self.n_neuron[ii - 1],
-                            self.n_neuron[ii],
-                            name="layer_" + str(ii) + "_type_" + str(type_i),
-                            seed=self.seed,
-                            use_timestep=self.resnet_dt,
-                            activation_fn=self.fitting_activation_fn,
-                            precision=self.fitting_precision,
-                            trainable=self.trainable[ii],
-                        )
-                    )
-                else:
-                    layers.append(
-                        OneLayer(
-                            self.dim_descrpt + self.numb_fparam + self.numb_aparam,
-                            self.n_neuron[ii],
-                            name="layer_" + str(ii) + "_type_" + str(type_i),
-                            seed=self.seed,
-                            activation_fn=self.fitting_activation_fn,
-                            precision=self.fitting_precision,
-                            trainable=self.trainable[ii],
-                        )
-                    )
-            layers.append(
-                OneLayer(
-                    self.n_neuron[-1],
-                    1,
-                    name="final_layer_type_" + str(type_i),
-                    seed=self.seed,
-                    activation_fn=None,
-                    precision=self.fitting_precision,
-                    trainable=self.trainable[ii],
-                )
-            )
-
-            emenets.append(paddle.nn.LayerList(layers))
-        self.ElementNets = paddle.nn.LayerList(emenets)
-
-        self.t_dfparam = paddle.to_tensor(self.numb_fparam, dtype="int32")
-        self.t_daparam = paddle.to_tensor(self.numb_aparam, dtype="int32")
-
-        # stat fparam
-        if self.numb_fparam > 0:
-            self.t_fparam_avg = paddle.to_tensor(
-                np.zeros([1, self.numb_fparam]), dtype=GLOBAL_PD_FLOAT_PRECISION
-            )
-            self.t_fparam_istd = paddle.to_tensor(
-                np.ones([1, self.numb_fparam]), dtype=GLOBAL_PD_FLOAT_PRECISION
-            )
-
-        # stat aparam
-        if self.numb_aparam > 0:
-            self.t_aparam_avg = paddle.to_tensor(
-                np.zeros([1, self.numb_aparam]), dtype=GLOBAL_PD_FLOAT_PRECISION
-            )
-            self.t_aparam_istd = paddle.to_tensor(
-                np.ones([1, self.numb_aparam]), dtype=GLOBAL_PD_FLOAT_PRECISION
-            )
-
         self.fitting_net_variables = None
         self.mixed_prec = None
         self.layer_name = layer_name
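
The deleted block had built one fitting MLP per atom type, enabling a ResNet-style timestep (use_timestep=self.resnet_dt) only when two consecutive hidden widths match. A simplified, hedged sketch of that construction, with plain paddle.nn.Linear standing in for deepmd's OneLayer (an assumption; OneLayer additionally handles naming, seeding, precision, and the timestep), and with the first-layer input width made explicit:

import paddle

ntypes, dim_descrpt, n_neuron = 2, 100, [240, 240, 240]  # hypothetical sizes

element_nets = []
for type_i in range(ntypes):
    layers = []
    for ii in range(len(n_neuron)):
        # First layer reads the descriptor; later layers read the previous width.
        in_dim = dim_descrpt if ii == 0 else n_neuron[ii - 1]
        # The removed code requested a skip/timestep connection only when
        # n_neuron[ii] == n_neuron[ii - 1], i.e. when the widths allow it.
        layers.append(paddle.nn.Linear(in_dim, n_neuron[ii]))
    layers.append(paddle.nn.Linear(n_neuron[-1], 1))  # scalar per-atom energy head
    element_nets.append(paddle.nn.LayerList(layers))

nets = paddle.nn.LayerList(element_nets)  # one sub-network per atom type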
6 changes: 3 additions & 3 deletions deepmd/infer/deep_eval.py
@@ -220,10 +220,10 @@ def _get_value(self, tensor_name: str, attr_name: Optional[str] = None):
             if tensor_name in name:
                 value = tensor.numpy()[0] if tensor.shape == [1] else tensor.numpy()
                 if attr_name:
-                    setattr(self, attr_name, value)
-                    return value
+                    setattr(self, attr_name, tensor)
+                    return tensor
                 else:
-                    return value
+                    return tensor
 
     @staticmethod
     def _load_graph(
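
After this change, _get_value stores and returns the matching paddle.Tensor itself rather than its numpy conversion, so callers keep tensor semantics (dtype, device); note the numpy value computed on the unchanged line above is left unused. A self-contained sketch of the new behavior (the buffer dict, names, and holder object are assumptions, not deepmd's actual API):

import paddle

buffers = {"t_rcut": paddle.to_tensor([6.0])}  # hypothetical named buffer

class Holder:
    pass

def get_value(tensor_name, attr_name=None, holder=None):
    for name, tensor in buffers.items():
        if tensor_name in name:
            if attr_name:
                setattr(holder, attr_name, tensor)  # store the tensor itself
                return tensor
            else:
                return tensor

h = Holder()
t = get_value("rcut", attr_name="rcut", holder=h)
print(type(t), float(h.rcut[0]))  # a paddle.Tensor, 6.0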
5 changes: 0 additions & 5 deletions deepmd/model/ener.py
@@ -66,7 +66,6 @@ def __init__(
         # super(EnerModel, self).__init__(name_scope="EnerModel")
         """Constructor."""
         # descriptor
-        super(EnerModel, self).__init__(name_scope="EnerModel")
         self.descrpt = descrpt
         self.rcut = self.descrpt.get_rcut()
         self.ntypes = self.descrpt.get_ntypes()
@@ -93,10 +92,6 @@ def __init__(
         else:
             self.srtab = None
 
-        self.t_tmap = " ".join(self.type_map)
-        self.t_mt = self.model_type
-        self.t_ver = MODEL_VERSION
-
         # self.type_map = " ".join(self.type_map)
         self.t_tmap = " ".join(self.type_map)
         self.t_mt = self.model_type
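
The second hunk drops one of two identical copies of the t_tmap/t_mt/t_ver assignments that the merge had left behind; the same three assignments remain on the unchanged lines just below the deleted ones. For reference, t_tmap is simply the space-joined type map (a toy example):

type_map = ["O", "H"]  # hypothetical type map
t_tmap = " ".join(type_map)
print(t_tmap)  # "O H"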
2 changes: 0 additions & 2 deletions deepmd/train/trainer.py
@@ -4,9 +4,7 @@
 import os
 import platform
 import shutil
-import sys
 import time
-from collections import defaultdict
 from typing import Dict
 from typing import List
 
