Solve some UT
Chengqian-Zhang committed Dec 13, 2024
1 parent 13e1911 commit ff4650e
Showing 13 changed files with 37 additions and 3 deletions.
1 change: 1 addition & 0 deletions checkpoint
@@ -0,0 +1 @@
model.ckpt-1.pt
6 changes: 4 additions & 2 deletions deepmd/pt/loss/property.py
@@ -56,6 +56,8 @@ def __init__(
self.property_name = property_name
assert self.task_dim == sum(property_dim)
self.property_name_dim_mapping = dict(zip(property_name, property_dim))
self.out_bias = kwargs.get("out_bias", None)
self.out_std = kwargs.get("out_std", None)

def forward(self, input_dict, model, label, natoms, learning_rate=0.0, mae=False):
"""Return loss on properties .
@@ -94,8 +96,8 @@ def forward(self, input_dict, model, label, natoms, learning_rate=0.0, mae=False
label["property"] = torch.cat(concat_property, dim=1)
assert label["property"].shape == (nbz, self.task_dim)

- out_std = model.atomic_model.out_std[0][0]
- out_bias = model.atomic_model.out_bias[0][0]
+ out_std = model.atomic_model.out_std[0][0] if self.out_std is None else torch.tensor(self.out_std,device=env.DEVICE)
+ out_bias = model.atomic_model.out_bias[0][0] if self.out_bias is None else torch.tensor(self.out_bias,device=env.DEVICE)
assert len(out_std.shape) == 1
assert out_std.shape[0] == self.task_dim

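For context, a minimal standalone sketch of the fallback these replaced lines introduce: the loss now prefers an out_bias/out_std pair supplied through its keyword arguments and only falls back to the statistics stored on model.atomic_model when none is given. The helper name resolve_property_stats and the explicit device argument are illustrative assumptions, not code from this commit.

import torch

def resolve_property_stats(model, out_bias=None, out_std=None, device="cpu"):
    # Prefer statistics handed to the loss via kwargs; otherwise fall back
    # to the bias/std stored on the model's atomic model.
    std = (model.atomic_model.out_std[0][0]
           if out_std is None else torch.tensor(out_std, device=device))
    bias = (model.atomic_model.out_bias[0][0]
            if out_bias is None else torch.tensor(out_bias, device=device))
    assert std.shape == bias.shape  # one entry per component of the property
    return bias, std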
1 change: 1 addition & 0 deletions source/tests/pt/model/test_permutation.py
@@ -331,6 +331,7 @@
"fitting_net": {
"type": "property",
"task_dim": 3,
"property_name": ["band_property"],
"neuron": [24, 24, 24],
"resnet_dt": True,
"bias_method": "normal",
Empty file.
Binary file not shown.
Binary file not shown.
Binary file not shown.
20 changes: 20 additions & 0 deletions source/tests/pt/property/double/type.raw
@@ -0,0 +1,20 @@
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
4 changes: 4 additions & 0 deletions source/tests/pt/property/double/type_map.raw
@@ -0,0 +1,4 @@
H
C
N
O
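For readers unfamiliar with the DeePMD-kit data layout: each integer in type.raw indexes into type_map.raw, so the twenty zeros above declare a 20-atom system made entirely of the first species, H. A small sketch of that lookup, with paths relative to source/tests/pt/property/double:

with open("type_map.raw") as f:
    type_map = f.read().split()                 # ["H", "C", "N", "O"]
with open("type.raw") as f:
    types = [int(t) for t in f.read().split()]  # twenty zeros

symbols = [type_map[t] for t in types]          # ["H"] * 20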
3 changes: 3 additions & 0 deletions source/tests/pt/property/input.json
@@ -27,6 +27,7 @@
"fitting_net": {
"type": "property",
"intensive": true,
"property_name": "band_property",
"task_dim": 3,
"neuron": [
100,
@@ -48,6 +49,8 @@
},
"loss": {
"type": "property",
"property_name": "band_property",
"property_dim": [3],
"_comment": " that's all"
},
"training": {
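The fitting_net and loss sections added here have to stay consistent: the loss reads labels under the same property_name the fitting net declares, and sum(property_dim) in the loss must equal the fitting net's task_dim (the assertion shown in PropertyLoss.__init__ above). A small Python restatement of that constraint for the values in this config; the dict literals simply mirror the JSON and are not code from the commit.

fitting_net = {"type": "property", "property_name": "band_property", "task_dim": 3}
loss = {"type": "property", "property_name": "band_property", "property_dim": [3]}

assert sum(loss["property_dim"]) == fitting_net["task_dim"]
assert loss["property_name"] == fitting_net["property_name"]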
2 changes: 1 addition & 1 deletion source/tests/pt/test_training.py
@@ -464,7 +464,7 @@ def setUp(self) -> None:
property_input = str(Path(__file__).parent / "property/input.json")
with open(property_input) as f:
self.config_property = json.load(f)
prop_data_file = [str(Path(__file__).parent / "property/single")]
prop_data_file = [str(Path(__file__).parent / "property/double")]
self.config_property["training"]["training_data"]["systems"] = prop_data_file
self.config_property["training"]["validation_data"]["systems"] = prop_data_file
self.config_property["model"]["descriptor"] = deepcopy(model_dpa1["descriptor"])
3 changes: 3 additions & 0 deletions source/tests/universal/dpmodel/loss/test_loss.py
@@ -193,6 +193,9 @@ def LossParamProperty():
}
input_dict = {
"key_to_pref_map": key_to_pref_map,
"property_dim": [2],
"out_bias": [0,0],
"out_std": [1,1],
"task_dim": 2,
}
return input_dict
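Why these particular values: an out_bias of zeros with an out_std of ones makes a (label - bias) / std style normalization a no-op, so the universal loss test can run without a model that carries fitted output statistics. The normalization formula below is the usual convention and is an assumption here, not something shown in this diff.

import torch

out_bias = torch.tensor([0.0, 0.0])
out_std = torch.tensor([1.0, 1.0])
label = torch.tensor([[0.3, -1.2]])

normalized = (label - out_bias) / out_std
assert torch.equal(normalized, label)  # identity transform with bias 0 and std 1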
