Skip to content

Commit

Permalink
fix multiple-theory single-atom case
Browse files Browse the repository at this point in the history
  • Loading branch information
ilyes319 committed Nov 1, 2024
1 parent 6a79015 commit 787cda9
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 3 deletions.
1 change: 0 additions & 1 deletion mace/cli/run_train.py
Original file line number Diff line number Diff line change
Expand Up @@ -653,7 +653,6 @@ def run(args: argparse.Namespace) -> None:
folder, r_max=args.r_max, z_table=z_table, heads=heads, head=head_config.head_name
)
for test_name, test_set in test_sets.items():
print(test_name)
test_sampler = None
if args.distributed:
test_sampler = torch.utils.data.distributed.DistributedSampler(
Expand Down
4 changes: 2 additions & 2 deletions mace/tools/scripts_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -628,7 +628,7 @@ def custom_key(key):

def dict_to_array(input_data, heads):
if not all(isinstance(value, dict) for value in input_data.values()):
return np.array(list(input_data.values()))
return np.array([[input_data[head]] for head in heads])
unique_keys = set()
for inner_dict in input_data.values():
unique_keys.update(inner_dict.keys())
Expand All @@ -640,7 +640,7 @@ def dict_to_array(input_data, heads):
key_index = sorted_keys.index(int(key))
head_index = heads.index(head_name)
result_array[head_index][key_index] = value
return np.squeeze(result_array)
return result_array


class LRScheduler:
Expand Down

0 comments on commit 787cda9

Please sign in to comment.