From 525ce93cc97dd8f3bbf56dd1539974dd73114cc8 Mon Sep 17 00:00:00 2001
From: Duo <50307526+iProzd@users.noreply.github.com>
Date: Thu, 29 Feb 2024 18:38:43 +0800
Subject: [PATCH] resolve conversations

---
 deepmd/dpmodel/descriptor/make_base_descriptor.py | 6 +++++-
 deepmd/dpmodel/descriptor/se_e2_a.py              | 6 +++++-
 deepmd/dpmodel/descriptor/se_r.py                 | 6 +++++-
 deepmd/pt/model/descriptor/descriptor.py          | 5 +++++
 deepmd/pt/model/descriptor/dpa1.py                | 5 +++++
 deepmd/pt/model/descriptor/dpa2.py                | 5 +++++
 deepmd/pt/model/descriptor/hybrid.py              | 5 +++++
 deepmd/pt/model/descriptor/se_a.py                | 5 +++++
 deepmd/pt/model/descriptor/se_r.py                | 5 +++++
 deepmd/pt/model/network/network.py                | 5 +++++
 deepmd/pt/model/task/dipole.py                    | 2 +-
 deepmd/pt/model/task/fitting.py                   | 5 +++++
 deepmd/pt/model/task/polarizability.py            | 2 +-
 deepmd/pt/train/wrapper.py                        | 5 +++++
 14 files changed, 62 insertions(+), 5 deletions(-)

diff --git a/deepmd/dpmodel/descriptor/make_base_descriptor.py b/deepmd/dpmodel/descriptor/make_base_descriptor.py
index 11db208077..940bd0cd27 100644
--- a/deepmd/dpmodel/descriptor/make_base_descriptor.py
+++ b/deepmd/dpmodel/descriptor/make_base_descriptor.py
@@ -88,7 +88,11 @@ def mixed_types(self) -> bool:
 
         @abstractmethod
         def share_params(self, base_class, shared_level, resume=False):
-            """Share the parameters of self to the base_class with shared_level."""
+            """
+            Share the parameters of self with the base_class according to shared_level during multitask training.
+            If not starting from a checkpoint (resume is False),
+            some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+            """
             pass
 
         def compute_input_stats(
diff --git a/deepmd/dpmodel/descriptor/se_e2_a.py b/deepmd/dpmodel/descriptor/se_e2_a.py
index 14010c34e2..f6b1c5677e 100644
--- a/deepmd/dpmodel/descriptor/se_e2_a.py
+++ b/deepmd/dpmodel/descriptor/se_e2_a.py
@@ -244,7 +244,11 @@ def mixed_types(self):
         return False
 
     def share_params(self, base_class, shared_level, resume=False):
-        """Share the parameters of self to the base_class with shared_level."""
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         raise NotImplementedError
 
     def get_ntypes(self) -> int:
diff --git a/deepmd/dpmodel/descriptor/se_r.py b/deepmd/dpmodel/descriptor/se_r.py
index feea008478..fda8b19474 100644
--- a/deepmd/dpmodel/descriptor/se_r.py
+++ b/deepmd/dpmodel/descriptor/se_r.py
@@ -204,7 +204,11 @@ def mixed_types(self):
         return False
 
     def share_params(self, base_class, shared_level, resume=False):
-        """Share the parameters of self to the base_class with shared_level."""
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         raise NotImplementedError
 
     def get_ntypes(self) -> int:
diff --git a/deepmd/pt/model/descriptor/descriptor.py b/deepmd/pt/model/descriptor/descriptor.py
index 778523a14d..339a716942 100644
--- a/deepmd/pt/model/descriptor/descriptor.py
+++ b/deepmd/pt/model/descriptor/descriptor.py
@@ -101,6 +101,11 @@ def get_stats(self) -> Dict[str, StatItem]:
         raise NotImplementedError
 
     def share_params(self, base_class, shared_level, resume=False):
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         assert (
             self.__class__ == base_class.__class__
         ), "Only descriptors of the same type can share params!"
diff --git a/deepmd/pt/model/descriptor/dpa1.py b/deepmd/pt/model/descriptor/dpa1.py
index 08c37487de..ddb1d0ea05 100644
--- a/deepmd/pt/model/descriptor/dpa1.py
+++ b/deepmd/pt/model/descriptor/dpa1.py
@@ -148,6 +148,11 @@ def mixed_types(self) -> bool:
         return self.se_atten.mixed_types()
 
     def share_params(self, base_class, shared_level, resume=False):
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         assert (
             self.__class__ == base_class.__class__
         ), "Only descriptors of the same type can share params!"
diff --git a/deepmd/pt/model/descriptor/dpa2.py b/deepmd/pt/model/descriptor/dpa2.py
index d407452e46..3a4319860f 100644
--- a/deepmd/pt/model/descriptor/dpa2.py
+++ b/deepmd/pt/model/descriptor/dpa2.py
@@ -292,6 +292,11 @@ def mixed_types(self) -> bool:
         return True
 
     def share_params(self, base_class, shared_level, resume=False):
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         assert (
             self.__class__ == base_class.__class__
         ), "Only descriptors of the same type can share params!"
diff --git a/deepmd/pt/model/descriptor/hybrid.py b/deepmd/pt/model/descriptor/hybrid.py
index 40ff3e5c7f..2c68afa892 100644
--- a/deepmd/pt/model/descriptor/hybrid.py
+++ b/deepmd/pt/model/descriptor/hybrid.py
@@ -146,6 +146,11 @@ def dim_emb(self):
         raise RuntimeError
 
     def share_params(self, base_class, shared_level, resume=False):
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         assert (
             self.__class__ == base_class.__class__
         ), "Only descriptors of the same type can share params!"
diff --git a/deepmd/pt/model/descriptor/se_a.py b/deepmd/pt/model/descriptor/se_a.py
index 9a9883cdb3..eddfcf4047 100644
--- a/deepmd/pt/model/descriptor/se_a.py
+++ b/deepmd/pt/model/descriptor/se_a.py
@@ -130,6 +130,11 @@ def mixed_types(self):
         return self.sea.mixed_types()
 
     def share_params(self, base_class, shared_level, resume=False):
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         assert (
             self.__class__ == base_class.__class__
         ), "Only descriptors of the same type can share params!"
diff --git a/deepmd/pt/model/descriptor/se_r.py b/deepmd/pt/model/descriptor/se_r.py
index ba483ea711..4e7e516065 100644
--- a/deepmd/pt/model/descriptor/se_r.py
+++ b/deepmd/pt/model/descriptor/se_r.py
@@ -154,6 +154,11 @@ def mixed_types(self) -> bool:
         return False
 
     def share_params(self, base_class, shared_level, resume=False):
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         assert (
             self.__class__ == base_class.__class__
         ), "Only descriptors of the same type can share params!"
diff --git a/deepmd/pt/model/network/network.py b/deepmd/pt/model/network/network.py
index 9ef7b3366a..10d0364c9b 100644
--- a/deepmd/pt/model/network/network.py
+++ b/deepmd/pt/model/network/network.py
@@ -575,6 +575,11 @@ def forward(self, atype):
         return self.embedding(atype)
 
     def share_params(self, base_class, shared_level, resume=False):
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         assert (
             self.__class__ == base_class.__class__
         ), "Only TypeEmbedNet of the same type can share params!"
diff --git a/deepmd/pt/model/task/dipole.py b/deepmd/pt/model/task/dipole.py
index 00de5276ee..6956d2ce25 100644
--- a/deepmd/pt/model/task/dipole.py
+++ b/deepmd/pt/model/task/dipole.py
@@ -93,7 +93,7 @@ def __init__(
         self.r_differentiable = r_differentiable
         self.c_differentiable = c_differentiable
         super().__init__(
-            var_name="dipole" if "var_name" not in kwargs else kwargs.pop("var_name"),
+            var_name=kwargs.pop("var_name", "dipole"),
             ntypes=ntypes,
             dim_descrpt=dim_descrpt,
             neuron=neuron,
diff --git a/deepmd/pt/model/task/fitting.py b/deepmd/pt/model/task/fitting.py
index d752ac964c..47535580db 100644
--- a/deepmd/pt/model/task/fitting.py
+++ b/deepmd/pt/model/task/fitting.py
@@ -62,6 +62,11 @@ def __new__(cls, *args, **kwargs):
         return super().__new__(cls)
 
     def share_params(self, base_class, shared_level, resume=False):
+        """
+        Share the parameters of self with the base_class according to shared_level during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         assert (
             self.__class__ == base_class.__class__
         ), "Only fitting nets of the same type can share params!"
diff --git a/deepmd/pt/model/task/polarizability.py b/deepmd/pt/model/task/polarizability.py
index 37c802613a..3c40e9f734 100644
--- a/deepmd/pt/model/task/polarizability.py
+++ b/deepmd/pt/model/task/polarizability.py
@@ -115,7 +115,7 @@ def __init__(
         ).view(ntypes, 1)
         self.shift_diag = shift_diag
         super().__init__(
-            var_name="polar" if "var_name" not in kwargs else kwargs.pop("var_name"),
+            var_name=kwargs.pop("var_name", "polar"),
             ntypes=ntypes,
             dim_descrpt=dim_descrpt,
             neuron=neuron,
diff --git a/deepmd/pt/train/wrapper.py b/deepmd/pt/train/wrapper.py
index 52cc636c10..67f8043653 100644
--- a/deepmd/pt/train/wrapper.py
+++ b/deepmd/pt/train/wrapper.py
@@ -83,6 +83,11 @@ def set_trainable_params(self):
             param.requires_grad = trainable
 
     def share_params(self, shared_links, resume=False):
+        """
+        Share the parameters of classes following the rules defined in shared_links during multitask training.
+        If not starting from a checkpoint (resume is False),
+        some separate parameters (e.g. mean and stddev) will be recalculated across different classes.
+        """
         supported_types = ["descriptor", "fitting_net"]
         for shared_item in shared_links:
            class_name = shared_links[shared_item]["type"]
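For reference, a minimal sketch of the sharing contract the new docstrings describe. This is not code from this patch or from deepmd-kit; the class and attribute names below are invented for illustration only, and the real dispatch goes through ModelWrapper.share_params(shared_links, resume) shown in the last hunk above.

# Toy, self-contained illustration of the share_params contract (hypothetical names).
class ToyDescriptor:
    def __init__(self, mean, stddev):
        self.mean = mean
        self.stddev = stddev

    def share_params(self, base_class, shared_level, resume=False):
        # Only instances of the same class may share parameters.
        assert self.__class__ == base_class.__class__
        # shared_level would select which sub-blocks are shared; ignored in this toy.
        if not resume:
            # Fresh multitask run: separate statistics are recalculated
            # across the classes that will share parameters.
            base_class.mean = (base_class.mean + self.mean) / 2
            base_class.stddev = (base_class.stddev + self.stddev) / 2
        # Afterwards this instance reuses the base instance's statistics.
        self.mean = base_class.mean
        self.stddev = base_class.stddev


base = ToyDescriptor(mean=0.1, stddev=1.0)
link = ToyDescriptor(mean=0.3, stddev=0.8)
link.share_params(base, shared_level=0, resume=False)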