
Commit

Merge branch 'devel' of https://github.com/deepmodeling/deepmd-kit into devel
CaRoLZhangxy committed Feb 28, 2024
2 parents c074687 + 004ebd6 commit de0e3ab
Showing 50 changed files with 1,614 additions and 355 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test_python.yml
@@ -18,7 +18,7 @@ jobs:
- python: 3.8
tf:
torch:
- python: "3.11"
- python: "3.12"
tf:
torch:

5 changes: 5 additions & 0 deletions backend/find_tensorflow.py
@@ -136,6 +136,11 @@ def get_tf_requirement(tf_version: str = "") -> dict:
extra_select = {}
if not (tf_version == "" or tf_version in SpecifierSet(">=2.12", prereleases=True)):
extra_requires.append("protobuf<3.20")
# keras 3 is not compatible with tf.compat.v1
if tf_version == "" or tf_version in SpecifierSet(">=2.15.0rc0", prereleases=True):
extra_requires.append("tf-keras; python_version>='3.9'")
# only TF>=2.16 is compatible with Python 3.12
extra_requires.append("tf-keras>=2.16.0rc0; python_version>='3.12'")
if tf_version == "" or tf_version in SpecifierSet(">=1.15", prereleases=True):
extra_select["mpi"] = [
"horovod",
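The backend/find_tensorflow.py hunk above pins tf-keras because Keras 3 breaks tf.compat.v1. A minimal sketch of that version gate, assuming the packaging library is available; the helper name pick_keras_requirements is illustrative and not part of deepmd-kit:

from packaging.specifiers import SpecifierSet


def pick_keras_requirements(tf_version: str = "") -> list:
    """Return the extra pip requirements implied by the requested TF version."""
    extra_requires = []
    # Keras 3 (bundled with TF>=2.15) is not compatible with tf.compat.v1,
    # so fall back to the legacy tf-keras package.
    if tf_version == "" or tf_version in SpecifierSet(">=2.15.0rc0", prereleases=True):
        extra_requires.append("tf-keras; python_version>='3.9'")
        # only tf-keras>=2.16 (matching TF>=2.16) supports Python 3.12
        extra_requires.append("tf-keras>=2.16.0rc0; python_version>='3.12'")
    return extra_requires


print(pick_keras_requirements("2.16.1"))
# ["tf-keras; python_version>='3.9'", "tf-keras>=2.16.0rc0; python_version>='3.12'"]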
33 changes: 5 additions & 28 deletions deepmd/backend/backend.py
@@ -16,8 +16,8 @@
)

from deepmd.utils.plugin import (
Plugin,
PluginVariant,
make_plugin_registry,
)

if TYPE_CHECKING:
@@ -33,7 +33,7 @@
)


class Backend(PluginVariant):
class Backend(PluginVariant, make_plugin_registry("backend")):
r"""General backend class.
Examples
@@ -44,24 +44,6 @@ class Backend(PluginVariant):
... pass
"""

__plugins = Plugin()

@staticmethod
def register(key: str) -> Callable[[object], object]:
"""Register a backend plugin.
Parameters
----------
key : str
the key of a backend
Returns
-------
Callable[[object], object]
the decorator to register backend
"""
return Backend.__plugins.register(key.lower())

@staticmethod
def get_backend(key: str) -> Type["Backend"]:
"""Get the backend by key.
@@ -76,12 +58,7 @@ def get_backend(key: str) -> Type["Backend"]:
Backend
the backend
"""
try:
backend = Backend.__plugins.get_plugin(key.lower())
except KeyError:
raise KeyError(f"Backend {key} is not registered.")
assert isinstance(backend, type)
return backend
return Backend.get_class_by_type(key)

@staticmethod
def get_backends() -> Dict[str, Type["Backend"]]:
@@ -92,7 +69,7 @@ def get_backends() -> Dict[str, Type["Backend"]]:
list
all the registered backends
"""
return Backend.__plugins.plugins
return Backend.get_plugins()

@staticmethod
def get_backends_by_feature(
@@ -112,7 +89,7 @@ def get_backends_by_feature(
"""
return {
key: backend
for key, backend in Backend.__plugins.plugins.items()
for key, backend in Backend.get_backends().items()
if backend.features & feature
}

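The backend.py hunk above swaps Backend's hand-rolled Plugin registry for the shared make_plugin_registry mixin. A hedged, self-contained sketch of how such a registry mixin can work; this toy make_plugin_registry only illustrates the idea and does not reproduce deepmd.utils.plugin:

from typing import Callable, Dict


def make_plugin_registry(family: str) -> type:
    """Build a mixin class holding a key -> class mapping for one plugin family."""

    class PluginRegistry:
        _plugins: Dict[str, type] = {}

        @classmethod
        def register(cls, key: str) -> Callable[[type], type]:
            def decorator(plugin_cls: type) -> type:
                cls._plugins[key.lower()] = plugin_cls
                return plugin_cls

            return decorator

        @classmethod
        def get_class_by_type(cls, key: str) -> type:
            try:
                return cls._plugins[key.lower()]
            except KeyError:
                raise KeyError(f"{family} {key} is not registered.") from None

        @classmethod
        def get_plugins(cls) -> Dict[str, type]:
            return dict(cls._plugins)

    return PluginRegistry


class Backend(make_plugin_registry("backend")):
    pass


@Backend.register("tensorflow")
class TensorFlowBackend(Backend):
    pass


assert Backend.get_class_by_type("TensorFlow") is TensorFlowBackend
assert "tensorflow" in Backend.get_plugins()

Each call to make_plugin_registry creates a fresh mapping, so different plugin families ("backend", "descriptor", ...) stay isolated while sharing one implementation.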
4 changes: 4 additions & 0 deletions deepmd/dpmodel/atomic_model/dp_atomic_model.py
@@ -130,6 +130,8 @@ def forward_atomic(

def serialize(self) -> dict:
return {
"@class": "Model",
"type": "standard",
"type_map": self.type_map,
"descriptor": self.descriptor.serialize(),
"fitting": self.fitting.serialize(),
Expand All @@ -138,6 +140,8 @@ def serialize(self) -> dict:
@classmethod
def deserialize(cls, data) -> "DPAtomicModel":
data = copy.deepcopy(data)
data.pop("@class")
data.pop("type")
descriptor_obj = BaseDescriptor.deserialize(data["descriptor"])
fitting_obj = BaseFitting.deserialize(data["fitting"])
obj = cls(descriptor_obj, fitting_obj, type_map=data["type_map"])
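The dp_atomic_model.py hunks above add "@class" and "type" header keys to serialize() and pop them again in deserialize() before rebuilding the object. A minimal round-trip sketch of that pattern; ToyAtomicModel is a stand-in, not a deepmd-kit class:

import copy


class ToyAtomicModel:
    def __init__(self, type_map):
        self.type_map = type_map

    def serialize(self) -> dict:
        return {
            "@class": "Model",
            "type": "standard",
            "type_map": self.type_map,
        }

    @classmethod
    def deserialize(cls, data: dict) -> "ToyAtomicModel":
        data = copy.deepcopy(data)  # do not mutate the caller's dict
        data.pop("@class")
        data.pop("type")
        return cls(type_map=data["type_map"])


model = ToyAtomicModel(type_map=["O", "H"])
restored = ToyAtomicModel.deserialize(model.serialize())
assert restored.type_map == ["O", "H"]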
11 changes: 11 additions & 0 deletions deepmd/dpmodel/atomic_model/linear_atomic_model.py
@@ -1,4 +1,5 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
import copy
import sys
from abc import (
abstractmethod,
@@ -182,12 +183,17 @@ def fitting_output_def(self) -> FittingOutputDef:
@staticmethod
def serialize(models) -> dict:
return {
"@class": "Model",
"type": "linear",
"models": [model.serialize() for model in models],
"model_name": [model.__class__.__name__ for model in models],
}

@staticmethod
def deserialize(data) -> List[BaseAtomicModel]:
data = copy.deepcopy(data)
data.pop("@class")
data.pop("type")
model_names = data["model_name"]
models = [
getattr(sys.modules[__name__], name).deserialize(model)
@@ -263,6 +269,8 @@ def __init__(

def serialize(self) -> dict:
return {
"@class": "Model",
"type": "zbl",
"models": LinearAtomicModel.serialize([self.dp_model, self.zbl_model]),
"sw_rmin": self.sw_rmin,
"sw_rmax": self.sw_rmax,
@@ -271,6 +279,9 @@ def serialize(self) -> dict:

@classmethod
def deserialize(cls, data) -> "DPZBLLinearAtomicModel":
data = copy.deepcopy(data)
data.pop("@class")
data.pop("type")
sw_rmin = data["sw_rmin"]
sw_rmax = data["sw_rmax"]
smin_alpha = data["smin_alpha"]
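LinearAtomicModel.deserialize above rebuilds each sub-model by looking up the class name recorded in "model_name" in the current module. A small sketch of that name-based dispatch, with a hypothetical ToyModel class standing in for the real sub-models:

import sys


class ToyModel:
    """Hypothetical sub-model; only deserialize matters for this sketch."""

    @classmethod
    def deserialize(cls, data: dict) -> "ToyModel":
        return cls()


serialized = {"model_name": ["ToyModel"], "models": [{}]}
# resolve each recorded class name in the current module, as the hunk above does
models = [
    getattr(sys.modules[__name__], name).deserialize(model)
    for name, model in zip(serialized["model_name"], serialized["models"])
]
assert isinstance(models[0], ToyModel)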
4 changes: 4 additions & 0 deletions deepmd/dpmodel/atomic_model/make_base_atomic_model.py
@@ -160,6 +160,10 @@ def do_grad_(self, var_name: str, base: str) -> bool:
return self.fitting_output_def()[var_name].c_differentiable
return self.fitting_output_def()[var_name].r_differentiable

def get_model_def_script(self) -> str:
# TODO: implement this method; saved to model
raise NotImplementedError

setattr(BAM, fwd_method_name, BAM.fwd)
delattr(BAM, "fwd")

12 changes: 11 additions & 1 deletion deepmd/dpmodel/atomic_model/pairtab_atomic_model.py
@@ -1,4 +1,5 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
import copy
from typing import (
Dict,
List,
@@ -105,10 +106,19 @@ def mixed_types(self) -> bool:
return True

def serialize(self) -> dict:
return {"tab": self.tab.serialize(), "rcut": self.rcut, "sel": self.sel}
return {
"@class": "Model",
"type": "pairtab",
"tab": self.tab.serialize(),
"rcut": self.rcut,
"sel": self.sel,
}

@classmethod
def deserialize(cls, data) -> "PairTabAtomicModel":
data = copy.deepcopy(data)
data.pop("@class")
data.pop("type")
rcut = data["rcut"]
sel = data["sel"]
tab = PairTab.deserialize(data["tab"])
4 changes: 4 additions & 0 deletions deepmd/dpmodel/descriptor/__init__.py
@@ -5,8 +5,12 @@
from .se_e2_a import (
DescrptSeA,
)
from .se_r import (
DescrptSeR,
)

__all__ = [
"DescrptSeA",
"DescrptSeR",
"make_base_descriptor",
]
38 changes: 3 additions & 35 deletions deepmd/dpmodel/descriptor/make_base_descriptor.py
@@ -4,10 +4,8 @@
abstractmethod,
)
from typing import (
Callable,
List,
Optional,
Type,
)

from deepmd.common import (
@@ -17,7 +15,8 @@
DPPath,
)
from deepmd.utils.plugin import (
Plugin,
PluginVariant,
make_plugin_registry,
)


@@ -37,45 +36,14 @@ def make_base_descriptor(
"""

class BD(ABC):
class BD(ABC, PluginVariant, make_plugin_registry("descriptor")):
"""Base descriptor provides the interfaces of descriptor."""

__plugins = Plugin()

@staticmethod
def register(key: str) -> Callable:
"""Register a descriptor plugin.
Parameters
----------
key : str
the key of a descriptor
Returns
-------
Descriptor
the registered descriptor
Examples
--------
>>> @Descriptor.register("some_descrpt")
class SomeDescript(Descriptor):
pass
"""
return BD.__plugins.register(key)

def __new__(cls, *args, **kwargs):
if cls is BD:
cls = cls.get_class_by_type(j_get_type(kwargs, cls.__name__))
return super().__new__(cls)

@classmethod
def get_class_by_type(cls, descrpt_type: str) -> Type["BD"]:
if descrpt_type in BD.__plugins.plugins:
return BD.__plugins.plugins[descrpt_type]
else:
raise RuntimeError("Unknown descriptor type: " + descrpt_type)

@abstractmethod
def get_rcut(self) -> float:
"""Returns the cut-off radius."""
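make_base_descriptor above keeps BD.__new__, which dispatches construction of the base class to the subclass registered under the "type" keyword (via j_get_type), while the manual get_class_by_type is now provided by the registry mixin. A hedged sketch of that dispatch, with a plain dict and toy class names standing in for the plugin registry:

class BaseDescrpt:
    # the real registry comes from make_plugin_registry("descriptor");
    # a plain dict is enough to show the dispatch
    _registry: dict = {}

    def __new__(cls, *args, **kwargs):
        if cls is BaseDescrpt:
            # pick the concrete subclass from the "type" keyword,
            # as j_get_type does for BD
            cls = cls._registry[kwargs["type"]]
        return super().__new__(cls)


class ToySeA(BaseDescrpt):
    def __init__(self, *, type: str, rcut: float):
        self.rcut = rcut


BaseDescrpt._registry["se_e2_a"] = ToySeA

d = BaseDescrpt(type="se_e2_a", rcut=6.0)
assert isinstance(d, ToySeA) and d.rcut == 6.0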
17 changes: 13 additions & 4 deletions deepmd/dpmodel/descriptor/se_e2_a.py
@@ -3,6 +3,9 @@

import numpy as np

from deepmd.env import (
GLOBAL_NP_FLOAT_PRECISION,
)
from deepmd.utils.path import (
DPPath,
)
@@ -183,8 +186,12 @@ def __init__(
)
self.env_mat = EnvMat(self.rcut, self.rcut_smth)
self.nnei = np.sum(self.sel)
self.davg = np.zeros([self.ntypes, self.nnei, 4])
self.dstd = np.ones([self.ntypes, self.nnei, 4])
self.davg = np.zeros(
[self.ntypes, self.nnei, 4], dtype=PRECISION_DICT[self.precision]
)
self.dstd = np.ones(
[self.ntypes, self.nnei, 4], dtype=PRECISION_DICT[self.precision]
)
self.orig_sel = self.sel

def __setitem__(self, key, value):
@@ -292,7 +299,7 @@ def call(
sec = np.append([0], np.cumsum(self.sel))

ng = self.neuron[-1]
gr = np.zeros([nf * nloc, ng, 4])
gr = np.zeros([nf * nloc, ng, 4], dtype=PRECISION_DICT[self.precision])
exclude_mask = self.emask.build_type_exclude_mask(nlist, atype_ext)
# merge nf and nloc axis, so for type_one_side == False,
# we don't require atype is the same in all frames
@@ -322,7 +329,7 @@ def call(
# nf x nloc x ng x ng1
grrg = np.einsum("flid,fljd->flij", gr, gr1)
# nf x nloc x (ng x ng1)
grrg = grrg.reshape(nf, nloc, ng * self.axis_neuron)
grrg = grrg.reshape(nf, nloc, ng * self.axis_neuron).astype(
GLOBAL_NP_FLOAT_PRECISION
)
return grrg, gr[..., 1:], None, None, ww

def serialize(self) -> dict:
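The se_e2_a.py hunks above allocate the working buffers (davg, dstd, gr) in the descriptor's own precision and cast the returned matrix to the global NumPy precision. A sketch of that pattern, assuming a PRECISION_DICT-style mapping; the names here only mirror the idea, the real constants live in deepmd's env/common modules:

import numpy as np

PRECISION_DICT = {"float32": np.float32, "float64": np.float64}
GLOBAL_NP_FLOAT_PRECISION = np.float64


def toy_descriptor(nf: int, nloc: int, ng: int, precision: str = "float32") -> np.ndarray:
    # working buffers follow the descriptor's own precision ...
    gr = np.zeros([nf, nloc, ng, 4], dtype=PRECISION_DICT[precision])
    grrg = np.einsum("flid,fljd->flij", gr, gr)
    # ... while the value handed back to the model is cast to the global precision
    return grrg.reshape(nf, nloc, ng * ng).astype(GLOBAL_NP_FLOAT_PRECISION)


out = toy_descriptor(2, 3, 4)
assert out.dtype == np.float64 and out.shape == (2, 3, 16)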
