Merge pull request #135 from luigibonati/committor_activ
Make last layer activation of committor an explicit block
EnricoTrizio authored May 21, 2024
2 parents f5598d5 + 0d85fde commit 3f9adeb
Showing 3 changed files with 82 additions and 16 deletions.
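
The gist of the change: the Committor CV previously forced a sharp sigmoid as the last-layer activation of its FeedForward network; it is now a separate, optional "sigmoid" block, so `BLOCKS` goes from `["nn"]` to `["nn", "sigmoid"]`. Below is a minimal sketch of the resulting pattern — an illustration only, assuming (as in mlcolvar's BaseCV) that the modules named in `BLOCKS` are applied in order; it is not mlcolvar code:

```python
import torch


class TwoBlockCV(torch.nn.Module):
    """Toy stand-in for a CV whose forward pass chains the modules named in BLOCKS."""

    BLOCKS = ["nn", "sigmoid"]

    def __init__(self, layers):
        super().__init__()
        # plain feed-forward network: no activation is forced onto its last layer anymore
        self.nn = torch.nn.Sequential(
            torch.nn.Linear(layers[0], layers[1]),
            torch.nn.Tanh(),
            torch.nn.Linear(layers[1], layers[2]),
        )
        # the output squashing is its own block, so it can also be left out entirely
        self.sigmoid = torch.nn.Sigmoid()

    def forward(self, x):
        for name in self.BLOCKS:
            block = getattr(self, name, None)
            if block is not None:  # a disabled/missing block is simply skipped
                x = block(x)
        return x


q = TwoBlockCV([4, 16, 1])(torch.rand(10, 4))  # committor-like values in (0, 1)
```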
12 changes: 11 additions & 1 deletion docs/api_cvs.rst
@@ -67,4 +67,14 @@ General framework which allows to optimize a single model with different loss fu
:toctree: autosummary
:template: custom-class-template.rst

MultiTaskCV
MultiTaskCV

Framework for the numerical determination of the committor function based on its variational principle.

.. currentmodule:: mlcolvar.cvs

.. autosummary::
:toctree: autosummary
:template: custom-class-template.rst

Committor
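
For context, the variational principle mentioned in the entry above is commonly stated as follows (one standard mass-weighted, Boltzmann-averaged form used in committor-based machine learning; not quoted from this repository): among all functions satisfying the boundary conditions $q|_A = 0$ and $q|_B = 1$, the committor is the minimizer of

$$
K[q] \;=\; \Big\langle \sum_i \frac{1}{m_i} \,\big|\nabla_i q(x)\big|^2 \Big\rangle_{\mu},
\qquad \mu(x) \propto e^{-\beta U(x)},
$$

which is also why the Committor class carries a masses tensor for training (see the docstring context in the code diff below).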
60 changes: 60 additions & 0 deletions docs/autosummary/mlcolvar.cvs.Committor.rst
@@ -0,0 +1,60 @@
mlcolvar.cvs.Committor
======================

.. currentmodule:: mlcolvar.cvs

.. autoclass:: Committor
:members:
:show-inheritance:
:inherited-members: Module,LightningModule


.. automethod:: __init__


.. rubric:: Methods

.. autosummary::

~Committor.__init__
~Committor.training_step




..

.. rubric:: Attributes

.. autosummary::

~Committor.BLOCKS
~Committor.CHECKPOINT_HYPER_PARAMS_KEY
~Committor.CHECKPOINT_HYPER_PARAMS_NAME
~Committor.CHECKPOINT_HYPER_PARAMS_TYPE
~Committor.T_destination
~Committor.automatic_optimization
~Committor.call_super_init
~Committor.current_epoch
~Committor.device
~Committor.dtype
~Committor.dump_patches
~Committor.example_input_array
~Committor.fabric
~Committor.global_rank
~Committor.global_step
~Committor.hparams
~Committor.hparams_initial
~Committor.local_rank
~Committor.logger
~Committor.loggers
~Committor.n_cvs
~Committor.on_gpu
~Committor.optimizer_name
~Committor.trainer
~Committor.training




26 changes: 11 additions & 15 deletions mlcolvar/cvs/committor/committor.py
@@ -3,6 +3,7 @@
from mlcolvar.cvs import BaseCV
from mlcolvar.core import FeedForward
from mlcolvar.core.loss import CommittorLoss
from mlcolvar.core.nn.utils import Custom_Sigmoid

__all__ = ["Committor"]

@@ -31,7 +32,7 @@ class Committor(BaseCV, lightning.LightningModule):
Utils to initialize the masses tensor for the training
"""

BLOCKS = ["nn"]
BLOCKS = ["nn", "sigmoid"]

def __init__(
self,
@@ -79,23 +80,16 @@ def __init__(
# ======= OPTIONS =======
# parse and sanitize
options = self.parse_options(options)

# add the relevant nn options, set tanh for hidden layers and sharp sigmoid for output layer
activ_list = ["tanh" for i in range( len(layers) - 2 )]
activ_list.append("custom_sigmoid")

# update options dict for activations if not already set
if not "activation" in options["nn"]:
options["nn"]["activation"] = activ_list

# ======= CHECKS =======
# should be empty in this case


# ======= BLOCKS =======
# initialize NN turning on last layer activation
# initialize NN
o = "nn"
self.nn = FeedForward(layers, last_layer_activation=True, **options[o])
self.nn = FeedForward(layers, **options[o])

# separately add sigmoid activation on last layer, this way it can be deactivated
o = "sigmoid"
if (options[o] is not False) and (options[o] is not None):
self.sigmoid = Custom_Sigmoid(**options[o])


def training_step(self, train_batch, batch_idx):
@@ -153,5 +147,7 @@ def test_committor():
trainer = lightning.Trainer(max_epochs=5, logger=None, enable_checkpointing=False, limit_val_batches=0, num_sanity_val_steps=0)
trainer.fit(model, datamodule)

model(X).sum().backward()

if __name__ == "__main__":
test_committor()
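
With the sigmoid promoted to its own block, it can be switched off through the `options` dictionary: the constructor above only builds `Custom_Sigmoid` when `options["sigmoid"]` is neither `None` nor `False`. Below is a minimal standalone sketch of that check (an illustration of the logic in the diff, not a call into mlcolvar; all other `Committor` constructor arguments are omitted):

```python
def builds_sigmoid_block(sigmoid_options) -> bool:
    # mirrors the diff: if (options[o] is not False) and (options[o] is not None): ...
    return (sigmoid_options is not False) and (sigmoid_options is not None)


assert builds_sigmoid_block({}) is True      # default (empty) sigmoid options -> block is built
assert builds_sigmoid_block(None) is False   # e.g. options = {"sigmoid": None} -> block is skipped
assert builds_sigmoid_block(False) is False  # e.g. options = {"sigmoid": False} -> block is skipped
```

In short, passing `options={"sigmoid": None}` (or `False`) leaves the CV output as the raw network value instead of squashing it through the sharp sigmoid.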
