Fix neuralogic as torch function
LukasZahradnik committed Oct 10, 2024
1 parent 358a9c4 commit 337a783
Showing 2 changed files with 5 additions and 16 deletions.
neuralogic/core/builder/components.py (4 changes: 2 additions & 2 deletions)
@@ -121,7 +121,7 @@ def get_fact(self, fact):
             if term_str[0] == term_str[0].upper() and term_str[0] != term_str[0].lower():
                 raise ValueError(f"{fact} is not a fact")
 
-        return self.get_nodes(fact, "FactAtom")
+        return self.get_nodes(fact, "FactNeuron")
 
     def set_fact_value(self, fact, value) -> int:
         for term in fact.terms:
@@ -130,7 +130,7 @@ def set_fact_value(self, fact, value) -> int:
             if term_str[0] == term_str[0].upper() and term_str[0] != term_str[0].lower():
                 raise ValueError(f"{fact} is not a fact")
 
-        node = self.get_nodes(fact, "FactAtom")
+        node = self.get_nodes(fact, "FactNeuron")
 
         if len(node) == 0:
            return -1
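
The check shown in both hunks above rejects non-ground facts: a term whose first character is uppercase and alphabetic is treated as a logic variable rather than a constant. A minimal standalone sketch of that condition follows; the helper name and the example terms are illustrative only and not part of the repository.

    # Hedged sketch (not from the commit): reproduces the variable-detection
    # condition used in get_fact and set_fact_value.
    def looks_like_variable(term_str: str) -> bool:
        # True when the first character equals its uppercase form but differs
        # from its lowercase form, i.e. it is an uppercase letter.
        return term_str[0] == term_str[0].upper() and term_str[0] != term_str[0].lower()

    if __name__ == "__main__":
        print(looks_like_variable("X"))      # True  -> a variable, so not a ground fact
        print(looks_like_variable("alice"))  # False -> a constant term
        print(looks_like_variable("1"))      # False -> digits have no case
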
neuralogic/nn/torch_function.py (17 changes: 3 additions & 14 deletions)
@@ -1,4 +1,3 @@
-import json
 from typing import Callable, Any, List, Union
 
 import torch
@@ -14,10 +13,9 @@
 
 class _NeuraLogicFunction(Function):
     @staticmethod
-    def forward(ctx, mapping, value_factory, sample, model, number_format, dtype, *inputs):
+    def forward(ctx, mapping, value_factory, sample, model, dtype, *inputs):
         ctx.model = model
         ctx.sample = sample
-        ctx.number_format = number_format
         ctx.dtype = dtype
         ctx.mapping = mapping
 
@@ -30,27 +28,20 @@ def forward(ctx, mapping, value_factory, sample, model, number_format, dtype, *inputs):
     def backward(ctx: Any, *grad_outputs: Any) -> Any:
         model = ctx.model
         sample = ctx.sample
-        number_format = ctx.number_format
         dtype = ctx.dtype
 
         backproper, weight_updater = model.backprop(sample, -grad_outputs[0].detach().numpy())
         state_index = backproper.stateIndex
 
         gradients = tuple(
-            -torch.tensor(
-                json.loads(
-                    str(sample.get_fact(fact).getComputationView(state_index).getGradient().toString(number_format))
-                ),
-                dtype=dtype,
-            ).reshape(fact.weight.shape)
+            -torch.tensor(sample.get_fact(fact)[0].gradient, dtype=dtype).reshape(fact.weight.shape)
             for fact in ctx.mapping
         )
 
         trainer = model.strategy.getTrainer()
         trainer.updateWeights(model.strategy.getCurrentModel(), weight_updater)
         trainer.invalidateSample(trainer.getInvalidation(), sample.java_sample)
 
-        return (None, None, None, None, None, None, *gradients)
+        return (None, None, None, None, None, *gradients)
 
 
 class NeuraLogic(nn.Module):
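
The gradient line in the hunk above replaces a string round-trip (Java-side toString, then json.loads, then torch.tensor) with a direct read of the fact's gradient value. A rough standalone illustration of the two paths, using made-up gradient data rather than NeuraLogic objects:

    # Hedged illustration (data and shapes are invented, not from the library).
    import json
    import torch

    # Old path: the gradient arrived as a serialized string and was parsed back.
    grad_as_string = "[[0.5, -1.0], [2.0, 0.0]]"
    old_grad = -torch.tensor(json.loads(grad_as_string), dtype=torch.float32)

    # New path: the gradient is already a Python value and goes straight to torch.tensor.
    grad_as_value = [[0.5, -1.0], [2.0, 0.0]]
    new_grad = -torch.tensor(grad_as_value, dtype=torch.float32)

    assert torch.equal(old_grad, new_grad)  # same result, without the JSON detour
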
@@ -74,7 +65,6 @@ def __init__(
         self.to_logic = to_logic
 
         self.model = template.build(settings)
-        self.number_format = self.model.settings.settings_class.superDetailedNumberFormat
 
         dataset = Dataset(Sample(output_relation, input_facts))
         self.sample = self.model.build_dataset(dataset, learnable_facts=True).samples[0]
@@ -91,7 +81,6 @@ def forward(self, *inputs, **kwargs):
             self.value_factory,
             self.sample,
             self.model,
-            self.number_format,
             self.dtype,
             *(fact.weight for fact in mapping),
         )
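
The call above passes one leading non-tensor argument per parameter of _NeuraLogicFunction.forward, which is why dropping number_format from forward also drops one None from backward's return tuple: torch.autograd.Function requires backward to return exactly one value per forward argument, with None for non-differentiable inputs. A minimal sketch of that contract with a hypothetical ScaleByConstant function (not part of NeuraLogic):

    # Hedged sketch: illustrates the forward/backward arity invariant the commit
    # relies on. ScaleByConstant and factor are made-up names for illustration.
    import torch
    from torch.autograd import Function

    class ScaleByConstant(Function):
        @staticmethod
        def forward(ctx, factor, x):
            # One non-tensor argument (factor) followed by one tensor input (x).
            ctx.factor = factor
            return x * factor

        @staticmethod
        def backward(ctx, grad_output):
            # One return value per forward argument: None for the non-tensor
            # factor, a real gradient for x. Removing an argument from forward
            # therefore means removing one None here as well.
            return None, grad_output * ctx.factor

    if __name__ == "__main__":
        x = torch.ones(3, requires_grad=True)
        ScaleByConstant.apply(2.0, x).sum().backward()
        print(x.grad)  # tensor([2., 2., 2.])
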
