-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Unary Operation Wrapper now does not throw exceptions + Implemented Softmax wrapper.
- Loading branch information
1 parent
783c2cf
commit f29fa39
Showing
2 changed files
with
85 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,51 @@ | ||
#include <memory> | ||
#include <assert.h> | ||
|
||
#include "tensor_forward_wrapper.h" | ||
#include "activation_layer.h" | ||
#include "m_algorithms.h" | ||
// #include "matrix_printer.h" | ||
#include "matrix_benchmark.h" | ||
#include "config.h" | ||
|
||
|
||
namespace NeuralNetwork { | ||
|
||
/// Forward pass of the ReLU activation layer.
///
/// Wraps the unary ReLU matrix operation in a TensorOp so the result is
/// recorded in the computational graph, then applies it to the input.
///
/// @param input  tensor from the previous step; must be non-null
///               (enforced with assert, active in debug builds only).
/// @return the activated output tensor produced by the TensorOp wrapper.
std::shared_ptr<Computation::Graph::Tensor> NeuralNetwork::ActivationFunctions::ReLU::doForward(std::shared_ptr<Computation::Graph::Tensor> input) noexcept{

    assert(input != nullptr && "Matrix has no data (pointing to null).");

    // Graph-aware wrapper around the element-wise ReLU operation.
    Computation::Graph::TensorOp activation(Matrix::Operations::Unary::ReLU{});

    // NOTE(review): printer debugging was commented out in the original;
    // re-enable via Matrix::Printer under #if DEBUG if needed.
    return activation(input);
}
|
||
/// Forward pass of the SoftMax activation layer.
///
/// Builds a TensorOp around the unary SoftMax matrix operation (so the
/// computation participates in the graph) and evaluates it on the input.
///
/// @param input  tensor from the previous step; must be non-null
///               (enforced with assert, active in debug builds only).
/// @return the activated output tensor produced by the TensorOp wrapper.
std::shared_ptr<Computation::Graph::Tensor> NeuralNetwork::ActivationFunctions::SoftMax::doForward(std::shared_ptr<Computation::Graph::Tensor> input) noexcept{

    assert(input != nullptr && "Matrix has no data (pointing to null).");

    // Graph-aware wrapper around the element-wise SoftMax operation.
    Computation::Graph::TensorOp activation(Matrix::Operations::Unary::SoftMax{});

    return activation(input);
}
|
||
|
||
|
||
|
||
} | ||
|
||
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,34 @@ | ||
#ifndef ACTIVATION_FUNCTIONS_H | ||
#define ACTIVATION_FUNCTIONS_H | ||
|
||
#include <memory> | ||
|
||
#include "network_layer.h" | ||
#include "tensor.h" | ||
|
||
|
||
|
||
|
||
namespace NeuralNetwork { | ||
|
||
namespace ActivationFunctions { | ||
|
||
// ReLU activation step for the network's forward pass.
// CRTP participant in the layer pipeline via ComputationalStep<ReLU>.
// The actual element-wise math lives in Matrix::Operations::Unary::ReLU
// (presumably max(0, x) per the name — confirm in m_algorithms).
class ReLU: public ComputationalStep<ReLU> {

    public:
        // Applies the activation to `input` and returns the resulting
        // graph tensor; input is expected to be non-null (asserted in
        // the definition). Never throws (noexcept).
        std::shared_ptr<Computation::Graph::Tensor> doForward(std::shared_ptr<Computation::Graph::Tensor> input) noexcept;
};
|
||
// SoftMax activation step for the network's forward pass.
// CRTP participant in the layer pipeline via ComputationalStep<SoftMax>.
// The actual element-wise math lives in Matrix::Operations::Unary::SoftMax
// (presumably the normalized-exponential per the name — confirm in
// m_algorithms).
class SoftMax: public ComputationalStep<SoftMax> {

    public:
        // Applies the activation to `input` and returns the resulting
        // graph tensor; input is expected to be non-null (asserted in
        // the definition). Never throws (noexcept).
        std::shared_ptr<Computation::Graph::Tensor> doForward(std::shared_ptr<Computation::Graph::Tensor> input) noexcept;
};
|
||
} | ||
|
||
} | ||
|
||
|
||
|
||
#endif // ACTIVATION_FUNCTIONS_H |