
Commit

Refactored breadth-first iterator so its behavior now depends on Gradient-Compute or Parameter-Update policies.
alejandroarmas committed Jun 2, 2022
1 parent 30bf6e5 commit d994e62
Showing 3 changed files with 177 additions and 22 deletions.
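To make the refactor concrete, here is an editorial usage sketch (not taken from the repository): the same breadth-first traversal is instantiated with one policy to push gradients backward and with the other to read parameters and their gradients afterwards. The driver function, the loss id, and the update step are assumptions; TensorID, FunctionObject, LevelOrderIterator, and the two policies all appear in the diff below.

#include "function_object_iterator.h"

using namespace NeuralNetwork;
using namespace NeuralNetwork::Graph;

// Hypothetical driver: walk the graph once per policy, starting from the loss node.
void backward_pass(TensorID loss_id) {

    // Gradient-compute traversal: constructing and advancing the iterator fires
    // Differentiate events on the visited nodes and their children.
    LevelOrderIterator<ComputeGradientPolicy> ops(loss_id);
    FunctionObject fn = *ops;     // dereference yields the current node's FunctionObject
    fn.stringify_type();
    ++ops;                        // pops the next node and backpropagates through its children

    // Parameter-read traversal: dereference yields the tensor's matrix, and
    // gradient() (available only for this policy) yields its accumulated gradient.
    LevelOrderIterator<ReadParameterPolicy> params(loss_id);
    auto& weights = *params;
    auto  grad    = params.gradient();
    // a hypothetical optimizer step would combine weights and grad here
}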
119 changes: 109 additions & 10 deletions function_object_iterator.cpp
@@ -1,5 +1,6 @@

#include "function_object_iterator.h"
#include "tensor.h"
#include "generator.h"



@@ -10,11 +11,102 @@ namespace NeuralNetwork {

namespace Graph {

LevelOrderIterator& LevelOrderIterator::operator++(int) noexcept {

if (!nodeStack.empty()) {
void ReadParameterPolicy::process_head(TensorID tid) {}


void ReadParameterPolicy::apply_to_children(std::stack<TensorID>& nodeStack, TensorID tid) {
nodeStack.emplace(tid);
}


ReadParameterPolicy::ReturnType ReadParameterPolicy::dereference(TensorID tid) {

ComputationalGraphMap& map = ComputationalGraphMap::get();
return map._get_tensor(tid)->release_matrix();
}


ReadParameterPolicy::Matrix_t ReadParameterPolicy::grad(TensorID tid) {

ComputationalGraphMap& map = ComputationalGraphMap::get();
auto gradient = Matrix_t{map._get_tensor(tid)->get_grad()};
return Matrix_t{gradient};
}


void ComputeGradientPolicy::process_head(TensorID tid) {

// std::cout << "Backpropigating TID: " << _t.get() << std::endl;

ComputationalGraphMap& map = ComputationalGraphMap::get();
auto operation = map._get_operation(tid);

// auto _r = map._get_tensor(current)->release_matrix().num_rows();
// auto _w = map._get_tensor(current)->release_matrix().num_cols();

// auto matrix = Matrix_t{
// Matrix::Rows(_r),
// Matrix::Columns(_w)
// };
// Matrix::Generation::Tester<1> unit_gen;

// matrix = unit_gen(matrix);

// Events::Differentiate backpropigate_grad(matrix);
Events::Differentiate backpropigate_grad(Matrix_t{});

operation.stringify_type();
std::cout << "Computing Leaf Derivative" << std::endl;
operation.process_event(backpropigate_grad);
operation.stringify_type();


}


void ComputeGradientPolicy::apply_to_children(std::stack<TensorID>& nodeStack, TensorID tid) {

ComputationalGraphMap& map = ComputationalGraphMap::get();
auto df = Matrix_t{map._get_tensor(tid)->get_grad()};
assert(df.num_rows() && df.num_cols() && "Invalid Derivative.");
std::cout << "Gradient DIM: [" << df.num_rows() << "," << df.num_cols() << "]" << std::endl;

Events::Differentiate backpropigate_grad(df);
auto operation = map._get_operation(tid);

operation.stringify_type();
std::cout << "Processing event:" << std::endl;
operation.process_event(backpropigate_grad);
operation.stringify_type();
nodeStack.emplace(tid);
}


ComputeGradientPolicy::ReturnType ComputeGradientPolicy::dereference(TensorID tid) {

ComputationalGraphMap& map = ComputationalGraphMap::get();
FunctionObject fn_obj = map._get_operation(tid);
return fn_obj;
}

template <TraversalPolicy TP>
LevelOrderIterator<TP>::LevelOrderIterator(const TensorID _t) noexcept : current(_t) {

if (_t.get()) {

TP::process_head(_t);
this->_stack_children();
}

}


template <TraversalPolicy TP>
LevelOrderIterator<TP>& LevelOrderIterator<TP>::operator++(void) noexcept {

if (!nodeStack.empty()) {

current = nodeStack.top();
nodeStack.pop();

@@ -27,15 +119,15 @@ namespace NeuralNetwork {
}


FunctionObject LevelOrderIterator::operator*() const noexcept {
template <TraversalPolicy TP>
typename LevelOrderIterator<TP>::IterReturnType LevelOrderIterator<TP>::operator*() const noexcept {

ComputationalGraphMap& map = ComputationalGraphMap::get();
FunctionObject fn_obj = map._get_operation(current);
return fn_obj;
return TP::dereference(current);
}


void LevelOrderIterator::_stack_children(void) noexcept {
template <TraversalPolicy TP>
void LevelOrderIterator<TP>::_stack_children(void) noexcept {

ComputationalGraphMap& map = ComputationalGraphMap::get();
FunctionObject fn_obj = map._get_operation(current);
@@ -45,10 +137,13 @@ namespace NeuralNetwork {
for (std::size_t i = 0; const auto tid: fn_obj.serialize()) {

if (tid) {
std::cout << "tid" << i << ": " << tid->get() << std::endl;
std::cout << "Backpropigating TID " << i << ": " << tid->get() << std::endl;


if (i++) {
nodeStack.emplace(tid->get());

TP::apply_to_children(nodeStack, TensorID(tid->get()));

}
}

@@ -57,6 +152,10 @@ namespace NeuralNetwork {

return;
}

template class LevelOrderIterator<ReadParameterPolicy>;
template class LevelOrderIterator<ComputeGradientPolicy>;



} // Graph
70 changes: 58 additions & 12 deletions include/function_object_iterator.h
@@ -5,6 +5,7 @@
#include "computational_graph_map.h"
#include "function_object.h"
#include "strong_types.h"
#include "m_algorithms_concepts.h"

#include <stack>
#include <optional>
@@ -17,22 +18,69 @@ namespace NeuralNetwork {

namespace Graph {

struct BackPropigationMatrixTrait {
using Type = Matrix::Representation;
};


class ComputeGradientPolicy {

public:

using Matrix_t = BackPropigationMatrixTrait::Type;
using ReturnType = FunctionObject;

static void process_head(TensorID tid);
static void apply_to_children(std::stack<TensorID>& tid_stack, TensorID tid);
static ReturnType dereference(TensorID tid);
};

class ReadParameterPolicy {

public:

using Matrix_t = BackPropigationMatrixTrait::Type;
using ReturnType = BackPropigationMatrixTrait::Type&;

static void process_head(TensorID tid);
static void apply_to_children(std::stack<TensorID>& tid_stack, TensorID tid);
static ReturnType dereference(TensorID tid);
static Matrix_t grad(TensorID tid);
};


/*
Breadth First Search through computational graph.
Policy:
Compute Gradient
Read Parameter
*/
template <TraversalPolicy TP = ComputeGradientPolicy>
class LevelOrderIterator {

using Matrix_t = BackPropigationMatrixTrait::Type;
using IterReturnType = TP::ReturnType;

constexpr static size_t NoOpIdx = 0;

public:
LevelOrderIterator(TensorID _t) noexcept : current(_t) {
if (_t.get()) this->_stack_children();
}
LevelOrderIterator(LevelOrderIterator&) = default;
LevelOrderIterator(LevelOrderIterator&&) = default;
LevelOrderIterator& operator=(const LevelOrderIterator&) = default;
LevelOrderIterator& operator=(LevelOrderIterator&&) = default;
LevelOrderIterator& operator++(int) noexcept;
explicit LevelOrderIterator(const TensorID _t) noexcept;

// LevelOrderIterator(LevelOrderIterator&) = default;
// LevelOrderIterator(LevelOrderIterator&&) = default;
// LevelOrderIterator& operator=(const LevelOrderIterator&) = default;
// LevelOrderIterator& operator=(LevelOrderIterator&&) = default;
LevelOrderIterator& operator++(void) noexcept;


FunctionObject operator*() const noexcept;
Matrix_t gradient() const noexcept
requires Same_as<TP, ReadParameterPolicy> {
return ReadParameterPolicy::grad(current);
}

IterReturnType operator*() const noexcept;

bool operator!=(const LevelOrderIterator& other) const noexcept{
return current != other.current;
@@ -44,11 +92,9 @@
TensorID current;
std::stack<TensorID> nodeStack;




};


}

}
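An editorial illustration of the new constrained accessor (not part of the commit): gradient() participates only when the iterator is instantiated with ReadParameterPolicy, so a gradient-computing traversal cannot read parameter gradients through it by mistake. The function and the parameter id below are assumptions.

#include "function_object_iterator.h"

using namespace NeuralNetwork;
using namespace NeuralNetwork::Graph;

// parameter_id is a hypothetical id of a tensor already registered in the
// ComputationalGraphMap.
void inspect_parameter(TensorID parameter_id) {

    // Compiles: gradient() is constrained with Same_as<TP, ReadParameterPolicy>.
    LevelOrderIterator<ReadParameterPolicy> params(parameter_id);
    auto grad = params.gradient();      // Matrix_t, returned by value

    // Would be rejected at compile time, since the constraint fails:
    // LevelOrderIterator<ComputeGradientPolicy> ops(parameter_id);
    // ops.gradient();
}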
10 changes: 10 additions & 0 deletions include/m_algorithms_concepts.h
@@ -3,6 +3,7 @@

#include <concepts>
#include <memory>
#include <stack>

#include "matrix.h"

@@ -88,6 +89,15 @@ namespace NeuralNetwork {
};


template <typename GraphIteratorPolicy>
concept TraversalPolicy = requires(GraphIteratorPolicy policy, std::stack<TensorID>& tid_stack, TensorID tid) {

policy.process_head(tid);
policy.apply_to_children(tid_stack, tid);
policy.dereference(tid);

};


}
}
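As an editorial aside (not part of the commit), any type exposing these three operations on a TensorID satisfies TraversalPolicy; note that LevelOrderIterator additionally expects a ReturnType alias for operator*, which the concept itself does not check. A minimal sketch of a hypothetical policy, assuming it is declared in the same namespace as the concept and TensorID:

// Hypothetical policy, for illustration only.
struct IdentityPolicy {

    using ReturnType = TensorID;   // expected by LevelOrderIterator, not by the concept

    static void process_head(TensorID) {}                        // nothing to do at the root

    static void apply_to_children(std::stack<TensorID>& tid_stack, TensorID tid) {
        tid_stack.emplace(tid);                                   // schedule every child
    }

    static ReturnType dereference(TensorID tid) { return tid; }
};

static_assert(TraversalPolicy<IdentityPolicy>);

Using such a policy would also require a matching explicit instantiation, template class LevelOrderIterator<IdentityPolicy>;, in function_object_iterator.cpp, since the iterator's member definitions live in that translation unit.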
