simplify vector load from json #105

Merged 1 commit on Jul 21, 2024
NAM/get_dsp.cpp (5 additions, 13 deletions)
@@ -68,13 +68,10 @@ void verify_config_version(const std::string versionStr)

 std::vector<float> GetWeights(nlohmann::json const& j, const std::filesystem::path config_path)
 {
-  if (j.find("weights") != j.end())
+  auto it = j.find("weights");
+  if (it != j.end())
   {
-    auto weight_list = j["weights"];
-    std::vector<float> weights;
-    for (auto it = weight_list.begin(); it != weight_list.end(); ++it)
-      weights.push_back(*it);
-    return weights;
+    return *it;
   }
   else
     throw std::runtime_error("Corrupted model file is missing weights.");
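
The whole change rests on nlohmann::json's built-in value conversion: a JSON array converts directly to an STL container such as std::vector<float>, so the hand-rolled copy loop was unnecessary. A minimal standalone sketch of that library behavior (illustrative, not part of the diff):

```cpp
#include <iostream>
#include <vector>
#include <nlohmann/json.hpp>

int main()
{
  nlohmann::json j = nlohmann::json::parse(R"({"weights": [0.1, -0.5, 2.0]})");
  // The templated conversion operator turns the JSON array into a
  // std::vector<float> in one step, just like `return *it;` above.
  std::vector<float> weights = j["weights"];
  std::cout << weights.size() << " weights loaded\n"; // prints: 3 weights loaded
  return 0;
}
```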
@@ -153,9 +150,7 @@ std::unique_ptr<DSP> get_dsp(dspData& conf)
   {
     const int channels = config["channels"];
     const bool batchnorm = config["batchnorm"];
-    std::vector<int> dilations;
-    for (size_t i = 0; i < config["dilations"].size(); i++)
-      dilations.push_back(config["dilations"][i]);
+    std::vector<int> dilations = config["dilations"];
     const std::string activation = config["activation"];
     out = std::make_unique<convnet::ConvNet>(channels, dilations, batchnorm, activation, weights, expectedSampleRate);
   }
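
The copy-initialization above works because the target type is spelled out. Where it is not (for example with auto), the same conversion is available explicitly via get<T>(); a small sketch, not part of the diff:

```cpp
#include <vector>
#include <nlohmann/json.hpp>

int main()
{
  nlohmann::json config = {{"dilations", {1, 2, 4, 8}}};
  // Explicit form of the conversion; needed whenever the compiler
  // cannot deduce the destination type on its own.
  auto dilations = config["dilations"].get<std::vector<int>>();
  return dilations.size() == 4 ? 0 : 1;
}
```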
@@ -172,12 +167,9 @@ std::unique_ptr<DSP> get_dsp(dspData& conf)
     for (size_t i = 0; i < config["layers"].size(); i++)
     {
       nlohmann::json layer_config = config["layers"][i];
-      std::vector<int> dilations;
-      for (size_t j = 0; j < layer_config["dilations"].size(); j++)
-        dilations.push_back(layer_config["dilations"][j]);
       layer_array_params.push_back(
         wavenet::LayerArrayParams(layer_config["input_size"], layer_config["condition_size"], layer_config["head_size"],
-                                  layer_config["channels"], layer_config["kernel_size"], dilations,
+                                  layer_config["channels"], layer_config["kernel_size"], layer_config["dilations"],
                                   layer_config["activation"], layer_config["gated"], layer_config["head_bias"]));
     }
     const bool with_head = config["head"] == NULL;
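
Passing layer_config["dilations"] straight into the constructor works because the json value materializes a temporary std::vector<int>, which can bind to the rvalue-reference parameter introduced in NAM/wavenet.h below. A sketch with a hypothetical Params type standing in for wavenet::LayerArrayParams:

```cpp
#include <utility>
#include <vector>
#include <nlohmann/json.hpp>

// Hypothetical stand-in for the dilations parameter of
// wavenet::LayerArrayParams after this PR.
struct Params
{
  Params(const std::vector<int>&& dilations_)
  : dilations(std::move(dilations_)) // const source, so this copies (see note below)
  {
  }
  std::vector<int> dilations;
};

int main()
{
  nlohmann::json layer_config = {{"dilations", {1, 2, 4, 8}}};
  // json -> std::vector<int> conversion yields a temporary, which
  // binds to the const rvalue-reference parameter.
  Params p(layer_config["dilations"]);
  return 0;
}
```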
NAM/wavenet.h (3 additions, 4 deletions)
@@ -58,20 +58,19 @@ class LayerArrayParams
 {
 public:
   LayerArrayParams(const int input_size_, const int condition_size_, const int head_size_, const int channels_,
-                    const int kernel_size_, const std::vector<int>& dilations_, const std::string activation_,
+                    const int kernel_size_, const std::vector<int>&& dilations_, const std::string activation_,
                     const bool gated_, const bool head_bias_)
   : input_size(input_size_)
   , condition_size(condition_size_)
   , head_size(head_size_)
   , channels(channels_)
   , kernel_size(kernel_size_)
+  , dilations(std::move(dilations_))
   , activation(activation_)
   , gated(gated_)
   , head_bias(head_bias_)
   {
-    for (size_t i = 0; i < dilations_.size(); i++)
-      this->dilations.push_back(dilations_[i]);
-  };
+  }
 
   const int input_size;
   const int condition_size;
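
For reference, a standalone sketch (not from this PR) of how std::move interacts with a const source: the move constructor takes a non-const std::vector<int>&&, so a const rvalue cannot select it and overload resolution falls back to the copy constructor. The member initializer above therefore still copies the vector; the code is correct, just not a true move.

```cpp
#include <iostream>
#include <utility>
#include <vector>

int main()
{
  const std::vector<int> src{1, 2, 4, 8};
  // std::move(src) has type const std::vector<int>&&; the move
  // constructor is not viable for a const argument, so the copy
  // constructor (taking const std::vector<int>&) is chosen.
  std::vector<int> dst = std::move(src);
  std::cout << src.size() << '\n'; // prints 4: src was copied, not moved from
  return 0;
}
```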