diff --git a/NAM/get_dsp.cpp b/NAM/get_dsp.cpp
index 47aba97..734a2b7 100644
--- a/NAM/get_dsp.cpp
+++ b/NAM/get_dsp.cpp
@@ -68,13 +68,10 @@ void verify_config_version(const std::string versionStr)
 
 std::vector<float> GetWeights(nlohmann::json const& j, const std::filesystem::path config_path)
 {
-  if (j.find("weights") != j.end())
+  auto it = j.find("weights");
+  if (it != j.end())
   {
-    auto weight_list = j["weights"];
-    std::vector<float> weights;
-    for (auto it = weight_list.begin(); it != weight_list.end(); ++it)
-      weights.push_back(*it);
-    return weights;
+    return *it;
   }
   else
     throw std::runtime_error("Corrupted model file is missing weights.");
@@ -153,9 +150,7 @@ std::unique_ptr<DSP> get_dsp(dspData& conf)
   {
     const int channels = config["channels"];
     const bool batchnorm = config["batchnorm"];
-    std::vector<int> dilations;
-    for (size_t i = 0; i < config["dilations"].size(); i++)
-      dilations.push_back(config["dilations"][i]);
+    std::vector<int> dilations = config["dilations"];
     const std::string activation = config["activation"];
     out = std::make_unique<convnet::ConvNet>(channels, dilations, batchnorm, activation, weights, expectedSampleRate);
   }
@@ -172,12 +167,9 @@ std::unique_ptr<DSP> get_dsp(dspData& conf)
     for (size_t i = 0; i < config["layers"].size(); i++)
     {
       nlohmann::json layer_config = config["layers"][i];
-      std::vector<int> dilations;
-      for (size_t j = 0; j < layer_config["dilations"].size(); j++)
-        dilations.push_back(layer_config["dilations"][j]);
       layer_array_params.push_back(
         wavenet::LayerArrayParams(layer_config["input_size"], layer_config["condition_size"], layer_config["head_size"],
-                                  layer_config["channels"], layer_config["kernel_size"], dilations,
+                                  layer_config["channels"], layer_config["kernel_size"], layer_config["dilations"],
                                   layer_config["activation"], layer_config["gated"], layer_config["head_bias"]));
     }
     const bool with_head = config["head"] == NULL;
diff --git a/NAM/wavenet.h b/NAM/wavenet.h
index 7ea94f1..9441587 100644
--- a/NAM/wavenet.h
+++ b/NAM/wavenet.h
@@ -58,20 +58,19 @@ class LayerArrayParams
 {
 public:
   LayerArrayParams(const int input_size_, const int condition_size_, const int head_size_, const int channels_,
-                   const int kernel_size_, const std::vector<int>& dilations_, const std::string activation_,
+                   const int kernel_size_, const std::vector<int>&& dilations_, const std::string activation_,
                    const bool gated_, const bool head_bias_)
   : input_size(input_size_)
   , condition_size(condition_size_)
   , head_size(head_size_)
   , channels(channels_)
   , kernel_size(kernel_size_)
+  , dilations(std::move(dilations_))
   , activation(activation_)
   , gated(gated_)
   , head_bias(head_bias_)
   {
-    for (size_t i = 0; i < dilations_.size(); i++)
-      this->dilations.push_back(dilations_[i]);
-  };
+  }
 
   const int input_size;
   const int condition_size;
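Review note (not part of the patch): the simplified assignments rely on nlohmann::json's built-in conversion from an array node to a standard container, which is what lets `weights` and `dilations` be pulled straight out of the JSON. A minimal sketch of that behavior under the same assumption; the config fragment and values below are hypothetical, not taken from a real model file:

```cpp
#include <cassert>
#include <vector>

#include <nlohmann/json.hpp>

int main()
{
  // Hypothetical config fragment mirroring the fields read in get_dsp.cpp.
  nlohmann::json config =
    nlohmann::json::parse(R"({"dilations": [1, 2, 4, 8], "weights": [0.5, -0.25]})");

  // nlohmann::json converts an array node to std::vector<T> directly,
  // which is what `std::vector<int> dilations = config["dilations"];` in the patch relies on.
  std::vector<int> dilations = config["dilations"];
  std::vector<float> weights = config["weights"];

  assert(dilations.size() == 4 && dilations[3] == 8);
  assert(weights.size() == 2);
  return 0;
}
```

One possible follow-up on the wavenet.h change: because `dilations_` is a `const` rvalue reference, `std::move(dilations_)` still selects the copy constructor; taking the parameter by value (or by non-const `std::vector<int>&&`) would allow an actual move.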