commit d8dd701eebbbf9dc7e2f36be0a6907529f4515ed
parent 0597392e15136a966aa947d682f628a1ca4238a6
Author: Toni Kauko <120397558+masqutti@users.noreply.github.com>
Date: Wed, 18 Jan 2023 07:35:59 +0200
added a secondary method get_dsp(...) and a struct to handle the data (#32)
Diffstat:
3 files changed, 51 insertions(+), 17 deletions(-)
diff --git a/NeuralAmpModeler/.editorconfig b/NeuralAmpModeler/.editorconfig
@@ -0,0 +1,9 @@
+# EditorConfig is awesome: https://EditorConfig.org
+
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+indent_style = space
+indent_size = 2
\ No newline at end of file
diff --git a/NeuralAmpModeler/dsp/dsp.h b/NeuralAmpModeler/dsp/dsp.h
@@ -11,6 +11,7 @@
#include <Eigen/Dense>
#include "IPlugConstants.h"
+#include "json.hpp"
enum EArchitectures
{
@@ -361,13 +362,23 @@ namespace convnet {
// Utilities ==================================================================
// Implemented in get_dsp.cpp
+struct dspData {
+ std::string version;
+ std::string architecture;
+ nlohmann::json config;
+ std::vector<float> params;
+};
+
// Verify that the config that we are building our model from is supported by
// this plugin version.
void verify_config_version(const std::string version);
// Takes the directory, finds the required files, and uses them to instantiate
-// an instance of DSP.
-std::unique_ptr<DSP> get_dsp(const std::filesystem::path dirname);
+// an instance of DSP. Also fills in a dspData struct that holds the model's data.
+std::unique_ptr<DSP> get_dsp(const std::filesystem::path dirname, dspData& returnedConfig);
+
+// Instantiates a DSP object from a dspData struct.
+std::unique_ptr<DSP> get_dsp(dspData& conf);
// Hard-coded model:
std::unique_ptr<DSP> get_hard_dsp();
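
A minimal usage sketch of the two-step API declared above: load a model from disk once, keep the returned dspData, and later rebuild a DSP instance from it without touching the filesystem again. The directory name "model_dir" is a placeholder and error handling is omitted.

    #include <filesystem>
    #include <memory>
    #include "dsp.h"

    void example_load()
    {
      dspData modelData;  // filled in by get_dsp() with version, architecture, config and weights
      std::unique_ptr<DSP> dsp = get_dsp(std::filesystem::path("model_dir"), modelData);

      // Re-instantiate later from the cached data, e.g. when the plugin is reset:
      std::unique_ptr<DSP> dspAgain = get_dsp(modelData);
    }
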
diff --git a/NeuralAmpModeler/dsp/get_dsp.cpp b/NeuralAmpModeler/dsp/get_dsp.cpp
@@ -16,7 +16,7 @@ void verify_config_version(const std::string version)
throw std::runtime_error("Unsupported config version");
}
-std::unique_ptr<DSP> get_dsp(const std::filesystem::path dirname)
+std::unique_ptr<DSP> get_dsp(const std::filesystem::path dirname, dspData& returnedConfig)
{
const std::filesystem::path config_filename = dirname / std::filesystem::path("config.json");
if (!std::filesystem::exists(config_filename))
@@ -26,15 +26,32 @@ std::unique_ptr<DSP> get_dsp(const std::filesystem::path dirname)
i >> j;
verify_config_version(j["version"]);
- auto architecture = j["architecture"];
- nlohmann::json config = j["config"];
+ returnedConfig.version = j["version"];
+ returnedConfig.architecture = j["architecture"];
+ returnedConfig.config = j["config"];
+ returnedConfig.params = numpy_util::load_to_vector(dirname / std::filesystem::path("weights.npy"));
+
+ /* Copy to a new dspData object for the get_dsp call below, since params
+ may be modified through non-const references by some model constructors
+ inside get_dsp(dspData& conf). We need to return an unmodified copy of
+ the data via returnedConfig. */
+ dspData conf = returnedConfig;
+
+ return get_dsp(conf);
+}
+
+std::unique_ptr<DSP> get_dsp(dspData& conf)
+{
+ verify_config_version(conf.version);
+
+ auto architecture = conf.architecture;
+ nlohmann::json config = conf.config;
if (architecture == "Linear")
{
const int receptive_field = config["receptive_field"];
const bool _bias = config["bias"];
- std::vector<float> params = numpy_util::load_to_vector(dirname / std::filesystem::path("weights.npy"));
- return std::make_unique<Linear>(receptive_field, _bias, params);
+ return std::make_unique<Linear>(receptive_field, _bias, conf.params);
}
else if (architecture == "ConvNet")
{
@@ -44,25 +61,22 @@ std::unique_ptr<DSP> get_dsp(const std::filesystem::path dirname)
for (int i = 0; i < config["dilations"].size(); i++)
dilations.push_back(config["dilations"][i]);
const std::string activation = config["activation"];
- std::vector<float> params = numpy_util::load_to_vector(dirname / std::filesystem::path("weights.npy"));
- return std::make_unique<convnet::ConvNet>(channels, dilations, batchnorm, activation, params);
+ return std::make_unique<convnet::ConvNet>(channels, dilations, batchnorm, activation, conf.params);
}
else if (architecture == "LSTM")
{
const int num_layers = config["num_layers"];
const int input_size = config["input_size"];
const int hidden_size = config["hidden_size"];
- std::vector<float> params = numpy_util::load_to_vector(dirname / std::filesystem::path("weights.npy"));
- auto json = nlohmann::json {};
- return std::make_unique<lstm::LSTM>(num_layers, input_size, hidden_size, params, json);
+ auto json = nlohmann::json{};
+ return std::make_unique<lstm::LSTM>(num_layers, input_size, hidden_size, conf.params, json);
}
else if (architecture == "CatLSTM")
{
const int num_layers = config["num_layers"];
const int input_size = config["input_size"];
const int hidden_size = config["hidden_size"];
- std::vector<float> params = numpy_util::load_to_vector(dirname / std::filesystem::path("weights.npy"));
- return std::make_unique<lstm::LSTM>(num_layers, input_size, hidden_size, params, config["parametric"]);
+ return std::make_unique<lstm::LSTM>(num_layers, input_size, hidden_size, conf.params, config["parametric"]);
}
else if (architecture == "WaveNet" || architecture == "CatWaveNet")
{
@@ -88,7 +102,6 @@ std::unique_ptr<DSP> get_dsp(const std::filesystem::path dirname)
}
const bool with_head = config["head"] == NULL;
const float head_scale = config["head_scale"];
- std::vector<float> params = numpy_util::load_to_vector(dirname / std::filesystem::path("weights.npy"));
// Solves compilation issue on macOS Error: No matching constructor for initialization of 'wavenet::WaveNet'
// Solution from https://stackoverflow.com/a/73956681/3768284
auto parametric_json = architecture == "CatWaveNet" ? config["parametric"] : nlohmann::json{};
@@ -97,8 +110,8 @@ std::unique_ptr<DSP> get_dsp(const std::filesystem::path dirname)
head_scale,
with_head,
parametric_json,
- params
- );
+ conf.params
+ );
}
else
{
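
For reference, the new overload can also be fed a dspData built in memory rather than read from config.json/weights.npy. The sketch below follows the "Linear" branch above; the version string and the weight count are assumptions, not values verified against the plugin's supported config versions or the Linear constructor.

    #include <memory>
    #include <vector>
    #include "dsp.h"

    std::unique_ptr<DSP> example_in_memory()
    {
      dspData conf;
      conf.version = "0.5.0";                     // assumed: must be a version accepted by verify_config_version()
      conf.architecture = "Linear";
      conf.config = nlohmann::json{{"receptive_field", 3}, {"bias", false}};
      conf.params = std::vector<float>(3, 0.0f);  // placeholder weights; real models load these from weights.npy
      return get_dsp(conf);                       // dispatches on conf.architecture, here the "Linear" branch
    }
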