commit e9136d2987adfde0c48570d97eed55c629ef30d6
parent db4cf2dea019b1a8ef10b62cb2af6f5df2666608
Author: Mike Oliphant <oliphant@nostatic.org>
Date: Thu, 2 Mar 2023 18:01:00 -0800
Apply tanh to a whole matrix directly on its data. Use the full-matrix call in wavenet::_Layer::process_. (#89)
Diffstat:
2 files changed, 11 insertions(+), 2 deletions(-)
diff --git a/NeuralAmpModeler/dsp/dsp.cpp b/NeuralAmpModeler/dsp/dsp.cpp
@@ -202,7 +202,16 @@ void tanh_(Eigen::MatrixXf &x, const long j_start, const long j_end) {
tanh_(x, 0, x.rows(), j_start, j_end);
}
-void tanh_(Eigen::MatrixXf &x) { tanh_(x, 0, x.rows(), 0, x.cols()); }
+void tanh_(Eigen::MatrixXf& x) {
+ float* ptr = x.data();
+
+ long size = x.rows() * x.cols();
+
+ for (long pos = 0; pos < size; pos++)
+ {
+ ptr[pos] = std::tanh(ptr[pos]);
+ }
+}
void Conv1D::set_params_(std::vector<float>::iterator &params) {
if (this->_weight.size() > 0) {
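For reference, a minimal sketch of an equivalent formulation using Eigen's array interface, which applies tanh elementwise without the explicit pointer loop. The name tanh_array_ is hypothetical and not part of this commit; the sketch assumes Eigen 3.3+, where ArrayBase::tanh() is available:

#include <Eigen/Dense>

// Elementwise tanh over the full matrix via Eigen's array API;
// Eigen can vectorize this expression internally.
void tanh_array_(Eigen::MatrixXf &x) {
  x.array() = x.array().tanh();
}

Whether this outperforms the raw-pointer loop depends on the compiler and on Eigen's vectorization settings, so the explicit loop in the commit is a reasonable choice.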
diff --git a/NeuralAmpModeler/dsp/wavenet.cpp b/NeuralAmpModeler/dsp/wavenet.cpp
@@ -30,7 +30,7 @@ void wavenet::_Layer::process_(const Eigen::MatrixXf &input,
// Mix-in condition
this->_z += this->_input_mixin.process(condition);
if (this->_activation == "Tanh")
- tanh_(this->_z, 0, channels, 0, this->_z.cols());
+ tanh_(this->_z);
else if (this->_activation == "ReLU")
relu_(this->_z, 0, channels, 0, this->_z.cols());
else
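Note that the replaced call, tanh_(this->_z, 0, channels, 0, this->_z.cols()), only activated rows [0, channels), while the new full-matrix call touches every row of _z. A minimal sketch of the assumption that makes the two equivalent; the assert is illustrative only and not part of the commit:

#include <cassert>

// The full-matrix tanh_ is a drop-in replacement only if _z has
// exactly `channels` rows; otherwise rows beyond `channels` would
// now be activated as well.
assert(this->_z.rows() == channels);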