NeuralAmpModelerPlugin

Plugin for Neural Amp Modeler

commit e1410f3c727512eca9fb5752f617832beb95d10a
parent d41d873818c2ff301edd7265850d9f94faa2d715
Author: Mike Oliphant <oliphant@nostatic.org>
Date:   Sat,  4 Mar 2023 11:28:03 -0800

Fast tanh activation (#95)


Diffstat:
M NeuralAmpModeler/dsp/dsp.cpp | 21 +++++++++++++++++++--
1 file changed, 19 insertions(+), 2 deletions(-)

diff --git a/NeuralAmpModeler/dsp/dsp.cpp b/NeuralAmpModeler/dsp/dsp.cpp
@@ -12,6 +12,10 @@
 #include "numpy_util.h"
 #include "util.h"
 
+#define tanh_impl_ std::tanh
+//#define tanh_impl_ fast_tanh_
+
+
 constexpr auto _INPUT_BUFFER_SAFETY_FACTOR = 32;
 
 DSP::DSP() { this->_stale_params = true; }
@@ -191,11 +195,23 @@ void sigmoid_(Eigen::MatrixXf &x, const long i_start, const long i_end,
       x(i, j) = 1.0 / (1.0 + expf(-x(i, j)));
 }
 
+inline float fast_tanh_(const float x)
+{
+  const float ax = fabs(x);
+  const float x2 = x * x;
+
+  return(x * (2.45550750702956f + 2.45550750702956f * ax +
+    (0.893229853513558f + 0.821226666969744f * ax) * x2) /
+    (2.44506634652299f + (2.44506634652299f + x2) *
+      fabs(x + 0.814642734961073f * x * ax)));
+}
+
+
 void tanh_(Eigen::MatrixXf &x, const long i_start, const long i_end,
            const long j_start, const long j_end) {
   for (long j = j_start; j < j_end; j++)
     for (long i = i_start; i < i_end; i++)
-      x(i, j) = tanh(x(i, j));
+      x(i, j) = tanh_impl_(x(i, j));
 }
 
 void tanh_(Eigen::MatrixXf &x, const long j_start, const long j_end) {
@@ -203,13 +219,14 @@ void tanh_(Eigen::MatrixXf &x, const long j_start, const long j_end) {
 }
 
 void tanh_(Eigen::MatrixXf& x) {
+  float* ptr = x.data();
 
   long size = x.rows() * x.cols();
 
   for (long pos = 0; pos < size; pos++) {
-    ptr[pos] = tanh(ptr[pos]);
+    ptr[pos] = tanh_impl_(ptr[pos]);
   }
 }
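
Below is a minimal standalone sketch, not part of the commit, that copies the fast_tanh_ rational approximation from the patch into a small harness and measures its worst-case deviation from std::tanh; the sampling range [-5, 5], the step size, and the file name are illustrative assumptions. In the plugin itself the approximation is opted into by redefining tanh_impl_ from std::tanh to fast_tanh_, as the #define pair at the top of the first hunk shows.

// fast_tanh_check.cpp -- hypothetical harness, not part of the repository.
// Compares the fast_tanh_ approximation from the patched dsp.cpp against std::tanh.
#include <cmath>
#include <cstdio>

using std::fabs; // make the unqualified fabs in the copied code well-defined

// Rational tanh approximation, copied from the patched dsp.cpp.
inline float fast_tanh_(const float x)
{
  const float ax = fabs(x);
  const float x2 = x * x;

  return (x * (2.45550750702956f + 2.45550750702956f * ax +
               (0.893229853513558f + 0.821226666969744f * ax) * x2) /
          (2.44506634652299f + (2.44506634652299f + x2) *
               fabs(x + 0.814642734961073f * x * ax)));
}

int main()
{
  // Sample the approximation on an arbitrary grid and track the largest
  // absolute difference from the reference std::tanh.
  float max_err = 0.0f;
  for (float x = -5.0f; x <= 5.0f; x += 0.001f)
  {
    const float err = std::fabs(fast_tanh_(x) - std::tanh(x));
    if (err > max_err)
      max_err = err;
  }
  std::printf("max |fast_tanh_ - std::tanh| on [-5, 5]: %g\n", max_err);
  return 0;
}

Compiled with, for example, g++ -O2 fast_tanh_check.cpp, the printed figure gives a quick sanity check that the approximation stays close to the library tanh over a representative input range before switching tanh_impl_ over to it.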