SmartGuitarAmp

Guitar plugin made with JUCE that uses neural networks to emulate a tube amplifier

Activations.cpp (5489B)


/*
  ==============================================================================

    Activations.cpp
    Created: 11 Jan 2019 11:15:47am
    Author:  Damskägg Eero-Pekka

  ==============================================================================
*/

#include "Activations.h"

#include <cassert>   // assert (may also be pulled in via Activations.h)
#include <cmath>     // tanhf, expf, fabsf
#include <cstddef>   // size_t
#include <stdexcept> // std::invalid_argument

namespace {
    typedef float (* activationFunction)(float x);

    // Apply an element-wise activation to a 2D array.
    void applyActivation(float **data, size_t rows, size_t cols, activationFunction activation)
    {
        for (size_t i = 0; i < rows; ++i)
        {
            for (size_t j = 0; j < cols; ++j)
                data[i][j] = activation(data[i][j]);
        }
    }

    // Apply an element-wise activation to a flat, row-major array.
    void applyActivation(float *data, size_t rows, size_t cols, activationFunction activation)
    {
        for (size_t i = 0; i < rows*cols; ++i)
            data[i] = activation(data[i]);
    }

    // Row-major index of element (row, col) in an array with `cols` columns.
    size_t idx(size_t row, size_t col, size_t cols)
    {
        return row * cols + col;
    }

    typedef float (* gatedActivationFunction)(float x1, float x2);

    // Apply a gated activation to a flat, row-major array. The first rows/2
    // rows feed the first argument, the last rows/2 rows feed the gate, and
    // the result is written in place over the first half.
    void applyGatedActivation(float *data, size_t rows, size_t cols, gatedActivationFunction activation)
    {
        size_t rowsHalf = rows / 2;
        for (size_t row = 0; row < rowsHalf; ++row)
        {
            size_t startIdx1 = idx(row, 0, cols);
            size_t startIdx2 = idx(row + rowsHalf, 0, cols);
            for (size_t col = 0; col < cols; ++col)
                data[startIdx1 + col] = activation(data[startIdx1 + col], data[startIdx2 + col]);
        }
    }
}

namespace Activations {
    float tanh(float x)
    {
        return tanhf(x);
    }

    float sigmoid(float x)
    {
        return 1.0f / (1.0f + expf(-x));
    }

    float relu(float x)
    {
        if (x < 0.0f)
            return 0.0f;
        else
            return x;
    }

    float softsign(float x)
    {
        return x / (1.0f + fabsf(x));
    }

    float linear(float x)
    {
        return x;
    }

    // WaveNet-style gated activation: tanh acts as the filter, sigmoid as the gate.
    float gated(float x1, float x2)
    {
        return tanh(x1) * sigmoid(x2);
    }

    // Softer variant that gates two softsign outputs.
    float softgated(float x1, float x2)
    {
        return softsign(x1) * softsign(x2);
    }

    // Overloads operating on 2D arrays.
    void tanh(float** data, size_t rows, size_t cols)
    {
        applyActivation(data, rows, cols, (activationFunction)tanh); // cast selects the float(float) overload
    }
    void sigmoid(float** data, size_t rows, size_t cols)
    {
        applyActivation(data, rows, cols, (activationFunction)sigmoid);
    }
    void relu(float** data, size_t rows, size_t cols)
    {
        applyActivation(data, rows, cols, (activationFunction)relu);
    }
    void softsign(float** data, size_t rows, size_t cols)
    {
        applyActivation(data, rows, cols, (activationFunction)softsign);
    }
    void linear(float** data, size_t rows, size_t cols)
    {
        return; // identity: leave the data unchanged
    }

    // Overloads operating on flat, row-major arrays.
    void tanh(float* data, size_t rows, size_t cols)
    {
        applyActivation(data, rows, cols, (activationFunction)tanh);
    }
    void sigmoid(float* data, size_t rows, size_t cols)
    {
        applyActivation(data, rows, cols, (activationFunction)sigmoid);
    }
    void relu(float* data, size_t rows, size_t cols)
    {
        applyActivation(data, rows, cols, (activationFunction)relu);
    }
    void softsign(float* data, size_t rows, size_t cols)
    {
        applyActivation(data, rows, cols, (activationFunction)softsign);
    }
    void linear(float* data, size_t rows, size_t cols)
    {
        return; // identity: leave the data unchanged
    }
    void gated(float* data, size_t rows, size_t cols)
    {
        assert(rows % 2 == 0); // gated activations pair each row with a gate row
        applyGatedActivation(data, rows, cols, (gatedActivationFunction)gated);
    }
    void softgated(float* data, size_t rows, size_t cols)
    {
        assert(rows % 2 == 0);
        applyGatedActivation(data, rows, cols, (gatedActivationFunction)softgated);
    }

    bool isGated(std::string name)
    {
        return (name == "gated") || (name == "softgated");
    }

    activationFunction getActivationFunction(std::string name)
    {
        if (name == "tanh")
            return tanh;
        else if (name == "sigmoid")
            return sigmoid;
        else if (name == "relu")
            return relu;
        else if (name == "softsign")
            return softsign;
        else if (name == "linear")
            return linear;
        else
            throw std::invalid_argument("Received unknown activation name.");
    }

    activationFuncArray getActivationFuncArray(std::string name)
    {
        if (name == "tanh")
            return tanh;
        else if (name == "sigmoid")
            return sigmoid;
        else if (name == "relu")
            return relu;
        else if (name == "softsign")
            return softsign;
        else if (name == "linear")
            return linear;
        else if (name == "gated")
            return gated;
        else if (name == "softgated")
            return softgated;
        else
            throw std::invalid_argument("Received unknown activation name.");
    }

    activationFunc2DArray getActivationFunc2DArray(std::string name)
    {
        if (name == "tanh")
            return tanh;
        else if (name == "sigmoid")
            return sigmoid;
        else if (name == "relu")
            return relu;
        else if (name == "softsign")
            return softsign;
        else if (name == "linear")
            return linear;
        else
            throw std::invalid_argument("Received unknown activation name.");
    }
}
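
For orientation, here is a minimal sketch of how these routines could be driven from a small test harness. It is not part of the plugin; it assumes Activations.h declares the functions above in namespace Activations, and the 4x3 buffer is hypothetical test data laid out row-major with the gate rows in the second half, as applyGatedActivation expects.

#include <cstdio>

#include "Activations.h"

int main()
{
    // Four rows of three samples, row-major. Rows 0-1 are the filter
    // input, rows 2-3 the gate input; gated() writes over rows 0-1.
    float data[4 * 3] = { 0.5f, -1.0f,  2.0f,
                          0.0f,  0.3f, -0.7f,
                          1.0f,  1.0f,  1.0f,
                         -2.0f,  0.0f,  2.0f };

    Activations::gated(data, 4, 3);

    // The same kind of call resolved by name, e.g. from a model config
    // string. (Assumes the header's activationFuncArray typedef matches
    // void (*)(float*, size_t, size_t), as the definitions here imply.)
    Activations::getActivationFuncArray("softsign")(data, 4, 3);

    for (int i = 0; i < 6; ++i)
        std::printf("%f ", data[i]);
    std::printf("\n");
    return 0;
}

Passing an unknown name ("swish", say) would throw std::invalid_argument, per the lookup functions above.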