neural-amp-modeler

Neural network emulator for guitar amplifiers
Log | Files | Refs | README | LICENSE

commit a24caf30e69bf7f2b02e89e428abe12eeba5adc1
parent 8fecd53188e05df328b6762f70fe0ca03350a8c5
Author: Steven Atkinson <steven@atkinson.mn>
Date:   Mon, 16 Sep 2024 17:32:56 -0700

Update NAM-full configs (#463)

* Update NAM-full configs

* Update NAM-full LSTM config
Diffstat:
 M nam_full_configs/data/single_pair.json |   8 ++++----
 M nam_full_configs/models/lstm.json      |  16 ++++++++--------
 M nam_full_configs/models/wavenet.json   |  26 ++++++++++++++++++++++++--
3 files changed, 36 insertions(+), 14 deletions(-)

diff --git a/nam_full_configs/data/single_pair.json b/nam_full_configs/data/single_pair.json
@@ -4,13 +4,13 @@
         "representative of this!"
     ],
     "train": {
-        "start": null,
-        "stop": -432000,
+        "start_seconds": null,
+        "stop_seconds": -9.0,
         "ny": 8192
     },
     "validation": {
-        "start": -432000,
-        "stop": null,
+        "start_seconds": -9.0,
+        "stop_seconds": null,
         "ny": null
     },
     "common": {
diff --git a/nam_full_configs/models/lstm.json b/nam_full_configs/models/lstm.json
@@ -18,19 +18,19 @@
         "name": "LSTM",
         "config": {
             "num_layers": 3,
-            "hidden_size": 24,
-            "train_burn_in": 4096,
-            "train_truncate": 512
+            "hidden_size": 18,
+            "train_burn_in": 8192,
+            "train_truncate": null
         }
     },
     "loss": {
-        "val_loss": "mse",
-        "mask_first": 4096,
-        "pre_emph_weight": 1.0,
-        "pre_emph_coef": 0.85
+        "val_loss": "esr",
+        "mask_first": 8192,
+        "pre_emph_mrstft_weight": 0.002,
+        "pre_emph_mrstft_coef": 0.85
     },
     "optimizer": {
-        "lr": 0.01
+        "lr": 0.008
     },
     "lr_scheduler": {
         "class": "ExponentialLR",
diff --git a/nam_full_configs/models/wavenet.json b/nam_full_configs/models/wavenet.json
@@ -13,7 +13,18 @@
                 "channels": 16,
                 "head_size": 8,
                 "kernel_size": 3,
-                "dilations": [1,2,4,8,16,32,64,128,256,512],
+                "dilations": [
+                    1,
+                    2,
+                    4,
+                    8,
+                    16,
+                    32,
+                    64,
+                    128,
+                    256,
+                    512
+                ],
                 "activation": "Tanh",
                 "gated": false,
                 "head_bias": false
@@ -24,7 +35,18 @@
                 "channels": 8,
                 "head_size": 1,
                 "kernel_size": 3,
-                "dilations": [1,2,4,8,16,32,64,128,256,512],
+                "dilations": [
+                    1,
+                    2,
+                    4,
+                    8,
+                    16,
+                    32,
+                    64,
+                    128,
+                    256,
+                    512
+                ],
                 "activation": "Tanh",
                 "gated": false,
                 "head_bias": true