neural-amp-modeler

Neural network emulator for guitar amplifiers

commit a1ad297c6298d603e5903754a66f0a1d3d2d1203
parent 148f75498de0746d30bc8ded9ca458e0bbd936e4
Author: Steven Atkinson <steven@atkinson.mn>
Date:   Sun,  5 Feb 2023 12:18:01 -0800

Turn off snapshot export by default (#79)

* Export snapshots only when asked to

* params in export script

* Implement for parametric models

Diffstat:
M bin/export/main.py                            |   8 +++++++-
M nam/models/_exportable.py                     |  22 ++++++++++++++++------
M nam/models/parametric/catnets.py              |   6 +++---
M nam/models/parametric/hyper_net.py            |  11 ++++++-----
M tests/test_nam/test_models/test_exportable.py | 118 +++++++++++++++++++++++++++++++++++++++++++++++++------------------------------
5 files changed, 106 insertions(+), 59 deletions(-)
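
In short: Exportable.export() now writes only the model file by default; the example input/output pair (the "snapshot") is opt-in via a new include_snapshot argument, which bin/export/main.py exposes as --include-snapshot / -s. A minimal sketch of what the snapshot is for, assuming a hypothetical export directory and a hypothetical reimplementation of the model such as the plugin port (run_my_plugin_port is a placeholder, not part of this repo):

from pathlib import Path

import numpy as np

exported = Path("exported_model")  # hypothetical: directory written by export(..., include_snapshot=True)
x = np.load(exported / "test_inputs.npy")  # snapshot input (file name per this commit)
y_expected = np.load(exported / "test_outputs.npy")  # snapshot output (file name per this commit)

y_actual = run_my_plugin_port(x)  # placeholder for your reimplementation of the model
assert np.allclose(y_actual, y_expected, atol=1.0e-6)  # tolerance chosen for illustration

(The parametric HyperConvNet writes test_signal_params.npy / test_signal_input.npy / test_signal_output.npy instead; see its diff below.)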

diff --git a/bin/export/main.py b/bin/export/main.py
@@ -40,7 +40,7 @@ def main(args):
         export_args = (outdir, param_config)
     net.eval()
     outdir.mkdir(parents=True, exist_ok=True)
-    net.export(*export_args)
+    net.export(*export_args, include_snapshot=args.include_snapshot)
     net.export_cpp_header(
         Path(export_args[0], "HardCodedModel.h"), *export_args[1:]
     )
@@ -50,6 +50,12 @@ if __name__ == "__main__":
     parser.add_argument("checkpoint", type=str)
     parser.add_argument("outdir")
     parser.add_argument(
+        "--include-snapshot",
+        "-s",
+        help="Computes an example input-output pair for the model for debugging "
+        "purposes",
+    )
+    parser.add_argument(
         "--param-config", type=str, help="Configuration for a parametric model"
     )
     main(parser.parse_args())

diff --git a/nam/models/_exportable.py b/nam/models/_exportable.py
@@ -4,6 +4,7 @@
 
 import abc
 import json
+import logging
 from pathlib import Path
 from typing import Tuple
 
@@ -12,13 +13,15 @@ import numpy as np
 from .._version import __version__
 from ..data import np_to_wav
 
+logger = logging.getLogger(__name__)
+
 
 class Exportable(abc.ABC):
     """
     Interface for my custon export format for use in the plugin.
     """
 
-    def export(self, outdir: Path):
+    def export(self, outdir: Path, include_snapshot: bool = False):
         """
         Interface for exporting.
         You should create at least a `config.json` containing the two fields:
@@ -27,6 +30,10 @@ class Exportable(abc.ABC):
         * "config": (dict w/ other necessary data like tensor shapes etc)
 
         :param outdir: Assumed to exist. Can be edited inside at will.
+        :param include_snapshots: If True, outputs `input.npy` and `output.npy`
+            Containing an example input/output pair that the model creates. This
+            Can be used to debug e.g. the implementation of the model in the
+            plugin.
         """
         training = self.training
         self.eval()
@@ -41,11 +48,14 @@ class Exportable(abc.ABC):
                 fp,
                 indent=4,
             )
-        x, y = self._export_input_output()
-        np.save(Path(outdir, "inputs.npy"), x)
-        np.save(Path(outdir, "outputs.npy"), y)
-        np_to_wav(x, Path(outdir, "input.wav"))
-        np_to_wav(y, Path(outdir, "output.wav"))
+        if include_snapshot:
+            x, y = self._export_input_output()
+            x_path = Path(outdir, "test_inputs.npy")
+            y_path = Path(outdir, "test_outputs.npy")
+            logger.debug(f"Saving snapshot input to {x_path}")
+            np.save(x_path, x)
+            logger.debug(f"Saving snapshot output to {y_path}")
+            np.save(y_path, y)
 
         # And resume training state
         self.train(training)

diff --git a/nam/models/parametric/catnets.py b/nam/models/parametric/catnets.py
@@ -57,10 +57,10 @@ class _CatMixin(ParametricBaseNet):
         #   ._export_input_output()
         pass  # HACK
 
-    def export(self, outdir: Path, parametric_config: Dict[str, Param]):
+    def export(self, outdir: Path, parametric_config: Dict[str, Param], **kwargs):
         """
         Interface for exporting.
-        You should create at least a `config.json` containing the two fields:
+        You should create at least a `config.json` containing the fields:
         * "version" (str)
         * "architecture" (str)
         * "config": (dict w/ other necessary data like tensor shapes etc)
@@ -68,7 +68,7 @@ class _CatMixin(ParametricBaseNet):
         :param outdir: Assumed to exist. Can be edited inside at will.
         """
         with self._use_parametric_config(parametric_config):
-            return super().export(outdir)
+            return super().export(outdir, **kwargs)
 
     def export_cpp_header(self, filename: Path, parametric_config: Dict[str, Param]):
         with self._use_parametric_config(parametric_config):

diff --git a/nam/models/parametric/hyper_net.py b/nam/models/parametric/hyper_net.py
@@ -315,7 +315,7 @@ class HyperConvNet(ParametricBaseNet):
         # Last conv is the collapser--compensate w/ a minus 1
         return sum([m.dilation[0] for m in self._net if isinstance(m, _Conv)]) + 1 - 1
 
-    def export(self, outdir: Path):
+    def export(self, outdir: Path, include_snapshot: bool=False):
         """
         Files created:
         * config.json
@@ -365,10 +365,11 @@ class HyperConvNet(ParametricBaseNet):
         np.save(Path(outdir, "weights.npy"), self._export_weights())
 
         # And an input/output to verify correct computation:
-        params, x, y = self._export_input_output()
-        np.save(Path(outdir, "test_signal_params.npy"), params.detach().cpu().numpy())
-        np.save(Path(outdir, "test_signal_input.npy"), x.detach().cpu().numpy())
-        np.save(Path(outdir, "test_signal_output.npy"), y.detach().cpu().numpy())
+        if include_snapshot:
+            params, x, y = self._export_input_output()
+            np.save(Path(outdir, "test_signal_params.npy"), params.detach().cpu().numpy())
+            np.save(Path(outdir, "test_signal_input.npy"), x.detach().cpu().numpy())
+            np.save(Path(outdir, "test_signal_output.npy"), y.detach().cpu().numpy())
 
         # And resume training state
         self.train(training)

diff --git a/tests/test_nam/test_models/test_exportable.py b/tests/test_nam/test_models/test_exportable.py
@@ -18,46 +18,77 @@ import torch.nn as nn
 
 from nam.models import _exportable
 
 
-def test_export():
-    """
-    Does it work?
-    """
-    class Model(nn.Module, _exportable.Exportable):
-        def __init__(self):
-            super().__init__()
-            self._scale = nn.Parameter(torch.tensor(0.0))
-            self._bias = nn.Parameter(torch.tensor(0.0))
-
-        def forward(self, x: torch.Tensor):
-            return self._scale * x + self._bias
-
-        def export_cpp_header(self, filename: Path):
-            pass
-
-        def _export_config(self):
-            return {}
-
-        def _export_input_output(self) -> Tuple[np.ndarray, np.ndarray]:
-            x = 0.01 * np.random.randn(3,)
-            y = self(torch.Tensor(x)).detach().cpu().numpy()
-            return x, y
-
-        def _export_weights(self) -> np.ndarray:
-            return torch.stack([self._scale, self._bias]).detach().cpu().numpy()
-
-    model = Model()
-    with TemporaryDirectory() as tmpdir:
-        model.export(tmpdir)
-        model_basename = "model.nam"
-        model_path = Path(tmpdir, model_basename)
-        assert model_path.exists()
-        with open(model_path, "r") as fp:
-            model_dict = json.load(fp)
-        required_keys = {"version", "architecture", "config", "weights"}
-        for key in required_keys:
-            assert key in model_dict
-        weights_list = model_dict["weights"]
-        assert isinstance(weights_list, list)
-        assert len(weights_list) == 2
-        assert all(isinstance(w, float) for w in weights_list)
-
\ No newline at end of file
+
+class TestExportable(object):
+    def test_export(self):
+        """
+        Does it work?
+        """
+
+        model = self._get_model()
+        with TemporaryDirectory() as tmpdir:
+            model.export(tmpdir)
+            model_basename = "model.nam"
+            model_path = Path(tmpdir, model_basename)
+            assert model_path.exists()
+            with open(model_path, "r") as fp:
+                model_dict = json.load(fp)
+            required_keys = {"version", "architecture", "config", "weights"}
+            for key in required_keys:
+                assert key in model_dict
+            weights_list = model_dict["weights"]
+            assert isinstance(weights_list, list)
+            assert len(weights_list) == 2
+            assert all(isinstance(w, float) for w in weights_list)
+
+    @pytest.mark.parametrize("include_snapshot", (True, False))
+    def test_include_snapshot(self, include_snapshot):
+        """
+        Does the option to include a snapshot work?
+        """
+        model = self._get_model()
+
+        with TemporaryDirectory() as tmpdir:
+            model.export(tmpdir, include_snapshot=include_snapshot)
+            input_path = Path(tmpdir, "test_inputs.npy")
+            output_path = Path(tmpdir, "test_outputs.npy")
+            if include_snapshot:
+                assert input_path.exists()
+                assert output_path.exists()
+                # And check that the output is correct
+                x = np.load(input_path)
+                y = np.load(output_path)
+                preds = model(torch.Tensor(x)).detach().cpu().numpy()
+                assert preds == pytest.approx(y)
+            else:
+                assert not input_path.exists()
+                assert not output_path.exists()
+
+    @classmethod
+    def _get_model(cls):
+        class Model(nn.Module, _exportable.Exportable):
+            def __init__(self):
+                super().__init__()
+                self._scale = nn.Parameter(torch.tensor(0.0))
+                self._bias = nn.Parameter(torch.tensor(0.0))
+
+            def forward(self, x: torch.Tensor):
+                return self._scale * x + self._bias
+
+            def export_cpp_header(self, filename: Path):
+                pass
+
+            def _export_config(self):
+                return {}
+
+            def _export_input_output(self) -> Tuple[np.ndarray, np.ndarray]:
+                x = 0.01 * np.random.randn(
+                    3,
+                )
+                y = self(torch.Tensor(x)).detach().cpu().numpy()
+                return x, y
+
+            def _export_weights(self) -> np.ndarray:
+                return torch.stack([self._scale, self._bias]).detach().cpu().numpy()
+
+        return Model()