NeuralAmpModelerPlugin

Plugin for Neural Amp Modeler

commit 11a9dd97ef9c888e5d5bac17708ed1bcfef779bf
parent 99b0d392ea4b7d1fb1b42b72bc0ef0b683b6d17a
Author: Steven Atkinson <steven@atkinson.mn>
Date:   Sun,  5 Feb 2023 16:24:21 -0800

Remove support for directory-style models (#64)

* Change assert to throwing runtime error for non-little-endian

* Get rid of legacy model loading code in NeuralAmpModeler class

* Remove support for old models from get_dsp.cpp

* Another assert(littleEndian)

* Relay error message into popup
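
The first change above replaces an assert() in the .npy header parser with a thrown std::runtime_error. The difference matters in a plugin: an assert compiles out of release builds and aborts the host in debug builds, while an exception propagates to the caller, which can turn it into a user-facing message. Below is a minimal standalone sketch of that pattern; check_little_endian and its descr_byte parameter are illustrative stand-ins for cnpy::parse_npy_header, not code from this commit:

    #include <exception>
    #include <stdexcept>

    // In a .npy header, descr codes '<' (little-endian) and '|' (byte order
    // not applicable) are accepted; anything else (e.g. '>') is rejected with
    // an exception instead of an assert, so the caller can report it.
    void check_little_endian(char descr_byte) {
      const bool littleEndian = (descr_byte == '<' || descr_byte == '|');
      if (!littleEndian)
        throw std::runtime_error(
            "Little-endian format is required, but big-endian was found instead!");
    }

    int main() {
      try {
        check_little_endian('>'); // big-endian header byte
      } catch (const std::exception &e) {
        // e.what() can now be relayed to the UI rather than aborting the host.
      }
    }
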
Diffstat:
M NeuralAmpModeler/NeuralAmpModeler.cpp | 85 +++++++++++++++-----------------------------------------------------------------
M NeuralAmpModeler/NeuralAmpModeler.h | 8 ++------
M NeuralAmpModeler/dsp/cnpy.cpp | 8 ++++++--
M NeuralAmpModeler/dsp/get_dsp.cpp | 28 +++++++++++-----------------
4 files changed, 35 insertions(+), 94 deletions(-)

diff --git a/NeuralAmpModeler/NeuralAmpModeler.cpp b/NeuralAmpModeler/NeuralAmpModeler.cpp
@@ -102,8 +102,8 @@ NeuralAmpModeler::NeuralAmpModeler(const InstanceInfo &info)
       mIR(nullptr), mStagedNAM(nullptr), mStagedIR(nullptr),
       mFlagRemoveNAM(false), mFlagRemoveIR(false),
       mDefaultNAMString("Select model..."), mDefaultIRString("Select IR..."),
-      mToneBass(), mToneMid(), mToneTreble(), mNAMPath(), mNAMLegacyPath(),
-      mIRPath(), mInputSender(), mOutputSender() {
+      mToneBass(), mToneMid(), mToneTreble(), mNAMPath(), mIRPath(),
+      mInputSender(), mOutputSender() {
   GetParam(kInputLevel)->InitGain("Input", 0.0, -20.0, 20.0, 0.1);
   GetParam(kToneBass)->InitDouble("Bass", 5.0, 0.0, 10.0, 0.1);
   GetParam(kToneMid)->InitDouble("Middle", 5.0, 0.0, 10.0, 0.1);
@@ -130,7 +130,6 @@ NeuralAmpModeler::NeuralAmpModeler(const InstanceInfo &info)
     pGraphics->EnableMouseOver(true);
     auto helpSVG = pGraphics->LoadSVG(HELP_FN);
     auto fileSVG = pGraphics->LoadSVG(FILE_FN);
-    auto folderSVG = pGraphics->LoadSVG(FOLDER_FN);
     auto closeButtonSVG = pGraphics->LoadSVG(CLOSE_BUTTON_FN);
     pGraphics->LoadFont("Roboto-Regular", ROBOTO_FN);
     const IRECT b = pGraphics->GetBounds();
@@ -206,27 +205,20 @@ NeuralAmpModeler::NeuralAmpModeler(const InstanceInfo &info)
       pGraphics->PromptForFile(filename, path);
       if (filename.GetLength()) {
         // Sets mNAMPath and mStagedNAM
-        bool success = this->_GetNAM(filename);
+        const std::string msg = this->_GetNAM(filename);
         // TODO error messages like the IR loader.
-        if (!success)
-          pGraphics->ShowMessageBox(
-              "Failed to load NAM model. If the model is an old "
-              "\"directory-style\" model, it can be converted using the "
-              "utility at https://github.com/sdatkinson/nam-model-utility",
-              "Failed to load model!", kMB_OK);
+        if (msg.size()) {
+          std::stringstream ss;
+          ss << "Failed to load NAM model. Message:\n\n"
+             << msg << "\n\n"
+             << "If the model is an old \"directory-style\" model, it can be "
+                "converted using the utility at "
+                "https://github.com/sdatkinson/nam-model-utility";
+          pGraphics->ShowMessageBox(ss.str().c_str(), "Failed to load model!",
+                                    kMB_OK);
+        }
       }
     };
-#if defined OS_MAC
-    // Legacy directory-based loader.
-    auto loadNAMLegacy = [&, pGraphics](IControl *pCaller) {
-      WDL_String dir;
-      pGraphics->PromptForDirectory(
-          dir, [&](const WDL_String &fileName, const WDL_String &path) {
-            if (path.GetLength())
-              _GetNAMLegacy(path);
-          });
-    };
-#endif
     // IR loader button
     auto loadIR = [&, pGraphics](IControl *pCaller) {
       WDL_String fileName;
@@ -300,15 +292,6 @@ NeuralAmpModeler::NeuralAmpModeler(const InstanceInfo &info)
         modelArea, "", style.WithColor(kFG, PluginColors::NAM_1)));
     pGraphics->AttachControl(new IRolloverSVGButtonControl(
         modelArea.GetFromLeft(iconWidth).GetPadded(-2.f), loadNAM, fileSVG));
-#if defined OS_MAC
-    // Extra button for legacy model loading since Sandboxing prevent the
-    // Windows way of doing it.
-    pGraphics->AttachControl(
-        new IRolloverSVGButtonControl(modelArea.GetFromLeft(iconWidth)
-                                          .GetTranslated(iconWidth, 0.0f)
-                                          .GetPadded(-2.f),
-                                      loadNAMLegacy, folderSVG));
-#endif
     pGraphics->AttachControl(new IRolloverSVGButtonControl(
         modelArea.GetFromRight(iconWidth).GetPadded(-2.f), ClearNAM,
         closeButtonSVG));
@@ -569,7 +552,6 @@ bool NeuralAmpModeler::SerializeState(IByteChunk &chunk) const {
   // Model directory (don't serialize the model itself; we'll just load it again
   // when we unserialize)
   chunk.PutStr(this->mNAMPath.Get());
-  chunk.PutStr(this->mNAMLegacyPath.Get());
   chunk.PutStr(this->mIRPath.Get());
   return SerializeParams(chunk);
 }
@@ -577,13 +559,10 @@ bool NeuralAmpModeler::SerializeState(IByteChunk &chunk) const {
 int NeuralAmpModeler::UnserializeState(const IByteChunk &chunk, int startPos) {
   WDL_String dir;
   startPos = chunk.GetStr(this->mNAMPath, startPos);
-  startPos = chunk.GetStr(this->mNAMLegacyPath, startPos);
   startPos = chunk.GetStr(this->mIRPath, startPos);
   this->mNAM = nullptr;
   this->mIR = nullptr;
   int retcode = UnserializeParams(chunk, startPos);
-  if (this->mNAMLegacyPath.GetLength())
-    this->_GetNAMLegacy(this->mNAMLegacyPath);
   if (this->mNAMPath.GetLength())
     this->_GetNAM(this->mNAMPath);
   if (this->mIRPath.GetLength())
@@ -593,8 +572,6 @@ int NeuralAmpModeler::UnserializeState(const IByteChunk &chunk, int startPos) {
 
 void NeuralAmpModeler::OnUIOpen() {
   Plugin::OnUIOpen();
-  if (this->mNAMLegacyPath.GetLength())
-    this->_SetModelMsg(this->mNAMLegacyPath);
   if (this->mNAMPath.GetLength())
     this->_SetModelMsg(this->mNAMPath);
   if (this->mIRPath.GetLength())
@@ -633,7 +610,6 @@ void NeuralAmpModeler::_ApplyDSPStaging() {
   if (this->mFlagRemoveNAM) {
     this->mNAM = nullptr;
     this->mNAMPath.Set("");
-    this->mNAMLegacyPath.Set("");
     this->_UnsetModelMsg();
     this->mFlagRemoveNAM = false;
   }
@@ -668,15 +644,13 @@ void NeuralAmpModeler::_FallbackDSP(const int nFrames) {
       this->mOutputArray[c][s] = this->mInputArray[c][s];
 }
 
-bool NeuralAmpModeler::_GetNAM(const WDL_String &modelPath) {
+std::string NeuralAmpModeler::_GetNAM(const WDL_String &modelPath) {
   WDL_String previousNAMPath = this->mNAMPath;
-  WDL_String previousNAMLegacyPath = this->mNAMLegacyPath;
   try {
     auto dspPath = std::filesystem::path(modelPath.Get());
     mStagedNAM = get_dsp(dspPath);
     this->_SetModelMsg(modelPath);
     this->mNAMPath = modelPath;
-    this->mNAMLegacyPath.Set("");
   } catch (std::exception &e) {
     std::stringstream ss;
     ss << "FAILED to load model";
@@ -686,38 +660,11 @@ bool NeuralAmpModeler::_GetNAM(const WDL_String &modelPath) {
       mStagedNAM = nullptr;
     }
     this->mNAMPath = previousNAMPath;
-    this->mNAMLegacyPath = previousNAMLegacyPath;
-    std::cerr << "Failed to read DSP module" << std::endl;
-    std::cerr << e.what() << std::endl;
-    return false;
-  }
-  return true;
-}
-
-bool NeuralAmpModeler::_GetNAMLegacy(const WDL_String &modelDir) {
-  WDL_String previousNAMLegacyPath = this->mNAMLegacyPath;
-  WDL_String previousNAMPath = this->mNAMPath;
-  try {
-    auto dspPath = std::filesystem::path(modelDir.Get());
-    mStagedNAM = get_dsp_legacy(dspPath);
-    this->_SetModelMsg(modelDir);
-    this->mNAMLegacyPath = modelDir;
-    this->mNAMPath.Set("");
-  } catch (std::exception &e) {
-    std::stringstream ss;
-    ss << "FAILED to load legacy model";
-    SendControlMsgFromDelegate(kCtrlTagModelName, 0,
-                               int(strlen(ss.str().c_str())), ss.str().c_str());
-    if (mStagedNAM != nullptr) {
-      mStagedNAM = nullptr;
-    }
-    this->mNAMLegacyPath = previousNAMLegacyPath;
-    this->mNAMPath = previousNAMLegacyPath;
     std::cerr << "Failed to read DSP module" << std::endl;
     std::cerr << e.what() << std::endl;
-    return false;
+    return e.what();
   }
-  return true;
+  return "";
 }
 
 dsp::wav::LoadReturnCode NeuralAmpModeler::_GetIR(const WDL_String &irPath) {
diff --git a/NeuralAmpModeler/NeuralAmpModeler.h b/NeuralAmpModeler/NeuralAmpModeler.h
@@ -70,10 +70,8 @@ private:
   size_t _GetBufferNumChannels() const;
   size_t _GetBufferNumFrames() const;
   // Gets a new Neural Amp Model object and stores it to mStagedNAM
-  // Returns a bool for whether the operation was successful.
-  bool _GetNAM(const WDL_String &dspFile);
-  // Legacy load from directory containing "config.json" and "weights.npy"
-  bool _GetNAMLegacy(const WDL_String &dspDirectory);
+  // Returns an emptry string on success, or an error message on failure.
+  std::string _GetNAM(const WDL_String &dspFile);
   // Gets the IR and stores to mStagedIR.
   // Return status code so that error messages can be relayed if
   // it wasn't successful.
@@ -141,8 +139,6 @@ private:
 
   // Path to model's config.json or model.nam
   WDL_String mNAMPath;
-  // Legacy
-  WDL_String mNAMLegacyPath;
   // Path to IR (.wav file)
   WDL_String mIRPath;
 
diff --git a/NeuralAmpModeler/dsp/cnpy.cpp b/NeuralAmpModeler/dsp/cnpy.cpp
@@ -116,7 +116,9 @@ void cnpy::parse_npy_header(unsigned char *buffer, size_t &word_size,
   loc1 = header.find("descr") + 9;
   bool littleEndian =
       (header[loc1] == '<' || header[loc1] == '|' ? true : false);
-  assert(littleEndian);
+  if (!littleEndian)
+    throw std::runtime_error(
+        "Little-endian format is required, but big-endian was found instead!");
 
   // char type = header[loc1+1];
   // assert(type == map_type(T));
@@ -172,7 +174,9 @@ void cnpy::parse_npy_header(FILE *fp, size_t &word_size,
   loc1 += 9;
   bool littleEndian =
       (header[loc1] == '<' || header[loc1] == '|' ? true : false);
-  assert(littleEndian);
+  if (!littleEndian)
+    throw std::runtime_error(
+        "Little-endian format is required, but big-endian was found instead!");
 
   // char type = header[loc1+1];
   // assert(type == map_type(T));
diff --git a/NeuralAmpModeler/dsp/get_dsp.cpp b/NeuralAmpModeler/dsp/get_dsp.cpp
@@ -9,32 +9,26 @@
 #include "wavenet.h"
 
 void verify_config_version(const std::string version) {
-  const std::unordered_set<std::string> supported_versions(
-      {"0.2.0", "0.2.1", "0.3.0", "0.3.1", "0.4.0", "0.5.0"});
-  if (supported_versions.find(version) == supported_versions.end())
-    throw std::runtime_error("Unsupported config version");
+  const std::unordered_set<std::string> supported_versions({"0.5.0"});
+  if (supported_versions.find(version) == supported_versions.end()) {
+    std::stringstream ss;
+    ss << "Model config is an unsupported version " << version
+       << ". Try either converting the model to a more recent version, or "
+          "update your version of the NAM plugin.";
+    throw std::runtime_error(ss.str());
+  }
 }
 
 std::vector<float> _get_weights(nlohmann::json const &j,
                                 const std::filesystem::path config_path) {
-  if (j.find("weights") != j.end()) { // New-style model
+  if (j.find("weights") != j.end()) {
     auto weight_list = j["weights"];
     std::vector<float> weights;
     for (auto it = weight_list.begin(); it != weight_list.end(); ++it)
       weights.push_back(*it);
     return weights;
-  } else { // Old-style config.json + weights.npy
-    std::filesystem::path weights_path =
-        config_path.parent_path() / std::filesystem::path("weights.npy");
-    if (!std::filesystem::exists(weights_path)) {
-      std::stringstream s;
-      s << "No weights in model file, and could not find accompanying weights "
-           "at expected location "
-        << weights_path;
-      throw std::runtime_error(s.str());
-    }
-    return numpy_util::load_to_vector(weights_path);
-  }
+  } else
+    throw std::runtime_error("Corrupted model file is missing weights.");
 }
 
 std::unique_ptr<DSP> get_dsp_legacy(const std::filesystem::path model_dir) {
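
Taken together, the loading path now follows a simple contract: _GetNAM returns an empty string on success and the caught exception's text on failure, and the UI layer decides how to present that text in a popup. Below is a rough sketch of that contract, not the plugin's actual code: LoadModel is a hypothetical stand-in for _GetNAM, the thrown message is illustrative, and console output stands in for ShowMessageBox:

    #include <iostream>
    #include <sstream>
    #include <stdexcept>
    #include <string>

    // Hypothetical loader following the new contract: "" means success,
    // anything else is the error text to surface to the user.
    std::string LoadModel(const std::string &path) {
      try {
        if (path.rfind(".nam") == std::string::npos)
          throw std::runtime_error("Model config is an unsupported version.");
        // ... stage the model here on success ...
        return "";
      } catch (const std::exception &e) {
        return e.what();
      }
    }

    int main() {
      const std::string msg = LoadModel("old_model.json");
      if (!msg.empty()) {
        std::stringstream ss;
        ss << "Failed to load NAM model. Message:\n\n" << msg;
        std::cout << ss.str() << std::endl; // the plugin shows this in a popup
      }
    }
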