diff --git a/NeuralAmpModeler/NeuralAmpModeler.cpp b/NeuralAmpModeler/NeuralAmpModeler.cpp index 7774c4ba..02010402 100644 --- a/NeuralAmpModeler/NeuralAmpModeler.cpp +++ b/NeuralAmpModeler/NeuralAmpModeler.cpp @@ -1,6 +1,7 @@ #include <algorithm> // std::clamp, std::min #include <cmath> // pow #include <filesystem> +#include <fstream> // std::ifstream for file reading #include <iostream> #include <utility> @@ -74,7 +75,6 @@ const bool kDefaultCalibrateInput = false; const std::string kInputCalibrationLevelParamName = "InputCalibrationLevel"; const double kDefaultInputCalibrationLevel = 12.0; - NeuralAmpModeler::NeuralAmpModeler(const InstanceInfo& info) : Plugin(info, MakeConfig(kNumParams, kNumPresets)) { @@ -183,7 +183,6 @@ NeuralAmpModeler::NeuralAmpModeler(const InstanceInfo& info) { // Sets mNAMPath and mStagedNAM const std::string msg = _StageModel(fileName); - // TODO error messages like the IR loader. if (msg.size()) { std::stringstream ss; @@ -407,16 +406,33 @@ void NeuralAmpModeler::OnIdle() bool NeuralAmpModeler::SerializeState(IByteChunk& chunk) const { - // If this isn't here when unserializing, then we know we're dealing with something before v0.7.13. + // If this isn't here when unserializing, then we know we're dealing with something before v0.7.13. WDL_String header("###NeuralAmpModeler###"); // Don't change this! chunk.PutStr(header.Get()); // Plugin version, so we can load legacy serialized states in the future! 
WDL_String version(PLUG_VERSION_STR); chunk.PutStr(version.Get()); - // Model directory (don't serialize the model itself; we'll just load it again - // when we unserialize) + + // Serialize file paths for backward compatibility chunk.PutStr(mNAMPath.Get()); chunk.PutStr(mIRPath.Get()); + + // Embed the actual file data for portability + // Data was read when model/IR was loaded + int namDataSize = static_cast<int>(mNAMData.size()); + chunk.Put(&namDataSize); + if (namDataSize > 0) + { + chunk.PutBytes(mNAMData.data(), namDataSize); + } + + int irDataSize = static_cast<int>(mIRData.size()); + chunk.Put(&irDataSize); + if (irDataSize > 0) + { + chunk.PutBytes(mIRData.data(), irDataSize); + } + return SerializeParams(chunk); } @@ -697,6 +713,18 @@ std::string NeuralAmpModeler::_StageModel(const WDL_String& modelPath) temp->Reset(GetSampleRate(), GetBlockSize()); mStagedModel = std::move(temp); mNAMPath = modelPath; + + // Read file data for embedding in session + mNAMData.clear(); + std::ifstream file(dspPath, std::ios::binary | std::ios::ate); + if (file.is_open()) + { + std::streamsize size = file.tellg(); + file.seekg(0, std::ios::beg); + mNAMData.resize(static_cast<size_t>(size)); + file.read(reinterpret_cast<char*>(mNAMData.data()), size); + } + SendControlMsgFromDelegate(kCtrlTagModelFileBrowser, kMsgTagLoadedModel, mNAMPath.GetLength(), mNAMPath.Get()); } catch (std::runtime_error& e) { @@ -721,6 +749,7 @@ dsp::wav::LoadReturnCode NeuralAmpModeler::_StageIR(const WDL_String& irPath) // path and the model got caught on opposite sides of the fence... 
WDL_String previousIRPath = mIRPath; const double sampleRate = GetSampleRate(); + dsp::wav::LoadReturnCode wavState = dsp::wav::LoadReturnCode::ERROR_OTHER; try { @@ -738,6 +767,19 @@ dsp::wav::LoadReturnCode NeuralAmpModeler::_StageIR(const WDL_String& irPath) if (wavState == dsp::wav::LoadReturnCode::SUCCESS) { mIRPath = irPath; + + // Read file data for embedding in session + mIRData.clear(); + auto irPathU8 = std::filesystem::u8path(irPath.Get()); + std::ifstream file(irPathU8, std::ios::binary | std::ios::ate); + if (file.is_open()) + { + std::streamsize size = file.tellg(); + file.seekg(0, std::ios::beg); + mIRData.resize(static_cast<size_t>(size)); + file.read(reinterpret_cast<char*>(mIRData.data()), size); + } + SendControlMsgFromDelegate(kCtrlTagIRFileBrowser, kMsgTagLoadedIR, mIRPath.GetLength(), mIRPath.Get()); } else @@ -911,5 +953,207 @@ void NeuralAmpModeler::_UpdateMeters(sample** inputPointer, sample** outputPoint mOutputSender.ProcessBlock(outputPointer, (int)nFrames, kCtrlTagOutputMeter, nChansHack); } +std::string NeuralAmpModeler::_StageModelFromData(const std::vector<uint8_t>& data, const WDL_String& originalPath) +{ + WDL_String previousNAMPath = mNAMPath; + const double sampleRate = GetSampleRate(); + + try + { + // Parse the JSON from memory + std::string jsonStr(data.begin(), data.end()); + nlohmann::json j = nlohmann::json::parse(jsonStr); + + // Build dspData structure + nam::dspData dspData; + dspData.version = j["version"]; + dspData.architecture = j["architecture"]; + dspData.config = j["config"]; + dspData.metadata = j["metadata"]; + + // Extract weights + if (j.find("weights") != j.end()) + { + dspData.weights = j["weights"].get<std::vector<float>>(); + } + + // Extract sample rate + if (j.find("sample_rate") != j.end()) + dspData.expected_sample_rate = j["sample_rate"]; + else + dspData.expected_sample_rate = -1.0; + + // Create DSP from dspData + std::unique_ptr<nam::DSP> model = nam::get_dsp(dspData); + std::unique_ptr<ResamplingNAM> temp = std::make_unique<ResamplingNAM>(std::move(model), sampleRate); + 
temp->Reset(sampleRate, GetBlockSize()); + mStagedModel = std::move(temp); + mNAMPath = originalPath; + mNAMData = data; // Store the embedded data + SendControlMsgFromDelegate(kCtrlTagModelFileBrowser, kMsgTagLoadedModel, mNAMPath.GetLength(), mNAMPath.Get()); + } + catch (std::exception& e) + { + SendControlMsgFromDelegate(kCtrlTagModelFileBrowser, kMsgTagLoadFailed); + + if (mStagedModel != nullptr) + { + mStagedModel = nullptr; + } + mNAMPath = previousNAMPath; + std::cerr << "Failed to read DSP module from embedded data" << std::endl; + std::cerr << e.what() << std::endl; + return e.what(); + } + return ""; +} + +dsp::wav::LoadReturnCode NeuralAmpModeler::_StageIRFromData(const std::vector<uint8_t>& data, + const WDL_String& originalPath) +{ + WDL_String previousIRPath = mIRPath; + const double sampleRate = GetSampleRate(); + + dsp::wav::LoadReturnCode wavState = dsp::wav::LoadReturnCode::ERROR_OTHER; + + try + { + // Parse WAV from memory + std::vector<float> audio; + double wavSampleRate = 0.0; + + // Basic WAV parser for in-memory data + // WAV format: RIFF header (12 bytes) + fmt chunk + data chunk + if (data.size() < 44) // Minimum WAV file size + { + throw std::runtime_error("IR data too small to be valid WAV"); + } + + // Check RIFF header + if (data[0] != 'R' || data[1] != 'I' || data[2] != 'F' || data[3] != 'F') + { + throw std::runtime_error("Invalid WAV format - missing RIFF header"); + } + + // Check WAVE format + if (data[8] != 'W' || data[9] != 'A' || data[10] != 'V' || data[11] != 'E') + { + throw std::runtime_error("Invalid WAV format - not a WAVE file"); + } + + // Find fmt chunk + size_t pos = 12; + uint16_t audioFormat = 0; + uint16_t numChannels = 0; + uint32_t sampleRateInt = 0; + uint16_t bitsPerSample = 0; + + while (pos < data.size() - 8) + { + std::string chunkID(data.begin() + pos, data.begin() + pos + 4); + uint32_t chunkSize = *reinterpret_cast<const uint32_t*>(&data[pos + 4]); + + if (chunkID == "fmt ") + { + audioFormat = *reinterpret_cast<const uint16_t*>(&data[pos + 8]); + 
numChannels = *reinterpret_cast<const uint16_t*>(&data[pos + 10]); + sampleRateInt = *reinterpret_cast<const uint32_t*>(&data[pos + 12]); + bitsPerSample = *reinterpret_cast<const uint16_t*>(&data[pos + 22]); + wavSampleRate = static_cast<double>(sampleRateInt); + } + else if (chunkID == "data") + { + // Found data chunk + size_t dataStart = pos + 8; + size_t numSamples = chunkSize / (bitsPerSample / 8); + + audio.resize(numSamples); + + // Convert based on bits per sample + if (bitsPerSample == 16 && audioFormat == 1) // PCM 16-bit + { + for (size_t i = 0; i < numSamples; i++) + { + int16_t sample = *reinterpret_cast<const int16_t*>(&data[dataStart + i * 2]); + audio[i] = sample / 32768.0f; + } + } + else if (bitsPerSample == 24 && audioFormat == 1) // PCM 24-bit + { + for (size_t i = 0; i < numSamples; i++) + { + int32_t sample = 0; + sample |= static_cast<int32_t>(data[dataStart + i * 3]); + sample |= static_cast<int32_t>(data[dataStart + i * 3 + 1]) << 8; + sample |= static_cast<int32_t>(data[dataStart + i * 3 + 2]) << 16; + if (sample & 0x800000) + sample |= 0xFF000000; // Sign extend + audio[i] = sample / 8388608.0f; + } + } + else if (bitsPerSample == 32 && audioFormat == 3) // IEEE float 32-bit + { + for (size_t i = 0; i < numSamples; i++) + { + audio[i] = *reinterpret_cast<const float*>(&data[dataStart + i * 4]); + } + } + else + { + throw std::runtime_error("Unsupported WAV format"); + } + + break; + } + + pos += 8 + chunkSize; + } + + if (audio.empty()) + { + throw std::runtime_error("No audio data found in WAV"); + } + + // Validate that fmt chunk was actually found and sample rate is valid + // WAV files can have missing fmt chunks or chunks in wrong order + if (wavSampleRate <= 0.0 || wavSampleRate != wavSampleRate) + { + throw std::runtime_error("Invalid or missing sample rate in WAV fmt chunk"); + } + + // Create IR from the loaded data + dsp::ImpulseResponse::IRData irData; + irData.mRawAudio = audio; + irData.mRawAudioSampleRate = wavSampleRate; + + mStagedIR = std::make_unique<dsp::ImpulseResponse>(irData, sampleRate); + wavState = dsp::wav::LoadReturnCode::SUCCESS; + } + 
catch (std::exception& e) + { + wavState = dsp::wav::LoadReturnCode::ERROR_OTHER; + std::cerr << "Failed to load IR from embedded data:" << std::endl; + std::cerr << e.what() << std::endl; + } + + if (wavState == dsp::wav::LoadReturnCode::SUCCESS) + { + mIRPath = originalPath; + mIRData = data; // Store the embedded data + SendControlMsgFromDelegate(kCtrlTagIRFileBrowser, kMsgTagLoadedIR, mIRPath.GetLength(), mIRPath.Get()); + } + else + { + if (mStagedIR != nullptr) + { + mStagedIR = nullptr; + } + mIRPath = previousIRPath; + SendControlMsgFromDelegate(kCtrlTagIRFileBrowser, kMsgTagLoadFailed); + } + + return wavState; +} + // HACK #include "Unserialization.cpp" diff --git a/NeuralAmpModeler/NeuralAmpModeler.h b/NeuralAmpModeler/NeuralAmpModeler.h index f22298cd..40735b57 100644 --- a/NeuralAmpModeler/NeuralAmpModeler.h +++ b/NeuralAmpModeler/NeuralAmpModeler.h @@ -220,10 +220,14 @@ class NeuralAmpModeler final : public iplug::Plugin // Loads a NAM model and stores it to mStagedNAM // Returns an empty string on success, or an error message on failure. std::string _StageModel(const WDL_String& dspFile); + // Loads a NAM model from embedded binary data + std::string _StageModelFromData(const std::vector<uint8_t>& data, const WDL_String& originalPath); // Loads an IR and stores it to mStagedIR. // Return status code so that error messages can be relayed if // it wasn't successful. 
dsp::wav::LoadReturnCode _StageIR(const WDL_String& irPath); + // Loads an IR from embedded binary data + dsp::wav::LoadReturnCode _StageIRFromData(const std::vector<uint8_t>& data, const WDL_String& originalPath); bool _HaveModel() const { return this->mModel != nullptr; }; // Prepare the input & output buffers @@ -307,6 +311,10 @@ class NeuralAmpModeler final : public iplug::Plugin // Path to IR (.wav file) WDL_String mIRPath; + // Embedded file data for portability (stored with DAW session) + std::vector<uint8_t> mNAMData; + std::vector<uint8_t> mIRData; + WDL_String mHighLightColor{PluginColors::NAM_THEMECOLOR.ToColorCode()}; std::unordered_map<std::string, double> mNAMParams = {{"Input", 0.0}, {"Output", 0.0}}; diff --git a/NeuralAmpModeler/Unserialization.cpp b/NeuralAmpModeler/Unserialization.cpp index 0fd803ce..002ca7be 100644 --- a/NeuralAmpModeler/Unserialization.cpp +++ b/NeuralAmpModeler/Unserialization.cpp @@ -57,17 +57,72 @@ void NeuralAmpModeler::_UnserializeApplyConfig(nlohmann::json& config) mNAMPath.Set(static_cast<std::string>(config["NAMPath"]).c_str()); mIRPath.Set(static_cast<std::string>(config["IRPath"]).c_str()); + // v0.7.13+: Try to load from file path first (if file exists), fall back to embedded data + bool namLoaded = false; if (mNAMPath.GetLength()) + { + std::filesystem::path namFilePath = std::filesystem::u8path(mNAMPath.Get()); + if (std::filesystem::exists(namFilePath)) + { + std::string error = _StageModel(mNAMPath); + if (error.empty()) + { + namLoaded = true; + } + } + } + + // Fallback to embedded data if file not available or doesn't exist + if (!namLoaded && config.contains("NAMData")) + { + auto namData = config["NAMData"].get<std::vector<uint8_t>>(); + if (!namData.empty()) + { + std::string error = _StageModelFromData(namData, mNAMPath); + namLoaded = error.empty(); + } + } + + // If neither file nor embedded data worked, try loading from file anyway to show error + if (!namLoaded && mNAMPath.GetLength()) { _StageModel(mNAMPath); } + + // Try to load IR from file path first (if file exists) + bool irLoaded = false; 
if (mIRPath.GetLength()) + { + std::filesystem::path irFilePath = std::filesystem::u8path(mIRPath.Get()); + if (std::filesystem::exists(irFilePath)) + { + dsp::wav::LoadReturnCode loadResult = _StageIR(mIRPath); + if (loadResult == dsp::wav::LoadReturnCode::SUCCESS) + { + irLoaded = true; + } + } + } + + // Fallback to embedded data if file not available or doesn't exist + if (!irLoaded && config.contains("IRData")) + { + auto irData = config["IRData"].get<std::vector<uint8_t>>(); + if (!irData.empty()) + { + dsp::wav::LoadReturnCode loadResult = _StageIRFromData(irData, mIRPath); + irLoaded = (loadResult == dsp::wav::LoadReturnCode::SUCCESS); + } + } + + // If neither file nor embedded data worked, try loading from file anyway to show error + if (!irLoaded && mIRPath.GetLength()) { _StageIR(mIRPath); } } -// Unserialize NAM Path, IR path, then named keys +// Unserialize NAM Path, IR path, then named keys (for versions before 0.7.13) int _UnserializePathsAndExpectedKeys(const iplug::IByteChunk& chunk, int startPos, nlohmann::json& config, std::vector<std::string>& paramNames) { @@ -87,6 +142,46 @@ int _UnserializePathsAndExpectedKeys(const iplug::IByteChunk& chunk, int startPo return pos; } +// Unserialize NAM Path, IR path, embedded data, then named keys (for v0.7.13+) +int _UnserializePathsEmbeddedDataAndExpectedKeys(const iplug::IByteChunk& chunk, int startPos, nlohmann::json& config, + std::vector<std::string>& paramNames) +{ + int pos = startPos; + WDL_String path; + pos = chunk.GetStr(path, pos); + config["NAMPath"] = std::string(path.Get()); + pos = chunk.GetStr(path, pos); + config["IRPath"] = std::string(path.Get()); + + // Read embedded NAM data size and data + int namDataSize = 0; + pos = chunk.Get(&namDataSize, pos); + if (namDataSize > 0) + { + std::vector<uint8_t> namData(namDataSize); + pos = chunk.GetBytes(namData.data(), namDataSize, pos); + config["NAMData"] = namData; + } + + // Read embedded IR data size and data + int irDataSize = 0; + pos = chunk.Get(&irDataSize, pos); + if (irDataSize > 0) + { + 
std::vector<uint8_t> irData(irDataSize); + pos = chunk.GetBytes(irData.data(), irDataSize, pos); + config["IRData"] = irData; + } + + for (auto it = paramNames.begin(); it != paramNames.end(); ++it) + { + double v = 0.0; + pos = chunk.Get(&v, pos); + config[*it] = v; + } + return pos; +} + void _RenameKeys(nlohmann::json& j, std::unordered_map<std::string, std::string> newNames) { // Assumes no aliasing! @@ -97,11 +192,40 @@ void _RenameKeys(nlohmann::json& j, std::unordered_map<std::string, std::string> } } +// v0.7.13 - Adds embedded NAM/IR data support + +void _UpdateConfigFrom_0_7_13(nlohmann::json& config) +{ + // Fill me in once something changes! +} + +int _GetConfigFrom_0_7_13(const iplug::IByteChunk& chunk, int startPos, nlohmann::json& config) +{ + std::vector<std::string> paramNames{"Input", + "Threshold", + "Bass", + "Middle", + "Treble", + "Output", + "NoiseGateActive", + "ToneStack", + "IRToggle", + "CalibrateInput", + "InputCalibrationLevel", + "OutputMode"}; + + int pos = _UnserializePathsEmbeddedDataAndExpectedKeys(chunk, startPos, config, paramNames); + // Then update: + _UpdateConfigFrom_0_7_13(config); + return pos; +} + // v0.7.12 void _UpdateConfigFrom_0_7_12(nlohmann::json& config) { - // Fill me in once something changes! + // Chain to next version + _UpdateConfigFrom_0_7_13(config); } int _GetConfigFrom_0_7_12(const iplug::IByteChunk& chunk, int startPos, nlohmann::json& config) @@ -248,7 +372,11 @@ int NeuralAmpModeler::_UnserializeStateWithKnownVersion(const iplug::IByteChunk& _Version version(versionStr); // Act accordingly nlohmann::json config; - if (version >= _Version(0, 7, 12)) + if (version >= _Version(0, 7, 13)) + { + pos = _GetConfigFrom_0_7_13(chunk, pos, config); + } + else if (version >= _Version(0, 7, 12)) { pos = _GetConfigFrom_0_7_12(chunk, pos, config); }