diff --git a/08_HelloSwapchain/main.cpp b/08_HelloSwapchain/main.cpp index cd294b0d2..9060c1561 100644 --- a/08_HelloSwapchain/main.cpp +++ b/08_HelloSwapchain/main.cpp @@ -160,6 +160,7 @@ class HelloSwapchainApp final : public examples::SimpleWindowedApplication auto window = m_winMgr->createWindow(std::move(params)); // uncomment for some nasty testing of swapchain creation! //m_winMgr->minimize(window.get()); + const_cast&>(m_surface) = CSmoothResizeSurface::create(CSurfaceVulkanWin32::create(smart_refctd_ptr(m_api),move_and_static_cast(window))); } return {{m_surface->getSurface()/*,EQF_NONE*/}}; diff --git a/61_UI/CMakeLists.txt b/61_UI/CMakeLists.txt index 5d0021f61..bbcdacf08 100644 --- a/61_UI/CMakeLists.txt +++ b/61_UI/CMakeLists.txt @@ -12,9 +12,71 @@ if(NBL_BUILD_IMGUI) imguizmo "${NBL_EXT_IMGUI_UI_LIB}" ) - - # TODO; Arek I removed `NBL_EXECUTABLE_PROJECT_CREATION_PCH_TARGET` from the last parameter here, doesn't this macro have 4 arguments anyway !? + nbl_create_executable_project("${NBL_EXTRA_SOURCES}" "" "${NBL_INCLUDE_SERACH_DIRECTORIES}" "${NBL_LIBRARIES}") - # TODO: Arek temporarily disabled cause I haven't figured out how to make this target yet - # LINK_BUILTIN_RESOURCES_TO_TARGET(${EXECUTABLE_NAME} nblExamplesGeometrySpirvBRD) -endif() \ No newline at end of file + + if(NBL_EMBED_BUILTIN_RESOURCES) + set(_BR_TARGET_ ${EXECUTABLE_NAME}_builtinResourceData) + set(RESOURCE_DIR "app_resources") + + get_filename_component(_SEARCH_DIRECTORIES_ "${CMAKE_CURRENT_SOURCE_DIR}" ABSOLUTE) + get_filename_component(_OUTPUT_DIRECTORY_SOURCE_ "${CMAKE_CURRENT_BINARY_DIR}/src" ABSOLUTE) + get_filename_component(_OUTPUT_DIRECTORY_HEADER_ "${CMAKE_CURRENT_BINARY_DIR}/include" ABSOLUTE) + + file(GLOB_RECURSE BUILTIN_RESOURCE_FILES RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}/${RESOURCE_DIR}" CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${RESOURCE_DIR}/*") + foreach(RES_FILE ${BUILTIN_RESOURCE_FILES}) + LIST_BUILTIN_RESOURCE(RESOURCES_TO_EMBED "${RES_FILE}") + 
endforeach() + + ADD_CUSTOM_BUILTIN_RESOURCES(${_BR_TARGET_} RESOURCES_TO_EMBED "${_SEARCH_DIRECTORIES_}" "${RESOURCE_DIR}" "nbl::this_example::builtin" "${_OUTPUT_DIRECTORY_HEADER_}" "${_OUTPUT_DIRECTORY_SOURCE_}") + + LINK_BUILTIN_RESOURCES_TO_TARGET(${EXECUTABLE_NAME} ${_BR_TARGET_}) + endif() + + set(OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/auto-gen") + + set(JSON [=[ +[ + { + "INPUT": "app_resources/imgui_vertex.hlsl", + "KEY": "imgui_vertex", + "COMPILE_OPTIONS": ["-T", "vs_6_7", "-E", "VSMain", "-O3"] + }, + { + "INPUT": "app_resources/imgui_fragment.hlsl", + "KEY": "imgui_fragment", + "COMPILE_OPTIONS": ["-T", "ps_6_7", "-E", "PSMain", "-O3"] + } +] +]=]) + string(CONFIGURE "${JSON}" JSON) + + set(COMPILE_OPTIONS + -I "${CMAKE_CURRENT_SOURCE_DIR}" + -I "${NBL_ROOT_PATH}/include" + -I "${NBL_ROOT_PATH}/include/nbl/ext/ImGui/builtin/hlsl" + ) + + NBL_CREATE_NSC_COMPILE_RULES( + TARGET ${EXECUTABLE_NAME}SPIRV + LINK_TO ${EXECUTABLE_NAME} + BINARY_DIR ${OUTPUT_DIRECTORY} + MOUNT_POINT_DEFINE NBL_THIS_EXAMPLE_BUILD_MOUNT_POINT + COMMON_OPTIONS ${COMPILE_OPTIONS} + OUTPUT_VAR KEYS + INCLUDE nbl/this_example/builtin/build/spirv/keys.hpp + NAMESPACE nbl::this_example::builtin::build + INPUTS ${JSON} + ) + + NBL_CREATE_RESOURCE_ARCHIVE( + NAMESPACE nbl::this_example::builtin::build + TARGET ${EXECUTABLE_NAME}_builtinsBuild + LINK_TO ${EXECUTABLE_NAME} + BIND ${OUTPUT_DIRECTORY} + BUILTINS ${KEYS} + ) + + add_dependencies(${EXECUTABLE_NAME} argparse) + target_include_directories(${EXECUTABLE_NAME} PUBLIC $) +endif() diff --git a/61_UI/app_resources/cameras.json b/61_UI/app_resources/cameras.json new file mode 100644 index 000000000..7e76aef83 --- /dev/null +++ b/61_UI/app_resources/cameras.json @@ -0,0 +1,162 @@ +{ + "cameras": [ + { + "type": "FPS", + "position": [-2.438, 1.995, -3.130], + "orientation": [0.195, 0.311, -0.065, 0.928] + }, + { + "type": "Orbit", + "position": [-2.017, 0.386, 0.684], + "target": [0, 0, 0] + }, + { + "type": "Free", + "position": 
[2.116, 0.826, 1.152], + "orientation": [0.095, -0.835, 0.152, 0.521] + } + ], + "projections": [ + { + "type": "perspective", + "fov": 40.0, + "zNear": 0.1, + "zFar": 110.0 + }, + { + "type": "orthographic", + "orthoWidth": 16.0, + "zNear": 0.1, + "zFar": 110.0 + } + ], + "viewports": [ + { + "projection": 0, + "controllers": { + "keyboard": 0, + "mouse": 0 + } + }, + { + "projection": 1, + "controllers": { + "keyboard": 1, + "mouse": 0 + } + }, + { + "projection": 0, + "controllers": { + "keyboard": 2 + } + }, + { + "projection": 1, + "controllers": { + "keyboard": 1 + } + }, + { + "projection": 0, + "controllers": { + "keyboard": 3, + "mouse": 1 + } + }, + { + "projection": 1, + "controllers": { + "keyboard": 3, + "mouse": 1 + } + } + ], + "planars": [ + { + "camera": 0, + "viewports": [0, 1] + }, + { + "camera": 1, + "viewports": [4, 5] + }, + { + "camera": 2, + "viewports": [2, 3] + } + ], + "controllers": { + "keyboard": [ + { + "mappings": { + "W": "MoveForward", + "S": "MoveBackward", + "A": "MoveLeft", + "D": "MoveRight", + "I": "TiltDown", + "K": "TiltUp", + "J": "PanLeft", + "L": "PanRight" + } + }, + { + "mappings": { + "W": "MoveUp", + "S": "MoveDown", + "A": "MoveLeft", + "D": "MoveRight" + } + }, + { + "mappings": { + "W": "MoveForward", + "S": "MoveBackward", + "A": "MoveLeft", + "D": "MoveRight", + "E": "MoveUp", + "Q": "MoveDown", + "I": "TiltDown", + "K": "TiltUp", + "J": "PanLeft", + "L": "PanRight", + "U": "RollRight", + "O": "RollLeft" + } + }, + { + "mappings": { + "W": "MoveRight", + "S": "MoveLeft", + "A": "MoveDown", + "D": "MoveUp", + "E": "MoveForward", + "Q": "MoveBackward" + } + } + ], + "mouse": [ + { + "mappings": { + "RELATIVE_POSITIVE_MOVEMENT_X": "PanRight", + "RELATIVE_NEGATIVE_MOVEMENT_X": "PanLeft", + "RELATIVE_POSITIVE_MOVEMENT_Y": "TiltUp", + "RELATIVE_NEGATIVE_MOVEMENT_Y": "TiltDown" + } + }, + { + "mappings": { + "RELATIVE_POSITIVE_MOVEMENT_X": "MoveUp", + "RELATIVE_NEGATIVE_MOVEMENT_X": "MoveDown", + 
"RELATIVE_POSITIVE_MOVEMENT_Y": "MoveRight", + "RELATIVE_NEGATIVE_MOVEMENT_Y": "MoveLeft", + "VERTICAL_POSITIVE_SCROLL": "MoveForward", + "HORIZONTAL_POSITIVE_SCROLL": "MoveForward", + "VERTICAL_NEGATIVE_SCROLL": "MoveBackward", + "HORIZONTAL_NEGATIVE_SCROLL": "MoveBackward" + } + } + ] + } + } + \ No newline at end of file diff --git a/61_UI/app_resources/imgui_fragment.hlsl b/61_UI/app_resources/imgui_fragment.hlsl new file mode 100644 index 000000000..5280ac2f8 --- /dev/null +++ b/61_UI/app_resources/imgui_fragment.hlsl @@ -0,0 +1,7 @@ +#define NBL_TEXTURES_BINDING_IX 0 +#define NBL_SAMPLER_STATES_BINDING_IX 1 +#define NBL_TEXTURES_SET_IX 0 +#define NBL_SAMPLER_STATES_SET_IX 0 +#define NBL_TEXTURES_COUNT 3 +#define NBL_SAMPLERS_COUNT 2 +#include "nbl/ext/ImGui/builtin/hlsl/fragment.hlsl" diff --git a/61_UI/app_resources/imgui_vertex.hlsl b/61_UI/app_resources/imgui_vertex.hlsl new file mode 100644 index 000000000..36257c853 --- /dev/null +++ b/61_UI/app_resources/imgui_vertex.hlsl @@ -0,0 +1 @@ +#include "nbl/ext/ImGui/builtin/hlsl/vertex.hlsl" diff --git a/61_UI/include/common.hpp b/61_UI/include/common.hpp index fe7d086dd..6b5c64e13 100644 --- a/61_UI/include/common.hpp +++ b/61_UI/include/common.hpp @@ -1,19 +1,57 @@ #ifndef _NBL_THIS_EXAMPLE_COMMON_H_INCLUDED_ #define _NBL_THIS_EXAMPLE_COMMON_H_INCLUDED_ +#include #include "nbl/examples/examples.hpp" +// common api +#include "camera/CFPSCamera.hpp" +#include "camera/CFreeLockCamera.hpp" +#include "camera/COrbitCamera.hpp" + +#include "camera/CCubeProjection.hpp" +#include "camera/CLinearProjection.hpp" +#include "camera/CPlanarProjection.hpp" + // the example's headers -#include "transform.hpp" +#include "nbl/ui/ICursorControl.h" +#include "nbl/ext/ImGui/ImGui.h" +#include "imgui/imgui_internal.h" +#include "imguizmo/ImGuizmo.h" using namespace nbl; using namespace nbl::core; -using namespace nbl::hlsl; using namespace nbl::system; using namespace nbl::asset; using namespace nbl::ui; using namespace 
nbl::video; using namespace nbl::examples; -#endif // _NBL_THIS_EXAMPLE_COMMON_H_INCLUDED_ \ No newline at end of file +namespace hlsl = nbl::hlsl; +using nbl::hlsl::ICamera; +using nbl::hlsl::CFPSCamera; +using nbl::hlsl::CFreeCamera; +using nbl::hlsl::COrbitCamera; +using nbl::hlsl::IPlanarProjection; +using nbl::hlsl::CPlanarProjection; +using nbl::hlsl::IGimbalController; +using nbl::hlsl::IGimbalManipulateEncoder; +using nbl::hlsl::CVirtualGimbalEvent; +using nbl::hlsl::float32_t; +using nbl::hlsl::float32_t2; +using nbl::hlsl::float32_t3; +using nbl::hlsl::float32_t4; +using nbl::hlsl::float32_t3x3; +using nbl::hlsl::float32_t3x4; +using nbl::hlsl::float32_t4x4; +using nbl::hlsl::float64_t; +using nbl::hlsl::float64_t4x4; +using nbl::hlsl::uint16_t2; +using nbl::hlsl::getCastedMatrix; +using nbl::hlsl::getCastedVector; +using nbl::hlsl::getMatrix3x4As4x4; +using nbl::hlsl::concatenateBFollowedByA; +using nbl::hlsl::mul; + +#endif // _NBL_THIS_EXAMPLE_COMMON_H_INCLUDED_ diff --git a/61_UI/include/keysmapping.hpp b/61_UI/include/keysmapping.hpp new file mode 100644 index 000000000..46153660c --- /dev/null +++ b/61_UI/include/keysmapping.hpp @@ -0,0 +1,234 @@ +#ifndef __NBL_KEYSMAPPING_H_INCLUDED__ +#define __NBL_KEYSMAPPING_H_INCLUDED__ + +#include "common.hpp" + +bool handleAddMapping(const char* tableID, IGimbalManipulateEncoder* encoder, IGimbalManipulateEncoder::EncoderType activeController, CVirtualGimbalEvent::VirtualEventType& selectedEventType, ui::E_KEY_CODE& newKey, ui::E_MOUSE_CODE& newMouseCode, bool& addMode) +{ + bool anyMapUpdated = false; + ImGui::BeginTable(tableID, 3, ImGuiTableFlags_Borders | ImGuiTableFlags_Resizable | ImGuiTableFlags_RowBg | ImGuiTableFlags_SizingStretchSame); + ImGui::TableSetupColumn("Virtual Event", ImGuiTableColumnFlags_WidthStretch, 0.33f); + ImGui::TableSetupColumn("Key", ImGuiTableColumnFlags_WidthStretch, 0.33f); + ImGui::TableSetupColumn("Actions", ImGuiTableColumnFlags_WidthStretch, 0.33f); + 
ImGui::TableHeadersRow(); + + ImGui::TableNextRow(); + ImGui::TableSetColumnIndex(0); + ImGui::AlignTextToFramePadding(); + if (ImGui::BeginCombo("##selectEvent", CVirtualGimbalEvent::virtualEventToString(selectedEventType).data())) + { + for (const auto& eventType : CVirtualGimbalEvent::VirtualEventsTypeTable) + { + bool isSelected = (selectedEventType == eventType); + if (ImGui::Selectable(CVirtualGimbalEvent::virtualEventToString(eventType).data(), isSelected)) + selectedEventType = eventType; + if (isSelected) + ImGui::SetItemDefaultFocus(); + } + ImGui::EndCombo(); + } + + ImGui::TableSetColumnIndex(1); + if (activeController == IGimbalManipulateEncoder::Keyboard) + { + char newKeyDisplay[2] = { ui::keyCodeToChar(newKey, true), '\0' }; + if (ImGui::BeginCombo("##selectKey", newKeyDisplay)) + { + for (int i = ui::E_KEY_CODE::EKC_A; i <= ui::E_KEY_CODE::EKC_Z; ++i) + { + bool isSelected = (newKey == static_cast(i)); + char label[2] = { ui::keyCodeToChar(static_cast(i), true), '\0' }; + if (ImGui::Selectable(label, isSelected)) + newKey = static_cast(i); + if (isSelected) + ImGui::SetItemDefaultFocus(); + } + ImGui::EndCombo(); + } + } + else + { + if (ImGui::BeginCombo("##selectMouseKey", ui::mouseCodeToString(newMouseCode).data())) + { + for (int i = ui::EMC_LEFT_BUTTON; i < ui::EMC_COUNT; ++i) + { + bool isSelected = (newMouseCode == static_cast(i)); + if (ImGui::Selectable(ui::mouseCodeToString(static_cast(i)).data(), isSelected)) + newMouseCode = static_cast(i); + if (isSelected) + ImGui::SetItemDefaultFocus(); + } + ImGui::EndCombo(); + } + } + + ImGui::TableSetColumnIndex(2); + if (ImGui::Button("Confirm Add", ImVec2(100, 30))) + { + anyMapUpdated |= true; + if (activeController == IGimbalManipulateEncoder::Keyboard) + encoder->updateKeyboardMapping([&](auto& keys) { keys[newKey] = selectedEventType; }); + else + encoder->updateMouseMapping([&](auto& mouse) { mouse[newMouseCode] = selectedEventType; }); + addMode = false; + } + + ImGui::EndTable(); + + 
return anyMapUpdated; +} + +bool displayKeyMappingsAndVirtualStatesInline(IGimbalManipulateEncoder* encoder, bool spawnWindow = false) +{ + bool anyMapUpdated = false; + + if (!encoder) return anyMapUpdated; + + struct MappingState + { + bool addMode = false; + CVirtualGimbalEvent::VirtualEventType selectedEventType = CVirtualGimbalEvent::VirtualEventType::MoveForward; + ui::E_KEY_CODE newKey = ui::E_KEY_CODE::EKC_A; + ui::E_MOUSE_CODE newMouseCode = ui::EMC_LEFT_BUTTON; + IGimbalManipulateEncoder::EncoderType activeController = IGimbalManipulateEncoder::Keyboard; + }; + + static std::unordered_map cameraStates; + auto& state = cameraStates[encoder]; + + const auto& keyboardMappings = encoder->getKeyboardVirtualEventMap(); + const auto& mouseMappings = encoder->getMouseVirtualEventMap(); + + if (spawnWindow) + { + ImGui::SetNextWindowSize(ImVec2(600, 400), ImGuiCond_FirstUseEver); + ImGui::Begin("Controller Mappings & Virtual States", nullptr, ImGuiWindowFlags_NoResize | ImGuiWindowFlags_AlwaysVerticalScrollbar); + } + + if (ImGui::BeginTabBar("ControllersTabBar")) + { + if (ImGui::BeginTabItem("Keyboard")) + { + state.activeController = IGimbalManipulateEncoder::Keyboard; + ImGui::Separator(); + + if (ImGui::Button("Add Key", ImVec2(100, 30))) + state.addMode = !state.addMode; + + ImGui::Separator(); + + ImGui::BeginTable("KeyboardMappingsTable", 5, ImGuiTableFlags_Borders | ImGuiTableFlags_Resizable | ImGuiTableFlags_RowBg | ImGuiTableFlags_SizingStretchSame); + ImGui::TableSetupColumn("Virtual Event", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableSetupColumn("Key(s)", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableSetupColumn("Active Status", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableSetupColumn("Magnitude", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableSetupColumn("Actions", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableHeadersRow(); + + for (const auto& [keyboardCode, hash] : keyboardMappings) + { + 
ImGui::TableNextRow(); + const char* eventName = CVirtualGimbalEvent::virtualEventToString(hash.event.type).data(); + ImGui::TableSetColumnIndex(0); + ImGui::AlignTextToFramePadding(); + ImGui::TextWrapped("%s", eventName); + + ImGui::TableSetColumnIndex(1); + std::string keyString(1, ui::keyCodeToChar(keyboardCode, true)); + ImGui::AlignTextToFramePadding(); + ImGui::TextWrapped("%s", keyString.c_str()); + + ImGui::TableSetColumnIndex(2); + bool isActive = (hash.event.magnitude > 0); + ImVec4 statusColor = isActive ? ImVec4(0.0f, 1.0f, 0.0f, 1.0f) : ImVec4(1.0f, 0.0f, 0.0f, 1.0f); + ImGui::TextColored(statusColor, "%s", isActive ? "Active" : "Inactive"); + + ImGui::TableSetColumnIndex(3); + ImGui::Text("%.2f", hash.event.magnitude); + + ImGui::TableSetColumnIndex(4); + if (ImGui::Button(("Delete##deleteKey" + std::to_string(static_cast(keyboardCode))).c_str())) + { + anyMapUpdated |= true; + encoder->updateKeyboardMapping([keyboardCode](auto& keys) { keys.erase(keyboardCode); }); + break; + } + } + ImGui::EndTable(); + + if (state.addMode) + { + ImGui::Separator(); + anyMapUpdated |= handleAddMapping("AddKeyboardMappingTable", encoder, state.activeController, state.selectedEventType, state.newKey, state.newMouseCode, state.addMode); + } + + ImGui::EndTabItem(); + } + + if (ImGui::BeginTabItem("Mouse")) + { + state.activeController = IGimbalManipulateEncoder::Mouse; + ImGui::Separator(); + + if (ImGui::Button("Add Key", ImVec2(100, 30))) + state.addMode = !state.addMode; + + ImGui::Separator(); + + ImGui::BeginTable("MouseMappingsTable", 5, ImGuiTableFlags_Borders | ImGuiTableFlags_Resizable | ImGuiTableFlags_RowBg | ImGuiTableFlags_SizingStretchSame); + ImGui::TableSetupColumn("Virtual Event", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableSetupColumn("Mouse Button(s)", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableSetupColumn("Active Status", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableSetupColumn("Magnitude", 
ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableSetupColumn("Actions", ImGuiTableColumnFlags_WidthStretch, 0.2f); + ImGui::TableHeadersRow(); + + for (const auto& [mouseCode, hash] : mouseMappings) + { + ImGui::TableNextRow(); + const char* eventName = CVirtualGimbalEvent::virtualEventToString(hash.event.type).data(); + ImGui::TableSetColumnIndex(0); + ImGui::AlignTextToFramePadding(); + ImGui::TextWrapped("%s", eventName); + + ImGui::TableSetColumnIndex(1); + const char* mouseButtonName = ui::mouseCodeToString(mouseCode).data(); + ImGui::AlignTextToFramePadding(); + ImGui::TextWrapped("%s", mouseButtonName); + + ImGui::TableSetColumnIndex(2); + bool isActive = (hash.event.magnitude > 0); + ImVec4 statusColor = isActive ? ImVec4(0.0f, 1.0f, 0.0f, 1.0f) : ImVec4(1.0f, 0.0f, 0.0f, 1.0f); + ImGui::TextColored(statusColor, "%s", isActive ? "Active" : "Inactive"); + + ImGui::TableSetColumnIndex(3); + ImGui::Text("%.2f", hash.event.magnitude); + + ImGui::TableSetColumnIndex(4); + if (ImGui::Button(("Delete##deleteMouse" + std::to_string(static_cast(mouseCode))).c_str())) + { + anyMapUpdated |= true; + encoder->updateMouseMapping([mouseCode](auto& mouse) { mouse.erase(mouseCode); }); + break; + } + } + ImGui::EndTable(); + + if (state.addMode) + { + ImGui::Separator(); + handleAddMapping("AddMouseMappingTable", encoder, state.activeController, state.selectedEventType, state.newKey, state.newMouseCode, state.addMode); + } + ImGui::EndTabItem(); + } + + ImGui::EndTabBar(); + } + + if (spawnWindow) + ImGui::End(); + + return anyMapUpdated; +} + +#endif // __NBL_KEYSMAPPING_H_INCLUDED__ \ No newline at end of file diff --git a/61_UI/main.cpp b/61_UI/main.cpp index 503a2e421..89a7c698d 100644 --- a/61_UI/main.cpp +++ b/61_UI/main.cpp @@ -2,880 +2,2627 @@ // This file is part of the "Nabla Engine". 
// For conditions of distribution and use, see copyright notice in nabla.h +#include "nlohmann/json.hpp" +#include "argparse/argparse.hpp" +using json = nlohmann::json; + #include "common.hpp" -#include +#include "keysmapping.hpp" +#include "camera/CCubeProjection.hpp" +#include "glm/glm/ext/matrix_clip_space.hpp" // TODO: TESTING +#include "nbl/ext/ScreenShot/ScreenShot.h" +#include "nbl/this_example/builtin/build/spirv/keys.hpp" +#if __has_include("nbl/this_example/builtin/CArchive.h") +#include "nbl/this_example/builtin/CArchive.h" +#endif +#if __has_include("nbl/this_example/builtin/build/CArchive.h") +#include "nbl/this_example/builtin/build/CArchive.h" +#endif + +using planar_projections_range_t = std::vector; +using planar_projection_t = CPlanarProjection; + +// the only reason for those is to remind we must go with transpose & 4x4 matrices +struct ImGuizmoPlanarM16InOut +{ + float32_t4x4 view, projection; +}; + +struct ImGuizmoModelM16InOut +{ + float32_t4x4 inTRS, outTRS, outDeltaTRS; +}; + +constexpr IGPUImage::SSubresourceRange TripleBufferUsedSubresourceRange = +{ + .aspectMask = IGPUImage::EAF_COLOR_BIT, + .baseMipLevel = 0, + .levelCount = 1, + .baseArrayLayer = 0, + .layerCount = 1 +}; + +class CUIEventCallback : public nbl::video::ISmoothResizeSurface::ICallback // I cannot use common CEventCallback because I MUST inherit this callback in order to use smooth resize surface with window callback (for my input events) +{ +public: + CUIEventCallback(nbl::core::smart_refctd_ptr&& m_inputSystem, nbl::system::logger_opt_smart_ptr&& logger) : m_inputSystem(std::move(m_inputSystem)), m_logger(std::move(logger)) {} + CUIEventCallback() {} + + void setLogger(nbl::system::logger_opt_smart_ptr& logger) + { + m_logger = logger; + } + void setInputSystem(nbl::core::smart_refctd_ptr&& m_inputSystem) + { + m_inputSystem = std::move(m_inputSystem); + } +private: + + void onMouseConnected_impl(nbl::core::smart_refctd_ptr&& mch) override + { + m_logger.log("A mouse %p 
has been connected", nbl::system::ILogger::ELL_INFO, mch.get()); + m_inputSystem.get()->add(m_inputSystem.get()->m_mouse, std::move(mch)); + } + void onMouseDisconnected_impl(nbl::ui::IMouseEventChannel* mch) override + { + m_logger.log("A mouse %p has been disconnected", nbl::system::ILogger::ELL_INFO, mch); + m_inputSystem.get()->remove(m_inputSystem.get()->m_mouse, mch); + } + void onKeyboardConnected_impl(nbl::core::smart_refctd_ptr&& kbch) override + { + m_logger.log("A keyboard %p has been connected", nbl::system::ILogger::ELL_INFO, kbch.get()); + m_inputSystem.get()->add(m_inputSystem.get()->m_keyboard, std::move(kbch)); + } + void onKeyboardDisconnected_impl(nbl::ui::IKeyboardEventChannel* kbch) override + { + m_logger.log("A keyboard %p has been disconnected", nbl::system::ILogger::ELL_INFO, kbch); + m_inputSystem.get()->remove(m_inputSystem.get()->m_keyboard, kbch); + } + +private: + nbl::core::smart_refctd_ptr m_inputSystem = nullptr; + nbl::system::logger_opt_smart_ptr m_logger = nullptr; +}; + +class CSwapchainResources final : public ISmoothResizeSurface::ISwapchainResources +{ +public: + // Because we blit to the swapchain image asynchronously, we need a queue which can not only present but also perform graphics commands. + // If we for example used a compute shader to tonemap and MSAA resolve, we'd request the COMPUTE_BIT here. 
+ constexpr static inline IQueue::FAMILY_FLAGS RequiredQueueFlags = IQueue::FAMILY_FLAGS::GRAPHICS_BIT; + + inline uint8_t getLastImageIndex() const { return m_lastImageIndex; } + +protected: + // We can return `BLIT_BIT` here, because the Source Image will be already in the correct layout to be used for the present + inline core::bitflag getTripleBufferPresentStages() const override { return asset::PIPELINE_STAGE_FLAGS::BLIT_BIT; } + + inline bool tripleBufferPresent(IGPUCommandBuffer* cmdbuf, const ISmoothResizeSurface::SPresentSource& source, const uint8_t imageIndex, const uint32_t qFamToAcquireSrcFrom) override + { + bool success = true; + auto acquiredImage = getImage(imageIndex); + m_lastImageIndex = imageIndex; + + // Ownership of the Source Blit Image, not the Swapchain Image + const bool needToAcquireSrcOwnership = qFamToAcquireSrcFrom != IQueue::FamilyIgnored; + // Should never get asked to transfer ownership if the source is concurrent sharing + assert(!source.image->getCachedCreationParams().isConcurrentSharing() || !needToAcquireSrcOwnership); + + const auto blitDstLayout = IGPUImage::LAYOUT::TRANSFER_DST_OPTIMAL; + IGPUCommandBuffer::SPipelineBarrierDependencyInfo depInfo = {}; + + // barrier before to transition the swapchain image layout + using image_barrier_t = decltype(depInfo.imgBarriers)::element_type; + const image_barrier_t preBarriers[2] = { + { + .barrier = { + .dep = { + .srcStageMask = asset::PIPELINE_STAGE_FLAGS::NONE, // acquire isn't a stage + .srcAccessMask = asset::ACCESS_FLAGS::NONE, // performs no accesses + .dstStageMask = asset::PIPELINE_STAGE_FLAGS::BLIT_BIT, + .dstAccessMask = asset::ACCESS_FLAGS::TRANSFER_WRITE_BIT + } + }, + .image = acquiredImage, + .subresourceRange = { + .aspectMask = IGPUImage::EAF_COLOR_BIT, + .baseMipLevel = 0, + .levelCount = 1, + .baseArrayLayer = 0, + .layerCount = 1 + }, + .oldLayout = IGPUImage::LAYOUT::UNDEFINED, // I do not care about previous contents of the swapchain + .newLayout = 
blitDstLayout + }, + { + .barrier = { + .dep = { + // when acquiring ownership the source access masks don't matter + .srcStageMask = asset::PIPELINE_STAGE_FLAGS::NONE, + // Acquire must Happen-Before Semaphore wait, but neither has a true stage so NONE here + // https://github.com/KhronosGroup/Vulkan-Docs/issues/2319 + // If no ownership acquire needed then this dep info won't be used at all + .srcAccessMask = asset::ACCESS_FLAGS::NONE, + .dstStageMask = asset::PIPELINE_STAGE_FLAGS::BLIT_BIT, + .dstAccessMask = asset::ACCESS_FLAGS::TRANSFER_READ_BIT + }, + .ownershipOp = IGPUCommandBuffer::SOwnershipTransferBarrier::OWNERSHIP_OP::ACQUIRE, + .otherQueueFamilyIndex = qFamToAcquireSrcFrom + }, + .image = source.image, + .subresourceRange = TripleBufferUsedSubresourceRange + // no layout transition, already in the correct layout for the blit + } + }; + // We only barrier the source image if we need to acquire ownership, otherwise thanks to Timeline Semaphores all sync is good + depInfo.imgBarriers = { preBarriers,needToAcquireSrcOwnership ? 
2ull : 1ull }; + success &= cmdbuf->pipelineBarrier(asset::EDF_NONE, depInfo); + + // TODO: Implement scaling modes other than a plain STRETCH, and allow for using subrectangles of the initial contents + { + const auto srcOffset = source.rect.offset; + const auto srcExtent = source.rect.extent; + const auto dstExtent = acquiredImage->getCreationParameters().extent; + const IGPUCommandBuffer::SImageBlit regions[1] = { { + .srcMinCoord = {static_cast(srcOffset.x),static_cast(srcOffset.y),0}, + .srcMaxCoord = {srcExtent.width,srcExtent.height,1}, + .dstMinCoord = {0,0,0}, + .dstMaxCoord = {dstExtent.width,dstExtent.height,1}, + .layerCount = acquiredImage->getCreationParameters().arrayLayers, + .srcBaseLayer = 0, + .dstBaseLayer = 0, + .srcMipLevel = 0 + } }; + success &= cmdbuf->blitImage(source.image, IGPUImage::LAYOUT::TRANSFER_SRC_OPTIMAL, acquiredImage, blitDstLayout, regions, IGPUSampler::ETF_LINEAR); + } + + // Barrier after, note that I don't care about preserving the contents of the Triple Buffer when the Render queue starts writing to it again. + // Therefore no ownership release, and no layout transition. + const image_barrier_t postBarrier[1] = { + { + .barrier = { + // When transitioning the image to VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR or VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, there is no need to delay subsequent processing, + // or perform any visibility operations (as vkQueuePresentKHR performs automatic visibility operations). 
+ // To achieve this, the dstAccessMask member of the VkImageMemoryBarrier should be set to 0, and the dstStageMask parameter should be set to VK_PIPELINE_STAGE_2_NONE + .dep = preBarriers[0].barrier.dep.nextBarrier(asset::PIPELINE_STAGE_FLAGS::NONE,asset::ACCESS_FLAGS::NONE) + }, + .image = preBarriers[0].image, + .subresourceRange = preBarriers[0].subresourceRange, + .oldLayout = blitDstLayout, + .newLayout = IGPUImage::LAYOUT::PRESENT_SRC + } + }; + depInfo.imgBarriers = postBarrier; + success &= cmdbuf->pipelineBarrier(asset::EDF_NONE, depInfo); + + return success; + } + +private: + uint8_t m_lastImageIndex = 0u; +}; + +static smart_refctd_ptr createAttachmentView(ILogicalDevice* device, E_FORMAT format, uint32_t width, uint32_t height, const char* debugName) +{ + if (!device) + return nullptr; + + const bool isDepth = isDepthOrStencilFormat(format); + auto usage = IGPUImage::EUF_RENDER_ATTACHMENT_BIT; + if (!isDepth) + usage |= IGPUImage::EUF_SAMPLED_BIT; + + auto image = device->createImage({{ + .type = IGPUImage::ET_2D, + .samples = IGPUImage::ESCF_1_BIT, + .format = format, + .extent = { width, height, 1u }, + .mipLevels = 1u, + .arrayLayers = 1u, + .usage = usage + }}); + if (!image) + return nullptr; + + image->setObjectDebugName(debugName); + + if (!device->allocate(image->getMemoryReqs(), image.get()).isValid()) + return nullptr; + + IGPUImageView::SCreationParams params = { + .subUsages = usage, + .image = std::move(image), + .viewType = IGPUImageView::ET_2D, + .format = format + }; + params.subresourceRange.aspectMask = isDepth ? 
IGPUImage::EAF_DEPTH_BIT : IGPUImage::EAF_COLOR_BIT; + return device->createImageView(std::move(params)); +} + +static smart_refctd_ptr createSceneFramebuffer(ILogicalDevice* device, IGPURenderpass* renderpass, IGPUImageView* colorView, IGPUImageView* depthView) +{ + if (!device || !renderpass || !colorView || !depthView) + return nullptr; + + const auto& imageParams = colorView->getCreationParameters().image->getCreationParameters(); + IGPUFramebuffer::SCreationParams params = { { + .renderpass = core::smart_refctd_ptr(renderpass), + .depthStencilAttachments = &depthView, + .colorAttachments = &colorView, + .width = imageParams.extent.width, + .height = imageParams.extent.height, + .layers = imageParams.arrayLayers + } }; + return device->createFramebuffer(std::move(params)); +} /* -Renders scene texture to an offscreen framebuffer whose color attachment is then sampled into a imgui window. + Renders scene texture to an offline + framebuffer which color attachment + is then sampled into a imgui window. -Written with Nabla's UI extension and got integrated with ImGuizmo to handle scene's object translations. + Written with Nabla, it's UI extension + and got integrated with ImGuizmo to + handle scene's object translations. 
*/ -class UISampleApp final : public MonoWindowApplication, public BuiltinResourcesApplication + +class UISampleApp final : public examples::SimpleWindowedApplication { - using device_base_t = MonoWindowApplication; - using asset_base_t = BuiltinResourcesApplication; + using base_t = examples::SimpleWindowedApplication; + using clock_t = std::chrono::steady_clock; + + constexpr static inline clock_t::duration DisplayImageDuration = std::chrono::milliseconds(900); + constexpr static inline auto sceneRenderDepthFormat = EF_D32_SFLOAT; + constexpr static inline auto finalSceneRenderFormat = EF_R8G8B8A8_SRGB; + constexpr static inline IGPUCommandBuffer::SClearColorValue SceneClearColor = { .float32 = {0.f,0.f,0.f,1.f} }; + constexpr static inline IGPUCommandBuffer::SClearDepthStencilValue SceneClearDepth = { .depth = 0.f }; public: + using base_t::base_t; + inline UISampleApp(const path& _localInputCWD, const path& _localOutputCWD, const path& _sharedInputCWD, const path& _sharedOutputCWD) - : IApplicationFramework(_localInputCWD, _localOutputCWD, _sharedInputCWD, _sharedOutputCWD), - device_base_t({1280,720}, EF_UNKNOWN, _localInputCWD, _localOutputCWD, _sharedInputCWD, _sharedOutputCWD) {} + : IApplicationFramework(_localInputCWD, _localOutputCWD, _sharedInputCWD, _sharedOutputCWD) {} + + // Will get called mid-initialization, via `filterDevices` between when the API Connection is created and Physical Device is chosen + core::vector getSurfaces() const override + { + // So let's create our Window and Surface then! 
+ if (!m_surface) + { + { + const auto dpyInfo = m_winMgr->getPrimaryDisplayInfo(); + auto windowCallback = core::make_smart_refctd_ptr(smart_refctd_ptr(m_inputSystem), smart_refctd_ptr(m_logger)); + + IWindow::SCreationParams params = {}; + params.callback = core::make_smart_refctd_ptr(); + params.width = dpyInfo.resX; + params.height = dpyInfo.resY; + params.x = 32; + params.y = 32; + params.flags = IWindow::ECF_INPUT_FOCUS | IWindow::ECF_CAN_RESIZE | IWindow::ECF_CAN_MAXIMIZE | IWindow::ECF_CAN_MINIMIZE; + params.windowCaption = "[Nabla Engine] UI App"; + params.callback = windowCallback; + + const_cast&>(m_window) = m_winMgr->createWindow(std::move(params)); + } + auto surface = CSurfaceVulkanWin32::create(smart_refctd_ptr(m_api), smart_refctd_ptr_static_cast(m_window)); + const_cast&>(m_surface) = CSmoothResizeSurface::create(std::move(surface)); + } + + if (m_surface) + { + m_window->getManager()->maximize(m_window.get()); + auto* cc = m_window->getCursorControl(); + cc->setVisible(false); + + return { {m_surface->getSurface()/*,EQF_NONE*/} }; + } + + return {}; + } inline bool onAppInitialized(smart_refctd_ptr&& system) override { - if (!asset_base_t::onAppInitialized(smart_refctd_ptr(system))) + argparse::ArgumentParser program("Virtual camera event system demo"); + + program.add_argument("--file") + .help("Path to json file with camera inputs"); + program.add_argument("--ci") + .help("Run in CI mode: capture a screenshot after a few frames and exit.") + .default_value(false) + .implicit_value(true); + + try + { + program.parse_args({ argv.data(), argv.data() + argv.size() }); + } + catch (const std::exception& err) + { + std::cerr << err.what() << std::endl << program; return false; - if (!device_base_t::onAppInitialized(smart_refctd_ptr(system))) + } + + m_ciMode = program.get("--ci"); + if (m_ciMode) + m_ciScreenshotPath = localOutputCWD / "cameraz_ci.png"; + + // Create imput system + m_inputSystem = 
make_smart_refctd_ptr(logger_opt_smart_ptr(smart_refctd_ptr(m_logger))); + + // Remember to call the base class initialization! + if (!base_t::onAppInitialized(std::move(system))) return false; + { + smart_refctd_ptr examplesHeaderArch, examplesSourceArch, examplesBuildArch, thisExampleArch, thisExampleBuildArch; +#ifdef NBL_EMBED_BUILTIN_RESOURCES + examplesHeaderArch = core::make_smart_refctd_ptr(smart_refctd_ptr(m_logger)); + examplesSourceArch = core::make_smart_refctd_ptr(smart_refctd_ptr(m_logger)); + examplesBuildArch = core::make_smart_refctd_ptr(smart_refctd_ptr(m_logger)); + + #ifdef _NBL_THIS_EXAMPLE_BUILTIN_C_ARCHIVE_H_ + thisExampleArch = make_smart_refctd_ptr(smart_refctd_ptr(m_logger)); + #endif + + #ifdef _NBL_THIS_EXAMPLE_BUILTIN_BUILD_C_ARCHIVE_H_ + thisExampleBuildArch = make_smart_refctd_ptr(smart_refctd_ptr(m_logger)); + #endif +#else + examplesHeaderArch = make_smart_refctd_ptr(localInputCWD/"../common/include/nbl/examples", smart_refctd_ptr(m_logger), m_system.get()); + examplesSourceArch = make_smart_refctd_ptr(localInputCWD/"../common/src/nbl/examples", smart_refctd_ptr(m_logger), m_system.get()); + examplesBuildArch = make_smart_refctd_ptr(NBL_EXAMPLES_BUILD_MOUNT_POINT, smart_refctd_ptr(m_logger), m_system.get()); + thisExampleArch = make_smart_refctd_ptr(localInputCWD/"app_resources", smart_refctd_ptr(m_logger), m_system.get()); + #ifdef NBL_THIS_EXAMPLE_BUILD_MOUNT_POINT + thisExampleBuildArch = make_smart_refctd_ptr(NBL_THIS_EXAMPLE_BUILD_MOUNT_POINT, smart_refctd_ptr(m_logger), m_system.get()); + #endif +#endif + m_system->mount(std::move(examplesHeaderArch),"nbl/examples"); + m_system->mount(std::move(examplesSourceArch),"nbl/examples"); + m_system->mount(std::move(examplesBuildArch),"nbl/examples"); + if (thisExampleArch) + m_system->mount(std::move(thisExampleArch),"app_resources"); + if (thisExampleBuildArch) + m_system->mount(std::move(thisExampleBuildArch),"app_resources"); + } + + { + const std::optional cameraJsonFile = 
program.is_used("--file") ? program.get("--file") : std::optional(std::nullopt); + + json j; + auto loadDefaultConfig = [&]() -> bool + { +#ifdef _NBL_THIS_EXAMPLE_BUILTIN_C_ARCHIVE_H_ + auto assets = make_smart_refctd_ptr(smart_refctd_ptr(m_logger)); + auto pFile = assets->getFile("cameras.json", IFile::ECF_READ, ""); + if (!pFile) + return logFail("Could not open builtin cameras.json!"); + + string config; + IFile::success_t result; + config.resize(pFile->getSize()); + pFile->read(result, config.data(), 0, pFile->getSize()); + j = json::parse(config); + return true; +#else + const auto fallbackPath = localInputCWD / "app_resources" / "cameras.json"; + std::ifstream fallbackFile(fallbackPath); + if (!fallbackFile.is_open()) + return logFail("Cannot open default config \"%s\".", fallbackPath.string().c_str()); + fallbackFile >> j; + return true; +#endif + }; + + auto file = cameraJsonFile.has_value() ? std::ifstream(cameraJsonFile.value()) : std::ifstream(); + if (!file.is_open()) + { + if (cameraJsonFile.has_value()) + m_logger->log("Cannot open input \"%s\" json file. Switching to default config.", ILogger::ELL_WARNING, cameraJsonFile.value().c_str()); + else + m_logger->log("No input json file provided. 
Switching to default config.", ILogger::ELL_WARNING); + + if (!loadDefaultConfig()) + return false; + } + else + { + file >> j; + } + + std::vector> cameras; + for (const auto& jCamera : j["cameras"]) + { + if (jCamera.contains("type")) + { + if (!jCamera.contains("position")) + { + logFail("Expected \"position\" keyword for camera definition!"); + return false; + } + + const bool withOrientation = jCamera.contains("orientation"); + + auto position = [&]() + { + auto jret = jCamera["position"].get>(); + return float32_t3(jret[0], jret[1], jret[2]); + }(); + + auto getOrientation = [&]() + { + auto jret = jCamera["orientation"].get>(); + + // order important for glm::quat, + // the ctor is GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(T _w, T _x, T _y, T _z) + // but memory layout (and json) is x,y,z,w + return glm::quat(jret[3], jret[0], jret[1], jret[2]); + }; + + auto getTarget = [&]() + { + auto jret = jCamera["target"].get>(); + return float32_t3(jret[0], jret[1], jret[2]); + }; + + if (jCamera["type"] == "FPS") + { + if (!withOrientation) + { + logFail("Expected \"orientation\" keyword for FPS camera definition!"); + return false; + } + + cameras.emplace_back() = make_smart_refctd_ptr(position, getOrientation()); + } + else if (jCamera["type"] == "Free") + { + if (!withOrientation) + { + logFail("Expected \"orientation\" keyword for Free camera definition!"); + return false; + } + + cameras.emplace_back() = make_smart_refctd_ptr(position, getOrientation()); + } + else if (jCamera["type"] == "Orbit") + { + auto& camera = cameras.emplace_back() = make_smart_refctd_ptr(position, getTarget()); + camera->setMoveSpeedScale(0.2); + } + else + { + logFail("Unsupported camera type!"); + return false; + } + } + else + { + logFail("Expected \"type\" keyword for camera definition!"); + return false; + } + } + + std::vector projections; + for (const auto& jProjection : j["projections"]) + { + if (jProjection.contains("type")) + { + float zNear, zFar; + + if 
(!jProjection.contains("zNear")) + { + logFail("Expected \"zNear\" keyword for planar projection definition!"); + return false; + } + + if (!jProjection.contains("zFar")) + { + logFail("Expected \"zFar\" keyword for planar projection definition!"); + return false; + } + + zNear = jProjection["zNear"].get(); + zFar = jProjection["zFar"].get(); + + if (jProjection["type"] == "perspective") + { + if (!jProjection.contains("fov")) + { + logFail("Expected \"fov\" keyword for planar perspective projection definition!"); + return false; + } + + float fov = jProjection["fov"].get(); + projections.emplace_back(IPlanarProjection::CProjection::create(zNear, zFar, fov)); + } + else if (jProjection["type"] == "orthographic") + { + if (!jProjection.contains("orthoWidth")) + { + logFail("Expected \"orthoWidth\" keyword for planar orthographic projection definition!"); + return false; + } + + float orthoWidth = jProjection["orthoWidth"].get(); + projections.emplace_back(IPlanarProjection::CProjection::create(zNear, zFar, orthoWidth)); + } + else + { + logFail("Unsupported projection!"); + return false; + } + } + } + + struct + { + std::vector keyboard; + std::vector mouse; + } controllers; + + if (j.contains("controllers")) + { + const auto& jControllers = j["controllers"]; + + if (jControllers.contains("keyboard")) + { + for (const auto& jKeyboard : jControllers["keyboard"]) + { + if (jKeyboard.contains("mappings")) + { + auto& controller = controllers.keyboard.emplace_back(); + for (const auto& [key, value] : jKeyboard["mappings"].items()) + { + const auto nativeCode = stringToKeyCode(key.c_str()); + + if (nativeCode == EKC_NONE) + { + logFail("Invalid native key \"%s\" code mapping for keyboard controller", key.c_str()); + return false; + } + + controller[nativeCode] = CVirtualGimbalEvent::stringToVirtualEvent(value.get()); + } + } + else + { + logFail("Expected \"mappings\" keyword for keyboard controller definition!"); + return false; + } + } + } + else + { + 
logFail("Expected \"keyboard\" keyword in controllers definition!"); + return false; + } + + if (jControllers.contains("mouse")) + { + for (const auto& jMouse : jControllers["mouse"]) + { + if (jMouse.contains("mappings")) + { + auto& controller = controllers.mouse.emplace_back(); + for (const auto& [key, value] : jMouse["mappings"].items()) + { + const auto nativeCode = stringToMouseCode(key.c_str()); + + if (nativeCode == EMC_NONE) + { + logFail("Invalid native key \"%s\" code mapping for mouse controller", key.c_str()); + return false; + } + + controller[nativeCode] = CVirtualGimbalEvent::stringToVirtualEvent(value.get()); + } + } + else + { + logFail("Expected \"mappings\" keyword for mouse controller definition!"); + return false; + } + } + } + else + { + logFail("Expected \"mouse\" keyword in controllers definition"); + return false; + } + } + else + { + logFail("Expected \"controllers\" keyword in controllers JSON"); + return false; + } + + if (j.contains("viewports") && j.contains("planars")) + { + for (const auto& jPlanar : j["planars"]) + { + if (!jPlanar.contains("camera")) + { + logFail("Expected \"camera\" value in planar object"); + return false; + } + + if (!jPlanar.contains("viewports")) + { + logFail("Expected \"viewports\" list in planar object"); + return false; + } + + const auto cameraIx = jPlanar["camera"].get(); + auto boundViewports = jPlanar["viewports"].get>(); + + auto& planar = m_planarProjections.emplace_back() = planar_projection_t::create(smart_refctd_ptr(cameras[cameraIx])); + for (const auto viewportIx : boundViewports) + { + auto& viewport = j["viewports"][viewportIx]; + if (!viewport.contains("projection") || !viewport.contains("controllers")) + { + logFail("\"projection\" or \"controllers\" missing in viewport object index %d", viewportIx); + return false; + } + + const auto projectionIx = viewport["projection"].get(); + auto& projection = planar->getPlanarProjections().emplace_back(projections[projectionIx]); + + const bool 
hasKeyboardBound = viewport["controllers"].contains("keyboard"); + const bool hasMouseBound = viewport["controllers"].contains("mouse"); + + if (hasKeyboardBound) + { + auto keyboardControllerIx = viewport["controllers"]["keyboard"].get(); + projection.updateKeyboardMapping([&](auto& map) { map = controllers.keyboard[keyboardControllerIx]; }); + } + else + projection.updateKeyboardMapping([&](auto& map) { map = {}; }); // clean the map if not bound + + if (hasMouseBound) + { + auto mouseControllerIx = viewport["controllers"]["mouse"].get(); + projection.updateMouseMapping([&](auto& map) { map = controllers.mouse[mouseControllerIx]; }); + } + else + projection.updateMouseMapping([&](auto& map) { map = {}; }); // clean the map if not bound + } + + { + auto* camera = planar->getCamera(); + { + camera->updateKeyboardMapping([&](auto& map) { map = camera->getKeyboardMappingPreset(); }); + camera->updateMouseMapping([&](auto& map) { map = camera->getMouseMappingPreset(); }); + camera->updateImguizmoMapping([&](auto& map) { map = camera->getImguizmoMappingPreset(); }); + } + } + } + } + else + { + logFail("Expected \"viewports\" and \"planars\" lists in JSON"); + return false; + } + + if (m_planarProjections.size() < windowBindings.size()) + { + // TODO, temporary assuming it, I'm not going to implement each possible case now + logFail("Expected at least %d planars", windowBindings.size()); + return false; + } + + // init render window planar references - we make all render windows start with focus on first + // planar but in a way that first window has the planar's perspective preset bound & second orthographic + for (uint32_t i = 0u; i < windowBindings.size(); ++i) + { + auto& binding = windowBindings[i]; + + auto& planar = m_planarProjections[binding.activePlanarIx = 0]; + binding.pickDefaultProjections(planar->getPlanarProjections()); + + if (i) + binding.boundProjectionIx = binding.lastBoundOrthoPresetProjectionIx.value(); + else + binding.boundProjectionIx = 
binding.lastBoundPerspectivePresetProjectionIx.value(); + } + } + + // Create asset manager + m_assetManager = make_smart_refctd_ptr(smart_refctd_ptr(m_system)); + + // First create the resources that don't depend on a swapchain m_semaphore = m_device->createSemaphore(m_realFrameIx); if (!m_semaphore) return logFail("Failed to Create a Semaphore!"); - auto pool = m_device->createCommandPool(getGraphicsQueue()->getFamilyIndex(),IGPUCommandPool::CREATE_FLAGS::RESET_COMMAND_BUFFER_BIT); - for (auto i = 0u; icreateCommandBuffers(IGPUCommandPool::BUFFER_LEVEL::PRIMARY,{m_cmdBufs.data()+i,1})) - return logFail("Couldn't create Command Buffer!"); - } - - const uint32_t addtionalBufferOwnershipFamilies[] = {getGraphicsQueue()->getFamilyIndex()}; - m_scene = CGeometryCreatorScene::create( - { - .transferQueue = getTransferUpQueue(), - .utilities = m_utils.get(), - .logger = m_logger.get(), - .addtionalBufferOwnershipFamilies = addtionalBufferOwnershipFamilies - }, - CSimpleDebugRenderer::DefaultPolygonGeometryPatch - ); - - // for the scene drawing pass + // The nice thing about having a triple buffer is that you don't need to do acrobatics to account for the formats available to the surface. + // You can transcode to the swapchain's format while copying, and I actually recommend to do surface rotation, tonemapping and OETF application there. 
+ const auto format = asset::EF_R8G8B8A8_SRGB; + // Could be more clever and use the copy Triple Buffer to Swapchain as an opportunity to do a MSAA resolve or something + const auto samples = IGPUImage::ESCF_1_BIT; + + // Create the renderpass { - IGPURenderpass::SCreationParams params = {}; - const IGPURenderpass::SCreationParams::SDepthStencilAttachmentDescription depthAttachments[] = { - {{ - { - .format = sceneRenderDepthFormat, - .samples = IGPUImage::ESCF_1_BIT, - .mayAlias = false - }, - /*.loadOp = */{IGPURenderpass::LOAD_OP::CLEAR}, - /*.storeOp = */{IGPURenderpass::STORE_OP::STORE}, - /*.initialLayout = */{IGPUImage::LAYOUT::UNDEFINED}, - /*.finalLayout = */{IGPUImage::LAYOUT::ATTACHMENT_OPTIMAL} - }}, - IGPURenderpass::SCreationParams::DepthStencilAttachmentsEnd - }; - params.depthStencilAttachments = depthAttachments; const IGPURenderpass::SCreationParams::SColorAttachmentDescription colorAttachments[] = { {{ { - .format = finalSceneRenderFormat, - .samples = IGPUImage::E_SAMPLE_COUNT_FLAGS::ESCF_1_BIT, + .format = format, + .samples = samples, .mayAlias = false }, - /*.loadOp = */IGPURenderpass::LOAD_OP::CLEAR, - /*.storeOp = */IGPURenderpass::STORE_OP::STORE, - /*.initialLayout = */IGPUImage::LAYOUT::UNDEFINED, - /*.finalLayout = */ IGPUImage::LAYOUT::READ_ONLY_OPTIMAL // ImGUI shall read - }}, - IGPURenderpass::SCreationParams::ColorAttachmentsEnd + /*.loadOp = */IGPURenderpass::LOAD_OP::CLEAR, + /*.storeOp = */IGPURenderpass::STORE_OP::STORE, + /*.initialLayout = */IGPUImage::LAYOUT::UNDEFINED, // because we clear we don't care about contents when we grab the triple buffer img again + /*.finalLayout = */IGPUImage::LAYOUT::TRANSFER_SRC_OPTIMAL // put it already in the correct layout for the blit operation + }}, + IGPURenderpass::SCreationParams::ColorAttachmentsEnd }; - params.colorAttachments = colorAttachments; IGPURenderpass::SCreationParams::SSubpassDescription subpasses[] = { {}, IGPURenderpass::SCreationParams::SubpassesEnd }; - 
subpasses[0].depthStencilAttachment = {{.render={.attachmentIndex=0,.layout=IGPUImage::LAYOUT::ATTACHMENT_OPTIMAL}}}; - subpasses[0].colorAttachments[0] = {.render={.attachmentIndex=0,.layout=IGPUImage::LAYOUT::ATTACHMENT_OPTIMAL}}; - params.subpasses = subpasses; - - const static IGPURenderpass::SCreationParams::SSubpassDependency dependencies[] = { - // wipe-transition of Color to ATTACHMENT_OPTIMAL and depth + subpasses[0].colorAttachments[0] = { .render = {.attachmentIndex = 0,.layout = IGPUImage::LAYOUT::ATTACHMENT_OPTIMAL} }; + // We actually need external dependencies to ensure ordering of the Implicit Layout Transitions relative to the semaphore signals + IGPURenderpass::SCreationParams::SSubpassDependency dependencies[] = { + // wipe-transition to ATTACHMENT_OPTIMAL { .srcSubpass = IGPURenderpass::SCreationParams::SSubpassDependency::External, .dstSubpass = 0, .memoryBarrier = { - // last place where the depth can get modified in previous frame, `COLOR_ATTACHMENT_OUTPUT_BIT` is implicitly later - // while color is sampled by ImGUI - .srcStageMask = PIPELINE_STAGE_FLAGS::LATE_FRAGMENT_TESTS_BIT|PIPELINE_STAGE_FLAGS::FRAGMENT_SHADER_BIT, - // don't want any writes to be available, as we are clearing both attachments - .srcAccessMask = ACCESS_FLAGS::NONE, - // destination needs to wait as early as possible - // TODO: `COLOR_ATTACHMENT_OUTPUT_BIT` shouldn't be needed, because its a logically later stage, see TODO in `ECommonEnums.h` - .dstStageMask = PIPELINE_STAGE_FLAGS::EARLY_FRAGMENT_TESTS_BIT|PIPELINE_STAGE_FLAGS::COLOR_ATTACHMENT_OUTPUT_BIT, - // because depth and color get cleared first no read mask - .dstAccessMask = ACCESS_FLAGS::DEPTH_STENCIL_ATTACHMENT_WRITE_BIT|ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT - } - // leave view offsets and flags default - }, + // we can have NONE as Sources because the semaphore wait is ALL_COMMANDS + // https://github.com/KhronosGroup/Vulkan-Docs/issues/2319 + .dstStageMask = 
asset::PIPELINE_STAGE_FLAGS::COLOR_ATTACHMENT_OUTPUT_BIT, + .dstAccessMask = asset::ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT + } + // leave view offsets and flags default + }, + // ATTACHMENT_OPTIMAL to PRESENT_SRC { .srcSubpass = 0, .dstSubpass = IGPURenderpass::SCreationParams::SSubpassDependency::External, .memoryBarrier = { - // last place where the color can get modified, depth is implicitly earlier - .srcStageMask = PIPELINE_STAGE_FLAGS::COLOR_ATTACHMENT_OUTPUT_BIT, - // only write ops, reads can't be made available, also won't be using depth so don't care about it being visible to anyone else - .srcAccessMask = ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT, - // the ImGUI will sample the color, then next frame we overwrite both attachments - .dstStageMask = PIPELINE_STAGE_FLAGS::FRAGMENT_SHADER_BIT|PIPELINE_STAGE_FLAGS::EARLY_FRAGMENT_TESTS_BIT, - // but we only care about the availability-visibility chain between renderpass and imgui - .dstAccessMask = ACCESS_FLAGS::SAMPLED_READ_BIT - } - // leave view offsets and flags default - }, - IGPURenderpass::SCreationParams::DependenciesEnd + .srcStageMask = asset::PIPELINE_STAGE_FLAGS::COLOR_ATTACHMENT_OUTPUT_BIT, + .srcAccessMask = asset::ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT + // we can have NONE as the Destinations because the semaphore signal is ALL_COMMANDS + // https://github.com/KhronosGroup/Vulkan-Docs/issues/2319 + } + // leave view offsets and flags default + }, + IGPURenderpass::SCreationParams::DependenciesEnd }; - params.dependencies = {}; - m_renderpass = m_device->createRenderpass(std::move(params)); + + IGPURenderpass::SCreationParams params = {}; + params.colorAttachments = colorAttachments; + params.subpasses = subpasses; + params.dependencies = dependencies; + m_renderpass = m_device->createRenderpass(params); if (!m_renderpass) - return logFail("Failed to create Scene Renderpass!"); + return logFail("Failed to Create a Renderpass!"); } - const auto& geometries = 
m_scene->getInitParams().geometries; - m_renderer = CSimpleDebugRenderer::create(m_assetMgr.get(),m_renderpass.get(),0,{&geometries.front().get(),geometries.size()}); - // special case + + // We just live life in easy mode and have the Swapchain Creation Parameters get deduced from the surface. + // We don't need any control over the format of the swapchain because we'll be only using Renderpasses this time! + // TODO: improve the queue allocation/choice and allocate a dedicated presentation queue to improve responsiveness and race to present. + ISwapchain::SSharedCreationParams sharedParams = {}; + sharedParams.imageUsage |= IGPUImage::EUF_TRANSFER_SRC_BIT; + auto swapchainResources = std::make_unique(); + if (!m_surface || !m_surface->init(m_surface->pickQueue(m_device.get()), std::move(swapchainResources), sharedParams)) + return logFail("Failed to Create a Swapchain!"); + + // Normally you'd want to recreate these images whenever the swapchain is resized in some increment, like 64 pixels or something. + // But I'm super lazy here and will just create "worst case sized images" and waste all the VRAM I can get. 
+ const auto dpyInfo = m_winMgr->getPrimaryDisplayInfo(); + for (auto i = 0; i < MaxFramesInFlight; i++) { - const auto& pipelines = m_renderer->getInitParams().pipelines; - auto ix = 0u; - for (const auto& name : m_scene->getInitParams().geometryNames) + auto& image = m_tripleBuffers[i]; { - if (name=="Cone") - m_renderer->getGeometry(ix).pipeline = pipelines[CSimpleDebugRenderer::SInitParams::PipelineType::Cone]; - ix++; + IGPUImage::SCreationParams params = {}; + params = asset::IImage::SCreationParams{ + .type = IGPUImage::ET_2D, + .samples = samples, + .format = format, + .extent = {dpyInfo.resX,dpyInfo.resY,1}, + .mipLevels = 1, + .arrayLayers = 1, + .flags = IGPUImage::ECF_NONE, + // in this example I'll be using a renderpass to clear the image, and then a blit to copy it to the swapchain + .usage = IGPUImage::EUF_RENDER_ATTACHMENT_BIT | IGPUImage::EUF_TRANSFER_SRC_BIT + }; + image = m_device->createImage(std::move(params)); + if (!image) + return logFail("Failed to Create Triple Buffer Image!"); + + // use dedicated allocations, we have plenty of allocations left, even on Win32 + if (!m_device->allocate(image->getMemoryReqs(), image.get()).isValid()) + return logFail("Failed to allocate Device Memory for Image %d", i); } - } - // we'll only display one thing at a time - m_renderer->m_instances.resize(1); - - // Create ImGUI - { - auto scRes = static_cast(m_surface->getSwapchainResources()); - ext::imgui::UI::SCreationParameters params = {}; - params.resources.texturesInfo = {.setIx=0u,.bindingIx=TexturesImGUIBindingIndex}; - params.resources.samplersInfo = {.setIx=0u,.bindingIx=1u}; - params.utilities = m_utils; - params.transfer = getTransferUpQueue(); - params.pipelineLayout = ext::imgui::UI::createDefaultPipelineLayout(m_utils->getLogicalDevice(),params.resources.texturesInfo,params.resources.samplersInfo,MaxImGUITextures); - params.assetManager = make_smart_refctd_ptr(smart_refctd_ptr(m_system)); - params.renderpass = 
smart_refctd_ptr(scRes->getRenderpass()); - params.subpassIx = 0u; - params.pipelineCache = nullptr; - interface.imGUI = ext::imgui::UI::create(std::move(params)); - if (!interface.imGUI) - return logFail("Failed to create `nbl::ext::imgui::UI` class"); - } + image->setObjectDebugName(("Triple Buffer Image " + std::to_string(i)).c_str()); - // create rest of User Interface - { - auto* imgui = interface.imGUI.get(); - // create the suballocated descriptor set - { - // note that we use default layout provided by our extension, but you are free to create your own by filling ext::imgui::UI::S_CREATION_PARAMETERS::resources - const auto* layout = imgui->getPipeline()->getLayout()->getDescriptorSetLayout(0u); - auto pool = m_device->createDescriptorPoolForDSLayouts(IDescriptorPool::E_CREATE_FLAGS::ECF_UPDATE_AFTER_BIND_BIT,{&layout,1}); - auto ds = pool->createDescriptorSet(smart_refctd_ptr(layout)); - interface.subAllocDS = make_smart_refctd_ptr(std::move(ds)); - if (!interface.subAllocDS) - return logFail("Failed to create the descriptor set"); - // make sure Texture Atlas slot is taken for eternity - { - auto dummy = SubAllocatedDescriptorSet::invalid_value; - interface.subAllocDS->multi_allocate(0,1,&dummy); - assert(dummy==ext::imgui::UI::FontAtlasTexId); - } - // write constant descriptors, note we don't create info & write pair for the samplers because UI extension's are immutable and baked into DS layout - IGPUDescriptorSet::SDescriptorInfo info = {}; - info.desc = smart_refctd_ptr(interface.imGUI->getFontAtlasView()); - info.info.image.imageLayout = IImage::LAYOUT::READ_ONLY_OPTIMAL; - const IGPUDescriptorSet::SWriteDescriptorSet write = { - .dstSet = interface.subAllocDS->getDescriptorSet(), - .binding = TexturesImGUIBindingIndex, - .arrayElement = ext::imgui::UI::FontAtlasTexId, - .count = 1, - .info = &info - }; - if (!m_device->updateDescriptorSets({&write,1},{})) - return logFail("Failed to write the descriptor set"); + // create framebuffers for the images 
+ { + auto imageView = m_device->createImageView({ + .flags = IGPUImageView::ECF_NONE, + // give it a Transfer SRC usage flag so we can transition to the Transfer SRC layout with End Renderpass + .subUsages = IGPUImage::EUF_RENDER_ATTACHMENT_BIT | IGPUImage::EUF_TRANSFER_SRC_BIT, + .image = core::smart_refctd_ptr(image), + .viewType = IGPUImageView::ET_2D, + .format = format + }); + const auto& imageParams = image->getCreationParameters(); + IGPUFramebuffer::SCreationParams params = { { + .renderpass = core::smart_refctd_ptr(m_renderpass), + .depthStencilAttachments = nullptr, + .colorAttachments = &imageView.get(), + .width = imageParams.extent.width, + .height = imageParams.extent.height, + .layers = imageParams.arrayLayers + } }; + m_framebuffers[i] = m_device->createFramebuffer(std::move(params)); + if (!m_framebuffers[i]) + return logFail("Failed to Create a Framebuffer for Image %d", i); + } + } - } - imgui->registerListener([this](){interface();}); } - interface.camera.mapKeysToArrows(); - - onAppInitializedFinish(); - return true; - } + // This time we'll create all CommandBuffers from one CommandPool, to keep life simple. However the Pool must support individually resettable CommandBuffers + // because they cannot be pre-recorded because the framebuffers/swapchain images they use will change when a swapchain recreates. 
+ auto pool = m_device->createCommandPool(getGraphicsQueue()->getFamilyIndex(), IGPUCommandPool::CREATE_FLAGS::RESET_COMMAND_BUFFER_BIT); + if (!pool || !pool->createCommandBuffers(IGPUCommandPool::BUFFER_LEVEL::PRIMARY, { m_cmdBufs.data(),MaxFramesInFlight }, core::smart_refctd_ptr(m_logger))) + return logFail("Failed to Create CommandBuffers!"); - // - virtual inline bool onAppTerminated() - { - SubAllocatedDescriptorSet::value_type fontAtlasDescIx = ext::imgui::UI::FontAtlasTexId; - IGPUDescriptorSet::SDropDescriptorSet dummy[1]; - interface.subAllocDS->multi_deallocate(dummy,TexturesImGUIBindingIndex,1,&fontAtlasDescIx); - return device_base_t::onAppTerminated(); - } + // UI + { + { + nbl::ext::imgui::UI::SCreationParameters params; + params.resources.texturesInfo = { .setIx = 0u, .bindingIx = 0u }; + params.resources.samplersInfo = { .setIx = 0u, .bindingIx = 1u }; + params.assetManager = m_assetManager; + params.pipelineCache = nullptr; + params.pipelineLayout = nbl::ext::imgui::UI::createDefaultPipelineLayout(m_utils->getLogicalDevice(), params.resources.texturesInfo, params.resources.samplersInfo, TotalUISampleTexturesAmount); + params.renderpass = smart_refctd_ptr(m_renderpass); + params.streamingBuffer = nullptr; + params.subpassIx = 0u; + params.transfer = getTransferUpQueue(); + params.utilities = m_utils; + + auto loadPrecompiledShader = [&](const std::string_view key) -> smart_refctd_ptr + { + IAssetLoader::SAssetLoadParams loadParams = {}; + loadParams.logger = m_logger.get(); + loadParams.workingDirectory = "app_resources"; + auto bundle = m_assetManager->getAsset(key.data(), loadParams); + const auto& contents = bundle.getContents(); + if (contents.empty()) + return nullptr; + return IAsset::castDown(contents[0]); + }; + + const auto vertexKey = nbl::this_example::builtin::build::get_spirv_key<"imgui_vertex">(m_device.get()); + const auto fragmentKey = nbl::this_example::builtin::build::get_spirv_key<"imgui_fragment">(m_device.get()); + auto 
vertexShader = loadPrecompiledShader(vertexKey.data()); + auto fragmentShader = loadPrecompiledShader(fragmentKey.data()); + if (!vertexShader || !fragmentShader) + return logFail("Failed to load precompiled ImGui shaders."); + + params.spirv = nbl::ext::imgui::UI::SCreationParameters::PrecompiledShaders{ + .vertex = std::move(vertexShader), + .fragment = std::move(fragmentShader) + }; + + m_ui.manager = nbl::ext::imgui::UI::create(std::move(params)); + } + + if (!m_ui.manager) + return false; + + // note that we use default layout provided by our extension, but you are free to create your own by filling nbl::ext::imgui::UI::S_CREATION_PARAMETERS::resources + const auto* descriptorSetLayout = m_ui.manager->getPipeline()->getLayout()->getDescriptorSetLayout(0u); + + IDescriptorPool::SCreateInfo descriptorPoolInfo = {}; + descriptorPoolInfo.maxDescriptorCount[static_cast(asset::IDescriptor::E_TYPE::ET_SAMPLER)] = (uint32_t)nbl::ext::imgui::UI::DefaultSamplerIx::COUNT; + descriptorPoolInfo.maxDescriptorCount[static_cast(asset::IDescriptor::E_TYPE::ET_SAMPLED_IMAGE)] = TotalUISampleTexturesAmount; + descriptorPoolInfo.maxSets = 1u; + descriptorPoolInfo.flags = IDescriptorPool::E_CREATE_FLAGS::ECF_UPDATE_AFTER_BIND_BIT; + + m_descriptorSetPool = m_device->createDescriptorPool(std::move(descriptorPoolInfo)); + assert(m_descriptorSetPool); + + m_descriptorSetPool->createDescriptorSets(1u, &descriptorSetLayout, &m_ui.descriptorSet); + assert(m_ui.descriptorSet); + + m_ui.manager->registerListener([this]() -> void { imguiListen(); }); + { + const auto ds = float32_t2{ m_window->getWidth(), m_window->getHeight() }; + + wInit.trsEditor.iPos = iPaddingOffset; + wInit.trsEditor.iSize = { ds.x * 0.1, ds.y - wInit.trsEditor.iPos.y * 2 }; + + wInit.planars.iSize = { ds.x * 0.2, ds.y - iPaddingOffset.y * 2 }; + wInit.planars.iPos = { ds.x - wInit.planars.iSize.x - iPaddingOffset.x, 0 + iPaddingOffset.y }; + + { + float leftX = wInit.trsEditor.iPos.x + wInit.trsEditor.iSize.x + 
iPaddingOffset.x; + float eachXSize = wInit.planars.iPos.x - (wInit.trsEditor.iPos.x + wInit.trsEditor.iSize.x) - 2*iPaddingOffset.x; + float eachYSize = (ds.y - 2 * iPaddingOffset.y - (wInit.renderWindows.size() - 1) * iPaddingOffset.y) / wInit.renderWindows.size(); + + for (size_t i = 0; i < wInit.renderWindows.size(); ++i) + { + auto& rw = wInit.renderWindows[i]; + rw.iPos = { leftX, (1+i) * iPaddingOffset.y + i * eachYSize }; + rw.iSize = { eachXSize, eachYSize }; + } + } + } + } + + // Geometry Creator Render Scene FBOs + { + const uint32_t addtionalBufferOwnershipFamilies[] = { getGraphicsQueue()->getFamilyIndex() }; + m_scene = CGeometryCreatorScene::create( + { + .transferQueue = getTransferUpQueue(), + .utilities = m_utils.get(), + .logger = m_logger.get(), + .addtionalBufferOwnershipFamilies = addtionalBufferOwnershipFamilies + }, + CSimpleDebugRenderer::DefaultPolygonGeometryPatch + ); + + if (!m_scene) + return logFail("Could not create geometry creator scene!"); + + { + IGPURenderpass::SCreationParams params = {}; + const IGPURenderpass::SCreationParams::SDepthStencilAttachmentDescription depthAttachments[] = { + {{ + { + .format = sceneRenderDepthFormat, + .samples = IGPUImage::ESCF_1_BIT, + .mayAlias = false + }, + /*.loadOp = */{IGPURenderpass::LOAD_OP::CLEAR}, + /*.storeOp = */{IGPURenderpass::STORE_OP::STORE}, + /*.initialLayout = */{IGPUImage::LAYOUT::UNDEFINED}, + /*.finalLayout = */{IGPUImage::LAYOUT::ATTACHMENT_OPTIMAL} + }}, + IGPURenderpass::SCreationParams::DepthStencilAttachmentsEnd + }; + params.depthStencilAttachments = depthAttachments; + const IGPURenderpass::SCreationParams::SColorAttachmentDescription colorAttachments[] = { + {{ + { + .format = finalSceneRenderFormat, + .samples = IGPUImage::E_SAMPLE_COUNT_FLAGS::ESCF_1_BIT, + .mayAlias = false + }, + /*.loadOp = */IGPURenderpass::LOAD_OP::CLEAR, + /*.storeOp = */IGPURenderpass::STORE_OP::STORE, + /*.initialLayout = */IGPUImage::LAYOUT::UNDEFINED, + /*.finalLayout = */ 
IGPUImage::LAYOUT::READ_ONLY_OPTIMAL + }}, + IGPURenderpass::SCreationParams::ColorAttachmentsEnd + }; + params.colorAttachments = colorAttachments; + IGPURenderpass::SCreationParams::SSubpassDescription subpasses[] = { + {}, + IGPURenderpass::SCreationParams::SubpassesEnd + }; + subpasses[0].depthStencilAttachment = {{.render={.attachmentIndex=0,.layout=IGPUImage::LAYOUT::ATTACHMENT_OPTIMAL}}}; + subpasses[0].colorAttachments[0] = {.render={.attachmentIndex=0,.layout=IGPUImage::LAYOUT::ATTACHMENT_OPTIMAL}}; + params.subpasses = subpasses; + const static IGPURenderpass::SCreationParams::SSubpassDependency dependencies[] = { + { + .srcSubpass = IGPURenderpass::SCreationParams::SSubpassDependency::External, + .dstSubpass = 0, + .memoryBarrier = { + .srcStageMask = PIPELINE_STAGE_FLAGS::LATE_FRAGMENT_TESTS_BIT|PIPELINE_STAGE_FLAGS::FRAGMENT_SHADER_BIT, + .srcAccessMask = ACCESS_FLAGS::NONE, + .dstStageMask = PIPELINE_STAGE_FLAGS::EARLY_FRAGMENT_TESTS_BIT|PIPELINE_STAGE_FLAGS::COLOR_ATTACHMENT_OUTPUT_BIT, + .dstAccessMask = ACCESS_FLAGS::DEPTH_STENCIL_ATTACHMENT_WRITE_BIT|ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT + } + }, + { + .srcSubpass = 0, + .dstSubpass = IGPURenderpass::SCreationParams::SSubpassDependency::External, + .memoryBarrier = { + .srcStageMask = PIPELINE_STAGE_FLAGS::COLOR_ATTACHMENT_OUTPUT_BIT, + .srcAccessMask = ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT, + .dstStageMask = PIPELINE_STAGE_FLAGS::FRAGMENT_SHADER_BIT|PIPELINE_STAGE_FLAGS::EARLY_FRAGMENT_TESTS_BIT, + .dstAccessMask = ACCESS_FLAGS::SAMPLED_READ_BIT + } + }, + IGPURenderpass::SCreationParams::DependenciesEnd + }; + params.dependencies = {}; + m_sceneRenderpass = m_device->createRenderpass(std::move(params)); + if (!m_sceneRenderpass) + return logFail("Failed to create Scene Renderpass!"); + } + + const auto& geometries = m_scene->getInitParams().geometries; + if (geometries.empty()) + return logFail("No geometries found for scene!"); + m_renderer = 
CSimpleDebugRenderer::create(m_assetManager.get(), m_sceneRenderpass.get(), 0, { &geometries.front().get(), geometries.size() }); + if (!m_renderer) + return logFail("Failed to create debug renderer!"); + + { + const auto& pipelines = m_renderer->getInitParams().pipelines; + auto ix = 0u; + for (const auto& name : m_scene->getInitParams().geometryNames) + { + if (name == "Cone") + m_renderer->getGeometry(ix).pipeline = pipelines[CSimpleDebugRenderer::SInitParams::PipelineType::Cone]; + ix++; + } + } + m_renderer->m_instances.resize(1); + + const auto dpyInfo = m_winMgr->getPrimaryDisplayInfo(); + for (uint32_t i = 0u; i < windowBindings.size(); ++i) + { + auto& binding = windowBindings[i]; + binding.sceneColorView = createAttachmentView(m_device.get(), finalSceneRenderFormat, dpyInfo.resX, dpyInfo.resY, "UI Scene Color Attachment"); + binding.sceneDepthView = createAttachmentView(m_device.get(), sceneRenderDepthFormat, dpyInfo.resX, dpyInfo.resY, "UI Scene Depth Attachment"); + binding.sceneFramebuffer = createSceneFramebuffer(m_device.get(), m_sceneRenderpass.get(), binding.sceneColorView.get(), binding.sceneDepthView.get()); + if (!binding.sceneFramebuffer) + return logFail("Could not create geometry creator scene[%d]!", i); + } + } + + oracle.reportBeginFrameRecord(); + + if (base_t::argv.size() >= 3 && argv[1] == "-timeout_seconds") + timeout = std::chrono::seconds(std::atoi(argv[2].c_str())); + start = clock_t::now(); + return true; + } + + bool updateGUIDescriptorSet() + { + // UI texture atlas + our camera scene textures, note we don't create info & write pair for the font sampler because UI extension's is immutable and baked into DS layout + static std::array descriptorInfo; + static IGPUDescriptorSet::SWriteDescriptorSet writes[TotalUISampleTexturesAmount]; + + descriptorInfo[nbl::ext::imgui::UI::FontAtlasTexId].info.image.imageLayout = IImage::LAYOUT::READ_ONLY_OPTIMAL; + descriptorInfo[nbl::ext::imgui::UI::FontAtlasTexId].desc = 
core::smart_refctd_ptr(m_ui.manager->getFontAtlasView()); + writes[nbl::ext::imgui::UI::FontAtlasTexId].info = descriptorInfo.data() + nbl::ext::imgui::UI::FontAtlasTexId; + + for (uint32_t i = 0; i < windowBindings.size(); ++i) + { + const auto textureIx = i + 1u; + + descriptorInfo[textureIx].info.image.imageLayout = IImage::LAYOUT::READ_ONLY_OPTIMAL; + descriptorInfo[textureIx].desc = windowBindings[i].sceneColorView; + + writes[textureIx].info = descriptorInfo.data() + textureIx; + writes[textureIx].info = descriptorInfo.data() + textureIx; + } + + for (uint32_t i = 0; i < descriptorInfo.size(); ++i) + { + writes[i].dstSet = m_ui.descriptorSet.get(); + writes[i].binding = 0u; + writes[i].arrayElement = i; + writes[i].count = 1u; + } + + return m_device->updateDescriptorSets(writes, {}); + } + + inline void workLoopBody() override + { + // framesInFlight: ensuring safe execution of command buffers and acquires, `framesInFlight` only affect semaphore waits, don't use this to index your resources because it can change with swapchain recreation. + const uint32_t framesInFlight = core::min(MaxFramesInFlight, m_surface->getMaxAcquiresInFlight()); + // We block for semaphores for 2 reasons here: + // A) Resource: Can't use resource like a command buffer BEFORE previous use is finished! [MaxFramesInFlight] + // B) Acquire: Can't have more acquires in flight than a certain threshold returned by swapchain or your surface helper class. 
[MaxAcquiresInFlight] + if (m_realFrameIx >= framesInFlight) + { + const ISemaphore::SWaitInfo cmdbufDonePending[] = { + { + .semaphore = m_semaphore.get(), + .value = m_realFrameIx + 1 - framesInFlight + } + }; + if (m_device->blockForSemaphores(cmdbufDonePending) != ISemaphore::WAIT_RESULT::SUCCESS) + return; + } + + // Predict size of next render, and bail if nothing to do + const auto currentSwapchainExtent = m_surface->getCurrentExtent(); + if (currentSwapchainExtent.width * currentSwapchainExtent.height <= 0) + return; + // The extent of the swapchain might change between now and `present` but the blit should adapt nicely + const VkRect2D currentRenderArea = { .offset = {0,0},.extent = currentSwapchainExtent }; + + // You explicitly should not use `getAcquireCount()` see the comment on `m_realFrameIx` + const auto resourceIx = m_realFrameIx % MaxFramesInFlight; + + // We will be using this command buffer to produce the frame + auto frame = m_tripleBuffers[resourceIx].get(); + auto cmdbuf = m_cmdBufs[resourceIx].get(); + + // update CPU stuff - controllers, events, UI state + update(); + + bool willSubmit = true; + { + willSubmit &= cmdbuf->reset(IGPUCommandBuffer::RESET_FLAGS::RELEASE_RESOURCES_BIT); + willSubmit &= cmdbuf->begin(IGPUCommandBuffer::USAGE::ONE_TIME_SUBMIT_BIT); + willSubmit &= cmdbuf->beginDebugMarker("UIApp Frame"); + + auto renderScene = [&](windowControlBinding& binding) + { + if (!binding.sceneFramebuffer) + return; + + const auto& fbParams = binding.sceneFramebuffer->getCreationParameters(); + const VkRect2D renderArea = { .offset = {0,0}, .extent = {fbParams.width, fbParams.height} }; + const IGPUCommandBuffer::SRenderpassBeginInfo info = { + .framebuffer = binding.sceneFramebuffer.get(), + .colorClearValues = &SceneClearColor, + .depthStencilClearValues = &SceneClearDepth, + .renderArea = renderArea + }; + + willSubmit &= cmdbuf->beginRenderPass(info, IGPUCommandBuffer::SUBPASS_CONTENTS::INLINE); + { + asset::SViewport viewport = {}; + 
viewport.minDepth = 1.f; + viewport.maxDepth = 0.f; + viewport.x = 0u; + viewport.y = 0u; + viewport.width = fbParams.width; + viewport.height = fbParams.height; + + willSubmit &= cmdbuf->setViewport(0u, 1u, &viewport); + willSubmit &= cmdbuf->setScissor(0u, 1u, &renderArea); + + const auto viewParams = CSimpleDebugRenderer::SViewParams(binding.viewMatrix, binding.viewProjMatrix); + m_renderer->render(cmdbuf, viewParams); + } + willSubmit &= cmdbuf->endRenderPass(); + }; + + if (m_renderer && !m_renderer->m_instances.empty()) + { + auto& instance = m_renderer->m_instances[0]; + instance.world = m_model; + const auto geomCount = m_renderer->getGeometries().size(); + if (geomCount) + { + if (gcIndex >= geomCount) + gcIndex = 0; + instance.packedGeo = m_renderer->getGeometries().data() + gcIndex; + } + } + + if (useWindow) + for (auto& binding : windowBindings) + renderScene(binding); + else + renderScene(windowBindings[activeRenderWindowIx]); + + const IGPUCommandBuffer::SClearColorValue clearValue = { .float32 = {0.f,0.f,0.f,1.f} }; + const IGPUCommandBuffer::SRenderpassBeginInfo info = { + .framebuffer = m_framebuffers[resourceIx].get(), + .colorClearValues = &clearValue, + .depthStencilClearValues = nullptr, + .renderArea = currentRenderArea + }; + + // UI renderpass + willSubmit &= cmdbuf->beginRenderPass(info, IGPUCommandBuffer::SUBPASS_CONTENTS::INLINE); + { + asset::SViewport viewport; + { + viewport.minDepth = 1.f; + viewport.maxDepth = 0.f; + viewport.x = 0u; + viewport.y = 0u; + viewport.width = m_window->getWidth(); + viewport.height = m_window->getHeight(); + } + + willSubmit &= cmdbuf->setViewport(0u, 1u, &viewport); + + const VkRect2D currentRenderArea = + { + .offset = {0,0}, + .extent = {m_window->getWidth(),m_window->getHeight()} + }; + + IQueue::SSubmitInfo::SCommandBufferInfo commandBuffersInfo[] = { {.cmdbuf = cmdbuf } }; + + const IGPUCommandBuffer::SRenderpassBeginInfo info = + { + .framebuffer = m_framebuffers[resourceIx].get(), + 
.colorClearValues = &clearValue, + .depthStencilClearValues = nullptr, + .renderArea = currentRenderArea + }; + + nbl::video::ISemaphore::SWaitInfo waitInfo = { .semaphore = m_semaphore.get(), .value = m_realFrameIx + 1u }; + const auto uiParams = m_ui.manager->getCreationParameters(); + auto* pipeline = m_ui.manager->getPipeline(); + + cmdbuf->bindGraphicsPipeline(pipeline); + cmdbuf->bindDescriptorSets(EPBP_GRAPHICS, pipeline->getLayout(), uiParams.resources.texturesInfo.setIx, 1u, &m_ui.descriptorSet.get()); // note that we use default UI pipeline layout where uiParams.resources.textures.setIx == uiParams.resources.samplers.setIx + + if (!keepRunning()) + return; + + willSubmit &= m_ui.manager->render(cmdbuf, waitInfo); + } + willSubmit &= cmdbuf->endRenderPass(); + + // If the Rendering and Blit/Present Queues don't come from the same family we need to transfer ownership, because we need to preserve contents between them. + auto blitQueueFamily = m_surface->getAssignedQueue()->getFamilyIndex(); + // Also should crash/error if concurrent sharing enabled but would-be-user-queue is not in the share set, but oh well. 
+ const bool needOwnershipRelease = cmdbuf->getQueueFamilyIndex() != blitQueueFamily && !frame->getCachedCreationParams().isConcurrentSharing(); + if (needOwnershipRelease) + { + const IGPUCommandBuffer::SPipelineBarrierDependencyInfo::image_barrier_t barrier[] = { { + .barrier = { + .dep = { + // Normally I'd put `COLOR_ATTACHMENT` on the masks, but we want this to happen after Layout Transition :( + // https://github.com/KhronosGroup/Vulkan-Docs/issues/2319 + .srcStageMask = asset::PIPELINE_STAGE_FLAGS::ALL_COMMANDS_BITS, + .srcAccessMask = asset::ACCESS_FLAGS::MEMORY_READ_BITS | asset::ACCESS_FLAGS::MEMORY_WRITE_BITS, + // For a Queue Family Ownership Release the destination access masks are irrelevant + // and source stage mask can be NONE as long as the semaphore signals ALL_COMMANDS_BIT + .dstStageMask = asset::PIPELINE_STAGE_FLAGS::NONE, + .dstAccessMask = asset::ACCESS_FLAGS::NONE + }, + .ownershipOp = IGPUCommandBuffer::SOwnershipTransferBarrier::OWNERSHIP_OP::RELEASE, + .otherQueueFamilyIndex = blitQueueFamily + }, + .image = frame, + .subresourceRange = TripleBufferUsedSubresourceRange + // there will be no layout transition, already done by the Renderpass End + } }; + const IGPUCommandBuffer::SPipelineBarrierDependencyInfo depInfo = { .imgBarriers = barrier }; + willSubmit &= cmdbuf->pipelineBarrier(asset::EDF_NONE, depInfo); + } + } + willSubmit &= cmdbuf->end(); + + // submit and present under a mutex ASAP + if (willSubmit) + { + // We will signal a semaphore in the rendering queue, and await it with the presentation/blit queue + const IQueue::SSubmitInfo::SSemaphoreInfo rendered = + { + .semaphore = m_semaphore.get(), + .value = m_realFrameIx + 1, + // Normally I'd put `COLOR_ATTACHMENT` on the masks, but we want to signal after Layout Transitions and optional Ownership Release + // https://github.com/KhronosGroup/Vulkan-Docs/issues/2319 + .stageMask = asset::PIPELINE_STAGE_FLAGS::ALL_COMMANDS_BITS + }; + const IQueue::SSubmitInfo::SCommandBufferInfo 
cmdbufs[1] = + { { + .cmdbuf = cmdbuf + } }; + // We need to wait on previous triple buffer blits/presents from our source image to complete + auto* pBlitWaitValue = m_blitWaitValues.data() + resourceIx; + auto swapchainLock = m_surface->pseudoAcquire(pBlitWaitValue); + const IQueue::SSubmitInfo::SSemaphoreInfo blitted = + { + .semaphore = m_surface->getPresentSemaphore(), + .value = pBlitWaitValue->load(), + // Normally I'd put `BLIT` on the masks, but we want to wait before Implicit Layout Transitions and optional Implicit Ownership Acquire + // https://github.com/KhronosGroup/Vulkan-Docs/issues/2319 + .stageMask = asset::PIPELINE_STAGE_FLAGS::ALL_COMMANDS_BITS + }; + const IQueue::SSubmitInfo submitInfos[1] = + { + { + .waitSemaphores = {&blitted,1}, + .commandBuffers = cmdbufs, + .signalSemaphores = {&rendered,1} + } + }; + + updateGUIDescriptorSet(); + + if (getGraphicsQueue()->submit(submitInfos) != IQueue::RESULT::SUCCESS) + return; + + m_realFrameIx++; + + // only present if there's successful content to show + const ISmoothResizeSurface::SPresentInfo presentInfo = { + { + .source = {.image = frame,.rect = currentRenderArea}, + .waitSemaphore = rendered.semaphore, + .waitValue = rendered.value, + .pPresentSemaphoreWaitValue = pBlitWaitValue, + }, + // The Graphics Queue will be the the most recent owner just before it releases ownership + cmdbuf->getQueueFamilyIndex() + }; + if (m_ciMode && !m_ciScreenshotDone) + { + ++m_ciFrameCounter; + if (m_ciFrameCounter >= CiFramesBeforeCapture) + { + m_ciScreenshotDone = true; + if (!m_device || !m_assetManager || !m_surface) + return; + + m_logger->log("CI screenshot capture start (frame %u).", ILogger::ELL_INFO, m_ciFrameCounter); + const ISemaphore::SWaitInfo waitInfo = { .semaphore = m_semaphore.get(), .value = m_realFrameIx }; + if (m_device->blockForSemaphores({ &waitInfo, &waitInfo + 1 }) != ISemaphore::WAIT_RESULT::SUCCESS) + { + m_logger->log("CI screenshot failed: wait for render finished.", 
ILogger::ELL_ERROR); + return; + } + + if (!frame) + { + m_logger->log("CI screenshot failed: missing frame image.", ILogger::ELL_ERROR); + return; + } + + auto viewParams = IGPUImageView::SCreationParams{ + .subUsages = IGPUImage::EUF_TRANSFER_SRC_BIT, + .image = core::smart_refctd_ptr(frame), + .viewType = IGPUImageView::ET_2D, + .format = frame->getCreationParameters().format + }; + viewParams.subresourceRange.aspectMask = IGPUImage::EAF_COLOR_BIT; + viewParams.subresourceRange.baseMipLevel = 0u; + viewParams.subresourceRange.levelCount = 1u; + viewParams.subresourceRange.baseArrayLayer = 0u; + viewParams.subresourceRange.layerCount = 1u; + auto frameView = m_device->createImageView(std::move(viewParams)); + if (!frameView) + { + m_logger->log("CI screenshot failed: could not create frame view.", ILogger::ELL_ERROR); + return; + } + + m_logger->log("CI screenshot capture: calling createScreenShot.", ILogger::ELL_INFO); + const bool ok = ext::ScreenShot::createScreenShot( + m_device.get(), + getGraphicsQueue(), + nullptr, + frameView.get(), + m_assetManager.get(), + m_ciScreenshotPath, + asset::IImage::LAYOUT::TRANSFER_SRC_OPTIMAL, + asset::ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT); + + if (ok) + m_logger->log("CI screenshot saved to \"%s\".", ILogger::ELL_INFO, m_ciScreenshotPath.string().c_str()); + else + m_logger->log("CI screenshot failed to save.", ILogger::ELL_ERROR); + } + } + + m_surface->present(std::move(swapchainLock), presentInfo); + } + firstFrame = false; + } + + inline bool keepRunning() override + { + if (m_ciMode && m_ciScreenshotDone) + return false; + if (m_surface->irrecoverable()) + return false; + + return true; + } + + inline bool onAppTerminated() override + { + return base_t::onAppTerminated(); + } + + inline void update() + { + m_inputSystem->getDefaultMouse(&mouse); + m_inputSystem->getDefaultKeyboard(&keyboard); + + auto updatePresentationTimestamp = [&]() + { + oracle.reportEndFrameRecord(); + const auto timestamp = 
oracle.getNextPresentationTimeStamp(); + oracle.reportBeginFrameRecord(); + + return timestamp; + }; + + m_nextPresentationTimestamp = updatePresentationTimestamp(); + + struct + { + std::vector mouse {}; + std::vector keyboard {}; + } capturedEvents; + { + mouse.consumeEvents([&](const IMouseEventChannel::range_t& events) -> void + { + if (m_window->hasInputFocus()) + for (const auto& e : events) + capturedEvents.mouse.emplace_back(e); + }, m_logger.get()); + + keyboard.consumeEvents([&](const IKeyboardEventChannel::range_t& events) -> void + { + if (m_window->hasInputFocus()) + for (const auto& e : events) + capturedEvents.keyboard.emplace_back(e); + }, m_logger.get()); + } + + const auto cursorPosition = m_window->getCursorControl()->getPosition(); + + nbl::ext::imgui::UI::SUpdateParameters params = + { + .mousePosition = nbl::hlsl::float32_t2(cursorPosition.x, cursorPosition.y) - nbl::hlsl::float32_t2(m_window->getX(), m_window->getY()), + .displaySize = { m_window->getWidth(), m_window->getHeight() }, + .mouseEvents = { capturedEvents.mouse.data(), capturedEvents.mouse.size() }, + .keyboardEvents = { capturedEvents.keyboard.data(), capturedEvents.keyboard.size() } + }; + + if (enableActiveCameraMovement) + { + auto& binding = windowBindings[activeRenderWindowIx]; + auto& planar = m_planarProjections[binding.activePlanarIx]; + auto* camera = planar->getCamera(); + + assert(binding.boundProjectionIx.has_value()); + auto& projection = planar->getPlanarProjections()[binding.boundProjectionIx.value()]; + + static std::vector virtualEvents(0x45); + uint32_t vCount = {}; + + projection.beginInputProcessing(m_nextPresentationTimestamp); + { + projection.process(nullptr, vCount); + + if (virtualEvents.size() < vCount) + virtualEvents.resize(vCount); + + auto* orbit = dynamic_cast(camera); + + if (orbit) + { + uint32_t vKeyboardEventsCount = {}, vMouseEventsCount = {}; + + projection.processKeyboard(nullptr, vKeyboardEventsCount, {}); + projection.processMouse(nullptr, 
vMouseEventsCount, {}); + + auto* output = virtualEvents.data(); + + projection.processKeyboard(output, vKeyboardEventsCount, params.keyboardEvents); + output += vKeyboardEventsCount; + + if (ImGui::IsMouseDown(ImGuiMouseButton_Left)) + projection.processMouse(output, vMouseEventsCount, params.mouseEvents); + else + vMouseEventsCount = 0; + + vCount = vKeyboardEventsCount + vMouseEventsCount; + } + else + projection.process(virtualEvents.data(), vCount, { params.keyboardEvents, params.mouseEvents }); + } + projection.endInputProcessing(); + + if (vCount) + camera->manipulate({ virtualEvents.data(), vCount }); + } + + m_ui.manager->update(params); + } + + private: + inline void imguiListen() + { + ImGuiIO& io = ImGui::GetIO(); + if (m_ciMode) + { + io.IniFilename = nullptr; + useWindow = true; + } + + ImGuizmo::BeginFrame(); + { + if (!m_ciMode) + { + nbl::hlsl::ShowDebugWindow(); + ImGuizmo::ShowDebugImguizmoWindow(); + } + + SImResourceInfo info; + info.samplerIx = (uint16_t)nbl::ext::imgui::UI::DefaultSamplerIx::USER; + + // ORBIT CAMERA TEST + { + for (auto& planar : m_planarProjections) + { + auto* camera = planar->getCamera(); + + auto* orbit = dynamic_cast(camera); + + if (orbit) + { + auto targetPostion = hlsl::transpose(getMatrix3x4As4x4(m_model))[3]; + orbit->target(targetPostion); + orbit->manipulate({}, {}); + } + } + } + + // render bound planar camera views onto GUI windows + if (useWindow) + { + // ABS TRS editor to manipulate bound object + TransformEditor(); + + if(enableActiveCameraMovement) + ImGuizmo::Enable(false); + else + ImGuizmo::Enable(true); + + size_t gizmoIx = {}; + size_t manipulationCounter = {}; + const std::optional modelInUseIx = ImGuizmo::IsUsingAny() ? std::optional(boundPlanarCameraIxToManipulate.has_value() ? 
1u + boundPlanarCameraIxToManipulate.value() : 0u) : std::optional(std::nullopt); + + for (uint32_t windowIx = 0; windowIx < windowBindings.size(); ++windowIx) + { + // setup + { + const auto& rw = wInit.renderWindows[windowIx]; + const ImGuiCond windowCond = m_ciMode ? ImGuiCond_Always : ImGuiCond_Appearing; + ImGui::SetNextWindowPos({ rw.iPos.x, rw.iPos.y }, windowCond); + ImGui::SetNextWindowSize({ rw.iSize.x, rw.iSize.y }, windowCond); + } + ImGui::SetNextWindowSizeConstraints(ImVec2(0x45, 0x45), ImVec2(7680, 4320)); + + ImGui::PushStyleColor(ImGuiCol_WindowBg, (ImVec4)ImColor(0.35f, 0.3f, 0.3f)); + const std::string ident = "Render Window \"" + std::to_string(windowIx) + "\""; + + ImGui::Begin(ident.data(), 0); + const ImVec2 contentRegionSize = ImGui::GetContentRegionAvail(), windowPos = ImGui::GetWindowPos(), cursorPos = ImGui::GetCursorScreenPos(); + + ImGuiWindow* window = ImGui::GetCurrentWindow(); + { + const auto mPos = ImGui::GetMousePos(); + + if (mPos.x < cursorPos.x || mPos.y < cursorPos.y || mPos.x > cursorPos.x + contentRegionSize.x || mPos.y > cursorPos.y + contentRegionSize.y) + window->Flags = ImGuiWindowFlags_None; + else + window->Flags = ImGuiWindowFlags_NoMove; + } + + // setup bound entities for the window like camera & projections + auto& binding = windowBindings[windowIx]; + auto& planarBound = m_planarProjections[binding.activePlanarIx]; + assert(planarBound); + + binding.aspectRatio = contentRegionSize.x / contentRegionSize.y; + auto* planarViewCameraBound = planarBound->getCamera(); + + assert(planarViewCameraBound); + assert(binding.boundProjectionIx.has_value()); + + auto& projection = planarBound->getPlanarProjections()[binding.boundProjectionIx.value()]; + projection.update(binding.leftHandedProjection, binding.aspectRatio); + + // TODO: + // would be nice to normalize imguizmo visual vectors (possible with styles) + + // first 0th texture is for UI texture atlas, then there are our window textures + auto fboImguiTextureID = 
windowIx + 1u; + info.textureID = fboImguiTextureID; + + if(binding.allowGizmoAxesToFlip) + ImGuizmo::AllowAxisFlip(true); + else + ImGuizmo::AllowAxisFlip(false); + + if(projection.getParameters().m_type == IPlanarProjection::CProjection::Orthographic) + ImGuizmo::SetOrthographic(true); + else + ImGuizmo::SetOrthographic(false); + + ImGuizmo::SetDrawlist(); + ImGui::Image(info, contentRegionSize); + ImGuizmo::SetRect(cursorPos.x, cursorPos.y, contentRegionSize.x, contentRegionSize.y); + + // I will assume we need to focus a window to start manipulating objects from it + if (ImGui::IsWindowFocused(ImGuiFocusedFlags_ChildWindows)) + activeRenderWindowIx = windowIx; + + // we render a scene from view of a camera bound to planar window + ImGuizmoPlanarM16InOut imguizmoPlanar; + imguizmoPlanar.view = getCastedMatrix(hlsl::transpose(getMatrix3x4As4x4(planarViewCameraBound->getGimbal().getViewMatrix()))); + imguizmoPlanar.projection = getCastedMatrix(hlsl::transpose(projection.getProjectionMatrix())); + + if (flipGizmoY) // note we allow to flip gizmo just to match our coordinates + imguizmoPlanar.projection[1][1] *= -1.f; // https://johannesugb.github.io/gpu-programming/why-do-opengl-proj-matrices-fail-in-vulkan/ + + static constexpr float identityMatrix[] = + { + 1.f, 0.f, 0.f, 0.f, + 0.f, 1.f, 0.f, 0.f, + 0.f, 0.f, 1.f, 0.f, + 0.f, 0.f, 0.f, 1.f + }; + + if(binding.enableDebugGridDraw) + ImGuizmo::DrawGrid(&imguizmoPlanar.view[0][0], &imguizmoPlanar.projection[0][0], identityMatrix, 100.f); + + for (uint32_t modelIx = 0; modelIx < 1u + m_planarProjections.size(); modelIx++) + { + ImGuizmo::PushID(gizmoIx); ++gizmoIx; + + const bool isCameraGimbalTarget = modelIx; // I assume scene demo model is 0th ix, left are planar cameras + ICamera* const targetGimbalManipulationCamera = isCameraGimbalTarget ? 
m_planarProjections[modelIx - 1u]->getCamera() : nullptr; + + // if we try to manipulate a camera which appears to be the same camera we see scene from then obvsly it doesn't make sense to manipulate its gizmo so we skip it + // EDIT: it actually makes some sense if you assume render planar view is rendered with ortho projection, but we would need to add imguizmo controller virtual map + // to ban forward/backward in this mode if this condition is true + if (targetGimbalManipulationCamera == planarViewCameraBound) + { + ImGuizmo::PopID(); + continue; + } + + ImGuizmoModelM16InOut imguizmoModel; + + if (isCameraGimbalTarget) + { + assert(targetGimbalManipulationCamera); + imguizmoModel.inTRS = getCastedMatrix(targetGimbalManipulationCamera->getGimbal().template operator() < float64_t4x4 > ()); + } + else + imguizmoModel.inTRS = hlsl::transpose(getMatrix3x4As4x4(m_model)); + + imguizmoModel.outTRS = imguizmoModel.inTRS; + { + const bool success = ImGuizmo::Manipulate(&imguizmoPlanar.view[0][0], &imguizmoPlanar.projection[0][0], ImGuizmo::OPERATION::UNIVERSAL, mCurrentGizmoMode, &imguizmoModel.outTRS[0][0], &imguizmoModel.outDeltaTRS[0][0], useSnap ? &snap[0] : nullptr); + + if (success) + { + if (targetGimbalManipulationCamera) + { + const auto referenceFrame = getCastedMatrix(*reinterpret_cast(ImGuizmo::GetReferenceFrame())); + + boundCameraToManipulate = smart_refctd_ptr(targetGimbalManipulationCamera); + boundPlanarCameraIxToManipulate = modelIx - 1u; + + // TODO: TO BE REMOVED, ONLY FOR TESTING ITS INCOMPLETE TYPE! 
+ const auto& imguizmoCtx = ImGuizmo::GetContext(); + + struct + { + float32_t3 t, r, s; + } out, delta; + + ImGuizmo::DecomposeMatrixToComponents(&imguizmoModel.outTRS[0][0], &out.t[0], &out.r[0], &out.s[0]); + ImGuizmo::DecomposeMatrixToComponents(&imguizmoModel.outDeltaTRS[0][0], &delta.t[0], &delta.r[0], &delta.s[0]); + { + std::vector virtualEvents; + + auto requestMagnitudeUpdateWithScalar = [&](float signPivot, float dScalar, float dMagnitude, auto positive, auto negative) + { + if (dScalar != signPivot) + { + auto& ev = virtualEvents.emplace_back(); + auto code = (dScalar > signPivot) ? positive : negative; + + ev.type = code; + ev.magnitude += dMagnitude; + } + }; + + // TODO TESTING STUFF WITH MY IMGUIZMO UPDATES + // IT WILL BE REMOVED ONCE ALL TESTS ARE DONE + // AND CONTROLLER API WILL BE USED INSTEAD + + // translations + { + ImGuizmo::OPERATION ioType; + const auto dScalar = ImGuizmo::GetTranslationDeltaScalar(&ioType); + + if (dScalar) + { + switch (ioType) + { + case ImGuizmo::OPERATION::TRANSLATE_X: + { + requestMagnitudeUpdateWithScalar(0.f, dScalar, std::abs(dScalar), CVirtualGimbalEvent::VirtualEventType::MoveRight, CVirtualGimbalEvent::VirtualEventType::MoveLeft); + } break; + + case ImGuizmo::OPERATION::TRANSLATE_Y: + { + requestMagnitudeUpdateWithScalar(0.f, dScalar, std::abs(dScalar), CVirtualGimbalEvent::VirtualEventType::MoveUp, CVirtualGimbalEvent::VirtualEventType::MoveDown); + } break; + + case ImGuizmo::OPERATION::TRANSLATE_Z: + { + requestMagnitudeUpdateWithScalar(0.f, dScalar, std::abs(dScalar), CVirtualGimbalEvent::VirtualEventType::MoveForward, CVirtualGimbalEvent::VirtualEventType::MoveBackward); + } break; + + default: break; + } + } + } + + // TODO: ok becuase I have only one reference from imguizmo I must do it differently when + // I have local base && want to do rotation with respect to world instead; we almost there + + // rotations + { + ImGuizmo::OPERATION ioType; + float dRadians = 
ImGuizmo::GetRotationDeltaRadians(&ioType); + + if (dRadians) + { + switch (ioType) + { + case ImGuizmo::OPERATION::ROTATE_X: + { + requestMagnitudeUpdateWithScalar(0.f, dRadians, std::abs(dRadians), CVirtualGimbalEvent::VirtualEventType::TiltUp, CVirtualGimbalEvent::VirtualEventType::TiltDown); + } break; + + case ImGuizmo::OPERATION::ROTATE_Y: + { + requestMagnitudeUpdateWithScalar(0.f, dRadians, std::abs(dRadians), CVirtualGimbalEvent::VirtualEventType::PanRight, CVirtualGimbalEvent::VirtualEventType::PanLeft); + } break; + + case ImGuizmo::OPERATION::ROTATE_Z: + { + requestMagnitudeUpdateWithScalar(0.f, dRadians, std::abs(dRadians), CVirtualGimbalEvent::VirtualEventType::RollRight, CVirtualGimbalEvent::VirtualEventType::RollLeft); + } break; + + default: + assert(false); break; // should never be hit + } + } + } + + const auto vCount = virtualEvents.size(); + + if (vCount) + { + const float pMoveSpeed = targetGimbalManipulationCamera->getMoveSpeedScale(); + const float pRotationSpeed = targetGimbalManipulationCamera->getRotationSpeedScale(); + + // I start to think controller should be able to set sensitivity to scale magnitudes of generated events + // in order for camera to not keep any magnitude scalars like move or rotation speed scales + + targetGimbalManipulationCamera->setMoveSpeedScale(1); + targetGimbalManipulationCamera->setRotationSpeedScale(1); + + targetGimbalManipulationCamera->manipulate({ virtualEvents.data(), vCount }, &referenceFrame); + + targetGimbalManipulationCamera->setMoveSpeedScale(pMoveSpeed); + targetGimbalManipulationCamera->setRotationSpeedScale(pRotationSpeed); + } + + } + } + else + { + // again, for scene demo model full affine transformation without limits is assumed + m_model = float32_t3x4(hlsl::transpose(imguizmoModel.outTRS)); + boundCameraToManipulate = nullptr; + boundPlanarCameraIxToManipulate = std::nullopt; + } + } + + if (ImGuizmo::IsOver() and not ImGuizmo::IsUsingAny() && not enableActiveCameraMovement) + { + 
ImGui::PushStyleColor(ImGuiCol_WindowBg, ImVec4(0.2f, 0.2f, 0.2f, 0.8f)); + ImGui::PushStyleColor(ImGuiCol_Border, ImVec4(1.0f, 1.0f, 1.0f, 1.0f)); + ImGui::PushStyleVar(ImGuiStyleVar_WindowBorderSize, 1.5f); + + ImGuiIO& io = ImGui::GetIO(); + ImVec2 mousePos = io.MousePos; + ImGui::SetNextWindowPos(ImVec2(mousePos.x + 10, mousePos.y + 10), ImGuiCond_Always); + + ImGui::Begin("InfoOverlay", nullptr, + ImGuiWindowFlags_NoDecoration | + ImGuiWindowFlags_AlwaysAutoResize | + ImGuiWindowFlags_NoSavedSettings); + + std::string ident; + + if (targetGimbalManipulationCamera) + ident = targetGimbalManipulationCamera->getIdentifier(); + else + ident = "Geometry Creator Object"; + + ImGui::Text("Identifier: %s", ident.c_str()); + ImGui::Text("Object Ix: %u", modelIx); + + ImGui::End(); + + ImGui::PopStyleVar(); + ImGui::PopStyleColor(2); + } + } + ImGuizmo::PopID(); + } + + ImGui::End(); + ImGui::PopStyleColor(1); + } + assert(manipulationCounter <= 1u); + } + // render selected camera view onto full screen + else + { + info.textureID = 1u + activeRenderWindowIx; + + ImGui::SetNextWindowPos(ImVec2(0, 0)); + ImGui::SetNextWindowSize(io.DisplaySize); + ImGui::PushStyleColor(ImGuiCol_WindowBg, ImVec4(0, 0, 0, 0)); // fully transparent fake window + ImGui::Begin("FullScreenWindow", nullptr, ImGuiWindowFlags_NoTitleBar | ImGuiWindowFlags_NoResize | ImGuiWindowFlags_NoMove | ImGuiWindowFlags_NoScrollbar | ImGuiWindowFlags_NoScrollWithMouse | ImGuiWindowFlags_NoCollapse | ImGuiWindowFlags_NoBringToFrontOnFocus | ImGuiWindowFlags_NoBackground | ImGuiWindowFlags_NoInputs); + const ImVec2 contentRegionSize = ImGui::GetContentRegionAvail(), windowPos = ImGui::GetWindowPos(), cursorPos = ImGui::GetCursorScreenPos(); + { + auto& binding = windowBindings[activeRenderWindowIx]; + auto& planarBound = m_planarProjections[binding.activePlanarIx]; + assert(planarBound); + + binding.aspectRatio = contentRegionSize.x / contentRegionSize.y; + auto* planarViewCameraBound = 
planarBound->getCamera(); + + assert(planarViewCameraBound); + assert(binding.boundProjectionIx.has_value()); + + auto& projection = planarBound->getPlanarProjections()[binding.boundProjectionIx.value()]; + projection.update(binding.leftHandedProjection, binding.aspectRatio); + } + + ImGui::Image(info, contentRegionSize); + ImGuizmo::SetRect(cursorPos.x, cursorPos.y, contentRegionSize.x, contentRegionSize.y); + + ImGui::End(); + ImGui::PopStyleColor(1); + } + } + + // update camera matrices for scene rendering + { + for (uint32_t i = 0u; i < windowBindings.size(); ++i) + { + auto& binding = windowBindings[i]; + + auto& planarBound = m_planarProjections[binding.activePlanarIx]; + assert(planarBound); + auto* boundPlanarCamera = planarBound->getCamera(); + + assert(binding.boundProjectionIx.has_value()); + auto& projection = planarBound->getPlanarProjections()[binding.boundProjectionIx.value()]; + projection.update(binding.leftHandedProjection, binding.aspectRatio); + + auto viewMatrix = getCastedMatrix(boundPlanarCamera->getGimbal().getViewMatrix()); + auto viewProjMatrix = mul(getCastedMatrix(projection.getProjectionMatrix()), getMatrix3x4As4x4(viewMatrix)); + + binding.viewMatrix = viewMatrix; + binding.viewProjMatrix = viewProjMatrix; + } + } + + // Planars + { + // setup + { + const ImGuiCond windowCond = m_ciMode ? 
ImGuiCond_Always : ImGuiCond_Appearing; + ImGui::SetNextWindowPos({ wInit.planars.iPos.x, wInit.planars.iPos.y }, windowCond); + ImGui::SetNextWindowSize({ wInit.planars.iSize.x, wInit.planars.iSize.y }, windowCond); + } + + ImGui::Begin("Planar projection"); + ImGui::Checkbox("Window mode##useWindow", &useWindow); + ImGui::Separator(); + + auto& active = windowBindings[activeRenderWindowIx]; + const auto activeRenderWindowIxString = std::to_string(activeRenderWindowIx); + + ImGui::Text("Active Render Window: %s", activeRenderWindowIxString.c_str()); + { + const size_t planarsCount = m_planarProjections.size(); + assert(planarsCount); + + std::vector sbels(planarsCount); + for (size_t i = 0; i < planarsCount; ++i) + sbels[i] = "Planar " + std::to_string(i); - inline IQueue::SSubmitInfo::SSemaphoreInfo renderFrame(const std::chrono::microseconds nextPresentationTimestamp) override - { - // CPU events - update(nextPresentationTimestamp); + std::vector labels(planarsCount); + for (size_t i = 0; i < planarsCount; ++i) + labels[i] = sbels[i].c_str(); + + int currentPlanarIx = static_cast(active.activePlanarIx); + if (ImGui::Combo("Active Planar", ¤tPlanarIx, labels.data(), static_cast(labels.size()))) + { + active.activePlanarIx = static_cast(currentPlanarIx); + active.pickDefaultProjections(m_planarProjections[active.activePlanarIx]->getPlanarProjections()); + } + } - const auto& virtualWindowRes = interface.sceneResolution; - if (!m_framebuffer || m_framebuffer->getCreationParameters().width!=virtualWindowRes[0] || m_framebuffer->getCreationParameters().height!=virtualWindowRes[1]) - recreateFramebuffer(virtualWindowRes); + assert(active.boundProjectionIx.has_value()); + assert(active.lastBoundPerspectivePresetProjectionIx.has_value()); + assert(active.lastBoundOrthoPresetProjectionIx.has_value()); - // - const auto resourceIx = m_realFrameIx % MaxFramesInFlight; + const auto activePlanarIxString = std::to_string(active.activePlanarIx); + auto& planarBound = 
m_planarProjections[active.activePlanarIx]; + assert(planarBound); - auto* const cb = m_cmdBufs.data()[resourceIx].get(); - cb->reset(IGPUCommandBuffer::RESET_FLAGS::RELEASE_RESOURCES_BIT); - cb->begin(IGPUCommandBuffer::USAGE::ONE_TIME_SUBMIT_BIT); - // clear to black for both things - const IGPUCommandBuffer::SClearColorValue clearValue = { .float32 = {0.f,0.f,0.f,1.f} }; - if (m_framebuffer) - { - cb->beginDebugMarker("UISampleApp Scene Frame"); + auto selectedProjectionType = planarBound->getPlanarProjections()[active.boundProjectionIx.value()].getParameters().m_type; { - const IGPUCommandBuffer::SClearDepthStencilValue farValue = { .depth=0.f }; - const IGPUCommandBuffer::SRenderpassBeginInfo renderpassInfo = + const char* labels[] = { "Perspective", "Orthographic" }; + int type = static_cast(selectedProjectionType); + + if (ImGui::Combo("Projection Type", &type, labels, IM_ARRAYSIZE(labels))) { - .framebuffer = m_framebuffer.get(), - .colorClearValues = &clearValue, - .depthStencilClearValues = &farValue, - .renderArea = { - .offset = {0,0}, - .extent = {virtualWindowRes[0],virtualWindowRes[1]} + selectedProjectionType = static_cast(type); + + switch (selectedProjectionType) + { + case IPlanarProjection::CProjection::Perspective: active.boundProjectionIx = active.lastBoundPerspectivePresetProjectionIx.value(); break; + case IPlanarProjection::CProjection::Orthographic: active.boundProjectionIx = active.lastBoundOrthoPresetProjectionIx.value(); break; + default: active.boundProjectionIx = std::nullopt; assert(false); break; } - }; - beginRenderpass(cb,renderpassInfo); + } } - // draw scene - { - const auto& camera = interface.camera; - float32_t3x4 viewMatrix = camera.getViewMatrix(); - float32_t4x4 viewProjMatrix = camera.getConcatenatedMatrix(); - const auto viewParams = CSimpleDebugRenderer::SViewParams(viewMatrix,viewProjMatrix); - // tear down scene every frame - auto& instance = m_renderer->m_instances[0]; - 
memcpy(&instance.world,&interface.model,sizeof(instance.world)); - instance.packedGeo = m_renderer->getGeometries().data() + interface.gcIndex; - m_renderer->render(cb,viewParams); - } - cb->endRenderPass(); - cb->endDebugMarker(); - } - { - cb->beginDebugMarker("UISampleApp IMGUI Frame"); + auto getPresetName = [&](auto ix) -> std::string { - auto scRes = static_cast(m_surface->getSwapchainResources()); - const IGPUCommandBuffer::SRenderpassBeginInfo renderpassInfo = + switch (selectedProjectionType) { - .framebuffer = scRes->getFramebuffer(device_base_t::getCurrentAcquire().imageIndex), - .colorClearValues = &clearValue, - .depthStencilClearValues = nullptr, - .renderArea = { - .offset = {0,0}, - .extent = {m_window->getWidth(),m_window->getHeight()} - } - }; - beginRenderpass(cb,renderpassInfo); - } - // draw ImGUI + case IPlanarProjection::CProjection::Perspective: return "Perspective Projection Preset " + std::to_string(ix); + case IPlanarProjection::CProjection::Orthographic: return "Orthographic Projection Preset " + std::to_string(ix); + default: return "Unknown Projection Preset " + std::to_string(ix); + } + }; + + bool updateBoundVirtualMaps = false; + if (ImGui::BeginCombo("Projection Preset", getPresetName(active.boundProjectionIx.value()).c_str())) { - auto* imgui = interface.imGUI.get(); - auto* pipeline = imgui->getPipeline(); - cb->bindGraphicsPipeline(pipeline); - // note that we use default UI pipeline layout where uiParams.resources.textures.setIx == uiParams.resources.samplers.setIx - const auto* ds = interface.subAllocDS->getDescriptorSet(); - cb->bindDescriptorSets(EPBP_GRAPHICS,pipeline->getLayout(),imgui->getCreationParameters().resources.texturesInfo.setIx,1u,&ds); - // a timepoint in the future to release streaming resources for geometry - const ISemaphore::SWaitInfo drawFinished = {.semaphore=m_semaphore.get(),.value=m_realFrameIx+1u}; - if (!imgui->render(cb,drawFinished)) + auto& projections = planarBound->getPlanarProjections(); + + 
for (uint32_t i = 0; i < projections.size(); ++i) { - m_logger->log("TODO: need to present acquired image before bailing because its already acquired.",ILogger::ELL_ERROR); - return {}; + const auto& projection = projections[i]; + const auto& params = projection.getParameters(); + + if (params.m_type != selectedProjectionType) + continue; + + bool isSelected = (i == active.boundProjectionIx.value()); + + if (ImGui::Selectable(getPresetName(i).c_str(), isSelected)) + { + active.boundProjectionIx = i; + updateBoundVirtualMaps |= true; + + switch (selectedProjectionType) + { + case IPlanarProjection::CProjection::Perspective: active.lastBoundPerspectivePresetProjectionIx = active.boundProjectionIx.value(); break; + case IPlanarProjection::CProjection::Orthographic: active.lastBoundOrthoPresetProjectionIx = active.boundProjectionIx.value(); break; + default: assert(false); break; + } + } + + if (isSelected) + ImGui::SetItemDefaultFocus(); } + ImGui::EndCombo(); } - cb->endRenderPass(); - cb->endDebugMarker(); - } - cb->end(); - //updateGUIDescriptorSet(); + auto* const boundCamera = planarBound->getCamera(); + auto& boundProjection = planarBound->getPlanarProjections()[active.boundProjectionIx.value()]; + assert(not boundProjection.isProjectionSingular()); - IQueue::SSubmitInfo::SSemaphoreInfo retval = - { - .semaphore = m_semaphore.get(), - .value = ++m_realFrameIx, - .stageMask = PIPELINE_STAGE_FLAGS::ALL_GRAPHICS_BITS - }; - const IQueue::SSubmitInfo::SCommandBufferInfo commandBuffers[] = - { - {.cmdbuf = cb } - }; - const IQueue::SSubmitInfo::SSemaphoreInfo acquired[] = { + auto updateParameters = boundProjection.getParameters(); + + if (useWindow) + ImGui::Checkbox("Allow axes to flip##allowAxesToFlip", &active.allowGizmoAxesToFlip); + + if(useWindow) + ImGui::Checkbox("Draw debug grid##drawDebugGrid", &active.enableDebugGridDraw); + + if (ImGui::RadioButton("LH", active.leftHandedProjection)) + active.leftHandedProjection = true; + + ImGui::SameLine(); + + if 
(ImGui::RadioButton("RH", not active.leftHandedProjection)) + active.leftHandedProjection = false; + + updateParameters.m_zNear = std::clamp(updateParameters.m_zNear, 0.1f, 100.f); + updateParameters.m_zFar = std::clamp(updateParameters.m_zFar, 110.f, 10000.f); + + ImGui::SliderFloat("zNear", &updateParameters.m_zNear, 0.1f, 100.f, "%.2f", ImGuiSliderFlags_Logarithmic); + ImGui::SliderFloat("zFar", &updateParameters.m_zFar, 110.f, 10000.f, "%.1f", ImGuiSliderFlags_Logarithmic); + + switch (selectedProjectionType) { - .semaphore = device_base_t::getCurrentAcquire().semaphore, - .value = device_base_t::getCurrentAcquire().acquireCount, - .stageMask = PIPELINE_STAGE_FLAGS::NONE + case IPlanarProjection::CProjection::Perspective: + { + ImGui::SliderFloat("Fov", &updateParameters.m_planar.perspective.fov, 20.f, 150.f, "%.1f", ImGuiSliderFlags_Logarithmic); + boundProjection.setPerspective(updateParameters.m_zNear, updateParameters.m_zFar, updateParameters.m_planar.perspective.fov); + } break; + + case IPlanarProjection::CProjection::Orthographic: + { + ImGui::SliderFloat("Ortho width", &updateParameters.m_planar.orthographic.orthoWidth, 1.f, 30.f, "%.1f", ImGuiSliderFlags_Logarithmic); + boundProjection.setOrthographic(updateParameters.m_zNear, updateParameters.m_zFar, updateParameters.m_planar.orthographic.orthoWidth); + } break; + + default: break; } - }; - const IQueue::SSubmitInfo infos[] = - { + { - .waitSemaphores = acquired, - .commandBuffers = commandBuffers, - .signalSemaphores = {&retval,1} + if (ImGui::TreeNodeEx("Cursor Behaviour")) + { + if (ImGui::RadioButton("Clamp to the window", !resetCursorToCenter)) + resetCursorToCenter = false; + if (ImGui::RadioButton("Reset to the window center", resetCursorToCenter)) + resetCursorToCenter = true; + ImGui::TreePop(); + } } - }; - - if (getGraphicsQueue()->submit(infos) != IQueue::RESULT::SUCCESS) - { - retval.semaphore = nullptr; // so that we don't wait on semaphore that will never signal - m_realFrameIx--; - } + 
{ + ImGuiIO& io = ImGui::GetIO(); - m_window->setCaption("[Nabla Engine] UI App Test Demo"); - return retval; - } + if (ImGui::IsKeyPressed(ImGuiKey_Space)) + enableActiveCameraMovement = !enableActiveCameraMovement; - protected: - const video::IGPURenderpass::SCreationParams::SSubpassDependency* getDefaultSubpassDependencies() const override - { - // Subsequent submits don't wait for each other, but they wait for acquire and get waited on by present - const static IGPURenderpass::SCreationParams::SSubpassDependency dependencies[] = { - // don't want any writes to be available, we'll clear, only thing to worry about is the layout transition - { - .srcSubpass = IGPURenderpass::SCreationParams::SSubpassDependency::External, - .dstSubpass = 0, - .memoryBarrier = { - .srcStageMask = PIPELINE_STAGE_FLAGS::NONE, // should sync against the semaphore wait anyway - .srcAccessMask = ACCESS_FLAGS::NONE, - // layout transition needs to finish before the color write - .dstStageMask = PIPELINE_STAGE_FLAGS::COLOR_ATTACHMENT_OUTPUT_BIT, - .dstAccessMask = ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT + if (enableActiveCameraMovement) + { + ImGui::TextColored(ImVec4(0.0f, 1.0f, 0.0f, 1.0f), "Bound Camera Movement: Enabled"); + io.ConfigFlags |= ImGuiConfigFlags_NoMouse; + io.MouseDrawCursor = false; + io.WantCaptureMouse = false; + + ImVec2 cursorPos = ImGui::GetMousePos(); + ImVec2 viewportSize = io.DisplaySize; + auto* cc = m_window->getCursorControl(); + int32_t posX = m_window->getX(); + int32_t posY = m_window->getY(); + + if (resetCursorToCenter) + { + const ICursorControl::SPosition middle{ static_cast(viewportSize.x / 2 + posX), static_cast(viewportSize.y / 2 + posY) }; + cc->setPosition(middle); + } + else + { + auto currentCursorPos = cc->getPosition(); + ICursorControl::SPosition newPos{}; + newPos.x = std::clamp(currentCursorPos.x, posX, viewportSize.x + posX); + newPos.y = std::clamp(currentCursorPos.y, posY, viewportSize.y + posY); + cc->setPosition(newPos); + } } - // 
leave view offsets and flags default - }, - // want layout transition to begin after all color output is done - { - .srcSubpass = 0, - .dstSubpass = IGPURenderpass::SCreationParams::SSubpassDependency::External, - .memoryBarrier = { - // last place where the color can get modified, depth is implicitly earlier - .srcStageMask = PIPELINE_STAGE_FLAGS::COLOR_ATTACHMENT_OUTPUT_BIT, - // only write ops, reads can't be made available - .srcAccessMask = ACCESS_FLAGS::COLOR_ATTACHMENT_WRITE_BIT - // spec says nothing is needed when presentation is the destination + else + { + ImGui::TextColored(ImVec4(1.0f, 0.0f, 0.0f, 1.0f), "Bound Camera Movement: Disabled"); + io.ConfigFlags &= ~ImGuiConfigFlags_NoMouse; + io.MouseDrawCursor = true; + io.WantCaptureMouse = true; } - // leave view offsets and flags default - }, - IGPURenderpass::SCreationParams::DependenciesEnd - }; - return dependencies; - } + - private: - inline void update(const std::chrono::microseconds nextPresentationTimestamp) - { - auto& camera = interface.camera; - camera.setMoveSpeed(interface.moveSpeed); - camera.setRotateSpeed(interface.rotateSpeed); + if (ImGui::IsItemHovered()) + { + ImGui::PushStyleColor(ImGuiCol_WindowBg, ImVec4(0.2f, 0.2f, 0.2f, 0.8f)); + ImGui::PushStyleColor(ImGuiCol_Border, ImVec4(1.0f, 1.0f, 1.0f, 1.0f)); + ImGui::PushStyleVar(ImGuiStyleVar_WindowBorderSize, 1.5f); + ImVec2 mousePos = ImGui::GetMousePos(); + ImGui::SetNextWindowPos(ImVec2(mousePos.x + 10, mousePos.y + 10), ImGuiCond_Always); - m_inputSystem->getDefaultMouse(&mouse); - m_inputSystem->getDefaultKeyboard(&keyboard); + ImGui::Begin("HoverOverlay", nullptr, + ImGuiWindowFlags_NoDecoration | + ImGuiWindowFlags_AlwaysAutoResize | + ImGuiWindowFlags_NoSavedSettings); - struct - { - std::vector mouse{}; - std::vector keyboard{}; - } uiEvents; + ImGui::Text("Press 'Space' to Enable/Disable bound planar camera movement"); - // TODO: should be a member really - static std::chrono::microseconds previousEventTimestamp{}; + 
ImGui::End(); - // I think begin/end should always be called on camera, just events shouldn't be fed, why? - // If you stop begin/end, whatever keys were up/down get their up/down values frozen leading to - // `perActionDt` becoming obnoxiously large the first time the even processing resumes due to - // `timeDiff` being computed since `lastVirtualUpTimeStamp` - camera.beginInputProcessing(nextPresentationTimestamp); - { - mouse.consumeEvents([&](const IMouseEventChannel::range_t& events) -> void - { - if (interface.move) - camera.mouseProcess(events); // don't capture the events, only let camera handle them with its impl + ImGui::PopStyleVar(); + ImGui::PopStyleColor(2); + } - for (const auto& e : events) // here capture + ImGui::Separator(); + + const auto flags = ImGuiTreeNodeFlags_DefaultOpen; + if (ImGui::TreeNodeEx("Bound Camera", flags)) + { + ImGui::Text("Type: %s", boundCamera->getIdentifier().data()); + ImGui::Text("Object Ix: %s", std::to_string(active.activePlanarIx + 1u).c_str()); + ImGui::Separator(); { - if (e.timeStamp < previousEventTimestamp) - continue; + auto* orbit = dynamic_cast(boundCamera); + + float moveSpeed = boundCamera->getMoveSpeedScale(); + float rotationSpeed = boundCamera->getRotationSpeedScale(); - previousEventTimestamp = e.timeStamp; - uiEvents.mouse.emplace_back(e); + ImGui::SliderFloat("Move speed factor", &moveSpeed, 0.0001f, 10.f, "%.4f", ImGuiSliderFlags_Logarithmic); + + if(not orbit) + ImGui::SliderFloat("Rotate speed factor", &rotationSpeed, 0.0001f, 10.f, "%.4f", ImGuiSliderFlags_Logarithmic); + + boundCamera->setMoveSpeedScale(moveSpeed); + boundCamera->setRotationSpeedScale(rotationSpeed); - if (e.type==nbl::ui::SMouseEvent::EET_SCROLL && m_renderer) { - interface.gcIndex += int16_t(core::sign(e.scrollEvent.verticalScroll)); - interface.gcIndex = core::clamp(interface.gcIndex,0ull,m_renderer->getGeometries().size()-1); + if (orbit) + { + float distance = orbit->getDistance(); + ImGui::SliderFloat("Distance", &distance, 
COrbitCamera::MinDistance, COrbitCamera::MaxDistance, "%.4f", ImGuiSliderFlags_Logarithmic); + orbit->setDistance(distance); + } } } - }, - m_logger.get() - ); - keyboard.consumeEvents([&](const IKeyboardEventChannel::range_t& events) -> void - { - if (interface.move) - camera.keyboardProcess(events); // don't capture the events, only let camera handle them with its impl - for (const auto& e : events) // here capture + if (ImGui::TreeNodeEx("World Data", flags)) { - if (e.timeStamp < previousEventTimestamp) - continue; + auto& gimbal = boundCamera->getGimbal(); + const auto position = getCastedVector(gimbal.getPosition()); + const auto& orientation = gimbal.getOrientation(); + const auto viewMatrix = getCastedMatrix(gimbal.getViewMatrix()); + + addMatrixTable("Position", ("PositionTable_" + activePlanarIxString).c_str(), 1, 3, &position[0], false); + addMatrixTable("Orientation (Quaternion)", ("OrientationTable_" + activePlanarIxString).c_str(), 1, 4, &orientation[0], false); + addMatrixTable("View Matrix", ("ViewMatrixTable_" + activePlanarIxString).c_str(), 3, 4, &viewMatrix[0][0], false); + ImGui::TreePop(); + } - previousEventTimestamp = e.timeStamp; - uiEvents.keyboard.emplace_back(e); + if (ImGui::TreeNodeEx("Virtual Event Mappings", flags)) + { + displayKeyMappingsAndVirtualStatesInline(&boundProjection); + ImGui::TreePop(); } - }, - m_logger.get() - ); - } - camera.endInputProcessing(nextPresentationTimestamp); - const auto cursorPosition = m_window->getCursorControl()->getPosition(); + ImGui::TreePop(); + } + } - ext::imgui::UI::SUpdateParameters params = - { - .mousePosition = float32_t2(cursorPosition.x,cursorPosition.y) - float32_t2(m_window->getX(),m_window->getY()), - .displaySize = {m_window->getWidth(),m_window->getHeight()}, - .mouseEvents = uiEvents.mouse, - .keyboardEvents = uiEvents.keyboard - }; + { + ImGuiIO& io = ImGui::GetIO(); + ImVec2 mousePos = ImGui::GetMousePos(); + ImVec2 viewportSize = io.DisplaySize; + auto* cc = 
m_window->getCursorControl(); - interface.objectName = m_scene->getInitParams().geometryNames[interface.gcIndex]; - interface.imGUI->update(params); - } + if (mousePos.x < 0.0f || mousePos.y < 0.0f || mousePos.x > viewportSize.x || mousePos.y > viewportSize.y) + { + if (not enableActiveCameraMovement) + cc->setVisible(true); + } + else + { + cc->setVisible(false); + } + } - void recreateFramebuffer(const uint16_t2 resolution) - { - auto createImageAndView = [&](E_FORMAT format)->smart_refctd_ptr - { - auto image = m_device->createImage({{ - .type = IGPUImage::ET_2D, - .samples = IGPUImage::ESCF_1_BIT, - .format = format, - .extent = {resolution.x,resolution.y,1}, - .mipLevels = 1, - .arrayLayers = 1, - .usage = IGPUImage::EUF_RENDER_ATTACHMENT_BIT|IGPUImage::EUF_SAMPLED_BIT - }}); - if (!m_device->allocate(image->getMemoryReqs(),image.get()).isValid()) - return nullptr; - IGPUImageView::SCreationParams params = { - .image = std::move(image), - .viewType = IGPUImageView::ET_2D, - .format = format - }; - params.subresourceRange.aspectMask = isDepthOrStencilFormat(format) ? 
IGPUImage::EAF_DEPTH_BIT:IGPUImage::EAF_COLOR_BIT; - return m_device->createImageView(std::move(params)); - }; - - smart_refctd_ptr colorView; - // detect window minimization - if (resolution.x<0x4000 && resolution.y<0x4000) - { - colorView = createImageAndView(finalSceneRenderFormat); - auto depthView = createImageAndView(sceneRenderDepthFormat); - m_framebuffer = m_device->createFramebuffer({ { - .renderpass = m_renderpass, - .depthStencilAttachments = &depthView.get(), - .colorAttachments = &colorView.get(), - .width = resolution.x, - .height = resolution.y - }}); - } - else - m_framebuffer = nullptr; - - // release previous slot and its image - interface.subAllocDS->multi_deallocate(0,1,&interface.renderColorViewDescIndex,{.semaphore=m_semaphore.get(),.value=m_realFrameIx}); - // - if (colorView) - { - interface.subAllocDS->multi_allocate(0,1,&interface.renderColorViewDescIndex); - // update descriptor set - IGPUDescriptorSet::SDescriptorInfo info = {}; - info.desc = colorView; - info.info.image.imageLayout = IGPUImage::LAYOUT::READ_ONLY_OPTIMAL; - const IGPUDescriptorSet::SWriteDescriptorSet write = { - .dstSet = interface.subAllocDS->getDescriptorSet(), - .binding = TexturesImGUIBindingIndex, - .arrayElement = interface.renderColorViewDescIndex, - .count = 1, - .info = &info - }; - m_device->updateDescriptorSets({&write,1},{}); + ImGui::End(); } - interface.transformParams.sceneTexDescIx = interface.renderColorViewDescIndex; } - inline void beginRenderpass(IGPUCommandBuffer* cb, const IGPUCommandBuffer::SRenderpassBeginInfo& info) + inline void TransformEditor() { - cb->beginRenderPass(info,IGPUCommandBuffer::SUBPASS_CONTENTS::INLINE); - cb->setScissor(0,1,&info.renderArea); - const SViewport viewport = { - .x = 0, - .y = 0, - .width = static_cast(info.renderArea.extent.width), - .height = static_cast(info.renderArea.extent.height) - }; - cb->setViewport(0u,1u,&viewport); - } + static float bounds[] = { -0.5f, -0.5f, -0.5f, 0.5f, 0.5f, 0.5f }; + static float 
boundsSnap[] = { 0.1f, 0.1f, 0.1f }; + static bool boundSizing = false; + static bool boundSizingSnap = false; - // Maximum frames which can be simultaneously submitted, used to cycle through our per-frame resources like command buffers - constexpr static inline uint32_t MaxFramesInFlight = 3u; - constexpr static inline auto sceneRenderDepthFormat = EF_D32_SFLOAT; - constexpr static inline auto finalSceneRenderFormat = EF_R8G8B8A8_SRGB; - constexpr static inline auto TexturesImGUIBindingIndex = 0u; - // we create the Descriptor Set with a few slots extra to spare, so we don't have to `waitIdle` the device whenever ImGUI virtual window resizes - constexpr static inline auto MaxImGUITextures = 2u+MaxFramesInFlight; - - // - smart_refctd_ptr m_scene; - smart_refctd_ptr m_renderpass; - smart_refctd_ptr m_renderer; - smart_refctd_ptr m_framebuffer; - // - smart_refctd_ptr m_semaphore; - uint64_t m_realFrameIx = 0; - std::array,MaxFramesInFlight> m_cmdBufs; - // - InputSystem::ChannelReader mouse; - InputSystem::ChannelReader keyboard; - // UI stuff - struct CInterface - { - void operator()() + ImGuiIO& io = ImGui::GetIO(); + + // setup + { + const ImGuiCond windowCond = m_ciMode ? ImGuiCond_Always : ImGuiCond_Appearing; + ImGui::SetNextWindowPos({ wInit.trsEditor.iPos.x, wInit.trsEditor.iPos.y }, windowCond); + ImGui::SetNextWindowSize({ wInit.trsEditor.iSize.x, wInit.trsEditor.iSize.y }, windowCond); + } + + ImGui::Begin("TRS Editor"); { - ImGuiIO& io = ImGui::GetIO(); + const size_t objectsCount = m_planarProjections.size() + 1u; + assert(objectsCount); - // TODO: why is this a lambda and not just an assignment in a scope ? - camera.setProjectionMatrix([&]() + std::vector sbels(objectsCount); + for (size_t i = 0; i < objectsCount; ++i) + sbels[i] = "Object " + std::to_string(i); + + std::vector labels(objectsCount); + for (size_t i = 0; i < objectsCount; ++i) + labels[i] = sbels[i].c_str(); + + int activeObject = boundCameraToManipulate ? 
static_cast(boundPlanarCameraIxToManipulate.value() + 1u) : 0; + if (ImGui::Combo("Active Object", &activeObject, labels.data(), static_cast(labels.size()))) { - hlsl::float32_t4x4 projection; + const auto newActiveObject = static_cast(activeObject); - if (isPerspective) - if(isLH) - projection = hlsl::math::thin_lens::lhPerspectiveFovMatrix(core::radians(fov), io.DisplaySize.x / io.DisplaySize.y, zNear, zFar); - else - projection = hlsl::math::thin_lens::rhPerspectiveFovMatrix(core::radians(fov), io.DisplaySize.x / io.DisplaySize.y, zNear, zFar); - else + if (newActiveObject) // camera { - float viewHeight = viewWidth * io.DisplaySize.y / io.DisplaySize.x; - - if(isLH) - projection = hlsl::math::thin_lens::lhPerspectiveFovMatrix(viewWidth, viewHeight, zNear, zFar); - else - projection = hlsl::math::thin_lens::rhPerspectiveFovMatrix(viewWidth, viewHeight, zNear, zFar); + boundPlanarCameraIxToManipulate = newActiveObject - 1u; + ICamera* const targetGimbalManipulationCamera = m_planarProjections[boundPlanarCameraIxToManipulate.value()]->getCamera(); + boundCameraToManipulate = smart_refctd_ptr(targetGimbalManipulationCamera); } + else // gc model + { + boundPlanarCameraIxToManipulate = std::nullopt; + boundCameraToManipulate = nullptr; + } + } + } - return projection; - }()); - - ImGuizmo::SetOrthographic(false); - ImGuizmo::BeginFrame(); + ImGuizmoModelM16InOut imguizmoModel; - ImGui::SetNextWindowPos(ImVec2(1024, 100), ImGuiCond_Appearing); - ImGui::SetNextWindowSize(ImVec2(256, 256), ImGuiCond_Appearing); + if (boundCameraToManipulate) + imguizmoModel.inTRS = getCastedMatrix(boundCameraToManipulate->getGimbal().template operator() < float64_t4x4 > ()); + else + imguizmoModel.inTRS = hlsl::transpose(getMatrix3x4As4x4(m_model)); - // create a window and insert the inspector - ImGui::SetNextWindowPos(ImVec2(10, 10), ImGuiCond_Appearing); - ImGui::SetNextWindowSize(ImVec2(320, 340), ImGuiCond_Appearing); - ImGui::Begin("Editor"); + imguizmoModel.outTRS = 
imguizmoModel.inTRS; + float* m16TRSmatrix = &imguizmoModel.outTRS[0][0]; - if (ImGui::RadioButton("Full view", !transformParams.useWindow)) - transformParams.useWindow = false; + std::string indent; + if (boundCameraToManipulate) + indent = boundCameraToManipulate->getIdentifier(); + else + indent = "Geometry Creator Object"; - ImGui::SameLine(); + ImGui::Text("Identifier: \"%s\"", indent.c_str()); + { + if (ImGuizmo::IsUsingAny()) + ImGui::TextColored(ImVec4(0.0f, 1.0f, 0.0f, 1.0f), "Gizmo: In Use"); + else + ImGui::TextColored(ImVec4(1.0f, 1.0f, 0.0f, 1.0f), "Gizmo: Idle"); - if (ImGui::RadioButton("Window", transformParams.useWindow)) - transformParams.useWindow = true; + if (ImGui::IsItemHovered()) + { + ImGui::PushStyleColor(ImGuiCol_WindowBg, ImVec4(0.2f, 0.2f, 0.2f, 0.8f)); + ImGui::PushStyleColor(ImGuiCol_Border, ImVec4(1.0f, 1.0f, 1.0f, 1.0f)); + ImGui::PushStyleVar(ImGuiStyleVar_WindowBorderSize, 1.5f); - ImGui::Text("Camera"); - bool viewDirty = false; + ImVec2 mousePos = ImGui::GetMousePos(); + ImGui::SetNextWindowPos(ImVec2(mousePos.x + 10, mousePos.y + 10), ImGuiCond_Always); - if (ImGui::RadioButton("LH", isLH)) - isLH = true; + ImGui::Begin("HoverOverlay", nullptr, + ImGuiWindowFlags_NoDecoration | + ImGuiWindowFlags_AlwaysAutoResize | + ImGuiWindowFlags_NoSavedSettings); - ImGui::SameLine(); + ImGui::Text("Right-click and drag on the gizmo to manipulate the object."); - if (ImGui::RadioButton("RH", !isLH)) - isLH = false; + ImGui::End(); - if (ImGui::RadioButton("Perspective", isPerspective)) - isPerspective = true; + ImGui::PopStyleVar(); + ImGui::PopStyleColor(2); + } + } - ImGui::SameLine(); + ImGui::Separator(); - if (ImGui::RadioButton("Orthographic", !isPerspective)) - isPerspective = false; + if (!boundCameraToManipulate) + { + const auto& names = m_scene->getInitParams().geometryNames; + if (!names.empty()) + { + if (gcIndex >= names.size()) + gcIndex = 0; - ImGui::Checkbox("Enable \"view manipulate\"", 
&transformParams.enableViewManipulate); - ImGui::Checkbox("Enable camera movement", &move); - ImGui::SliderFloat("Move speed", &moveSpeed, 0.1f, 10.f); - ImGui::SliderFloat("Rotate speed", &rotateSpeed, 0.1f, 10.f); + if (ImGui::BeginCombo("Object Type", names[gcIndex].c_str())) + { + for (uint32_t i = 0u; i < names.size(); ++i) + { + const bool isSelected = (gcIndex == i); + if (ImGui::Selectable(names[i].c_str(), isSelected)) + gcIndex = static_cast(i); - // ImGui::Checkbox("Flip Gizmo's Y axis", &flipGizmoY); // let's not expose it to be changed in UI but keep the logic in case + if (isSelected) + ImGui::SetItemDefaultFocus(); + } + ImGui::EndCombo(); + } + } + } - if (isPerspective) - ImGui::SliderFloat("Fov", &fov, 20.f, 150.f); - else - ImGui::SliderFloat("Ortho width", &viewWidth, 1, 20); + addMatrixTable("Model (TRS) Matrix", "ModelMatrixTable", 4, 4, m16TRSmatrix); - ImGui::SliderFloat("zNear", &zNear, 0.1f, 100.f); - ImGui::SliderFloat("zFar", &zFar, 110.f, 10000.f); + if (ImGui::RadioButton("Translate", mCurrentGizmoOperation == ImGuizmo::TRANSLATE)) + mCurrentGizmoOperation = ImGuizmo::TRANSLATE; - viewDirty |= ImGui::SliderFloat("Distance", &transformParams.camDistance, 1.f, 69.f); + ImGui::SameLine(); + if (ImGui::RadioButton("Rotate", mCurrentGizmoOperation == ImGuizmo::ROTATE)) + mCurrentGizmoOperation = ImGuizmo::ROTATE; + ImGui::SameLine(); + if (ImGui::RadioButton("Scale", mCurrentGizmoOperation == ImGuizmo::SCALE)) + mCurrentGizmoOperation = ImGuizmo::SCALE; - if (viewDirty || firstFrame) - { - core::vectorSIMDf cameraPosition(cosf(camYAngle)* cosf(camXAngle)* transformParams.camDistance, sinf(camXAngle)* transformParams.camDistance, sinf(camYAngle)* cosf(camXAngle)* transformParams.camDistance); - core::vectorSIMDf cameraTarget(0.f, 0.f, 0.f); - const static core::vectorSIMDf up(0.f, 1.f, 0.f); + float32_t3 matrixTranslation, matrixRotation, matrixScale; + IGimbalController::input_imguizmo_event_t decomposed, recomposed; + 
imguizmoModel.outDeltaTRS = IGimbalController::input_imguizmo_event_t(1); - camera.setPosition(cameraPosition); - camera.setTarget(cameraTarget); - camera.setBackupUpVector(up); + ImGuizmo::DecomposeMatrixToComponents(m16TRSmatrix, &matrixTranslation[0], &matrixRotation[0], &matrixScale[0]); + decomposed = *reinterpret_cast(m16TRSmatrix); + { + ImGuiInputTextFlags flags = 0; - camera.recomputeViewMatrix(); - } - firstFrame = false; + ImGui::InputFloat3("Tr", &matrixTranslation[0], "%.3f", flags); + ImGui::InputFloat3("Rt", &matrixRotation[0], "%.3f", flags); + ImGui::InputFloat3("Sc", &matrixScale[0], "%.3f", flags); + } + ImGuizmo::RecomposeMatrixFromComponents(&matrixTranslation[0], &matrixRotation[0], &matrixScale[0], m16TRSmatrix); + recomposed = *reinterpret_cast(m16TRSmatrix); - ImGui::Text("X: %f Y: %f", io.MousePos.x, io.MousePos.y); - if (ImGuizmo::IsUsing()) - { - ImGui::Text("Using gizmo"); - } - else - { - ImGui::Text(ImGuizmo::IsOver() ? "Over gizmo" : ""); - ImGui::SameLine(); - ImGui::Text(ImGuizmo::IsOver(ImGuizmo::TRANSLATE) ? "Over translate gizmo" : ""); - ImGui::SameLine(); - ImGui::Text(ImGuizmo::IsOver(ImGuizmo::ROTATE) ? "Over rotate gizmo" : ""); - ImGui::SameLine(); - ImGui::Text(ImGuizmo::IsOver(ImGuizmo::SCALE) ? 
"Over scale gizmo" : ""); - } - ImGui::Separator(); + if (mCurrentGizmoOperation != ImGuizmo::SCALE) + { + if (ImGui::RadioButton("Local", mCurrentGizmoMode == ImGuizmo::LOCAL)) + mCurrentGizmoMode = ImGuizmo::LOCAL; + ImGui::SameLine(); + if (ImGui::RadioButton("World", mCurrentGizmoMode == ImGuizmo::WORLD)) + mCurrentGizmoMode = ImGuizmo::WORLD; + } - /* - * ImGuizmo expects view & perspective matrix to be column major both with 4x4 layout - * and Nabla uses row major matricies - 3x4 matrix for view & 4x4 for projection + ImGui::Checkbox(" ", &useSnap); + ImGui::SameLine(); + switch (mCurrentGizmoOperation) + { + case ImGuizmo::TRANSLATE: + ImGui::InputFloat3("Snap", &snap[0]); + break; + case ImGuizmo::ROTATE: + ImGui::InputFloat("Angle Snap", &snap[0]); + break; + case ImGuizmo::SCALE: + ImGui::InputFloat("Scale Snap", &snap[0]); + break; + } - - VIEW: + ImGui::End(); + { + // generate virtual events given delta TRS matrix + if (boundCameraToManipulate) + { + const float pmSpeed = boundCameraToManipulate->getMoveSpeedScale(); + const float prSpeed = boundCameraToManipulate->getRotationSpeedScale(); - ImGuizmo + boundCameraToManipulate->setMoveSpeedScale(1); + boundCameraToManipulate->setRotationSpeedScale(1); - | X[0] Y[0] Z[0] 0.0f | - | X[1] Y[1] Z[1] 0.0f | - | X[2] Y[2] Z[2] 0.0f | - | -Dot(X, eye) -Dot(Y, eye) -Dot(Z, eye) 1.0f | + auto referenceFrame = getCastedMatrix(imguizmoModel.outTRS); + boundCameraToManipulate->manipulate({}, &referenceFrame); - Nabla + boundCameraToManipulate->setMoveSpeedScale(pmSpeed); + boundCameraToManipulate->setRotationSpeedScale(prSpeed); - | X[0] X[1] X[2] -Dot(X, eye) | - | Y[0] Y[1] Y[2] -Dot(Y, eye) | - | Z[0] Z[1] Z[2] -Dot(Z, eye) | + /* + { + static std::vector virtualEvents(0x45); - = transpose(nbl::core::matrix4SIMD()) + if (not enableActiveCameraMovement) + { + uint32_t vCount = {}; - - PERSPECTIVE [PROJECTION CASE]: + boundCameraToManipulate->beginInputProcessing(m_nextPresentationTimestamp); + { + 
boundCameraToManipulate->process(nullptr, vCount); - ImGuizmo + if (virtualEvents.size() < vCount) + virtualEvents.resize(vCount); - | (temp / temp2) (0.0) (0.0) (0.0) | - | (0.0) (temp / temp3) (0.0) (0.0) | - | ((right + left) / temp2) ((top + bottom) / temp3) ((-zfar - znear) / temp4) (-1.0f) | - | (0.0) (0.0) ((-temp * zfar) / temp4) (0.0) | + IGimbalController::SUpdateParameters params; + params.imguizmoEvents = { { imguizmoModel.outDeltaTRS } }; + boundCameraToManipulate->process(virtualEvents.data(), vCount, params); + } + boundCameraToManipulate->endInputProcessing(); - Nabla + // I start to think controller should be able to set sensitivity to scale magnitudes of generated events + // in order for camera to not keep any magnitude scalars like move or rotation speed scales - | w (0.0) (0.0) (0.0) | - | (0.0) -h (0.0) (0.0) | - | (0.0) (0.0) (-zFar/(zFar-zNear)) (-zNear*zFar/(zFar-zNear)) | - | (0.0) (0.0) (-1.0) (0.0) | + if (vCount) + { + const float pmSpeed = boundCameraToManipulate->getMoveSpeedScale(); + const float prSpeed = boundCameraToManipulate->getRotationSpeedScale(); - = transpose() + boundCameraToManipulate->setMoveSpeedScale(1); + boundCameraToManipulate->setRotationSpeedScale(1); - * - * the ViewManipulate final call (inside EditTransform) returns world space column major matrix for an object, - * note it also modifies input view matrix but projection matrix is immutable - */ + auto referenceFrame = getCastedMatrix(imguizmoModel.outTRS); + boundCameraToManipulate->manipulate({ virtualEvents.data(), vCount }, &referenceFrame); - static struct + boundCameraToManipulate->setMoveSpeedScale(pmSpeed); + boundCameraToManipulate->setRotationSpeedScale(prSpeed); + } + } + } + */ + } + else { - hlsl::float32_t4x4 view, projection, model; - } imguizmoM16InOut; - - ImGuizmo::SetID(0u); + // for scene demo model full affine transformation without limits is assumed + m_model = float32_t3x4(hlsl::transpose(imguizmoModel.outTRS)); + } + } + } - 
imguizmoM16InOut.view = hlsl::transpose(hlsl::math::linalg::promote_affine<4,4,3,4>(camera.getViewMatrix())); - imguizmoM16InOut.projection = hlsl::transpose(camera.getProjectionMatrix()); - imguizmoM16InOut.model = hlsl::transpose(hlsl::math::linalg::promote_affine<4,4,3,4>(model)); + inline void addMatrixTable(const char* topText, const char* tableName, int rows, int columns, const float* pointer, bool withSeparator = true) + { + ImGui::Text(topText); + ImGui::PushStyleColor(ImGuiCol_TableRowBg, ImGui::GetStyleColorVec4(ImGuiCol_ChildBg)); + ImGui::PushStyleColor(ImGuiCol_TableRowBgAlt, ImGui::GetStyleColorVec4(ImGuiCol_WindowBg)); + if (ImGui::BeginTable(tableName, columns, ImGuiTableFlags_Borders | ImGuiTableFlags_RowBg | ImGuiTableFlags_SizingStretchSame)) + { + for (int y = 0; y < rows; ++y) { - if (flipGizmoY) // note we allow to flip gizmo just to match our coordinates - imguizmoM16InOut.projection[1][1] *= -1.f; // https://johannesugb.github.io/gpu-programming/why-do-opengl-proj-matrices-fail-in-vulkan/ - - transformParams.editTransformDecomposition = true; - sceneResolution = EditTransform(&imguizmoM16InOut.view[0][0], &imguizmoM16InOut.projection[0][0], &imguizmoM16InOut.model[0][0], transformParams); + ImGui::TableNextRow(); + for (int x = 0; x < columns; ++x) + { + ImGui::TableSetColumnIndex(x); + if (pointer) + ImGui::Text("%.3f", *(pointer + (y * columns) + x)); + else + ImGui::Text("-"); + } } + ImGui::EndTable(); + } + ImGui::PopStyleColor(2); + if (withSeparator) + ImGui::Separator(); + } - model = hlsl::math::linalg::truncate<3,4,4,4>(hlsl::transpose(imguizmoM16InOut.model)); - // to Nabla + update camera & model matrices -// TODO: make it more nicely, extract: -// - Position by computing inverse of the view matrix and grabbing its translation -// - Target from 3rd row without W component of view matrix multiplied by some arbitrary distance value (can be the length of position from origin) and adding the position -// But then set the view matrix 
this way anyway, because up-vector may not be compatible - const auto& view = camera.getViewMatrix(); - const_cast(view) = hlsl::math::linalg::truncate<3,4,4,4>(hlsl::transpose(imguizmoM16InOut.view)); // a hack, correct way would be to use inverse matrix and get position + target because now it will bring you back to last position & target when switching from gizmo move to manual move (but from manual to gizmo is ok) - // update concatanated matrix - const auto& projection = camera.getProjectionMatrix(); - camera.setProjectionMatrix(projection); + std::chrono::seconds timeout = std::chrono::seconds(0x7fffFFFFu); + clock_t::time_point start; - // object meta display - { - ImGui::Begin("Object"); - ImGui::Text("type: \"%s\"", objectName.data()); - ImGui::End(); - } - - // view matrices editor - { - ImGui::Begin("Matrices"); + //! One window & surface + smart_refctd_ptr> m_surface; + smart_refctd_ptr m_window; + // We can't use the same semaphore for acquire and present, because that would disable "Frames in Flight" by syncing previous present against next acquire. + // At least two timelines must be used. + smart_refctd_ptr m_semaphore; + // Maximum frames which can be simultaneously submitted, used to cycle through our per-frame resources like command buffers + constexpr static inline uint32_t MaxFramesInFlight = 3u; + // Use a separate counter to cycle through our resources because `getAcquireCount()` increases upon spontaneous resizes with immediate blit-presents + uint64_t m_realFrameIx = 0; + // We'll write to the Triple Buffer with a Renderpass + core::smart_refctd_ptr m_renderpass = {}; + // These are atomic counters where the Surface lets us know what's the latest Blit timeline semaphore value which will be signalled on the resource + std::array m_blitWaitValues; + // Enough Command Buffers and other resources for all frames in flight! 
+ std::array, MaxFramesInFlight> m_cmdBufs; + // Our own persistent images that don't get recreated with the swapchain + std::array, MaxFramesInFlight> m_tripleBuffers; + // Resources derived from the images + std::array, MaxFramesInFlight> m_framebuffers = {}; + // We will use it to get some asset stuff like geometry creator + smart_refctd_ptr m_assetManager; + // Input system for capturing system events + core::smart_refctd_ptr m_inputSystem; + // Handles mouse events + InputSystem::ChannelReader mouse; + // Handles keyboard events + InputSystem::ChannelReader keyboard; + //! next presentation timestamp + std::chrono::microseconds m_nextPresentationTimestamp = {}; - auto addMatrixTable = [&](const char* topText, const char* tableName, const int rows, const int columns, const float* pointer, const bool withSeparator = true) - { - ImGui::Text(topText); - if (ImGui::BeginTable(tableName, columns)) - { - for (int y = 0; y < rows; ++y) - { - ImGui::TableNextRow(); - for (int x = 0; x < columns; ++x) - { - ImGui::TableSetColumnIndex(x); - ImGui::Text("%.3f", *(pointer + (y * columns) + x)); - } - } - ImGui::EndTable(); - } + core::smart_refctd_ptr m_descriptorSetPool; - if (withSeparator) - ImGui::Separator(); - }; + struct CRenderUI + { + nbl::core::smart_refctd_ptr manager; + + struct + { + core::smart_refctd_ptr gui, scene; + } samplers; - addMatrixTable("Model Matrix", "ModelMatrixTable", 3, 4, &model[0][0]); - addMatrixTable("Camera View Matrix", "ViewMatrixTable", 3, 4, &view[0][0]); - addMatrixTable("Camera View Projection Matrix", "ViewProjectionMatrixTable", 4, 4, &projection[0][0], false); + core::smart_refctd_ptr descriptorSet; + }; - ImGui::End(); - } + // one model object in the world, testing multiuple cameraz for which view is rendered to separate frame buffers (so what they see) with new controller API including imguizmo + nbl::hlsl::float32_t3x4 m_model = nbl::hlsl::float32_t3x4(1.f); - // Nabla Imgui backend MDI buffer info - // To be 100% accurate 
and not overly conservative we'd have to explicitly `cull_frees` and defragment each time, - // so unless you do that, don't use this basic info to optimize the size of your IMGUI buffer. - { - auto* streaminingBuffer = imGUI->getStreamingBuffer(); + // if we had working IObjectTransform or something similar then it would be it instead, it is "last manipulated object" I need for TRS editor + // in reality we should store range of those IObjectTransforem interface range & index to object representing last manipulated one + nbl::core::smart_refctd_ptr boundCameraToManipulate = nullptr; + std::optional boundPlanarCameraIxToManipulate = std::nullopt; - const size_t total = streaminingBuffer->get_total_size(); // total memory range size for which allocation can be requested - const size_t freeSize = streaminingBuffer->getAddressAllocator().get_free_size(); // max total free bloock memory size we can still allocate from total memory available - const size_t consumedMemory = total - freeSize; // memory currently consumed by streaming buffer + std::vector> m_planarProjections; - float freePercentage = 100.0f * (float)(freeSize) / (float)total; - float allocatedPercentage = (float)(consumedMemory) / (float)total; + bool enableActiveCameraMovement = false; - ImVec2 barSize = ImVec2(400, 30); - float windowPadding = 10.0f; - float verticalPadding = ImGui::GetStyle().FramePadding.y; + bool resetCursorToCenter = false; - ImGui::SetNextWindowSize(ImVec2(barSize.x + 2 * windowPadding, 110 + verticalPadding), ImGuiCond_Always); - ImGui::Begin("Nabla Imgui MDI Buffer Info", nullptr, ImGuiWindowFlags_NoResize | ImGuiWindowFlags_NoScrollbar); + struct windowControlBinding + { + nbl::core::smart_refctd_ptr sceneFramebuffer; + nbl::core::smart_refctd_ptr sceneColorView; + nbl::core::smart_refctd_ptr sceneDepthView; + float32_t3x4 viewMatrix = float32_t3x4(1.f); + float32_t4x4 viewProjMatrix = float32_t4x4(1.f); - ImGui::Text("Total Allocated Size: %zu bytes", total); - ImGui::Text("In 
use: %zu bytes", consumedMemory); - ImGui::Text("Buffer Usage:"); + uint32_t activePlanarIx = 0u; + bool allowGizmoAxesToFlip = false; + bool enableDebugGridDraw = true; + float aspectRatio = 16.f / 9.f; + bool leftHandedProjection = true; - ImGui::SetCursorPosX(windowPadding); + std::optional boundProjectionIx = std::nullopt, lastBoundPerspectivePresetProjectionIx = std::nullopt, lastBoundOrthoPresetProjectionIx = std::nullopt; - if (freePercentage > 70.0f) - ImGui::PushStyleColor(ImGuiCol_PlotHistogram, ImVec4(0.0f, 1.0f, 0.0f, 0.4f)); // Green - else if (freePercentage > 30.0f) - ImGui::PushStyleColor(ImGuiCol_PlotHistogram, ImVec4(1.0f, 1.0f, 0.0f, 0.4f)); // Yellow - else - ImGui::PushStyleColor(ImGuiCol_PlotHistogram, ImVec4(1.0f, 0.0f, 0.0f, 0.4f)); // Red + inline void pickDefaultProjections(const planar_projections_range_t& projections) + { + auto init = [&](std::optional& presetix, IPlanarProjection::CProjection::ProjectionType requestedType) -> void + { + for (uint32_t i = 0u; i < projections.size(); ++i) + { + const auto& params = projections[i].getParameters(); + if (params.m_type == requestedType) + { + presetix = i; + break; + } + } - ImGui::ProgressBar(allocatedPercentage, barSize, ""); + assert(presetix.has_value()); + }; - ImGui::PopStyleColor(); + init(lastBoundPerspectivePresetProjectionIx = std::nullopt, IPlanarProjection::CProjection::Perspective); + init(lastBoundOrthoPresetProjectionIx = std::nullopt, IPlanarProjection::CProjection::Orthographic); + boundProjectionIx = lastBoundPerspectivePresetProjectionIx.value(); + } + }; - ImDrawList* drawList = ImGui::GetWindowDrawList(); + static constexpr inline auto MaxSceneFBOs = 2u; + std::array windowBindings; + uint32_t activeRenderWindowIx = 0u; - ImVec2 progressBarPos = ImGui::GetItemRectMin(); - ImVec2 progressBarSize = ImGui::GetItemRectSize(); + // UI font atlas + viewport FBO color attachment textures + constexpr static inline auto TotalUISampleTexturesAmount = 1u + MaxSceneFBOs; - const 
char* text = "%.2f%% free"; - char textBuffer[64]; - snprintf(textBuffer, sizeof(textBuffer), text, freePercentage); + nbl::core::smart_refctd_ptr m_scene; + nbl::core::smart_refctd_ptr m_sceneRenderpass; + nbl::core::smart_refctd_ptr m_renderer; - ImVec2 textSize = ImGui::CalcTextSize(textBuffer); - ImVec2 textPos = ImVec2 - ( - progressBarPos.x + (progressBarSize.x - textSize.x) * 0.5f, - progressBarPos.y + (progressBarSize.y - textSize.y) * 0.5f - ); + CRenderUI m_ui; + video::CDumbPresentationOracle oracle; + uint16_t gcIndex = {}; - ImVec4 bgColor = ImGui::GetStyleColorVec4(ImGuiCol_WindowBg); - drawList->AddRectFilled - ( - ImVec2(textPos.x - 5, textPos.y - 2), - ImVec2(textPos.x + textSize.x + 5, textPos.y + textSize.y + 2), - ImGui::GetColorU32(bgColor) - ); + static constexpr uint32_t CiFramesBeforeCapture = 10u; + bool m_ciMode = false; + bool m_ciScreenshotDone = false; + uint32_t m_ciFrameCounter = 0u; + system::path m_ciScreenshotPath; - ImGui::SetCursorScreenPos(textPos); - ImGui::Text("%s", textBuffer); + const bool flipGizmoY = true; - ImGui::Dummy(ImVec2(0.0f, verticalPadding)); + float camYAngle = 165.f / 180.f * 3.14159f; + float camXAngle = 32.f / 180.f * 3.14159f; + float camDistance = 8.f; + bool useWindow = true, useSnap = false; + ImGuizmo::OPERATION mCurrentGizmoOperation = ImGuizmo::TRANSLATE; + ImGuizmo::MODE mCurrentGizmoMode = ImGuizmo::LOCAL; + float snap[3] = { 1.f, 1.f, 1.f }; - ImGui::End(); - } + bool firstFrame = true; + const float32_t2 iPaddingOffset = float32_t2(10, 10); - ImGui::End(); - } + struct ImWindowInit + { + float32_t2 iPos, iSize; + }; - smart_refctd_ptr imGUI; - // descriptor set - smart_refctd_ptr subAllocDS; - SubAllocatedDescriptorSet::value_type renderColorViewDescIndex = SubAllocatedDescriptorSet::invalid_value; - // - Camera camera = Camera(core::vectorSIMDf(0, 0, 0), core::vectorSIMDf(0, 0, 0), hlsl::float32_t4x4()); - // mutables - hlsl::float32_t3x4 model = hlsl::math::linalg::diagonal(1.0f); - 
std::string_view objectName; - TransformRequestParams transformParams; - uint16_t2 sceneResolution = {1280,720}; - float fov = 60.f, zNear = 0.1f, zFar = 10000.f, moveSpeed = 1.f, rotateSpeed = 1.f; - float viewWidth = 10.f; - float camYAngle = 165.f / 180.f * 3.14159f; - float camXAngle = 32.f / 180.f * 3.14159f; - uint16_t gcIndex = {}; // note: this is dirty however since I assume only single object in scene I can leave it now, when this example is upgraded to support multiple objects this needs to be changed - bool isPerspective = true, isLH = true, flipGizmoY = true, move = false; - bool firstFrame = true; - } interface; + struct + { + ImWindowInit trsEditor; + ImWindowInit planars; + std::array renderWindows; + } wInit; }; -NBL_MAIN_FUNC(UISampleApp) \ No newline at end of file +NBL_MAIN_FUNC(UISampleApp) diff --git a/common/include/camera/CCubeProjection.hpp b/common/include/camera/CCubeProjection.hpp new file mode 100644 index 000000000..d47c5c6b8 --- /dev/null +++ b/common/include/camera/CCubeProjection.hpp @@ -0,0 +1,94 @@ +#ifndef _NBL_CCUBE_PROJECTION_HPP_ +#define _NBL_CCUBE_PROJECTION_HPP_ + +#include "IRange.hpp" +#include "IPerspectiveProjection.hpp" + +namespace nbl::hlsl +{ + +/** +* @brief A projection where each cube face is a perspective quad we project onto. +* +* Represents a cube projection given direction vector where each face of +* the cube is treated as a quad. The projection onto the cube is done using +* these quads and each face has its own unique pre-transform and +* view-port linear matrix. +*/ +class CCubeProjection final : public IPerspectiveProjection, public IProjection +{ +public: + //! Represents six face identifiers of a cube. + enum CubeFaces : uint8_t + { + //! Cube face in the +X base direction + PositiveX = 0, + + //! Cube face in the -X base direction + NegativeX, + + //! Cube face in the +Y base direction + PositiveY, + + //! Cube face in the -Y base direction + NegativeY, + + //! 
Cube face in the +Z base direction + PositiveZ, + + //! Cube face in the -Z base direction + NegativeZ, + + CubeFacesCount + }; + + inline static core::smart_refctd_ptr create(core::smart_refctd_ptr&& camera) + { + if (!camera) + return nullptr; + + return core::smart_refctd_ptr(new CCubeProjection(core::smart_refctd_ptr(camera)), core::dont_grab); + } + + virtual std::span getLinearProjections() const override + { + return { reinterpret_cast(m_quads.data()), m_quads.size() }; + } + + void transformCube() + { + // TODO: update m_quads + } + + virtual ProjectionType getProjectionType() const override { return ProjectionType::Cube; } + + virtual void project(const projection_vector_t& vecToProjectionSpace, projection_vector_t& output) const override + { + auto direction = normalize(vecToProjectionSpace); + + // TODO: project onto cube using quads representing faces + } + + virtual bool unproject(const projection_vector_t& vecFromProjectionSpace, projection_vector_t& output) const override + { + // TODO: return back direction vector? + } + + template + requires (FaceIx != CubeFacesCount) + inline const CProjection& getProjectionQuad() + { + return m_quads[FaceIx]; + } + +private: + CCubeProjection(core::smart_refctd_ptr&& camera) + : IPerspectiveProjection(core::smart_refctd_ptr(camera)) {} + virtual ~CCubeProjection() = default; + + std::array m_quads; +}; + +} // nbl::hlsl namespace + +#endif // _NBL_CCUBE_PROJECTION_HPP_ \ No newline at end of file diff --git a/common/include/camera/CFPSCamera.hpp b/common/include/camera/CFPSCamera.hpp new file mode 100644 index 000000000..95a750a08 --- /dev/null +++ b/common/include/camera/CFPSCamera.hpp @@ -0,0 +1,174 @@ +// Copyright (C) 2018-2020 - DevSH Graphics Programming Sp. z O.O. +// This file is part of the "Nabla Engine". 
+// For conditions of distribution and use, see copyright notice in nabla.h + +#ifndef _C_FPS_CAMERA_HPP_ +#define _C_FPS_CAMERA_HPP_ + +#include "ICamera.hpp" + +namespace nbl::hlsl // TODO: DIFFERENT NAMESPACE +{ + +// FPS Camera +class CFPSCamera final : public ICamera +{ +public: + using base_t = ICamera; + + CFPSCamera(const float64_t3& position, const glm::quat& orientation = glm::quat(1.0f, 0.0f, 0.0f, 0.0f)) + : base_t(), m_gimbal({ .position = position, .orientation = orientation }) + { + m_gimbal.begin(); + { + const auto& gForward = m_gimbal.getZAxis(); + const float gForwardX = static_cast(gForward.x); + const float gForwardY = static_cast(gForward.y); + const float gForwardZ = static_cast(gForward.z); + const float gPitch = glm::atan(glm::length(glm::vec2(gForwardX, gForwardZ)), gForwardY) - glm::half_pi(); + const float gYaw = glm::atan(gForwardX, gForwardZ); + auto test = glm::quat(glm::vec3(gPitch, gYaw, 0.0f)); + + + m_gimbal.setOrientation(test); + } + m_gimbal.end(); + } + ~CFPSCamera() = default; + + const base_t::keyboard_to_virtual_events_t getKeyboardMappingPreset() const override { return m_keyboard_to_virtual_events_preset; } + const base_t::mouse_to_virtual_events_t getMouseMappingPreset() const override { return m_mouse_to_virtual_events_preset; } + const base_t::imguizmo_to_virtual_events_t getImguizmoMappingPreset() const override { return m_imguizmo_to_virtual_events_preset; } + + const typename base_t::CGimbal& getGimbal() override + { + return m_gimbal; + } + + // rotation events IN RADIANS + + virtual bool manipulate(std::span virtualEvents, const float64_t4x4 const* referenceFrame = nullptr) override + { + // TODO: note, for FPS camera its assumed tilt is performed with respect to "world" up vector which is (0,1,0) + // but in reality its all about where -(gravity force) vector is, we can just add it and construct yaw quat with respect to this new custom vector instead + + if (not virtualEvents.size() and not referenceFrame) + 
return false; + + CReferenceTransform reference; + if (not m_gimbal.extractReferenceTransform(&reference, referenceFrame)) + return false; + + auto validateReference = [&]() + { + if (referenceFrame) + { + const auto& q = reference.orientation; + const float w = static_cast(q.w); + const float x = static_cast(q.x); + const float y = static_cast(q.y); + const float z = static_cast(q.z); + const float sinr_cosp = 2.f * (w * z + x * y); + const float cosr_cosp = 1.f - 2.f * (y * y + z * z); + const float roll = glm::degrees(glm::atan(sinr_cosp, cosr_cosp)); + const float absRoll = glm::abs(roll); + constexpr float epsilon = 1.e-4f; + + if (not (glm::epsilonEqual(absRoll, 0.f, epsilon) || glm::epsilonEqual(absRoll, 180.f, epsilon))) + return false; + } + + return true; + }; + + auto impulse = m_gimbal.accumulate(virtualEvents); + + bool manipulated = true; + + m_gimbal.begin(); + { + const auto rForward = glm::vec3(reference.frame[2]); + const float rPitch = glm::atan(glm::length(glm::vec2(rForward.x, rForward.z)), rForward.y) - glm::half_pi(); + const float gYaw = glm::atan(rForward.x, rForward.z); + const float newPitch = std::clamp(rPitch + impulse.dVirtualRotation.x * m_rotationSpeedScale, MinVerticalAngle, MaxVerticalAngle), newYaw = gYaw + impulse.dVirtualRotation.y * m_rotationSpeedScale; + + if(validateReference()) m_gimbal.setOrientation(glm::quat(glm::vec3(newPitch, newYaw, 0.0f))); + m_gimbal.setPosition(glm::vec3(reference.frame[3]) + reference.orientation * glm::vec3(impulse.dVirtualTranslate)); + } + m_gimbal.end(); + + manipulated &= bool(m_gimbal.getManipulationCounter()); + + if (manipulated) + m_gimbal.updateView(); + + return manipulated; + } + + virtual const uint32_t getAllowedVirtualEvents() override + { + return AllowedVirtualEvents; + } + + virtual const std::string_view getIdentifier() override + { + return "FPS Camera"; + } + +private: + + typename base_t::CGimbal m_gimbal; + + static inline constexpr auto AllowedVirtualEvents = 
CVirtualGimbalEvent::Translate | CVirtualGimbalEvent::Rotate; + static inline constexpr float MaxVerticalAngle = glm::radians(88.0f), MinVerticalAngle = -MaxVerticalAngle; + + static inline const auto m_keyboard_to_virtual_events_preset = []() + { + typename base_t::keyboard_to_virtual_events_t preset; + + preset[ui::E_KEY_CODE::EKC_W] = CVirtualGimbalEvent::MoveForward; + preset[ui::E_KEY_CODE::EKC_S] = CVirtualGimbalEvent::MoveBackward; + preset[ui::E_KEY_CODE::EKC_A] = CVirtualGimbalEvent::MoveLeft; + preset[ui::E_KEY_CODE::EKC_D] = CVirtualGimbalEvent::MoveRight; + preset[ui::E_KEY_CODE::EKC_I] = CVirtualGimbalEvent::TiltDown; + preset[ui::E_KEY_CODE::EKC_K] = CVirtualGimbalEvent::TiltUp; + preset[ui::E_KEY_CODE::EKC_J] = CVirtualGimbalEvent::PanLeft; + preset[ui::E_KEY_CODE::EKC_L] = CVirtualGimbalEvent::PanRight; + + return preset; + }(); + + static inline const auto m_mouse_to_virtual_events_preset = []() + { + typename base_t::mouse_to_virtual_events_t preset; + + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_POSITIVE_MOVEMENT_X] = CVirtualGimbalEvent::PanRight; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_NEGATIVE_MOVEMENT_X] = CVirtualGimbalEvent::PanLeft; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_POSITIVE_MOVEMENT_Y] = CVirtualGimbalEvent::TiltUp; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_NEGATIVE_MOVEMENT_Y] = CVirtualGimbalEvent::TiltDown; + + return preset; + }(); + + static inline const auto m_imguizmo_to_virtual_events_preset = []() + { + typename base_t::imguizmo_to_virtual_events_t preset; + + preset[CVirtualGimbalEvent::MoveForward] = CVirtualGimbalEvent::MoveForward; + preset[CVirtualGimbalEvent::MoveBackward] = CVirtualGimbalEvent::MoveBackward; + preset[CVirtualGimbalEvent::MoveLeft] = CVirtualGimbalEvent::MoveLeft; + preset[CVirtualGimbalEvent::MoveRight] = CVirtualGimbalEvent::MoveRight; + preset[CVirtualGimbalEvent::MoveUp] = CVirtualGimbalEvent::MoveUp; + preset[CVirtualGimbalEvent::MoveDown] = CVirtualGimbalEvent::MoveDown; + 
preset[CVirtualGimbalEvent::TiltDown] = CVirtualGimbalEvent::TiltDown; + preset[CVirtualGimbalEvent::TiltUp] = CVirtualGimbalEvent::TiltUp; + preset[CVirtualGimbalEvent::PanLeft] = CVirtualGimbalEvent::PanLeft; + preset[CVirtualGimbalEvent::PanRight] = CVirtualGimbalEvent::PanRight; + + return preset; + }(); +}; + +} + +#endif // _C_FPS_CAMERA_HPP_ diff --git a/common/include/camera/CFreeLockCamera.hpp b/common/include/camera/CFreeLockCamera.hpp new file mode 100644 index 000000000..2ca94c2af --- /dev/null +++ b/common/include/camera/CFreeLockCamera.hpp @@ -0,0 +1,194 @@ +// Copyright (C) 2018-2024 - DevSH Graphics Programming Sp. z O.O. +// This file is part of the "Nabla Engine". +// For conditions of distribution and use, see copyright notice in nabla.h + +#ifndef _C_FREE_CAMERA_HPP_ +#define _C_FREE_CAMERA_HPP_ + +#include "ICamera.hpp" + +#include "nbl/ext/ImGui/ImGui.h" +#include "imgui/imgui_internal.h" + +namespace nbl::hlsl // TODO: DIFFERENT NAMESPACE +{ + static inline IGimbal::VirtualImpulse sVirtualImpulse = {}; + static inline glm::mat4 sReferenceFrame = glm::mat4(1.0f); + static inline glm::quat sReferenceOrientation = {}; + + // TODO: DEBUG AND TEMPORARY + void ShowDebugWindow() + { + ImGui::Begin("Debug Window"); + + ImGui::Text("Translate deltas:"); + ImGui::Text(" x: %.3f", sVirtualImpulse.dVirtualTranslate.x); + ImGui::Text(" y: %.3f", sVirtualImpulse.dVirtualTranslate.y); + ImGui::Text(" z: %.3f", sVirtualImpulse.dVirtualTranslate.z); + + ImGui::Separator(); + + ImGui::Text("Rotation deltas:"); + ImGui::Text(" x: %.3f", sVirtualImpulse.dVirtualRotation.x); + ImGui::Text(" y: %.3f", sVirtualImpulse.dVirtualRotation.y); + ImGui::Text(" z: %.3f", sVirtualImpulse.dVirtualRotation.z); + + ImGui::Separator(); + + ImGui::Text("Scale deltas:"); + ImGui::Text(" x: %.3f", sVirtualImpulse.dVirtualScale.x); + ImGui::Text(" y: %.3f", sVirtualImpulse.dVirtualScale.y); + ImGui::Text(" z: %.3f", sVirtualImpulse.dVirtualScale.z); + + ImGui::Separator(); + + 
ImGui::Text("Reference frame:"); + + for (int row = 0; row < 4; ++row) + { + ImGui::Text("%.3f %.3f %.3f %.3f", + sReferenceFrame[0][row], + sReferenceFrame[1][row], + sReferenceFrame[2][row], + sReferenceFrame[3][row]); + } + + ImGui::Text("Reference orientation:"); + + ImGui::Text("%.3f %.3f %.3f %.3f", + sReferenceOrientation.x, + sReferenceOrientation.y, + sReferenceOrientation.z, + sReferenceOrientation.w); + + ImGui::End(); + } + +// Free Lock Camera +class CFreeCamera final : public ICamera +{ +public: + using base_t = ICamera; + + CFreeCamera(const float64_t3& position, const glm::quat& orientation = glm::quat(1.0f, 0.0f, 0.0f, 0.0f)) + : base_t(), m_gimbal({ .position = position, .orientation = orientation }) {} + ~CFreeCamera() = default; + + const base_t::keyboard_to_virtual_events_t getKeyboardMappingPreset() const override { return m_keyboard_to_virtual_events_preset; } + const base_t::mouse_to_virtual_events_t getMouseMappingPreset() const override { return m_mouse_to_virtual_events_preset; } + const base_t::imguizmo_to_virtual_events_t getImguizmoMappingPreset() const override { return m_imguizmo_to_virtual_events_preset; } + + const typename base_t::CGimbal& getGimbal() override + { + return m_gimbal; + } + + virtual bool manipulate(std::span virtualEvents, const float64_t4x4 const* referenceFrame = nullptr) override + { + if (not virtualEvents.size() and not referenceFrame) + return false; + + CReferenceTransform reference; + if (not m_gimbal.extractReferenceTransform(&reference, referenceFrame)) + return false; + + auto impulse = m_gimbal.accumulate(virtualEvents); + + bool manipulated = true; + + // TODO: DEBUG AND TEMPORARY + { + sVirtualImpulse = impulse; + auto cast = getCastedMatrix(reference.frame);; + memcpy(&sReferenceFrame, &cast, sizeof(sReferenceFrame)); + sReferenceOrientation = reference.orientation; + } + + m_gimbal.begin(); + { + glm::quat pitch = glm::angleAxis(impulse.dVirtualRotation.x, glm::vec3(reference.frame[0])); + glm::quat 
yaw = glm::angleAxis(impulse.dVirtualRotation.y, glm::vec3(reference.frame[1])); + glm::quat roll = glm::angleAxis(impulse.dVirtualRotation.z, glm::vec3(reference.frame[2])); + + m_gimbal.setOrientation(yaw * pitch * roll * reference.orientation); + m_gimbal.setPosition(glm::vec3(reference.frame[3]) + reference.orientation * glm::vec3(impulse.dVirtualTranslate)); + } + m_gimbal.end(); + + manipulated &= bool(m_gimbal.getManipulationCounter()); + + if (manipulated) + m_gimbal.updateView(); + + return manipulated; + } + + virtual const uint32_t getAllowedVirtualEvents() override + { + return AllowedVirtualEvents; + } + + virtual const std::string_view getIdentifier() override + { + return "Free-Look Camera"; + } + +private: + typename base_t::CGimbal m_gimbal; + + static inline constexpr auto AllowedVirtualEvents = CVirtualGimbalEvent::Translate | CVirtualGimbalEvent::Rotate; + + static inline const auto m_keyboard_to_virtual_events_preset = []() + { + typename base_t::keyboard_to_virtual_events_t preset; + + preset[ui::E_KEY_CODE::EKC_W] = CVirtualGimbalEvent::MoveForward; + preset[ui::E_KEY_CODE::EKC_S] = CVirtualGimbalEvent::MoveBackward; + preset[ui::E_KEY_CODE::EKC_A] = CVirtualGimbalEvent::MoveLeft; + preset[ui::E_KEY_CODE::EKC_D] = CVirtualGimbalEvent::MoveRight; + preset[ui::E_KEY_CODE::EKC_I] = CVirtualGimbalEvent::TiltDown; + preset[ui::E_KEY_CODE::EKC_K] = CVirtualGimbalEvent::TiltUp; + preset[ui::E_KEY_CODE::EKC_J] = CVirtualGimbalEvent::PanLeft; + preset[ui::E_KEY_CODE::EKC_L] = CVirtualGimbalEvent::PanRight; + preset[ui::E_KEY_CODE::EKC_Q] = CVirtualGimbalEvent::RollLeft; + preset[ui::E_KEY_CODE::EKC_E] = CVirtualGimbalEvent::RollRight; + + return preset; + }(); + + static inline const auto m_mouse_to_virtual_events_preset = []() + { + typename base_t::mouse_to_virtual_events_t preset; + + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_POSITIVE_MOVEMENT_X] = CVirtualGimbalEvent::PanRight; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_NEGATIVE_MOVEMENT_X] = 
CVirtualGimbalEvent::PanLeft; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_POSITIVE_MOVEMENT_Y] = CVirtualGimbalEvent::TiltUp; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_NEGATIVE_MOVEMENT_Y] = CVirtualGimbalEvent::TiltDown; + + return preset; + }(); + + static inline const auto m_imguizmo_to_virtual_events_preset = []() + { + typename base_t::imguizmo_to_virtual_events_t preset; + + preset[CVirtualGimbalEvent::MoveForward] = CVirtualGimbalEvent::MoveForward; + preset[CVirtualGimbalEvent::MoveBackward] = CVirtualGimbalEvent::MoveBackward; + preset[CVirtualGimbalEvent::MoveLeft] = CVirtualGimbalEvent::MoveLeft; + preset[CVirtualGimbalEvent::MoveRight] = CVirtualGimbalEvent::MoveRight; + preset[CVirtualGimbalEvent::MoveUp] = CVirtualGimbalEvent::MoveUp; + preset[CVirtualGimbalEvent::MoveDown] = CVirtualGimbalEvent::MoveDown; + preset[CVirtualGimbalEvent::TiltDown] = CVirtualGimbalEvent::TiltDown; + preset[CVirtualGimbalEvent::TiltUp] = CVirtualGimbalEvent::TiltUp; + preset[CVirtualGimbalEvent::PanLeft] = CVirtualGimbalEvent::PanLeft; + preset[CVirtualGimbalEvent::PanRight] = CVirtualGimbalEvent::PanRight; + preset[CVirtualGimbalEvent::RollLeft] = CVirtualGimbalEvent::RollLeft; + preset[CVirtualGimbalEvent::RollRight] = CVirtualGimbalEvent::RollRight; + + return preset; + }(); +}; + +} + +#endif // _C_FREE_CAMERA_HPP_ diff --git a/common/include/camera/CGeneralPurposeGimbal.hpp b/common/include/camera/CGeneralPurposeGimbal.hpp new file mode 100644 index 000000000..7e1c07096 --- /dev/null +++ b/common/include/camera/CGeneralPurposeGimbal.hpp @@ -0,0 +1,20 @@ +#ifndef _NBL_CGENERAL_PURPOSE_GIMBAL_HPP_ +#define _NBL_CGENERAL_PURPOSE_GIMBAL_HPP_ + +#include "IGimbal.hpp" + +// TODO: DIFFERENT NAMESPACE +namespace nbl::hlsl +{ + template + class CGeneralPurposeGimbal : public IGimbal + { + public: + using base_t = IGimbal; + + CGeneralPurposeGimbal(typename base_t::SCreationParameters&& parameters) : base_t(std::move(parameters)) {} + ~CGeneralPurposeGimbal() = default; + }; +} + 
+#endif // _NBL_IGIMBAL_HPP_ \ No newline at end of file diff --git a/common/include/camera/CLinearProjection.hpp b/common/include/camera/CLinearProjection.hpp new file mode 100644 index 000000000..791e9cb1b --- /dev/null +++ b/common/include/camera/CLinearProjection.hpp @@ -0,0 +1,45 @@ +#ifndef _NBL_C_LINEAR_PROJECTION_HPP_ +#define _NBL_C_LINEAR_PROJECTION_HPP_ + +#include "ILinearProjection.hpp" +#include "IRange.hpp" + +namespace nbl::hlsl +{ + template ProjectionsRange> + class CLinearProjection : public ILinearProjection + { + public: + using ILinearProjection::ILinearProjection; + + CLinearProjection() = default; + + inline static core::smart_refctd_ptr create(core::smart_refctd_ptr&& camera) + { + if (!camera) + return nullptr; + + return core::smart_refctd_ptr(new CLinearProjection(core::smart_refctd_ptr(camera)), core::dont_grab); + } + + virtual std::span getLinearProjections() const override + { + return std::span(m_projections.data(), m_projections.size()); + } + + inline std::span getLinearProjections() + { + return std::span(m_projections.data(), m_projections.size()); + } + + private: + CLinearProjection(core::smart_refctd_ptr&& camera) + : ILinearProjection(core::smart_refctd_ptr(camera)) {} + virtual ~CLinearProjection() = default; + + ProjectionsRange m_projections; + }; + +} // nbl::hlsl namespace + +#endif // _NBL_C_LINEAR_PROJECTION_HPP_ \ No newline at end of file diff --git a/common/include/camera/COrbitCamera.hpp b/common/include/camera/COrbitCamera.hpp new file mode 100644 index 000000000..24b0e0b9d --- /dev/null +++ b/common/include/camera/COrbitCamera.hpp @@ -0,0 +1,201 @@ +#ifndef _C_ORBIT_CAMERA_HPP_ +#define _C_ORBIT_CAMERA_HPP_ + +#include "ICamera.hpp" + +namespace nbl::hlsl +{ + +class COrbitCamera final : public ICamera +{ +public: + using base_t = ICamera; + + COrbitCamera(const float64_t3& position, const float64_t3& target) + : base_t(), m_targetPosition(target), m_distance(length(m_targetPosition - position)), m_gimbal({ 
.position = position, .orientation = glm::quat(glm::vec3(0, 0, 0)) }) {} + ~COrbitCamera() = default; + + const base_t::keyboard_to_virtual_events_t getKeyboardMappingPreset() const override { return m_keyboard_to_virtual_events_preset; } + const base_t::mouse_to_virtual_events_t getMouseMappingPreset() const override { return m_mouse_to_virtual_events_preset; } + const base_t::imguizmo_to_virtual_events_t getImguizmoMappingPreset() const override { return m_imguizmo_to_virtual_events_preset; } + + const typename base_t::CGimbal& getGimbal() override { return m_gimbal; } + + inline bool setDistance(float d) + { + const auto clamped = std::clamp(d, MinDistance, MaxDistance); + const bool ok = clamped == d; + + m_distance = clamped; + + return ok; + } + + inline void target(const float64_t3& p) + { + m_targetPosition = p; + } + + virtual bool manipulate(std::span virtualEvents, const float64_t4x4 const* referenceFrame = nullptr) override + { + // TODO: it must work differently, we should take another gimbal to control target + + // position on the sphere + auto S = [&](double u, double v) -> float64_t3 + { + return float64_t3 + { + std::cos(v) * std::cos(u), + std::cos(v) * std::sin(u), + std::sin(v) + } * (double) m_distance; + }; + + /* + // partial derivative of S with respect to u + auto Sdu = [&](double u, double v) -> float64_t3 + { + return float64_t3 + { + -std::cos(v) * std::sin(u), + std::cos(v)* std::cos(u), + 0 + } * (double) m_distance; + }; + */ + + // partial derivative of S with respect to v + auto Sdv = [&](double u, double v) -> float64_t3 + { + return float64_t3 + { + -std::sin(v) * std::cos(u), + -std::sin(v) * std::sin(u), + std::cos(v) + } *(double)m_distance; + }; + + auto impulse = m_gimbal.accumulate(virtualEvents); + double deltaU = impulse.dVirtualTranslate.y, deltaV = impulse.dVirtualTranslate.x, deltaDistance = impulse.dVirtualTranslate.z; + + // TODO! 
+ constexpr auto nastyScalar = 0.01; + deltaU *= nastyScalar * m_moveSpeedScale; + deltaV *= nastyScalar * m_moveSpeedScale; + + u += deltaU; + v += deltaV; + + m_distance = std::clamp(m_distance += deltaDistance * nastyScalar, MinDistance, MaxDistance); + + const auto localSpherePostion = S(u, v); + const auto newPosition = localSpherePostion + m_targetPosition; + + // note we are not using Sdu (though we could!) + // instead we benefit from forward we have for free when moving on sphere surface + // and given up vector obtained from partial derivative we can easily get right vector, this way + // we don't have frenet frame flip we would have with Sdu, however it could be adjusted anyway, less code + + const auto newUp = normalize(Sdv(u, v)); + const auto newForward = normalize(-localSpherePostion); + const auto newRight = normalize(cross(newUp, newForward)); + + const auto newOrientation = glm::quat_cast + ( + glm::dmat3 + { + newRight, + newUp, + newForward + } + ); + + m_gimbal.begin(); + { + m_gimbal.setPosition(newPosition); + m_gimbal.setOrientation(newOrientation); + } + m_gimbal.end(); + + bool manipulated = bool(m_gimbal.getManipulationCounter()); + + if (manipulated) + m_gimbal.updateView(); + + return manipulated; + } + + virtual const uint32_t getAllowedVirtualEvents() override + { + return AllowedVirtualEvents; + } + + virtual const std::string_view getIdentifier() override + { + return "Orbit Camera"; + } + + inline float getDistance() { return m_distance; } + inline double getU() { return u; } + inline double getV() { return v; } + + static inline constexpr float MinDistance = 0.1f; + static inline constexpr float MaxDistance = 10000.f; + +private: + float64_t3 m_targetPosition; + float m_distance; + typename base_t::CGimbal m_gimbal; + + double u = {}, v = {}; + + static inline constexpr auto AllowedVirtualEvents = CVirtualGimbalEvent::Translate; + + static inline const auto m_keyboard_to_virtual_events_preset = []() + { + typename 
base_t::keyboard_to_virtual_events_t preset; + + preset[ui::E_KEY_CODE::EKC_W] = CVirtualGimbalEvent::MoveUp; + preset[ui::E_KEY_CODE::EKC_S] = CVirtualGimbalEvent::MoveDown; + preset[ui::E_KEY_CODE::EKC_A] = CVirtualGimbalEvent::MoveLeft; + preset[ui::E_KEY_CODE::EKC_D] = CVirtualGimbalEvent::MoveRight; + preset[ui::E_KEY_CODE::EKC_E] = CVirtualGimbalEvent::MoveForward; + preset[ui::E_KEY_CODE::EKC_Q] = CVirtualGimbalEvent::MoveBackward; + + return preset; + }(); + + static inline const auto m_mouse_to_virtual_events_preset = []() + { + typename base_t::mouse_to_virtual_events_t preset; + + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_POSITIVE_MOVEMENT_X] = CVirtualGimbalEvent::MoveRight; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_NEGATIVE_MOVEMENT_X] = CVirtualGimbalEvent::MoveLeft; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_POSITIVE_MOVEMENT_Y] = CVirtualGimbalEvent::MoveUp; + preset[ui::E_MOUSE_CODE::EMC_RELATIVE_NEGATIVE_MOVEMENT_Y] = CVirtualGimbalEvent::MoveDown; + preset[ui::E_MOUSE_CODE::EMC_VERTICAL_POSITIVE_SCROLL] = CVirtualGimbalEvent::MoveForward; + preset[ui::E_MOUSE_CODE::EMC_HORIZONTAL_POSITIVE_SCROLL] = CVirtualGimbalEvent::MoveForward; + preset[ui::E_MOUSE_CODE::EMC_VERTICAL_NEGATIVE_SCROLL] = CVirtualGimbalEvent::MoveBackward; + preset[ui::E_MOUSE_CODE::EMC_HORIZONTAL_NEGATIVE_SCROLL] = CVirtualGimbalEvent::MoveBackward; + + return preset; + }(); + + static inline const auto m_imguizmo_to_virtual_events_preset = []() + { + typename base_t::imguizmo_to_virtual_events_t preset; + + preset[CVirtualGimbalEvent::MoveForward] = CVirtualGimbalEvent::MoveForward; + preset[CVirtualGimbalEvent::MoveBackward] = CVirtualGimbalEvent::MoveBackward; + preset[CVirtualGimbalEvent::MoveLeft] = CVirtualGimbalEvent::MoveLeft; + preset[CVirtualGimbalEvent::MoveRight] = CVirtualGimbalEvent::MoveRight; + preset[CVirtualGimbalEvent::MoveUp] = CVirtualGimbalEvent::MoveUp; + preset[CVirtualGimbalEvent::MoveDown] = CVirtualGimbalEvent::MoveDown; + + return preset; + }(); +}; + +} + 
+#endif // _C_ORBIT_CAMERA_HPP_ diff --git a/common/include/camera/CPlanarProjection.hpp b/common/include/camera/CPlanarProjection.hpp new file mode 100644 index 000000000..a85cc39de --- /dev/null +++ b/common/include/camera/CPlanarProjection.hpp @@ -0,0 +1,42 @@ +#ifndef _NBL_C_PLANAR_PROJECTION_HPP_ +#define _NBL_C_PLANAR_PROJECTION_HPP_ + +#include "IPlanarProjection.hpp" +#include "IRange.hpp" + +namespace nbl::hlsl +{ + template ProjectionsRange> + class CPlanarProjection : public IPlanarProjection + { + public: + virtual ~CPlanarProjection() = default; + + inline static core::smart_refctd_ptr create(core::smart_refctd_ptr&& camera) + { + if (!camera) + return nullptr; + + return core::smart_refctd_ptr(new CPlanarProjection(core::smart_refctd_ptr(camera)), core::dont_grab); + } + + virtual std::span getLinearProjections() const override + { + return { reinterpret_cast(m_projections.data()), m_projections.size() }; + } + + inline ProjectionsRange& getPlanarProjections() + { + return m_projections; + } + + protected: + CPlanarProjection(core::smart_refctd_ptr&& camera) + : IPlanarProjection(core::smart_refctd_ptr(camera)) {} + + ProjectionsRange m_projections; + }; + +} // nbl::hlsl namespace + +#endif // _NBL_C_PLANAR_PROJECTION_HPP_ \ No newline at end of file diff --git a/common/include/camera/ICamera.hpp b/common/include/camera/ICamera.hpp new file mode 100644 index 000000000..aceec1bc2 --- /dev/null +++ b/common/include/camera/ICamera.hpp @@ -0,0 +1,86 @@ +// Copyright (C) 2018-2020 - DevSH Graphics Programming Sp. z O.O. +// This file is part of the "Nabla Engine". 
+// For conditions of distribution and use, see copyright notice in nabla.h + +#ifndef _I_CAMERA_HPP_ +#define _I_CAMERA_HPP_ + +#include "camera/IGimbalController.hpp" + +namespace nbl::hlsl // TODO: DIFFERENT NAMESPACE +{ + +class ICamera : public IGimbalController, virtual public core::IReferenceCounted +{ +public: + using IGimbalController::IGimbalController; + + // Gimbal with view parameters representing a camera in world space + class CGimbal : public IGimbal + { + public: + using base_t = IGimbal; + + CGimbal(typename base_t::SCreationParameters&& parameters) : base_t(std::move(parameters)) { updateView(); } + ~CGimbal() = default; + + inline void updateView() + { + const auto& gRight = base_t::getXAxis(), gUp = base_t::getYAxis(), gForward = base_t::getZAxis(); + + assert(isOrthoBase(gRight, gUp, gForward)); + + const auto& position = base_t::getPosition(); + + m_viewMatrix[0u] = float64_t4(gRight, -glm::dot(gRight, position)); + m_viewMatrix[1u] = float64_t4(gUp, -glm::dot(gUp, position)); + m_viewMatrix[2u] = float64_t4(gForward, -glm::dot(gForward, position)); + } + + inline const float64_t3x4& getViewMatrix() const { return m_viewMatrix; } + + private: + float64_t3x4 m_viewMatrix; + }; + + ICamera() {} + virtual ~ICamera() = default; + + // Returns a gimbal which *models the camera view* + virtual const CGimbal& getGimbal() = 0u; + + // Manipulates camera with virtual events, returns true if *any* manipulation happens, it may fail partially or fully because each camera type has certain constraints which determine how it actually works + // TODO: this really needs to be moved to more abstract interface, eg. IObjectTransform or something and ICamera should inherit it (its also an object!) 
+ virtual bool manipulate(std::span virtualEvents, const float64_t4x4 const* referenceFrame = nullptr) = 0; + + // VirtualEventType bitmask for a camera view gimbal manipulation requests filtering + virtual const uint32_t getAllowedVirtualEvents() = 0u; + + // Identifier of a camera type + virtual const std::string_view getIdentifier() = 0u; + + // (***) + inline void setMoveSpeedScale(double scalar) + { + m_moveSpeedScale = scalar; + } + + // (***) + inline void setRotationSpeedScale(double scalar) + { + m_rotationSpeedScale = scalar; + } + + inline double getMoveSpeedScale() const { return m_moveSpeedScale; } + inline double getRotationSpeedScale() const { return m_rotationSpeedScale; } + +protected: + + // (***) TODO: I need to think whether a camera should own this or controllers should be able + // to set sensitivity to scale magnitudes of generated events we put into manipulate method + double m_moveSpeedScale = 0.01, m_rotationSpeedScale = 0.003; +}; + +} + +#endif // _I_CAMERA_HPP_ \ No newline at end of file diff --git a/common/include/camera/IGimbal.hpp b/common/include/camera/IGimbal.hpp new file mode 100644 index 000000000..384786b46 --- /dev/null +++ b/common/include/camera/IGimbal.hpp @@ -0,0 +1,451 @@ +#ifndef _NBL_IGIMBAL_HPP_ +#define _NBL_IGIMBAL_HPP_ + +#include "glm/glm/ext/matrix_transform.hpp" // TODO: TEMPORARY!!! 
whatever used will be moved to cpp +#include "glm/glm/gtc/quaternion.hpp" +#include "nbl/builtin/hlsl/matrix_utils/transformation_matrix_utils.hlsl" + +// TODO: DIFFERENT NAMESPACE +namespace nbl::hlsl +{ + struct CVirtualGimbalEvent + { + enum VirtualEventType : uint32_t + { + None = 0, + + // Individual events + MoveForward = core::createBitmask({ 0 }), + MoveBackward = core::createBitmask({ 1 }), + MoveLeft = core::createBitmask({ 2 }), + MoveRight = core::createBitmask({ 3 }), + MoveUp = core::createBitmask({ 4 }), + MoveDown = core::createBitmask({ 5 }), + TiltUp = core::createBitmask({ 6 }), + TiltDown = core::createBitmask({ 7 }), + PanLeft = core::createBitmask({ 8 }), + PanRight = core::createBitmask({ 9 }), + RollLeft = core::createBitmask({ 10 }), + RollRight = core::createBitmask({ 11 }), + ScaleXInc = core::createBitmask({ 12 }), + ScaleXDec = core::createBitmask({ 13 }), + ScaleYInc = core::createBitmask({ 14 }), + ScaleYDec = core::createBitmask({ 15 }), + ScaleZInc = core::createBitmask({ 16 }), + ScaleZDec = core::createBitmask({ 17 }), + + EventsCount = 18, + + // Grouped bitmasks + Translate = MoveForward | MoveBackward | MoveLeft | MoveRight | MoveUp | MoveDown, + Rotate = TiltUp | TiltDown | PanLeft | PanRight | RollLeft | RollRight, + Scale = ScaleXInc | ScaleXDec | ScaleYInc | ScaleYDec | ScaleZInc | ScaleZDec, + + All = Translate | Rotate | Scale + }; + + using manipulation_encode_t = float64_t; + + VirtualEventType type = None; + manipulation_encode_t magnitude = {}; + + static constexpr std::string_view virtualEventToString(VirtualEventType event) + { + switch (event) + { + case MoveForward: return "MoveForward"; + case MoveBackward: return "MoveBackward"; + case MoveLeft: return "MoveLeft"; + case MoveRight: return "MoveRight"; + case MoveUp: return "MoveUp"; + case MoveDown: return "MoveDown"; + case TiltUp: return "TiltUp"; + case TiltDown: return "TiltDown"; + case PanLeft: return "PanLeft"; + case PanRight: return "PanRight"; + case 
RollLeft: return "RollLeft"; + case RollRight: return "RollRight"; + case ScaleXInc: return "ScaleXInc"; + case ScaleXDec: return "ScaleXDec"; + case ScaleYInc: return "ScaleYInc"; + case ScaleYDec: return "ScaleYDec"; + case ScaleZInc: return "ScaleZInc"; + case ScaleZDec: return "ScaleZDec"; + case Translate: return "Translate"; + case Rotate: return "Rotate"; + case Scale: return "Scale"; + case None: return "None"; + default: return "Unknown"; + } + } + + static constexpr VirtualEventType stringToVirtualEvent(std::string_view event) + { + if (event == "MoveForward") return MoveForward; + if (event == "MoveBackward") return MoveBackward; + if (event == "MoveLeft") return MoveLeft; + if (event == "MoveRight") return MoveRight; + if (event == "MoveUp") return MoveUp; + if (event == "MoveDown") return MoveDown; + if (event == "TiltUp") return TiltUp; + if (event == "TiltDown") return TiltDown; + if (event == "PanLeft") return PanLeft; + if (event == "PanRight") return PanRight; + if (event == "RollLeft") return RollLeft; + if (event == "RollRight") return RollRight; + if (event == "ScaleXInc") return ScaleXInc; + if (event == "ScaleXDec") return ScaleXDec; + if (event == "ScaleYInc") return ScaleYInc; + if (event == "ScaleYDec") return ScaleYDec; + if (event == "ScaleZInc") return ScaleZInc; + if (event == "ScaleZDec") return ScaleZDec; + if (event == "Translate") return Translate; + if (event == "Rotate") return Rotate; + if (event == "Scale") return Scale; + if (event == "None") return None; + return None; + } + + static inline constexpr auto VirtualEventsTypeTable = []() + { + std::array output; + + for (uint16_t i = 0u; i < EventsCount; ++i) + output[i] = static_cast(core::createBitmask({ i })); + + return output; + }(); + }; + + struct CReferenceTransform + { + float64_t4x4 frame; + glm::quat orientation; + }; + + template + requires is_any_of_v + class IGimbal + { + public: + using precision_t = T; + //! 
underlying type for world matrix (TRS) + using model_matrix_t = matrix; + + struct VirtualImpulse + { + vector dVirtualTranslate { 0.0f }, dVirtualRotation { 0.0f }, dVirtualScale { 1.0f }; + }; + + //! Accumulates virtual impulse given allowed virtual event bitmap. Input virtual events are already deltas with respect to some base frame, the utility filters the events & outputs the impulse + template + VirtualImpulse accumulate(std::span virtualEvents, const vector& gRightOverride, const vector& gUpOverride, const vector& gForwardOverride) + { + VirtualImpulse impulse; + + for (const auto& event : virtualEvents) + { + assert(event.magnitude >= 0); + + // translation events + if constexpr (AllowedEvents & CVirtualGimbalEvent::MoveRight) + if (event.type == CVirtualGimbalEvent::MoveRight) + impulse.dVirtualTranslate.x += static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::MoveLeft) + if (event.type == CVirtualGimbalEvent::MoveLeft) + impulse.dVirtualTranslate.x -= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::MoveUp) + if (event.type == CVirtualGimbalEvent::MoveUp) + impulse.dVirtualTranslate.y += static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::MoveDown) + if (event.type == CVirtualGimbalEvent::MoveDown) + impulse.dVirtualTranslate.y -= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::MoveForward) + if (event.type == CVirtualGimbalEvent::MoveForward) + impulse.dVirtualTranslate.z += static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::MoveBackward) + if (event.type == CVirtualGimbalEvent::MoveBackward) + impulse.dVirtualTranslate.z -= static_cast(event.magnitude); + + // rotation events + if constexpr (AllowedEvents & CVirtualGimbalEvent::TiltUp) + if (event.type == CVirtualGimbalEvent::TiltUp) + impulse.dVirtualRotation.x += static_cast(event.magnitude); + + if constexpr (AllowedEvents & 
CVirtualGimbalEvent::TiltDown) + if (event.type == CVirtualGimbalEvent::TiltDown) + impulse.dVirtualRotation.x -= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::PanRight) + if (event.type == CVirtualGimbalEvent::PanRight) + impulse.dVirtualRotation.y += static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::PanLeft) + if (event.type == CVirtualGimbalEvent::PanLeft) + impulse.dVirtualRotation.y -= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::RollRight) + if (event.type == CVirtualGimbalEvent::RollRight) + impulse.dVirtualRotation.z += static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::RollLeft) + if (event.type == CVirtualGimbalEvent::RollLeft) + impulse.dVirtualRotation.z -= static_cast(event.magnitude); + + // scaling events + if constexpr (AllowedEvents & CVirtualGimbalEvent::ScaleXInc) + if (event.type == CVirtualGimbalEvent::ScaleXInc) + impulse.dVirtualScale.x *= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::ScaleXDec) + if (event.type == CVirtualGimbalEvent::ScaleXDec) + impulse.dVirtualScale.x *= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::ScaleYInc) + if (event.type == CVirtualGimbalEvent::ScaleYInc) + impulse.dVirtualScale.y *= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::ScaleYDec) + if (event.type == CVirtualGimbalEvent::ScaleYDec) + impulse.dVirtualScale.y *= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::ScaleZInc) + if (event.type == CVirtualGimbalEvent::ScaleZInc) + impulse.dVirtualScale.z *= static_cast(event.magnitude); + + if constexpr (AllowedEvents & CVirtualGimbalEvent::ScaleZDec) + if (event.type == CVirtualGimbalEvent::ScaleZDec) + impulse.dVirtualScale.z *= static_cast(event.magnitude); + } + + return impulse; + } + + template + VirtualImpulse 
accumulate(std::span virtualEvents) + { + return accumulate(virtualEvents, getXAxis(), getYAxis(), getZAxis()); + } + + struct SCreationParameters + { + vector position; + glm::quat orientation = glm::quat(1.0f, 0.0f, 0.0f, 0.0f); + }; + + IGimbal(const IGimbal&) = default; + IGimbal(IGimbal&&) noexcept = default; + IGimbal& operator=(const IGimbal&) = default; + IGimbal& operator=(IGimbal&&) noexcept = default; + + IGimbal(SCreationParameters&& parameters) + : m_position(parameters.position), m_orientation(parameters.orientation), m_id(reinterpret_cast(this)) + { + updateOrthonormalOrientationBase(); + } + + inline const uintptr_t getID() const { return m_id; } + + void begin() + { + m_isManipulating = true; + m_counter = 0u; + } + + inline void setPosition(const vector& position) + { + assert(m_isManipulating); // TODO: log error and return without doing nothing + + if (m_position != position) + m_counter++; + + m_position = position; + } + + inline void setScale(const vector& scale) + { + m_scale = scale; + } + + inline void setOrientation(const glm::quat& orientation) + { + assert(m_isManipulating); // TODO: log error and return without doing nothing + + if(m_orientation != orientation) + m_counter++; + + m_orientation = glm::normalize(orientation); + updateOrthonormalOrientationBase(); + } + + inline void transform(const CReferenceTransform& reference, const VirtualImpulse& impulse) + { + setOrientation(reference.orientation * glm::quat(glm::radians(impulse.dVirtualRotation))); + setPosition(mul(float64_t4(impulse.dVirtualTranslate, 1), reference.frame).xyz); + } + + inline void rotate(const vector& axis, float dRadians) + { + assert(m_isManipulating); // TODO: log error and return without doing nothing + + if(dRadians) + m_counter++; + + glm::quat dRotation = glm::angleAxis(dRadians, axis); + m_orientation = glm::normalize(dRotation * m_orientation); + updateOrthonormalOrientationBase(); + } + + inline void move(vector delta) + { + assert(m_isManipulating); 
// TODO: log error and return without doing nothing + + auto newPosition = m_position + delta; + + if (newPosition != m_position) + m_counter++; + + m_position = newPosition; + } + + inline void strafe(precision_t distance) + { + move(getXAxis() * distance); + } + + inline void climb(precision_t distance) + { + move(getYAxis() * distance); + } + + inline void advance(precision_t distance) + { + move(getZAxis() * distance); + } + + inline void end() + { + m_isManipulating = false; + } + + //! Position of gimbal in world space + inline const auto& getPosition() const { return m_position; } + + //! Orientation of gimbal + inline const auto& getOrientation() const { return m_orientation; } + + //! Scale transform component + inline const auto& getScale() const { return m_scale; } + + //! World matrix (TRS) + template + requires is_any_of_v> + const TRS operator()() const + { + const auto& position = getPosition(); + const auto& rotation = getOrthonornalMatrix(); + const auto& scale = getScale(); + + if constexpr (is_same_v) + { + return + { + vector(rotation[0] * scale.x, position.x), + vector(rotation[1] * scale.y, position.y), + vector(rotation[2] * scale.z, position.z) + }; + } + else + { + return + { + vector(rotation[0] * scale.x, T(0)), + vector(rotation[1] * scale.y, T(0)), + vector(rotation[2] * scale.z, T(0)), + vector(position, T(1)) + }; + } + } + + //! Orthonormal [getXAxis(), getYAxis(), getZAxis()] orientation matrix + inline const auto& getOrthonornalMatrix() const { return m_orthonormal; } + + //! Base "right" vector in orthonormal orientation basis (X-axis) + inline const auto& getXAxis() const { return m_orthonormal[0u]; } + + //! Base "up" vector in orthonormal orientation basis (Y-axis) + inline const auto& getYAxis() const { return m_orthonormal[1u]; } + + //! Base "forward" vector in orthonormal orientation basis (Z-axis) + inline const auto& getZAxis() const { return m_orthonormal[2u]; } + + //! 
Target vector in local space, alias for getZAxis() + inline const auto getLocalTarget() const { return getZAxis(); } + + //! Target vector in world space + inline const auto getWorldTarget() const { return getPosition() + getLocalTarget(); } + + //! Counts how many times a valid manipulation has been performed, the counter resets when begin() is called + inline const auto& getManipulationCounter() { return m_counter; } + + //! Returns true if gimbal records a manipulation + inline bool isManipulating() const { return m_isManipulating; } + + bool extractReferenceTransform(CReferenceTransform* out, const float64_t4x4 const* referenceFrame = nullptr) + { + if (not out) + return false; + + if (referenceFrame) + { + out->frame = *referenceFrame; + if (not isOrthoBase(float64_t3(out->frame[0]), float64_t3(out->frame[1]), float64_t3(out->frame[2]))) + return false; + } + else + { + out->frame = getMatrix3x3As4x4(getOrthonornalMatrix()); + out->frame[3] = float64_t4(getPosition(), 1); + } + + out->orientation = glm::quat_cast(glm::dmat3{ out->frame[0], out->frame[1], out->frame[2] }); + + return true; + } + + private: + inline void updateOrthonormalOrientationBase() + { + m_orthonormal = matrix(glm::mat3_cast(glm::normalize(m_orientation))); + } + + //! Position of a gimbal in world space + vector m_position; + + //! Normalized orientation of gimbal + //! TODO: precision + replace with our "quat at home" + glm::quat m_orientation; + + //! Scale transform component + vector m_scale = { 1.f, 1.f , 1.f }; + + //! Orthonormal base composed from "m_orientation" representing gimbal's "forward", "up" & "right" vectors in local space - basically it spans orientation space + matrix m_orthonormal; + + //! Counter that increments for each performed manipulation, resets with each begin() call + size_t m_counter = {}; + + //! Tracks whether gimbal is currently in manipulation mode + bool m_isManipulating = false; + + //! 
The fact ImGUIZMO has global context I don't like, however for IDs we can do a life-tracking trick and cast addresses which are unique & we don't need any global associative container to track them! + const uintptr_t m_id; + }; +} // namespace nbl::hlsl + +#endif // _NBL_IGIMBAL_HPP_ \ No newline at end of file diff --git a/common/include/camera/IGimbalController.hpp b/common/include/camera/IGimbalController.hpp new file mode 100644 index 000000000..7b4ba365e --- /dev/null +++ b/common/include/camera/IGimbalController.hpp @@ -0,0 +1,457 @@ +#ifndef _NBL_I_CAMERA_CONTROLLER_HPP_ +#define _NBL_I_CAMERA_CONTROLLER_HPP_ + +///////////////////////// +// TODO: TEMPORARY!!! +#include "common.hpp" +namespace ImGuizmo +{ + void DecomposeMatrixToComponents(const float*, float*, float*, float*); +} +///////////////////////// + +#include "IProjection.hpp" +#include "IGimbal.hpp" + +// TODO: DIFFERENT NAMESPACE +namespace nbl::hlsl +{ + +struct IGimbalManipulateEncoder +{ + IGimbalManipulateEncoder() {} + virtual ~IGimbalManipulateEncoder() {} + + //! output of any controller process method + using gimbal_event_t = CVirtualGimbalEvent; + + //! encode keyboard code used to translate to gimbal_event_t event + using encode_keyboard_code_t = ui::E_KEY_CODE; + + //! encode mouse code used to translate to gimbal_event_t event + using encode_mouse_code_t = ui::E_MOUSE_CODE; + + //! encode ImGuizmo code used to translate to gimbal_event_t event + using encode_imguizmo_code_t = gimbal_event_t::VirtualEventType; + + //! Encoder types, a controller takes encoder type events and outputs gimbal_event_t events + enum EncoderType : uint8_t + { + Keyboard, + Mouse, + Imguizmo, + + Count + }; + + //! 
A key in a hash map which is "encode__code_t" as union with information about EncoderType the encode value got produced from + struct CKeyInfo + { + union + { + encode_keyboard_code_t keyboardCode; + encode_mouse_code_t mouseCode; + encode_imguizmo_code_t imguizmoCode; + }; + + CKeyInfo(encode_keyboard_code_t code) : keyboardCode(code), type(Keyboard) {} + CKeyInfo(encode_mouse_code_t code) : mouseCode(code), type(Mouse) {} + CKeyInfo(encode_imguizmo_code_t code) : imguizmoCode(code), type(Imguizmo) {} + + EncoderType type; + }; + + //! Hash value in hash map which is gimbal_event_t & state + struct CHashInfo + { + CHashInfo() {} + CHashInfo(gimbal_event_t::VirtualEventType _type) : event({ .type = _type }) {} + ~CHashInfo() = default; + + gimbal_event_t event = {}; + bool active = false; + }; + + using keyboard_to_virtual_events_t = std::unordered_map; + using mouse_to_virtual_events_t = std::unordered_map; + using imguizmo_to_virtual_events_t = std::unordered_map; + + virtual const keyboard_to_virtual_events_t getKeyboardMappingPreset() const { return {}; } + virtual const mouse_to_virtual_events_t getMouseMappingPreset() const { return {}; } + virtual const imguizmo_to_virtual_events_t getImguizmoMappingPreset() const { return {}; } + + virtual const keyboard_to_virtual_events_t& getKeyboardVirtualEventMap() const = 0; + virtual const mouse_to_virtual_events_t& getMouseVirtualEventMap() const = 0; + virtual const imguizmo_to_virtual_events_t& getImguizmoVirtualEventMap() const = 0; + + // Binds mouse key codes to virtual events, the mapKeys lambda will be executed with controller keyboard_to_virtual_events_t table + virtual void updateKeyboardMapping(const std::function& mapKeys) = 0; + + // Binds mouse key codes to virtual events, the mapKeys lambda will be executed with controller mouse_to_virtual_events_t table + virtual void updateMouseMapping(const std::function& mapKeys) = 0; + + // Binds imguizmo key codes to virtual events, the mapKeys lambda will be 
executed with controller imguizmo_to_virtual_events_t table + virtual void updateImguizmoMapping(const std::function& mapKeys) = 0; +}; + +class IGimbalController : public IGimbalManipulateEncoder +{ +public: + using IGimbalManipulateEncoder::IGimbalManipulateEncoder; + + IGimbalController() {} + virtual ~IGimbalController() {} + + //! input of keyboard gimbal controller process utility - Nabla UI event handler produces ui::SKeyboardEvent events + using input_keyboard_event_t = ui::SKeyboardEvent; + + //! input of mouse gimbal controller process utility - Nabla UI event handler produces ui::SMouseEvent events + using input_mouse_event_t = ui::SMouseEvent; + + //! input of ImGuizmo gimbal controller process utility - ImGuizmo manipulate utility produces "delta (TRS) matrix" events + using input_imguizmo_event_t = float32_t4x4; + + void beginInputProcessing(const std::chrono::microseconds nextPresentationTimeStamp) + { + m_nextPresentationTimeStamp = nextPresentationTimeStamp; + m_frameDeltaTime = std::chrono::duration_cast(m_nextPresentationTimeStamp - m_lastVirtualUpTimeStamp).count(); + assert(m_frameDeltaTime >= 0.f); + } + + void endInputProcessing() + { + m_lastVirtualUpTimeStamp = m_nextPresentationTimeStamp; + } + + virtual void updateKeyboardMapping(const std::function& mapKeys) override { mapKeys(m_keyboardVirtualEventMap); } + virtual void updateMouseMapping(const std::function& mapKeys) override { mapKeys(m_mouseVirtualEventMap); } + virtual void updateImguizmoMapping(const std::function& mapKeys) override { mapKeys(m_imguizmoVirtualEventMap); } + + struct SUpdateParameters + { + std::span keyboardEvents = {}; + std::span mouseEvents = {}; + std::span imguizmoEvents = {}; + }; + + /** + * @brief Processes combined events from SUpdateParameters to generate virtual manipulation events. + * + * @note This function combines the processing of events from keyboards, mouse and ImGuizmo. 
+ * It delegates the actual processing to the respective functions: + * - @ref processKeyboard for keyboard events + * - @ref processMouse for mouse events + * - @ref processImguizmo for ImGuizmo events + * The results are accumulated into the output array and the total count. + * + * @param "output" is a pointer to the array where all generated gimbal events will be stored. + * If nullptr, the function will only calculate the total count of potential + * output events without processing. + * + * @param "count" is a uint32_t reference to store the total count of generated gimbal events. + * + * @param "parameters" is an SUpdateParameters structure containing the individual event arrays + * for keyboard, mouse, and ImGuizmo inputs. + * + * @return void. If "count" > 0 and "output" is a valid pointer, use it to dereference your "output" + * containing "count" events. If "output" is nullptr, "count" tells you the total size of "output" + * you must guarantee to be valid. + */ + void process(gimbal_event_t* output, uint32_t& count, const SUpdateParameters parameters = {}) + { + count = 0u; + uint32_t vKeyboardEventsCount = {}, vMouseEventsCount = {}, vImguizmoEventsCount = {}; + + if (output) + { + processKeyboard(output, vKeyboardEventsCount, parameters.keyboardEvents); output += vKeyboardEventsCount; + processMouse(output, vMouseEventsCount, parameters.mouseEvents); output += vMouseEventsCount; + processImguizmo(output, vImguizmoEventsCount, parameters.imguizmoEvents); + } + else + { + processKeyboard(nullptr, vKeyboardEventsCount, {}); + processMouse(nullptr, vMouseEventsCount, {}); + processImguizmo(nullptr, vImguizmoEventsCount, {}); + } + + count = vKeyboardEventsCount + vMouseEventsCount + vImguizmoEventsCount; + } + + virtual const keyboard_to_virtual_events_t& getKeyboardVirtualEventMap() const override { return m_keyboardVirtualEventMap; } + virtual const mouse_to_virtual_events_t& getMouseVirtualEventMap() const override { return m_mouseVirtualEventMap; } + 
virtual const imguizmo_to_virtual_events_t& getImguizmoVirtualEventMap() const override { return m_imguizmoVirtualEventMap; } + + /** + * @brief Processes keyboard events to generate virtual manipulation events. + * + * @note This function maps keyboard events into virtual gimbal manipulation events + * based on predefined mappings. It supports event types such as key press and key release + * to trigger corresponding actions. + * + * @param "output" is a pointer to the array where generated gimbal events will be stored. + * If nullptr, the function will only calculate the count of potential + * output events without processing. + * + * @param "count" is a uint32_t reference to store the count of generated gimbal events. + * + * @param "events" is a span of input_keyboard_event_t. Each such event contains a key code and action, + * such as key press or release. + * + * @return void. If "count" > 0 and "output" is a valid pointer, use it to dereference your "output" + * containing "count" events. If "output" is nullptr, "count" tells you the size of "output" you must guarantee to be valid. 
+ */ + void processKeyboard(gimbal_event_t* output, uint32_t& count, std::span events) + { + count = 0u; + const auto mappedVirtualEventsCount = m_keyboardVirtualEventMap.size(); + + if (!output) + { + count = mappedVirtualEventsCount; + return; + } + + if (mappedVirtualEventsCount) + { + preprocess(m_keyboardVirtualEventMap); + + for (const auto& keyboardEvent : events) + { + auto request = m_keyboardVirtualEventMap.find(keyboardEvent.keyCode); + if (request != std::end(m_keyboardVirtualEventMap)) + { + auto& hash = request->second; + + if (keyboardEvent.action == input_keyboard_event_t::ECA_PRESSED) + { + if (!hash.active) + { + const auto keyDeltaTime = std::chrono::duration_cast(m_nextPresentationTimeStamp - keyboardEvent.timeStamp).count(); + assert(keyDeltaTime >= 0); + + hash.active = true; + hash.event.magnitude = keyDeltaTime; + } + } + else if (keyboardEvent.action == input_keyboard_event_t::ECA_RELEASED) + hash.active = false; + } + } + + postprocess(m_keyboardVirtualEventMap, output, count); + } + } + + /** + * @brief Processes mouse events to generate virtual manipulation events. + * + * @note This function processes mouse input, including clicks, scrolls, and movements, + * and maps them into virtual gimbal manipulation events. Mouse actions are processed + * using predefined mappings to determine corresponding gimbal manipulations. + * + * @param "output" is a pointer to the array where generated gimbal events will be stored. + * If nullptr, the function will only calculate the count of potential + * output events without processing. + * + * @param "count" is a uint32_t reference to store the count of generated gimbal events. + * + * @param "events" is a span of input_mouse_event_t. Each such event represents a mouse action, + * including clicks, scrolls, or movements. + * + * @return void. If "count" > 0 and "output" is a valid pointer, use it to dereference your "output" + * containing "count" events. 
If "output" is nullptr, "count" tells you the size of "output" you must guarantee to be valid. + */ + void processMouse(gimbal_event_t* output, uint32_t& count, std::span events) + { + count = 0u; + const auto mappedVirtualEventsCount = m_mouseVirtualEventMap.size(); + + if (!output) + { + count = mappedVirtualEventsCount; + return; + } + + if (mappedVirtualEventsCount) + { + preprocess(m_mouseVirtualEventMap); + + for (const auto& mouseEvent : events) + { + ui::E_MOUSE_CODE mouseCode = ui::EMC_NONE; + + switch (mouseEvent.type) + { + case input_mouse_event_t::EET_CLICK: + { + switch (mouseEvent.clickEvent.mouseButton) + { + case ui::EMB_LEFT_BUTTON: mouseCode = ui::EMC_LEFT_BUTTON; break; + case ui::EMB_RIGHT_BUTTON: mouseCode = ui::EMC_RIGHT_BUTTON; break; + case ui::EMB_MIDDLE_BUTTON: mouseCode = ui::EMC_MIDDLE_BUTTON; break; + case ui::EMB_BUTTON_4: mouseCode = ui::EMC_BUTTON_4; break; + case ui::EMB_BUTTON_5: mouseCode = ui::EMC_BUTTON_5; break; + default: continue; + } + + auto request = m_mouseVirtualEventMap.find(mouseCode); + if (request != std::end(m_mouseVirtualEventMap)) + { + auto& hash = request->second; + + if (mouseEvent.clickEvent.action == input_mouse_event_t::SClickEvent::EA_PRESSED) + { + if (!hash.active) + { + const auto keyDeltaTime = std::chrono::duration_cast(m_nextPresentationTimeStamp - mouseEvent.timeStamp).count(); + assert(keyDeltaTime >= 0); + + hash.active = true; + hash.event.magnitude += keyDeltaTime; + } + } + else if (mouseEvent.clickEvent.action == input_mouse_event_t::SClickEvent::EA_RELEASED) + hash.active = false; + } + } break; + + case input_mouse_event_t::EET_SCROLL: + { + requestMagnitudeUpdateWithScalar(0.f, float(mouseEvent.scrollEvent.verticalScroll), float(std::abs(mouseEvent.scrollEvent.verticalScroll)), ui::EMC_VERTICAL_POSITIVE_SCROLL, ui::EMC_VERTICAL_NEGATIVE_SCROLL, m_mouseVirtualEventMap); + requestMagnitudeUpdateWithScalar(0.f, mouseEvent.scrollEvent.horizontalScroll, 
std::abs(mouseEvent.scrollEvent.horizontalScroll), ui::EMC_HORIZONTAL_POSITIVE_SCROLL, ui::EMC_HORIZONTAL_NEGATIVE_SCROLL, m_mouseVirtualEventMap); + } break; + + case input_mouse_event_t::EET_MOVEMENT: + { + requestMagnitudeUpdateWithScalar(0.f, mouseEvent.movementEvent.relativeMovementX, std::abs(mouseEvent.movementEvent.relativeMovementX), ui::EMC_RELATIVE_POSITIVE_MOVEMENT_X, ui::EMC_RELATIVE_NEGATIVE_MOVEMENT_X, m_mouseVirtualEventMap); + requestMagnitudeUpdateWithScalar(0.f, mouseEvent.movementEvent.relativeMovementY, std::abs(mouseEvent.movementEvent.relativeMovementY), ui::EMC_RELATIVE_POSITIVE_MOVEMENT_Y, ui::EMC_RELATIVE_NEGATIVE_MOVEMENT_Y, m_mouseVirtualEventMap); + } break; + + default: + break; + } + } + + postprocess(m_mouseVirtualEventMap, output, count); + } + } + + /** + * @brief Processes input events from ImGuizmo and generates virtual gimbal events. + * + * @note This function is intended to process transformations provided by ImGuizmo and convert + * them into virtual gimbal events for the ICamera::World mode (ICamera::Local is invalid!). + * The function computes translation, rotation, and scale deltas from ImGuizmo's delta matrix, + * which are then mapped to corresponding virtual events using a predefined mapping. + * + * @param "output" is pointer to the array where generated gimbal events will be stored. + * If nullptr, the function will only calculate the count of potential + * output events without processing. + * + * @param "count" is uint32_t reference to store the count of generated gimbal events. + * + * @param "events" is a span of input_imguizmo_event_t. Each such event contains a delta + * transformation matrix that represents changes in world space. + * + * @return void. If "count" > 0 & "output" was valid pointer then use it to dereference your "output" containing "count" events. + * If "output" is nullptr then "count" tells you about size of "output" you must guarantee to be valid. 
+ */ + void processImguizmo(gimbal_event_t* output, uint32_t& count, std::span events) + { + count = 0u; + const auto mappedVirtualEventsCount = m_imguizmoVirtualEventMap.size(); + + if (!output) + { + count = mappedVirtualEventsCount; + return; + } + + if (mappedVirtualEventsCount) + { + preprocess(m_imguizmoVirtualEventMap); + + for (const auto& ev : events) + { + const auto& deltaWorldTRS = ev; + + struct + { + float32_t3 dTranslation, dRotation, dScale; + } world; + + // TODO: write it in Nabla, this is temp + ImGuizmo::DecomposeMatrixToComponents(&deltaWorldTRS[0][0], &world.dTranslation[0], &world.dRotation[0], &world.dScale[0]); + + // Delta translation impulse + requestMagnitudeUpdateWithScalar(0.f, world.dTranslation[0], std::abs(world.dTranslation[0]), gimbal_event_t::MoveRight, gimbal_event_t::MoveLeft, m_imguizmoVirtualEventMap); + requestMagnitudeUpdateWithScalar(0.f, world.dTranslation[1], std::abs(world.dTranslation[1]), gimbal_event_t::MoveUp, gimbal_event_t::MoveDown, m_imguizmoVirtualEventMap); + requestMagnitudeUpdateWithScalar(0.f, world.dTranslation[2], std::abs(world.dTranslation[2]), gimbal_event_t::MoveForward, gimbal_event_t::MoveBackward, m_imguizmoVirtualEventMap); + + // Delta rotation impulse + requestMagnitudeUpdateWithScalar(0.f, world.dRotation[0], std::abs(world.dRotation[0]), gimbal_event_t::TiltUp , gimbal_event_t::TiltDown, m_imguizmoVirtualEventMap); + requestMagnitudeUpdateWithScalar(0.f, world.dRotation[1], std::abs(world.dRotation[1]), gimbal_event_t::PanRight, gimbal_event_t::PanLeft, m_imguizmoVirtualEventMap); + requestMagnitudeUpdateWithScalar(0.f, world.dRotation[2], std::abs(world.dRotation[2]), gimbal_event_t::RollRight, gimbal_event_t::RollLeft, m_imguizmoVirtualEventMap); + + // Delta scale impulse + requestMagnitudeUpdateWithScalar(1.f, world.dScale[0], std::abs(world.dScale[0]), gimbal_event_t::ScaleXInc, gimbal_event_t::ScaleXDec, m_imguizmoVirtualEventMap); + requestMagnitudeUpdateWithScalar(1.f, world.dScale[1], 
std::abs(world.dScale[1]), gimbal_event_t::ScaleYInc, gimbal_event_t::ScaleYDec, m_imguizmoVirtualEventMap); + requestMagnitudeUpdateWithScalar(1.f, world.dScale[2], std::abs(world.dScale[2]), gimbal_event_t::ScaleZInc, gimbal_event_t::ScaleZDec, m_imguizmoVirtualEventMap); + } + + postprocess(m_imguizmoVirtualEventMap, output, count); + } + } + +private: + + //! helper utility, for any controller this should be called before any update of hash map + void preprocess(auto& map) + { + for (auto& [key, hash] : map) + { + hash.event.magnitude = 0.0f; + + if (hash.active) + hash.event.magnitude = m_frameDeltaTime; + } + } + + //! helper utility, for any controller this should be called after updating a hash map + void postprocess(const auto& map, gimbal_event_t* output, uint32_t& count) + { + for (const auto& [key, hash] : map) + if (hash.event.magnitude) + { + auto* virtualEvent = output + count; + virtualEvent->type = hash.event.type; + virtualEvent->magnitude = hash.event.magnitude; + ++count; + } + } + + //! helper utility, it *doesn't* assume we keep requested events alive but only increase their magnitude + template + void requestMagnitudeUpdateWithScalar(float signPivot, float dScalar, float dMagnitude, EncodeType positive, EncodeType negative, Map& map) + { + if (dScalar != signPivot) + { + auto code = (dScalar > signPivot) ? 
positive : negative; + auto request = map.find(code); + if (request != map.end()) + request->second.event.magnitude += dMagnitude; + } + } + + keyboard_to_virtual_events_t m_keyboardVirtualEventMap; + mouse_to_virtual_events_t m_mouseVirtualEventMap; + imguizmo_to_virtual_events_t m_imguizmoVirtualEventMap; + + size_t m_frameDeltaTime = {}; + std::chrono::microseconds m_nextPresentationTimeStamp = {}, m_lastVirtualUpTimeStamp = {}; +}; + +} // nbl::hlsl namespace + +#endif // _NBL_I_CAMERA_CONTROLLER_HPP_ \ No newline at end of file diff --git a/common/include/camera/ILinearProjection.hpp b/common/include/camera/ILinearProjection.hpp new file mode 100644 index 000000000..fc0115610 --- /dev/null +++ b/common/include/camera/ILinearProjection.hpp @@ -0,0 +1,180 @@ +#ifndef _NBL_I_LINEAR_PROJECTION_HPP_ +#define _NBL_I_LINEAR_PROJECTION_HPP_ + +#include "IProjection.hpp" +#include "ICamera.hpp" + +namespace nbl::hlsl +{ + +/** + * @brief Interface class for any custom linear projection transformation (matrix elements are already evaluated scalars) + * referencing a camera, great for Perspective, Orthographic, Oblique, Axonometric and Shear projections + */ +class ILinearProjection : virtual public core::IReferenceCounted +{ +protected: + ILinearProjection(core::smart_refctd_ptr&& camera) + : m_camera(core::smart_refctd_ptr(camera)) {} + virtual ~ILinearProjection() = default; + + core::smart_refctd_ptr m_camera; +public: + //! underlying type for linear world TRS matrix + using model_matrix_t = typename decltype(m_camera)::pointee::CGimbal::model_matrix_t; + + //! underlying type for linear concatenated matrix + using concatenated_matrix_t = float64_t4x4; + + //! 
underlying type for linear inverse of concatenated matrix + using inv_concatenated_matrix_t = std::optional; + + struct CProjection : public IProjection + { + using IProjection::IProjection; + using projection_matrix_t = concatenated_matrix_t; + using inv_projection_matrix_t = inv_concatenated_matrix_t; + + CProjection() : CProjection(projection_matrix_t(1)) {} + CProjection(const projection_matrix_t& matrix) { setProjectionMatrix(matrix); } + + //! Returns P (Projection matrix) + inline const projection_matrix_t& getProjectionMatrix() const { return m_projectionMatrix; } + + //! Returns P⁻¹ (Inverse of Projection matrix) *if it exists* + inline const inv_projection_matrix_t& getInvProjectionMatrix() const { return m_invProjectionMatrix; } + + inline const std::optional& isProjectionLeftHanded() const { return m_isProjectionLeftHanded; } + inline bool isProjectionSingular() const { return m_isProjectionSingular; } + virtual ProjectionType getProjectionType() const override { return ProjectionType::Linear; } + + virtual void project(const projection_vector_t& vecToProjectionSpace, projection_vector_t& output) const override + { + output = mul(m_projectionMatrix, vecToProjectionSpace); + } + + virtual bool unproject(const projection_vector_t& vecFromProjectionSpace, projection_vector_t& output) const override + { + if (m_isProjectionSingular) + return false; + + output = mul(m_invProjectionMatrix.value(), vecFromProjectionSpace); + + return true; + } + + protected: + inline void setProjectionMatrix(const projection_matrix_t& matrix) + { + m_projectionMatrix = matrix; + const auto det = hlsl::determinant(m_projectionMatrix); + + // we will allow you to lose a dimension since such a projection itself *may* + // be valid, however then you cannot un-project because the inverse doesn't exist! 
+ m_isProjectionSingular = not det; + + if (m_isProjectionSingular) + { + m_isProjectionLeftHanded = std::nullopt; + m_invProjectionMatrix = std::nullopt; + } + else + { + m_isProjectionLeftHanded = det < 0.0; + m_invProjectionMatrix = inverse(m_projectionMatrix); + } + } + + private: + projection_matrix_t m_projectionMatrix; + inv_projection_matrix_t m_invProjectionMatrix; + std::optional m_isProjectionLeftHanded; + bool m_isProjectionSingular; + }; + + virtual std::span getLinearProjections() const = 0; + + inline bool setCamera(core::smart_refctd_ptr&& camera) + { + if (camera) + { + m_camera = camera; + return true; + } + + return false; + } + + inline ICamera* getCamera() + { + return m_camera.get(); + } + + /** + * @brief Computes Model View (MV) matrix + * @param "model" is world TRS matrix + * @return Returns MV matrix + */ + inline concatenated_matrix_t getMV(const model_matrix_t& model) const + { + const auto& v = m_camera->getGimbal().getViewMatrix(); + return mul(getMatrix3x4As4x4(v), getMatrix3x4As4x4(model)); + } + + /** + * @brief Computes Model View Projection (MVP) matrix + * @param "projection" is linear projection + * @param "model" is world TRS matrix + * @return Returns MVP matrix + */ + inline concatenated_matrix_t getMVP(const CProjection& projection, const model_matrix_t& model) const + { + const auto& v = m_camera->getGimbal().getViewMatrix(); + const auto& p = projection.getProjectionMatrix(); + auto mv = mul(getMatrix3x4As4x4(v), getMatrix3x4As4x4(model)); + return mul(p, mv); + } + + /** + * @brief Computes Model View Projection (MVP) matrix + * @param "projection" is linear projection + * @param "mv" is Model View (MV) matrix + * @return Returns MVP matrix + */ + inline concatenated_matrix_t getMVP(const CProjection& projection, const concatenated_matrix_t& mv) const + { + const auto& p = projection.getProjectionMatrix(); + return mul(p, mv); + } + + /** + * @brief Computes Inverse of Model View ((MV)⁻¹) matrix + * @param "mv" is Model 
View (MV) matrix + * @return Returns ((MV)⁻¹) matrix *if it exists*, otherwise returns std::nullopt + */ + inline inv_concatenated_matrix_t getMVInverse(const model_matrix_t& model) const + { + const auto mv = getMV(model); + if (auto det = determinant(mv); det) + return inverse(mv); + return std::nullopt; + } + + /** + * @brief Computes Inverse of Model View Projection ((MVP)⁻¹) matrix + * @param "projection" is linear projection + * @param "model" is world TRS matrix + * @return Returns ((MVP)⁻¹) matrix *if it exists*, otherwise returns std::nullopt + */ + inline inv_concatenated_matrix_t getMVPInverse(const CProjection& projection, const model_matrix_t& model) const + { + const auto mvp = getMVP(projection, model); + if (auto det = determinant(mvp); det) + return inverse(mvp); + return std::nullopt; + } +}; + +} // nbl::hlsl namespace + +#endif // _NBL_I_LINEAR_PROJECTION_HPP_ \ No newline at end of file diff --git a/common/include/camera/IPerspectiveProjection.hpp b/common/include/camera/IPerspectiveProjection.hpp new file mode 100644 index 000000000..479db170e --- /dev/null +++ b/common/include/camera/IPerspectiveProjection.hpp @@ -0,0 +1,62 @@ +#ifndef _NBL_I_QUAD_PROJECTION_HPP_ +#define _NBL_I_QUAD_PROJECTION_HPP_ + +#include "ILinearProjection.hpp" + +namespace nbl::hlsl +{ + +/** +* @brief Interface class for quad projections. +* +* This projection transforms a vector into the **model space of a perspective quad** +* (defined by the pre-transform matrix) and then projects it onto the perspective quad +* using the linear view-port transform. +* +* A perspective quad projection is represented by: +* - A **pre-transform matrix** (non-linear/skewed transformation). +* - A **linear view-port transform matrix**. +* +* The final projection matrix is the concatenation of the pre-transform and the linear view-port transform. +* +* @note Single perspective quad projection can represent a face quad of a CAVE-like system. 
+*/ +class IPerspectiveProjection : public ILinearProjection +{ +public: + struct CProjection : ILinearProjection::CProjection + { + using base_t = ILinearProjection::CProjection; + + CProjection() = default; + CProjection(const ILinearProjection::model_matrix_t& pretransform, ILinearProjection::concatenated_matrix_t viewport) + { + setQuadTransform(pretransform, viewport); + } + + inline void setQuadTransform(const ILinearProjection::model_matrix_t& pretransform, ILinearProjection::concatenated_matrix_t viewport) + { + auto concatenated = mul(getMatrix3x4As4x4(pretransform), viewport); + base_t::setProjectionMatrix(concatenated); + + m_pretransform = pretransform; + m_viewport = viewport; + } + + inline const ILinearProjection::model_matrix_t& getPretransform() const { return m_pretransform; } + inline const ILinearProjection::concatenated_matrix_t& getViewportProjection() const { return m_viewport; } + + private: + ILinearProjection::model_matrix_t m_pretransform = ILinearProjection::model_matrix_t(1); + ILinearProjection::concatenated_matrix_t m_viewport = ILinearProjection::concatenated_matrix_t(1); + }; + +protected: + IPerspectiveProjection(core::smart_refctd_ptr&& camera) + : ILinearProjection(core::smart_refctd_ptr(camera)) {} + virtual ~IPerspectiveProjection() = default; +}; + +} // nbl::hlsl namespace + +#endif // _NBL_I_QUAD_PROJECTION_HPP_ \ No newline at end of file diff --git a/common/include/camera/IPlanarProjection.hpp b/common/include/camera/IPlanarProjection.hpp new file mode 100644 index 000000000..1fccf0ef4 --- /dev/null +++ b/common/include/camera/IPlanarProjection.hpp @@ -0,0 +1,120 @@ +#ifndef _NBL_I_PLANAR_PROJECTION_HPP_ +#define _NBL_I_PLANAR_PROJECTION_HPP_ + +#include "ILinearProjection.hpp" + +namespace nbl::hlsl +{ + +class IPlanarProjection : public ILinearProjection +{ +public: + struct CProjection : public ILinearProjection::CProjection, public IGimbalController + { + using base_t = ILinearProjection::CProjection; + + enum 
ProjectionType : uint8_t + { + Perspective, + Orthographic, + + Count + }; + + template + static CProjection create(Args&&... args) + requires (T != Count) + { + CProjection output; + + if constexpr (T == Perspective) output.setPerspective(std::forward(args)...); + else if (T == Orthographic) output.setOrthographic(std::forward(args)...); + + return output; + } + + CProjection(const CProjection& other) = default; + CProjection(CProjection&& other) noexcept = default; + + struct ProjectionParameters + { + ProjectionType m_type; + + union PlanarParameters + { + struct + { + float fov; + } perspective; + + struct + { + float orthoWidth; + } orthographic; + + PlanarParameters() {} + ~PlanarParameters() {} + } m_planar; + + float m_zNear; + float m_zFar; + }; + + inline void update(bool leftHanded, float aspectRatio) + { + switch (m_parameters.m_type) + { + case Perspective: + { + const auto& fov = m_parameters.m_planar.perspective.fov; + + if (leftHanded) + base_t::setProjectionMatrix(buildProjectionMatrixPerspectiveFovLH(glm::radians(fov), aspectRatio, m_parameters.m_zNear, m_parameters.m_zFar)); + else + base_t::setProjectionMatrix(buildProjectionMatrixPerspectiveFovRH(glm::radians(fov), aspectRatio, m_parameters.m_zNear, m_parameters.m_zFar)); + } break; + + case Orthographic: + { + const auto& orthoW = m_parameters.m_planar.orthographic.orthoWidth; + const auto viewHeight = orthoW * core::reciprocal(aspectRatio); + + if (leftHanded) + base_t::setProjectionMatrix(buildProjectionMatrixOrthoLH(orthoW, viewHeight, m_parameters.m_zNear, m_parameters.m_zFar)); + else + base_t::setProjectionMatrix(buildProjectionMatrixOrthoRH(orthoW, viewHeight, m_parameters.m_zNear, m_parameters.m_zFar)); + } break; + } + } + + inline void setPerspective(float zNear = 0.1f, float zFar = 100.f, float fov = 60.f) + { + m_parameters.m_type = Perspective; + m_parameters.m_planar.perspective.fov = fov; + m_parameters.m_zNear = zNear; + m_parameters.m_zFar = zFar; + } + + inline void 
setOrthographic(float zNear = 0.1f, float zFar = 100.f, float orthoWidth = 10.f) + { + m_parameters.m_type = Orthographic; + m_parameters.m_planar.orthographic.orthoWidth = orthoWidth; + m_parameters.m_zNear = zNear; + m_parameters.m_zFar = zFar; + } + + inline const ProjectionParameters& getParameters() const { return m_parameters; } + private: + CProjection() = default; + ProjectionParameters m_parameters; + }; + +protected: + IPlanarProjection(core::smart_refctd_ptr&& camera) + : ILinearProjection(core::smart_refctd_ptr(camera)) {} + virtual ~IPlanarProjection() = default; +}; + +} // nbl::hlsl namespace + +#endif // _NBL_I_PLANAR_PROJECTION_HPP_ \ No newline at end of file diff --git a/common/include/camera/IProjection.hpp b/common/include/camera/IProjection.hpp new file mode 100644 index 000000000..cb6facdcc --- /dev/null +++ b/common/include/camera/IProjection.hpp @@ -0,0 +1,70 @@ +#ifndef _NBL_I_PROJECTION_HPP_ +#define _NBL_I_PROJECTION_HPP_ + +#include + +namespace nbl::hlsl +{ + +//! Interface class for any type of projection +class IProjection +{ +public: + //! underlying type for all vectors we project or un-project (inverse), projections *may* transform vectors in less dimensions + using projection_vector_t = float64_t4; + + enum class ProjectionType + { + //! Any raw linear transformation, for example it may represent Perspective, Orthographic, Oblique, Axonometric, Shear projections + Linear, + + //! Specialized linear projection for planar projections with parameters + Planar, + + //! Extension of planar projection represented by pre-transform & planar transform combined projecting onto R3 cave quad + CaveQuad, + + //! Specialized CaveQuad projection, represents planar projections onto cube with 6 quad cube faces + Cube, + + Spherical, + ThinLens, + + Count + }; + + IProjection() = default; + virtual ~IProjection() = default; + + /** + * @brief Transforms a vector from its input space into the projection space. 
+ * + * @param "vecToProjectionSpace" is a vector to transform from its space into projection space. + * @param "output" is a vector which is "vecToProjectionSpace" transformed into projection space. + * @return void. "output" is the vector in projection space. + */ + virtual void project(const projection_vector_t& vecToProjectionSpace, projection_vector_t& output) const = 0; + + /** + * @brief Transforms a vector from the projection space back to the original space. + * Note the inverse transform may fail because original projection may be singular. + * + * @param "vecFromProjectionSpace" is a vector in the projection space to transform back to original space. + * @param "output" is a vector which is "vecFromProjectionSpace" transformed back to its original space. + * @return true if inverse succeeded and then "output" is the vector in the original space. False otherwise. + */ + virtual bool unproject(const projection_vector_t& vecFromProjectionSpace, projection_vector_t& output) const = 0; + + /** + * @brief Returns the specific type of the projection + * (e.g., linear, spherical, thin-lens) as defined by the + * ProjectionType enumeration. + * + * @return The type of this projection. 
+ */ + virtual ProjectionType getProjectionType() const = 0; +}; + +} // namespace nbl::hlsl + +#endif // _NBL_I_PROJECTION_HPP_ \ No newline at end of file diff --git a/common/include/camera/IRange.hpp b/common/include/camera/IRange.hpp new file mode 100644 index 000000000..a6ed29270 --- /dev/null +++ b/common/include/camera/IRange.hpp @@ -0,0 +1,20 @@ +#ifndef _NBL_IRANGE_HPP_ +#define _NBL_IRANGE_HPP_ + +namespace nbl::hlsl +{ + +template +concept GeneralPurposeRange = requires +{ + typename std::ranges::range_value_t; +}; + +template +concept ContiguousGeneralPurposeRangeOf = GeneralPurposeRange && +std::ranges::contiguous_range && +std::same_as, T>; + +} // namespace nbl::hlsl + +#endif // _NBL_IRANGE_HPP_ \ No newline at end of file diff --git a/common/include/nbl/examples/cameras/CCamera.hpp b/common/include/nbl/examples/cameras/CCamera.hpp index 221e8d42d..2c178edb4 100644 --- a/common/include/nbl/examples/cameras/CCamera.hpp +++ b/common/include/nbl/examples/cameras/CCamera.hpp @@ -14,7 +14,7 @@ #include #include -#include +#include class Camera {