From e9abec1382673a5b312ccfd7301bd0d4edbc5fc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Cerveau?= Date: Thu, 24 Apr 2025 16:59:56 +0200 Subject: [PATCH] Logger: add a log system instead of std::cout or printf MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add a logger with level from ERROR to DEBUG Signed-off-by: Stéphane Cerveau --- common/include/Logger.h | 124 +++++++++++++ .../include/VkVideoCore/VkVideoCoreProfile.h | 43 ++--- .../VkVideoCore/VulkanVideoCapabilities.h | 91 +++++----- common/libs/VkCodecUtils/DecoderConfig.h | 44 +++-- common/libs/VkCodecUtils/FrameProcessor.h | 25 ++- common/libs/VkCodecUtils/Helpers.h | 19 +- .../VkCodecUtils/VulkanBistreamBufferImpl.cpp | 2 +- .../VkCodecUtils/VulkanBistreamBufferImpl.h | 2 +- .../VkCodecUtils/VulkanComputePipeline.cpp | 2 +- .../libs/VkCodecUtils/VulkanDeviceContext.cpp | 78 ++++---- .../VkCodecUtils/VulkanFilterYuvCompute.cpp | 12 +- common/libs/VkCodecUtils/VulkanFrame.cpp | 69 +++---- .../VkCodecUtils/VulkanShaderCompiler.cpp | 6 +- .../VkCodecUtils/VulkanVideoProcessor.cpp | 37 ++-- common/libs/VkCodecUtils/VulkanVideoUtils.cpp | 27 +-- common/libs/VkShell/Shell.h | 2 - common/libs/VkShell/ShellXcb.cpp | 6 +- .../libs/VkDecoderUtils/FFmpegDemuxer.cpp | 51 +++--- .../libs/VkVideoDecoder/VkVideoDecoder.cpp | 82 ++++----- .../libs/VkVideoDecoder/VkVideoDecoder.h | 8 +- .../libs/VkVideoParser/VulkanVideoParser.cpp | 168 +++++++++--------- .../VulkanVideoFrameBuffer.cpp | 12 +- vk_video_encoder/demos/vk-video-enc/Main.cpp | 12 +- .../libs/VkVideoEncoder/VkEncoderConfig.cpp | 156 ++++++++-------- .../libs/VkVideoEncoder/VkEncoderConfig.h | 44 ++--- .../VkVideoEncoder/VkEncoderConfigAV1.cpp | 30 ++-- .../VkVideoEncoder/VkEncoderConfigH264.cpp | 22 +-- .../VkVideoEncoder/VkEncoderConfigH265.cpp | 51 +++--- .../libs/VkVideoEncoder/VkEncoderDpbH265.cpp | 9 +- .../libs/VkVideoEncoder/VkVideoEncoder.cpp | 122 ++++++------- 
.../libs/VkVideoEncoder/VkVideoEncoderAV1.cpp | 22 +-- .../VkVideoEncoder/VkVideoEncoderH264.cpp | 14 +- .../VkVideoEncoder/VkVideoEncoderH265.cpp | 16 +- .../VkVideoEncoder/VkVideoGopStructure.cpp | 27 +-- vk_video_encoder/src/vulkan_video_encoder.cpp | 10 +- .../test/vulkan-video-enc/Main.cpp | 9 +- 36 files changed, 791 insertions(+), 663 deletions(-) create mode 100644 common/include/Logger.h diff --git a/common/include/Logger.h b/common/include/Logger.h new file mode 100644 index 00000000..850ba414 --- /dev/null +++ b/common/include/Logger.h @@ -0,0 +1,124 @@ +/* +* Copyright (C) 2025 Igalia, S.L. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ +#ifndef _LOGGER_H_ +#define _LOGGER_H_ + +#include <iostream> +#include <cstdio> +#include <cstdarg> +#include <string> + +// Enum for log levels +enum LogLevel { + LOG_NONE = 0, // Use this to disable logging + LOG_ERROR, + LOG_WARNING, + LOG_INFO, + LOG_DEBUG }; + +#define LOG_S_DEBUG Logger::instance()(LogLevel::LOG_DEBUG) +#define LOG_S_INFO Logger::instance()(LogLevel::LOG_INFO) +#define LOG_S_WARN Logger::instance()(LogLevel::LOG_WARNING) +#define LOG_S_ERROR Logger::instance()(LogLevel::LOG_ERROR) + +#define LOG_CAT_LEVEL(LEVEL, CAT, FMT, ...) Logger::instance().printf(LEVEL, FMT, ##__VA_ARGS__) + + +#define LOG_DEBUG_CAT(CAT, FMT, ...) LOG_CAT_LEVEL(LogLevel::LOG_DEBUG, CAT, FMT, ##__VA_ARGS__) +#define LOG_INFO_CAT(CAT, FMT, ...) LOG_CAT_LEVEL(LogLevel::LOG_INFO, CAT, FMT, ##__VA_ARGS__) +#define LOG_WARN_CAT(CAT, FMT, ...) 
LOG_CAT_LEVEL(LogLevel::LOG_WARNING, CAT, FMT, ##__VA_ARGS__) +#define LOG_ERROR_CAT(CAT, FMT, ...) LOG_CAT_LEVEL(LogLevel::LOG_ERROR, CAT, FMT, ##__VA_ARGS__) + +#define LOG_DEBUG(FMT, ...) LOG_DEBUG_CAT("", FMT, ##__VA_ARGS__) +#define LOG_INFO(FMT, ...) LOG_INFO_CAT("", FMT, ##__VA_ARGS__) +#define LOG_WARN( FMT,...) LOG_WARN_CAT("", FMT, ##__VA_ARGS__) +#define LOG_ERROR( FMT,...) LOG_ERROR_CAT("", FMT, ##__VA_ARGS__) + +#define LOG_DEBUG_CONFIG( FMT, ...) LOG_DEBUG_CAT("config:\t", FMT,##__VA_ARGS__) +#define LOG_INFO_CONFIG( FMT,...) LOG_INFO_CAT("config:\t", FMT,##__VA_ARGS__) +#define LOG_WARN_CONFIG( FMT,...) LOG_WARN_CAT("config:\t", FMT,##__VA_ARGS__) +#define LOG_ERROR_CONFIG( FMT,...) LOG_ERROR_CAT("config:\t", FMT,##__VA_ARGS__) + +class Logger { +private: + std::ostream& os; // The output stream (e.g., std::cout or std::ofstream) + std::ostream& err; // The error stream (e.g., std::cerr) + LogLevel currentLevel; // Current log level + LogLevel messageLevel; // The log level for the current message + +public: + static Logger &instance () + { + static Logger instance; + return instance; + } + // Constructor to set the output stream and log level (default is INFO) + Logger(std::ostream& outStream = std::cout, std::ostream& errStream = std::cerr, LogLevel level = LogLevel::LOG_INFO) + : os(outStream), err(errStream), currentLevel(level), messageLevel(LogLevel::LOG_INFO) {} + + // Set the log level for the logger + void setLogLevel(int level) { + if (level > LOG_DEBUG) + level = LOG_DEBUG; + currentLevel = static_cast<LogLevel>(level); + } + + // Set the log level for the current message + Logger& operator()(LogLevel level) { + messageLevel = level; + return *this; + } + + // Overload the << operator for generic types + template <typename T> + Logger& operator<<(const T& data) { + if (messageLevel <= currentLevel) { + if (messageLevel == LOG_ERROR) + err << data; + else + os << data; + } + return *this; + } + + // Overload for stream manipulators (like std::endl) + typedef 
std::ostream& (*StreamManipulator)(std::ostream&); + Logger& operator<<(StreamManipulator manip) { + if (messageLevel <= currentLevel) { + if (messageLevel == LOG_ERROR) + err << manip; + else + os << manip; // Handle std::endl, std::flush, etc. + } + return *this; + } + + void printf(LogLevel level, const char* format, ...) { + if (level <= currentLevel) { + va_list args; + va_start(args, format); + if (level == LOG_ERROR) + vfprintf(stderr, format, args); + else + vfprintf(stdout,format, args); + va_end(args); + } + } +}; + + +#endif diff --git a/common/include/VkVideoCore/VkVideoCoreProfile.h b/common/include/VkVideoCore/VkVideoCoreProfile.h index 7483d8ce..d328c69a 100644 --- a/common/include/VkVideoCore/VkVideoCoreProfile.h +++ b/common/include/VkVideoCore/VkVideoCoreProfile.h @@ -26,6 +26,7 @@ #include "vulkan/vulkan.h" #include "nvidia_utils/vulkan/ycbcr_utils.h" +#include "Logger.h" typedef enum StdChromaFormatIdc { chroma_format_idc_monochrome = STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME, @@ -621,38 +622,38 @@ class VkVideoCoreProfile { // formatProfile info based on supported chroma_format_idc if (pVideoProfile->chromaSubsampling & VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR) { - std::cout << "MONO, "; + LOG_S_DEBUG << "MONO, "; } if (pVideoProfile->chromaSubsampling & VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR) { - std::cout << " 420, "; + LOG_S_DEBUG << " 420, "; } if (pVideoProfile->chromaSubsampling & VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR) { - std::cout << " 422, "; + LOG_S_DEBUG << " 422, "; } if (pVideoProfile->chromaSubsampling & VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR) { - std::cout << " 444, "; + LOG_S_DEBUG << " 444, "; } // Profile info based on max bit_depth_luma_minus8 if (pVideoProfile->lumaBitDepth & VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR) { - std::cout << "LUMA: 8-bit, "; + LOG_S_DEBUG << "LUMA: 8-bit, "; } if (pVideoProfile->lumaBitDepth & VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR) { - std::cout << "LUMA: 10-bit, "; + LOG_S_DEBUG << 
"LUMA: 10-bit, "; } if (pVideoProfile->lumaBitDepth & VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR) { - std::cout << "LUMA: 12-bit, "; + LOG_S_DEBUG << "LUMA: 12-bit, "; } // Profile info based on max bit_depth_chroma_minus8 if (pVideoProfile->chromaBitDepth & VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR) { - std::cout << "CHROMA: 8-bit, "; + LOG_S_DEBUG << "CHROMA: 8-bit, "; } if (pVideoProfile->chromaBitDepth & VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR) { - std::cout << "CHROMA:10-bit, "; + LOG_S_DEBUG << "CHROMA:10-bit, "; } if (pVideoProfile->chromaBitDepth & VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR) { - std::cout << "CHROMA:12-bit,"; + LOG_S_DEBUG << "CHROMA:12-bit,"; } } @@ -660,19 +661,19 @@ class VkVideoCoreProfile { switch (pH264Profiles->stdProfileIdc) { case STD_VIDEO_H264_PROFILE_IDC_BASELINE: - std::cout << "BASELINE, "; + LOG_S_DEBUG << "BASELINE, "; break; case STD_VIDEO_H264_PROFILE_IDC_MAIN: - std::cout << "MAIN, "; + LOG_S_DEBUG << "MAIN, "; break; case STD_VIDEO_H264_PROFILE_IDC_HIGH: - std::cout << "HIGH, "; + LOG_S_DEBUG << "HIGH, "; break; case STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE: - std::cout << "HIGH_444_PREDICTIVE, "; + LOG_S_DEBUG << "HIGH_444_PREDICTIVE, "; break; default: - std::cout << "UNKNOWN PROFILE, "; + LOG_S_DEBUG << "UNKNOWN PROFILE, "; break; } } @@ -681,22 +682,22 @@ class VkVideoCoreProfile { switch (pH265Profiles->stdProfileIdc) { case STD_VIDEO_H265_PROFILE_IDC_MAIN: - std::cout << "MAIN, "; + LOG_S_DEBUG << "MAIN, "; break; case STD_VIDEO_H265_PROFILE_IDC_MAIN_10: - std::cout << "MAIN_10, "; + LOG_S_DEBUG << "MAIN_10, "; break; case STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE: - std::cout << "MAIN_STILL_PICTURE, "; + LOG_S_DEBUG << "MAIN_STILL_PICTURE, "; break; case STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS: - std::cout << "FORMAT_RANGE_EXTENSIONS, "; + LOG_S_DEBUG << "FORMAT_RANGE_EXTENSIONS, "; break; case STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS: - std::cout << "SCC_EXTENSIONS, "; + LOG_S_DEBUG << 
"SCC_EXTENSIONS, "; break; default: - std::cout << "UNKNOWN PROFILE, "; + LOG_S_DEBUG << "UNKNOWN PROFILE, "; break; } } diff --git a/common/include/VkVideoCore/VulkanVideoCapabilities.h b/common/include/VkVideoCore/VulkanVideoCapabilities.h index 549508a6..0416474f 100644 --- a/common/include/VkVideoCore/VulkanVideoCapabilities.h +++ b/common/include/VkVideoCore/VulkanVideoCapabilities.h @@ -22,6 +22,7 @@ #include "VkCodecUtils/VulkanDeviceContext.h" #include "VkCodecUtils/Helpers.h" #include "VkVideoCore/VkVideoCoreProfile.h" +#include "Logger.h" class VulkanVideoCapabilities { @@ -52,7 +53,7 @@ class VulkanVideoCapabilities VkResult result = GetVideoCapabilities(vkDevCtx, videoProfile, &videoCapabilities); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: Input is not supported. GetVideoCapabilities() result: 0x%x\n", result); + LOG_ERROR("ERROR: Input is not supported. GetVideoCapabilities() result: 0x%x\n", result); } return result; } @@ -76,7 +77,7 @@ class VulkanVideoCapabilities VkResult result = GetVideoCapabilities(vkDevCtx, videoProfile, &videoCapabilities); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: Input is not supported. GetVideoCapabilities() result: 0x%x\n", result); + LOG_ERROR("ERROR: Input is not supported. 
GetVideoCapabilities() result: 0x%x\n", result); } return result; } @@ -118,13 +119,13 @@ class VulkanVideoCapabilities pictureFormat = supportedOutFormats[0]; } else { - fprintf(stderr, "\nERROR: Unsupported decode capability flags."); + LOG_ERROR("ERROR: Unsupported decode capability flags."); return VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR; } assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: GetVideoFormats() result: 0x%x\n", result); + LOG_ERROR("ERROR: GetVideoFormats() result: 0x%x\n", result); } assert((referencePicturesFormat != VK_FORMAT_UNDEFINED) && (pictureFormat != VK_FORMAT_UNDEFINED)); @@ -218,46 +219,45 @@ class VulkanVideoCapabilities return result; } - if (dumpData) { - std::cout << "\t\t\t" << ((videoProfile.GetCodecType() == VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR) ? "h264" : "h265") << "decode capabilities: " << std::endl; - if (pVideoCapabilities->flags & VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR) { - std::cout << "\t\t\t" << "Use separate reference images" << std::endl; - } + LOG_S_ERROR << "\t\t\t" << videoProfile.CodecToName(videoProfile.GetCodecType()) << " capabilities: " << std::endl; - std::cout << "\t\t\t" << "minBitstreamBufferOffsetAlignment: " << pVideoCapabilities->minBitstreamBufferOffsetAlignment << std::endl; - std::cout << "\t\t\t" << "minBitstreamBufferSizeAlignment: " << pVideoCapabilities->minBitstreamBufferSizeAlignment << std::endl; - std::cout << "\t\t\t" << "pictureAccessGranularity: " << pVideoCapabilities->pictureAccessGranularity.width << " x " << pVideoCapabilities->pictureAccessGranularity.height << std::endl; - std::cout << "\t\t\t" << "minCodedExtent: " << pVideoCapabilities->minCodedExtent.width << " x " << pVideoCapabilities->minCodedExtent.height << std::endl; - std::cout << "\t\t\t" << "maxCodedExtent: " << pVideoCapabilities->maxCodedExtent.width << " x " << pVideoCapabilities->maxCodedExtent.height << std::endl; - std::cout << "\t\t\t" << 
"maxDpbSlots: " << pVideoCapabilities->maxDpbSlots << std::endl; - std::cout << "\t\t\t" << "maxActiveReferencePictures: " << pVideoCapabilities->maxActiveReferencePictures << std::endl; - - if (videoProfile.GetCodecType() == VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR) { - const VkVideoDecodeH264CapabilitiesKHR* pH264DecCapabilities = (VkVideoDecodeH264CapabilitiesKHR*)pVideoDecodeCapabilities->pNext; - std::cout << "\t\t\t" << "maxLevelIdc: " << pH264DecCapabilities->maxLevelIdc << std::endl; - std::cout << "\t\t\t" << "fieldOffsetGranularity: " << pH264DecCapabilities->fieldOffsetGranularity.x << " x " << pH264DecCapabilities->fieldOffsetGranularity.y << std::endl; - - if (strncmp(pVideoCapabilities->stdHeaderVersion.extensionName, - VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME, - sizeof (pVideoCapabilities->stdHeaderVersion.extensionName) - 1U) || - (pVideoCapabilities->stdHeaderVersion.specVersion != VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION)) { - assert(!"Unsupported h.264 STD version"); - return VK_ERROR_INCOMPATIBLE_DRIVER; - } - } else if (videoProfile.GetCodecType() == VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR) { - const VkVideoDecodeH265CapabilitiesKHR* pH265DecCapabilities = (VkVideoDecodeH265CapabilitiesKHR*)pVideoDecodeCapabilities->pNext; - std::cout << "\t\t\t" << "maxLevelIdc: " << pH265DecCapabilities->maxLevelIdc << std::endl; - if (strncmp(pVideoCapabilities->stdHeaderVersion.extensionName, - VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME, - sizeof (pVideoCapabilities->stdHeaderVersion.extensionName) - 1U) || - (pVideoCapabilities->stdHeaderVersion.specVersion != VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION)) { - assert(!"Unsupported h.265 STD version"); - return VK_ERROR_INCOMPATIBLE_DRIVER; - } - } else { - assert(!"Unsupported codec"); + if (pVideoCapabilities->flags & VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR) { + LOG_S_DEBUG << "\t\t\t" << "Use separate reference images" << std::endl; + } + + 
LOG_S_DEBUG << "\t\t\t" << "minBitstreamBufferOffsetAlignment: " << pVideoCapabilities->minBitstreamBufferOffsetAlignment << std::endl; + LOG_S_DEBUG << "\t\t\t" << "minBitstreamBufferSizeAlignment: " << pVideoCapabilities->minBitstreamBufferSizeAlignment << std::endl; + LOG_S_DEBUG << "\t\t\t" << "pictureAccessGranularity: " << pVideoCapabilities->pictureAccessGranularity.width << " x " << pVideoCapabilities->pictureAccessGranularity.height << std::endl; + LOG_S_DEBUG << "\t\t\t" << "minCodedExtent: " << pVideoCapabilities->minCodedExtent.width << " x " << pVideoCapabilities->minCodedExtent.height << std::endl; + LOG_S_DEBUG << "\t\t\t" << "maxCodedExtent: " << pVideoCapabilities->maxCodedExtent.width << " x " << pVideoCapabilities->maxCodedExtent.height << std::endl; + LOG_S_DEBUG << "\t\t\t" << "maxDpbSlots: " << pVideoCapabilities->maxDpbSlots << std::endl; + LOG_S_DEBUG << "\t\t\t" << "maxActiveReferencePictures: " << pVideoCapabilities->maxActiveReferencePictures << std::endl; + + if (videoProfile.GetCodecType() == VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR) { + const VkVideoDecodeH264CapabilitiesKHR* pH264DecCapabilities = (VkVideoDecodeH264CapabilitiesKHR*)pVideoDecodeCapabilities->pNext; + LOG_S_DEBUG << "\t\t\t" << "maxLevelIdc: " << pH264DecCapabilities->maxLevelIdc << std::endl; + LOG_S_DEBUG << "\t\t\t" << "fieldOffsetGranularity: " << pH264DecCapabilities->fieldOffsetGranularity.x << " x " << pH264DecCapabilities->fieldOffsetGranularity.y << std::endl; + + if (strncmp(pVideoCapabilities->stdHeaderVersion.extensionName, + VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME, + sizeof (pVideoCapabilities->stdHeaderVersion.extensionName) - 1U) || + (pVideoCapabilities->stdHeaderVersion.specVersion != VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION)) { + assert(!"Unsupported h.264 STD version"); + return VK_ERROR_INCOMPATIBLE_DRIVER; } + } else if (videoProfile.GetCodecType() == VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR) { + const 
VkVideoDecodeH265CapabilitiesKHR* pH265DecCapabilities = (VkVideoDecodeH265CapabilitiesKHR*)pVideoDecodeCapabilities->pNext; + LOG_S_DEBUG << "\t\t\t" << "maxLevelIdc: " << pH265DecCapabilities->maxLevelIdc << std::endl; + if (strncmp(pVideoCapabilities->stdHeaderVersion.extensionName, + VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME, + sizeof (pVideoCapabilities->stdHeaderVersion.extensionName) - 1U) || + (pVideoCapabilities->stdHeaderVersion.specVersion != VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION)) { + assert(!"Unsupported h.265 STD version"); + return VK_ERROR_INCOMPATIBLE_DRIVER; + } + } else { + assert(!"Unsupported codec"); } return result; @@ -300,11 +300,10 @@ class VulkanVideoCapabilities result = vkDevCtx->GetPhysicalDeviceVideoFormatPropertiesKHR(vkDevCtx->getPhysicalDevice(), &videoFormatInfo, &supportedFormatCount, pSupportedFormats); assert(result == VK_SUCCESS); - if (dumpData) { - std::cout << "\t\t\t" << ((videoProfile.GetCodecType() == VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR) ? "h264" : "h265") << "decode formats: " << std::endl; - for (uint32_t fmt = 0; fmt < supportedFormatCount; fmt++) { - std::cout << "\t\t\t " << fmt << ": " << std::hex << pSupportedFormats[fmt].format << std::dec << std::endl; - } + + LOG_S_DEBUG << "\t\t\t" << ((videoProfile.GetCodecType() == VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR) ? 
"h264" : "h265") << "decode formats: " << std::endl; + for (uint32_t fmt = 0; fmt < supportedFormatCount; fmt++) { + LOG_S_DEBUG << "\t\t\t " << fmt << ": " << std::hex << pSupportedFormats[fmt].format << std::dec << std::endl; } formatCount = std::min(supportedFormatCount, formatCount); diff --git a/common/libs/VkCodecUtils/DecoderConfig.h b/common/libs/VkCodecUtils/DecoderConfig.h index 0f9819b7..95ec8fc4 100644 --- a/common/libs/VkCodecUtils/DecoderConfig.h +++ b/common/libs/VkCodecUtils/DecoderConfig.h @@ -29,6 +29,7 @@ #include #include #include "vulkan_interfaces.h" +#include "Logger.h" struct DecoderConfig { @@ -111,6 +112,12 @@ struct DecoderConfig { exit(EXIT_SUCCESS); return rtn; }}, + {"--logLevel", "-l", 1, "Set the log level", + [this](const char **args, const ProgramArgs &a) { + int logLevel = std::atoi(args[0]); + Logger::instance().setLogLevel(logLevel); + return true; + }}, {"--disableStrDemux", nullptr, 0, "Disable stream demuxing", [this](const char **, const ProgramArgs &a) { enableStreamDemuxing = false; @@ -130,7 +137,7 @@ struct DecoderConfig { forceParserType = VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR; return true; } else { - std::cerr << "Invalid codec \"" << args[0] << "\"" << std::endl; + LOG_ERROR_CONFIG ("Invalid codec \" %s \"", args[0]); return false; } }}, @@ -227,7 +234,7 @@ struct DecoderConfig { [this](const char **args, const ProgramArgs &a) { loopCount = std::atoi(args[0]); if (loopCount < 0) { - std::cerr << "Loop count must not be negative" << std::endl; + LOG_ERROR_CONFIG("Loop count must not be negative"); return false; } return true; @@ -241,9 +248,9 @@ struct DecoderConfig { {"--queueid", nullptr, 1, "Index of the decoder queue to be used", [this](const char **args, const ProgramArgs &a) { queueId = std::atoi(args[0]); - std::cout << queueId << std::endl; + LOG_S_DEBUG << queueId << std::endl; if (queueId < 0) { - std::cerr << "queueid must not be negative" << std::endl; + LOG_ERROR_CONFIG("queueid must not be 
negative"); return false; } return true; @@ -257,11 +264,7 @@ struct DecoderConfig { [this](const char **args, const ProgramArgs &a) { size_t size = SetHexDeviceUUID(args[0]); if (size != VK_UUID_SIZE) { - std::cerr << "Invalid deviceUuid format used: " << args[0] - << " with size: " << strlen(args[0]) - << std::endl; - std::cerr << "deviceUuid must be represented by 16 hex (32 bytes) values." - << std::endl; + LOG_ERROR_CONFIG("Invalid deviceUuid format used: %s with size: %d. deviceUuid must be represented by 16 hex (32 bytes) values.", args[0], strlen(args[0])); return false; } return true; @@ -308,8 +311,7 @@ struct DecoderConfig { char* endPtr = NULL; uint32_t initValue = strtoul(token.c_str(), &endPtr, 16); if ((endPtr == NULL) || (*endPtr != 0)) { - std::cerr << "Failed to parse the following initial CRC value:" - << token << std::endl; + LOG_ERROR_CONFIG("Failed to parse the following initial CRC value: %s", token.c_str()); return false; } @@ -327,21 +329,20 @@ struct DecoderConfig { (a.short_flag != nullptr && strcmp(argv[i], a.short_flag) == 0); }); if (flag == spec.end()) { - std::cerr << "Unknown argument \"" << argv[i] << "\"" << std::endl; - std::cout << std::endl; + LOG_ERROR_CONFIG("Unknown argument \" %s \"", argv[i]); showHelp(argv, spec); exit(EXIT_FAILURE); } if (i + flag->numArgs >= argc) { - std::cerr << "Missing arguments for \"" << argv[i] << "\"" << std::endl; + LOG_ERROR_CONFIG("Missing arguments for \" %s \"", argv[i]); exit(EXIT_FAILURE); } bool disableValueCheck = false; if (i + 1 < argc && strcmp(argv[i + 1], "--") == 0) { if (i + 1 + flag->numArgs >= argc) { - std::cerr << "Missing arguments for \"" << argv[i] << "\"" << std::endl; + LOG_ERROR_CONFIG("Missing arguments for \" %s \"", argv[i]); exit(EXIT_FAILURE); } disableValueCheck = true; @@ -353,11 +354,9 @@ struct DecoderConfig { if (!disableValueCheck) { for (int j = 1; j <= flag->numArgs; j++) { if (argv[i + j][0] == '-') { - std::cerr << "Invalid value \"" << argv[i + j] << "\" 
for \"" << argv[i] << "\" " - "(we don't allow values starting with `-` by default). You probably missed to " - "set a value for \"" << argv[i] << "\"." << std::endl; - std::cerr << "Use \"-- " << argv[i + j] << "\" if you meant to set \"" << argv[i + j] - << "\" for \"" << argv[i] << "\"." << std::endl; + LOG_ERROR_CONFIG("Invalid value \" %s \" for \" %s \"(we don't allow values starting with `-` by default). You probably missed to " + "set a value for \" %s", argv[i + j], argv[i], argv[i]); + LOG_ERROR_CONFIG("Use \"--%s\" if you meant to set \"%s\" for \"%s\".", argv[i + j], argv[i + j], argv[i]); exit(EXIT_FAILURE); } } @@ -374,9 +373,8 @@ struct DecoderConfig { if (((outputcrcPerFrame != 0) || (outputcrc != 0))) { if (crcInitValue.empty() != false) { if (outputFileName.empty() != false) { - std::cerr << "Please specify -o if you intend to use CRC calculation, CRC calculation requires HOST accessible memory." - "Host accessible linear images requires an extra copy at the moment." - << std::endl; + LOG_ERROR_CONFIG("Please specify -o if you intend to use CRC calculation, CRC calculation requires HOST accessible memory." 
+ "Host accessible linear images requires an extra copy at the moment."); exit(EXIT_FAILURE); } diff --git a/common/libs/VkCodecUtils/FrameProcessor.h b/common/libs/VkCodecUtils/FrameProcessor.h index 8a94f6ab..3d38aac0 100644 --- a/common/libs/VkCodecUtils/FrameProcessor.h +++ b/common/libs/VkCodecUtils/FrameProcessor.h @@ -24,6 +24,7 @@ #include #include "VkCodecUtils/VkVideoRefCountBase.h" +#include "Logger.h" class Shell; @@ -108,19 +109,18 @@ class FrameProcessor : public VkVideoRefCountBase { , m_profileFramesCount(0) , m_displayTimePeriodMilliseconds(1000) , start_time (std::chrono::steady_clock::now()) - , m_verbose(verbose) { - if (m_verbose) { - std::cout << "The clock resolution of high_resolution_clock is: " - << (double) std::chrono::high_resolution_clock::period::num / - std::chrono::high_resolution_clock::period::den << std::endl; - std::cout << "The clock resolution of steady_clock is: " - << (double) std::chrono::steady_clock::period::num / - std::chrono::steady_clock::period::den << std::endl; - std::cout << "The clock resolution of system_clock is: " - << (double) std::chrono::system_clock::period::num / - std::chrono::system_clock::period::den << std::endl; - } + + LOG_S_DEBUG << "The clock resolution of high_resolution_clock is: " + << (double) std::chrono::high_resolution_clock::period::num / + std::chrono::high_resolution_clock::period::den << std::endl; + LOG_S_DEBUG << "The clock resolution of steady_clock is: " + << (double) std::chrono::steady_clock::period::num / + std::chrono::steady_clock::period::den << std::endl; + LOG_S_DEBUG << "The clock resolution of system_clock is: " + << (double) std::chrono::system_clock::period::num / + std::chrono::system_clock::period::den << std::endl; + } protected: @@ -128,7 +128,6 @@ class FrameProcessor : public VkVideoRefCountBase { int64_t m_profileFramesCount; const int64_t m_displayTimePeriodMilliseconds; std::chrono::time_point start_time; - uint32_t m_verbose: 1; }; #endif // FRAMEPROCESSOR_H 
diff --git a/common/libs/VkCodecUtils/Helpers.h b/common/libs/VkCodecUtils/Helpers.h index 4218d36d..2e3e9241 100644 --- a/common/libs/VkCodecUtils/Helpers.h +++ b/common/libs/VkCodecUtils/Helpers.h @@ -23,6 +23,7 @@ #include #include #include "HelpersDispatchTable.h" +#include "Logger.h" namespace vk { @@ -240,7 +241,7 @@ inline VkResult WaitAndResetFence(const VkInterfaceFunctions* vkIf, VkDevice dev result = vkIf->WaitForFences(device, 1, &fence, true, fenceWaitTimeout); if (result != VK_SUCCESS) { - fprintf(stderr, "\t **** WARNING: fence %s(%llu) is not done after %llu nSec with result 0x%x ****\n", + LOG_ERROR("\t **** ERROR: fence %s(%llu) is not done after %llu nSec with result 0x%x ****\n", fenceName, (long long unsigned int)fence, (long long unsigned int)fenceWaitTimeout, result); assert(!"Fence is not signaled yet after more than 100 mSec wait"); } @@ -253,7 +254,7 @@ inline VkResult WaitAndResetFence(const VkInterfaceFunctions* vkIf, VkDevice dev } if (result != VK_SUCCESS) { - fprintf(stderr, "\t **** ERROR: fence %s(%llu) is not done after %llu nSec with result 0x%x ****\n", + LOG_ERROR("\t **** ERROR: fence %s(%llu) is not done after %llu nSec with result 0x%x ****\n", fenceName, (long long unsigned int)fence, (long long unsigned int)fenceTotalWaitTimeout, vkIf->GetFenceStatus(device, fence)); assert(!"Fence is not signaled yet after more than 100 mSec wait"); } @@ -261,7 +262,7 @@ inline VkResult WaitAndResetFence(const VkInterfaceFunctions* vkIf, VkDevice dev if (resetAfterWait) { result = vkIf->ResetFences(device, 1, &fence); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: ResetFences() result: 0x%x\n", result); + LOG_ERROR("\nERROR: ResetFences() result: 0x%x\n", result); assert(result == VK_SUCCESS); } @@ -282,7 +283,7 @@ inline VkResult WaitAndGetStatus(const VkInterfaceFunctions* vkIf, VkDevice devi do { result = WaitAndResetFence(vkIf, device, fence, resetAfterWait, fenceName, fenceWaitTimeout, fenceTotalWaitTimeout); if (result != 
VK_SUCCESS) { - std::cout << "WaitForFences timeout " << fenceWaitTimeout + LOG_S_WARN << "WaitForFences timeout " << fenceWaitTimeout << " result " << result << " retry " << retryCount << std::endl << std::flush; VkQueryResultStatusKHR decodeStatus = VK_QUERY_RESULT_STATUS_NOT_READY_KHR; @@ -295,19 +296,19 @@ inline VkResult WaitAndGetStatus(const VkInterfaceFunctions* vkIf, VkDevice devi sizeof(decodeStatus), VK_QUERY_RESULT_WITH_STATUS_BIT_KHR); - printf("\nERROR: GetQueryPoolResults() result: 0x%x\n", queryResult); - std::cout << "\t +++++++++++++++++++++++++++< " << pictureIndex + LOG_ERROR("\nERROR: GetQueryPoolResults() result: 0x%x\n", queryResult); + LOG_S_WARN << "\t +++++++++++++++++++++++++++< " << pictureIndex << " >++++++++++++++++++++++++++++++" << std::endl; - std::cout << "\t => Decode Status for CurrPicIdx: " << pictureIndex << std::endl + LOG_S_WARN << "\t => Decode Status for CurrPicIdx: " << pictureIndex << std::endl << "\t\tdecodeStatus: " << decodeStatus << std::endl; if (queryResult == VK_ERROR_DEVICE_LOST) { - std::cout << "\t Dropping frame" << std::endl; + LOG_S_WARN << "\t Dropping frame" << std::endl; break; } if ((queryResult == VK_SUCCESS) && (decodeStatus == VK_QUERY_RESULT_STATUS_ERROR_KHR)) { - std::cout << "\t Decoding of the frame failed." << std::endl; + LOG_S_ERROR << "\t Decoding of the frame failed." 
<< std::endl; break; } } diff --git a/common/libs/VkCodecUtils/VulkanBistreamBufferImpl.cpp b/common/libs/VkCodecUtils/VulkanBistreamBufferImpl.cpp index cb6d265b..f26277db 100644 --- a/common/libs/VkCodecUtils/VulkanBistreamBufferImpl.cpp +++ b/common/libs/VkCodecUtils/VulkanBistreamBufferImpl.cpp @@ -231,7 +231,7 @@ VkDeviceSize VulkanBitstreamBufferImpl::Resize(VkDeviceSize newSize, VkDeviceSiz return m_bufferSize; } - std::cout << " ======= Req resize old " << m_bufferSize << " -> new " << newSize << " ====== " << std::endl; + LOG_S_INFO << " ======= Req resize old " << m_bufferSize << " -> new " << newSize << " ====== " << std::endl; VkBuffer newBuffer = VK_NULL_HANDLE; VkDeviceSize newBufferOffset = 0; diff --git a/common/libs/VkCodecUtils/VulkanBistreamBufferImpl.h b/common/libs/VkCodecUtils/VulkanBistreamBufferImpl.h index 8960276b..13225597 100644 --- a/common/libs/VkCodecUtils/VulkanBistreamBufferImpl.h +++ b/common/libs/VkCodecUtils/VulkanBistreamBufferImpl.h @@ -42,7 +42,7 @@ class VulkanBitstreamBufferImpl : public VulkanBitstreamBuffer uint32_t ret = --m_refCount; // Destroy the buffer if ref-count reaches zero if (ret == 0) { - // std::cout << "Delete bitstream buffer " << this << " with size " << GetMaxSize() << std::endl; + // LOG_S_DEBUG << "Delete bitstream buffer " << this << " with size " << GetMaxSize() << std::endl; delete this; } return ret; diff --git a/common/libs/VkCodecUtils/VulkanComputePipeline.cpp b/common/libs/VkCodecUtils/VulkanComputePipeline.cpp index 1763883b..dfd78fe8 100644 --- a/common/libs/VkCodecUtils/VulkanComputePipeline.cpp +++ b/common/libs/VkCodecUtils/VulkanComputePipeline.cpp @@ -45,7 +45,7 @@ VkResult VulkanComputePipeline::CreatePipeline(const VulkanDeviceContext* vkDevC const bool verbose = false; - if (verbose) printf("\nCompute shader code:\n %s", shaderCode); + if (verbose) LOG_DEBUG("\nCompute shader code:\n %s", shaderCode); DestroyShaderModule(); m_shaderModule = shaderCompiler.BuildGlslShader(shaderCode, 
diff --git a/common/libs/VkCodecUtils/VulkanDeviceContext.cpp b/common/libs/VkCodecUtils/VulkanDeviceContext.cpp index f13d598e..6b437831 100644 --- a/common/libs/VkCodecUtils/VulkanDeviceContext.cpp +++ b/common/libs/VkCodecUtils/VulkanDeviceContext.cpp @@ -33,6 +33,7 @@ #ifdef VIDEO_DISPLAY_QUEUE_SUPPORT #include "VkShell/Shell.h" #endif // VIDEO_DISPLAY_QUEUE_SUPPORT +#include "Logger.h" #if !defined(VK_USE_PLATFORM_WIN32_KHR) PFN_vkGetInstanceProcAddr VulkanDeviceContext::LoadVk(VulkanLibraryHandleType &vulkanLibHandle, @@ -127,23 +128,23 @@ VkResult VulkanDeviceContext::CheckAllInstanceLayers(bool verbose) std::vector layers; vk::enumerate(this, layers); - if (verbose) std::cout << "Enumerating instance layers:" << std::endl; + if (verbose) LOG_S_INFO << "Enumerating instance layers:" << std::endl; std::set layer_names; for (const auto &layer : layers) { layer_names.insert(layer.layerName); - if (verbose ) std::cout << '\t' << layer.layerName << std::endl; + if (verbose ) LOG_S_INFO << '\t' << layer.layerName << std::endl; } // all listed instance layers are required - if (verbose) std::cout << "Looking for instance layers:" << std::endl; + if (verbose) LOG_S_INFO << "Looking for instance layers:" << std::endl; for (uint32_t i = 0; i < m_reqInstanceLayers.size(); i++) { const char* name = m_reqInstanceLayers[i]; if (name == nullptr) { break; } - std::cout << '\t' << name << std::endl; + LOG_S_INFO << '\t' << name << std::endl; if (layer_names.find(name) == layer_names.end()) { - std::cerr << "AssertAllInstanceLayers() ERROR: requested instance layer" + LOG_S_ERROR << "AssertAllInstanceLayers() ERROR: requested instance layer" << name << " is missing!" 
<< std::endl << std::flush; return VK_ERROR_LAYER_NOT_PRESENT; } @@ -181,23 +182,23 @@ VkResult VulkanDeviceContext::CheckAllInstanceExtensions(bool verbose) std::vector exts; vk::enumerate(this, nullptr, exts); - if (verbose) std::cout << "Enumerating instance extensions:" << std::endl; + if (verbose) LOG_S_INFO << "Enumerating instance extensions:" << std::endl; std::set ext_names; for (const auto &ext : exts) { ext_names.insert(ext.extensionName); - if (verbose) std::cout << '\t' << ext.extensionName << std::endl; + if (verbose) LOG_S_INFO << '\t' << ext.extensionName << std::endl; } // all listed instance extensions are required - if (verbose) std::cout << "Looking for instance extensions:" << std::endl; + if (verbose) LOG_S_INFO << "Looking for instance extensions:" << std::endl; for (uint32_t i = 0; i < m_reqInstanceExtensions.size(); i++) { const char* name = m_reqInstanceExtensions[i]; if (name == nullptr) { break; } - if (verbose) std::cout << '\t' << name << std::endl; + if (verbose) LOG_S_INFO << '\t' << name << std::endl; if (ext_names.find(name) == ext_names.end()) { - std::cerr << "AssertAllInstanceExtensions() ERROR: requested instance extension " + LOG_S_ERROR << "AssertAllInstanceExtensions() ERROR: requested instance extension " << name << " is missing!" 
<< std::endl << std::flush; return VK_ERROR_EXTENSION_NOT_PRESENT; } @@ -256,7 +257,7 @@ bool VulkanDeviceContext::HasAllDeviceExtensions(VkPhysicalDevice physDevice, co if (ext_names.find(name) == ext_names.end()) { hasAllRequiredExtensions = false; if (printMissingDeviceExt) { - std::cerr << __FUNCTION__ + LOG_S_ERROR << __FUNCTION__ << ": ERROR: required device extension " << name << " is missing for device with name: " << printMissingDeviceExt << std::endl << std::flush; @@ -276,7 +277,7 @@ bool VulkanDeviceContext::HasAllDeviceExtensions(VkPhysicalDevice physDevice, co } if (ext_names.find(name) == ext_names.end()) { if (printMissingDeviceExt) { - std::cout << __FUNCTION__ + LOG_S_INFO << __FUNCTION__ << " : WARNING: requested optional device extension " << name << " is missing for device with name: " << printMissingDeviceExt << std::endl << std::flush; @@ -304,7 +305,7 @@ static int DumpSoLibs() auto* map = reinterpret_cast(p->ptr); while (map) { - std::cout << map->l_name << std::endl; + LOG_S_INFO << map->l_name << std::endl; // do something with |map| like with handle, returned by |dlopen()|. map = map->l_next; } @@ -452,7 +453,7 @@ VkResult VulkanDeviceContext::InitPhysicalDevice(int32_t deviceId, const uint8_t } if (!HasAllDeviceExtensions(physicalDevice, props.deviceName)) { - std::cerr << "ERROR: Found physical device with name: " << props.deviceName << std::hex + LOG_S_ERROR << "ERROR: Found physical device with name: " << props.deviceName << std::hex << ", vendor ID: " << props.vendorID << ", and device ID: " << props.deviceID << std::dec << " NOT having the required extensions!" 
<< std::endl << std::flush; @@ -482,7 +483,6 @@ VkResult VulkanDeviceContext::InitPhysicalDevice(int32_t deviceId, const uint8_t transferQueueFamilyOnly = -1, transferNumQueues = 0; - const bool dumpQueues = false; for (uint32_t i = 0; i < queues.size(); i++) { const VkQueueFamilyProperties2 &queue = queues[i]; @@ -508,19 +508,19 @@ VkResult VulkanDeviceContext::InitPhysicalDevice(int32_t deviceId, const uint8_t videoDecodeQueueFamily = i; videoDecodeQueueCount = queue.queueFamilyProperties.queueCount; - if (dumpQueues) std::cout << "\t Found video decode only queue family " << i << + LOG_S_INFO << "\t Found video decode only queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; // Does the video decode queue also support transfer operations? if (queueFamilyFlags & VK_QUEUE_TRANSFER_BIT) { - if (dumpQueues) std::cout << "\t\t Video decode queue " << i << + LOG_S_INFO << "\t\t Video decode queue " << i << " supports transfer operations" << std::endl; } // Does the video decode queue also support compute operations? if (queueFamilyFlags & VK_QUEUE_COMPUTE_BIT) { - if (dumpQueues) std::cout << "\t\t Video decode queue " << i << + LOG_S_INFO << "\t\t Video decode queue " << i << " supports compute operations" << std::endl; } @@ -536,19 +536,19 @@ VkResult VulkanDeviceContext::InitPhysicalDevice(int32_t deviceId, const uint8_t videoEncodeQueueFamily = i; videoEncodeQueueCount = queue.queueFamilyProperties.queueCount; - if (dumpQueues) std::cout << "\t Found video encode only queue family " << i << + LOG_S_INFO << "\t Found video encode only queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; // Does the video encode queue also support transfer operations? 
if (queueFamilyFlags & VK_QUEUE_TRANSFER_BIT) { - if (dumpQueues) std::cout << "\t\t Video encode queue " << i << + LOG_S_INFO << "\t\t Video encode queue " << i << " supports transfer operations" << std::endl; } // Does the video encode queue also support compute operations? if (queueFamilyFlags & VK_QUEUE_COMPUTE_BIT) { - if (dumpQueues) std::cout << "\t\t Video encode queue " << i << + LOG_S_INFO << "\t\t Video encode queue " << i << " supports compute operations" << std::endl; } @@ -564,17 +564,17 @@ VkResult VulkanDeviceContext::InitPhysicalDevice(int32_t deviceId, const uint8_t (queueFamilyFlags & VK_QUEUE_GRAPHICS_BIT)) { gfxQueueFamily = i; foundQueueTypes |= queueFamilyFlags; - if (dumpQueues) std::cout << "\t Found graphics queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; + LOG_S_INFO << "\t Found graphics queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; } else if ((requestQueueTypes & VK_QUEUE_COMPUTE_BIT) && (computeQueueFamilyOnly < 0) && ((VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT) == (queueFamilyFlags & (VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT)))) { computeQueueFamilyOnly = i; foundQueueTypes |= queueFamilyFlags; - if (dumpQueues) std::cout << "\t Found compute only queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; + LOG_S_INFO << "\t Found compute only queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; } else if ((requestQueueTypes & VK_QUEUE_TRANSFER_BIT) && (transferQueueFamilyOnly < 0) && (VK_QUEUE_TRANSFER_BIT == (queueFamilyFlags & VK_QUEUE_TRANSFER_BIT))) { transferQueueFamilyOnly = i; foundQueueTypes |= queueFamilyFlags; - if (dumpQueues) std::cout << "\t Found transfer only queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." 
<< std::endl; + LOG_S_INFO << "\t Found transfer only queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; } // requires only COMPUTE for frameProcessor queues @@ -582,13 +582,13 @@ VkResult VulkanDeviceContext::InitPhysicalDevice(int32_t deviceId, const uint8_t (queueFamilyFlags & VK_QUEUE_COMPUTE_BIT)) { computeQueueFamily = i; foundQueueTypes |= queueFamilyFlags; - if (dumpQueues) std::cout << "\t Found compute queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; + LOG_S_INFO << "\t Found compute queue family " << i << " with " << queue.queueFamilyProperties.queueCount << " max num of queues." << std::endl; } // present queue must support the surface if ((pWsiDisplay != nullptr) && (presentQueueFamily < 0) && pWsiDisplay->PhysDeviceCanPresent(physicalDevice, i)) { - if (dumpQueues) std::cout << "\t Found present queue family " << i << "." << std::endl; + LOG_S_INFO << "\t Found present queue family " << i << "." 
<< std::endl; presentQueueFamily = i; } @@ -618,17 +618,17 @@ VkResult VulkanDeviceContext::InitPhysicalDevice(int32_t deviceId, const uint8_t PrintExtensions(true); } - if (dumpQueues) { - std::cout << "*** Selected Vulkan physical device with name: " << props.deviceName << std::hex - << ", vendor ID: " << props.vendorID << ", and device ID: " << props.deviceID << std::dec - << ", Num Decode Queues: " << m_videoDecodeNumQueues - << ", Num Encode Queues: " << m_videoEncodeNumQueues - << " ***" << std::endl << std::flush; + { + LOG_S_INFO << "*** Selected Vulkan physical device with name: " << props.deviceName << std::hex + << ", vendor ID: " << props.vendorID << ", and device ID: " << props.deviceID << std::dec + << ", Num Decode Queues: " << m_videoDecodeNumQueues + << ", Num Encode Queues: " << m_videoEncodeNumQueues + << " ***" << std::endl << std::flush; } return VK_SUCCESS; } } - std::cerr << "ERROR: Found physical device with name: " << props.deviceName << std::hex + LOG_S_ERROR << "ERROR: Found physical device with name: " << props.deviceName << std::hex << ", vendor ID: " << props.vendorID << ", and device ID: " << props.deviceID << std::dec << " NOT having the required queue families!" << std::endl << std::flush; @@ -945,9 +945,9 @@ const char * VulkanDeviceContext::FindRequiredDeviceExtension(const char* name) void VulkanDeviceContext::PrintExtensions(bool deviceExt) const { const std::vector& extensions = deviceExt ? m_deviceExtensions : m_instanceExtensions; - std::cout << "###### List of " << (deviceExt ? "Device" : "Instance") << " Extensions: ######" << std::endl; + LOG_S_INFO << "###### List of " << (deviceExt ? "Device" : "Instance") << " Extensions: ######" << std::endl; for (const auto& e : extensions) { - std::cout << "\t " << e.extensionName << "(v." << e.specVersion << ")\n"; + LOG_S_INFO << "\t " << e.extensionName << "(v." 
<< e.specVersion << ")\n"; } } @@ -956,13 +956,13 @@ VkResult VulkanDeviceContext::PopulateInstanceExtensions() uint32_t extensionsCount = 0; VkResult result = EnumerateInstanceExtensionProperties( nullptr, &extensionsCount, nullptr ); if ((result != VK_SUCCESS) || (extensionsCount == 0)) { - std::cout << "Could not get the number of instance extensions." << std::endl; + LOG_S_ERROR << "Could not get the number of instance extensions." << std::endl; return result; } m_instanceExtensions.resize( extensionsCount ); result = EnumerateInstanceExtensionProperties( nullptr, &extensionsCount, m_instanceExtensions.data() ); if ((result != VK_SUCCESS) || (extensionsCount == 0)) { - std::cout << "Could not enumerate instance extensions." << std::endl; + LOG_S_ERROR << "Could not enumerate instance extensions." << std::endl; return result; } return result; @@ -973,13 +973,13 @@ VkResult VulkanDeviceContext::PopulateDeviceExtensions() uint32_t extensions_count = 0; VkResult result = EnumerateDeviceExtensionProperties( m_physDevice, nullptr, &extensions_count, nullptr ); if ((result != VK_SUCCESS) || (extensions_count == 0)) { - std::cout << "Could not get the number of device extensions." << std::endl; + LOG_S_ERROR << "Could not get the number of device extensions." << std::endl; return result; } m_deviceExtensions.resize( extensions_count ); result = EnumerateDeviceExtensionProperties( m_physDevice, nullptr, &extensions_count, m_deviceExtensions.data() ); if ((result != VK_SUCCESS) || (extensions_count == 0)) { - std::cout << "Could not enumerate device extensions." << std::endl; + LOG_S_ERROR << "Could not enumerate device extensions." 
<< std::endl; return result; } return result; diff --git a/common/libs/VkCodecUtils/VulkanFilterYuvCompute.cpp b/common/libs/VkCodecUtils/VulkanFilterYuvCompute.cpp index c4b5e4d1..2f51a86d 100644 --- a/common/libs/VkCodecUtils/VulkanFilterYuvCompute.cpp +++ b/common/libs/VkCodecUtils/VulkanFilterYuvCompute.cpp @@ -17,8 +17,7 @@ #include "VulkanFilterYuvCompute.h" #include "nvidia_utils/vulkan/ycbcrvkinfo.h" - -static bool dumpShaders = false; +#include "Logger.h" VkResult VulkanFilterYuvCompute::Create(const VulkanDeviceContext* vkDevCtx, uint32_t queueFamilyIndex, @@ -409,8 +408,7 @@ size_t VulkanFilterYuvCompute::InitYCBCR2RGBA(std::string& computeShader) "}\n"; computeShader = shaderStr.str(); - if (dumpShaders) - std::cout << "\nCompute Shader:\n" << computeShader; + LOG_S_DEBUG << "\nCompute Shader:\n" << computeShader; return computeShader.size(); } @@ -477,8 +475,7 @@ size_t VulkanFilterYuvCompute::InitYCBCRCOPY(std::string& computeShader) "}\n"; computeShader = shaderStr.str(); - if (dumpShaders) - std::cout << "\nCompute Shader:\n" << computeShader; + LOG_S_DEBUG << "\nCompute Shader:\n" << computeShader; return computeShader.size(); } @@ -526,7 +523,6 @@ size_t VulkanFilterYuvCompute::InitYCBCRCLEAR(std::string& computeShader) "}\n"; computeShader = shaderStr.str(); - if (dumpShaders) - std::cout << "\nCompute Shader:\n" << computeShader; + LOG_S_DEBUG << "\nCompute Shader:\n" << computeShader; return computeShader.size(); } diff --git a/common/libs/VkCodecUtils/VulkanFrame.cpp b/common/libs/VkCodecUtils/VulkanFrame.cpp index dcb2d6e0..548cb7f9 100644 --- a/common/libs/VkCodecUtils/VulkanFrame.cpp +++ b/common/libs/VkCodecUtils/VulkanFrame.cpp @@ -25,6 +25,7 @@ #include "VkCodecUtils/VulkanVideoUtils.h" #include "VulkanFrame.h" #include "VkVideoCore/DecodeFrameBufferIf.h" +#include "Logger.h" template VulkanFrame::VulkanFrame(const VulkanDeviceContext* vkDevCtx) @@ -71,9 +72,9 @@ int VulkanFrame::AttachShell(const Shell& sh) const uint32_t 
apiPatchVersion = VK_API_VERSION_PATCH(m_physicalDevProps.apiVersion); if (m_physicalDevProps.apiVersion < VK_MAKE_API_VERSION(0, 1, 2, 199)) { - std::cerr << std::endl << "Incompatible Vulkan API version: " << apiMajorVersion << "." << apiMinorVersion << "." << apiPatchVersion << std::endl; - std::cerr << "Info: Driver version is: " << m_physicalDevProps.driverVersion << std::endl; - std::cerr << "Please upgrade your driver. The version supported is: 1.2.199 or later aka " << std::hex << VK_MAKE_API_VERSION(0, 1, 2, 199) << std::endl; + LOG_S_ERROR << std::endl << "Incompatible Vulkan API version: " << apiMajorVersion << "." << apiMinorVersion << "." << apiPatchVersion << std::endl; + LOG_S_ERROR << "Info: Driver version is: " << m_physicalDevProps.driverVersion << std::endl; + LOG_S_ERROR << "Please upgrade your driver. The version supported is: 1.2.199 or later aka " << std::hex << VK_MAKE_API_VERSION(0, 1, 2, 199) << std::endl; assert(!"Incompatible API version - please upgrade your driver."); return -1; } @@ -106,7 +107,7 @@ int VulkanFrame::AttachShell(const Shell& sh) (const float*)vertices, sizeof(vertices), sizeof(vertices) / sizeof(vertices[0]))) { - std::cerr << "VulkanVideoFrame: " << "File " << __FILE__ << "line " << __LINE__; + LOG_S_ERROR << "VulkanVideoFrame: " << "File " << __FILE__ << "line " << __LINE__; return -1; } @@ -293,11 +294,11 @@ bool VulkanFrame::OnFrame( int32_t renderIndex, bool displayTimeNow = false; float fps = GetFrameRateFps(displayTimeNow); if (displayTimeNow) { - std::cout << "\t\tFrame " << m_frameCount << ", FPS: " << fps << std::endl; + LOG_S_DEBUG << "\t\tFrame " << m_frameCount << ", FPS: " << fps << std::endl; } } else { uint64_t timeDiffNanoSec = GetTimeDiffNanoseconds(); - std::cout << "\t\t Time nanoseconds: " << timeDiffNanoSec << + LOG_S_DEBUG << "\t\t Time nanoseconds: " << timeDiffNanoSec << " milliseconds: " << timeDiffNanoSec / 1000 << " rate: " << 1000000000.0 / timeDiffNanoSec << std::endl; } @@ -328,7 +329,7 
@@ bool VulkanFrame::OnFrame( int32_t renderIndex, assert(result == VK_SUCCESS); assert(decodeStatus == VK_QUERY_RESULT_STATUS_COMPLETE_KHR); if ((result != VK_SUCCESS) || (decodeStatus != VK_QUERY_RESULT_STATUS_COMPLETE_KHR)) { - fprintf(stderr, "\nERROR: GetQueryPoolResults() result: 0x%x\n", result); + LOG_ERROR( "\nERROR: GetQueryPoolResults() result: 0x%x\n", result); return false; } @@ -336,19 +337,19 @@ bool VulkanFrame::OnFrame( int32_t renderIndex, auto diffMilliseconds = std::chrono::duration_cast(deltaTime); auto diffMicroseconds = std::chrono::duration_cast(deltaTime); if (dumpDebug) { - std::cout << pLastDecodedFrame->pictureIndex << ": frameWaitTime: " << + LOG_S_DEBUG << pLastDecodedFrame->pictureIndex << ": frameWaitTime: " << diffMilliseconds.count() << "." << diffMicroseconds.count() << " mSec" << std::endl; } } else if (pLastDecodedFrame->frameCompleteFence != VkFence()) { VkResult result = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1, &pLastDecodedFrame->frameCompleteFence, true, 100 * 1000 * 1000 /* 100 mSec */); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: WaitForFences() result: 0x%x\n", result); + LOG_ERROR("\nERROR: WaitForFences() result: 0x%x\n", result); } result = m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, pLastDecodedFrame->frameCompleteFence); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: GetFenceStatus() result: 0x%x\n", result); + LOG_ERROR("\nERROR: GetFenceStatus() result: 0x%x\n", result); } } } @@ -366,7 +367,7 @@ bool VulkanFrame::OnFrame( int32_t renderIndex, bool displayTimeNow = true; float fps = GetFrameRateFps(displayTimeNow); if (displayTimeNow) { - std::cout << "\t\tFrame " << m_frameCount << ", FPS: " << fps << std::endl; + LOG_S_DEBUG << "\t\tFrame " << m_frameCount << ", FPS: " << fps << std::endl; } } } @@ -377,14 +378,14 @@ bool VulkanFrame::OnFrame( int32_t renderIndex, VkSharedBaseObj imageResourceView; 
pLastDecodedFrame->imageViews[FrameDataType::IMAGE_VIEW_TYPE_OPTIMAL_DISPLAY].GetImageResourceView(imageResourceView); - std::cout << "<= Wait on picIdx: " << pLastDecodedFrame->pictureIndex - << "\t\tdisplayWidth: " << pLastDecodedFrame->displayWidth - << "\t\tdisplayHeight: " << pLastDecodedFrame->displayHeight - << "\t\tdisplayOrder: " << pLastDecodedFrame->displayOrder - << "\tdecodeOrder: " << pLastDecodedFrame->decodeOrder - << "\ttimestamp " << pLastDecodedFrame->timestamp - << "\tdstImageView " << (imageResourceView ? imageResourceView->GetImageResource()->GetImage() : VkImage()) - << std::endl; + LOG_S_DEBUG << "<= Wait on picIdx: " << pLastDecodedFrame->pictureIndex + << "\t\tdisplayWidth: " << pLastDecodedFrame->displayWidth + << "\t\tdisplayHeight: " << pLastDecodedFrame->displayHeight + << "\t\tdisplayOrder: " << pLastDecodedFrame->displayOrder + << "\tdecodeOrder: " << pLastDecodedFrame->decodeOrder + << "\ttimestamp " << pLastDecodedFrame->timestamp + << "\tdstImageView " << (imageResourceView ? 
imageResourceView->GetImageResource()->GetImage() : VkImage()) + << std::endl; } if (gfxRendererIsEnabled == false) { @@ -502,11 +503,11 @@ VkResult VulkanFrame::DrawFrame( int32_t renderIndex, m_videoRenderer->m_vertexBuffer); if (dumpDebug) { - std::cout << "Drawing Frame " << m_frameCount << " FB: " << renderIndex << std::endl; + LOG_S_DEBUG << "Drawing Frame " << m_frameCount << " FB: " << renderIndex << std::endl; } if (dumpDebug && inFrame) { - std::cout << "<= Present picIdx: " << inFrame->pictureIndex + LOG_S_DEBUG << "<= Present picIdx: " << inFrame->pictureIndex << "\t\tdisplayOrder: " << inFrame->displayOrder << "\tdecodeOrder: " << inFrame->decodeOrder << "\ttimestamp " << inFrame->timestamp @@ -522,19 +523,19 @@ VkResult VulkanFrame::DrawFrame( int32_t renderIndex, result = m_vkDevCtx->QueueWaitIdle(videoDecodeQueue); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: QueueWaitIdle() result: 0x%x\n", result); + LOG_ERROR("\nERROR: QueueWaitIdle() result: 0x%x\n", result); } } } else { result = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1, &inFrame->frameCompleteFence, true, 100 * 1000 * 1000 /* 100 mSec */); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: WaitForFences() result: 0x%x\n", result); + LOG_ERROR("\nERROR: WaitForFences() result: 0x%x\n", result); } result = m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, inFrame->frameCompleteFence); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: GetFenceStatus() result: 0x%x\n", result); + LOG_ERROR("\nERROR: GetFenceStatus() result: 0x%x\n", result); } } } @@ -551,12 +552,12 @@ VkResult VulkanFrame::DrawFrame( int32_t renderIndex, result = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1, &inFrame->frameCompleteFence, true, 100 * 1000 * 1000 /* 100 mSec */); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: WaitForFences() result: 0x%x\n", result); + LOG_ERROR("\nERROR: 
WaitForFences() result: 0x%x\n", result); } result = m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, inFrame->frameCompleteFence); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: GetFenceStatus() result: 0x%x\n", result); + LOG_ERROR("\nERROR: GetFenceStatus() result: 0x%x\n", result); } } @@ -571,14 +572,14 @@ VkResult VulkanFrame::DrawFrame( int32_t renderIndex, VK_QUERY_RESULT_WITH_STATUS_BIT_KHR | VK_QUERY_RESULT_WAIT_BIT); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: GetQueryPoolResults() result: 0x%x\n", result); + LOG_ERROR("\nERROR: GetQueryPoolResults() result: 0x%x\n", result); } assert(decodeStatus == VK_QUERY_RESULT_STATUS_COMPLETE_KHR); if (dumpDebug) { - std::cout << "\t +++++++++++++++++++++++++++< " << (inFrame ? inFrame->pictureIndex : -1) + LOG_S_DEBUG << "\t +++++++++++++++++++++++++++< " << (inFrame ? inFrame->pictureIndex : -1) << " >++++++++++++++++++++++++++++++" << std::endl; - std::cout << "\t => Decode Status for CurrPicIdx: " << (inFrame ? inFrame->pictureIndex : -1) << std::endl + LOG_S_DEBUG << "\t => Decode Status for CurrPicIdx: " << (inFrame ? 
inFrame->pictureIndex : -1) << std::endl << "\t\tdecodeStatus: " << decodeStatus << std::endl; } } @@ -637,19 +638,19 @@ VkResult VulkanFrame::DrawFrame( int32_t renderIndex, result = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1, &inFrame->frameCompleteFence, true, 100 * 1000 * 1000); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: WaitForFences() result: 0x%x\n", result); + LOG_ERROR("\nERROR: WaitForFences() result: 0x%x\n", result); } result = m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, inFrame->frameCompleteFence); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: GetFenceStatus() result: 0x%x\n", result); + LOG_ERROR("\nERROR: GetFenceStatus() result: 0x%x\n", result); } } result = m_vkDevCtx->MultiThreadedQueueSubmit(VulkanDeviceContext::GRAPHICS, 0, 1, &primaryCmdSubmitInfo, frameConsumerDoneFence); if (result != VK_SUCCESS) { assert(result == VK_SUCCESS); - fprintf(stderr, "\nERROR: MultiThreadedQueueSubmit() result: 0x%x\n", result); + LOG_ERROR("\nERROR: MultiThreadedQueueSubmit() result: 0x%x\n", result); return result; } @@ -658,12 +659,12 @@ VkResult VulkanFrame::DrawFrame( int32_t renderIndex, result = m_vkDevCtx->WaitForFences(*m_vkDevCtx, 1, &frameConsumerDoneFence, true, fenceTimeout); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: WaitForFences() result: 0x%x\n", result); + LOG_ERROR("\nERROR: WaitForFences() result: 0x%x\n", result); } result = m_vkDevCtx->GetFenceStatus(*m_vkDevCtx, frameConsumerDoneFence); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: GetFenceStatus() result: 0x%x\n", result); + LOG_ERROR("\nERROR: GetFenceStatus() result: 0x%x\n", result); } } diff --git a/common/libs/VkCodecUtils/VulkanShaderCompiler.cpp b/common/libs/VkCodecUtils/VulkanShaderCompiler.cpp index 20fc073e..78317957 100644 --- a/common/libs/VkCodecUtils/VulkanShaderCompiler.cpp +++ 
b/common/libs/VkCodecUtils/VulkanShaderCompiler.cpp @@ -40,7 +40,7 @@ static shaderc_shader_kind getShadercShaderType(VkShaderStageFlagBits type) case VK_SHADER_STAGE_COMPUTE_BIT: return shaderc_glsl_compute_shader; default: - std::cerr << "VulkanShaderCompiler: " << "invalid VKShaderStageFlagBits" << "type = " << type; + LOG_S_ERROR << "VulkanShaderCompiler: " << "invalid VKShaderStageFlagBits" << "type = " << type; } return static_cast(-1); } @@ -75,7 +75,7 @@ VkShaderModule VulkanShaderCompiler::BuildGlslShader(const char *shaderCode, siz if (shaderc_result_get_compilation_status(spvShader) != shaderc_compilation_status_success) { - std::cerr << "Compilation error: \n" << shaderc_result_get_error_message(spvShader) << std::endl; + LOG_S_ERROR << "Compilation error: \n" << shaderc_result_get_error_message(spvShader) << std::endl; return VK_NULL_HANDLE; } @@ -90,7 +90,7 @@ VkShaderModule VulkanShaderCompiler::BuildGlslShader(const char *shaderCode, siz VkResult result = vkDevCtx->CreateShaderModule(*vkDevCtx, &shaderModuleCreateInfo, nullptr, &shaderModule); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - std::cerr << "Failed to create shader module" << std::endl; + LOG_S_ERROR << "Failed to create shader module" << std::endl; return VK_NULL_HANDLE; } shaderc_result_release(spvShader); diff --git a/common/libs/VkCodecUtils/VulkanVideoProcessor.cpp b/common/libs/VkCodecUtils/VulkanVideoProcessor.cpp index 3122b062..4e9d7e71 100644 --- a/common/libs/VkCodecUtils/VulkanVideoProcessor.cpp +++ b/common/libs/VkCodecUtils/VulkanVideoProcessor.cpp @@ -33,6 +33,7 @@ #include "vulkan_interfaces.h" #include "nvidia_utils/vulkan/ycbcrvkinfo.h" #include "crcgenerator.h" +#include "Logger.h" int32_t VulkanVideoProcessor::Initialize(const VulkanDeviceContext* vkDevCtx, VkSharedBaseObj& videoStreamDemuxer, @@ -56,7 +57,7 @@ int32_t VulkanVideoProcessor::Initialize(const VulkanDeviceContext* vkDevCtx, const bool verbose = false; if 
(vkDevCtx->GetVideoDecodeQueue(videoQueueIndx) == VkQueue()) { - std::cerr << "videoQueueIndx is out of bounds: " << videoQueueIndx << + LOG_S_ERROR << "videoQueueIndx is out of bounds: " << videoQueueIndx << " Max decode queues: " << vkDevCtx->GetVideoDecodeNumQueues() << std::endl; assert(!"Invalid Video Queue"); return -1; @@ -80,7 +81,7 @@ int32_t VulkanVideoProcessor::Initialize(const VulkanDeviceContext* vkDevCtx, VkResult result = VulkanVideoFrameBuffer::Create(vkDevCtx, m_vkVideoFrameBuffer); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: Create VulkanVideoFrameBuffer result: 0x%x\n", result); + LOG_ERROR("\nERROR: Create VulkanVideoFrameBuffer result: 0x%x\n", result); } m_frameToFile = frameToFile; @@ -113,7 +114,7 @@ int32_t VulkanVideoProcessor::Initialize(const VulkanDeviceContext* vkDevCtx, m_vkVideoDecoder); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: Create VkVideoDecoder result: 0x%x\n", result); + LOG_ERROR("\nERROR: Create VkVideoDecoder result: 0x%x\n", result); } VkVideoCoreProfile videoProfile(m_videoStreamDemuxer->GetVideoCodec(), @@ -125,7 +126,7 @@ int32_t VulkanVideoProcessor::Initialize(const VulkanDeviceContext* vkDevCtx, if (!VulkanVideoCapabilities::IsCodecTypeSupported(vkDevCtx, vkDevCtx->GetVideoDecodeQueueFamilyIdx(), m_videoStreamDemuxer->GetVideoCodec())) { - std::cout << "*** The video codec " << VkVideoCoreProfile::CodecToName(m_videoStreamDemuxer->GetVideoCodec()) << " is not supported! ***" << std::endl; + LOG_S_ERROR << "*** The video codec " << VkVideoCoreProfile::CodecToName(m_videoStreamDemuxer->GetVideoCodec()) << " is not supported! 
***" << std::endl; assert(!"The video codec is not supported"); return -1; } @@ -137,7 +138,7 @@ int32_t VulkanVideoProcessor::Initialize(const VulkanDeviceContext* vkDevCtx, videoDecodeCapabilities); if (result != VK_SUCCESS) { - std::cout << "*** Could not get Video Capabilities :" << result << " ***" << std::endl; + LOG_S_ERROR << "*** Could not get Video Capabilities :" << result << " ***" << std::endl; assert(!"Could not get Video Capabilities!"); return -result; } @@ -150,7 +151,7 @@ int32_t VulkanVideoProcessor::Initialize(const VulkanDeviceContext* vkDevCtx, (uint32_t)videoCapabilities.minBitstreamBufferSizeAlignment); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: CreateParser() result: 0x%x\n", result); + LOG_ERROR("\nERROR: CreateParser() result: 0x%x\n", result); } m_loopCount = loopCount; @@ -242,7 +243,7 @@ void VulkanVideoProcessor::Deinit() void VulkanVideoProcessor::DumpVideoFormat(const VkParserDetectedVideoFormat* videoFormat, bool dumpData) { if (dumpData) { - std::cout << "Display Area : " << std::endl + LOG_S_DEBUG << "Display Area : " << std::endl << "\tLeft : " << videoFormat->display_area.left << std::endl << "\tRight : " << videoFormat->display_area.right << std::endl << "\tTop : " << videoFormat->display_area.top << std::endl @@ -250,7 +251,7 @@ void VulkanVideoProcessor::DumpVideoFormat(const VkParserDetectedVideoFormat* vi } if (dumpData) { - std::cout << "Geometry : " << std::endl + LOG_S_DEBUG << "Geometry : " << std::endl << "\tCoded Width : " << videoFormat->coded_width << std::endl << "\tDisplayed Width : " << videoFormat->display_area.right - videoFormat->display_area.left << std::endl << "\tCoded Height : " << videoFormat->coded_height << std::endl @@ -259,7 +260,7 @@ void VulkanVideoProcessor::DumpVideoFormat(const VkParserDetectedVideoFormat* vi const char* pCodec = VkVideoCoreProfile::CodecToName(videoFormat->codec); if (dumpData) { - std::cout << "Codec : " << pCodec << std::endl; + 
LOG_S_DEBUG << "Codec : " << pCodec << std::endl; } /* These below token numbers are based on "chroma_format_idc" from the spec. */ @@ -279,7 +280,7 @@ void VulkanVideoProcessor::DumpVideoFormat(const VkParserDetectedVideoFormat* vi assert(nvVideoChromaFormat[videoFormat->chromaSubsampling] != nullptr); const char* pVideoChromaFormat = nvVideoChromaFormat[videoFormat->chromaSubsampling]; if (dumpData) { - std::cout << "VideoChromaFormat : " << pVideoChromaFormat << std::endl; + LOG_S_DEBUG << "VideoChromaFormat : " << pVideoChromaFormat << std::endl; } static const char* VideoFormat[] = { @@ -297,7 +298,7 @@ void VulkanVideoProcessor::DumpVideoFormat(const VkParserDetectedVideoFormat* vi assert(videoFormat->video_signal_description.video_format < sizeof(VideoFormat)/sizeof(VideoFormat[0])); const char* pVideoFormat = VideoFormat[videoFormat->video_signal_description.video_format]; if (dumpData) { - std::cout << "VideoFormat : " << pVideoFormat << std::endl; + LOG_S_DEBUG << "VideoFormat : " << pVideoFormat << std::endl; } const char* ColorPrimaries[] = { @@ -317,7 +318,7 @@ void VulkanVideoProcessor::DumpVideoFormat(const VkParserDetectedVideoFormat* vi assert(videoFormat->video_signal_description.color_primaries < sizeof(ColorPrimaries)/sizeof(ColorPrimaries[0])); const char* pColorPrimaries = ColorPrimaries[videoFormat->video_signal_description.color_primaries]; if (dumpData) { - std::cout << "ColorPrimaries : " << pColorPrimaries << std::endl; + LOG_S_DEBUG << "ColorPrimaries : " << pColorPrimaries << std::endl; } const char* TransferCharacteristics[] = { @@ -343,7 +344,7 @@ void VulkanVideoProcessor::DumpVideoFormat(const VkParserDetectedVideoFormat* vi assert(videoFormat->video_signal_description.transfer_characteristics < sizeof(TransferCharacteristics)/sizeof(TransferCharacteristics[0])); const char* pTransferCharacteristics = TransferCharacteristics[videoFormat->video_signal_description.transfer_characteristics]; if (dumpData) { - std::cout << 
"TransferCharacteristics : " << pTransferCharacteristics << std::endl; + LOG_S_DEBUG << "TransferCharacteristics : " << pTransferCharacteristics << std::endl; } const char* MatrixCoefficients[] = { @@ -362,7 +363,7 @@ void VulkanVideoProcessor::DumpVideoFormat(const VkParserDetectedVideoFormat* vi assert(videoFormat->video_signal_description.matrix_coefficients < sizeof(MatrixCoefficients)/sizeof(MatrixCoefficients[0])); const char* pMatrixCoefficients = MatrixCoefficients[videoFormat->video_signal_description.matrix_coefficients]; if (dumpData) { - std::cout << "MatrixCoefficients : " << pMatrixCoefficients << std::endl; + LOG_S_DEBUG << "MatrixCoefficients : " << pMatrixCoefficients << std::endl; } } @@ -512,13 +513,13 @@ uint32_t VulkanVideoProcessor::Restart(int64_t& bitstreamOffset) bool VulkanVideoProcessor::StreamCompleted() { if (--m_loopCount > 0) { - std::cout << "Restarting video stream with loop number " << (m_loopCount + 1) << std::endl; + LOG_S_INFO << "Restarting video stream with loop number " << (m_loopCount + 1) << std::endl; // Reload the file stream int64_t bitstreamOffset = 0; Restart(bitstreamOffset); return false; } else { - std::cout << "End of Video Stream with status " << VK_SUCCESS << std::endl; + LOG_S_INFO << "End of Video Stream with status " << VK_SUCCESS << std::endl; return true; } } @@ -550,7 +551,7 @@ int32_t VulkanVideoProcessor::ParserProcessNextDataChunk() requiresPartialParsing); if (parserStatus != VK_SUCCESS) { m_videoStreamsCompleted = true; - std::cerr << "Parser: end of Video Stream with status " << parserStatus << std::endl; + LOG_S_ERROR << "Parser: end of Video Stream with status " << parserStatus << std::endl; retValue = -1; } else { retValue = (int32_t)bitstreamBytesConsumed; @@ -596,7 +597,7 @@ int32_t VulkanVideoProcessor::GetNextFrame(VulkanDecodedFrame* pFrame, bool* end if ((m_maxFrameCount != -1) && (m_videoFrameNum >= (uint32_t)m_maxFrameCount)) { // Tell the FrameProcessor we're done after this frame is 
drawn. - std::cout << "Number of video frames " << m_videoFrameNum + LOG_S_INFO << "Number of video frames " << m_videoFrameNum << " of max frame number " << m_maxFrameCount << std::endl; m_videoStreamsCompleted = StreamCompleted(); *endOfStream = m_videoStreamsCompleted; diff --git a/common/libs/VkCodecUtils/VulkanVideoUtils.cpp b/common/libs/VkCodecUtils/VulkanVideoUtils.cpp index a95d0a88..db9d7ebf 100644 --- a/common/libs/VkCodecUtils/VulkanVideoUtils.cpp +++ b/common/libs/VkCodecUtils/VulkanVideoUtils.cpp @@ -22,11 +22,12 @@ #include "VulkanVideoUtils.h" #include +#include "Logger.h" // Vulkan call wrapper #define CALL_VK(func) \ if (VK_SUCCESS != (func)) { \ - std::cerr << "VkVideoUtils: " << "File " << __FILE__ << "line " << __LINE__; \ + LOG_S_ERROR << "VkVideoUtils: " << "File " << __FILE__ << "line " << __LINE__; \ assert(false); \ } @@ -43,7 +44,7 @@ using namespace Pattern; void VulkanSwapchainInfo::CreateSwapChain(const VulkanDeviceContext* vkDevCtx, VkSwapchainKHR swapchain) { - if (mVerbose) std::cout << "VkVideoUtils: " << "Enter Function: " << __FUNCTION__ << "File " << __FILE__ << "line " << __LINE__ << std::endl; + if (mVerbose) LOG_S_DEBUG << "VkVideoUtils: " << "Enter Function: " << __FUNCTION__ << "File " << __FILE__ << "line " << __LINE__ << std::endl; mInstance = vkDevCtx->getInstance(); m_vkDevCtx = vkDevCtx; @@ -77,7 +78,7 @@ void VulkanSwapchainInfo::CreateSwapChain(const VulkanDeviceContext* vkDevCtx, V m_vkDevCtx->GetPhysicalDeviceSurfaceFormatsKHR(vkDevCtx->getPhysicalDevice(), mSurface, &formatCount, formats); - std::cout << "VkVideoUtils: " << "VulkanSwapchainInfo - got " << formatCount << "surface formats"; + LOG_S_INFO << "VkVideoUtils: " << "VulkanSwapchainInfo - got " << formatCount << "surface formats"; uint32_t chosenFormat; for (chosenFormat = 0; chosenFormat < formatCount; chosenFormat++) { @@ -506,8 +507,8 @@ VkResult VulkanGraphicsPipeline::CreatePipeline(const VulkanDeviceContext* vkDev const bool verbose = false; - if
(false) printf("\nVertex shader output code:\n %s", vss); - if (false) printf("\nFragment shader output code:\n %s", imageFss.str().c_str()); + if (false) LOG_DEBUG("\nVertex shader output code:\n %s", vss); + if (false) LOG_DEBUG("\nFragment shader output code:\n %s", imageFss.str().c_str()); const bool loadShadersFromFile = false; if (loadShadersFromFile) { @@ -536,7 +537,7 @@ VkResult VulkanGraphicsPipeline::CreatePipeline(const VulkanDeviceContext* vkDev m_vkDevCtx); m_fssCache.swap(imageFss); - if (verbose) printf("\nFragment shader cache output code:\n %s", m_fssCache.str().c_str()); + if (verbose) LOG_DEBUG("\nFragment shader cache output code:\n %s", m_fssCache.str().c_str()); } } @@ -846,14 +847,14 @@ VkResult VulkanRenderInfo::UpdatePerDrawContexts(VulkanPerDrawContext* pPerDrawC const VkSamplerYcbcrConversionCreateInfo* pSamplerYcbcrConversionCreateInfo) { - if (mVerbose) std::cout << "VkVideoUtils: " << "CreateVulkanSamplers " << pPerDrawContext->contextIndex << std::endl; + if (mVerbose) LOG_S_DEBUG << "VkVideoUtils: " << "CreateVulkanSamplers " << pPerDrawContext->contextIndex << std::endl; VkResult result = pPerDrawContext->samplerYcbcrConversion.CreateVulkanSampler(m_vkDevCtx, pSamplerCreateInfo, pSamplerYcbcrConversionCreateInfo); if (result != VK_SUCCESS) { return result; } - if (mVerbose) std::cout << "VkVideoUtils: " << "CreateDescriptorSet " << pPerDrawContext->contextIndex << std::endl; + if (mVerbose) LOG_S_DEBUG << "VkVideoUtils: " << "CreateDescriptorSet " << pPerDrawContext->contextIndex << std::endl; VkSampler immutableSampler = pPerDrawContext->samplerYcbcrConversion.GetSampler(); const std::vector setLayoutBindings{ @@ -878,7 +879,7 @@ VkResult VulkanRenderInfo::UpdatePerDrawContexts(VulkanPerDrawContext* pPerDrawC if (result != VK_SUCCESS) { return result; } - if (mVerbose) std::cout << "VkVideoUtils: " << "CreateGraphicsPipeline " << pPerDrawContext->contextIndex << std::endl; + if (mVerbose) LOG_S_DEBUG << "VkVideoUtils: " << 
"CreateGraphicsPipeline " << pPerDrawContext->contextIndex << std::endl; // Create graphics pipeline result = pPerDrawContext->gfxPipeline.CreatePipeline(m_vkDevCtx, pViewport, @@ -911,15 +912,15 @@ VkResult VulkanRenderInfo::CreatePerDrawContexts(const VulkanDeviceContext* vkDe VulkanPerDrawContext* pPerDrawContext = GetDrawContext(ctxsIndx); pPerDrawContext->m_vkDevCtx = vkDevCtx; pPerDrawContext->contextIndex = ctxsIndx; - if (mVerbose) std::cout << "VkVideoUtils: " << "Init pPerDrawContext " << ctxsIndx << std::endl; + if (mVerbose) LOG_S_DEBUG << "VkVideoUtils: " << "Init pPerDrawContext " << ctxsIndx << std::endl; - if (mVerbose) std::cout << "VkVideoUtils: " << "CreateCommandBufferPool " << pPerDrawContext->contextIndex << std::endl; + if (mVerbose) LOG_S_DEBUG << "VkVideoUtils: " << "CreateCommandBufferPool " << pPerDrawContext->contextIndex << std::endl; result = pPerDrawContext->commandBuffer.CreateCommandBufferPool(vkDevCtx, vkDevCtx->GetGfxQueueFamilyIdx()); if (result != VK_SUCCESS) { return result; } - if (mVerbose) std::cout << "VkVideoUtils: " << "CreateFrameBuffer " << pPerDrawContext->contextIndex << std::endl; + if (mVerbose) LOG_S_DEBUG << "VkVideoUtils: " << "CreateFrameBuffer " << pPerDrawContext->contextIndex << std::endl; result = pPerDrawContext->frameBuffer.CreateFrameBuffer(m_vkDevCtx, swapchain, pFbExtent2D, pSurfaceFormat, fbImages[ctxsIndx], renderPass); @@ -927,7 +928,7 @@ VkResult VulkanRenderInfo::CreatePerDrawContexts(const VulkanDeviceContext* vkDe return result; } - if (mVerbose) std::cout << "VkVideoUtils: " << "CreateSyncPrimitives " << pPerDrawContext->contextIndex << std::endl; + if (mVerbose) LOG_S_DEBUG << "VkVideoUtils: " << "CreateSyncPrimitives " << pPerDrawContext->contextIndex << std::endl; result = pPerDrawContext->syncPrimitives.CreateSyncPrimitives(m_vkDevCtx); if (result != VK_SUCCESS) { return result; diff --git a/common/libs/VkShell/Shell.h b/common/libs/VkShell/Shell.h index 1da4836b..7145ce66 100644 --- 
a/common/libs/VkShell/Shell.h +++ b/common/libs/VkShell/Shell.h @@ -45,7 +45,6 @@ class Shell : public VkWsiDisplay, public VkVideoRefCountBase { int32_t m_backBufferCount; uint32_t m_directToDisplayMode : 1; uint32_t m_vsync : 1; - uint32_t m_verbose : 1; Configuration(const char* windowName, int32_t backBufferCount = 4, bool directToDisplayMode = false, int32_t initialWidth = 1920, int32_t initialHeight = 1080, int32_t initialBitdepth = 8, @@ -57,7 +56,6 @@ class Shell : public VkWsiDisplay, public VkVideoRefCountBase { , m_backBufferCount(backBufferCount) , m_directToDisplayMode(false) , m_vsync(vsync) - , m_verbose(verbose) {} }; diff --git a/common/libs/VkShell/ShellXcb.cpp b/common/libs/VkShell/ShellXcb.cpp index 8061484b..cc129684 100644 --- a/common/libs/VkShell/ShellXcb.cpp +++ b/common/libs/VkShell/ShellXcb.cpp @@ -24,6 +24,7 @@ #include "VkCodecUtils/Helpers.h" #include "VkCodecUtils/FrameProcessor.h" #include "ShellXcb.h" +#include "Logger.h" namespace { @@ -158,9 +159,8 @@ void ShellXcb::HandleEvent(const xcb_generic_event_t *ev) { switch (ev->response_type & 0x7f) { case XCB_CONFIGURE_NOTIFY: { const xcb_configure_notify_event_t *notify = reinterpret_cast(ev); - if (m_settings.m_verbose) { - std::cout << "Notify display resize " << notify->width << " x " << notify->height << '\n'; - } + + LOG_S_DEBUG << "Notify display resize " << notify->width << " x " << notify->height << '\n'; m_winWidth = notify->width; m_winHeight = notify->height; diff --git a/vk_video_decoder/libs/VkDecoderUtils/FFmpegDemuxer.cpp b/vk_video_decoder/libs/VkDecoderUtils/FFmpegDemuxer.cpp index 37888fcc..9efcfb92 100644 --- a/vk_video_decoder/libs/VkDecoderUtils/FFmpegDemuxer.cpp +++ b/vk_video_decoder/libs/VkDecoderUtils/FFmpegDemuxer.cpp @@ -16,6 +16,7 @@ #include #include "VkDecoderUtils/VideoStreamDemuxer.h" +#include "Logger.h" extern "C" { #include @@ -28,7 +29,7 @@ extern "C" { inline bool check(int e, int iLine, const char *szFile) { if (e < 0) { - std::cerr << "General 
error " << e << " at line " << iLine << " in file " << szFile; + LOG_S_ERROR << "General error " << e << " at line " << iLine << " in file " << szFile; return false; } return true; @@ -54,16 +55,16 @@ class FFmpegDemuxer : public VideoStreamDemuxer { { if (!fmtc) { - std::cerr << "No AVFormatContext provided."; + LOG_S_ERROR << "No AVFormatContext provided."; return -1; } - std::cout << "Media format: " << fmtc->iformat->long_name << " (" << fmtc->iformat->name << ")"; + LOG_S_DEBUG << "Media format: " << fmtc->iformat->long_name << " (" << fmtc->iformat->name << ")"; ck(avformat_find_stream_info(fmtc, NULL)); videoStream = av_find_best_stream(fmtc, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0); if (videoStream < 0) { - std::cerr << "FFmpeg error: " << __FILE__ << " " << __LINE__ << " " << "Could not find stream in input file"; + LOG_S_ERROR << "FFmpeg error: " << __FILE__ << " " << __LINE__ << " " << "Could not find stream in input file"; return -1; } @@ -143,7 +144,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { } if (!bsf) { - std::cerr << "FFmpeg error: " << __FILE__ << " " << __LINE__ << " " << "av_bsf_get_by_name(): " << videoCodec << " failed"; + LOG_S_ERROR << "FFmpeg error: " << __FILE__ << " " << __LINE__ << " " << "av_bsf_get_by_name(): " << videoCodec << " failed"; return -1; } ck(av_bsf_alloc(bsf, &bsfc)); @@ -168,7 +169,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { AVFormatContext *ctx = NULL; if (!(ctx = avformat_alloc_context())) { - std::cerr << "FFmpeg error: " << __FILE__ << " " << __LINE__; + LOG_S_ERROR << "FFmpeg error: " << __FILE__ << " " << __LINE__; return NULL; } @@ -176,13 +177,13 @@ class FFmpegDemuxer : public VideoStreamDemuxer { int avioc_buffer_size = 8 * 1024 * 1024; avioc_buffer = (uint8_t *)av_malloc(avioc_buffer_size); if (!avioc_buffer) { - std::cerr << "FFmpeg error: " << __FILE__ << " " << __LINE__; + LOG_S_ERROR << "FFmpeg error: " << __FILE__ << " " << __LINE__; return NULL; } avioc = avio_alloc_context(avioc_buffer, 
avioc_buffer_size, 0, pDataProvider, &ReadPacket, NULL, NULL); if (!avioc) { - std::cerr << "FFmpeg error: " << __FILE__ << " " << __LINE__; + LOG_S_ERROR << "FFmpeg error: " << __FILE__ << " " << __LINE__; return NULL; } ctx->pb = avioc; @@ -385,7 +386,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { break; } // assert(!"Unknown CHROMA_SUBSAMPLING!"); - std::cerr << "\nUnknown CHROMA_SUBSAMPLING from format: " << format << std::endl; + LOG_S_ERROR << "\nUnknown CHROMA_SUBSAMPLING from format: " << format << std::endl; return VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR; } @@ -401,7 +402,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { case STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE: break; default: - std::cerr << "\nInvalid h.264 profile: " << profile << std::endl; + LOG_S_ERROR << "\nInvalid h.264 profile: " << profile << std::endl; } } break; @@ -415,7 +416,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { case STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS: break; default: - std::cerr << "\nInvalid h.265 profile: " << profile << std::endl; + LOG_S_ERROR << "\nInvalid h.265 profile: " << profile << std::endl; } } break; @@ -427,12 +428,12 @@ class FFmpegDemuxer : public VideoStreamDemuxer { case STD_VIDEO_AV1_PROFILE_PROFESSIONAL: break; default: - std::cerr << "\nInvalid AV1 profile: " << profile << std::endl; + LOG_S_ERROR << "\nInvalid AV1 profile: " << profile << std::endl; } } break; default: - std::cerr << "\nInvalid codec type: " << FFmpegToVkCodecOperation(videoCodec) << std::endl; + LOG_S_ERROR << "\nInvalid codec type: " << FFmpegToVkCodecOperation(videoCodec) << std::endl; } return (uint32_t)profile; } @@ -497,12 +498,12 @@ class FFmpegDemuxer : public VideoStreamDemuxer { virtual void DumpStreamParameters() const { - std::cout << "Width: " << codedWidth << std::endl; - std::cout << "Height: " << codedHeight << std::endl; - std::cout << "BitDepth: " << codedLumaBitDepth << std::endl; - std::cout << "Profile: " << profile << std::endl; - 
std::cout << "Level: " << level << std::endl; - std::cout << "Aspect Ration: " << (float)sample_aspect_ratio.num / sample_aspect_ratio.den << std::endl; + LOG_S_DEBUG << "Width: " << codedWidth << std::endl; + LOG_S_DEBUG << "Height: " << codedHeight << std::endl; + LOG_S_DEBUG << "BitDepth: " << codedLumaBitDepth << std::endl; + LOG_S_DEBUG << "Profile: " << profile << std::endl; + LOG_S_DEBUG << "Level: " << level << std::endl; + LOG_S_DEBUG << "Aspect Ratio: " << (float)sample_aspect_ratio.num / sample_aspect_ratio.den << std::endl; static const char* FieldOrder[] = { "UNKNOWN", @@ -512,7 +513,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { "TB: Top coded first, bottom displayed first", "BT: Bottom coded first, top displayed first", }; - std::cout << "Field Order: " << FieldOrder[field_order] << std::endl; + LOG_S_DEBUG << "Field Order: " << FieldOrder[field_order] << std::endl; static const char* ColorRange[] = { "UNSPECIFIED", @@ -520,7 +521,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { "JPEG: the normal 2^n-1 JPEG YUV ranges", "NB: Not part of ABI", }; - std::cout << "Color Range: " << ColorRange[colorRange] << std::endl; + LOG_S_DEBUG << "Color Range: " << ColorRange[colorRange] << std::endl; static const char* ColorPrimaries[] = { "RESERVED0", @@ -540,7 +541,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { "JEDEC_P22: JEDEC P22 phosphors", "NB: Not part of ABI", }; - std::cout << "Color Primaries: " << ColorPrimaries[colorPrimaries] << std::endl; + LOG_S_DEBUG << "Color Primaries: " << ColorPrimaries[colorPrimaries] << std::endl; static const char* ColorTransferCharacteristic[] = { "RESERVED0", @@ -564,7 +565,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { "ARIB_STD_B67: ARIB STD-B67, known as Hybrid log-gamma", "NB: Not part of ABI", }; - std::cout << "Color Transfer Characteristic: " << ColorTransferCharacteristic[colorTransferCharacteristics] << std::endl; + LOG_S_DEBUG << "Color Transfer Characteristic: " <<
ColorTransferCharacteristic[colorTransferCharacteristics] << std::endl; static const char* ColorSpace[] = { "RGB: order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB)", @@ -584,7 +585,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { "ICTCP: ITU-R BT.2100-0, ICtCp", "NB: Not part of ABI", }; - std::cout << "Color Space: " << ColorSpace[colorSpace] << std::endl; + LOG_S_DEBUG << "Color Space: " << ColorSpace[colorSpace] << std::endl; static const char* ChromaLocation[] = { "UNSPECIFIED", @@ -596,7 +597,7 @@ class FFmpegDemuxer : public VideoStreamDemuxer { "BOTTOM", "NB:Not part of ABI", }; - std::cout << "Chroma Location: " << ChromaLocation[chromaLocation] << std::endl; + LOG_S_DEBUG << "Chroma Location: " << ChromaLocation[chromaLocation] << std::endl; } private: diff --git a/vk_video_decoder/libs/VkVideoDecoder/VkVideoDecoder.cpp b/vk_video_decoder/libs/VkVideoDecoder/VkVideoDecoder.cpp index dc980474..6b335fad 100644 --- a/vk_video_decoder/libs/VkVideoDecoder/VkVideoDecoder.cpp +++ b/vk_video_decoder/libs/VkVideoDecoder/VkVideoDecoder.cpp @@ -103,7 +103,7 @@ int32_t VkVideoDecoder::StartVideoSequence(VkParserDetectedVideoFormat* pVideoFo 1}; } - std::cout << "Video Input Information" << std::endl + LOG_S_INFO << "Video Input Information" << std::endl << "\tCodec : " << GetVideoCodecString(pVideoFormat->codec) << std::endl << "\tFrame rate : " << pVideoFormat->frame_rate.numerator << "/" << pVideoFormat->frame_rate.denominator << " = " << ((pVideoFormat->frame_rate.denominator != 0) ? 
(1.0 * pVideoFormat->frame_rate.numerator / pVideoFormat->frame_rate.denominator) : 0.0) << " fps" << std::endl @@ -130,13 +130,13 @@ int32_t VkVideoDecoder::StartVideoSequence(VkParserDetectedVideoFormat* pVideoFo assert(videoCodecs != VK_VIDEO_CODEC_OPERATION_NONE_KHR); if (m_dumpDecodeData) { - std::cout << "\t" << std::hex << videoCodecs << " HW codec types are available: " << std::dec << std::endl; + LOG_S_DEBUG << "\t" << std::hex << videoCodecs << " HW codec types are available: " << std::dec << std::endl; } VkVideoCodecOperationFlagBitsKHR videoCodec = pVideoFormat->codec; if (m_dumpDecodeData) { - std::cout << "\tcodec " << VkVideoCoreProfile::CodecToName(videoCodec) << std::endl; + LOG_S_DEBUG << "\tcodec " << VkVideoCoreProfile::CodecToName(videoCodec) << std::endl; } VkVideoCoreProfile videoProfile(videoCodec, pVideoFormat->chromaSubsampling, pVideoFormat->lumaBitDepth, pVideoFormat->chromaBitDepth, @@ -144,7 +144,7 @@ int32_t VkVideoDecoder::StartVideoSequence(VkParserDetectedVideoFormat* pVideoFo if (!VulkanVideoCapabilities::IsCodecTypeSupported(m_vkDevCtx, m_vkDevCtx->GetVideoDecodeQueueFamilyIdx(), videoCodec)) { - std::cout << "*** The video codec " << VkVideoCoreProfile::CodecToName(videoCodec) << " is not supported! ***" << std::endl; + LOG_S_ERROR << "*** The video codec " << VkVideoCoreProfile::CodecToName(videoCodec) << " is not supported! 
***" << std::endl; assert(!"The video codec is not supported"); return -1; } @@ -158,7 +158,7 @@ int32_t VkVideoDecoder::StartVideoSequence(VkParserDetectedVideoFormat* pVideoFo } } - std::cout << "Video Decoding Params:" << std::endl + LOG_S_INFO << "Video Decoding Params:" << std::endl << "\tNum Surfaces : " << numDecodeSurfaces << std::endl << "\tResize : " << m_codedExtent.width << " x " << m_codedExtent.height << std::endl; @@ -176,7 +176,7 @@ int32_t VkVideoDecoder::StartVideoSequence(VkParserDetectedVideoFormat* pVideoFo videoCapabilities, videoDecodeCapabilities); if (result != VK_SUCCESS) { - std::cout << "*** Could not get Video Capabilities :" << result << " ***" << std::endl; + LOG_S_ERROR << "*** Could not get Video Capabilities :" << result << " ***" << std::endl; assert(!"Could not get Video Capabilities!"); return -1; } @@ -189,7 +189,7 @@ int32_t VkVideoDecoder::StartVideoSequence(VkParserDetectedVideoFormat* pVideoFo outImageFormat, dpbImageFormat); if (result != VK_SUCCESS) { - std::cout << "*** Could not get supported video formats :" << result << " ***" << std::endl; + LOG_S_ERROR << "*** Could not get supported video formats :" << result << " ***" << std::endl; assert(!"Could not get supported video formats!"); return -1; } @@ -537,11 +537,11 @@ int32_t VkVideoDecoder::StartVideoSequence(VkParserDetectedVideoFormat* pVideoFo assert((uint32_t)ret == numDecodeSurfaces); if ((uint32_t)ret != numDecodeSurfaces) { - fprintf(stderr, "\nERROR: InitImagePool() ret(%d) != m_numDecodeSurfaces(%d)\n", ret, numDecodeSurfaces); + LOG_ERROR("\nERROR: InitImagePool() ret(%d) != m_numDecodeSurfaces(%d)\n", ret, numDecodeSurfaces); } if (m_dumpDecodeData) { - std::cout << "Allocating Video Device Memory" << std::endl + LOG_S_DEBUG << "Allocating Video Device Memory" << std::endl << "Allocating " << numDecodeSurfaces << " Num Decode Surfaces and " << maxDpbSlotCount << " Video Device Memory Images for DPB " << std::endl << imageExtent.width << " x " << 
imageExtent.height << std::endl; @@ -576,7 +576,7 @@ int32_t VkVideoDecoder::StartVideoSequence(VkParserDetectedVideoFormat* pVideoFo nullptr, 0, bitstreamBuffer); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: VulkanBitstreamBufferImpl::Create() result: 0x%x\n", result); + LOG_ERROR("\nERROR: VulkanBitstreamBufferImpl::Create() result: 0x%x\n", result); break; } @@ -691,7 +691,7 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters int32_t picNumInDecodeOrder = (int32_t)(uint32_t)m_decodePicCount; if (m_dumpDecodeData) { - std::cout << "currPicIdx: " << currPicIdx << ", currentVideoQueueIndx: " << m_currentVideoQueueIndx << ", decodePicCount: " << m_decodePicCount << std::endl; + LOG_S_DEBUG << "currPicIdx: " << currPicIdx << ", currentVideoQueueIndx: " << m_currentVideoQueueIndx << ", decodePicCount: " << m_decodePicCount << std::endl; } m_videoFrameBuffer->SetPicNumInDecodeOrder(currPicIdx, picNumInDecodeOrder); @@ -700,7 +700,7 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters assert(retPicIdx == currPicIdx); if (retPicIdx != currPicIdx) { - fprintf(stderr, "\nERROR: DecodePictureWithParameters() retPicIdx(%d) != currPicIdx(%d)\n", retPicIdx, currPicIdx); + LOG_ERROR("\nERROR: DecodePictureWithParameters() retPicIdx(%d) != currPicIdx(%d)\n", retPicIdx, currPicIdx); } assert(pCurrFrameDecParams->bitstreamData->GetMaxSize() >= pCurrFrameDecParams->bitstreamDataLen); @@ -841,7 +841,7 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters } if (m_dumpDecodeData) { - std::cout << "currPicIdx: " << currPicIdx << ", OutInfo: " << pOutputPictureResource->codedExtent.width << " x " + LOG_S_DEBUG << "currPicIdx: " << currPicIdx << ", OutInfo: " << pOutputPictureResource->codedExtent.width << " x " << pOutputPictureResource->codedExtent.height << " with layout " << ((pOutputPictureResourceInfo->currentImageLayout == 
VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR) || (pOutputPictureResourceInfo->currentImageLayout == VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR) ? "OUT" : "INVALID") @@ -917,7 +917,7 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters } if (m_dumpDecodeData) { - std::cout << "\tdpb: " << (int)pCurrFrameDecParams->pGopReferenceImagesIndexes[resId] + LOG_S_DEBUG << "\tdpb: " << (int)pCurrFrameDecParams->pGopReferenceImagesIndexes[resId] << ", DpbInfo: " << pOutputPictureResource->codedExtent.width << " x " << pOutputPictureResource->codedExtent.height << " with layout " << ((pictureResourcesInfo[resId].currentImageLayout == VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR) ? "DPB" : "INVALID") @@ -1006,7 +1006,7 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters decodeBeginInfo.videoSessionParameters = *pOwnerPictureParameters; if (m_dumpDecodeData) { - std::cout << "Using object " << decodeBeginInfo.videoSessionParameters << + LOG_S_DEBUG << "Using object " << decodeBeginInfo.videoSessionParameters << " with ID: (" << pOwnerPictureParameters->GetId() << ")" << " for SPS: " << spsId << ", PPS: " << ppsId << std::endl; } @@ -1169,7 +1169,7 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters if (m_dumpDecodeData) { uint64_t currSemValue = 0; VkResult semResult = m_vkDevCtx->GetSemaphoreCounterValue(*m_vkDevCtx, m_hwLoadBalancingTimelineSemaphore, &currSemValue); - std::cout << "\t TL semaphore value: " << currSemValue << ", status: " << semResult << std::endl; + LOG_S_DEBUG << "\t TL semaphore value: " << currSemValue << ", status: " << semResult << std::endl; } waitSemaphores[waitSemaphoreCount] = m_hwLoadBalancingTimelineSemaphore; @@ -1189,7 +1189,7 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters timelineSemaphoreInfos.signalSemaphoreValueCount = signalSemaphoreCount; timelineSemaphoreInfos.pSignalSemaphoreValues = signalTlSemaphoresValues; if (m_dumpDecodeData) 
{ - std::cout << "\t Wait for: " << (waitSemaphoreCount ? waitTlSemaphoresValues[waitSemaphoreCount - 1] : 0) << + LOG_S_DEBUG << "\t Wait for: " << (waitSemaphoreCount ? waitTlSemaphoresValues[waitSemaphoreCount - 1] : 0) << ", signal at " << signalTlSemaphoresValues[signalSemaphoreCount - 1] << std::endl; } } @@ -1210,22 +1210,22 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters if (m_dumpDecodeData) { if (m_hwLoadBalancingTimelineSemaphore != VK_NULL_HANDLE) { - std::cout << "\t\t waitSemaphoreValueCount: " << timelineSemaphoreInfos.waitSemaphoreValueCount << std::endl; - std::cout << "\t pWaitSemaphoreValues: " << timelineSemaphoreInfos.pWaitSemaphoreValues[0] << ", " << + LOG_S_DEBUG << "\t\t waitSemaphoreValueCount: " << timelineSemaphoreInfos.waitSemaphoreValueCount << std::endl; + LOG_S_DEBUG << "\t pWaitSemaphoreValues: " << timelineSemaphoreInfos.pWaitSemaphoreValues[0] << ", " << timelineSemaphoreInfos.pWaitSemaphoreValues[1] << ", " << timelineSemaphoreInfos.pWaitSemaphoreValues[2] << std::endl; - std::cout << "\t\t signalSemaphoreValueCount: " << timelineSemaphoreInfos.signalSemaphoreValueCount << std::endl; - std::cout << "\t pSignalSemaphoreValues: " << timelineSemaphoreInfos.pSignalSemaphoreValues[0] << ", " << + LOG_S_DEBUG << "\t\t signalSemaphoreValueCount: " << timelineSemaphoreInfos.signalSemaphoreValueCount << std::endl; + LOG_S_DEBUG << "\t pSignalSemaphoreValues: " << timelineSemaphoreInfos.pSignalSemaphoreValues[0] << ", " << timelineSemaphoreInfos.pSignalSemaphoreValues[1] << ", " << timelineSemaphoreInfos.pSignalSemaphoreValues[2] << std::endl; } - std::cout << "\t waitSemaphoreCount: " << submitInfo.waitSemaphoreCount << std::endl; - std::cout << "\t\t pWaitSemaphores: " << submitInfo.pWaitSemaphores[0] << ", " << + LOG_S_DEBUG << "\t waitSemaphoreCount: " << submitInfo.waitSemaphoreCount << std::endl; + LOG_S_DEBUG << "\t\t pWaitSemaphores: " << submitInfo.pWaitSemaphores[0] << ", " << 
submitInfo.pWaitSemaphores[1] << ", " << submitInfo.pWaitSemaphores[2] << std::endl; - std::cout << "\t signalSemaphoreCount: " << submitInfo.signalSemaphoreCount << std::endl; - std::cout << "\t\t pSignalSemaphores: " << submitInfo.pSignalSemaphores[0] << ", " << + LOG_S_DEBUG << "\t signalSemaphoreCount: " << submitInfo.signalSemaphoreCount << std::endl; + LOG_S_DEBUG << "\t\t pSignalSemaphores: " << submitInfo.pSignalSemaphores[0] << ", " << submitInfo.pSignalSemaphores[1] << ", " << submitInfo.pSignalSemaphores[2] << std::endl << std::endl; } @@ -1239,8 +1239,8 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters } if (m_dumpDecodeData) { - std::cout << "\t +++++++++++++++++++++++++++< " << currPicIdx << " >++++++++++++++++++++++++++++++" << std::endl; - std::cout << "\t => Decode Submitted for CurrPicIdx: " << currPicIdx << std::endl + LOG_S_DEBUG << "\t +++++++++++++++++++++++++++< " << currPicIdx << " >++++++++++++++++++++++++++++++" << std::endl; + LOG_S_DEBUG << "\t => Decode Submitted for CurrPicIdx: " << currPicIdx << std::endl << "\t\tm_nPicNumInDecodeOrder: " << picNumInDecodeOrder << "\t\tframeCompleteFence " << videoDecodeCompleteFence << "\t\tvideoDecodeCompleteSemaphore " << videoDecodeCompleteSemaphore << "\t\tdstImageView " << pCurrFrameDecParams->decodeFrameInfo.dstPictureResource.imageViewBinding << std::endl; @@ -1264,18 +1264,18 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters if (m_dumpDecodeData && (m_hwLoadBalancingTimelineSemaphore != VK_NULL_HANDLE)) { // For TL semaphore debug uint64_t currSemValue = 0; VkResult semResult = m_vkDevCtx->GetSemaphoreCounterValue(*m_vkDevCtx, m_hwLoadBalancingTimelineSemaphore, &currSemValue); - std::cout << "\t TL semaphore value ater submit: " << currSemValue << ", status: " << semResult << std::endl; + LOG_S_DEBUG << "\t TL semaphore value after submit: " << currSemValue << ", status: " << semResult << std::endl; const bool
waitOnTlSemaphore = false; if (waitOnTlSemaphore) { uint64_t value = m_decodePicCount; VkSemaphoreWaitInfo waitInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, nullptr, VK_SEMAPHORE_WAIT_ANY_BIT, 1, &m_hwLoadBalancingTimelineSemaphore, &value }; - std::cout << "\t TL semaphore wait for value: " << value << std::endl; + LOG_S_DEBUG << "\t TL semaphore wait for value: " << value << std::endl; semResult = m_vkDevCtx->WaitSemaphores(*m_vkDevCtx, &waitInfo, gLongTimeout); semResult = m_vkDevCtx->GetSemaphoreCounterValue(*m_vkDevCtx, m_hwLoadBalancingTimelineSemaphore, &currSemValue); - std::cout << "\t TL semaphore value: " << currSemValue << ", status: " << semResult << std::endl; + LOG_S_DEBUG << "\t TL semaphore value: " << currSemValue << ", status: " << semResult << std::endl; } } @@ -1303,8 +1303,8 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters assert(decodeStatus == VK_QUERY_RESULT_STATUS_COMPLETE_KHR); if (m_dumpDecodeData) { - std::cout << "\t +++++++++++++++++++++++++++< " << currPicIdx << " >++++++++++++++++++++++++++++++" << std::endl; - std::cout << "\t => Decode Status for CurrPicIdx: " << currPicIdx << std::endl + LOG_S_DEBUG << "\t +++++++++++++++++++++++++++< " << currPicIdx << " >++++++++++++++++++++++++++++++" << std::endl; + LOG_S_DEBUG << "\t => Decode Status for CurrPicIdx: " << currPicIdx << std::endl << "\t\tdecodeStatus: " << decodeStatus << std::endl; } } @@ -1368,7 +1368,7 @@ int VkVideoDecoder::DecodePictureWithParameters(VkParserPerFrameDecodeParameters result = filterCmdBuffer->EndCommandBufferRecording(cmdBuf); assert(result == VK_SUCCESS); - if (false) std::cout << currPicIdx << " : OUT view: " << outputImageView->GetImageView() << ", signalSem: " << frameCompleteSemaphore << std::endl << std::flush; + if (false) LOG_S_DEBUG << currPicIdx << " : OUT view: " << outputImageView->GetImageView() << ", signalSem: " << frameCompleteSemaphore << std::endl << std::flush; assert(videoDecodeCompleteSemaphore != 
frameCompleteSemaphore); result = m_yuvFilter->SubmitCommandBuffer(1, filterCmdBuffer->GetCommandBuffer(), 1, &videoDecodeCompleteSemaphore, @@ -1414,11 +1414,11 @@ VkDeviceSize VkVideoDecoder::GetBitstreamBuffer(VkDeviceSize size, pInitializeBufferMemory, initializeBufferMemorySize, newBitstreamBuffer); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: VulkanBitstreamBufferImpl::Create() result: 0x%x\n", result); + LOG_ERROR("ERROR: VulkanBitstreamBufferImpl::Create() result: 0x%x\n", result); return 0; } if (debugBitstreamBufferDumpAlloc) { - std::cout << "\tAllocated bitstream buffer with size " << newSize << " B, " << + LOG_S_DEBUG << "\tAllocated bitstream buffer with size " << newSize << " B, " << newSize/1024 << " KB, " << newSize/1024/1024 << " MB" << std::endl; } if (enablePool) { @@ -1444,18 +1444,18 @@ VkDeviceSize VkVideoDecoder::GetBitstreamBuffer(VkDeviceSize size, newBitstreamBuffer->MemsetData(0x0, copySize, newSize - copySize); #endif if (debugBitstreamBufferDumpAlloc) { - std::cout << "\t\tFrom bitstream buffer pool with size " << newSize << " B, " << + LOG_S_DEBUG << "\t\tFrom bitstream buffer pool with size " << newSize << " B, " << newSize/1024 << " KB, " << newSize/1024/1024 << " MB" << std::endl; - std::cout << "\t\t\t FreeNodes " << m_decodeFramesData.GetBitstreamBuffersQueue().GetFreeNodesNumber(); - std::cout << " of MaxNodes " << m_decodeFramesData.GetBitstreamBuffersQueue().GetMaxNodes(); - std::cout << ", AvailableNodes " << m_decodeFramesData.GetBitstreamBuffersQueue().GetAvailableNodesNumber(); - std::cout << std::endl; + LOG_S_DEBUG << "\t\t\t FreeNodes " << m_decodeFramesData.GetBitstreamBuffersQueue().GetFreeNodesNumber(); + LOG_S_DEBUG << " of MaxNodes " << m_decodeFramesData.GetBitstreamBuffersQueue().GetMaxNodes(); + LOG_S_DEBUG << ", AvailableNodes " << m_decodeFramesData.GetBitstreamBuffersQueue().GetAvailableNodesNumber(); + LOG_S_DEBUG << std::endl; } } bitstreamBuffer = 
newBitstreamBuffer; if (newSize > m_maxStreamBufferSize) { - std::cout << "\tAllocated bitstream buffer with size " << newSize << " B, " << + LOG_S_DEBUG << "\tAllocated bitstream buffer with size " << newSize << " B, " << newSize/1024 << " KB, " << newSize/1024/1024 << " MB" << std::endl; m_maxStreamBufferSize = newSize; } diff --git a/vk_video_decoder/libs/VkVideoDecoder/VkVideoDecoder.h b/vk_video_decoder/libs/VkVideoDecoder/VkVideoDecoder.h index 32839be4..3538a693 100644 --- a/vk_video_decoder/libs/VkVideoDecoder/VkVideoDecoder.h +++ b/vk_video_decoder/libs/VkVideoDecoder/VkVideoDecoder.h @@ -93,7 +93,7 @@ class NvVkDecodeFrameData { VkResult result = m_vkDevCtx->CreateCommandPool(*m_vkDevCtx, &cmdPoolInfo, nullptr, &m_videoCommandPool); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: CreateCommandPool() result: 0x%x\n", result); + LOG_ERROR("ERROR: CreateCommandPool() result: 0x%x\n", result); } VkCommandBufferAllocateInfo cmdInfo = {}; @@ -106,7 +106,7 @@ class NvVkDecodeFrameData { result = m_vkDevCtx->AllocateCommandBuffers(*m_vkDevCtx, &cmdInfo, &m_commandBuffers[0]); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: AllocateCommandBuffers() result: 0x%x\n", result); + LOG_ERROR("ERROR: AllocateCommandBuffers() result: 0x%x\n", result); } else { allocatedCommandBuffers = maxDecodeFramesCount; } @@ -255,7 +255,7 @@ class VkVideoDecoder : public IVulkanVideoDecoderHandler { if (enableDecoderFeatures & ENABLE_HW_LOAD_BALANCING) { if (m_vkDevCtx->GetVideoDecodeNumQueues() < 2) { - std::cout << "\t WARNING: Enabling HW Load Balancing for device with only " << + LOG_S_WARN << "\t WARNING: Enabling HW Load Balancing for device with only " << m_vkDevCtx->GetVideoDecodeNumQueues() << " queue!!!" 
<< std::endl; } @@ -275,7 +275,7 @@ class VkVideoDecoder : public IVulkanVideoDecoderHandler { if (result == VK_SUCCESS) { m_currentVideoQueueIndx = 0; // start with index zero } - std::cout << "\t Enabling HW Load Balancing for device with " + LOG_S_INFO << "\t Enabling HW Load Balancing for device with " << m_vkDevCtx->GetVideoDecodeNumQueues() << " queues" << std::endl; } diff --git a/vk_video_decoder/libs/VkVideoParser/VulkanVideoParser.cpp b/vk_video_decoder/libs/VkVideoParser/VulkanVideoParser.cpp index be28dae0..d7e89e83 100644 --- a/vk_video_decoder/libs/VkVideoParser/VulkanVideoParser.cpp +++ b/vk_video_decoder/libs/VkVideoParser/VulkanVideoParser.cpp @@ -432,39 +432,39 @@ class VulkanVideoParser : public VkParserVideoDecodeClient, { StdVideoDecodeH264ReferenceInfoFlags picFlags = StdVideoDecodeH264ReferenceInfoFlags(); if (m_dumpParserData) - std::cout << "\t\t Flags: "; + LOG_S_DEBUG << "\t\t Flags: "; if (used_for_reference) { if (m_dumpParserData) - std::cout << "FRAME_IS_REFERENCE "; + LOG_S_DEBUG << "FRAME_IS_REFERENCE "; // picFlags.is_reference = true; } if (is_long_term) { if (m_dumpParserData) - std::cout << "IS_LONG_TERM "; + LOG_S_DEBUG << "IS_LONG_TERM "; picFlags.used_for_long_term_reference = true; } if (is_non_existing) { if (m_dumpParserData) - std::cout << "IS_NON_EXISTING "; + LOG_S_DEBUG << "IS_NON_EXISTING "; picFlags.is_non_existing = true; } if (is_field_ref) { if (m_dumpParserData) - std::cout << "IS_FIELD "; + LOG_S_DEBUG << "IS_FIELD "; // picFlags.field_pic_flag = true; } if (!currentPictureIsProgressive && (used_for_reference & topFieldMask)) { if (m_dumpParserData) - std::cout << "TOP_FIELD_IS_REF "; + LOG_S_DEBUG << "TOP_FIELD_IS_REF "; picFlags.top_field_flag = true; } if (!currentPictureIsProgressive && (used_for_reference & bottomFieldMask)) { if (m_dumpParserData) - std::cout << "BOTTOM_FIELD_IS_REF "; + LOG_S_DEBUG << "BOTTOM_FIELD_IS_REF "; picFlags.bottom_field_flag = true; } @@ -487,7 +487,7 @@ class VulkanVideoParser 
: public VkParserVideoDecodeClient, StdVideoDecodeH264ReferenceInfo* pRefPicInfo = &pDpbRefList[dpbEntryIdx].stdReferenceInfo; pRefPicInfo->FrameNum = FrameIdx; if (m_dumpParserData) { - std::cout << "\tdpbEntryIdx: " << dpbEntryIdx + LOG_S_DEBUG << "\tdpbEntryIdx: " << dpbEntryIdx << "dpbSlotIndex: " << dpbSlotIndex << " FrameIdx: " << (int32_t)FrameIdx; } @@ -495,7 +495,7 @@ class VulkanVideoParser : public VkParserVideoDecodeClient, pRefPicInfo->PicOrderCnt[0] = FieldOrderCnt[0]; pRefPicInfo->PicOrderCnt[1] = FieldOrderCnt[1]; if (m_dumpParserData) - std::cout << " fieldOrderCnt[0]: " << pRefPicInfo->PicOrderCnt[0] + LOG_S_DEBUG << " fieldOrderCnt[0]: " << pRefPicInfo->PicOrderCnt[0] << " fieldOrderCnt[1]: " << pRefPicInfo->PicOrderCnt[1] << std::endl; } @@ -518,15 +518,15 @@ class VulkanVideoParser : public VkParserVideoDecodeClient, pRefPicInfo->flags.used_for_long_term_reference = is_long_term; if (m_dumpParserData) { - std::cout << "\tdpbIndex: " << dpbSlotIndex + LOG_S_DEBUG << "\tdpbIndex: " << dpbSlotIndex << " picOrderCntValList: " << PicOrderCnt; - std::cout << "\t\t Flags: "; - std::cout << "FRAME IS REFERENCE "; + LOG_S_DEBUG << "\t\t Flags: "; + LOG_S_DEBUG << "FRAME IS REFERENCE "; if (pRefPicInfo->flags.used_for_long_term_reference) { - std::cout << "IS LONG TERM "; + LOG_S_DEBUG << "IS LONG TERM "; } - std::cout << std::endl; + LOG_S_DEBUG << std::endl; } } @@ -562,15 +562,15 @@ class VulkanVideoParser : public VkParserVideoDecodeClient, #endif if (m_dumpParserData) { - std::cout << "\tdpbIndex: " << dpbSlotIndex + LOG_S_DEBUG << "\tdpbIndex: " << dpbSlotIndex << " picOrderCntValList: " << PicOrderCnt; - std::cout << "\t\t Flags: "; - std::cout << "FRAME IS REFERENCE "; + LOG_S_DEBUG << "\t\t Flags: "; + LOG_S_DEBUG << "FRAME IS REFERENCE "; //if (pRefPicInfo->flags.used_for_long_term_reference) { - // std::cout << "IS LONG TERM "; + // LOG_S_DEBUG << "IS LONG TERM "; //} - std::cout << std::endl; + LOG_S_DEBUG << std::endl; } } @@ -737,7 +737,7 
@@ bool VulkanVideoParser::DecodePicture(VkParserPictureData* pd) } if (m_dumpParserData) { - std::cout + LOG_S_DEBUG << "\t ==> VulkanVideoParser::DecodePicture " << picIdx << std::endl << "\t\t progressive: " << (bool)pd->progressive_frame << // Frame is progressive @@ -798,9 +798,9 @@ bool VulkanVideoParser::DisplayPicture(VkPicIf* pPicBuff, int64_t timestamp) int32_t picIdx = pVkPicBuff ? pVkPicBuff->m_picIdx : -1; if (m_dumpParserData) { - std::cout << "\t ======================< " << picIdx + LOG_S_DEBUG << "\t ======================< " << picIdx << " >============================" << std::endl; - std::cout << "\t ==> VulkanVideoParser::DisplayPicture " << picIdx + LOG_S_DEBUG << "\t ==> VulkanVideoParser::DisplayPicture " << picIdx << std::endl; } assert(picIdx != -1); @@ -820,9 +820,9 @@ bool VulkanVideoParser::DisplayPicture(VkPicIf* pPicBuff, int64_t timestamp) } if (m_dumpParserData) { - std::cout << "\t <== VulkanVideoParser::DisplayPicture " << picIdx + LOG_S_DEBUG << "\t <== VulkanVideoParser::DisplayPicture " << picIdx << std::endl; - std::cout << "\t ======================< " << picIdx + LOG_S_DEBUG << "\t ======================< " << picIdx << " >============================" << std::endl; } return result; @@ -1079,7 +1079,7 @@ uint32_t VulkanVideoParser::ResetPicDpbSlots(uint32_t picIndexSlotValidMask) if (resetSlotsMask & (1 << picIdx)) { resetSlotsMask &= ~(1 << picIdx); if (m_dumpDpbData) { - printf(";;; Resetting picIdx %d, was using dpb slot %d\n", picIdx, m_pictureToDpbSlotMap[picIdx]); + LOG_DEBUG(";;; Resetting picIdx %d, was using dpb slot %d\n", picIdx, m_pictureToDpbSlotMap[picIdx]); } SetPicDpbSlot(picIdx, -1); } @@ -1281,35 +1281,35 @@ uint32_t VulkanVideoParser::FillDpbH264State( assert(numUsedRef <= num_ref_frames); if (m_dumpDpbData) { - std::cout << " =>>> ********************* picIdx: " + LOG_S_DEBUG << " =>>> ********************* picIdx: " << (int32_t)GetPicIdx(pd->pCurrPic) << " *************************" << std::endl; - 
std::cout << "\tRef frames data in for picIdx: " + LOG_S_DEBUG << "\tRef frames data in for picIdx: " << (int32_t)GetPicIdx(pd->pCurrPic) << std::endl << "\tSlot Index:\t\t"; for (uint32_t slot = 0; slot < numUsedRef; slot++) { if (!refOnlyDpbIn[slot].is_non_existing) { - std::cout << slot << ",\t"; + LOG_S_DEBUG << slot << ",\t"; } else { - std::cout << 'X' << ",\t"; + LOG_S_DEBUG << 'X' << ",\t"; } } - std::cout << std::endl + LOG_S_DEBUG << std::endl << "\tPict Index:\t\t"; for (uint32_t slot = 0; slot < numUsedRef; slot++) { if (!refOnlyDpbIn[slot].is_non_existing) { - std::cout << refOnlyDpbIn[slot].m_picBuff->m_picIdx << ",\t"; + LOG_S_DEBUG << refOnlyDpbIn[slot].m_picBuff->m_picIdx << ",\t"; } else { - std::cout << 'X' << ",\t"; + LOG_S_DEBUG << 'X' << ",\t"; } } - std::cout << "\n\tTotal Ref frames for picIdx: " + LOG_S_DEBUG << "\n\tTotal Ref frames for picIdx: " << (int32_t)GetPicIdx(pd->pCurrPic) << " : " << numUsedRef << " out of " << num_ref_frames << " MAX(" << m_maxNumDpbSlots << ")" << std::endl << std::endl; - std::cout << std::flush; + LOG_S_DEBUG << std::flush; } // Map all frames not present in DPB as non-reference, and generate a mask of @@ -1419,42 +1419,42 @@ uint32_t VulkanVideoParser::FillDpbH264State( if (m_dumpDpbData) { uint32_t slotInUseMask = m_dpb.getSlotInUseMask(); uint32_t slotsInUseCount = 0; - std::cout << "\tAllocated Ref slot " << (int32_t)currPicDpbSlot << " for " + LOG_S_DEBUG << "\tAllocated Ref slot " << (int32_t)currPicDpbSlot << " for " << (pd->ref_pic_flag ? 
"REFERENCE" : "NON-REFERENCE") << " picIdx: " << (int32_t)currPicIdx << std::endl; - std::cout << "\tRef frames map for picIdx: " << (int32_t)currPicIdx + LOG_S_DEBUG << "\tRef frames map for picIdx: " << (int32_t)currPicIdx << std::endl << "\tSlot Index:\t\t"; for (uint32_t slot = 0; slot < m_dpb.getMaxSize(); slot++) { if (slotInUseMask & (1 << slot)) { - std::cout << slot << ",\t"; + LOG_S_DEBUG << slot << ",\t"; slotsInUseCount++; } else { - std::cout << 'X' << ",\t"; + LOG_S_DEBUG << 'X' << ",\t"; } } - std::cout << std::endl + LOG_S_DEBUG << std::endl << "\tPict Index:\t\t"; for (uint32_t slot = 0; slot < m_dpb.getMaxSize(); slot++) { if (slotInUseMask & (1 << slot)) { if (m_dpb[slot].getPictureResource()) { - std::cout << m_dpb[slot].getPictureResource()->m_picIdx << ",\t"; + LOG_S_DEBUG << m_dpb[slot].getPictureResource()->m_picIdx << ",\t"; } else { - std::cout << "non existent" + LOG_S_DEBUG << "non existent" << ",\t"; } } else { - std::cout << 'X' << ",\t"; + LOG_S_DEBUG << 'X' << ",\t"; } } - std::cout << "\n\tTotal slots in use for picIdx: " << (int32_t)currPicIdx + LOG_S_DEBUG << "\n\tTotal slots in use for picIdx: " << (int32_t)currPicIdx << " : " << slotsInUseCount << " out of " << m_dpb.getMaxSize() << std::endl; - std::cout << " <<<= ********************* picIdx: " + LOG_S_DEBUG << " <<<= ********************* picIdx: " << (int32_t)GetPicIdx(pd->pCurrPic) << " *************************" << std::endl << std::endl; - std::cout << std::flush; + LOG_S_DEBUG << std::flush; } return refDpbUsedAndValidMask ? 
numUsedRef : 0; } @@ -1476,7 +1476,7 @@ uint32_t VulkanVideoParser::FillDpbH265State( uint32_t refDpbUsedAndValidMask = 0; uint32_t numUsedRef = 0; if (m_dumpParserData) - std::cout << "Ref frames data: " << std::endl; + LOG_S_DEBUG << "Ref frames data: " << std::endl; for (int32_t inIdx = 0; inIdx < HEVC_MAX_DPB_SLOTS; inIdx++) { // used_for_reference: 0 = unused, 1 = top_field, 2 = bottom_field, 3 = // both_fields @@ -1498,7 +1498,7 @@ uint32_t VulkanVideoParser::FillDpbH265State( } if (m_dumpParserData) - std::cout << "Total Ref frames: " << numUsedRef << std::endl; + LOG_S_DEBUG << "Total Ref frames: " << numUsedRef << std::endl; assert(numUsedRef <= m_maxNumDpbSlots); assert(numUsedRef <= HEVC_MAX_DPB_SLOTS); @@ -1549,7 +1549,7 @@ uint32_t VulkanVideoParser::FillDpbH265State( frmListToDpb[originalDpbIndex] = dpbSlot; } else { // This should never happen - printf("DPB mapping logic broken!\n"); + LOG_ERROR("DPB mapping logic broken!\n"); assert(0); } } @@ -1582,9 +1582,9 @@ uint32_t VulkanVideoParser::FillDpbH265State( } if (m_dumpParserData) { - std::cout << "frmListToDpb:" << std::endl; + LOG_S_DEBUG << "frmListToDpb:" << std::endl; for (int8_t dpbResIdx = 0; dpbResIdx < HEVC_MAX_DPB_SLOTS; dpbResIdx++) { - std::cout << "\tfrmListToDpb[" << (int32_t)dpbResIdx << "] is " + LOG_S_DEBUG << "\tfrmListToDpb[" << (int32_t)dpbResIdx << "] is " << (int32_t)frmListToDpb[dpbResIdx] << std::endl; } } @@ -1594,13 +1594,13 @@ uint32_t VulkanVideoParser::FillDpbH265State( const size_t maxNumPocStCurrBefore = ARRAYSIZE(pStdPictureInfo->RefPicSetStCurrBefore); assert((size_t)pin->NumPocStCurrBefore <= maxNumPocStCurrBefore); if ((size_t)pin->NumPocStCurrBefore > maxNumPocStCurrBefore) { - fprintf(stderr, "\nERROR: FillDpbH265State() pin->NumPocStCurrBefore(%d) must be smaller than maxNumPocStCurrBefore(%zd)\n", pin->NumPocStCurrBefore, maxNumPocStCurrBefore); + LOG_ERROR("ERROR: FillDpbH265State() pin->NumPocStCurrBefore(%d) must be smaller than maxNumPocStCurrBefore(%zd)", 
pin->NumPocStCurrBefore, maxNumPocStCurrBefore); } for (int32_t i = 0; i < pin->NumPocStCurrBefore; i++) { uint8_t idx = (uint8_t)pin->RefPicSetStCurrBefore[i]; if (idx < HEVC_MAX_DPB_SLOTS) { if (m_dumpParserData) - std::cout << "\trefPicSetStCurrBefore[" << i << "] is " << (int32_t)idx + LOG_S_DEBUG << "\trefPicSetStCurrBefore[" << i << "] is " << (int32_t)idx << " -> " << (int32_t)frmListToDpb[idx] << std::endl; pStdPictureInfo->RefPicSetStCurrBefore[numPocStCurrBefore++] = frmListToDpb[idx] & 0xf; numPocTotalCurr++; @@ -1614,13 +1614,13 @@ uint32_t VulkanVideoParser::FillDpbH265State( const size_t maxNumPocStCurrAfter = ARRAYSIZE(pStdPictureInfo->RefPicSetStCurrAfter); assert((size_t)pin->NumPocStCurrAfter <= maxNumPocStCurrAfter); if ((size_t)pin->NumPocStCurrAfter > maxNumPocStCurrAfter) { - fprintf(stderr, "\nERROR: FillDpbH265State() pin->NumPocStCurrAfter(%d) must be smaller than maxNumPocStCurrAfter(%zd)\n", pin->NumPocStCurrAfter, maxNumPocStCurrAfter); + LOG_ERROR("ERROR: FillDpbH265State() pin->NumPocStCurrAfter(%d) must be smaller than maxNumPocStCurrAfter(%zd)", pin->NumPocStCurrAfter, maxNumPocStCurrAfter); } for (int32_t i = 0; i < pin->NumPocStCurrAfter; i++) { uint8_t idx = (uint8_t)pin->RefPicSetStCurrAfter[i]; if (idx < HEVC_MAX_DPB_SLOTS) { if (m_dumpParserData) - std::cout << "\trefPicSetStCurrAfter[" << i << "] is " << (int32_t)idx + LOG_S_DEBUG << "\trefPicSetStCurrAfter[" << i << "] is " << (int32_t)idx << " -> " << (int32_t)frmListToDpb[idx] << std::endl; pStdPictureInfo->RefPicSetStCurrAfter[numPocStCurrAfter++] = frmListToDpb[idx] & 0xf; numPocTotalCurr++; @@ -1634,13 +1634,13 @@ uint32_t VulkanVideoParser::FillDpbH265State( const size_t maxNumPocLtCurr = ARRAYSIZE(pStdPictureInfo->RefPicSetLtCurr); assert((size_t)pin->NumPocLtCurr <= maxNumPocLtCurr); if ((size_t)pin->NumPocLtCurr > maxNumPocLtCurr) { - fprintf(stderr, "\nERROR: FillDpbH265State() pin->NumPocLtCurr(%d) must be smaller than maxNumPocLtCurr(%zd)\n", pin->NumPocLtCurr, 
maxNumPocLtCurr); + LOG_ERROR("ERROR: FillDpbH265State() pin->NumPocLtCurr(%d) must be smaller than maxNumPocLtCurr(%zd)", pin->NumPocLtCurr, maxNumPocLtCurr); } for (int32_t i = 0; i < pin->NumPocLtCurr; i++) { uint8_t idx = (uint8_t)pin->RefPicSetLtCurr[i]; if (idx < HEVC_MAX_DPB_SLOTS) { if (m_dumpParserData) - std::cout << "\trefPicSetLtCurr[" << i << "] is " << (int32_t)idx + LOG_S_DEBUG << "\trefPicSetLtCurr[" << i << "] is " << (int32_t)idx << " -> " << (int32_t)frmListToDpb[idx] << std::endl; pStdPictureInfo->RefPicSetLtCurr[numPocLtCurr++] = frmListToDpb[idx] & 0xf; numPocTotalCurr++; @@ -1654,13 +1654,13 @@ uint32_t VulkanVideoParser::FillDpbH265State( for (int32_t i = 0; i < 8; i++) { if (m_dumpParserData) - std::cout << "\tlist indx " << i << ": " - << " refPicSetStCurrBefore: " - << (int32_t)pStdPictureInfo->RefPicSetStCurrBefore[i] - << " refPicSetStCurrAfter: " - << (int32_t)pStdPictureInfo->RefPicSetStCurrAfter[i] - << " refPicSetLtCurr: " - << (int32_t)pStdPictureInfo->RefPicSetLtCurr[i] << std::endl; + LOG_S_DEBUG << "\tlist indx " << i << ": " + << " refPicSetStCurrBefore: " + << (int32_t)pStdPictureInfo->RefPicSetStCurrBefore[i] + << " refPicSetStCurrAfter: " + << (int32_t)pStdPictureInfo->RefPicSetStCurrAfter[i] + << " refPicSetLtCurr: " + << (int32_t)pStdPictureInfo->RefPicSetLtCurr[i] << std::endl; } int8_t dpbSlot = AllocateDpbSlotForCurrentH265(GetPic(pd->pCurrPic), @@ -1690,14 +1690,14 @@ uint32_t VulkanVideoParser::FillDpbAV1State( uint32_t referenceIndex = 0; if (m_dumpParserData) - std::cout << "Ref frames data: " << std::endl; + LOG_S_DEBUG << "Ref frames data: " << std::endl; if (m_dumpDpbData) { - printf(";;;; ======= AV1 DPB fill begin %d =======\n", m_nCurrentPictureID); + printf(";;;; ======= AV1 DPB fill begin %d =======\n", m_nCurrentPictureID); printf("ref_frame_idx: "); for (int i = 0 ; i < 7; i++) printf("%02d ", i); - printf("\nref_frame_idx: "); + printf("\nref_frame_idx: "); for (int i = 0 ; i < 7; i++) printf("%02d ", pin->ref_frame_idx[i]);
printf("\n"); @@ -1910,13 +1910,13 @@ bool VulkanVideoParser::UpdatePictureParameters( VkSharedBaseObj& client) { if (false) { - std::cout << "################################################# " << std::endl; - std::cout << "Update Picture parameters " + LOG_S_DEBUG << "################################################# " << std::endl; + LOG_S_DEBUG << "Update Picture parameters " << PictureParametersTypeToName(pictureParametersObject->GetStdType()) << ": " << pictureParametersObject.Get() << ", count: " << (uint32_t)pictureParametersObject->GetUpdateSequenceCount() << std::endl << std::flush; - std::cout << "################################################# " << std::endl; + LOG_S_DEBUG << "################################################# " << std::endl; } if (m_decoderHandler == NULL) { @@ -1993,9 +1993,9 @@ bool VulkanVideoParser::DecodePicture( pCurrFrameDecParams->pStdSps = pin->pStdSps; pCurrFrameDecParams->pStdVps = nullptr; if (false) { - std::cout << "\n\tCurrent h.264 Picture SPS update : " + LOG_S_DEBUG << "\n\tCurrent h.264 Picture SPS update : " << pin->pStdSps->GetUpdateSequenceCount() << std::endl; - std::cout << "\tCurrent h.264 Picture PPS update : " + LOG_S_DEBUG << "\tCurrent h.264 Picture PPS update : " << pin->pStdPps->GetUpdateSequenceCount() << std::endl; } @@ -2104,11 +2104,11 @@ bool VulkanVideoParser::DecodePicture( pCurrFrameDecParams->pStdSps = pin->pStdSps; pCurrFrameDecParams->pStdVps = pin->pStdVps; if (false) { - std::cout << "\n\tCurrent h.265 Picture VPS update : " + LOG_S_DEBUG << "\n\tCurrent h.265 Picture VPS update : " << pin->pStdVps->GetUpdateSequenceCount() << std::endl; - std::cout << "\n\tCurrent h.265 Picture SPS update : " + LOG_S_DEBUG << "\n\tCurrent h.265 Picture SPS update : " << pin->pStdSps->GetUpdateSequenceCount() << std::endl; - std::cout << "\tCurrent h.265 Picture PPS update : " + LOG_S_DEBUG << "\tCurrent h.265 Picture PPS update : " << pin->pStdPps->GetUpdateSequenceCount() << std::endl; } @@ -2171,7 +2171,7 
@@ bool VulkanVideoParser::DecodePicture( pStdPictureInfo->PicOrderCntVal = pin->CurrPicOrderCntVal; if (m_dumpParserData) - std::cout << "\tnumPocStCurrBefore: " << (int32_t)pin->NumPocStCurrBefore + LOG_S_DEBUG << "\tnumPocStCurrBefore: " << (int32_t)pin->NumPocStCurrBefore << " numPocStCurrAfter: " << (int32_t)pin->NumPocStCurrAfter << " numPocLtCurr: " << (int32_t)pin->NumPocLtCurr << std::endl; @@ -2204,24 +2204,24 @@ bool VulkanVideoParser::DecodePicture( if (m_dumpParserData) { for (int32_t i = 0; i < HEVC_MAX_DPB_SLOTS; i++) { - std::cout << "\tdpbIndex: " << i; + LOG_S_DEBUG << "\tdpbIndex: " << i; if (pDpbRefList[i]) { - std::cout << " REFERENCE FRAME"; + LOG_S_DEBUG << " REFERENCE FRAME"; - std::cout << " picOrderCntValList: " + LOG_S_DEBUG << " picOrderCntValList: " << (int32_t)pDpbRefList[i] .dpbSlotInfo.pStdReferenceInfo->PicOrderCntVal; - std::cout << "\t\t Flags: "; + LOG_S_DEBUG << "\t\t Flags: "; if (pDpbRefList[i] .dpbSlotInfo.pStdReferenceInfo->flags.used_for_long_term_reference) { - std::cout << "IS LONG TERM "; + LOG_S_DEBUG << "IS LONG TERM "; } } else { - std::cout << " NOT A REFERENCE "; + LOG_S_DEBUG << " NOT A REFERENCE "; } - std::cout << std::endl; + LOG_S_DEBUG << std::endl; } } @@ -2239,7 +2239,7 @@ bool VulkanVideoParser::DecodePicture( pCurrFrameDecParams->pStdVps = nullptr; if (false) { - std::cout << "\n\tCurrent AV1 Picture SPS update : " + LOG_S_DEBUG << "\n\tCurrent AV1 Picture SPS update : " << pin->pStdSps->GetUpdateSequenceCount() << std::endl; } @@ -2324,7 +2324,7 @@ bool VulkanVideoParser::DecodePicture( bRet = (m_decoderHandler->DecodePictureWithParameters(pCurrFrameDecParams, pDecodePictureInfo) >= 0); if (m_dumpParserData) { - std::cout << "\t <== VulkanVideoParser::DecodePicture " << PicIdx << std::endl; + LOG_S_DEBUG << "\t <== VulkanVideoParser::DecodePicture " << PicIdx << std::endl; } m_nCurrentPictureID++; return bRet; diff --git a/vk_video_decoder/libs/VulkanVideoFrameBuffer/VulkanVideoFrameBuffer.cpp 
b/vk_video_decoder/libs/VulkanVideoFrameBuffer/VulkanVideoFrameBuffer.cpp index 5f5dedc5..a6c1884b 100644 --- a/vk_video_decoder/libs/VulkanVideoFrameBuffer/VulkanVideoFrameBuffer.cpp +++ b/vk_video_decoder/libs/VulkanVideoFrameBuffer/VulkanVideoFrameBuffer.cpp @@ -393,7 +393,7 @@ class VkVideoFrameBuffer : public VulkanVideoFrameBuffer { m_displayFrames.push((uint8_t)picId); if (m_debug) { - std::cout << "==> Queue Display Picture picIdx: " << (uint32_t)picId + LOG_S_DEBUG << "==> Queue Display Picture picIdx: " << (uint32_t)picId << "\t\tdisplayOrder: " << m_perFrameDecodeImageSet[picId].m_displayOrder << "\tdecodeOrder: " << m_perFrameDecodeImageSet[picId].m_decodeOrder << "\ttimestamp " << m_perFrameDecodeImageSet[picId].m_timestamp << std::endl; } @@ -438,7 +438,7 @@ class VkVideoFrameBuffer : public VulkanVideoFrameBuffer { m_perFrameDecodeImageSet[picId].filterPoolNode = const_cast(pReferencedObjectsInfo->pFilterPoolNode); if (m_debug) { - std::cout << "==> Queue Decode Picture picIdx: " << (uint32_t)picId + LOG_S_DEBUG << "==> Queue Decode Picture picIdx: " << (uint32_t)picId << "\t\tdisplayOrder: " << m_perFrameDecodeImageSet[picId].m_displayOrder << "\tdecodeOrder: " << m_perFrameDecodeImageSet[picId].m_decodeOrder << std::endl; } @@ -548,7 +548,7 @@ class VkVideoFrameBuffer : public VulkanVideoFrameBuffer { } if (m_debug) { - std::cout << "<<<<<<<<<<< Dequeue from Display: " << pictureIndex << " out of " + LOG_S_DEBUG << "<<<<<<<<<<< Dequeue from Display: " << pictureIndex << " out of " << numberofPendingFrames << " ===========" << std::endl; } return numberofPendingFrames; @@ -700,7 +700,7 @@ class VkVideoFrameBuffer : public VulkanVideoFrameBuffer { m_perFrameDecodeImageSet[foundPicId].m_picIdx = foundPicId; if (m_debug) { - std::cout << "==> ReservePictureBuffer picIdx: " << (uint32_t)foundPicId << " of " << numAvailablePictures + LOG_S_DEBUG << "==> ReservePictureBuffer picIdx: " << (uint32_t)foundPicId << " of " << numAvailablePictures << 
"\t\tdisplayOrder: " << m_perFrameDecodeImageSet[foundPicId].m_decodeOrder << "\tdecodeOrder: " << m_perFrameDecodeImageSet[foundPicId].m_decodeOrder << "\ttimestamp " << m_perFrameDecodeImageSet[foundPicId].m_timestamp << std::endl; @@ -777,7 +777,7 @@ VkResult NvPerFrameDecodeResources::CreateImage( const VulkanDeviceContext* vkDe VkResult result = VK_SUCCESS; if (false) { - std::cout << "Create FB Image: " << (int)pImageSpec->imageTypeIdx << " : " << imageIndex + LOG_S_DEBUG << "Create FB Image: " << (int)pImageSpec->imageTypeIdx << " : " << imageIndex << ", extent: " << pImageSpec->createInfo.extent.width << " x " << pImageSpec->createInfo.extent.height << ", format " << pImageSpec->createInfo.format @@ -954,7 +954,7 @@ int32_t NvPerFrameDecodeImageSet::init(const VulkanDeviceContext* vkDevCtx, if (reconfigureImages || updateFrameBufferGeometry) { if (false) { - std::cout << "Reconfigure FB: " << (int)imageTypeIdx << ", extent: " << m_imageSpecs[imageTypeIdx].createInfo.extent.width << " x " + LOG_S_DEBUG << "Reconfigure FB: " << (int)imageTypeIdx << ", extent: " << m_imageSpecs[imageTypeIdx].createInfo.extent.width << " x " << m_imageSpecs[imageTypeIdx].createInfo.extent.height << " to " << imageSpecs[imageTypeIdx].createInfo.extent.width << " x " << imageSpecs[imageTypeIdx].createInfo.extent.height diff --git a/vk_video_encoder/demos/vk-video-enc/Main.cpp b/vk_video_encoder/demos/vk-video-enc/Main.cpp index 37b046a6..ba92a98c 100644 --- a/vk_video_encoder/demos/vk-video-enc/Main.cpp +++ b/vk_video_encoder/demos/vk-video-enc/Main.cpp @@ -20,6 +20,7 @@ #include "VkCodecUtils/VulkanVideoEncodeDisplayQueue.h" #include "VkCodecUtils/VulkanEncoderFrameProcessor.h" #include "VkShell/Shell.h" +#include "Logger.h" int main(int argc, char** argv) { @@ -104,7 +105,7 @@ int main(int argc, char** argv) VkResult result = vkDevCtxt.InitVulkanDevice(encoderConfig->appName.c_str(), VK_NULL_HANDLE, encoderConfig->verbose); if (result != VK_SUCCESS) { - printf("Could not 
initialize the Vulkan device!\n"); + LOG_ERROR("Could not initialize the Vulkan device!\n"); return -1; } @@ -154,6 +155,7 @@ int main(int argc, char** argv) VkSharedBaseObj frameProcessor; result = CreateEncoderFrameProcessor(&vkDevCtxt, frameProcessor); if (result != VK_SUCCESS) { + LOG_ERROR("Could not create the encoder frame processor!\n"); return -1; } @@ -287,7 +289,7 @@ int main(int argc, char** argv) for(; curFrameIndex < encoderConfig->numFrames; curFrameIndex++) { if (encoderConfig->verboseFrameStruct) { - std::cout << "####################################################################################" << std::endl + LOG_S_DEBUG << "####################################################################################" << std::endl << "Start processing current input frame index: " << curFrameIndex << std::endl; } @@ -297,18 +299,18 @@ int main(int argc, char** argv) // load frame data from the file result = encoder->LoadNextFrame(encodeFrameInfo); if (result != VK_SUCCESS) { - std::cout << "ERROR processing input frame index: " << curFrameIndex << std::endl; + LOG_S_ERROR << "ERROR processing input frame index: " << curFrameIndex << std::endl; break; } if (encoderConfig->verboseFrameStruct) { - std::cout << "End processing current input frame index: " << curFrameIndex << std::endl; + LOG_S_DEBUG << "End processing current input frame index: " << curFrameIndex << std::endl; } } encoder->WaitForThreadsToComplete(); - std::cout << "Done processing " << curFrameIndex << " input frames!" << std::endl + LOG_S_INFO << "Done processing " << curFrameIndex << " input frames!" 
<< std::endl << "Encoded file's location is at " << encoderConfig->outputFileHandler.GetFileName() << std::endl; return 0; diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfig.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfig.cpp index ee79ab9b..10757467 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfig.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfig.cpp @@ -27,6 +27,7 @@ void printHelp(VkVideoCodecOperationFlagBitsKHR codec) -i, --input .yuv Input YUV File Name (YUV420p 8bpp only) \n\ -o, --output .264/5,ivf Output H264/5/AV1 File Name \n\ -c, --codec select codec type: avc (h264) or hevc (h265) or av1\n\ + --logLevel : select the log level, default 1 for errors\n\ --dpbMode : select DPB mode: layered, separate\n\ --inputWidth : Input Width \n\ --inputHeight : Input Height \n\ @@ -68,11 +69,11 @@ void printHelp(VkVideoCodecOperationFlagBitsKHR codec) --testOutOfOrderRecording Testing only: enable testing for out-of-order-recording\n"); if ((codec == VK_VIDEO_CODEC_OPERATION_NONE_KHR) || (codec == VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR)) { - fprintf(stderr, "\nH264 specific arguments: None\n"); + LOG_ERROR_CONFIG("\nH264 specific arguments: None\n"); } if ((codec == VK_VIDEO_CODEC_OPERATION_NONE_KHR) || (codec == VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR)) { - fprintf(stderr, "\nH265 specific arguments: None\n"); + LOG_ERROR_CONFIG("\nH265 specific arguments: None\n"); } if ((codec == VK_VIDEO_CODEC_OPERATION_NONE_KHR) || (codec == VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR)) { @@ -137,11 +138,23 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) appName = args[0]; + // Double argument list handling to have the log level ready during the argument parsing. 
+ for (int32_t i = 1; i < argc; i++) { + if (args[i] == "-l" || args[i] == "--logLevel") { + uint32_t logLevel = LogLevel::LOG_INFO; + if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &logLevel) != 1)) { + fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + return -1; + } + Logger::instance().setLogLevel(logLevel); + } + } + for (int32_t i = 1; i < argc; i++) { if (args[i] == "-i" || args[i] == "--input") { if (++i >= argc) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } size_t fileSize = inputFileHandler.SetFileName(args[i].c_str()); @@ -155,7 +168,7 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) } } else if (args[i] == "-o" || args[i] == "--output") { if (++i >= argc) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } size_t fileSize = outputFileHandler.SetFileName(args[i].c_str()); @@ -175,13 +188,20 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) codec = VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR; } else { // Invalid codec - fprintf(stderr, "Invalid codec: %s\n", codec_.c_str()); + LOG_ERROR_CONFIG("Invalid codec: %s\n", codec_.c_str()); return -1; } if (verbose) { - printf("Selected codec: %s\n", codec_.c_str()); + LOG_INFO("Selected codec: %s\n", codec_.c_str()); } i++; // Skip the next argument since it's the codec value + } else if (args[i] == "-l" || args[i] == "--logLevel") { + uint32_t logLevel = LogLevel::LOG_INFO; + if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &logLevel) != 1)) { + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); + return -1; + } + Logger::instance().setLogLevel(logLevel); } else if (args[i] == "--dpbMode") { std::string dpbMode = args[i + 1]; if (dpbMode == "separate") { @@ -190,31 +210,31 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) 
useDpbArray = true; } else { // Invalid codec - fprintf(stderr, "Invalid DPB mode: %s\n", dpbMode.c_str()); + LOG_ERROR_CONFIG("Invalid DPB mode: %s\n", dpbMode.c_str()); return -1; } if (verbose) { - printf("Selected DPB mode: %s\n", dpbMode.c_str()); + LOG_INFO("Selected DPB mode: %s\n", dpbMode.c_str()); } i++; // Skip the next argument since it's the dpbMode value } else if (args[i] == "--inputWidth") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &input.width) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--inputHeight") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &input.height) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--inputNumPlanes") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &input.numPlanes) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } if ((input.numPlanes < 2) || (input.numPlanes > 3)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); - fprintf(stderr, "Currently supported number of planes are 2 or 3\n"); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("Currently supported number of planes are 2 or 3\n"); } } else if (args[i] == "--inputChromaSubsampling") { std::string chromeSubsampling = args[i + 1]; @@ -228,116 +248,116 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) input.chromaSubsampling = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR; } else { // Invalid chromeSubsampling - fprintf(stderr, "Invalid chromeSubsampling: %s\nValid string values are 400, 420, 422, 444 \n", chromeSubsampling.c_str()); + LOG_ERROR_CONFIG("Invalid chromeSubsampling: %s\nValid 
string values are 400, 420, 422, 444 \n", chromeSubsampling.c_str()); return -1; } i++; // Skip the next argument since it's the chromeSubsampling value } else if (args[i] == "--inputLumaPlanePitch") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%llu", (long long unsigned int*)&input.planeLayouts[0].rowPitch) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--inputBpp") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%hhu", &input.bpp) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--msbShift") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%hhu", &input.msbShift) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--startFrame") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &startFrame) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--numFrames") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &numFrames) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG( "invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--encodeOffsetX") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &encodeOffsetX) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--encodeOffsetY") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &encodeOffsetY) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 
1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--encodeWidth") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &encodeWidth) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--encodeHeight") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &encodeHeight) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--encodeMaxWidth") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &encodeMaxWidth) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--encodeMaxHeight") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%u", &encodeMaxHeight) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--minQp") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &minQp) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--maxQp") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &maxQp) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } // GOP structure } else if (args[i] == "--gopFrameCount") { uint8_t gopFrameCount = EncoderConfig::DEFAULT_GOP_FRAME_COUNT; if (++i >= argc || sscanf(args[i].c_str(), "%hhu", &gopFrameCount) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid 
parameter for %s\n", args[i - 1].c_str()); return -1; } gopStructure.SetGopFrameCount(gopFrameCount); if (verbose) { - printf("Selected gopFrameCount: %d\n", gopFrameCount); + LOG_INFO ("Selected gopFrameCount: %d\n", gopFrameCount); } } else if (args[i] == "--idrPeriod") { int32_t idrPeriod = EncoderConfig::DEFAULT_GOP_IDR_PERIOD; if (++i >= argc || sscanf(args[i].c_str(), "%d", &idrPeriod) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } gopStructure.SetIdrPeriod(idrPeriod); if (verbose) { - printf("Selected idrPeriod: %d\n", idrPeriod); + LOG_INFO ("Selected idrPeriod: %d\n", idrPeriod); } } else if (args[i] == "--consecutiveBFrameCount") { uint8_t consecutiveBFrameCount = EncoderConfig::DEFAULT_CONSECUTIVE_B_FRAME_COUNT; if (++i >= argc || sscanf(args[i].c_str(), "%hhu", &consecutiveBFrameCount) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } gopStructure.SetConsecutiveBFrameCount(consecutiveBFrameCount); if (verbose) { - printf("Selected consecutiveBFrameCount: %d\n", consecutiveBFrameCount); + LOG_INFO ("Selected consecutiveBFrameCount: %d\n", consecutiveBFrameCount); } } else if (args[i] == "--temporalLayerCount") { uint8_t temporalLayerCount = EncoderConfig::DEFAULT_TEMPORAL_LAYER_COUNT; if (++i >= argc || sscanf(args[i].c_str(), "%hhu", &temporalLayerCount) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } gopStructure.SetTemporalLayerCount(temporalLayerCount); if (verbose) { - printf("Selected temporalLayerCount: %d\n", temporalLayerCount); + LOG_INFO("Selected temporalLayerCount: %d\n", temporalLayerCount); } } else if (args[i] == "--lastFrameType") { VkVideoGopStructure::FrameType lastFrameType = 
VkVideoGopStructure::FRAME_TYPE_P; @@ -350,24 +370,24 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) lastFrameType = VkVideoGopStructure::FRAME_TYPE_I; } else { // Invalid frameTypeName - fprintf(stderr, "Invalid frameTypeName: %s\n", frameTypeName.c_str()); + LOG_ERROR_CONFIG("Invalid frameTypeName: %s\n", frameTypeName.c_str()); return -1; } i++; // Skip the next argument since it's the frameTypeName value gopStructure.SetLastFrameType(lastFrameType); if (verbose) { - printf("Selected frameTypeName: %s\n", gopStructure.GetFrameTypeName(lastFrameType)); + LOG_INFO("Selected frameTypeName: %s\n", gopStructure.GetFrameTypeName(lastFrameType)); } } else if (args[i] == "--closedGop") { gopStructure.SetClosedGop(); } else if (args[i] == "--qualityLevel") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &qualityLevel) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--tuningMode") { if (++i >= argc) { - fprintf(stderr, "Invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("Invalid parameter for %s\n", args[i - 1].c_str()); return -1; } std::string tuningModeStr = argv[i]; @@ -382,12 +402,12 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) } else if (tuningModeStr == "4" || tuningModeStr == "lossless") { tuningMode = VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR; } else { - fprintf(stderr, "Invalid tuningMode: %s\n", tuningModeStr.c_str()); + LOG_ERROR_CONFIG("Invalid tuningMode: %s\n", tuningModeStr.c_str()); return -1; } } else if (args[i] == "--rateControlMode") { if (++i >= argc) { - fprintf(stderr, "invalid parameter for %s\n", args[i-1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i-1].c_str()); return -1; } std::string rc = args[i]; @@ -401,54 +421,54 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) rateControlMode = 
VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR; }else { // Invalid rateControlMode - fprintf(stderr, "Invalid rateControlMode: %s\n", rc.c_str()); + LOG_ERROR_CONFIG("Invalid rateControlMode: %s\n", rc.c_str()); return -1; } } else if (args[i] == "--averageBitrate") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &averageBitrate) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--maxBitrate") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &maxBitrate) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--qpI") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &constQp.qpIntra) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--qpP") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &constQp.qpInterP) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--qpB") { if (++i >= argc || sscanf(args[i].c_str(), "%u", &constQp.qpInterB) != 1) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--deviceID") { if ((++i >= argc) || (sscanf(args[i].c_str(), "%x", &deviceId) != 1)) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 1].c_str()); return -1; } } else if (args[i] == "--deviceUuid") { if (++i >= argc) { - fprintf(stderr, "invalid parameter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s\n", args[i - 
1].c_str()); return -1; } size_t size = SetHexDeviceUUID(args[i].c_str()); if (size != VK_UUID_SIZE) { - fprintf(stderr,"Invalid deviceUuid format used: %s with size: %zu." + LOG_ERROR_CONFIG("Invalid deviceUuid format used: %s with size: %zu." "deviceUuid must be represented by 16 hex (32 bytes) values.", args[i].c_str(), args[i].length()); return -1; } } else if (args[i] == "--qpMap") { if (++i >= argc) { - fprintf(stderr, "Invalid paramter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("Invalid parameter for %s\n", args[i - 1].c_str()); return -1; } if (args[i] == "deltaQpMap") { @@ -456,13 +476,13 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) } else if (args[i] == "emphasisMap") { qpMapMode = EMPHASIS_MAP; } else { - fprintf(stderr, "Invalid quntization map mode %s\n", args[i].c_str()); + LOG_ERROR_CONFIG("Invalid quantization map mode %s\n", args[i].c_str()); return -1; } enableQpMap = true; } else if (args[i] == "--qpMapFileName") { if (++i >= argc) { - fprintf(stderr, "Invaid paramter for %s\n", args[i - 1].c_str()); + LOG_ERROR_CONFIG("Invalid parameter for %s\n", args[i - 1].c_str()); return -1; } size_t fileSize = qpMapFileHandler.SetFileName(args[i].c_str()); @@ -472,9 +492,7 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) enableQpMap = true; } else if (args[i] == "--testOutOfOrderRecording") { // Testing only - don't use this feature for production! 
- if (verbose) { - fprintf(stdout, "Warning: %s should only be used for testing!\n", args[i].c_str()); - } + LOG_WARN_CONFIG("Warning: %s should only be used for testing!", args[i].c_str()); enableOutOfOrderRecording = true; } else { argcount++; @@ -483,26 +501,24 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) } if (!inputFileHandler.HasFileName()) { - fprintf(stderr, "An input file was not specified\n"); + LOG_ERROR_CONFIG("An input file was not specified\n"); return -1; } if (input.width == 0) { - fprintf(stderr, "The width was not specified\n"); + LOG_ERROR_CONFIG("The width was not specified\n"); return -1; } if (input.height == 0) { - fprintf(stderr, "The height was not specified\n"); + LOG_ERROR_CONFIG("The height was not specified\n"); return -1; } if (!outputFileHandler.HasFileName()) { const char* defaultOutName = (codec == VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR) ? "out.264" : (codec == VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR) ? "out.265" : "out.ivf"; - if (verbose) { - fprintf(stdout, "No output file name provided. Using %s.\n", defaultOutName); - } + LOG_DEBUG_CONFIG("No output file name provided. Using %s.\n", defaultOutName); size_t fileSize = outputFileHandler.SetFileName(defaultOutName); if (fileSize <= 0) { return (int)fileSize; @@ -542,28 +558,26 @@ int EncoderConfig::ParseArguments(int argc, char *argv[]) } if (minQp == -1) { - if (verbose) { - fprintf(stdout, "No QP was provided. Using default value: 20.\n"); - } + LOG_DEBUG_CONFIG("No QP was provided. 
Using default value: 20.\n"); minQp = 20; } codecBlockAlignment = H264MbSizeAlignment; // H264 if (enableQpMap && !qpMapFileHandler.HasFileName()) { - fprintf(stderr, "No qpMap file was provided."); + LOG_ERROR_CONFIG ("No qpMap file was provided."); return -1; } frameCount = inputFileHandler.GetFrameCount(input.width, input.height, input.bpp, input.chromaSubsampling); if (numFrames == 0 || numFrames > frameCount) { - std::cout << "numFrames " << numFrames + LOG_S_INFO << "numFrames " << numFrames << " should be different from zero and inferior to input file frame count: " << frameCount << ". Use input file frame count." << std::endl; numFrames = frameCount; if (numFrames == 0) { - fprintf(stderr, "No frames found in the input file, frame count is zero. Exit."); + LOG_ERROR_CONFIG ("No frames found in the input file, frame count is zero. Exit."); return -1; } } @@ -590,8 +604,8 @@ VkResult EncoderConfig::CreateCodecConfig(int argc, char *argv[], codec = VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR; } else { // Invalid codec - fprintf(stderr, "Invalid codec: %s\n", codecStr.c_str()); - fprintf(stderr, "Supported codecs are: avc, hevc and av1\n"); + LOG_ERROR_CONFIG("Invalid codec: %s\n", codecStr.c_str()); + LOG_ERROR_CONFIG("Supported codecs are: avc, hevc and av1\n"); return VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR; } } else if (args[i] == "--help" || args[i] == "-h") { @@ -655,7 +669,7 @@ VkResult EncoderConfig::CreateCodecConfig(int argc, char *argv[], return VK_SUCCESS; } else { - fprintf(stderr, "Codec type is not selected\n. Please select it with --codec parameters\n"); + LOG_ERROR_CONFIG("Codec type is not selected\n. 
Please select it with --codec parameters\n"); printHelp(codec); return VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR; } diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfig.h b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfig.h index 0dc81353..ee04e2eb 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfig.h +++ b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfig.h @@ -36,6 +36,8 @@ #else # include "VkCodecUtils/VulkanFilter.h" #endif +#include "Logger.h" + struct EncoderConfigH264; struct EncoderConfigH265; struct EncoderConfigAV1; @@ -119,13 +121,13 @@ struct EncoderInputImageParameters bool VerifyInputs() { if ((width == 0) || (height == 0)) { - fprintf(stderr, "Invalid input width (%d) and/or height(%d) parameters!", width, height); + LOG_S_ERROR << "Invalid input width" << width << " and/or height" << height << " parameters!" << std::endl; return false; } uint32_t bytesPerPixel = (bpp + 7) / 8; if ((bytesPerPixel < 1) || (bytesPerPixel > 2)) { - fprintf(stderr, "Invalid input bpp (%d) parameter!", bpp); + LOG_S_ERROR << "Invalid input bpp parameter: " << bpp << std::endl; return false; } @@ -176,7 +178,7 @@ struct EncoderInputImageParameters (numPlanes == 2)); if (vkFormat == VK_FORMAT_UNDEFINED) { - fprintf(stderr, "Invalid input parameters!"); + LOG_S_ERROR << "Invalid input parameters!" 
<< std::endl; return false; } @@ -192,7 +194,6 @@ class EncoderInputFileHandler , m_fileHandle() , m_Y4MHeaderOffset(0) , m_memMapedFile() - , m_verbose(verbose) { } @@ -207,8 +208,8 @@ class EncoderInputFileHandler m_memMapedFile.unmap(); if (m_fileHandle != nullptr) { - if (fclose(m_fileHandle)) { - fprintf(stderr, "Failed to close input file %s", m_fileName); + if(fclose(m_fileHandle)) { + LOG_S_ERROR << "Failed to close input file " << m_fileName << std::endl; } m_fileHandle = nullptr; @@ -265,7 +266,7 @@ class EncoderInputFileHandler const uint64_t mappedLength = (uint64_t)m_memMapedFile.mapped_length(); if (mappedLength < offset) { - printf("File overflow at fileOffset %lld\n", (long long unsigned int)offset); + LOG_S_ERROR << "File overflow at fileOffset " << offset << std::endl; assert(!"Input file overflow"); return nullptr; } @@ -428,22 +429,19 @@ class EncoderInputFileHandler { m_fileHandle = fopen(m_fileName, "rb"); if (m_fileHandle == nullptr) { - fprintf(stderr, "Failed to open input file %s", m_fileName); + LOG_S_ERROR << "Failed to open input file " << m_fileName << std::endl; return 0; } std::error_code error; m_memMapedFile.map(m_fileName, 0, mio::map_entire_file, error); if (error) { - fprintf(stderr, "Failed to map the input file %s", m_fileName); const auto& errmsg = error.message(); - std::printf("error mapping file: %s, exiting...\n", errmsg.c_str()); + LOG_S_ERROR << "Failed to map the input file: " << m_fileName << " with error msg: " << errmsg << std::endl; return error.value(); } - if (m_verbose) { - printf("Input file size is: %zd\n", m_memMapedFile.length()); - } + LOG_DEBUG_CONFIG ("Input file size is: %zd\n", m_memMapedFile.length()); return m_memMapedFile.length(); } @@ -457,7 +455,6 @@ class EncoderInputFileHandler FILE* m_fileHandle; uint64_t m_Y4MHeaderOffset; mio::basic_mmap m_memMapedFile; - uint32_t m_verbose : 1; }; class EncoderOutputFileHandler @@ -484,7 +481,7 @@ class EncoderOutputFileHandler if (m_fileHandle != nullptr) { 
if(fclose(m_fileHandle)) { - fprintf(stderr, "Failed to close output file %s", m_fileName); + LOG_S_ERROR << "Failed to close output file " << m_fileName << std::endl; } m_fileHandle = nullptr; @@ -530,7 +527,7 @@ class EncoderOutputFileHandler { m_fileHandle = fopen(m_fileName, "wb"); if (m_fileHandle == nullptr) { - fprintf(stderr, "Failed to open output file %s", m_fileName); + LOG_S_ERROR << "Failed to open output file " << m_fileName << std::endl; return 0; } @@ -555,7 +552,6 @@ class EncoderQpMapFileHandler : m_fileName{} , m_fileHandle() , m_memMapedFile() - , m_verbose(verbose) { } @@ -571,7 +567,7 @@ class EncoderQpMapFileHandler if (m_fileHandle != nullptr) { if(fclose(m_fileHandle)) { - fprintf(stderr, "Failed to close input file %s", m_fileName); + LOG_S_ERROR << "Failed to close input file " << m_fileName << std::endl; } m_fileHandle = nullptr; @@ -612,7 +608,7 @@ class EncoderQpMapFileHandler const uint64_t mappedLength = (uint64_t)m_memMapedFile.mapped_length(); if (mappedLength < fileOffset) { - printf("File overflow at fileOffset %llu\n", (unsigned long long int)fileOffset); + LOG_S_ERROR << "File overflow at fileOffset " << fileOffset << std::endl; assert(!"Input file overflow"); return nullptr; } @@ -624,22 +620,19 @@ class EncoderQpMapFileHandler { m_fileHandle = fopen(m_fileName, "rb"); if (m_fileHandle == nullptr) { - fprintf(stderr, "Failed to open input file %s", m_fileName); + LOG_S_ERROR << "Failed to open input file " << m_fileName << std::endl; return 0; } std::error_code error; m_memMapedFile.map(m_fileName, 0, mio::map_entire_file, error); if (error) { - fprintf(stderr, "Failed to map the input file %s", m_fileName); const auto& errmsg = error.message(); - std::printf("error mapping file: %s, exiting...\n", errmsg.c_str()); + LOG_S_ERROR << "Failed to map input file " << m_fileName << " error: " << errmsg.c_str() << std::endl; return error.value(); } - if (m_verbose) { - printf("Input file size is: %zd\n", m_memMapedFile.length()); - } 
+ LOG_DEBUG_CONFIG ("Input file size is: %zd\n", m_memMapedFile.length()); return m_memMapedFile.length(); } @@ -652,7 +645,6 @@ class EncoderQpMapFileHandler char m_fileName[256]; FILE* m_fileHandle; mio::basic_mmap m_memMapedFile; - uint32_t m_verbose : 1; }; struct EncoderConfig : public VkVideoRefCountBase { diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigAV1.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigAV1.cpp index 6050c004..d9aa1ad9 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigAV1.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigAV1.cpp @@ -19,7 +19,7 @@ #define READ_PARAM(i, param, type) { \ int32_t data = 0; \ if ((++i >= argc) || (sscanf(argv[i], "%d", &data) != 1)) { \ - fprintf(stderr, "invalid parameter"); \ + LOG_ERROR_CONFIG("invalid parameter"); \ return -1; \ } else { \ param = (type)data; \ @@ -140,7 +140,7 @@ int EncoderConfigAV1::DoParseArguments(int argc, char* argv[]) } } else if (args[i] == "--profile"){ if (++i >= argc) { - fprintf(stderr, "invalid parameter for %s\n", args[i-1].c_str()); + LOG_ERROR_CONFIG("invalid parameter for %s", args[i-1].c_str()); return -1; } std::string prfl = args[i]; @@ -152,11 +152,11 @@ int EncoderConfigAV1::DoParseArguments(int argc, char* argv[]) profile = STD_VIDEO_AV1_PROFILE_PROFESSIONAL; } else { // Invalid profile - fprintf(stderr, "Invalid profile: %s\n", prfl.c_str()); + LOG_ERROR_CONFIG("Invalid profile: %s", prfl.c_str()); return -1; } } else { - fprintf(stderr, "Unrecognized option: %s\n", argv[i]); + LOG_ERROR_CONFIG("Unrecognized option: %s", argv[i]); //printAV1Help(); return -1; } @@ -194,21 +194,21 @@ VkResult EncoderConfigAV1::InitDeviceCapabilities(const VulkanDeviceContext* vkD quantizationMapCapabilities, av1QuantizationMapCapabilities); if (result != VK_SUCCESS) { - std::cout << "*** Could not get video capabilities :" << result << " ***" << std::endl; + LOG_S_ERROR << "*** Could not get video capabilities :" << result << " ***" 
<< std::endl; assert(!"Coult not get Video Capabilities!"); return result; } - if (verboseMsg) { - std::cout << "\t\t\t" << VkVideoCoreProfile::CodecToName(codec) << "encode capabilities: " << std::endl; - std::cout << "\t\t\t" << "minBitstreamBufferOffsetAlignment: " << videoCapabilities.minBitstreamBufferOffsetAlignment << std::endl; - std::cout << "\t\t\t" << "minBitstreamBufferSizeAlignment: " << videoCapabilities.minBitstreamBufferSizeAlignment << std::endl; - std::cout << "\t\t\t" << "pictureAccessGranularity: " << videoCapabilities.pictureAccessGranularity.width << " x " << videoCapabilities.pictureAccessGranularity.height << std::endl; - std::cout << "\t\t\t" << "minExtent: " << videoCapabilities.minCodedExtent.width << " x " << videoCapabilities.minCodedExtent.height << std::endl; - std::cout << "\t\t\t" << "maxExtent: " << videoCapabilities.maxCodedExtent.width << " x " << videoCapabilities.maxCodedExtent.height << std::endl; - std::cout << "\t\t\t" << "maxDpbSlots: " << videoCapabilities.maxDpbSlots << std::endl; - std::cout << "\t\t\t" << "maxActiveReferencePictures: " << videoCapabilities.maxActiveReferencePictures << std::endl; - } + + LOG_S_INFO << "\t\t\t" << VkVideoCoreProfile::CodecToName(codec) << "encode capabilities: " << std::endl; + LOG_S_INFO << "\t\t\t" << "minBitstreamBufferOffsetAlignment: " << videoCapabilities.minBitstreamBufferOffsetAlignment << std::endl; + LOG_S_INFO << "\t\t\t" << "minBitstreamBufferSizeAlignment: " << videoCapabilities.minBitstreamBufferSizeAlignment << std::endl; + LOG_S_INFO << "\t\t\t" << "pictureAccessGranularity: " << videoCapabilities.pictureAccessGranularity.width << " x " << videoCapabilities.pictureAccessGranularity.height << std::endl; + LOG_S_INFO << "\t\t\t" << "minExtent: " << videoCapabilities.minCodedExtent.width << " x " << videoCapabilities.minCodedExtent.height << std::endl; + LOG_S_INFO << "\t\t\t" << "maxExtent: " << videoCapabilities.maxCodedExtent.width << " x " << 
videoCapabilities.maxCodedExtent.height << std::endl; + LOG_S_INFO << "\t\t\t" << "maxDpbSlots: " << videoCapabilities.maxDpbSlots << std::endl; + LOG_S_INFO << "\t\t\t" << "maxActiveReferencePictures: " << videoCapabilities.maxActiveReferencePictures << std::endl; + return VK_SUCCESS; } diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigH264.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigH264.cpp index 3d76c7c8..df26383d 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigH264.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigH264.cpp @@ -358,21 +358,21 @@ VkResult EncoderConfigH264::InitDeviceCapabilities(const VulkanDeviceContext* vk quantizationMapCapabilities, h264QuantizationMapCapabilities); if (result != VK_SUCCESS) { - std::cout << "*** Could not get Video Capabilities :" << result << " ***" << std::endl; + LOG_S_ERROR << "*** Could not get Video Capabilities :" << result << " ***" << std::endl; assert(!"Could not get Video Capabilities!"); return result; } - if (verboseMsg) { - std::cout << "\t\t\t" << VkVideoCoreProfile::CodecToName(codec) << "encode capabilities: " << std::endl; - std::cout << "\t\t\t" << "minBitstreamBufferOffsetAlignment: " << videoCapabilities.minBitstreamBufferOffsetAlignment << std::endl; - std::cout << "\t\t\t" << "minBitstreamBufferSizeAlignment: " << videoCapabilities.minBitstreamBufferSizeAlignment << std::endl; - std::cout << "\t\t\t" << "pictureAccessGranularity: " << videoCapabilities.pictureAccessGranularity.width << " x " << videoCapabilities.pictureAccessGranularity.height << std::endl; - std::cout << "\t\t\t" << "minExtent: " << videoCapabilities.minCodedExtent.width << " x " << videoCapabilities.minCodedExtent.height << std::endl; - std::cout << "\t\t\t" << "maxExtent: " << videoCapabilities.maxCodedExtent.width << " x " << videoCapabilities.maxCodedExtent.height << std::endl; - std::cout << "\t\t\t" << "maxDpbSlots: " << videoCapabilities.maxDpbSlots << std::endl; - 
std::cout << "\t\t\t" << "maxActiveReferencePictures: " << videoCapabilities.maxActiveReferencePictures << std::endl; - std::cout << "\t\t\t" << "maxBPictureL0ReferenceCount: " << h264EncodeCapabilities.maxBPictureL0ReferenceCount << std::endl; + if (verbose) { + LOG_S_INFO << "\t\t\t" << VkVideoCoreProfile::CodecToName(codec) << "encode capabilities: " << std::endl; + LOG_S_INFO << "\t\t\t" << "minBitstreamBufferOffsetAlignment: " << videoCapabilities.minBitstreamBufferOffsetAlignment << std::endl; + LOG_S_INFO << "\t\t\t" << "minBitstreamBufferSizeAlignment: " << videoCapabilities.minBitstreamBufferSizeAlignment << std::endl; + LOG_S_INFO << "\t\t\t" << "pictureAccessGranularity: " << videoCapabilities.pictureAccessGranularity.width << " x " << videoCapabilities.pictureAccessGranularity.height << std::endl; + LOG_S_INFO << "\t\t\t" << "minExtent: " << videoCapabilities.minCodedExtent.width << " x " << videoCapabilities.minCodedExtent.height << std::endl; + LOG_S_INFO << "\t\t\t" << "maxExtent: " << videoCapabilities.maxCodedExtent.width << " x " << videoCapabilities.maxCodedExtent.height << std::endl; + LOG_S_INFO << "\t\t\t" << "maxDpbSlots: " << videoCapabilities.maxDpbSlots << std::endl; + LOG_S_INFO << "\t\t\t" << "maxActiveReferencePictures: " << videoCapabilities.maxActiveReferencePictures << std::endl; + LOG_S_INFO << "\t\t\t" << "maxBPictureL0ReferenceCount: " << h264EncodeCapabilities.maxBPictureL0ReferenceCount << std::endl; } return VK_SUCCESS; diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigH265.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigH265.cpp index 543b9e90..70baae9b 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigH265.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderConfigH265.cpp @@ -78,21 +78,21 @@ VkResult EncoderConfigH265::InitDeviceCapabilities(const VulkanDeviceContext* vk quantizationMapCapabilities, h265QuantizationMapCapabilities); if (result != VK_SUCCESS) { - std::cout << "*** 
Could not get Video Capabilities :" << result << " ***" << std::endl; + LOG_S_ERROR << "*** Could not get Video Capabilities :" << result << " ***" << std::endl; assert(!"Could not get Video Capabilities!"); return result; } - if (verboseMsg) { - std::cout << "\t\t\t" << VkVideoCoreProfile::CodecToName(codec) << "encode capabilities: " << std::endl; - std::cout << "\t\t\t" << "minBitstreamBufferOffsetAlignment: " << videoCapabilities.minBitstreamBufferOffsetAlignment << std::endl; - std::cout << "\t\t\t" << "minBitstreamBufferSizeAlignment: " << videoCapabilities.minBitstreamBufferSizeAlignment << std::endl; - std::cout << "\t\t\t" << "pictureAccessGranularity: " << videoCapabilities.pictureAccessGranularity.width << " x " << videoCapabilities.pictureAccessGranularity.height << std::endl; - std::cout << "\t\t\t" << "minExtent: " << videoCapabilities.minCodedExtent.width << " x " << videoCapabilities.minCodedExtent.height << std::endl; - std::cout << "\t\t\t" << "maxExtent: " << videoCapabilities.maxCodedExtent.width << " x " << videoCapabilities.maxCodedExtent.height << std::endl; - std::cout << "\t\t\t" << "maxDpbSlots: " << videoCapabilities.maxDpbSlots << std::endl; - std::cout << "\t\t\t" << "maxActiveReferencePictures: " << videoCapabilities.maxActiveReferencePictures << std::endl; - std::cout << "\t\t\t" << "maxBPictureL0ReferenceCount: " << h265EncodeCapabilities.maxBPictureL0ReferenceCount << std::endl; + if (verbose) { + LOG_S_INFO << "\t\t\t" << VkVideoCoreProfile::CodecToName(codec) << "encode capabilities: " << std::endl; + LOG_S_INFO << "\t\t\t" << "minBitstreamBufferOffsetAlignment: " << videoCapabilities.minBitstreamBufferOffsetAlignment << std::endl; + LOG_S_INFO << "\t\t\t" << "minBitstreamBufferSizeAlignment: " << videoCapabilities.minBitstreamBufferSizeAlignment << std::endl; + LOG_S_INFO << "\t\t\t" << "pictureAccessGranularity: " << videoCapabilities.pictureAccessGranularity.width << " x " << videoCapabilities.pictureAccessGranularity.height << 
std::endl; + LOG_S_INFO << "\t\t\t" << "minExtent: " << videoCapabilities.minCodedExtent.width << " x " << videoCapabilities.minCodedExtent.height << std::endl; + LOG_S_INFO << "\t\t\t" << "maxExtent: " << videoCapabilities.maxCodedExtent.width << " x " << videoCapabilities.maxCodedExtent.height << std::endl; + LOG_S_INFO << "\t\t\t" << "maxDpbSlots: " << videoCapabilities.maxDpbSlots << std::endl; + LOG_S_INFO << "\t\t\t" << "maxActiveReferencePictures: " << videoCapabilities.maxActiveReferencePictures << std::endl; + LOG_S_INFO << "\t\t\t" << "maxBPictureL0ReferenceCount: " << h265EncodeCapabilities.maxBPictureL0ReferenceCount << std::endl; } return VK_SUCCESS; @@ -586,11 +586,10 @@ bool EncoderConfigH265::InitParamameters(VpsH265 *vpsInfo, SpsH265 *spsInfo, // pic_height_in_luma_samples shall not be equal to 0 and shall be an integer multiple of MinCbSizeY. spsInfo->sps.pic_height_in_luma_samples = picHeightAlignedToMinCbsY; - if (verbose) { - std::cout << "sps.pic_width_in_luma_samples: " << spsInfo->sps.pic_width_in_luma_samples - << ", sps.pic_height_in_luma_samples: " << spsInfo->sps.pic_height_in_luma_samples - << ", cuSize: " << (uint32_t)cuSize << ", cuMinSize: " << (uint32_t)cuMinSize << std::endl; - } + + LOG_S_DEBUG << "sps.pic_width_in_luma_samples: " << spsInfo->sps.pic_width_in_luma_samples + << ", sps.pic_height_in_luma_samples: " << spsInfo->sps.pic_height_in_luma_samples + << ", cuSize: " << (uint32_t)cuSize << ", cuMinSize: " << (uint32_t)cuMinSize << std::endl; spsInfo->sps.sps_video_parameter_set_id = vpsId; spsInfo->sps.sps_max_sub_layers_minus1 = 0; @@ -609,8 +608,8 @@ bool EncoderConfigH265::InitParamameters(VpsH265 *vpsInfo, SpsH265 *spsInfo, spsInfo->sps.log2_min_pcm_luma_coding_block_size_minus3 = (uint8_t)(minCbLog2SizeY - 3); spsInfo->sps.log2_diff_max_min_pcm_luma_coding_block_size = (uint8_t)(ctbLog2SizeY - minCbLog2SizeY); - if (verbose) { - std::cout << "sps.log2_min_luma_coding_block_size_minus3: " << 
(uint32_t)spsInfo->sps.log2_min_luma_coding_block_size_minus3 + + LOG_S_DEBUG << "sps.log2_min_luma_coding_block_size_minus3: " << (uint32_t)spsInfo->sps.log2_min_luma_coding_block_size_minus3 << ", sps.log2_diff_max_min_luma_coding_block_size: " << (uint32_t)spsInfo->sps.log2_diff_max_min_luma_coding_block_size << ", sps.log2_min_luma_transform_block_size_minus2: " << (uint32_t)spsInfo->sps.log2_min_luma_transform_block_size_minus2 << ", sps.log2_diff_max_min_luma_transform_block_size: " << (uint32_t)spsInfo->sps.log2_diff_max_min_luma_transform_block_size @@ -618,7 +617,6 @@ bool EncoderConfigH265::InitParamameters(VpsH265 *vpsInfo, SpsH265 *spsInfo, << ", sps.log2_min_pcm_luma_coding_block_size_minus3: " << (uint32_t)spsInfo->sps.log2_min_pcm_luma_coding_block_size_minus3 << ", sps.log2_diff_max_min_pcm_luma_coding_block_size: " << (uint32_t)spsInfo->sps.log2_diff_max_min_pcm_luma_coding_block_size << std::endl; - } uint32_t subWidthC = (encodeChromaSubsampling == 3) ? 1 : 2; uint32_t subHeightC = (encodeChromaSubsampling == 3) ? 
1 : 2; @@ -631,14 +629,13 @@ bool EncoderConfigH265::InitParamameters(VpsH265 *vpsInfo, SpsH265 *spsInfo, (spsInfo->sps.conf_win_top_offset != 0) || (spsInfo->sps.conf_win_bottom_offset != 0)); - if (verbose) { - std::cout << "sps.conf_win_left_offset: " << spsInfo->sps.conf_win_left_offset - << ", sps.conf_win_right_offset: " << spsInfo->sps.conf_win_right_offset - << ", sps.conf_win_top_offset: " << spsInfo->sps.conf_win_top_offset - << ", sps.conf_win_bottom_offset: " << spsInfo->sps.conf_win_bottom_offset - << ", sps.flags.conformance_window_flag: " << spsInfo->sps.flags.conformance_window_flag - << std::endl; - } + + LOG_S_DEBUG << "sps.conf_win_left_offset: " << spsInfo->sps.conf_win_left_offset + << ", sps.conf_win_right_offset: " << spsInfo->sps.conf_win_right_offset + << ", sps.conf_win_top_offset: " << spsInfo->sps.conf_win_top_offset + << ", sps.conf_win_bottom_offset: " << spsInfo->sps.conf_win_bottom_offset + << ", sps.flags.conformance_window_flag: " << spsInfo->sps.flags.conformance_window_flag + << std::endl; spsInfo->sps.pScalingLists = NULL; diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderDpbH265.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderDpbH265.cpp index e873e009..8b3ed72d 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkEncoderDpbH265.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkEncoderDpbH265.cpp @@ -23,6 +23,7 @@ #include #include "VkEncoderDpbH265.h" +#include "Logger.h" template static inline T clampl(T value, T minbound) { @@ -306,7 +307,7 @@ void VkEncDpbH265::ApplyReferencePictureSet(const StdVideoEncodeH265PictureInfo } if (numRefPics > (m_dpbSize - 1)) { - printf("too many reference frames (%d, max is %d)\n", numRefPics, (m_dpbSize - 1)); + LOG_S_WARN << "too many reference frames" << numRefPics << ", max is " << (m_dpbSize - 1) << std::endl; } assert(numRefPics <= STD_VIDEO_H265_MAX_NUM_LIST_REF); @@ -410,7 +411,7 @@ void VkEncDpbH265::ApplyReferencePictureSet(const StdVideoEncodeH265PictureInfo } } if 
(pRefPicSet->ltCurr[i] < 0) - printf("long-term reference picture not available (POC=%d)\n", pocLtCurr[i]); + LOG_S_WARN << "long-term reference picture not available POC=" << pocLtCurr[i] << std::endl; } for (int32_t i = 0; i < m_numPocLtFoll; i++) { @@ -454,7 +455,7 @@ void VkEncDpbH265::ApplyReferencePictureSet(const StdVideoEncodeH265PictureInfo } } if (pRefPicSet->stCurrBefore[i] < 0) - printf("short-term reference picture not available (POC=%d)\n", pocStCurrBefore[i]); + LOG_S_WARN << "short-term reference picture not available POC=" << pocStCurrBefore[i] << std::endl; } for (int32_t i = 0; i < m_numPocStCurrAfter; i++) { @@ -466,7 +467,7 @@ void VkEncDpbH265::ApplyReferencePictureSet(const StdVideoEncodeH265PictureInfo } } if (pRefPicSet->stCurrAfter[i] < 0) - printf("short-term reference picture not available (POC=%d)\n", pocStCurrAfter[i]); + LOG_S_WARN << "short-term reference picture not available POC=" << pocStCurrAfter[i] << std::endl; } for (int32_t i = 0; i < m_numPocStFoll; i++) { diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoder.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoder.cpp index ed960743..70da294d 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoder.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoder.cpp @@ -544,17 +544,17 @@ VkResult VkVideoEncoder::AssembleBitstreamData(VkSharedBaseObjoutputFileHandler.GetFileHandle()); if (m_encoderConfig->verboseFrameStruct) { - std::cout << " == Non-Vcl data " << (nonVcl ? "SUCCESS" : "FAIL") - << " File Output non-VCL data with size: " << encodeFrameInfo->bitstreamHeaderBufferSize - << ", Input Order: " << encodeFrameInfo->gopPosition.inputOrder - << ", Encode Order: " << encodeFrameInfo->gopPosition.encodeOrder - << std::endl << std::flush; + LOG_S_DEBUG << " == Non-Vcl data " << (nonVcl ? 
"SUCCESS" : "FAIL") + << " File Output non-VCL data with size: " << encodeFrameInfo->bitstreamHeaderBufferSize + << ", Input Order: " << encodeFrameInfo->gopPosition.inputOrder + << ", Encode Order: " << encodeFrameInfo->gopPosition.encodeOrder + << std::endl << std::flush; } } VkResult result = encodeFrameInfo->encodeCmdBuffer->SyncHostOnCmdBuffComplete(false, "encoderEncodeFence"); if(result != VK_SUCCESS) { - fprintf(stderr, "\nWait on encoder complete fence has failed with result 0x%x.\n", result); + LOG_S_ERROR << "Wait on encoder complete fence has failed with result 0x" << result << std::endl; return result; } @@ -580,14 +580,16 @@ VkResult VkVideoEncoder::AssembleBitstreamData(VkSharedBaseObjoutputFileHandler.GetFileHandle()); if (m_encoderConfig->verboseFrameStruct) { - std::cout << " == Output VCL data " << (vcl ? "SUCCESS" : "FAIL") << " with size: " << encodeResult.bitstreamSize - << " and offset: " << encodeResult.bitstreamStartOffset - << ", Input Order: " << encodeFrameInfo->gopPosition.inputOrder - << ", Encode Order: " << encodeFrameInfo->gopPosition.encodeOrder << std::endl << std::flush; + LOG_S_DEBUG << " == Output VCL data " << (vcl ? "SUCCESS" : "FAIL") << " with size: " << encodeResult.bitstreamSize + << " and offset: " << encodeResult.bitstreamStartOffset + << ", Input Order: " << encodeFrameInfo->gopPosition.inputOrder + << ", Encode Order: " << encodeFrameInfo->gopPosition.encodeOrder << std::endl << std::flush; } return result; } @@ -613,7 +615,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf if (!VulkanVideoCapabilities::IsCodecTypeSupported(m_vkDevCtx, m_vkDevCtx->GetVideoEncodeQueueFamilyIdx(), encoderConfig->codec)) { - std::cout << "*** The video codec " << VkVideoCoreProfile::CodecToName(encoderConfig->codec) << " is not supported! ***" << std::endl; + LOG_S_ERROR << "*** The video codec " << VkVideoCoreProfile::CodecToName(encoderConfig->codec) << " is not supported! 
***" << std::endl; assert(!"The video codec is not supported"); return VK_ERROR_INITIALIZATION_FAILED; } @@ -627,21 +629,21 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf if (encoderConfig->useDpbArray == false && (encoderConfig->videoCapabilities.flags & VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR) == 0) { - std::cout << "Separate DPB was requested, but the implementation does not support it!" << std::endl; - std::cout << "Fallback to layered DPB!" << std::endl; + LOG_S_WARN << "Separate DPB was requested, but the implementation does not support it!" << std::endl; + LOG_S_WARN << "Fallback to layered DPB!" << std::endl; encoderConfig->useDpbArray = true; } if (m_encoderConfig->enableQpMap) { if ((m_encoderConfig->qpMapMode == EncoderConfig::DELTA_QP_MAP) && ((m_encoderConfig->videoEncodeCapabilities.flags & VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR) == 0)) { - std::cout << "Delta QP Map was requested, but the implementation does not support it!" << std::endl; + LOG_S_ERROR << "Delta QP Map was requested, but the implementation does not support it!" << std::endl; assert(!"Delta QP Map is not supported"); return VK_ERROR_INITIALIZATION_FAILED; } if ((m_encoderConfig->qpMapMode == EncoderConfig::EMPHASIS_MAP) && ((m_encoderConfig->videoEncodeCapabilities.flags & VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR) == 0)) { - std::cout << "Emphasis Map was requested, but the implementation does not support it!" << std::endl; + LOG_S_ERROR << "Emphasis Map was requested, but the implementation does not support it!"
<< std::endl; assert(!"Emphasis QP Map is not supported"); return VK_ERROR_INITIALIZATION_FAILED; } @@ -653,16 +655,16 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf m_encoderConfig->gopStructure.Init(m_encoderConfig->numFrames); if (encoderConfig->GetMaxBFrameCount() < m_encoderConfig->gopStructure.GetConsecutiveBFrameCount()) { if (m_encoderConfig->verbose) { - std::cout << "Max consecutive B frames: " << (uint32_t)encoderConfig->GetMaxBFrameCount() << " lower than the configured one: " << (uint32_t)m_encoderConfig->gopStructure.GetConsecutiveBFrameCount() << std::endl; - std::cout << "Fallback to the max value: " << (uint32_t)m_encoderConfig->gopStructure.GetConsecutiveBFrameCount() << std::endl; + LOG_S_INFO << "Max consecutive B frames: " << (uint32_t)encoderConfig->GetMaxBFrameCount() << " lower than the configured one: " << (uint32_t)m_encoderConfig->gopStructure.GetConsecutiveBFrameCount() << std::endl; + LOG_S_INFO << "Fallback to the max value: " << (uint32_t)m_encoderConfig->gopStructure.GetConsecutiveBFrameCount() << std::endl; } m_encoderConfig->gopStructure.SetConsecutiveBFrameCount(encoderConfig->GetMaxBFrameCount()); } if (m_encoderConfig->verbose) { - std::cout << std::endl << "GOP frame count: " << (uint32_t)m_encoderConfig->gopStructure.GetGopFrameCount(); - std::cout << ", IDR period: " << (uint32_t)m_encoderConfig->gopStructure.GetIdrPeriod(); - std::cout << ", Consecutive B frames: " << (uint32_t)m_encoderConfig->gopStructure.GetConsecutiveBFrameCount(); - std::cout << std::endl; + LOG_S_INFO << std::endl << "GOP frame count: " << (uint32_t)m_encoderConfig->gopStructure.GetGopFrameCount(); + LOG_S_INFO << ", IDR period: " << (uint32_t)m_encoderConfig->gopStructure.GetIdrPeriod(); + LOG_S_INFO << ", Consecutive B frames: " << (uint32_t)m_encoderConfig->gopStructure.GetConsecutiveBFrameCount(); + LOG_S_INFO << std::endl; const uint64_t maxFramesToDump = std::min(m_encoderConfig->numFrames, 
m_encoderConfig->gopStructure.GetGopFrameCount() + 19); m_encoderConfig->gopStructure.PrintGopStructure(maxFramesToDump); @@ -704,7 +706,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf formatCount, supportedDpbFormats); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to get desired video format for the decoded picture buffer.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to get desired video format for the decoded picture buffer." << std::endl; return result; } @@ -713,7 +715,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf formatCount, supportedInFormats); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to get desired video format for input images.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to get desired video format for input images." << std::endl; return result; } @@ -733,7 +735,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf true, supportedQpMapTexelSize); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to get desired video format for qpMap images.\n"); + LOG_ERROR("InitEncoder Error: Failed to get desired video format for qpMap images."); return result; } @@ -808,7 +810,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf result = VulkanVideoImagePool::Create(m_vkDevCtx, m_linearInputImagePool); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to create linearInputImagePool.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to create linearInputImagePool." << std::endl; return result; } @@ -834,13 +836,13 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf true // useLinear ); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to Configure linearInputImagePool.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to Configure linearInputImagePool." 
<< std::endl; return result; } result = VulkanVideoImagePool::Create(m_vkDevCtx, m_inputImagePool); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to create inputImagePool.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to create inputImagePool." << std::endl; return result; } @@ -862,7 +864,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf false // useLinear ); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to Configure inputImagePool.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to Configure inputImagePool." << std::endl; return result; } @@ -873,7 +875,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf // If the linear tiling is not supported, we need to stage the image result = VulkanVideoImagePool::Create(m_vkDevCtx, m_linearQpMapImagePool); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to create linearQpMapImagePool.\n"); + LOG_ERROR("InitEncoder Error: Failed to create linearQpMapImagePool."); return result; } @@ -897,13 +899,13 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf true // useLinear ); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to Configure linearQpMapImagePool.\n"); + LOG_ERROR("InitEncoder Error: Failed to configure linearQpMapImagePool."); return result; } } result = VulkanVideoImagePool::Create(m_vkDevCtx, m_qpMapImagePool); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to create inputImagePool.\n"); + LOG_ERROR("InitEncoder Error: Failed to create inputImagePool."); return result; } @@ -935,14 +937,14 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf m_qpMapTiling == VK_IMAGE_TILING_LINEAR // useLinear ); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to Configure qpMapImagePool.\n"); + LOG_ERROR("InitEncoder Error: Failed to Configure qpMapImagePool."); return result; } } result = 
VulkanVideoImagePool::Create(m_vkDevCtx, m_dpbImagePool); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to create dpbImagePool.\n"); + LOG_ERROR("InitEncoder Error: Failed to create dpbImagePool."); return result; } @@ -960,7 +962,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf false // useLinear ); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to Configure inputImagePool.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to Configure inputImagePool." << std::endl; return result; } @@ -988,7 +990,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf nullptr, 0, bitstreamBuffer); assert(result == VK_SUCCESS); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: VulkanBitstreamBufferImpl::Create() result: 0x%x\n", result); + LOG_S_ERROR << "ERROR: VulkanBitstreamBufferImpl::Create() result: 0x" << result << std::endl; break; } @@ -1055,7 +1057,7 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf result = VulkanCommandBufferPool::Create(m_vkDevCtx, m_inputCommandBufferPool); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to create m_inputCommandBufferPool.\n"); + LOG_ERROR("InitEncoder Error: Failed to create m_inputCommandBufferPool."); return result; } @@ -1072,13 +1074,13 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj& encoderConf } if (result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to Configure m_inputCommandBufferPool.\n"); + LOG_ERROR("InitEncoder Error: Failed to Configure m_inputCommandBufferPool."); return result; } result = VulkanCommandBufferPool::Create(m_vkDevCtx, m_encodeCommandBufferPool); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to create m_encodeCommandBufferPool.\n"); + LOG_ERROR("InitEncoder Error: Failed to create m_encodeCommandBufferPool."); return result; } @@ -1098,13 +1100,13 @@ VkResult VkVideoEncoder::InitEncoder(VkSharedBaseObj&
encoderConf true // createFences ); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to Configure m_encodeCommandBufferPool.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to Configure m_encodeCommandBufferPool." << std::endl; return result; } result = CreateFrameInfoBuffersQueue(encoderConfig->numInputImages); if(result != VK_SUCCESS) { - fprintf(stderr, "\nInitEncoder Error: Failed to create FrameInfoBuffersQueue.\n"); + LOG_S_ERROR << "InitEncoder Error: Failed to create FrameInfoBuffersQueue." << std::endl; return result; } @@ -1142,11 +1144,11 @@ VkDeviceSize VkVideoEncoder::GetBitstreamBuffer(VkSharedBaseObjMemsetData(0x0, copySize, newSize - copySize); #endif if (debugBitstreamBufferDumpAlloc) { - std::cout << "\t\tFrom bitstream buffer pool with size " << newSize << " B, " << + LOG_S_DEBUG << "\t\tFrom bitstream buffer pool with size " << newSize << " B, " << newSize/1024 << " KB, " << newSize/1024/1024 << " MB" << std::endl; - std::cout << "\t\t\t FreeNodes " << m_bitstreamBuffersQueue.GetFreeNodesNumber(); - std::cout << " of MaxNodes " << m_bitstreamBuffersQueue.GetMaxNodes(); - std::cout << ", AvailableNodes " << m_bitstreamBuffersQueue.GetAvailableNodesNumber(); - std::cout << std::endl; + LOG_S_DEBUG << "\t\t\t FreeNodes " << m_bitstreamBuffersQueue.GetFreeNodesNumber(); + LOG_S_DEBUG << " of MaxNodes " << m_bitstreamBuffersQueue.GetMaxNodes(); + LOG_S_DEBUG << ", AvailableNodes " << m_bitstreamBuffersQueue.GetAvailableNodesNumber(); + LOG_S_DEBUG << std::endl; } } bitstreamBuffer = newBitstreamBuffer; if (newSize > m_streamBufferSize) { - std::cout << "\tAllocated bitstream buffer with size " << newSize << " B, " << + LOG_S_INFO << "\tAllocated bitstream buffer with size " << newSize << " B, " << newSize/1024 << " KB, " << newSize/1024/1024 << " MB" << std::endl; m_streamBufferSize = (size_t)newSize; } @@ -1675,10 +1677,8 @@ VkResult VkVideoEncoder::ProcessOrderedFrames(VkSharedBaseObjverbose) { - const std::string& 
description = pair.first; - std::cout << "====== Total number of frames processed by " << description << ": " << processedFramesCount << " : " << result << std::endl; - } + const std::string& description = pair.first; + LOG_S_DEBUG << "====== Total number of frames processed by " << description << ": " << processedFramesCount << " : " << result << std::endl; if (result != VK_SUCCESS) { break; @@ -1725,7 +1725,7 @@ void VkVideoEncoder::DumpStateInfo(const char* stageName, uint32_t ident, VkSharedBaseObj& encodeFrameInfo, int32_t frameIdx, uint32_t ofTotalFrames) const { - std::cout << std::string(ident, ' ') << "===> " + LOG_S_DEBUG << std::string(ident, ' ') << "===> " << VkVideoCoreProfile::CodecToName(m_encoderConfig->codec) << ": " << stageName << " [" << frameIdx << " of " << ofTotalFrames << "]" << " type " << VkVideoGopStructure::GetFrameTypeName(encodeFrameInfo->gopPosition.pictureType) @@ -1778,12 +1778,12 @@ int32_t VkVideoEncoder::DeinitEncoder() void VkVideoEncoder::ConsumerThread() { - std::cout << "ConsumerThread is stating now.\n" << std::endl; + LOG_S_DEBUG << "ConsumerThread is starting now.\n" << std::endl; do { VkSharedBaseObj encodeFrameInfo; bool success = m_encoderThreadQueue.WaitAndPop(encodeFrameInfo); if (success) { // 5 seconds in nanoseconds - std::cout << "==>>>> Consumed: " << (uint32_t)encodeFrameInfo->gopPosition.inputOrder + LOG_S_DEBUG << "==>>>> Consumed: " << (uint32_t)encodeFrameInfo->gopPosition.inputOrder << ", Order: " << (uint32_t)encodeFrameInfo->gopPosition.encodeOrder << std::endl << std::flush; VkResult result; @@ -1796,15 +1796,15 @@ void VkVideoEncoder::ConsumerThread() VkVideoEncodeFrameInfo::ReleaseChildrenFrames(encodeFrameInfo); assert(encodeFrameInfo == nullptr); if (result != VK_SUCCESS) { - std::cout << "Error processing frames from the frame thread!" << std::endl; + LOG_S_ERROR << "Error processing frames from the frame thread!"
<< std::endl; m_encoderThreadQueue.SetFlushAndExit(); } } else { bool shouldExit = m_encoderThreadQueue.ExitQueue(); - std::cout << "Thread should exit: " << (shouldExit ? "Yes" : "No") << std::endl; + LOG_S_DEBUG << "Thread should exit: " << (shouldExit ? "Yes" : "No") << std::endl; } } while (!m_encoderThreadQueue.ExitQueue()); - std::cout << "ConsumerThread is exiting now.\n" << std::endl; + LOG_S_DEBUG << "ConsumerThread is exiting now.\n" << std::endl; } diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderAV1.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderAV1.cpp index 305fe06e..cc84b3d5 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderAV1.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderAV1.cpp @@ -83,7 +83,7 @@ VkResult VkVideoEncoderAV1::InitEncoderCodec(VkSharedBaseObj& enc VkResult result = InitEncoder(encoderConfig); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: InitEncoder() failed with ret(%d)\n", result); + LOG_S_ERROR << "ERROR: InitEncoder() failed with ret: " << result << std::endl; return result; } @@ -92,7 +92,7 @@ VkResult VkVideoEncoderAV1::InitEncoderCodec(VkSharedBaseObj& enc encodeCaps.maxSingleReferenceCount < 2 && encodeCaps.maxUnidirectionalCompoundReferenceCount == 0 && encodeCaps.maxBidirectionalCompoundReferenceCount == 0) { - std::cout << "B-frames were requested but the implementation does not support multiple reference frames!" << std::endl; + LOG_S_INFO << "B-frames were requested but the implementation does not support multiple reference frames!" << std::endl; assert(!"B-frames not supported"); return VK_ERROR_INITIALIZATION_FAILED; } @@ -117,14 +117,14 @@ VkResult VkVideoEncoderAV1::InitEncoderCodec(VkSharedBaseObj& enc nullptr, &sessionParameters); if (result != VK_SUCCESS) { - fprintf(stderr, "\nEncodeFrame Error: Failed to get create video session parameters.\n"); + LOG_S_ERROR << "EncodeFrame Error: Failed to get create video session parameters." 
<< std::endl; return result; } result = VulkanVideoSessionParameters::Create(m_vkDevCtx, m_videoSession, sessionParameters, m_videoSessionParameters); if (result != VK_SUCCESS) { - fprintf(stderr, "\nEncodeFrame Error: Failed to get create video session object.\n"); + LOG_S_ERROR << "EncodeFrame Error: Failed to get create video session object." << std::endl; return result; } @@ -493,7 +493,7 @@ VkResult VkVideoEncoderAV1::EncodeFrame(VkSharedBaseObj& DumpStateInfo("input", 1, encodeFrameInfo); if (encodeFrameInfo->lastFrame) { - std::cout << "#### It is the last frame: " << encodeFrameInfo->frameInputOrderNum + LOG_S_INFO << "#### It is the last frame: " << encodeFrameInfo->frameInputOrderNum << " of type " << VkVideoGopStructure::GetFrameTypeName(encodeFrameInfo->gopPosition.pictureType) << " ###" << std::endl << std::flush; @@ -822,7 +822,7 @@ VkResult VkVideoEncoderAV1::AssembleBitstreamData(VkSharedBaseObjencodeCmdBuffer->SyncHostOnCmdBuffComplete(false, "encoderEncodeFence"); if(result != VK_SUCCESS) { - fprintf(stderr, "\nWait on encoder complete fence has failed with result 0x%x.\n", result); + LOG_S_ERROR << "Wait on encoder complete fence has failed with result 0x" << result << std::endl; return result; } @@ -850,7 +850,7 @@ VkResult VkVideoEncoderAV1::AssembleBitstreamData(VkSharedBaseObjverboseFrameStruct) { - std::cout << " == Output VCL data SUCCESS for " << frameIdx << " with size: " << encodeResult.bitstreamSize + LOG_S_DEBUG << " == Output VCL data SUCCESS for " << frameIdx << " with size: " << encodeResult.bitstreamSize << " and offset: " << encodeResult.bitstreamStartOffset << ", Input Order: " << (uint32_t)encodeFrameInfo->gopPosition.inputOrder << ", Encode Order: " << (uint32_t)encodeFrameInfo->gopPosition.encodeOrder << std::endl << std::flush; @@ -910,13 +910,13 @@ VkResult VkVideoEncoderAV1::AssembleBitstreamData(VkSharedBaseObjverboseFrameStruct) { - std::cout << ">>>>>> Assembly VCL index " << curIndex << " has size: " << frameSize + 
LOG_S_DEBUG << ">>>>>> Assembly VCL index " << curIndex << " has size: " << frameSize << std::endl << std::flush; } } if (m_encoderConfig->verboseFrameStruct) { - std::cout << ">>>>>> Assembly total VCL data at " << frameIdx << " is: " + LOG_S_DEBUG << ">>>>>> Assembly total VCL data at " << frameIdx << " is: " << framesSize - (2 + encodeFrameInfo->bitstreamHeaderBufferSize) << std::endl << std::flush; } @@ -945,7 +945,7 @@ VkResult VkVideoEncoderAV1::AssembleBitstreamData(VkSharedBaseObjoutputFileHandler.GetFileHandle()); if (m_encoderConfig->verboseFrameStruct) { - std::cout << " == Non-Vcl data " << (nonVcl ? "SUCCESS" : "FAIL") + LOG_S_DEBUG << " == Non-Vcl data " << (nonVcl ? "SUCCESS" : "FAIL") << " File Output non-VCL data with size: " << encodeFrameInfo->bitstreamHeaderBufferSize << ", Input Order: " << (uint32_t)encodeFrameInfo->gopPosition.inputOrder << ", Encode Order: " << (uint32_t)encodeFrameInfo->gopPosition.encodeOrder diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderH264.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderH264.cpp index e7d31ddd..852b8d28 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderH264.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderH264.cpp @@ -47,7 +47,7 @@ VkResult VkVideoEncoderH264::InitEncoderCodec(VkSharedBaseObj& en VkResult result = InitEncoder(encoderConfig); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: InitEncoder() failed with ret(%d)\n", result); + LOG_S_ERROR << "ERROR: InitEncoder() failed with ret: " << result << std::endl; return result; } @@ -77,14 +77,14 @@ VkResult VkVideoEncoderH264::InitEncoderCodec(VkSharedBaseObj& en nullptr, &sessionParameters); if(result != VK_SUCCESS) { - fprintf(stderr, "\nEncodeFrame Error: Failed to get create video session parameters.\n"); + LOG_S_ERROR << "EncodeFrame Error: Failed to get create video session parameters." 
<< std::endl; return result; } result = VulkanVideoSessionParameters::Create(m_vkDevCtx, m_videoSession, sessionParameters, m_videoSessionParameters); if(result != VK_SUCCESS) { - fprintf(stderr, "\nEncodeFrame Error: Failed to get create video session object.\n"); + LOG_S_ERROR << "EncodeFrame Error: Failed to get create video session object." << std::endl; return result; } @@ -511,10 +511,10 @@ VkResult VkVideoEncoderH264::EncodeFrame(VkSharedBaseObj DumpStateInfo("input", 1, encodeFrameInfo); if (encodeFrameInfo->lastFrame) { - std::cout << "#### It is the last frame: " << encodeFrameInfo->frameInputOrderNum - << " of type " << VkVideoGopStructure::GetFrameTypeName(encodeFrameInfo->gopPosition.pictureType) - << " ###" - << std::endl << std::flush; + LOG_S_DEBUG << "#### It is the last frame: " << encodeFrameInfo->frameInputOrderNum + << " of type " << VkVideoGopStructure::GetFrameTypeName(encodeFrameInfo->gopPosition.pictureType) + << " ###" + << std::endl << std::flush; } } diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderH265.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderH265.cpp index ed11d453..63c8500b 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderH265.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkVideoEncoderH265.cpp @@ -47,7 +47,7 @@ VkResult VkVideoEncoderH265::InitEncoderCodec(VkSharedBaseObj& en VkResult result = InitEncoder(encoderConfig); if (result != VK_SUCCESS) { - fprintf(stderr, "\nERROR: InitEncoder() failed with ret(%d)\n", result); + LOG_S_ERROR << "ERROR: InitEncoder() failed with ret: " << result << std::endl; return result; } @@ -55,7 +55,7 @@ VkResult VkVideoEncoderH265::InitEncoderCodec(VkSharedBaseObj& en m_dpb.DpbSequenceStart(m_maxDpbPicturesCount, (m_encoderConfig->numRefL0 > 0)); if (m_encoderConfig->verbose) { - std::cout << ", numRefL0: " << (uint32_t)m_encoderConfig->numRefL0 + LOG_S_DEBUG << ", numRefL0: " << (uint32_t)m_encoderConfig->numRefL0 << ", numRefL1: " << 
(uint32_t)m_encoderConfig->numRefL1 << std::endl; } @@ -94,14 +94,14 @@ VkResult VkVideoEncoderH265::InitEncoderCodec(VkSharedBaseObj& en nullptr, &sessionParameters); if(result != VK_SUCCESS) { - fprintf(stderr, "\nEncodeFrame Error: Failed to get create video session parameters.\n"); + LOG_S_ERROR << "EncodeFrame Error: Failed to get create video session parameters." << std::endl; return result; } result = VulkanVideoSessionParameters::Create(m_vkDevCtx, m_videoSession, sessionParameters, m_videoSessionParameters); if(result != VK_SUCCESS) { - fprintf(stderr, "\nEncodeFrame Error: Failed to get create video session object.\n"); + LOG_S_ERROR << "EncodeFrame Error: Failed to get create video session object." << std::endl; return result; } @@ -393,10 +393,10 @@ VkResult VkVideoEncoderH265::EncodeFrame(VkSharedBaseObj DumpStateInfo("input", 1, encodeFrameInfo); if (encodeFrameInfo->lastFrame) { - std::cout << "#### It is the last frame: " << encodeFrameInfo->frameInputOrderNum - << " of type " << VkVideoGopStructure::GetFrameTypeName(encodeFrameInfo->gopPosition.pictureType) - << " ###" - << std::endl << std::flush; + LOG_S_DEBUG << "#### It is the last frame: " << encodeFrameInfo->frameInputOrderNum + << " of type " << VkVideoGopStructure::GetFrameTypeName(encodeFrameInfo->gopPosition.pictureType) + << " ###" + << std::endl << std::flush; } } diff --git a/vk_video_encoder/libs/VkVideoEncoder/VkVideoGopStructure.cpp b/vk_video_encoder/libs/VkVideoEncoder/VkVideoGopStructure.cpp index 72caddb2..a37888ed 100644 --- a/vk_video_encoder/libs/VkVideoEncoder/VkVideoGopStructure.cpp +++ b/vk_video_encoder/libs/VkVideoEncoder/VkVideoGopStructure.cpp @@ -15,6 +15,7 @@ */ #include "VkVideoGopStructure.h" +#include "Logger.h" VkVideoGopStructure::VkVideoGopStructure(uint8_t gopFrameCount, int32_t idrPeriod, @@ -48,33 +49,33 @@ bool VkVideoGopStructure::Init(uint64_t maxNumFrames) void VkVideoGopStructure::PrintGopStructure(uint64_t numFrames) const { - std::cout << std::endl
<< "Input order: "; + LOG_S_INFO << std::endl << "Input order: "; for (uint64_t frameNum = 0; frameNum < numFrames; frameNum++) { - std::cout << std::setw(3) << frameNum << " "; + LOG_S_INFO << std::setw(3) << frameNum << " "; } - std::cout << std::endl << "Frame Type: "; + LOG_S_INFO << std::endl << "Frame Type: "; GopState gopState; GopPosition gopPos(gopState.positionInInputOrder); for (uint64_t frameNum = 0; frameNum < (numFrames - 1); frameNum++) { GetPositionInGOP(gopState, gopPos); - std::cout << std::setw(4) << GetFrameTypeName(gopPos.pictureType); + LOG_S_INFO << std::setw(4) << GetFrameTypeName(gopPos.pictureType); } GetPositionInGOP(gopState, gopPos, false, true); - std::cout << std::setw(4) << GetFrameTypeName(gopPos.pictureType); + LOG_S_INFO << std::setw(4) << GetFrameTypeName(gopPos.pictureType); - std::cout << std::endl << "Encode order: "; + LOG_S_INFO << std::endl << "Encode order: "; gopState = GopState(); for (uint64_t i = 0; i < (numFrames - 1); i++) { GetPositionInGOP(gopState, gopPos); - std::cout << std::setw(3) << gopPos.encodeOrder << " "; + LOG_S_INFO << std::setw(3) << gopPos.encodeOrder << " "; } GetPositionInGOP(gopState, gopPos, false, true); - std::cout << std::setw(3) << gopPos.encodeOrder << " "; + LOG_S_INFO << std::setw(3) << gopPos.encodeOrder << " "; - std::cout << std::endl; + LOG_S_INFO << std::endl; } void VkVideoGopStructure::DumpFrameGopStructure(GopState& gopState, @@ -83,18 +84,18 @@ void VkVideoGopStructure::DumpFrameGopStructure(GopState& gopState, GopPosition gopPos(gopState.positionInInputOrder); GetPositionInGOP(gopState, gopPos); - std::cout << " " << gopPos.inputOrder << ", " + LOG_S_DEBUG << " " << gopPos.inputOrder << ", " << "\t" << gopPos.encodeOrder << ", " << "\t" << (uint32_t)gopPos.inGop << ", " << "\t" << GetFrameTypeName(gopPos.pictureType); - std::cout << std::endl; + LOG_S_DEBUG << std::endl; } void VkVideoGopStructure::DumpFramesGopStructure(uint64_t firstFrameNumInInputOrder, uint64_t numFrames) 
const { - std::cout << "Input Encode Position Frame " << std::endl; - std::cout << "order order in GOP type " << std::endl; + LOG_S_DEBUG << "Input Encode Position Frame " << std::endl; + LOG_S_DEBUG << "order order in GOP type " << std::endl; const uint64_t lastFrameNumInInputOrder = firstFrameNumInInputOrder + numFrames - 1; GopState gopState; for (uint64_t frameNumInDisplayOrder = firstFrameNumInInputOrder; frameNumInDisplayOrder < lastFrameNumInInputOrder; ++frameNumInDisplayOrder) { diff --git a/vk_video_encoder/src/vulkan_video_encoder.cpp b/vk_video_encoder/src/vulkan_video_encoder.cpp index 18831f2a..1df25226 100644 --- a/vk_video_encoder/src/vulkan_video_encoder.cpp +++ b/vk_video_encoder/src/vulkan_video_encoder.cpp @@ -46,7 +46,7 @@ class VulkanVideoEncoderImpl : public VulkanVideoEncoder { m_encoder->WaitForThreadsToComplete(); if (m_encoderConfig->verbose) { - std::cout << "Done processing " << m_lastFrameIndex << " input frames!" << std::endl + LOG_S_INFO << "Done processing " << m_lastFrameIndex << " input frames!" << std::endl << "Encoded file's location is at " << m_encoderConfig->outputFileHandler.GetFileName() << std::endl; } @@ -128,7 +128,7 @@ VkResult VulkanVideoEncoderImpl::Initialize(VkVideoCodecOperationFlagBitsKHR vid result = m_vkDevCtxt.InitVulkanDevice(m_encoderConfig->appName.c_str(), VK_NULL_HANDLE, m_encoderConfig->verbose); if (result != VK_SUCCESS) { - printf("Could not initialize the Vulkan device!\n"); + LOG_S_ERROR << "Could not initialize the Vulkan device!" 
<< std::endl; return result; } @@ -229,7 +229,7 @@ VkResult VulkanVideoEncoderImpl::EncodeNextFrame(int64_t& frameNumEncoded) } if (m_encoderConfig->verboseFrameStruct) { - std::cout << "####################################################################################" << std::endl + LOG_S_DEBUG << "####################################################################################" << std::endl << "Start processing current input frame index: " << m_lastFrameIndex << std::endl; } @@ -239,14 +239,14 @@ VkResult VulkanVideoEncoderImpl::EncodeNextFrame(int64_t& frameNumEncoded) // load frame data from the file VkResult result = m_encoder->LoadNextFrame(encodeFrameInfo); if (result != VK_SUCCESS) { - std::cout << "ERROR processing input frame index: " << m_lastFrameIndex << std::endl; + LOG_S_ERROR << "ERROR processing input frame index: " << m_lastFrameIndex << std::endl; return result; } frameNumEncoded = encodeFrameInfo->frameInputOrderNum; if (m_encoderConfig->verboseFrameStruct) { - std::cout << "End processing current input frame index: " << m_lastFrameIndex << std::endl; + LOG_S_DEBUG << "End processing current input frame index: " << m_lastFrameIndex << std::endl; } m_lastFrameIndex++; diff --git a/vk_video_encoder/test/vulkan-video-enc/Main.cpp b/vk_video_encoder/test/vulkan-video-enc/Main.cpp index 536729f6..b3c8e88f 100644 --- a/vk_video_encoder/test/vulkan-video-enc/Main.cpp +++ b/vk_video_encoder/test/vulkan-video-enc/Main.cpp @@ -16,6 +16,7 @@ #include <iostream> #include "vulkan_video_encoder.h" +#include "Logger.h" int main(int argc, char** argv) { @@ -25,7 +26,7 @@ int main(int argc, char** argv) argc, argv, vulkanVideoEncoder); if (result != VK_SUCCESS) { - std::cerr << "Error creating the encoder instance: " << result << std::endl; + LOG_S_ERROR << "Error creating the encoder instance: " << result << std::endl; } int64_t numFrames = vulkanVideoEncoder->GetNumberOfFrames(); @@ -35,16 +36,16 @@ int main(int argc, char** argv) int64_t frameNumEncoded = -1; 
result = vulkanVideoEncoder->EncodeNextFrame(frameNumEncoded); if (result != VK_SUCCESS) { - std::cerr << "Error encoding frame: " << frameNum << ", error: " << result << std::endl; + LOG_S_ERROR << "Error encoding frame: " << frameNum << ", error: " << result << std::endl; } } result = vulkanVideoEncoder->GetBitstream(); if (result != VK_SUCCESS) { - std::cerr << "Error obtaining the encoded bitstream file: " << result << std::endl; + LOG_S_ERROR << "Error obtaining the encoded bitstream file: " << result << std::endl; } - std::cout << "Exit encoder test" << std::endl; + LOG_S_INFO << "Exit encoder test" << std::endl; }