diff --git a/README.md b/README.md
index 6939dd124..ac457c077 100755
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 yuzu emulator early access
 =============
 
-This is the source code for early-access 1961.
+This is the source code for early-access 1962.
 
 ## Legal Notice
 
diff --git a/src/video_core/command_classes/codecs/codec.cpp b/src/video_core/command_classes/codecs/codec.cpp
index f4985c9b7..f798a0053 100755
--- a/src/video_core/command_classes/codecs/codec.cpp
+++ b/src/video_core/command_classes/codecs/codec.cpp
@@ -16,36 +16,14 @@ extern "C" {
 }
 
 namespace Tegra {
-
-void AVFrameDeleter(AVFrame* ptr) {
-    av_frame_free(&ptr);
-}
-
-Codec::Codec(GPU& gpu_, const NvdecCommon::NvdecRegisters& regs)
-    : gpu(gpu_), state{regs}, h264_decoder(std::make_unique<Decoder::H264>(gpu)),
-      vp9_decoder(std::make_unique<Decoder::VP9>(gpu)) {}
-
-Codec::~Codec() {
-    if (!initialized) {
-        return;
-    }
-
-    // Free libav memory
-    AVFrame* av_frame;
-    avcodec_send_packet(av_codec_ctx, nullptr);
-    av_frame = av_frame_alloc();
-    avcodec_receive_frame(av_codec_ctx, av_frame);
-    avcodec_flush_buffers(av_codec_ctx);
-
-    av_frame_unref(av_frame);
-    av_free(av_frame);
-    avcodec_close(av_codec_ctx);
-    av_buffer_unref(&av_hw_device);
-}
-
-// Hardware acceleration code from FFmpeg/doc/examples/hw_decode.c under MIT license
 #if defined(LIBVA_FOUND)
+// Hardware acceleration code from FFmpeg/doc/examples/hw_decode.c originally under MIT license
 namespace {
+constexpr std::array VAAPI_DRIVERS = {
+    "i915",
+    "amdgpu",
+};
+
 AVPixelFormat GetHwFormat(AVCodecContext*, const AVPixelFormat* pix_fmts) {
     for (const AVPixelFormat* p = pix_fmts; *p != AV_PIX_FMT_NONE; ++p) {
         if (*p == AV_PIX_FMT_VAAPI) {
@@ -56,15 +34,10 @@ AVPixelFormat GetHwFormat(AVCodecContext*, const AVPixelFormat* pix_fmts) {
     return *pix_fmts;
 }
 
-constexpr std::array VAAPI_DRIVERS = {
-    "i915",
-    "amdgpu",
-};
-
 bool CreateVaapiHwdevice(AVBufferRef** av_hw_device) {
     AVDictionary* hwdevice_options = nullptr;
     av_dict_set(&hwdevice_options, "connection_type", "drm", 0);
-    for (auto driver : VAAPI_DRIVERS) {
+    for (const auto& driver : VAAPI_DRIVERS) {
         av_dict_set(&hwdevice_options, "kernel_driver", driver, 0);
         const int hwdevice_error = av_hwdevice_ctx_create(av_hw_device, AV_HWDEVICE_TYPE_VAAPI,
                                                           nullptr, hwdevice_options, 0);
@@ -82,7 +55,30 @@ bool CreateVaapiHwdevice(AVBufferRef** av_hw_device) {
 } // namespace
 #endif
 
+void AVFrameDeleter(AVFrame* ptr) {
+    av_frame_free(&ptr);
+}
+
+Codec::Codec(GPU& gpu_, const NvdecCommon::NvdecRegisters& regs)
+    : gpu(gpu_), state{regs}, h264_decoder(std::make_unique<Decoder::H264>(gpu)),
+      vp9_decoder(std::make_unique<Decoder::VP9>(gpu)) {}
+
+Codec::~Codec() {
+    if (!initialized) {
+        return;
+    }
+    // Free libav memory
+    avcodec_send_packet(av_codec_ctx, nullptr);
+    AVFrame* av_frame = av_frame_alloc();
+    avcodec_receive_frame(av_codec_ctx, av_frame);
+    avcodec_flush_buffers(av_codec_ctx);
+    av_frame_free(&av_frame);
+    avcodec_close(av_codec_ctx);
+    av_buffer_unref(&av_hw_device);
+}
+
 void Codec::InitializeHwdec() {
+    // Prioritize integrated GPU to mitigate bandwidth bottlenecks
 #if defined(LIBVA_FOUND)
     if (CreateVaapiHwdevice(&av_hw_device)) {
         const auto hw_device_ctx = av_buffer_ref(av_hw_device);
@@ -92,65 +88,7 @@ void Codec::InitializeHwdec() {
         return;
     }
 #endif
-
-    // TODO NVDEC, but integrated GPU should be used first to avoid PCI
-}
-
-[[nodiscard]] AVFrame* Codec::DecodeImpl(RawFrame& raw_frame) {
-    AVPacket packet{};
-    av_init_packet(&packet);
-    packet.data = raw_frame.frame_data.data();
-    packet.size = static_cast<int>(raw_frame.frame_data.size());
-
-    if (const int ret = avcodec_send_packet(av_codec_ctx, &packet); ret) {
-        LOG_DEBUG(Service_NVDRV, "avcodec_send_packet error {}", ret);
-        return nullptr;
-    }
-
-    // Only receive/store visible frames
-    if (raw_frame.vp9_hidden_frame) {
-        return nullptr;
-    }
-    AVFrame* hw_frame = av_frame_alloc();
-    AVFrame* sw_frame = hw_frame;
-    ASSERT_MSG(hw_frame, "av_frame_alloc hw_frame failed");
-    if (const int ret = avcodec_receive_frame(av_codec_ctx, hw_frame); ret) {
-        LOG_DEBUG(Service_NVDRV, "avcodec_receive_frame error {}", ret);
-        av_frame_free(&hw_frame);
-        return nullptr;
-    }
-
-    if (!hw_frame->width || !hw_frame->height) {
-        LOG_WARNING(Service_NVDRV, "Zero width or height in frame");
-        av_frame_free(&hw_frame);
-        return nullptr;
-    }
-
-#if defined(LIBVA_FOUND)
-    // Hardware acceleration code from FFmpeg/doc/examples/hw_decode.c under MIT license
-    if (hw_frame->format == AV_PIX_FMT_VAAPI) {
-        sw_frame = av_frame_alloc();
-        ASSERT_MSG(sw_frame, "av_frame_alloc sw_frame failed");
-        // Can't use AV_PIX_FMT_YUV420P and share code with software decoding in vic.cpp
-        // because Intel drivers crash unless using AV_PIX_FMT_NV12
-        sw_frame->format = AV_PIX_FMT_NV12;
-        const int transfer_data_ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0);
-        ASSERT_MSG(!transfer_data_ret, "av_hwframe_transfer_data error {}", transfer_data_ret);
-        av_frame_free(&hw_frame);
-    }
-#endif
-
-    switch (sw_frame->format) {
-    case AV_PIX_FMT_YUV420P:
-    case AV_PIX_FMT_NV12:
-        break;
-    default:
-        UNIMPLEMENTED_MSG("Unexpected video format from host graphics: {}", sw_frame->format);
-        av_frame_free(&sw_frame);
-        return nullptr;
-    }
-
-    return sw_frame;
+    // TODO more GPU accelerated decoders
 }
 
 void Codec::Initialize() {
@@ -169,12 +107,10 @@ void Codec::Initialize() {
     av_codec = avcodec_find_decoder(codec);
     av_codec_ctx = avcodec_alloc_context3(av_codec);
     av_opt_set(av_codec_ctx->priv_data, "tune", "zerolatency", 0);
-
     InitializeHwdec();
     if (!av_codec_ctx->hw_device_ctx) {
         LOG_INFO(Service_NVDRV, "Using FFmpeg software decoding");
     }
-
     const auto av_error = avcodec_open2(av_codec_ctx, av_codec, nullptr);
     if (av_error < 0) {
         LOG_ERROR(Service_NVDRV, "avcodec_open2() Failed.");
@@ -182,7 +118,6 @@ void Codec::Initialize() {
         av_buffer_unref(&av_hw_device);
         return;
     }
-
     initialized = true;
 }
 
@@ -198,28 +133,61 @@ void Codec::Decode() {
     if (is_first_frame) {
         Initialize();
     }
-
     bool vp9_hidden_frame = false;
     std::vector<u8> frame_data;
-
     if (current_codec == NvdecCommon::VideoCodec::H264) {
         frame_data = h264_decoder->ComposeFrameHeader(state, is_first_frame);
     } else if (current_codec == NvdecCommon::VideoCodec::Vp9) {
         frame_data = vp9_decoder->ComposeFrameHeader(state);
         vp9_hidden_frame = vp9_decoder->WasFrameHidden();
     }
-
-    RawFrame raw_frame{
-        .frame_data = frame_data,
-        .vp9_hidden_frame = vp9_hidden_frame,
-    };
-    // TODO async
-    AVFrame* sw_frame = DecodeImpl(raw_frame);
-    if (sw_frame) {
-        if (av_frames.push(AVFramePtr{sw_frame, AVFrameDeleter}); av_frames.size() > 10) {
-            LOG_TRACE(Service_NVDRV, "av_frames.push overflow dropped frame");
-            av_frames.pop();
-        }
+    AVPacket packet{};
+    av_init_packet(&packet);
+    packet.data = frame_data.data();
+    packet.size = static_cast<int>(frame_data.size());
+    if (const int ret = avcodec_send_packet(av_codec_ctx, &packet); ret) {
+        LOG_DEBUG(Service_NVDRV, "avcodec_send_packet error {}", ret);
+        return;
+    }
+    // Only receive/store visible frames
+    if (vp9_hidden_frame) {
+        return;
+    }
+    AVFrame* hw_frame = av_frame_alloc();
+    AVFrame* sw_frame = hw_frame;
+    ASSERT_MSG(hw_frame, "av_frame_alloc hw_frame failed");
+    if (const int ret = avcodec_receive_frame(av_codec_ctx, hw_frame); ret) {
+        LOG_DEBUG(Service_NVDRV, "avcodec_receive_frame error {}", ret);
+        av_frame_free(&hw_frame);
+        return;
+    }
+    if (!hw_frame->width || !hw_frame->height) {
+        LOG_WARNING(Service_NVDRV, "Zero width or height in frame");
+        av_frame_free(&hw_frame);
+        return;
+    }
+#if defined(LIBVA_FOUND)
+    // Hardware acceleration code from FFmpeg/doc/examples/hw_decode.c under MIT license
+    if (hw_frame->format == AV_PIX_FMT_VAAPI) {
+        sw_frame = av_frame_alloc();
+        ASSERT_MSG(sw_frame, "av_frame_alloc sw_frame failed");
+        // Can't use AV_PIX_FMT_YUV420P and share code with software decoding in vic.cpp
+        // because Intel drivers crash unless using AV_PIX_FMT_NV12
+        sw_frame->format = AV_PIX_FMT_NV12;
+        const int transfer_data_ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0);
+        ASSERT_MSG(!transfer_data_ret, "av_hwframe_transfer_data error {}", transfer_data_ret);
+        av_frame_free(&hw_frame);
+    }
+#endif
+    if (sw_frame->format != AV_PIX_FMT_YUV420P && sw_frame->format != AV_PIX_FMT_NV12) {
+        UNIMPLEMENTED_MSG("Unexpected video format from host graphics: {}", sw_frame->format);
+        av_frame_free(&sw_frame);
+        return;
+    }
+    av_frames.push(AVFramePtr{sw_frame, AVFrameDeleter});
+    if (av_frames.size() > 10) {
+        LOG_TRACE(Service_NVDRV, "av_frames.push overflow dropped frame");
+        av_frames.pop();
     }
 }
 
@@ -229,7 +197,6 @@ AVFramePtr Codec::GetCurrentFrame() {
     if (av_frames.empty()) {
         return AVFramePtr{nullptr, AVFrameDeleter};
     }
-
     AVFramePtr frame = std::move(av_frames.front());
     av_frames.pop();
     return frame;
@@ -255,5 +222,4 @@ std::string_view Codec::GetCurrentCodecName() const {
         return "Unknown";
     }
 }
-
 } // namespace Tegra
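For context on the codec.cpp change above: Codec::Decode() now drives libavcodec's send/receive API directly instead of delegating to DecodeImpl(). The sketch below is illustrative only and not yuzu code; the helper name DecodeOnce is invented, the AVCodecContext is assumed to be already opened (with hw_device_ctx and get_format set up as in CreateVaapiHwdevice when VA-API is available), and errors simply return nullptr.

// Illustrative sketch only (not yuzu code): the libavcodec send/receive decode
// pattern that Codec::Decode() follows, assuming FFmpeg 4.x C headers and an
// already-opened AVCodecContext (optionally with a VAAPI hw_device_ctx).
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
}

#include <cstdint>
#include <vector>

// Hypothetical helper: submit one composed bitstream chunk and try to fetch a
// decoded frame. Returns nullptr if no frame is available yet or on error.
AVFrame* DecodeOnce(AVCodecContext* ctx, std::vector<std::uint8_t>& bitstream) {
    AVPacket* packet = av_packet_alloc();
    packet->data = bitstream.data();
    packet->size = static_cast<int>(bitstream.size());
    const int send_ret = avcodec_send_packet(ctx, packet);
    av_packet_free(&packet); // the decoder keeps its own copy of the data
    if (send_ret < 0) {
        return nullptr;
    }
    AVFrame* hw_frame = av_frame_alloc();
    if (avcodec_receive_frame(ctx, hw_frame) < 0) {
        av_frame_free(&hw_frame); // e.g. AVERROR(EAGAIN): decoder wants more input
        return nullptr;
    }
    if (hw_frame->format != AV_PIX_FMT_VAAPI) {
        return hw_frame; // software decoding already produced a CPU-side frame
    }
    // VAAPI path, mirroring FFmpeg's doc/examples/hw_decode.c: copy the GPU
    // surface into an NV12 frame the rest of the pipeline can read.
    AVFrame* sw_frame = av_frame_alloc();
    sw_frame->format = AV_PIX_FMT_NV12;
    if (av_hwframe_transfer_data(sw_frame, hw_frame, 0) < 0) {
        av_frame_free(&sw_frame);
        sw_frame = nullptr;
    }
    av_frame_free(&hw_frame);
    return sw_frame;
}

The real Decode() additionally skips hidden VP9 frames, rejects pixel formats other than YUV420P and NV12, and caps the queued frame count at 10, as the hunk shows.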
failed"); + if (const int ret = avcodec_receive_frame(av_codec_ctx, hw_frame); ret) { + LOG_DEBUG(Service_NVDRV, "avcodec_receive_frame error {}", ret); + av_frame_free(&hw_frame); + return; + } + if (!hw_frame->width || !hw_frame->height) { + LOG_WARNING(Service_NVDRV, "Zero width or height in frame"); + av_frame_free(&hw_frame); + return; + } +#if defined(LIBVA_FOUND) + // Hardware acceleration code from FFmpeg/doc/examples/hw_decode.c under MIT license + if (hw_frame->format == AV_PIX_FMT_VAAPI) { + sw_frame = av_frame_alloc(); + ASSERT_MSG(sw_frame, "av_frame_alloc sw_frame failed"); + // Can't use AV_PIX_FMT_YUV420P and share code with software decoding in vic.cpp + // because Intel drivers crash unless using AV_PIX_FMT_NV12 + sw_frame->format = AV_PIX_FMT_NV12; + const int transfer_data_ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0); + ASSERT_MSG(!transfer_data_ret, "av_hwframe_transfer_data error {}", transfer_data_ret); + av_frame_free(&hw_frame); + } +#endif + if (sw_frame->format != AV_PIX_FMT_YUV420P && sw_frame->format != AV_PIX_FMT_NV12) { + UNIMPLEMENTED_MSG("Unexpected video format from host graphics: {}", sw_frame->format); + av_frame_free(&sw_frame); + return; + } + av_frames.push(AVFramePtr{sw_frame, AVFrameDeleter}); + if (av_frames.size() > 10) { + LOG_TRACE(Service_NVDRV, "av_frames.push overflow dropped frame"); + av_frames.pop(); } } @@ -229,7 +197,6 @@ AVFramePtr Codec::GetCurrentFrame() { if (av_frames.empty()) { return AVFramePtr{nullptr, AVFrameDeleter}; } - AVFramePtr frame = std::move(av_frames.front()); av_frames.pop(); return frame; @@ -255,5 +222,4 @@ std::string_view Codec::GetCurrentCodecName() const { return "Unknown"; } } - } // namespace Tegra diff --git a/src/video_core/command_classes/codecs/codec.h b/src/video_core/command_classes/codecs/codec.h index 62ac6cd9a..71936203f 100755 --- a/src/video_core/command_classes/codecs/codec.h +++ b/src/video_core/command_classes/codecs/codec.h @@ -54,13 +54,7 @@ public: [[nodiscard]] std::string_view GetCurrentCodecName() const; private: - struct RawFrame { - std::vector frame_data; - bool vp9_hidden_frame; - }; - void InitializeHwdec(); - AVFrame* DecodeImpl(RawFrame&); bool initialized{}; NvdecCommon::VideoCodec current_codec{NvdecCommon::VideoCodec::None}; diff --git a/src/video_core/command_classes/vic.cpp b/src/video_core/command_classes/vic.cpp index 9b279a1cc..d5e77941c 100755 --- a/src/video_core/command_classes/vic.cpp +++ b/src/video_core/command_classes/vic.cpp @@ -88,7 +88,9 @@ void Vic::Execute() { scaler_height = frame->height; } // Get Converted frame - const std::size_t linear_size = static_cast(frame->width * frame->height * 4); + const u32 width = static_cast(frame->width); + const u32 height = static_cast(frame->height); + const std::size_t linear_size = width * height * 4; // Only allocate frame_buffer once per stream, as the size is not expected to change if (!converted_frame_buffer) { @@ -105,8 +107,6 @@ void Vic::Execute() { if (blk_kind != 0) { // swizzle pitch linear to block linear const u32 block_height = static_cast(config.block_linear_height_log2); - const u32 width = static_cast(frame->width); - const u32 height = static_cast(frame->height); const auto size = Tegra::Texture::CalculateSize(true, 4, width, height, 1, block_height, 0); luma_buffer.resize(size); diff --git a/src/yuzu/configuration/configure_tas.cpp b/src/yuzu/configuration/configure_tas.cpp index 00d6c1ba5..b666b175a 100755 --- a/src/yuzu/configuration/configure_tas.cpp +++ 
diff --git a/src/yuzu/configuration/configure_tas.cpp b/src/yuzu/configuration/configure_tas.cpp
index 00d6c1ba5..b666b175a 100755
--- a/src/yuzu/configuration/configure_tas.cpp
+++ b/src/yuzu/configuration/configure_tas.cpp
@@ -55,7 +55,7 @@ void ConfigureTasDialog::SetDirectory(DirectoryTarget target, QLineEdit* edit) {
 
     QString str = QFileDialog::getExistingDirectory(this, caption, edit->text());
 
-    if (str.isNull() || str.isEmpty()) {
+    if (str.isEmpty()) {
         return;
     }
diff --git a/src/yuzu/configuration/configure_tas.ui b/src/yuzu/configuration/configure_tas.ui
index 6574943db..d92a5d9e4 100755
--- a/src/yuzu/configuration/configure_tas.ui
+++ b/src/yuzu/configuration/configure_tas.ui
@@ -19,13 +19,13 @@
-      Tool Assisted Speedrun tool
+      TAS
-      This tool reads controller input from a script files under the same format as TAS-nx files. For more delailed explanation please consult the FAQ on the yuzu website.
+      Reads controller input from scripts in the same format as TAS-nx scripts. For a more detailed explanation please consult the FAQ on the yuzu website.
       true
@@ -35,34 +35,13 @@
-      How to use it:
+      To check which hotkeys control the playback/recording, please refer to the Hotkey settings (General -> Hotkeys).
       true
-
-
-
-      Ctrl + F5: Start or stop TAS bot
-
-
-
-
-
-
-      Ctrl + F6: Reload file or restart bot from the beggining
-
-
-
-
-
-
-      Ctrl + F7: Start or stop recording input from player 1
-
-
-
@@ -73,7 +52,7 @@
-      TAS Settings
+      Settings
@@ -117,7 +96,7 @@
-      TAS Directories
+      Script Directory
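Regarding the configure_tas.cpp hunk above: QFileDialog::getExistingDirectory() returns a null QString when the dialog is cancelled, and a null QString is also empty, so isEmpty() alone covers both cases. A standalone illustration (not yuzu code):

// Standalone illustration (not yuzu code) of the QString behaviour relied on
// by the configure_tas.cpp change: a null QString is also an empty QString,
// so isEmpty() subsumes the isNull() check.
#include <QString>
#include <cassert>

int main() {
    const QString cancelled;       // what a cancelled file dialog returns: null
    const QString empty_path{""};  // empty but not null
    assert(cancelled.isNull() && cancelled.isEmpty());
    assert(!empty_path.isNull() && empty_path.isEmpty());
    return 0;
}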