ffmpeg/tools: Set correct information when encoding

Improves the previous logic and makes it compatible with the new additions in 26.x, such as sRGB. This was previously broken, as the focus had been on existing features that could be tested without requiring a compiler to be installed.

An incorrect understanding of how sRGB interacts with RGB and YCC/YUV formats also caused sRGB to be treated as RGB when I444 was selected. This should now be fixed as well, hopefully permanently.

Fixes #331
Michael Fabian 'Xaymar' Dirks 2020-10-06 12:18:35 +02:00
parent e8d5edc9eb
commit 9938422d5c
4 changed files with 100 additions and 82 deletions
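
For context on the sRGB fix described in the commit message: sRGB shares BT.709 primaries, and once a frame is carried as planar YCC (for example I444) it also uses the BT.709 matrix coefficients; only the transfer characteristic differs (IEC 61966-2-1). The sketch below is illustrative only, not code from this commit, and shows the tag combination the new ffmpeg::tools::obs_to_av_color_* helpers produce for VIDEO_CS_SRGB.

// Illustrative sketch (not part of this commit): the FFmpeg color tags that an
// sRGB-flagged OBS source maps to once it is carried as YCC (e.g. I444).
// Tagging it AVCOL_SPC_RGB would only be correct for actual RGB pixel formats.
extern "C" {
#include <libavutil/pixfmt.h>
}

struct color_tags {
	AVColorSpace                  space;     // matrix coefficients
	AVColorPrimaries              primaries; // chromaticities
	AVColorTransferCharacteristic transfer;  // transfer function (OETF/EOTF)
};

constexpr color_tags srgb_as_ycc = {AVCOL_SPC_BT709, AVCOL_PRI_BT709, AVCOL_TRC_IEC61966_2_1};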

View file

@@ -32,6 +32,7 @@
 // Common C++ includes
 #include <algorithm>
+#include <array>
 #include <limits>
 #include <map>
 #include <memory>

View file

@@ -439,16 +439,18 @@ void ffmpeg_instance::initialize_sw(obs_data_t* settings)
 		}
 	}
 
+	// Setup from OBS information.
+	::ffmpeg::tools::context_setup_from_obs(voi, _context);
+
+	// Override with other information.
 	_context->width  = static_cast<int>(obs_encoder_get_width(_self));
 	_context->height = static_cast<int>(obs_encoder_get_height(_self));
-	::ffmpeg::tools::setup_obs_color(voi->colorspace, voi->range, _context);
 	_context->pix_fmt = _pixfmt_target;
-	_context->field_order             = AV_FIELD_PROGRESSIVE;
-	_context->ticks_per_frame         = 1;
-	_context->sample_aspect_ratio.num = _context->sample_aspect_ratio.den = 1;
-	_context->framerate.num = _context->time_base.den = static_cast<int>(voi->fps_num);
-	_context->framerate.den = _context->time_base.num = static_cast<int>(voi->fps_den);
+
+	// Prevent pixelation by sampling "center" instead of corners. This creates
+	// a smoother look, which may not be H.264/AVC standard compliant, however it
+	// provides better support for scaling algorithms, such as Bicubic.
+	_context->chroma_sample_location = AVCHROMA_LOC_CENTER;
 
 	_scaler.set_source_size(static_cast<uint32_t>(_context->width), static_cast<uint32_t>(_context->height));
 	_scaler.set_source_color(_context->color_range == AVCOL_RANGE_JPEG, _context->colorspace);
@@ -473,36 +475,37 @@ void ffmpeg_instance::initialize_sw(obs_data_t* settings)
 void ffmpeg_instance::initialize_hw(obs_data_t*)
 {
-#ifdef D_PLATFORM_WINDOWS
+#ifndef D_PLATFORM_WINDOWS
+	throw std::runtime_error("OBS Studio currently does not support zero copy encoding for this platform.");
+#else
 	// Initialize Video Encoding
-	auto voi = video_output_get_info(obs_encoder_video(_self));
+	const video_output_info* voi = video_output_get_info(obs_encoder_video(_self));
 
-	_context->width                   = static_cast<int>(voi->width);
-	_context->height                  = static_cast<int>(voi->height);
-	_context->field_order             = AV_FIELD_PROGRESSIVE;
-	_context->ticks_per_frame         = 1;
-	_context->sample_aspect_ratio.num = _context->sample_aspect_ratio.den = 1;
-	_context->framerate.num = _context->time_base.den = static_cast<int>(voi->fps_num);
-	_context->framerate.den = _context->time_base.num = static_cast<int>(voi->fps_den);
-	::ffmpeg::tools::setup_obs_color(voi->colorspace, voi->range, _context);
-	_context->sw_pix_fmt = ::ffmpeg::tools::obs_videoformat_to_avpixelformat(voi->format);
+	// Apply pixel format settings.
+	::ffmpeg::tools::context_setup_from_obs(voi, _context);
+	_context->sw_pix_fmt = _context->pix_fmt;
 	_context->pix_fmt    = AV_PIX_FMT_D3D11;
 
+	// Try to create a hardware context.
 	_context->hw_device_ctx = _hwinst->create_device_context();
 	_context->hw_frames_ctx = av_hwframe_ctx_alloc(_context->hw_device_ctx);
-	if (!_context->hw_frames_ctx)
-		throw std::runtime_error("Allocating hardware context failed, chosen pixel format is likely not supported.");
+	if (!_context->hw_frames_ctx) {
+		throw std::runtime_error("Creating hardware context failed.");
+	}
 
+	// Initialize Hardware Context
 	AVHWFramesContext* ctx = reinterpret_cast<AVHWFramesContext*>(_context->hw_frames_ctx->data);
 	ctx->width             = _context->width;
 	ctx->height            = _context->height;
 	ctx->format            = _context->pix_fmt;
 	ctx->sw_format         = _context->sw_pix_fmt;
 
-	if (av_hwframe_ctx_init(_context->hw_frames_ctx) < 0)
-		throw std::runtime_error("Initializing hardware context failed, chosen pixel format is likely not supported.");
-#else
-	throw std::runtime_error("OBS Studio currently does not support zero copy encoding for this platform.");
+	if (int32_t res = av_hwframe_ctx_init(_context->hw_frames_ctx); res < 0) {
+		std::array<char, 2048> buffer;
+		size_t len = static_cast<size_t>(snprintf(buffer.data(), buffer.size(),
+			"Initializing hardware context failed with error: %s (%" PRIu32 ")",
+			::ffmpeg::tools::get_error_description(res), res));
+		throw std::runtime_error(std::string(buffer.data(), buffer.data() + len));
+	}
 #endif
 }
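
The new failure path above formats the av_hwframe_ctx_init() return code through ::ffmpeg::tools::get_error_description(). As a rough sketch only, assuming such a helper wraps libavutil's av_strerror() (the plugin's actual implementation may differ), it could look like this:

// Hypothetical sketch of an av_strerror()-based error-description helper;
// the real ::ffmpeg::tools::get_error_description() may be implemented differently.
#include <array>
#include <string>
extern "C" {
#include <libavutil/error.h>
}

inline std::string describe_av_error(int error)
{
	std::array<char, AV_ERROR_MAX_STRING_SIZE> buffer{};
	if (av_strerror(error, buffer.data(), buffer.size()) < 0) {
		return "Unknown Error"; // No description is known for this code.
	}
	return std::string(buffer.data());
}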

View file

@@ -128,21 +128,7 @@ AVPixelFormat tools::get_least_lossy_format(const AVPixelFormat* haystack, AVPix
 	return avcodec_find_best_pix_fmt_of_list(haystack, needle, 0, &data_loss);
 }
 
-AVColorSpace tools::obs_videocolorspace_to_avcolorspace(video_colorspace v)
-{
-	switch (v) {
-	case VIDEO_CS_DEFAULT:
-	case VIDEO_CS_709:
-		return AVCOL_SPC_BT709;
-	case VIDEO_CS_601:
-		return AVCOL_SPC_BT470BG;
-	case VIDEO_CS_SRGB:
-		return AVCOL_SPC_RGB;
-	}
-	throw std::invalid_argument("unknown color space");
-}
-
-AVColorRange tools::obs_videorangetype_to_avcolorrange(video_range_type v)
+AVColorRange tools::obs_to_av_color_range(video_range_type v)
 {
 	switch (v) {
 	case VIDEO_RANGE_DEFAULT:
@@ -151,7 +137,50 @@ AVColorRange tools::obs_videorangetype_to_avcolorrange(video_range_type v)
 	case VIDEO_RANGE_FULL:
 		return AVCOL_RANGE_JPEG;
 	}
-	throw std::invalid_argument("unknown range");
+	throw std::invalid_argument("Unknown Color Range");
 }
 
+AVColorSpace tools::obs_to_av_color_space(video_colorspace v)
+{
+	switch (v) {
+	case VIDEO_CS_601:
+		return AVCOL_SPC_BT470BG;
+	case VIDEO_CS_DEFAULT:
+	case VIDEO_CS_709:
+	case VIDEO_CS_SRGB:
+		return AVCOL_SPC_BT709;
+	default:
+		throw std::invalid_argument("Unknown Color Space");
+	}
+}
+
+AVColorPrimaries ffmpeg::tools::obs_to_av_color_primary(video_colorspace v)
+{
+	switch (v) {
+	case VIDEO_CS_601:
+		return AVCOL_PRI_BT470BG;
+	case VIDEO_CS_DEFAULT:
+	case VIDEO_CS_709:
+	case VIDEO_CS_SRGB:
+		return AVCOL_PRI_BT709;
+	default:
+		throw std::invalid_argument("Unknown Color Primaries");
+	}
+}
+
+AVColorTransferCharacteristic ffmpeg::tools::obs_to_av_color_transfer_characteristics(video_colorspace v)
+{
+	switch (v) {
+	case VIDEO_CS_601:
+		return AVCOL_TRC_LINEAR;
+	case VIDEO_CS_DEFAULT:
+	case VIDEO_CS_709:
+		return AVCOL_TRC_BT709;
+	case VIDEO_CS_SRGB:
+		return AVCOL_TRC_IEC61966_2_1;
+	default:
+		throw std::invalid_argument("Unknown Color Transfer Characteristics");
+	}
+}
+
 bool tools::can_hardware_encode(const AVCodec* codec)
@@ -204,43 +233,28 @@ std::vector<AVPixelFormat> tools::get_software_formats(const AVPixelFormat* list
 	return std::move(fmts);
 }
 
-void tools::setup_obs_color(video_colorspace colorspace, video_range_type range, AVCodecContext* context)
+void tools::context_setup_from_obs(const video_output_info* voi, AVCodecContext* context)
 {
-	std::map<video_colorspace, std::tuple<AVColorSpace, AVColorPrimaries, AVColorTransferCharacteristic>> colorspaces = {
-		{VIDEO_CS_601, {AVCOL_SPC_BT470BG, AVCOL_PRI_BT470BG, AVCOL_TRC_SMPTE170M}},
-		{VIDEO_CS_709, {AVCOL_SPC_BT709, AVCOL_PRI_BT709, AVCOL_TRC_BT709}},
-		{VIDEO_CS_SRGB, {AVCOL_SPC_RGB, AVCOL_PRI_BT709, AVCOL_TRC_IEC61966_2_1}},
-	};
-	std::map<video_range_type, AVColorRange> colorranges = {
-		{VIDEO_RANGE_PARTIAL, AVCOL_RANGE_MPEG},
-		{VIDEO_RANGE_FULL, AVCOL_RANGE_JPEG},
-	};
+	// Resolution
+	context->width  = static_cast<int>(voi->width);
+	context->height = static_cast<int>(voi->height);
 
-	{
-		if (colorspace == VIDEO_CS_DEFAULT)
-			colorspace = VIDEO_CS_601;
-		if (range == VIDEO_RANGE_DEFAULT)
-			range = VIDEO_RANGE_PARTIAL;
-	}
+	// Framerate
+	context->ticks_per_frame = 1;
+	context->framerate.num = context->time_base.den = static_cast<int>(voi->fps_num);
+	context->framerate.den = context->time_base.num = static_cast<int>(voi->fps_den);
 
-	{
-		auto found = colorspaces.find(colorspace);
-		if (found != colorspaces.end()) {
-			context->colorspace      = std::get<AVColorSpace>(found->second);
-			context->color_primaries = std::get<AVColorPrimaries>(found->second);
-			context->color_trc       = std::get<AVColorTransferCharacteristic>(found->second);
-		}
-	}
-	{
-		auto found = colorranges.find(range);
-		if (found != colorranges.end()) {
-			context->color_range = found->second;
-		}
-	}
+	// Aspect Ratio, Progressive
+	context->sample_aspect_ratio.num = 1;
+	context->sample_aspect_ratio.den = 1;
+	context->field_order             = AV_FIELD_PROGRESSIVE;
 
-	// Downscaling should result in downscaling, not pixelation
-	context->chroma_sample_location = AVCHROMA_LOC_CENTER;
+	// Decipher Pixel information
+	context->pix_fmt         = obs_videoformat_to_avpixelformat(voi->format);
+	context->color_range     = obs_to_av_color_range(voi->range);
+	context->colorspace      = obs_to_av_color_space(voi->colorspace);
+	context->color_primaries = obs_to_av_color_primary(voi->colorspace);
+	context->color_trc       = obs_to_av_color_transfer_characteristics(voi->colorspace);
 }
 
 const char* tools::get_std_compliance_name(int compliance)
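
To verify what context_setup_from_obs() actually wrote into the codec context, libavutil's naming helpers can be used for logging. This is a hypothetical debug snippet, not part of the commit:

// Hypothetical debug helper: print the color metadata applied to a codec context
// using libavutil's naming functions (each may return NULL for unknown values).
#include <cstdio>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/pixdesc.h>
}

static const char* name_or_unknown(const char* name)
{
	return name ? name : "unknown";
}

static void log_color_tags(const AVCodecContext* ctx)
{
	std::printf("range=%s space=%s primaries=%s trc=%s chroma_loc=%s\n",
	            name_or_unknown(av_color_range_name(ctx->color_range)),
	            name_or_unknown(av_color_space_name(ctx->colorspace)),
	            name_or_unknown(av_color_primaries_name(ctx->color_primaries)),
	            name_or_unknown(av_color_transfer_name(ctx->color_trc)),
	            name_or_unknown(av_chroma_location_name(ctx->chroma_sample_location)));
}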

View file

@@ -43,20 +43,20 @@ namespace ffmpeg::tools {
 	const char* get_error_description(int error);
 
 	AVPixelFormat obs_videoformat_to_avpixelformat(video_format v);
 	video_format  avpixelformat_to_obs_videoformat(AVPixelFormat v);
 	AVPixelFormat get_least_lossy_format(const AVPixelFormat* haystack, AVPixelFormat needle);
-	AVColorSpace  obs_videocolorspace_to_avcolorspace(video_colorspace v);
-	AVColorRange  obs_videorangetype_to_avcolorrange(video_range_type v);
+	AVColorRange                  obs_to_av_color_range(video_range_type v);
+	AVColorSpace                  obs_to_av_color_space(video_colorspace v);
+	AVColorPrimaries              obs_to_av_color_primary(video_colorspace v);
+	AVColorTransferCharacteristic obs_to_av_color_transfer_characteristics(video_colorspace v);
 
 	bool can_hardware_encode(const AVCodec* codec);
 
 	std::vector<AVPixelFormat> get_software_formats(const AVPixelFormat* list);
 
-	void setup_obs_color(video_colorspace colorspace, video_range_type range, AVCodecContext* context);
+	void context_setup_from_obs(const video_output_info* voi, AVCodecContext* context);
 
 	const char* get_std_compliance_name(int compliance);