diff --git a/.clang-format b/.clang-format
index 72c5d127..aae287a2 100644
--- a/.clang-format
+++ b/.clang-format
@@ -5,11 +5,12 @@
 # Basic Formatting
 TabWidth: 4
 UseTab: ForContinuationAndIndentation
-ColumnLimit: 120
+ColumnLimit: 65535
+#- 0 does not respect the original line breaks!
 
 # Language
 Language: Cpp
-Standard: Cpp11
+Standard: c++17
 
 # Indentation
 AccessModifierOffset: 0
@@ -48,7 +49,7 @@
 AlignConsecutiveAssignments: true
 AlignConsecutiveDeclarations: true
 AlignEscapedNewlines: Left
 AlignOperands: true
-AlignTrailingComments: true
+AlignTrailingComments: false
 DerivePointerAlignment: false
 PointerAlignment: Left
diff --git a/source/configuration.cpp b/source/configuration.cpp
index 31459580..35b54105 100644
--- a/source/configuration.cpp
+++ b/source/configuration.cpp
@@ -43,8 +43,7 @@ streamfx::configuration::configuration() : _config_path(), _data(), _task_lock()
 	if (!std::filesystem::exists(_config_path) || !std::filesystem::is_regular_file(_config_path)) {
 		throw std::runtime_error("Configuration does not exist.");
 	} else {
-		obs_data_t* data =
-			obs_data_create_from_json_file_safe(_config_path.u8string().c_str(), path_backup_ext.data());
+		obs_data_t* data = obs_data_create_from_json_file_safe(_config_path.u8string().c_str(), path_backup_ext.data());
 		if (!data) {
 			throw std::runtime_error("Failed to load configuration from disk.");
 		} else {
@@ -70,8 +69,7 @@ void streamfx::configuration::save()
 		if (_config_path.has_parent_path()) {
 			std::filesystem::create_directories(_config_path.parent_path());
 		}
-		if (!obs_data_save_json_safe(_data.get(), _config_path.u8string().c_str(), ".tmp",
-			path_backup_ext.data())) {
+		if (!obs_data_save_json_safe(_data.get(), _config_path.u8string().c_str(), ".tmp", path_backup_ext.data())) {
 			D_LOG_ERROR("Failed to save configuration file.", nullptr);
 		}
 	});
diff --git a/source/encoders/codecs/hevc.cpp b/source/encoders/codecs/hevc.cpp
index 8967c5ab..c5aadf72 100644
--- a/source/encoders/codecs/hevc.cpp
+++ b/source/encoders/codecs/hevc.cpp
@@ -181,8 +181,7 @@ void progress_parse(uint8_t*& ptr, uint8_t* end, size_t& sz)
 	sz = get_nal_size(ptr, end);
 }
 
-void hevc::extract_header_sei(uint8_t* data, std::size_t sz_data, std::vector<uint8_t>& header,
-	std::vector<uint8_t>& sei)
+void hevc::extract_header_sei(uint8_t* data, std::size_t sz_data, std::vector<uint8_t>& header, std::vector<uint8_t>& sei)
 {
 	uint8_t* ptr = data;
 	uint8_t* end = data + sz_data;
diff --git a/source/encoders/codecs/hevc.hpp b/source/encoders/codecs/hevc.hpp
index e417f522..c20943ba 100644
--- a/source/encoders/codecs/hevc.hpp
+++ b/source/encoders/codecs/hevc.hpp
@@ -42,6 +42,5 @@ namespace streamfx::encoder::codec::hevc {
 		UNKNOWN = -1,
 	};
 
-	void extract_header_sei(uint8_t* data, std::size_t sz_data, std::vector<uint8_t>& header,
-		std::vector<uint8_t>& sei);
+	void extract_header_sei(uint8_t* data, std::size_t sz_data, std::vector<uint8_t>& header, std::vector<uint8_t>& sei);
 } // namespace streamfx::encoder::codec::hevc
diff --git a/source/encoders/encoder-ffmpeg.cpp b/source/encoders/encoder-ffmpeg.cpp
index db703df3..3401433a 100644
--- a/source/encoders/encoder-ffmpeg.cpp
+++ b/source/encoders/encoder-ffmpeg.cpp
@@ -97,10 +97,8 @@ ffmpeg_instance::ffmpeg_instance(obs_data_t* settings, obs_encoder_t* self, bool
 	// Initialize GPU Stuff
 	if (is_hw) {
 		// Abort if user specified manual override.
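Note on the keyframe hunks further down in this file: when the interval type is "seconds", the GOP size is derived from the effective output framerate (fps_num divided by fps_den times the frame-skipping divisor). A minimal standalone sketch of that arithmetic, with illustrative values in place of the plugin's OBS settings plumbing (function name and literals are ours, not from the patch):

```cpp
#include <cstdint>
#include <cstdio>

// Mirrors the gop_size arithmetic in ffmpeg_instance::update(): the effective
// framerate shrinks with the frame-skipping divisor, and the keyframe interval
// in seconds is converted into a frame count.
static int gop_size_from_seconds(uint32_t fps_num, uint32_t fps_den, uint32_t divisor, double interval_seconds)
{
	double framerate = static_cast<double>(fps_num) / (static_cast<double>(fps_den) * divisor);
	return static_cast<int>(interval_seconds * framerate);
}

int main()
{
	// 60000/1001 (~59.94 FPS), no frame skipping, 2 second interval -> 119 frames.
	std::printf("%d\n", gop_size_from_seconds(60000, 1001, 1, 2.0));
	return 0;
}
```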
- if ((obs_data_get_int(settings, ST_KEY_FFMPEG_GPU) != -1) || (obs_encoder_scaling_enabled(_self)) - || (video_output_get_info(obs_encoder_video(_self))->format != VIDEO_FORMAT_NV12)) { - throw std::runtime_error( - "Selected settings prevent the use of hardware encoding, falling back to software."); + if ((obs_data_get_int(settings, ST_KEY_FFMPEG_GPU) != -1) || (obs_encoder_scaling_enabled(_self)) || (video_output_get_info(obs_encoder_video(_self))->format != VIDEO_FORMAT_NV12)) { + throw std::runtime_error("Selected settings prevent the use of hardware encoding, falling back to software."); } #ifdef WIN32 @@ -201,8 +199,7 @@ bool ffmpeg_instance::update(obs_data_t* settings) bool support_reconfig_gpu = false; bool support_reconfig_keyframes = false; if (_handler) { - support_reconfig = _handler->supports_reconfigure(_factory, support_reconfig_threads, support_reconfig_gpu, - support_reconfig_keyframes); + support_reconfig = _handler->supports_reconfigure(_factory, support_reconfig_threads, support_reconfig_gpu, support_reconfig_keyframes); } if (!_context->internal) { @@ -259,10 +256,8 @@ bool ffmpeg_instance::update(obs_data_t* settings) bool is_seconds = (kf_type == 0); if (is_seconds) { - double framerate = - static_cast(ovi.fps_num) / (static_cast(ovi.fps_den) * _framerate_divisor); - _context->gop_size = - static_cast(obs_data_get_double(settings, ST_KEY_KEYFRAMES_INTERVAL_SECONDS) * framerate); + double framerate = static_cast(ovi.fps_num) / (static_cast(ovi.fps_den) * _framerate_divisor); + _context->gop_size = static_cast(obs_data_get_double(settings, ST_KEY_KEYFRAMES_INTERVAL_SECONDS) * framerate); } else { _context->gop_size = static_cast(obs_data_get_int(settings, ST_KEY_KEYFRAMES_INTERVAL_FRAMES)); } @@ -291,36 +286,20 @@ bool ffmpeg_instance::update(obs_data_t* settings) if (!_context->internal || support_reconfig) { DLOG_INFO("[%s] Configuration:", _codec->name); DLOG_INFO("[%s] FFmpeg:", _codec->name); - DLOG_INFO("[%s] Custom Settings: %s", _codec->name, - obs_data_get_string(settings, ST_KEY_FFMPEG_CUSTOMSETTINGS)); - DLOG_INFO("[%s] Standard Compliance: %s", _codec->name, - ::streamfx::ffmpeg::tools::get_std_compliance_name(_context->strict_std_compliance)); - DLOG_INFO("[%s] Threading: %s (with %i threads)", _codec->name, - ::streamfx::ffmpeg::tools::get_thread_type_name(_context->thread_type), _context->thread_count); + DLOG_INFO("[%s] Custom Settings: %s", _codec->name, obs_data_get_string(settings, ST_KEY_FFMPEG_CUSTOMSETTINGS)); + DLOG_INFO("[%s] Standard Compliance: %s", _codec->name, ::streamfx::ffmpeg::tools::get_std_compliance_name(_context->strict_std_compliance)); + DLOG_INFO("[%s] Threading: %s (with %i threads)", _codec->name, ::streamfx::ffmpeg::tools::get_thread_type_name(_context->thread_type), _context->thread_count); DLOG_INFO("[%s] Video:", _codec->name); if (_hwinst) { - DLOG_INFO("[%s] Texture: %" PRId32 "x%" PRId32 " %s %s %s", _codec->name, _context->width, - _context->height, ::streamfx::ffmpeg::tools::get_pixel_format_name(_context->sw_pix_fmt), - ::streamfx::ffmpeg::tools::get_color_space_name(_context->colorspace), - av_color_range_name(_context->color_range)); + DLOG_INFO("[%s] Texture: %" PRId32 "x%" PRId32 " %s %s %s", _codec->name, _context->width, _context->height, ::streamfx::ffmpeg::tools::get_pixel_format_name(_context->sw_pix_fmt), ::streamfx::ffmpeg::tools::get_color_space_name(_context->colorspace), av_color_range_name(_context->color_range)); } else { - DLOG_INFO("[%s] Input: %" PRId32 "x%" PRId32 " %s %s %s", _codec->name, 
_scaler.get_source_width(), - _scaler.get_source_height(), - ::streamfx::ffmpeg::tools::get_pixel_format_name(_scaler.get_source_format()), - ::streamfx::ffmpeg::tools::get_color_space_name(_scaler.get_source_colorspace()), - _scaler.is_source_full_range() ? "Full" : "Partial"); - DLOG_INFO("[%s] Output: %" PRId32 "x%" PRId32 " %s %s %s", _codec->name, _scaler.get_target_width(), - _scaler.get_target_height(), - ::streamfx::ffmpeg::tools::get_pixel_format_name(_scaler.get_target_format()), - ::streamfx::ffmpeg::tools::get_color_space_name(_scaler.get_target_colorspace()), - _scaler.is_target_full_range() ? "Full" : "Partial"); + DLOG_INFO("[%s] Input: %" PRId32 "x%" PRId32 " %s %s %s", _codec->name, _scaler.get_source_width(), _scaler.get_source_height(), ::streamfx::ffmpeg::tools::get_pixel_format_name(_scaler.get_source_format()), ::streamfx::ffmpeg::tools::get_color_space_name(_scaler.get_source_colorspace()), _scaler.is_source_full_range() ? "Full" : "Partial"); + DLOG_INFO("[%s] Output: %" PRId32 "x%" PRId32 " %s %s %s", _codec->name, _scaler.get_target_width(), _scaler.get_target_height(), ::streamfx::ffmpeg::tools::get_pixel_format_name(_scaler.get_target_format()), ::streamfx::ffmpeg::tools::get_color_space_name(_scaler.get_target_colorspace()), _scaler.is_target_full_range() ? "Full" : "Partial"); if (!_hwinst) DLOG_INFO("[%s] On GPU Index: %lli", _codec->name, obs_data_get_int(settings, ST_KEY_FFMPEG_GPU)); } - DLOG_INFO("[%s] Framerate: %" PRId32 "/%" PRId32 " (%f FPS)", _codec->name, _context->time_base.den, - _context->time_base.num, - static_cast(_context->time_base.den) / static_cast(_context->time_base.num)); + DLOG_INFO("[%s] Framerate: %" PRId32 "/%" PRId32 " (%f FPS)", _codec->name, _context->time_base.den, _context->time_base.num, static_cast(_context->time_base.den) / static_cast(_context->time_base.num)); DLOG_INFO("[%s] Keyframes: ", _codec->name); if (_context->keyint_min != _context->gop_size) { @@ -391,16 +370,12 @@ bool ffmpeg_instance::encode_video(struct encoder_frame* frame, struct encoder_p vframe->color_trc = _context->color_trc; vframe->pts = frame->pts; - if ((_scaler.is_source_full_range() == _scaler.is_target_full_range()) - && (_scaler.get_source_colorspace() == _scaler.get_target_colorspace()) - && (_scaler.get_source_format() == _scaler.get_target_format())) { + if ((_scaler.is_source_full_range() == _scaler.is_target_full_range()) && (_scaler.get_source_colorspace() == _scaler.get_target_colorspace()) && (_scaler.get_source_format() == _scaler.get_target_format())) { copy_data(frame, vframe.get()); } else { - int res = _scaler.convert(reinterpret_cast(frame->data), reinterpret_cast(frame->linesize), - 0, _context->height, vframe->data, vframe->linesize); + int res = _scaler.convert(reinterpret_cast(frame->data), reinterpret_cast(frame->linesize), 0, _context->height, vframe->data, vframe->linesize); if (res <= 0) { - DLOG_ERROR("Failed to convert frame: %s (%" PRId32 ").", - ::streamfx::ffmpeg::tools::get_error_description(res), res); + DLOG_ERROR("Failed to convert frame: %s (%" PRId32 ").", ::streamfx::ffmpeg::tools::get_error_description(res), res); return false; } } @@ -412,8 +387,7 @@ bool ffmpeg_instance::encode_video(struct encoder_frame* frame, struct encoder_p return true; } -bool ffmpeg_instance::encode_video(uint32_t handle, int64_t pts, uint64_t lock_key, uint64_t* next_key, - struct encoder_packet* packet, bool* received_packet) +bool ffmpeg_instance::encode_video(uint32_t handle, int64_t pts, uint64_t lock_key, uint64_t* next_key, 
struct encoder_packet* packet, bool* received_packet) { if ((_framerate_divisor > 1) && (pts % _framerate_divisor != 0)) { *next_key = lock_key; @@ -486,12 +460,7 @@ void ffmpeg_instance::initialize_sw(obs_data_t* settings) // Create Scaler if (!_scaler.initialize(SWS_SINC | SWS_FULL_CHR_H_INT | SWS_FULL_CHR_H_INP | SWS_ACCURATE_RND | SWS_BITEXACT)) { std::stringstream sstr; - sstr << "Initializing scaler failed for conversion from '" - << ::streamfx::ffmpeg::tools::get_pixel_format_name(_scaler.get_source_format()) << "' to '" - << ::streamfx::ffmpeg::tools::get_pixel_format_name(_scaler.get_target_format()) - << "' with color space '" - << ::streamfx::ffmpeg::tools::get_color_space_name(_scaler.get_source_colorspace()) << "' and " - << (_scaler.is_source_full_range() ? "full" : "partial") << " range."; + sstr << "Initializing scaler failed for conversion from '" << ::streamfx::ffmpeg::tools::get_pixel_format_name(_scaler.get_source_format()) << "' to '" << ::streamfx::ffmpeg::tools::get_pixel_format_name(_scaler.get_target_format()) << "' with color space '" << ::streamfx::ffmpeg::tools::get_color_space_name(_scaler.get_source_colorspace()) << "' and " << (_scaler.is_source_full_range() ? "full" : "partial") << " range."; throw std::runtime_error(sstr.str()); } } @@ -526,8 +495,7 @@ void ffmpeg_instance::initialize_hw(obs_data_t*) if (int32_t res = av_hwframe_ctx_init(_context->hw_frames_ctx); res < 0) { std::array buffer; - int len = snprintf(buffer.data(), buffer.size(), "Failed initialize hardware context: %s (%" PRIu32 ")", - ::streamfx::ffmpeg::tools::get_error_description(res), res); + int len = snprintf(buffer.data(), buffer.size(), "Failed initialize hardware context: %s (%" PRIu32 ")", ::streamfx::ffmpeg::tools::get_error_description(res), res); throw std::runtime_error(std::string(buffer.data(), buffer.data() + len)); } #endif @@ -637,8 +605,7 @@ int ffmpeg_instance::receive_packet(bool* received_packet, struct encoder_packet uint8_t* tmp_sei; std::size_t sz_packet, sz_header, sz_sei; - obs_extract_avc_headers(_packet->data, static_cast(_packet->size), &tmp_packet, &sz_packet, - &tmp_header, &sz_header, &tmp_sei, &sz_sei); + obs_extract_avc_headers(_packet->data, static_cast(_packet->size), &tmp_packet, &sz_packet, &tmp_header, &sz_header, &tmp_sei, &sz_sei); if (sz_header) { _extra_data.resize(sz_header); @@ -691,7 +658,7 @@ int ffmpeg_instance::receive_packet(bool* received_packet, struct encoder_packet } else if (side_data.type == AV_PKT_DATA_QUALITY_STATS) { // Decisions based on picture type, if present. switch (side_data.data[sizeof(uint32_t)]) { - case AV_PICTURE_TYPE_I: // I-Frame + case AV_PICTURE_TYPE_I: // I-Frame case AV_PICTURE_TYPE_SI: // Switching I-Frame if (_packet->flags & AV_PKT_FLAG_KEY) { // Recovery only via IDR-Frame. @@ -703,23 +670,23 @@ int ffmpeg_instance::receive_packet(bool* received_packet, struct encoder_packet packet->drop_priority = 2; // OBS_NAL_PRIORITY_HIGH } break; - case AV_PICTURE_TYPE_P: // P-Frame + case AV_PICTURE_TYPE_P: // P-Frame case AV_PICTURE_TYPE_SP: // Switching P-Frame // Recovery via I- or IDR-Frame. packet->priority = 1; // OBS_NAL_PRIORITY_LOW packet->drop_priority = 2; // OBS_NAL_PRIORITY_HIGH break; - case AV_PICTURE_TYPE_B: // B-Frame + case AV_PICTURE_TYPE_B: // B-Frame // Recovery via I- or IDR-Frame. packet->priority = 0; // OBS_NAL_PRIORITY_DISPOSABLE packet->drop_priority = 2; // OBS_NAL_PRIORITY_HIGH break; - case AV_PICTURE_TYPE_BI: // BI-Frame, theoretically identical to I-Frame. 
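The picture-type switch in this hunk can be read as a small lookup table. The sketch below is an editorial restatement of the mapping visible in the diff, not code from the patch; the I/SI cases also consult AV_PKT_FLAG_KEY and are omitted here because their keyframe branch is not shown in this excerpt.

```cpp
extern "C" {
#include <libavutil/avutil.h> // AVPictureType
}
#include <utility>

// first = packet->priority, second = packet->drop_priority
// (OBS NAL priorities: 0 disposable, 1 low, 2 high, 3 highest).
static std::pair<int, int> obs_priorities_for(AVPictureType type)
{
	switch (type) {
	case AV_PICTURE_TYPE_P:
	case AV_PICTURE_TYPE_SP:
		return {1, 2}; // recoverable via any I- or IDR-frame
	case AV_PICTURE_TYPE_B:
		return {0, 2}; // disposable
	case AV_PICTURE_TYPE_BI:
		return {2, 2}; // theoretically identical to an I-frame
	default:
		return {2, 3}; // unknown type: recovery only via IDR-frame
	}
}
```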
+ case AV_PICTURE_TYPE_BI: // BI-Frame, theoretically identical to I-Frame. // Recovery via I- or IDR-Frame. packet->priority = 2; // OBS_NAL_PRIORITY_HIGH packet->drop_priority = 2; // OBS_NAL_PRIORITY_HIGH break; - default: // Unknown picture type. + default: // Unknown picture type. // Recovery only via IDR-Frame packet->priority = 2; // OBS_NAL_PRIORITY_HIGH packet->drop_priority = 3; // OBS_NAL_PRIORITY_HIGHEST @@ -781,8 +748,7 @@ bool ffmpeg_instance::encode_avframe(std::shared_ptr frame, encoder_pac sent_frame = true; break; default: - DLOG_ERROR("Failed to encode frame: %s (%" PRId32 ").", - ::streamfx::ffmpeg::tools::get_error_description(res), res); + DLOG_ERROR("Failed to encode frame: %s (%" PRId32 ").", ::streamfx::ffmpeg::tools::get_error_description(res), res); return false; } } @@ -807,8 +773,7 @@ bool ffmpeg_instance::encode_avframe(std::shared_ptr frame, encoder_pac } break; default: - DLOG_ERROR("Failed to receive packet: %s (%" PRId32 ").", - ::streamfx::ffmpeg::tools::get_error_description(res), res); + DLOG_ERROR("Failed to receive packet: %s (%" PRId32 ").", ::streamfx::ffmpeg::tools::get_error_description(res), res); return false; } } @@ -867,7 +832,7 @@ void ffmpeg_instance::parse_ffmpeg_commandline(std::string_view text) // Not supported yet. p += 3; } else if (here2 == 'u') { // 4 or 8 wide Unicode. - // Not supported yet. + // Not supported yet. } else if (here2 == 'a') { opt_stream << '\a'; p++; @@ -955,8 +920,7 @@ void ffmpeg_instance::parse_ffmpeg_commandline(std::string_view text) int res = av_opt_set(_context, key.c_str(), value.c_str(), AV_OPT_SEARCH_CHILDREN); if (res < 0) { - DLOG_WARNING("Option '%s' (key: '%s', value: '%s') encountered error: %s", opt.c_str(), key.c_str(), - value.c_str(), ::streamfx::ffmpeg::tools::get_error_description(res)); + DLOG_WARNING("Option '%s' (key: '%s', value: '%s') encountered error: %s", opt.c_str(), key.c_str(), value.c_str(), ::streamfx::ffmpeg::tools::get_error_description(res)); } } catch (const std::exception& ex) { DLOG_ERROR("Option '%s' encountered exception: %s", opt.c_str(), ex.what()); @@ -1088,8 +1052,7 @@ obs_properties_t* ffmpeg_factory::get_properties2(instance_t* data) #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(props, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::encoder::ffmpeg::ffmpeg_factory::on_manual_open, this); + obs_properties_add_button2(props, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::encoder::ffmpeg::ffmpeg_factory::on_manual_open, this); } #endif @@ -1109,23 +1072,17 @@ obs_properties_t* ffmpeg_factory::get_properties2(instance_t* data) } { // Key-Frame Interval Type - auto p = - obs_properties_add_list(grp, ST_KEY_KEYFRAMES_INTERVALTYPE, D_TRANSLATE(ST_I18N_KEYFRAMES_INTERVALTYPE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_KEYFRAMES_INTERVALTYPE, D_TRANSLATE(ST_I18N_KEYFRAMES_INTERVALTYPE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback(p, modified_keyframes); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_KEYFRAMES_INTERVALTYPE_("Seconds")), 0); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_KEYFRAMES_INTERVALTYPE_("Frames")), 1); } { // Key-Frame Interval Seconds - auto p = obs_properties_add_float(grp, ST_KEY_KEYFRAMES_INTERVAL_SECONDS, - D_TRANSLATE(ST_I18N_KEYFRAMES_INTERVAL), 0.00, - std::numeric_limits::max(), 0.01); + auto p = obs_properties_add_float(grp, ST_KEY_KEYFRAMES_INTERVAL_SECONDS, D_TRANSLATE(ST_I18N_KEYFRAMES_INTERVAL), 0.00, std::numeric_limits::max(), 
0.01); obs_property_float_set_suffix(p, " seconds"); } { // Key-Frame Interval Frames - auto p = - obs_properties_add_int(grp, ST_KEY_KEYFRAMES_INTERVAL_FRAMES, D_TRANSLATE(ST_I18N_KEYFRAMES_INTERVAL), - 0, std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(grp, ST_KEY_KEYFRAMES_INTERVAL_FRAMES, D_TRANSLATE(ST_I18N_KEYFRAMES_INTERVAL), 0, std::numeric_limits::max(), 1); obs_property_int_set_suffix(p, " frames"); } } @@ -1139,19 +1096,15 @@ obs_properties_t* ffmpeg_factory::get_properties2(instance_t* data) } { // Custom Settings - auto p = - obs_properties_add_text(grp, ST_KEY_FFMPEG_CUSTOMSETTINGS, D_TRANSLATE(ST_I18N_FFMPEG_CUSTOMSETTINGS), - obs_text_type::OBS_TEXT_DEFAULT); + auto p = obs_properties_add_text(grp, ST_KEY_FFMPEG_CUSTOMSETTINGS, D_TRANSLATE(ST_I18N_FFMPEG_CUSTOMSETTINGS), obs_text_type::OBS_TEXT_DEFAULT); } if (_handler && _handler->is_hardware_encoder(this)) { - auto p = obs_properties_add_int(grp, ST_KEY_FFMPEG_GPU, D_TRANSLATE(ST_I18N_FFMPEG_GPU), -1, - std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(grp, ST_KEY_FFMPEG_GPU, D_TRANSLATE(ST_I18N_FFMPEG_GPU), -1, std::numeric_limits::max(), 1); } if (_handler && _handler->has_threading_support(this)) { - auto p = obs_properties_add_int_slider(grp, ST_KEY_FFMPEG_THREADS, D_TRANSLATE(ST_I18N_FFMPEG_THREADS), 0, - static_cast(std::thread::hardware_concurrency()) * 2, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_FFMPEG_THREADS, D_TRANSLATE(ST_I18N_FFMPEG_THREADS), 0, static_cast(std::thread::hardware_concurrency()) * 2, 1); } { // Frame Skipping @@ -1160,15 +1113,13 @@ obs_properties_t* ffmpeg_factory::get_properties2(instance_t* data) throw std::runtime_error("obs_get_video_info failed unexpectedly."); } - auto p = obs_properties_add_list(grp, ST_KEY_FFMPEG_FRAMERATE, D_TRANSLATE(ST_I18N_FFMPEG_FRAMERATE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_FFMPEG_FRAMERATE, D_TRANSLATE(ST_I18N_FFMPEG_FRAMERATE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); // For now, an arbitrary limit of 1/10th the Framerate should be fine. 
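The loop that follows this comment populates the frame-skipping dropdown. As a standalone sketch of the entries it produces, assuming a hypothetical 60/1 output rate (the real values come from obs_get_video_info):

```cpp
#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main()
{
	// Divisors 1..10 give at most a 10x reduction of the configured framerate,
	// matching the "1/10th the Framerate" limit noted above.
	uint32_t fps_num = 60, fps_den = 1; // hypothetical OBS output rate
	for (uint32_t divisor = 1; divisor <= 10; divisor++) {
		double fps = (static_cast<double>(fps_num) / divisor) / static_cast<double>(fps_den);
		std::printf("%8.2f (%" PRIu32 "/%" PRIu32 ")\n", fps, fps_num, fps_den * divisor);
	}
	return 0;
}
```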
std::vector buf{size_t{256}, 0, std::allocator()}; for (uint32_t divisor = 1; divisor <= 10; divisor++) { double fps_num = static_cast(ovi.fps_num) / static_cast(divisor); double fps = fps_num / static_cast(ovi.fps_den); - snprintf(buf.data(), buf.size(), "%8.2f (%" PRIu32 "/%" PRIu32 ")", fps, ovi.fps_num, - ovi.fps_den * divisor); + snprintf(buf.data(), buf.size(), "%8.2f (%" PRIu32 "/%" PRIu32 ")", fps, ovi.fps_num, ovi.fps_den * divisor); obs_property_list_add_int(p, buf.data(), divisor); } } diff --git a/source/encoders/encoder-ffmpeg.hpp b/source/encoders/encoder-ffmpeg.hpp index 9631b119..d2a76468 100644 --- a/source/encoders/encoder-ffmpeg.hpp +++ b/source/encoders/encoder-ffmpeg.hpp @@ -73,8 +73,7 @@ namespace streamfx::encoder::ffmpeg { bool encode_video(struct encoder_frame* frame, struct encoder_packet* packet, bool* received_packet) override; - bool encode_video(uint32_t handle, int64_t pts, uint64_t lock_key, uint64_t* next_key, - struct encoder_packet* packet, bool* received_packet) override; + bool encode_video(uint32_t handle, int64_t pts, uint64_t lock_key, uint64_t* next_key, struct encoder_packet* packet, bool* received_packet) override; bool get_extra_data(uint8_t** extra_data, size_t* size) override; diff --git a/source/encoders/handlers/amf_h264_handler.cpp b/source/encoders/handlers/amf_h264_handler.cpp index e523415e..18c42653 100644 --- a/source/encoders/handlers/amf_h264_handler.cpp +++ b/source/encoders/handlers/amf_h264_handler.cpp @@ -32,14 +32,10 @@ static std::map profiles{ }; static std::map levels{ - {level::L1_0, "1.0"}, {level::L1_0b, "1.0b"}, {level::L1_1, "1.1"}, {level::L1_2, "1.2"}, {level::L1_3, "1.3"}, - {level::L2_0, "2.0"}, {level::L2_1, "2.1"}, {level::L2_2, "2.2"}, {level::L3_0, "3.0"}, {level::L3_1, "3.1"}, - {level::L3_2, "3.2"}, {level::L4_0, "4.0"}, {level::L4_1, "4.1"}, {level::L4_2, "4.2"}, {level::L5_0, "5.0"}, - {level::L5_1, "5.1"}, {level::L5_2, "5.2"}, {level::L6_0, "6.0"}, {level::L6_1, "6.1"}, {level::L6_2, "6.2"}, + {level::L1_0, "1.0"}, {level::L1_0b, "1.0b"}, {level::L1_1, "1.1"}, {level::L1_2, "1.2"}, {level::L1_3, "1.3"}, {level::L2_0, "2.0"}, {level::L2_1, "2.1"}, {level::L2_2, "2.2"}, {level::L3_0, "3.0"}, {level::L3_1, "3.1"}, {level::L3_2, "3.2"}, {level::L4_0, "4.0"}, {level::L4_1, "4.1"}, {level::L4_2, "4.2"}, {level::L5_0, "5.0"}, {level::L5_1, "5.1"}, {level::L5_2, "5.2"}, {level::L6_0, "6.0"}, {level::L6_1, "6.1"}, {level::L6_2, "6.2"}, }; -void amf_h264_handler::adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, - std::string& codec_id) +void amf_h264_handler::adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, std::string& codec_id) { name = "AMD AMF H.264/AVC (via FFmpeg)"; if (!amf::is_available()) @@ -75,8 +71,7 @@ bool amf_h264_handler::has_pixel_format_support(ffmpeg_factory* instance) return false; } -void amf_h264_handler::get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode) +void amf_h264_handler::get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode) { if (!context) { this->get_encoder_properties(props, codec); @@ -116,10 +111,8 @@ void amf_h264_handler::log_options(obs_data_t* settings, const AVCodec* codec, A amf::log_options(settings, codec, context); DLOG_INFO("[%s] H.264/AVC:", codec->name); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, context->priv_data, "profile", " Profile", - [](int64_t v, 
std::string_view o) { return std::string(o); }); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, context->priv_data, "level", " Level", - [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, context->priv_data, "profile", " Profile", [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, context->priv_data, "level", " Level", [](int64_t v, std::string_view o) { return std::string(o); }); } void amf_h264_handler::get_encoder_properties(obs_properties_t* props, const AVCodec* codec) @@ -131,8 +124,7 @@ void amf_h264_handler::get_encoder_properties(obs_properties_t* props, const AVC obs_properties_add_group(props, S_CODEC_H264, D_TRANSLATE(S_CODEC_H264), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_PROFILE, D_TRANSLATE(S_CODEC_H264_PROFILE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_PROFILE, D_TRANSLATE(S_CODEC_H264_PROFILE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_DEFAULT), static_cast(profile::UNKNOWN)); for (auto const kv : profiles) { std::string trans = std::string(S_CODEC_H264_PROFILE) + "." + kv.second; @@ -140,8 +132,7 @@ void amf_h264_handler::get_encoder_properties(obs_properties_t* props, const AVC } } { - auto p = obs_properties_add_list(grp, ST_KEY_LEVEL, D_TRANSLATE(S_CODEC_H264_LEVEL), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_LEVEL, D_TRANSLATE(S_CODEC_H264_LEVEL), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), static_cast(level::UNKNOWN)); for (auto const kv : levels) { obs_property_list_add_int(p, kv.second.c_str(), static_cast(kv.first)); @@ -157,14 +148,12 @@ void amf_h264_handler::get_runtime_properties(obs_properties_t* props, const AVC amf::get_runtime_properties(props, codec, context); } -void amf_h264_handler::migrate(obs_data_t* settings, std::uint64_t version, const AVCodec* codec, - AVCodecContext* context) +void amf_h264_handler::migrate(obs_data_t* settings, std::uint64_t version, const AVCodec* codec, AVCodecContext* context) { amf::migrate(settings, version, codec, context); } -void amf_h264_handler::override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, - AVCodecContext* context) +void amf_h264_handler::override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) { target_format = AV_PIX_FMT_NV12; } diff --git a/source/encoders/handlers/amf_h264_handler.hpp b/source/encoders/handlers/amf_h264_handler.hpp index 8644b3a0..7747bb75 100644 --- a/source/encoders/handlers/amf_h264_handler.hpp +++ b/source/encoders/handlers/amf_h264_handler.hpp @@ -22,8 +22,7 @@ namespace streamfx::encoder::ffmpeg::handler { virtual ~amf_h264_handler(){}; public /*factory*/: - void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, - std::string& codec_id) override; + void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, std::string& codec_id) override; void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, bool hw_encode) override; @@ -42,11 +41,9 @@ namespace streamfx::encoder::ffmpeg::handler { bool has_pixel_format_support(ffmpeg_factory* instance) override; public 
/*settings*/: - void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode) override; + void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode) override; - void migrate(obs_data_t* settings, std::uint64_t version, const AVCodec* codec, - AVCodecContext* context) override; + void migrate(obs_data_t* settings, std::uint64_t version, const AVCodec* codec, AVCodecContext* context) override; void update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; @@ -55,9 +52,7 @@ namespace streamfx::encoder::ffmpeg::handler { void log_options(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; public /*instance*/: - - void override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, - AVCodecContext* context) override; + void override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; private: void get_encoder_properties(obs_properties_t* props, const AVCodec* codec); diff --git a/source/encoders/handlers/amf_hevc_handler.cpp b/source/encoders/handlers/amf_hevc_handler.cpp index 45b163d9..2646e7f7 100644 --- a/source/encoders/handlers/amf_hevc_handler.cpp +++ b/source/encoders/handlers/amf_hevc_handler.cpp @@ -39,13 +39,10 @@ static std::map tiers{ }; static std::map levels{ - {level::L1_0, "1.0"}, {level::L2_0, "2.0"}, {level::L2_1, "2.1"}, {level::L3_0, "3.0"}, {level::L3_1, "3.1"}, - {level::L4_0, "4.0"}, {level::L4_1, "4.1"}, {level::L5_0, "5.0"}, {level::L5_1, "5.1"}, {level::L5_2, "5.2"}, - {level::L6_0, "6.0"}, {level::L6_1, "6.1"}, {level::L6_2, "6.2"}, + {level::L1_0, "1.0"}, {level::L2_0, "2.0"}, {level::L2_1, "2.1"}, {level::L3_0, "3.0"}, {level::L3_1, "3.1"}, {level::L4_0, "4.0"}, {level::L4_1, "4.1"}, {level::L5_0, "5.0"}, {level::L5_1, "5.1"}, {level::L5_2, "5.2"}, {level::L6_0, "6.0"}, {level::L6_1, "6.1"}, {level::L6_2, "6.2"}, }; -void amf_hevc_handler::adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, - std::string& codec_id) +void amf_hevc_handler::adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, std::string& codec_id) { name = "AMD AMF H.265/HEVC (via FFmpeg)"; if (!amf::is_available()) @@ -127,12 +124,9 @@ void amf_hevc_handler::log_options(obs_data_t* settings, const AVCodec* codec, A amf::log_options(settings, codec, context); DLOG_INFO("[%s] H.265/HEVC:", codec->name); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, "profile", " Profile", - [](int64_t v, std::string_view o) { return std::string(o); }); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, "level", " Level", - [](int64_t v, std::string_view o) { return std::string(o); }); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, "tier", " Tier", - [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, "profile", " Profile", [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, "level", " Level", [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, "tier", " Tier", [](int64_t v, std::string_view o) { return std::string(o); }); } void amf_hevc_handler::get_encoder_properties(obs_properties_t* props, const AVCodec* codec) @@ -144,8 
+138,7 @@ void amf_hevc_handler::get_encoder_properties(obs_properties_t* props, const AVC obs_properties_add_group(props, S_CODEC_HEVC, D_TRANSLATE(S_CODEC_HEVC), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_PROFILE, D_TRANSLATE(S_CODEC_HEVC_PROFILE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_PROFILE, D_TRANSLATE(S_CODEC_HEVC_PROFILE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_DEFAULT), static_cast(profile::UNKNOWN)); for (auto const kv : profiles) { std::string trans = std::string(S_CODEC_HEVC_PROFILE) + "." + kv.second; @@ -153,8 +146,7 @@ void amf_hevc_handler::get_encoder_properties(obs_properties_t* props, const AVC } } { - auto p = obs_properties_add_list(grp, ST_KEY_TIER, D_TRANSLATE(S_CODEC_HEVC_TIER), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_TIER, D_TRANSLATE(S_CODEC_HEVC_TIER), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_DEFAULT), static_cast(tier::UNKNOWN)); for (auto const kv : tiers) { std::string trans = std::string(S_CODEC_HEVC_TIER) + "." + kv.second; @@ -162,8 +154,7 @@ void amf_hevc_handler::get_encoder_properties(obs_properties_t* props, const AVC } } { - auto p = obs_properties_add_list(grp, ST_KEY_LEVEL, D_TRANSLATE(S_CODEC_HEVC_LEVEL), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_LEVEL, D_TRANSLATE(S_CODEC_HEVC_LEVEL), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), static_cast(level::UNKNOWN)); for (auto const kv : levels) { obs_property_list_add_int(p, kv.second.c_str(), static_cast(kv.first)); @@ -179,8 +170,7 @@ void amf_hevc_handler::get_runtime_properties(obs_properties_t* props, const AVC amf::get_runtime_properties(props, codec, context); } -void streamfx::encoder::ffmpeg::handler::amf_hevc_handler::migrate(obs_data_t* settings, std::uint64_t version, - const AVCodec* codec, AVCodecContext* context) +void streamfx::encoder::ffmpeg::handler::amf_hevc_handler::migrate(obs_data_t* settings, std::uint64_t version, const AVCodec* codec, AVCodecContext* context) { amf::migrate(settings, version, codec, context); } diff --git a/source/encoders/handlers/amf_hevc_handler.hpp b/source/encoders/handlers/amf_hevc_handler.hpp index 85eca630..a404b4cb 100644 --- a/source/encoders/handlers/amf_hevc_handler.hpp +++ b/source/encoders/handlers/amf_hevc_handler.hpp @@ -21,8 +21,7 @@ namespace streamfx::encoder::ffmpeg::handler { virtual ~amf_hevc_handler(){}; public /*factory*/: - virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, - std::string& codec_id); + virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, std::string& codec_id); virtual void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, bool hw_encode); @@ -41,11 +40,9 @@ namespace streamfx::encoder::ffmpeg::handler { virtual bool has_pixel_format_support(ffmpeg_factory* instance); public /*settings*/: - virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode); + virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode); - virtual void migrate(obs_data_t* settings, std::uint64_t version, const AVCodec* codec, - AVCodecContext* 
context); + virtual void migrate(obs_data_t* settings, std::uint64_t version, const AVCodec* codec, AVCodecContext* context); virtual void update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context); diff --git a/source/encoders/handlers/amf_shared.cpp b/source/encoders/handlers/amf_shared.cpp index 551b01eb..4be507c2 100644 --- a/source/encoders/handlers/amf_shared.cpp +++ b/source/encoders/handlers/amf_shared.cpp @@ -181,8 +181,7 @@ void amf::get_properties_pre(obs_properties_t* props, const AVCodec* codec) obs_property_text_set_info_word_wrap(p, true); } - auto p = obs_properties_add_list(props, ST_KEY_PRESET, D_TRANSLATE(ST_I18N_PRESET), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(props, ST_KEY_PRESET, D_TRANSLATE(ST_I18N_PRESET), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); for (auto kv : presets) { obs_property_list_add_int(p, D_TRANSLATE(kv.second.c_str()), static_cast(kv.first)); } @@ -195,58 +194,46 @@ void amf::get_properties_post(obs_properties_t* props, const AVCodec* codec) obs_properties_add_group(props, ST_I18N_RATECONTROL, D_TRANSLATE(ST_I18N_RATECONTROL), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_RATECONTROL_MODE, D_TRANSLATE(ST_I18N_RATECONTROL_MODE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_RATECONTROL_MODE, D_TRANSLATE(ST_I18N_RATECONTROL_MODE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback(p, modified_ratecontrol); for (auto kv : ratecontrolmodes) { obs_property_list_add_int(p, D_TRANSLATE(kv.second.c_str()), static_cast(kv.first)); } } - streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_LOOKAHEAD, - D_TRANSLATE(ST_I18N_RATECONTROL_LOOKAHEAD)); - streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_FRAMESKIPPING, - D_TRANSLATE(ST_I18N_RATECONTROL_FRAMESKIPPING)); + streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_LOOKAHEAD, D_TRANSLATE(ST_I18N_RATECONTROL_LOOKAHEAD)); + streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_FRAMESKIPPING, D_TRANSLATE(ST_I18N_RATECONTROL_FRAMESKIPPING)); } { obs_properties_t* grp = obs_properties_create(); - obs_properties_add_group(props, ST_I18N_RATECONTROL_LIMITS, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS), - OBS_GROUP_NORMAL, grp); + obs_properties_add_group(props, ST_I18N_RATECONTROL_LIMITS, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BITRATE_TARGET, - D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BITRATE_TARGET), -1, - std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BITRATE_TARGET, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BITRATE_TARGET), -1, std::numeric_limits::max(), 1); obs_property_int_set_suffix(p, " kbit/s"); } { - auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BITRATE_MAXIMUM, - D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BITRATE_MAXIMUM), -1, - std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BITRATE_MAXIMUM, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BITRATE_MAXIMUM), -1, std::numeric_limits::max(), 1); obs_property_int_set_suffix(p, " kbit/s"); } { - auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BUFFERSIZE, - D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BUFFERSIZE), 0, - std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BUFFERSIZE, 
D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BUFFERSIZE), 0, std::numeric_limits::max(), 1); obs_property_int_set_suffix(p, " kbit"); } } { obs_properties_t* grp = obs_properties_create(); - obs_properties_add_group(props, ST_I18N_RATECONTROL_QP, D_TRANSLATE(ST_I18N_RATECONTROL_QP), OBS_GROUP_NORMAL, - grp); + obs_properties_add_group(props, ST_I18N_RATECONTROL_QP, D_TRANSLATE(ST_I18N_RATECONTROL_QP), OBS_GROUP_NORMAL, grp); obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_I, D_TRANSLATE(ST_I18N_RATECONTROL_QP_I), -1, 51, 1); obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_P, D_TRANSLATE(ST_I18N_RATECONTROL_QP_P), -1, 51, 1); if (std::string_view("amf_h264") == codec->name) { - obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_B, D_TRANSLATE(ST_I18N_RATECONTROL_QP_B), -1, 51, - 1); + obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_B, D_TRANSLATE(ST_I18N_RATECONTROL_QP_B), -1, 51, 1); } } @@ -255,23 +242,18 @@ void amf::get_properties_post(obs_properties_t* props, const AVCodec* codec) obs_properties_add_group(props, ST_I18N_OTHER, D_TRANSLATE(ST_I18N_OTHER), OBS_GROUP_NORMAL, grp); { - auto p = - obs_properties_add_int_slider(grp, ST_KEY_OTHER_BFRAMES, D_TRANSLATE(ST_I18N_OTHER_BFRAMES), -1, 4, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_OTHER_BFRAMES, D_TRANSLATE(ST_I18N_OTHER_BFRAMES), -1, 4, 1); obs_property_int_set_suffix(p, " frames"); } - streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_BFRAMEREFERENCES, - D_TRANSLATE(ST_I18N_OTHER_BFRAMEREFERENCES)); + streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_BFRAMEREFERENCES, D_TRANSLATE(ST_I18N_OTHER_BFRAMEREFERENCES)); { - auto p = obs_properties_add_int_slider(grp, ST_KEY_OTHER_REFERENCEFRAMES, - D_TRANSLATE(ST_I18N_OTHER_REFERENCEFRAMES), -1, 16, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_OTHER_REFERENCEFRAMES, D_TRANSLATE(ST_I18N_OTHER_REFERENCEFRAMES), -1, 16, 1); obs_property_int_set_suffix(p, " frames"); } - streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_ENFORCEHRD, - D_TRANSLATE(ST_I18N_OTHER_ENFORCEHRD)); + streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_ENFORCEHRD, D_TRANSLATE(ST_I18N_OTHER_ENFORCEHRD)); streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_VBAQ, D_TRANSLATE(ST_I18N_OTHER_VBAQ)); - streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_ACCESSUNITDELIMITER, - D_TRANSLATE(ST_I18N_OTHER_ACCESSUNITDELIMITER)); + streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_ACCESSUNITDELIMITER, D_TRANSLATE(ST_I18N_OTHER_ACCESSUNITDELIMITER)); } } @@ -325,14 +307,12 @@ void amf::update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* con } // Look Ahead (Pre-analysis, single frame lookahead) - if (int la = static_cast(obs_data_get_int(settings, ST_KEY_RATECONTROL_LOOKAHEAD)); - !streamfx::util::is_tristate_default(la)) { + if (int la = static_cast(obs_data_get_int(settings, ST_KEY_RATECONTROL_LOOKAHEAD)); !streamfx::util::is_tristate_default(la)) { av_opt_set_int(context->priv_data, "preanalysis", la, AV_OPT_SEARCH_CHILDREN); } // Frame Skipping (Drop frames to maintain bitrate limits) - if (int la = static_cast(obs_data_get_int(settings, ST_KEY_RATECONTROL_FRAMESKIPPING)); - !streamfx::util::is_tristate_default(la)) { + if (int la = static_cast(obs_data_get_int(settings, ST_KEY_RATECONTROL_FRAMESKIPPING)); !streamfx::util::is_tristate_default(la)) { if (std::string_view("amf_h264") == codec->name) { av_opt_set_int(context->priv_data, "frame_skipping", la, AV_OPT_SEARCH_CHILDREN); } 
else { @@ -390,8 +370,7 @@ void amf::update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* con if (int64_t bf = obs_data_get_int(settings, ST_KEY_OTHER_BFRAMES); bf > -1) { context->max_b_frames = static_cast(bf); } - if (int64_t zl = obs_data_get_int(settings, ST_KEY_OTHER_BFRAMEREFERENCES); - !streamfx::util::is_tristate_default(zl)) { + if (int64_t zl = obs_data_get_int(settings, ST_KEY_OTHER_BFRAMEREFERENCES); !streamfx::util::is_tristate_default(zl)) { av_opt_set_int(context->priv_data, "bf_ref", zl, AV_OPT_SEARCH_CHILDREN); } } @@ -408,8 +387,7 @@ void amf::update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* con av_opt_set_int(context->priv_data, "vbaq", v, AV_OPT_SEARCH_CHILDREN); } - if (int64_t v = obs_data_get_int(settings, ST_KEY_OTHER_ACCESSUNITDELIMITER); - !streamfx::util::is_tristate_default(v)) { + if (int64_t v = obs_data_get_int(settings, ST_KEY_OTHER_ACCESSUNITDELIMITER); !streamfx::util::is_tristate_default(v)) { av_opt_set_int(context->priv_data, "aud", v, AV_OPT_SEARCH_CHILDREN); } @@ -423,12 +401,9 @@ void amf::log_options(obs_data_t* settings, const AVCodec* codec, AVCodecContext using namespace ::streamfx::ffmpeg; DLOG_INFO("[%s] AMD AMF:", codec->name); - tools::print_av_option_string2(context, "usage", " Usage", - [](int64_t v, std::string_view o) { return std::string(o); }); - tools::print_av_option_string2(context, "quality", " Preset", - [](int64_t v, std::string_view o) { return std::string(o); }); - tools::print_av_option_string2(context, "rc", " Rate Control", - [](int64_t v, std::string_view o) { return std::string(o); }); + tools::print_av_option_string2(context, "usage", " Usage", [](int64_t v, std::string_view o) { return std::string(o); }); + tools::print_av_option_string2(context, "quality", " Preset", [](int64_t v, std::string_view o) { return std::string(o); }); + tools::print_av_option_string2(context, "rc", " Rate Control", [](int64_t v, std::string_view o) { return std::string(o); }); tools::print_av_option_bool(context, "preanalysis", " Look-Ahead"); if (std::string_view("amf_h264") == codec->name) { tools::print_av_option_bool(context, "frame_skipping", " Frame Skipping"); @@ -463,12 +438,8 @@ void amf::log_options(obs_data_t* settings, const AVCodec* codec, AVCodecContext tools::print_av_option_bool(context, "me_quarter_pel", " Quarter-Pel Motion Estimation"); } -void streamfx::encoder::ffmpeg::handler::amf::get_runtime_properties(obs_properties_t* props, const AVCodec* codec, - AVCodecContext* context) -{} +void streamfx::encoder::ffmpeg::handler::amf::get_runtime_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context) {} -void streamfx::encoder::ffmpeg::handler::amf::migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, - AVCodecContext* context) -{} +void streamfx::encoder::ffmpeg::handler::amf::migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, AVCodecContext* context) {} void streamfx::encoder::ffmpeg::handler::amf::override_update(ffmpeg_instance* instance, obs_data_t* settings) {} diff --git a/source/encoders/handlers/debug_handler.cpp b/source/encoders/handlers/debug_handler.cpp index 35d1b3ca..5d6e8fcf 100644 --- a/source/encoders/handlers/debug_handler.cpp +++ b/source/encoders/handlers/debug_handler.cpp @@ -69,25 +69,7 @@ void debug_handler::get_properties(obs_properties_t*, const AVCodec* codec, AVCo DLOG_INFO("Options for '%s':", codec->name); std::pair opt_type_name[] = { - {AV_OPT_TYPE_FLAGS, "Flags"}, - {AV_OPT_TYPE_INT, "Int"}, - 
{AV_OPT_TYPE_INT64, "Int64"}, - {AV_OPT_TYPE_DOUBLE, "Double"}, - {AV_OPT_TYPE_FLOAT, "Float"}, - {AV_OPT_TYPE_STRING, "String"}, - {AV_OPT_TYPE_RATIONAL, "Rational"}, - {AV_OPT_TYPE_BINARY, "Binary"}, - {AV_OPT_TYPE_DICT, "Dictionary"}, - {AV_OPT_TYPE_UINT64, "Unsigned Int64"}, - {AV_OPT_TYPE_CONST, "Constant"}, - {AV_OPT_TYPE_IMAGE_SIZE, "Image Size"}, - {AV_OPT_TYPE_PIXEL_FMT, "Pixel Format"}, - {AV_OPT_TYPE_SAMPLE_FMT, "Sample Format"}, - {AV_OPT_TYPE_VIDEO_RATE, "Video Rate"}, - {AV_OPT_TYPE_DURATION, "Duration"}, - {AV_OPT_TYPE_COLOR, "Color"}, - {AV_OPT_TYPE_CHANNEL_LAYOUT, "Layout"}, - {AV_OPT_TYPE_BOOL, "Bool"}, + {AV_OPT_TYPE_FLAGS, "Flags"}, {AV_OPT_TYPE_INT, "Int"}, {AV_OPT_TYPE_INT64, "Int64"}, {AV_OPT_TYPE_DOUBLE, "Double"}, {AV_OPT_TYPE_FLOAT, "Float"}, {AV_OPT_TYPE_STRING, "String"}, {AV_OPT_TYPE_RATIONAL, "Rational"}, {AV_OPT_TYPE_BINARY, "Binary"}, {AV_OPT_TYPE_DICT, "Dictionary"}, {AV_OPT_TYPE_UINT64, "Unsigned Int64"}, {AV_OPT_TYPE_CONST, "Constant"}, {AV_OPT_TYPE_IMAGE_SIZE, "Image Size"}, {AV_OPT_TYPE_PIXEL_FMT, "Pixel Format"}, {AV_OPT_TYPE_SAMPLE_FMT, "Sample Format"}, {AV_OPT_TYPE_VIDEO_RATE, "Video Rate"}, {AV_OPT_TYPE_DURATION, "Duration"}, {AV_OPT_TYPE_COLOR, "Color"}, {AV_OPT_TYPE_CHANNEL_LAYOUT, "Layout"}, {AV_OPT_TYPE_BOOL, "Bool"}, }; std::map unit_types; @@ -107,8 +89,7 @@ void debug_handler::get_properties(obs_properties_t*, const AVCodec* codec, AVCo } else { auto unit_type = unit_types.find(opt->unit); if (unit_type == unit_types.end()) { - DLOG_INFO(" [%s] Flag '%s' and help text '%s' with value '%" PRId64 "'.", opt->unit, opt->name, - opt->help, opt->default_val.i64); + DLOG_INFO(" [%s] Flag '%s' and help text '%s' with value '%" PRId64 "'.", opt->unit, opt->name, opt->help, opt->default_val.i64); } else { std::string out; switch (unit_type->second) { @@ -135,8 +116,7 @@ void debug_handler::get_properties(obs_properties_t*, const AVCodec* codec, AVCo break; } - DLOG_INFO(" [%s] Constant '%s' and help text '%s' with value '%s'.", opt->unit, opt->name, - opt->help, out.c_str()); + DLOG_INFO(" [%s] Constant '%s' and help text '%s' with value '%s'.", opt->unit, opt->name, opt->help, out.c_str()); } } } else { @@ -176,8 +156,7 @@ void debug_handler::get_properties(obs_properties_t*, const AVCodec* codec, AVCo DLOG_INFO( " Option '%s'%s%s%s with help '%s' of type '%s' with default value '%s', minimum '%s' and maximum " "'%s'.", - opt->name, opt->unit ? " with unit (" : "", opt->unit ? opt->unit : "", opt->unit ? ")" : "", opt->help, - type_name.c_str(), out.c_str(), minimum.c_str(), maximum.c_str()); + opt->name, opt->unit ? " with unit (" : "", opt->unit ? opt->unit : "", opt->unit ? 
")" : "", opt->help, type_name.c_str(), out.c_str(), minimum.c_str(), maximum.c_str()); } } } diff --git a/source/encoders/handlers/debug_handler.hpp b/source/encoders/handlers/debug_handler.hpp index 9f87505e..a66e79e2 100644 --- a/source/encoders/handlers/debug_handler.hpp +++ b/source/encoders/handlers/debug_handler.hpp @@ -10,11 +10,9 @@ namespace streamfx::encoder::ffmpeg::handler { public: virtual ~debug_handler(){}; - virtual void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, - bool hw_encode) override; + virtual void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, bool hw_encode) override; - virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode) override; + virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode) override; virtual void update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; }; diff --git a/source/encoders/handlers/dnxhd_handler.cpp b/source/encoders/handlers/dnxhd_handler.cpp index 11b43d78..fe4598de 100644 --- a/source/encoders/handlers/dnxhd_handler.cpp +++ b/source/encoders/handlers/dnxhd_handler.cpp @@ -24,13 +24,9 @@ void dnxhd_handler::adjust_info(ffmpeg_factory* fac, const AVCodec*, std::string name = "Avid DNxHR (via FFmpeg)"; } -void dnxhd_handler::override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, - AVCodecContext*) +void dnxhd_handler::override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, AVCodecContext*) { - static const std::array, static_cast(5)> profile_to_format_map{ - std::pair{"dnxhr_lb", AV_PIX_FMT_YUV422P}, std::pair{"dnxhr_sq", AV_PIX_FMT_YUV422P}, - std::pair{"dnxhr_hq", AV_PIX_FMT_YUV422P}, std::pair{"dnxhr_hqx", AV_PIX_FMT_YUV422P10}, - std::pair{"dnxhr_444", AV_PIX_FMT_YUV444P10}}; + static const std::array, static_cast(5)> profile_to_format_map{std::pair{"dnxhr_lb", AV_PIX_FMT_YUV422P}, std::pair{"dnxhr_sq", AV_PIX_FMT_YUV422P}, std::pair{"dnxhr_hq", AV_PIX_FMT_YUV422P}, std::pair{"dnxhr_hqx", AV_PIX_FMT_YUV422P10}, std::pair{"dnxhr_444", AV_PIX_FMT_YUV444P10}}; const char* selected_profile = obs_data_get_string(settings, S_CODEC_DNXHR_PROFILE); for (const auto& kv : profile_to_format_map) { @@ -78,8 +74,7 @@ void dnxhd_handler::get_properties(obs_properties_t* props, const AVCodec* codec return; } } - auto p = obs_properties_add_list(props, S_CODEC_DNXHR_PROFILE, D_TRANSLATE(S_CODEC_DNXHR_PROFILE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(props, S_CODEC_DNXHR_PROFILE, D_TRANSLATE(S_CODEC_DNXHR_PROFILE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); streamfx::ffmpeg::tools::avoption_list_add_entries(ctx->priv_data, "profile", [&p](const AVOption* opt) { if (strcmp(opt->name, "dnxhd") == 0) { @@ -109,6 +104,5 @@ void dnxhd_handler::update(obs_data_t* settings, const AVCodec* codec, AVCodecCo void dnxhd_handler::log_options(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) { DLOG_INFO("[%s] Avid DNxHR:", codec->name); - streamfx::ffmpeg::tools::print_av_option_string2(context, "profile", " Profile", - [](int64_t v, std::string_view o) { return std::string(o); }); + streamfx::ffmpeg::tools::print_av_option_string2(context, "profile", " Profile", [](int64_t v, std::string_view o) { return std::string(o); }); } diff --git a/source/encoders/handlers/dnxhd_handler.hpp 
b/source/encoders/handlers/dnxhd_handler.hpp index cc204f11..919d806b 100644 --- a/source/encoders/handlers/dnxhd_handler.hpp +++ b/source/encoders/handlers/dnxhd_handler.hpp @@ -18,8 +18,7 @@ namespace streamfx::encoder::ffmpeg::handler { virtual ~dnxhd_handler(){}; public /*factory*/: - virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, - std::string& codec_id); + virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, std::string& codec_id); public /*factory*/: void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, bool hw_encode) override; @@ -36,15 +35,13 @@ namespace streamfx::encoder::ffmpeg::handler { bool has_pixel_format_support(ffmpeg_factory* instance) override; public /*settings*/: - void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode) override; + void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode) override; void update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; void log_options(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; public /*instance*/: - void override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, - AVCodecContext* context) override; + void override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; }; } // namespace streamfx::encoder::ffmpeg::handler diff --git a/source/encoders/handlers/handler.hpp b/source/encoders/handlers/handler.hpp index aac77fdb..db83252a 100644 --- a/source/encoders/handlers/handler.hpp +++ b/source/encoders/handlers/handler.hpp @@ -23,11 +23,9 @@ namespace streamfx::encoder::ffmpeg { virtual ~handler(){}; public /*factory*/: - virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, - std::string& codec_id){}; + virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, std::string& codec_id){}; - virtual void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, - bool hw_encode){}; + virtual void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, bool hw_encode){}; virtual std::string_view get_help_url(const AVCodec* codec) { @@ -46,11 +44,9 @@ namespace streamfx::encoder::ffmpeg { virtual bool supports_reconfigure(ffmpeg_factory* instance, bool& threads, bool& gpu, bool& keyframes); public /*settings*/: - virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode){}; + virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode){}; - virtual void migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, - AVCodecContext* context){}; + virtual void migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, AVCodecContext* context){}; virtual void update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context){}; @@ -59,12 +55,9 @@ namespace streamfx::encoder::ffmpeg { virtual void log_options(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context){}; public /*instance*/: + virtual void override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, AVCodecContext* 
context){}; - virtual void override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, - AVCodecContext* context){}; - - virtual void process_avpacket(std::shared_ptr packet, const AVCodec* codec, - AVCodecContext* context){}; + virtual void process_avpacket(std::shared_ptr packet, const AVCodec* codec, AVCodecContext* context){}; }; } // namespace handler } // namespace streamfx::encoder::ffmpeg diff --git a/source/encoders/handlers/nvenc_h264_handler.cpp b/source/encoders/handlers/nvenc_h264_handler.cpp index f897e6ef..1989578c 100644 --- a/source/encoders/handlers/nvenc_h264_handler.cpp +++ b/source/encoders/handlers/nvenc_h264_handler.cpp @@ -92,10 +92,8 @@ void nvenc_h264_handler::log_options(obs_data_t* settings, const AVCodec* codec, nvenc::log_options(settings, codec, context); DLOG_INFO("[%s] H.264/AVC:", codec->name); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, context->priv_data, "profile", " Profile", - [](int64_t v, std::string_view o) { return std::string(o); }); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, context->priv_data, "level", " Level", - [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, context->priv_data, "profile", " Profile", [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, context->priv_data, "level", " Level", [](int64_t v, std::string_view o) { return std::string(o); }); } void nvenc_h264_handler::get_encoder_properties(obs_properties_t* props, const AVCodec* codec) @@ -116,19 +114,16 @@ void nvenc_h264_handler::get_encoder_properties(obs_properties_t* props, const A } { - auto p = obs_properties_add_list(grp, ST_KEY_PROFILE, D_TRANSLATE(S_CODEC_H264_PROFILE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_PROFILE, D_TRANSLATE(S_CODEC_H264_PROFILE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_list_add_string(p, D_TRANSLATE(S_STATE_DEFAULT), ""); - streamfx::ffmpeg::tools::avoption_list_add_entries( - context->priv_data, "profile", [&p](const AVOption* opt) { - char buffer[1024]; - snprintf(buffer, sizeof(buffer), "%s.%s", S_CODEC_H264_PROFILE, opt->name); - obs_property_list_add_string(p, D_TRANSLATE(buffer), opt->name); - }); + streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "profile", [&p](const AVOption* opt) { + char buffer[1024]; + snprintf(buffer, sizeof(buffer), "%s.%s", S_CODEC_H264_PROFILE, opt->name); + obs_property_list_add_string(p, D_TRANSLATE(buffer), opt->name); + }); } { - auto p = obs_properties_add_list(grp, ST_KEY_LEVEL, D_TRANSLATE(S_CODEC_H264_LEVEL), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_LEVEL, D_TRANSLATE(S_CODEC_H264_LEVEL), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "level", [&p](const AVOption* opt) { if (opt->default_val.i64 == 0) { @@ -152,8 +147,7 @@ void nvenc_h264_handler::get_runtime_properties(obs_properties_t* props, const A nvenc::get_runtime_properties(props, codec, context); } -void streamfx::encoder::ffmpeg::handler::nvenc_h264_handler::migrate(obs_data_t* settings, uint64_t version, - const AVCodec* codec, AVCodecContext* context) +void streamfx::encoder::ffmpeg::handler::nvenc_h264_handler::migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, AVCodecContext* context) { 
nvenc::migrate(settings, version, codec, context); diff --git a/source/encoders/handlers/nvenc_h264_handler.hpp b/source/encoders/handlers/nvenc_h264_handler.hpp index 9384e90e..0e316656 100644 --- a/source/encoders/handlers/nvenc_h264_handler.hpp +++ b/source/encoders/handlers/nvenc_h264_handler.hpp @@ -17,8 +17,7 @@ namespace streamfx::encoder::ffmpeg::handler { virtual ~nvenc_h264_handler(){}; public /*factory*/: - virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, - std::string& codec_id); + virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, std::string& codec_id); virtual void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, bool hw_encode); @@ -39,8 +38,7 @@ namespace streamfx::encoder::ffmpeg::handler { virtual bool supports_reconfigure(ffmpeg_factory* instance, bool& threads, bool& gpu, bool& keyframes); public /*settings*/: - virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode); + virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode); virtual void migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, AVCodecContext* context); diff --git a/source/encoders/handlers/nvenc_hevc_handler.cpp b/source/encoders/handlers/nvenc_hevc_handler.cpp index 1e1dc07d..28996f58 100644 --- a/source/encoders/handlers/nvenc_hevc_handler.cpp +++ b/source/encoders/handlers/nvenc_hevc_handler.cpp @@ -97,12 +97,9 @@ void nvenc_hevc_handler::log_options(obs_data_t* settings, const AVCodec* codec, nvenc::log_options(settings, codec, context); DLOG_INFO("[%s] H.265/HEVC:", codec->name); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, "profile", " Profile", - [](int64_t v, std::string_view o) { return std::string(o); }); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, "level", " Level", - [](int64_t v, std::string_view o) { return std::string(o); }); - ::streamfx::ffmpeg::tools::print_av_option_string2(context, "tier", " Tier", - [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, "profile", " Profile", [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, "level", " Level", [](int64_t v, std::string_view o) { return std::string(o); }); + ::streamfx::ffmpeg::tools::print_av_option_string2(context, "tier", " Tier", [](int64_t v, std::string_view o) { return std::string(o); }); } void nvenc_hevc_handler::get_encoder_properties(obs_properties_t* props, const AVCodec* codec) @@ -123,19 +120,16 @@ void nvenc_hevc_handler::get_encoder_properties(obs_properties_t* props, const A } { - auto p = obs_properties_add_list(grp, ST_KEY_PROFILE, D_TRANSLATE(S_CODEC_HEVC_PROFILE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_PROFILE, D_TRANSLATE(S_CODEC_HEVC_PROFILE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_DEFAULT), -1); - streamfx::ffmpeg::tools::avoption_list_add_entries( - context->priv_data, "profile", [&p](const AVOption* opt) { - char buffer[1024]; - snprintf(buffer, sizeof(buffer), "%s.%s", S_CODEC_HEVC_PROFILE, opt->name); - obs_property_list_add_string(p, D_TRANSLATE(buffer), opt->name); - }); + 
streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "profile", [&p](const AVOption* opt) { + char buffer[1024]; + snprintf(buffer, sizeof(buffer), "%s.%s", S_CODEC_HEVC_PROFILE, opt->name); + obs_property_list_add_string(p, D_TRANSLATE(buffer), opt->name); + }); } { - auto p = obs_properties_add_list(grp, ST_KEY_TIER, D_TRANSLATE(S_CODEC_HEVC_TIER), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_TIER, D_TRANSLATE(S_CODEC_HEVC_TIER), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_DEFAULT), -1); streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "tier", [&p](const AVOption* opt) { char buffer[1024]; @@ -144,8 +138,7 @@ void nvenc_hevc_handler::get_encoder_properties(obs_properties_t* props, const A }); } { - auto p = obs_properties_add_list(grp, ST_KEY_LEVEL, D_TRANSLATE(S_CODEC_HEVC_LEVEL), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_LEVEL, D_TRANSLATE(S_CODEC_HEVC_LEVEL), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "level", [&p](const AVOption* opt) { if (opt->default_val.i64 == 0) { @@ -169,8 +162,7 @@ void nvenc_hevc_handler::get_runtime_properties(obs_properties_t* props, const A nvenc::get_runtime_properties(props, codec, context); } -void streamfx::encoder::ffmpeg::handler::nvenc_hevc_handler::migrate(obs_data_t* settings, uint64_t version, - const AVCodec* codec, AVCodecContext* context) +void streamfx::encoder::ffmpeg::handler::nvenc_hevc_handler::migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, AVCodecContext* context) { nvenc::migrate(settings, version, codec, context); diff --git a/source/encoders/handlers/nvenc_hevc_handler.hpp b/source/encoders/handlers/nvenc_hevc_handler.hpp index 3223e697..f08f8f30 100644 --- a/source/encoders/handlers/nvenc_hevc_handler.hpp +++ b/source/encoders/handlers/nvenc_hevc_handler.hpp @@ -17,8 +17,7 @@ namespace streamfx::encoder::ffmpeg::handler { virtual ~nvenc_hevc_handler(){}; public /*factory*/: - virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, - std::string& codec_id); + virtual void adjust_info(ffmpeg_factory* factory, const AVCodec* codec, std::string& id, std::string& name, std::string& codec_id); virtual void get_defaults(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context, bool hw_encode); @@ -39,8 +38,7 @@ namespace streamfx::encoder::ffmpeg::handler { virtual bool supports_reconfigure(ffmpeg_factory* instance, bool& threads, bool& gpu, bool& keyframes); public /*settings*/: - virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode); + virtual void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode); virtual void migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, AVCodecContext* context); diff --git a/source/encoders/handlers/nvenc_shared.cpp b/source/encoders/handlers/nvenc_shared.cpp index 18a6700a..36782476 100644 --- a/source/encoders/handlers/nvenc_shared.cpp +++ b/source/encoders/handlers/nvenc_shared.cpp @@ -235,8 +235,7 @@ static bool modified_aq(obs_properties_t* props, obs_property_t*, obs_data_t* se void nvenc::get_properties_pre(obs_properties_t* props, const AVCodec*, const AVCodecContext* context) { { - auto p = obs_properties_add_list(props, 
ST_KEY_PRESET, D_TRANSLATE(ST_I18N_PRESET), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(props, ST_KEY_PRESET, D_TRANSLATE(ST_I18N_PRESET), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "preset", [&p](const AVOption* opt) { char buffer[1024]; snprintf(buffer, sizeof(buffer), "%s.%s", ST_I18N_PRESET, opt->name); @@ -245,8 +244,7 @@ void nvenc::get_properties_pre(obs_properties_t* props, const AVCodec*, const AV } if (streamfx::ffmpeg::tools::avoption_exists(context->priv_data, "tune")) { - auto p = obs_properties_add_list(props, ST_KEY_TUNE, D_TRANSLATE(ST_I18N_TUNE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(props, ST_KEY_TUNE, D_TRANSLATE(ST_I18N_TUNE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "tune", [&p](const AVOption* opt) { char buffer[1024]; snprintf(buffer, sizeof(buffer), "%s.%s", ST_I18N_TUNE, opt->name); @@ -261,13 +259,11 @@ void nvenc::get_properties_post(obs_properties_t* props, const AVCodec* codec, c obs_properties_t* grp = props; if (!streamfx::util::are_property_groups_broken()) { grp = obs_properties_create(); - obs_properties_add_group(props, ST_I18N_RATECONTROL, D_TRANSLATE(ST_I18N_RATECONTROL), OBS_GROUP_NORMAL, - grp); + obs_properties_add_group(props, ST_I18N_RATECONTROL, D_TRANSLATE(ST_I18N_RATECONTROL), OBS_GROUP_NORMAL, grp); } { - auto p = obs_properties_add_list(grp, ST_KEY_RATECONTROL_MODE, D_TRANSLATE(ST_I18N_RATECONTROL_MODE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_RATECONTROL_MODE, D_TRANSLATE(ST_I18N_RATECONTROL_MODE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_set_modified_callback(p, modified_ratecontrol); obs_property_list_add_string(p, D_TRANSLATE(S_STATE_DEFAULT), ""); streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "rc", [&p](const AVOption* opt) { @@ -282,36 +278,29 @@ void nvenc::get_properties_post(obs_properties_t* props, const AVCodec* codec, c } if (streamfx::ffmpeg::tools::avoption_exists(context->priv_data, "multipass")) { - auto p = - obs_properties_add_list(grp, ST_KEY_RATECONTROL_MULTIPASS, D_TRANSLATE(ST_I18N_RATECONTROL_MULTIPASS), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_RATECONTROL_MULTIPASS, D_TRANSLATE(ST_I18N_RATECONTROL_MULTIPASS), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_list_add_string(p, D_TRANSLATE(S_STATE_DEFAULT), ""); - streamfx::ffmpeg::tools::avoption_list_add_entries( - context->priv_data, "multipass", [&p](const AVOption* opt) { - char buffer[1024]; - snprintf(buffer, sizeof(buffer), "%s.%s", ST_I18N_RATECONTROL_MULTIPASS, opt->name); - obs_property_list_add_string(p, D_TRANSLATE(buffer), opt->name); - }); + streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "multipass", [&p](const AVOption* opt) { + char buffer[1024]; + snprintf(buffer, sizeof(buffer), "%s.%s", ST_I18N_RATECONTROL_MULTIPASS, opt->name); + obs_property_list_add_string(p, D_TRANSLATE(buffer), opt->name); + }); } else { - auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_TWOPASS, - D_TRANSLATE(ST_I18N_RATECONTROL_TWOPASS)); + auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_TWOPASS, D_TRANSLATE(ST_I18N_RATECONTROL_TWOPASS)); } { - auto p = obs_properties_add_int_slider(grp, 
ST_KEY_RATECONTROL_LOOKAHEAD, - D_TRANSLATE(ST_I18N_RATECONTROL_LOOKAHEAD), -1, 32, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_LOOKAHEAD, D_TRANSLATE(ST_I18N_RATECONTROL_LOOKAHEAD), -1, 32, 1); obs_property_int_set_suffix(p, " frames"); //obs_property_set_modified_callback(p, modified_lookahead); } { - auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_ADAPTIVEI, - D_TRANSLATE(ST_I18N_RATECONTROL_ADAPTIVEI)); + auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_ADAPTIVEI, D_TRANSLATE(ST_I18N_RATECONTROL_ADAPTIVEI)); } if (strcmp(codec->name, "h264_nvenc") == 0) { - auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_ADAPTIVEB, - D_TRANSLATE(ST_I18N_RATECONTROL_ADAPTIVEB)); + auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_RATECONTROL_ADAPTIVEB, D_TRANSLATE(ST_I18N_RATECONTROL_ADAPTIVEB)); } } @@ -319,33 +308,25 @@ void nvenc::get_properties_post(obs_properties_t* props, const AVCodec* codec, c obs_properties_t* grp = props; if (!streamfx::util::are_property_groups_broken()) { grp = obs_properties_create(); - obs_properties_add_group(props, ST_I18N_RATECONTROL_LIMITS, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS), - OBS_GROUP_NORMAL, grp); + obs_properties_add_group(props, ST_I18N_RATECONTROL_LIMITS, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS), OBS_GROUP_NORMAL, grp); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_RATECONTROL_LIMITS_QUALITY, - D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_QUALITY), 0, 51, 0.01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_RATECONTROL_LIMITS_QUALITY, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_QUALITY), 0, 51, 0.01); } { - auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BITRATE_TARGET, - D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BITRATE_TARGET), -1, - std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BITRATE_TARGET, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BITRATE_TARGET), -1, std::numeric_limits::max(), 1); obs_property_int_set_suffix(p, " kbit/s"); } { - auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BITRATE_MAXIMUM, - D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BITRATE_MAXIMUM), -1, - std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BITRATE_MAXIMUM, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BITRATE_MAXIMUM), -1, std::numeric_limits::max(), 1); obs_property_int_set_suffix(p, " kbit/s"); } { - auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BUFFERSIZE, - D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BUFFERSIZE), 0, - std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(grp, ST_KEY_RATECONTROL_LIMITS_BUFFERSIZE, D_TRANSLATE(ST_I18N_RATECONTROL_LIMITS_BUFFERSIZE), 0, std::numeric_limits::max(), 1); obs_property_int_set_suffix(p, " kbit"); } } @@ -354,30 +335,24 @@ void nvenc::get_properties_post(obs_properties_t* props, const AVCodec* codec, c obs_properties_t* grp = props; if (!streamfx::util::are_property_groups_broken()) { grp = obs_properties_create(); - obs_properties_add_group(props, ST_I18N_RATECONTROL_QP, D_TRANSLATE(ST_I18N_RATECONTROL_QP), - OBS_GROUP_NORMAL, grp); + obs_properties_add_group(props, ST_I18N_RATECONTROL_QP, D_TRANSLATE(ST_I18N_RATECONTROL_QP), OBS_GROUP_NORMAL, grp); } { - auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_MINIMUM, - D_TRANSLATE(ST_I18N_RATECONTROL_QP_MINIMUM), -1, 51, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_MINIMUM, 
D_TRANSLATE(ST_I18N_RATECONTROL_QP_MINIMUM), -1, 51, 1); } { - auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_MAXIMUM, - D_TRANSLATE(ST_I18N_RATECONTROL_QP_MAXIMUM), -1, 51, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_MAXIMUM, D_TRANSLATE(ST_I18N_RATECONTROL_QP_MAXIMUM), -1, 51, 1); } { - auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_I, D_TRANSLATE(ST_I18N_RATECONTROL_QP_I), - -1, 51, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_I, D_TRANSLATE(ST_I18N_RATECONTROL_QP_I), -1, 51, 1); } { - auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_P, D_TRANSLATE(ST_I18N_RATECONTROL_QP_P), - -1, 51, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_P, D_TRANSLATE(ST_I18N_RATECONTROL_QP_P), -1, 51, 1); } { - auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_B, D_TRANSLATE(ST_I18N_RATECONTROL_QP_B), - -1, 51, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_RATECONTROL_QP_B, D_TRANSLATE(ST_I18N_RATECONTROL_QP_B), -1, 51, 1); } } @@ -389,17 +364,14 @@ void nvenc::get_properties_post(obs_properties_t* props, const AVCodec* codec, c } { - auto p = - streamfx::util::obs_properties_add_tristate(grp, ST_KEY_AQ_SPATIAL, D_TRANSLATE(ST_I18N_AQ_SPATIAL)); + auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_AQ_SPATIAL, D_TRANSLATE(ST_I18N_AQ_SPATIAL)); obs_property_set_modified_callback(p, modified_aq); } { - auto p = - obs_properties_add_int_slider(grp, ST_KEY_AQ_STRENGTH, D_TRANSLATE(ST_I18N_AQ_STRENGTH), -1, 15, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_AQ_STRENGTH, D_TRANSLATE(ST_I18N_AQ_STRENGTH), -1, 15, 1); } { - auto p = - streamfx::util::obs_properties_add_tristate(grp, ST_KEY_AQ_TEMPORAL, D_TRANSLATE(ST_I18N_AQ_TEMPORAL)); + auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_AQ_TEMPORAL, D_TRANSLATE(ST_I18N_AQ_TEMPORAL)); } } @@ -411,49 +383,39 @@ void nvenc::get_properties_post(obs_properties_t* props, const AVCodec* codec, c } { - auto p = - obs_properties_add_int_slider(grp, ST_KEY_OTHER_BFRAMES, D_TRANSLATE(ST_I18N_OTHER_BFRAMES), -1, 4, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_OTHER_BFRAMES, D_TRANSLATE(ST_I18N_OTHER_BFRAMES), -1, 4, 1); obs_property_int_set_suffix(p, " frames"); } { - auto p = obs_properties_add_list(grp, ST_KEY_OTHER_BFRAMEREFERENCEMODE, - D_TRANSLATE(ST_I18N_OTHER_BFRAMEREFERENCEMODE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_OTHER_BFRAMEREFERENCEMODE, D_TRANSLATE(ST_I18N_OTHER_BFRAMEREFERENCEMODE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_list_add_string(p, D_TRANSLATE(S_STATE_DEFAULT), ""); - streamfx::ffmpeg::tools::avoption_list_add_entries( - context->priv_data, "b_ref_mode", [&p](const AVOption* opt) { - char buffer[1024]; - snprintf(buffer, sizeof(buffer), "%s.%s", ST_I18N_OTHER_BFRAMEREFERENCEMODE, opt->name); - obs_property_list_add_string(p, D_TRANSLATE(buffer), opt->name); - }); + streamfx::ffmpeg::tools::avoption_list_add_entries(context->priv_data, "b_ref_mode", [&p](const AVOption* opt) { + char buffer[1024]; + snprintf(buffer, sizeof(buffer), "%s.%s", ST_I18N_OTHER_BFRAMEREFERENCEMODE, opt->name); + obs_property_list_add_string(p, D_TRANSLATE(buffer), opt->name); + }); } { - auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_ZEROLATENCY, - D_TRANSLATE(ST_I18N_OTHER_ZEROLATENCY)); + auto p = streamfx::util::obs_properties_add_tristate(grp, 
ST_KEY_OTHER_ZEROLATENCY, D_TRANSLATE(ST_I18N_OTHER_ZEROLATENCY)); } { - auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_WEIGHTEDPREDICTION, - D_TRANSLATE(ST_I18N_OTHER_WEIGHTEDPREDICTION)); + auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_WEIGHTEDPREDICTION, D_TRANSLATE(ST_I18N_OTHER_WEIGHTEDPREDICTION)); } { - auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_NONREFERENCEPFRAMES, - D_TRANSLATE(ST_I18N_OTHER_NONREFERENCEPFRAMES)); + auto p = streamfx::util::obs_properties_add_tristate(grp, ST_KEY_OTHER_NONREFERENCEPFRAMES, D_TRANSLATE(ST_I18N_OTHER_NONREFERENCEPFRAMES)); } { - auto p = obs_properties_add_int_slider(grp, ST_KEY_OTHER_REFERENCEFRAMES, - D_TRANSLATE(ST_I18N_OTHER_REFERENCEFRAMES), -1, - (strcmp(codec->name, "h264_nvenc") == 0) ? 16 : 4, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_OTHER_REFERENCEFRAMES, D_TRANSLATE(ST_I18N_OTHER_REFERENCEFRAMES), -1, (strcmp(codec->name, "h264_nvenc") == 0) ? 16 : 4, 1); obs_property_int_set_suffix(p, " frames"); } if (streamfx::ffmpeg::tools::avoption_exists(context->priv_data, "ldkfs")) { - auto p = obs_properties_add_int_slider(grp, ST_KEY_OTHER_LOWDELAYKEYFRAMESCALE, - D_TRANSLATE(ST_I18N_OTHER_LOWDELAYKEYFRAMESCALE), -1, 255, 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_OTHER_LOWDELAYKEYFRAMESCALE, D_TRANSLATE(ST_I18N_OTHER_LOWDELAYKEYFRAMESCALE), -1, 255, 1); } } } @@ -496,8 +458,7 @@ void nvenc::get_runtime_properties(obs_properties_t* props, const AVCodec*, AVCo void nvenc::update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) { - if (const char* v = obs_data_get_string(settings, ST_KEY_PRESET); - !context->internal && (v != nullptr) && (v[0] != '\0')) { + if (const char* v = obs_data_get_string(settings, ST_KEY_PRESET); !context->internal && (v != nullptr) && (v[0] != '\0')) { av_opt_set(context->priv_data, "preset", v, AV_OPT_SEARCH_CHILDREN); } @@ -555,8 +516,7 @@ void nvenc::update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* c if (!context->internal) { if (streamfx::ffmpeg::tools::avoption_exists(context->priv_data, "multipass")) { // Multi-Pass - if (const char* v = obs_data_get_string(settings, ST_KEY_RATECONTROL_MULTIPASS); - (v != nullptr) && (v[0] != '\0')) { + if (const char* v = obs_data_get_string(settings, ST_KEY_RATECONTROL_MULTIPASS); (v != nullptr) && (v[0] != '\0')) { av_opt_set(context->priv_data, "multipass", v, AV_OPT_SEARCH_CHILDREN); av_opt_set_int(context->priv_data, "2pass", 0, AV_OPT_SEARCH_CHILDREN); } @@ -574,16 +534,14 @@ void nvenc::update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* c } // Adaptive I-Frames - if (int64_t adapt_i = obs_data_get_int(settings, ST_KEY_RATECONTROL_ADAPTIVEI); - !streamfx::util::is_tristate_default(adapt_i) && (la != 0)) { + if (int64_t adapt_i = obs_data_get_int(settings, ST_KEY_RATECONTROL_ADAPTIVEI); !streamfx::util::is_tristate_default(adapt_i) && (la != 0)) { // no-scenecut is inverted compared to our UI. 
av_opt_set_int(context->priv_data, "no-scenecut", 1 - adapt_i, AV_OPT_SEARCH_CHILDREN); } // Adaptive B-Frames if (std::string_view("h264_nvenc") == codec->name) { - if (int64_t adapt_b = obs_data_get_int(settings, ST_KEY_RATECONTROL_ADAPTIVEB); - !streamfx::util::is_tristate_default(adapt_b) && (la != 0)) { + if (int64_t adapt_b = obs_data_get_int(settings, ST_KEY_RATECONTROL_ADAPTIVEB); !streamfx::util::is_tristate_default(adapt_b) && (la != 0)) { av_opt_set_int(context->priv_data, "b_adapt", adapt_b, AV_OPT_SEARCH_CHILDREN); } } @@ -686,8 +644,7 @@ void nvenc::update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* c if (int64_t zl = obs_data_get_int(settings, ST_KEY_OTHER_ZEROLATENCY); !streamfx::util::is_tristate_default(zl)) av_opt_set_int(context->priv_data, "zerolatency", zl, AV_OPT_SEARCH_CHILDREN); - if (int64_t nrp = obs_data_get_int(settings, ST_KEY_OTHER_NONREFERENCEPFRAMES); - !streamfx::util::is_tristate_default(nrp)) + if (int64_t nrp = obs_data_get_int(settings, ST_KEY_OTHER_NONREFERENCEPFRAMES); !streamfx::util::is_tristate_default(nrp)) av_opt_set_int(context->priv_data, "nonref_p", nrp, AV_OPT_SEARCH_CHILDREN); if (int64_t v = obs_data_get_int(settings, ST_KEY_OTHER_REFERENCEFRAMES); v > -1) av_opt_set_int(context, "refs", v, AV_OPT_SEARCH_CHILDREN); @@ -700,8 +657,7 @@ void nvenc::update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* c av_opt_set_int(context->priv_data, "weighted_pred", wp, AV_OPT_SEARCH_CHILDREN); } - if (const char* v = obs_data_get_string(settings, ST_KEY_OTHER_BFRAMEREFERENCEMODE); - (v != nullptr) && (v[0] != '\0')) { + if (const char* v = obs_data_get_string(settings, ST_KEY_OTHER_BFRAMEREFERENCEMODE); (v != nullptr) && (v[0] != '\0')) { av_opt_set(context->priv_data, "b_ref_mode", v, AV_OPT_SEARCH_CHILDREN); } @@ -716,13 +672,10 @@ void nvenc::log_options(obs_data_t*, const AVCodec* codec, AVCodecContext* conte using namespace ::streamfx::ffmpeg; DLOG_INFO("[%s] NVIDIA NVENC:", codec->name); - tools::print_av_option_string2(context, "preset", " Preset", - [](int64_t v, std::string_view o) { return std::string(o); }); - tools::print_av_option_string2(context, "rc", " Rate Control", - [](int64_t v, std::string_view o) { return std::string(o); }); + tools::print_av_option_string2(context, "preset", " Preset", [](int64_t v, std::string_view o) { return std::string(o); }); + tools::print_av_option_string2(context, "rc", " Rate Control", [](int64_t v, std::string_view o) { return std::string(o); }); tools::print_av_option_bool(context, "2pass", " Two Pass"); - tools::print_av_option_string2(context, "multipass", " Multi-Pass", - [](int64_t v, std::string_view o) { return std::string(o); }); + tools::print_av_option_string2(context, "multipass", " Multi-Pass", [](int64_t v, std::string_view o) { return std::string(o); }); tools::print_av_option_int(context, "rc-lookahead", " Look-Ahead", "Frames"); tools::print_av_option_bool(context, "no-scenecut", " Adaptive I-Frames", true); if (strcmp(codec->name, "h264_nvenc") == 0) @@ -745,8 +698,7 @@ void nvenc::log_options(obs_data_t*, const AVCodec* codec, AVCodecContext* conte tools::print_av_option_int(context, "qp_cr_offset", " CR Offset", ""); tools::print_av_option_int(context, "bf", " B-Frames", "Frames"); - tools::print_av_option_string2(context, "b_ref_mode", " Reference Mode", - [](int64_t v, std::string_view o) { return std::string(o); }); + tools::print_av_option_string2(context, "b_ref_mode", " Reference Mode", [](int64_t v, std::string_view o) { return std::string(o); }); 
DLOG_INFO("[%s] Adaptive Quantization:", codec->name); if (strcmp(codec->name, "h264_nvenc") == 0) { @@ -777,8 +729,7 @@ void nvenc::log_options(obs_data_t*, const AVCodec* codec, AVCodecContext* conte tools::print_av_option_bool(context, "constrained-encoding", " Constrained Encoding"); } -void streamfx::encoder::ffmpeg::handler::nvenc::migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, - AVCodecContext* context) +void streamfx::encoder::ffmpeg::handler::nvenc::migrate(obs_data_t* settings, uint64_t version, const AVCodec* codec, AVCodecContext* context) { // Only test for A.B.C in A.B.C.D version = version & STREAMFX_MASK_UPDATE; @@ -807,8 +758,7 @@ void streamfx::encoder::ffmpeg::handler::nvenc::migrate(obs_data_t* settings, ui // Preset if (auto v = obs_data_get_int(settings, ST_KEY_PRESET); v != -1) { std::map preset{ - {0, "default"}, {1, "slow"}, {2, "medium"}, {3, "fast"}, {4, "hp"}, {5, "hq"}, - {6, "bd"}, {7, "ll"}, {8, "llhq"}, {9, "llhp"}, {10, "lossless"}, {11, "losslesshp"}, + {0, "default"}, {1, "slow"}, {2, "medium"}, {3, "fast"}, {4, "hp"}, {5, "hq"}, {6, "bd"}, {7, "ll"}, {8, "llhq"}, {9, "llhp"}, {10, "lossless"}, {11, "losslesshp"}, }; if (auto k = preset.find(v); k != preset.end()) { obs_data_set_string(settings, ST_KEY_PRESET, k->second.data()); diff --git a/source/encoders/handlers/prores_aw_handler.cpp b/source/encoders/handlers/prores_aw_handler.cpp index 16d12085..d643054e 100644 --- a/source/encoders/handlers/prores_aw_handler.cpp +++ b/source/encoders/handlers/prores_aw_handler.cpp @@ -15,15 +15,11 @@ using namespace streamfx::encoder::ffmpeg::handler; using namespace streamfx::encoder::codec::prores; -void prores_aw_handler::override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, - AVCodecContext*) +void prores_aw_handler::override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, AVCodecContext*) { - static const std::array, static_cast(profile::_COUNT)> - profile_to_format_map{ - std::pair{profile::APCO, AV_PIX_FMT_YUV422P10}, std::pair{profile::APCS, AV_PIX_FMT_YUV422P10}, - std::pair{profile::APCN, AV_PIX_FMT_YUV422P10}, std::pair{profile::APCH, AV_PIX_FMT_YUV422P10}, - std::pair{profile::AP4H, AV_PIX_FMT_YUV444P10}, std::pair{profile::AP4X, AV_PIX_FMT_YUV444P10}, - }; + static const std::array, static_cast(profile::_COUNT)> profile_to_format_map{ + std::pair{profile::APCO, AV_PIX_FMT_YUV422P10}, std::pair{profile::APCS, AV_PIX_FMT_YUV422P10}, std::pair{profile::APCN, AV_PIX_FMT_YUV422P10}, std::pair{profile::APCH, AV_PIX_FMT_YUV422P10}, std::pair{profile::AP4H, AV_PIX_FMT_YUV444P10}, std::pair{profile::AP4X, AV_PIX_FMT_YUV444P10}, + }; const int64_t profile_id = obs_data_get_int(settings, S_CODEC_PRORES_PROFILE); for (auto kv : profile_to_format_map) { @@ -67,8 +63,7 @@ inline const char* profile_to_name(const AVProfile* ptr) void prores_aw_handler::get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool) { if (!context) { - auto p = obs_properties_add_list(props, S_CODEC_PRORES_PROFILE, D_TRANSLATE(S_CODEC_PRORES_PROFILE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(props, S_CODEC_PRORES_PROFILE, D_TRANSLATE(S_CODEC_PRORES_PROFILE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); for (auto ptr = codec->profiles; ptr->profile != FF_PROFILE_UNKNOWN; ptr++) { obs_property_list_add_int(p, profile_to_name(ptr), static_cast(ptr->profile)); } diff --git a/source/encoders/handlers/prores_aw_handler.hpp 
b/source/encoders/handlers/prores_aw_handler.hpp index 306f9d5d..8e3e5bd4 100644 --- a/source/encoders/handlers/prores_aw_handler.hpp +++ b/source/encoders/handlers/prores_aw_handler.hpp @@ -30,15 +30,13 @@ namespace streamfx::encoder::ffmpeg::handler { bool has_keyframe_support(ffmpeg_factory* instance) override; public /*settings*/: - void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, - bool hw_encode) override; + void get_properties(obs_properties_t* props, const AVCodec* codec, AVCodecContext* context, bool hw_encode) override; void update(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; void log_options(obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; public /*instance*/: - void override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, - AVCodecContext* context) override; + void override_colorformat(AVPixelFormat& target_format, obs_data_t* settings, const AVCodec* codec, AVCodecContext* context) override; }; } // namespace streamfx::encoder::ffmpeg::handler diff --git a/source/ffmpeg/avframe-queue.cpp b/source/ffmpeg/avframe-queue.cpp index 609b4918..33e6f17f 100644 --- a/source/ffmpeg/avframe-queue.cpp +++ b/source/ffmpeg/avframe-queue.cpp @@ -97,9 +97,7 @@ std::shared_ptr avframe_queue::pop() ret = create_frame(); } else { _frames.pop_front(); - if ((static_cast(ret->width) != this->_resolution.first) - || (static_cast(ret->height) != this->_resolution.second) - || (ret->format != this->_format)) { + if ((static_cast(ret->width) != this->_resolution.first) || (static_cast(ret->height) != this->_resolution.second) || (ret->format != this->_format)) { ret = nullptr; } } diff --git a/source/ffmpeg/hwapi/base.hpp b/source/ffmpeg/hwapi/base.hpp index acd2156e..45c08e61 100644 --- a/source/ffmpeg/hwapi/base.hpp +++ b/source/ffmpeg/hwapi/base.hpp @@ -32,11 +32,9 @@ namespace streamfx::ffmpeg::hwapi { virtual std::shared_ptr allocate_frame(AVBufferRef* frames) = 0; - virtual void copy_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key, - std::shared_ptr frame) = 0; + virtual void copy_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key, std::shared_ptr frame) = 0; - virtual std::shared_ptr avframe_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, - uint64_t* next_lock_key) = 0; + virtual std::shared_ptr avframe_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key) = 0; }; class base { diff --git a/source/ffmpeg/hwapi/d3d11.cpp b/source/ffmpeg/hwapi/d3d11.cpp index ad5151a9..900bf317 100644 --- a/source/ffmpeg/hwapi/d3d11.cpp +++ b/source/ffmpeg/hwapi/d3d11.cpp @@ -69,9 +69,7 @@ std::list d3d11::enumerate_adapters() dxgi_adapter->GetDesc1(&desc); std::vector buf(1024); - std::size_t len = - static_cast(snprintf(buf.data(), buf.size(), "%ls (VEN_%04x/DEV_%04x/SUB_%04x/REV_%04x)", - desc.Description, desc.VendorId, desc.DeviceId, desc.SubSysId, desc.Revision)); + std::size_t len = static_cast(snprintf(buf.data(), buf.size(), "%ls (VEN_%04x/DEV_%04x/SUB_%04x/REV_%04x)", desc.Description, desc.VendorId, desc.DeviceId, desc.SubSysId, desc.Revision)); device dev; dev.name = std::string(buf.data(), buf.data() + len); @@ -104,11 +102,9 @@ std::shared_ptr d3d11::create(const device& target) // Create a D3D11 Device UINT device_flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT; - std::vector feature_levels = {D3D_FEATURE_LEVEL_12_1, 
D3D_FEATURE_LEVEL_12_0, - D3D_FEATURE_LEVEL_11_1}; + std::vector feature_levels = {D3D_FEATURE_LEVEL_12_1, D3D_FEATURE_LEVEL_12_0, D3D_FEATURE_LEVEL_11_1}; - if (FAILED(_D3D11CreateDevice(adapter, D3D_DRIVER_TYPE_HARDWARE, NULL, device_flags, feature_levels.data(), - static_cast(feature_levels.size()), D3D11_SDK_VERSION, &device, NULL, NULL))) { + if (FAILED(_D3D11CreateDevice(adapter, D3D_DRIVER_TYPE_HARDWARE, NULL, device_flags, feature_levels.data(), static_cast(feature_levels.size()), D3D11_SDK_VERSION, &device, NULL, NULL))) { throw std::runtime_error("Failed to create D3D11 device for target."); } @@ -123,8 +119,7 @@ std::shared_ptr d3d11::create_from_obs() throw std::runtime_error("OBS Device is not a D3D11 Device."); } - ATL::CComPtr device = - ATL::CComPtr(reinterpret_cast(gs_get_device_obj())); + ATL::CComPtr device = ATL::CComPtr(reinterpret_cast(gs_get_device_obj())); return std::make_shared(device); } @@ -187,15 +182,13 @@ std::shared_ptr d3d11_instance::allocate_frame(AVBufferRef* frames) return frame; } -void d3d11_instance::copy_from_obs(AVBufferRef*, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key, - std::shared_ptr frame) +void d3d11_instance::copy_from_obs(AVBufferRef*, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key, std::shared_ptr frame) { auto gctx = streamfx::obs::gs::context(); // Attempt to acquire shared texture. ATL::CComPtr input; - if (FAILED(_device->OpenSharedResource(reinterpret_cast(static_cast(handle)), - __uuidof(ID3D11Texture2D), reinterpret_cast(&input)))) { + if (FAILED(_device->OpenSharedResource(reinterpret_cast(static_cast(handle)), __uuidof(ID3D11Texture2D), reinterpret_cast(&input)))) { throw std::runtime_error("Failed to open shared texture resource."); } @@ -230,8 +223,7 @@ void d3d11_instance::copy_from_obs(AVBufferRef*, uint32_t handle, uint64_t lock_ mutex->ReleaseSync(*next_lock_key); } -std::shared_ptr d3d11_instance::avframe_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, - uint64_t* next_lock_key) +std::shared_ptr d3d11_instance::avframe_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key) { auto gctx = streamfx::obs::gs::context(); diff --git a/source/ffmpeg/hwapi/d3d11.hpp b/source/ffmpeg/hwapi/d3d11.hpp index f85e4434..ef16a565 100644 --- a/source/ffmpeg/hwapi/d3d11.hpp +++ b/source/ffmpeg/hwapi/d3d11.hpp @@ -17,9 +17,7 @@ namespace streamfx::ffmpeg::hwapi { class d3d11 : public streamfx::ffmpeg::hwapi::base { typedef HRESULT(__stdcall* CreateDXGIFactory_t)(REFIID, void**); typedef HRESULT(__stdcall* CreateDXGIFactory1_t)(REFIID, void**); - typedef HRESULT(__stdcall* D3D11CreateDevice_t)(IDXGIAdapter*, D3D_DRIVER_TYPE, HMODULE, UINT, - CONST D3D_FEATURE_LEVEL*, UINT, UINT, ID3D11Device**, - D3D_FEATURE_LEVEL*, ID3D11DeviceContext**); + typedef HRESULT(__stdcall* D3D11CreateDevice_t)(IDXGIAdapter*, D3D_DRIVER_TYPE, HMODULE, UINT, CONST D3D_FEATURE_LEVEL*, UINT, UINT, ID3D11Device**, D3D_FEATURE_LEVEL*, ID3D11DeviceContext**); HMODULE _dxgi_module; CreateDXGIFactory_t _CreateDXGIFactory; @@ -53,10 +51,8 @@ namespace streamfx::ffmpeg::hwapi { virtual std::shared_ptr allocate_frame(AVBufferRef* frames) override; - virtual void copy_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key, - std::shared_ptr frame) override; + virtual void copy_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key, std::shared_ptr frame) override; - virtual std::shared_ptr avframe_from_obs(AVBufferRef* frames, 
uint32_t handle, uint64_t lock_key, - uint64_t* next_lock_key) override; + virtual std::shared_ptr avframe_from_obs(AVBufferRef* frames, uint32_t handle, uint64_t lock_key, uint64_t* next_lock_key) override; }; } // namespace streamfx::ffmpeg::hwapi diff --git a/source/ffmpeg/swscale.cpp b/source/ffmpeg/swscale.cpp index 858535e5..0fcae530 100644 --- a/source/ffmpeg/swscale.cpp +++ b/source/ffmpeg/swscale.cpp @@ -149,26 +149,19 @@ bool swscale::initialize(int flags) if (this->context) { return false; } - if (source_size.first == 0 || source_size.second == 0 || source_format == AV_PIX_FMT_NONE - || source_colorspace == AVCOL_SPC_UNSPECIFIED) { + if (source_size.first == 0 || source_size.second == 0 || source_format == AV_PIX_FMT_NONE || source_colorspace == AVCOL_SPC_UNSPECIFIED) { throw std::invalid_argument("not all source parameters were set"); } - if (target_size.first == 0 || target_size.second == 0 || target_format == AV_PIX_FMT_NONE - || target_colorspace == AVCOL_SPC_UNSPECIFIED) { + if (target_size.first == 0 || target_size.second == 0 || target_format == AV_PIX_FMT_NONE || target_colorspace == AVCOL_SPC_UNSPECIFIED) { throw std::invalid_argument("not all target parameters were set"); } - this->context = - sws_getContext(static_cast(source_size.first), static_cast(source_size.second), source_format, - static_cast(target_size.first), static_cast(target_size.second), target_format, flags, - nullptr, nullptr, nullptr); + this->context = sws_getContext(static_cast(source_size.first), static_cast(source_size.second), source_format, static_cast(target_size.first), static_cast(target_size.second), target_format, flags, nullptr, nullptr, nullptr); if (!this->context) { return false; } - sws_setColorspaceDetails(this->context, sws_getCoefficients(source_colorspace), source_full_range ? 1 : 0, - sws_getCoefficients(target_colorspace), target_full_range ? 1 : 0, 1L << 16 | 0L, - 1L << 16 | 0L, 1L << 16 | 0L); + sws_setColorspaceDetails(this->context, sws_getCoefficients(source_colorspace), source_full_range ? 1 : 0, sws_getCoefficients(target_colorspace), target_full_range ? 
1 : 0, 1L << 16 | 0L, 1L << 16 | 0L, 1L << 16 | 0L); return true; } @@ -183,13 +176,11 @@ bool swscale::finalize() return false; } -int32_t swscale::convert(const uint8_t* const source_data[], const int source_stride[], int32_t source_row, - int32_t source_rows, uint8_t* const target_data[], const int target_stride[]) +int32_t swscale::convert(const uint8_t* const source_data[], const int source_stride[], int32_t source_row, int32_t source_rows, uint8_t* const target_data[], const int target_stride[]) { if (!this->context) { return 0; } - int height = - sws_scale(this->context, source_data, source_stride, source_row, source_rows, target_data, target_stride); + int height = sws_scale(this->context, source_data, source_stride, source_row, source_rows, target_data, target_stride); return height; } diff --git a/source/ffmpeg/swscale.hpp b/source/ffmpeg/swscale.hpp index 2e728d30..dfbbc813 100644 --- a/source/ffmpeg/swscale.hpp +++ b/source/ffmpeg/swscale.hpp @@ -63,7 +63,6 @@ namespace streamfx::ffmpeg { bool initialize(int flags); bool finalize(); - int32_t convert(const uint8_t* const source_data[], const int source_stride[], int32_t source_row, - int32_t source_rows, uint8_t* const target_data[], const int target_stride[]); + int32_t convert(const uint8_t* const source_data[], const int source_stride[], int32_t source_row, int32_t source_rows, uint8_t* const target_data[], const int target_stride[]); }; } // namespace streamfx::ffmpeg diff --git a/source/ffmpeg/tools.cpp b/source/ffmpeg/tools.cpp index d72be5c7..694ac887 100644 --- a/source/ffmpeg/tools.cpp +++ b/source/ffmpeg/tools.cpp @@ -72,26 +72,26 @@ const char* tools::get_error_description(int error) } static std::map const obs_to_av_format_map = { - {VIDEO_FORMAT_I420, AV_PIX_FMT_YUV420P}, // 4:2:0 YUV, 8bit, Planar - {VIDEO_FORMAT_NV12, AV_PIX_FMT_NV12}, // 4:2:0 YUV, 8bit, Packed (Y+UV) - {VIDEO_FORMAT_YVYU, AV_PIX_FMT_YVYU422}, // 4:2:0 YUV, 8bit, Packed (Y+UV) - {VIDEO_FORMAT_YUY2, AV_PIX_FMT_YUYV422}, // 4:2:2 YUV, 8bit, Packed (Y+UV) - {VIDEO_FORMAT_UYVY, AV_PIX_FMT_UYVY422}, // 4:2:2 YUV, 8bit, Packed (Y+UV) - {VIDEO_FORMAT_RGBA, AV_PIX_FMT_RGBA}, // 4:4:4:4 RGBA, 8bit, Planar - {VIDEO_FORMAT_BGRA, AV_PIX_FMT_BGRA}, // 4:4:4:4 BGRA, 8bit, Planar - {VIDEO_FORMAT_BGRX, AV_PIX_FMT_BGR0}, // 4:4:4 BGR, 8bit, Planar - {VIDEO_FORMAT_Y800, AV_PIX_FMT_GRAY8}, // 4:0:0 Y, 8bit, Planar - {VIDEO_FORMAT_I444, AV_PIX_FMT_YUV444P}, // 4:4:4 YUV, 8bit, Planar - {VIDEO_FORMAT_BGR3, AV_PIX_FMT_BGR24}, // 4:4:4 BGR, 8bit, Planar - {VIDEO_FORMAT_I422, AV_PIX_FMT_YUV422P}, // 4:2:2 YUV, 8bit, Planar - {VIDEO_FORMAT_I40A, AV_PIX_FMT_YUVA420P}, // 4:2:0:4 YUVA, 8bit, Planar - {VIDEO_FORMAT_I42A, AV_PIX_FMT_YUVA422P}, // 4:2:2:4 YUVA, 8bit, Planar - {VIDEO_FORMAT_YUVA, AV_PIX_FMT_YUVA444P}, // 4:4:4:4 YUVA, 8bit, Planar - {VIDEO_FORMAT_AYUV, AV_PIX_FMT_NONE}, // No compatible format known - {VIDEO_FORMAT_I010, AV_PIX_FMT_YUV420P10}, // 4:2:0, 10bit, Planar - {VIDEO_FORMAT_P010, AV_PIX_FMT_P010}, // 4:2:0, 10bit, Packed (Y+UV) - {VIDEO_FORMAT_I210, AV_PIX_FMT_YUV422P10}, // 4:2:2 YUV, 10bit, Planar - {VIDEO_FORMAT_I412, AV_PIX_FMT_YUV444P12}, // 4:4:4 YUV, 12bit, Planar + {VIDEO_FORMAT_I420, AV_PIX_FMT_YUV420P}, // 4:2:0 YUV, 8bit, Planar + {VIDEO_FORMAT_NV12, AV_PIX_FMT_NV12}, // 4:2:0 YUV, 8bit, Packed (Y+UV) + {VIDEO_FORMAT_YVYU, AV_PIX_FMT_YVYU422}, // 4:2:0 YUV, 8bit, Packed (Y+UV) + {VIDEO_FORMAT_YUY2, AV_PIX_FMT_YUYV422}, // 4:2:2 YUV, 8bit, Packed (Y+UV) + {VIDEO_FORMAT_UYVY, AV_PIX_FMT_UYVY422}, // 4:2:2 YUV, 8bit, Packed (Y+UV) + 
{VIDEO_FORMAT_RGBA, AV_PIX_FMT_RGBA}, // 4:4:4:4 RGBA, 8bit, Planar + {VIDEO_FORMAT_BGRA, AV_PIX_FMT_BGRA}, // 4:4:4:4 BGRA, 8bit, Planar + {VIDEO_FORMAT_BGRX, AV_PIX_FMT_BGR0}, // 4:4:4 BGR, 8bit, Planar + {VIDEO_FORMAT_Y800, AV_PIX_FMT_GRAY8}, // 4:0:0 Y, 8bit, Planar + {VIDEO_FORMAT_I444, AV_PIX_FMT_YUV444P}, // 4:4:4 YUV, 8bit, Planar + {VIDEO_FORMAT_BGR3, AV_PIX_FMT_BGR24}, // 4:4:4 BGR, 8bit, Planar + {VIDEO_FORMAT_I422, AV_PIX_FMT_YUV422P}, // 4:2:2 YUV, 8bit, Planar + {VIDEO_FORMAT_I40A, AV_PIX_FMT_YUVA420P}, // 4:2:0:4 YUVA, 8bit, Planar + {VIDEO_FORMAT_I42A, AV_PIX_FMT_YUVA422P}, // 4:2:2:4 YUVA, 8bit, Planar + {VIDEO_FORMAT_YUVA, AV_PIX_FMT_YUVA444P}, // 4:4:4:4 YUVA, 8bit, Planar + {VIDEO_FORMAT_AYUV, AV_PIX_FMT_NONE}, // No compatible format known + {VIDEO_FORMAT_I010, AV_PIX_FMT_YUV420P10}, // 4:2:0, 10bit, Planar + {VIDEO_FORMAT_P010, AV_PIX_FMT_P010}, // 4:2:0, 10bit, Packed (Y+UV) + {VIDEO_FORMAT_I210, AV_PIX_FMT_YUV422P10}, // 4:2:2 YUV, 10bit, Planar + {VIDEO_FORMAT_I412, AV_PIX_FMT_YUV444P12}, // 4:4:4 YUV, 12bit, Planar {VIDEO_FORMAT_YA2L, AV_PIX_FMT_YUVA444P12}, // 4:4:4:4 YUVA, 12bit, Planar }; @@ -138,7 +138,7 @@ AVColorSpace tools::obs_to_av_color_space(video_colorspace v) case VIDEO_CS_601: // BT.601 return AVCOL_SPC_SMPTE170M; case VIDEO_CS_DEFAULT: - case VIDEO_CS_709: // BT.709 + case VIDEO_CS_709: // BT.709 case VIDEO_CS_SRGB: // sRGB return AVCOL_SPC_BT709; case VIDEO_CS_2100_PQ: @@ -155,7 +155,7 @@ AVColorPrimaries streamfx::ffmpeg::tools::obs_to_av_color_primary(video_colorspa case VIDEO_CS_601: // BT.601 return AVCOL_PRI_SMPTE170M; case VIDEO_CS_DEFAULT: - case VIDEO_CS_709: // BT.709 + case VIDEO_CS_709: // BT.709 case VIDEO_CS_SRGB: // sRGB return AVCOL_PRI_BT709; case VIDEO_CS_2100_PQ: @@ -211,8 +211,7 @@ bool tools::avoption_exists(const void* obj, std::string_view name) return false; } -void tools::avoption_list_add_entries(const void* obj, std::string_view unit, - std::function inserter) +void tools::avoption_list_add_entries(const void* obj, std::string_view unit, std::function inserter) { for (const AVOption* opt = nullptr; (opt = av_opt_next(obj, opt)) != nullptr;) { // Skip all irrelevant options. @@ -364,28 +363,22 @@ void tools::print_av_option_bool(AVCodecContext* ctx_codec, const char* option, print_av_option_bool(ctx_codec, ctx_codec, option, text, inverse); } -void tools::print_av_option_bool(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, - bool inverse) +void tools::print_av_option_bool(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, bool inverse) { int64_t v = 0; if (int err = av_opt_get_int(ctx_option, option, AV_OPT_SEARCH_CHILDREN, &v); err != 0) { - DLOG_INFO("[%s] %s: ", ctx_codec->codec->name, text.data(), - streamfx::ffmpeg::tools::get_error_description(err)); + DLOG_INFO("[%s] %s: ", ctx_codec->codec->name, text.data(), streamfx::ffmpeg::tools::get_error_description(err)); } else { - DLOG_INFO("[%s] %s: %s%s", ctx_codec->codec->name, text.data(), - (inverse ? v != 0 : v == 0) ? "Disabled" : "Enabled", - av_opt_is_set_to_default_by_name(ctx_option, option, AV_OPT_SEARCH_CHILDREN) > 0 ? " " : ""); + DLOG_INFO("[%s] %s: %s%s", ctx_codec->codec->name, text.data(), (inverse ? v != 0 : v == 0) ? "Disabled" : "Enabled", av_opt_is_set_to_default_by_name(ctx_option, option, AV_OPT_SEARCH_CHILDREN) > 0 ? 
" " : ""); } } -void tools::print_av_option_int(AVCodecContext* ctx_codec, const char* option, std::string_view text, - std::string_view suffix) +void tools::print_av_option_int(AVCodecContext* ctx_codec, const char* option, std::string_view text, std::string_view suffix) { print_av_option_int(ctx_codec, ctx_codec, option, text, suffix); } -void tools::print_av_option_int(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, - std::string_view suffix) +void tools::print_av_option_int(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, std::string_view suffix) { int64_t v = 0; bool is_default = av_opt_is_set_to_default_by_name(ctx_option, option, AV_OPT_SEARCH_CHILDREN) > 0; @@ -393,46 +386,37 @@ void tools::print_av_option_int(AVCodecContext* ctx_codec, void* ctx_option, con if (is_default) { DLOG_INFO("[%s] %s: ", ctx_codec->codec->name, text.data()); } else { - DLOG_INFO("[%s] %s: ", ctx_codec->codec->name, text.data(), - streamfx::ffmpeg::tools::get_error_description(err)); + DLOG_INFO("[%s] %s: ", ctx_codec->codec->name, text.data(), streamfx::ffmpeg::tools::get_error_description(err)); } } else { - DLOG_INFO("[%s] %s: %" PRId64 " %s%s", ctx_codec->codec->name, text.data(), v, suffix.data(), - is_default ? " " : ""); + DLOG_INFO("[%s] %s: %" PRId64 " %s%s", ctx_codec->codec->name, text.data(), v, suffix.data(), is_default ? " " : ""); } } -void tools::print_av_option_string(AVCodecContext* ctx_codec, const char* option, std::string_view text, - std::function decoder) +void tools::print_av_option_string(AVCodecContext* ctx_codec, const char* option, std::string_view text, std::function decoder) { print_av_option_string(ctx_codec, ctx_codec, option, text, decoder); } -void tools::print_av_option_string(AVCodecContext* ctx_codec, void* ctx_option, const char* option, - std::string_view text, std::function decoder) +void tools::print_av_option_string(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, std::function decoder) { int64_t v = 0; if (int err = av_opt_get_int(ctx_option, option, AV_OPT_SEARCH_CHILDREN, &v); err != 0) { - DLOG_INFO("[%s] %s: ", ctx_codec->codec->name, text.data(), - streamfx::ffmpeg::tools::get_error_description(err)); + DLOG_INFO("[%s] %s: ", ctx_codec->codec->name, text.data(), streamfx::ffmpeg::tools::get_error_description(err)); } else { std::string name = ""; if (decoder) name = decoder(v); - DLOG_INFO("[%s] %s: %s%s", ctx_codec->codec->name, text.data(), name.c_str(), - av_opt_is_set_to_default_by_name(ctx_option, option, AV_OPT_SEARCH_CHILDREN) > 0 ? " " : ""); + DLOG_INFO("[%s] %s: %s%s", ctx_codec->codec->name, text.data(), name.c_str(), av_opt_is_set_to_default_by_name(ctx_option, option, AV_OPT_SEARCH_CHILDREN) > 0 ? 
" " : ""); } } -void tools::print_av_option_string2(AVCodecContext* ctx_codec, std::string_view option, std::string_view text, - std::function decoder) +void tools::print_av_option_string2(AVCodecContext* ctx_codec, std::string_view option, std::string_view text, std::function decoder) { print_av_option_string2(ctx_codec, ctx_codec, option, text, decoder); } -void tools::print_av_option_string2(AVCodecContext* ctx_codec, void* ctx_option, std::string_view option, - std::string_view text, - std::function decoder) +void tools::print_av_option_string2(AVCodecContext* ctx_codec, void* ctx_option, std::string_view option, std::string_view text, std::function decoder) { int64_t v = 0; if (int err = av_opt_get_int(ctx_option, option.data(), AV_OPT_SEARCH_CHILDREN, &v); err != 0) { @@ -459,15 +443,9 @@ void tools::print_av_option_string2(AVCodecContext* ctx_codec, void* ctx_option, if (decoder) { name = decoder(v, name); } - DLOG_INFO("[%s] %s: %s%s", ctx_codec->codec->name, text.data(), name.c_str(), - av_opt_is_set_to_default_by_name(ctx_option, option.data(), AV_OPT_SEARCH_CHILDREN) > 0 - ? " " - : ""); + DLOG_INFO("[%s] %s: %s%s", ctx_codec->codec->name, text.data(), name.c_str(), av_opt_is_set_to_default_by_name(ctx_option, option.data(), AV_OPT_SEARCH_CHILDREN) > 0 ? " " : ""); } else { - DLOG_INFO("[%s] %s: %" PRId64 "%s", ctx_codec->codec->name, text.data(), v, - av_opt_is_set_to_default_by_name(ctx_option, option.data(), AV_OPT_SEARCH_CHILDREN) > 0 - ? " " - : ""); + DLOG_INFO("[%s] %s: %" PRId64 "%s", ctx_codec->codec->name, text.data(), v, av_opt_is_set_to_default_by_name(ctx_option, option.data(), AV_OPT_SEARCH_CHILDREN) > 0 ? " " : ""); } } } diff --git a/source/ffmpeg/tools.hpp b/source/ffmpeg/tools.hpp index 98a6b23c..9572b424 100644 --- a/source/ffmpeg/tools.hpp +++ b/source/ffmpeg/tools.hpp @@ -46,29 +46,21 @@ namespace streamfx::ffmpeg::tools { const char* get_thread_type_name(int thread_type); void print_av_option_bool(AVCodecContext* context, const char* option, std::string_view text, bool inverse = false); - void print_av_option_bool(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, - bool inverse = false); + void print_av_option_bool(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, bool inverse = false); - void print_av_option_int(AVCodecContext* context, const char* option, std::string_view text, - std::string_view suffix); - void print_av_option_int(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, - std::string_view suffix); + void print_av_option_int(AVCodecContext* context, const char* option, std::string_view text, std::string_view suffix); + void print_av_option_int(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, std::string_view suffix); - void print_av_option_string(AVCodecContext* context, const char* option, std::string_view text, - std::function decoder); - void print_av_option_string(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, - std::function decoder); + void print_av_option_string(AVCodecContext* context, const char* option, std::string_view text, std::function decoder); + void print_av_option_string(AVCodecContext* ctx_codec, void* ctx_option, const char* option, std::string_view text, std::function decoder); - void print_av_option_string2(AVCodecContext* context, std::string_view option, std::string_view text, - std::function decoder); - void 
print_av_option_string2(AVCodecContext* ctx_codec, void* ctx_option, std::string_view option, - std::string_view text, std::function decoder); + void print_av_option_string2(AVCodecContext* context, std::string_view option, std::string_view text, std::function decoder); + void print_av_option_string2(AVCodecContext* ctx_codec, void* ctx_option, std::string_view option, std::string_view text, std::function decoder); bool avoption_exists(const void* obj, std::string_view name); const char* avoption_name_from_unit_value(const void* obj, std::string_view unit, int64_t value); - void avoption_list_add_entries(const void* obj, std::string_view unit, - std::function inserter = nullptr); + void avoption_list_add_entries(const void* obj, std::string_view unit, std::function inserter = nullptr); } // namespace streamfx::ffmpeg::tools diff --git a/source/filters/filter-autoframing.cpp b/source/filters/filter-autoframing.cpp index f3f9859f..787b60dd 100644 --- a/source/filters/filter-autoframing.cpp +++ b/source/filters/filter-autoframing.cpp @@ -164,16 +164,13 @@ autoframing_instance::autoframing_instance(obs_data_t* data, obs_source_t* self) _gfx_debug(), _standard_effect(), _input(), _vb(), - _provider(tracking_provider::INVALID), _provider_ui(tracking_provider::INVALID), _provider_ready(false), - _provider_lock(), _provider_task(), + _provider(tracking_provider::INVALID), _provider_ui(tracking_provider::INVALID), _provider_ready(false), _provider_lock(), _provider_task(), _track_mode(tracking_mode::SOLO), _track_frequency(1), - _motion_smoothing(0.0), _motion_smoothing_kalman_pnc(1.), _motion_smoothing_kalman_mnc(1.), - _motion_prediction(0.0), + _motion_smoothing(0.0), _motion_smoothing_kalman_pnc(1.), _motion_smoothing_kalman_mnc(1.), _motion_prediction(0.0), - _frame_stability(0.), _frame_stability_kalman(1.), _frame_padding_prc(), _frame_padding(), _frame_offset_prc(), - _frame_offset(), _frame_aspect_ratio(0.0), + _frame_stability(0.), _frame_stability_kalman(1.), _frame_padding_prc(), _frame_padding(), _frame_offset_prc(), _frame_offset(), _frame_aspect_ratio(0.0), _track_frequency_counter(0), _tracked_elements(), _predicted_elements(), @@ -194,8 +191,7 @@ autoframing_instance::autoframing_instance(obs_data_t* data, obs_source_t* self) _input->render(1, 1); // Preallocate the RT on the driver and GPU. // Load the required effect. - _standard_effect = - std::make_shared<::streamfx::obs::gs::effect>(::streamfx::data_file_path("effects/standard.effect")); + _standard_effect = std::make_shared<::streamfx::obs::gs::effect>(::streamfx::data_file_path("effects/standard.effect")); // Create the Vertex Buffer for rendering. _vb = std::make_shared<::streamfx::obs::gs::vertex_buffer>(uint32_t{4}, uint8_t{1}); @@ -253,10 +249,8 @@ void autoframing_instance::update(obs_data_t* data) _motion_smoothing_kalman_mnc = streamfx::util::math::lerp(0.001f, 1000.0f, _motion_smoothing); for (auto kv : _predicted_elements) { // Regenerate filters. 
- kv.second->filter_pos_x = {_frame_stability_kalman, _motion_smoothing_kalman_mnc, ST_KALMAN_EEC, - kv.second->filter_pos_x.get()}; - kv.second->filter_pos_y = {_frame_stability_kalman, _motion_smoothing_kalman_mnc, ST_KALMAN_EEC, - kv.second->filter_pos_y.get()}; + kv.second->filter_pos_x = {_frame_stability_kalman, _motion_smoothing_kalman_mnc, ST_KALMAN_EEC, kv.second->filter_pos_x.get()}; + kv.second->filter_pos_y = {_frame_stability_kalman, _motion_smoothing_kalman_mnc, ST_KALMAN_EEC, kv.second->filter_pos_y.get()}; } // Framing @@ -418,13 +412,9 @@ void autoframing_instance::video_tick(float_t seconds) _out_size = _size; if (_frame_aspect_ratio > 0.0) { if (width > height) { - _out_size.first = - static_cast(std::lroundf(static_cast(_out_size.second) * _frame_aspect_ratio), 0, - std::numeric_limits::max()); + _out_size.first = static_cast(std::lroundf(static_cast(_out_size.second) * _frame_aspect_ratio), 0, std::numeric_limits::max()); } else { - _out_size.second = - static_cast(std::lroundf(static_cast(_out_size.first) * _frame_aspect_ratio), 0, - std::numeric_limits::max()); + _out_size.second = static_cast(std::lroundf(static_cast(_out_size.first) * _frame_aspect_ratio), 0, std::numeric_limits::max()); } } } @@ -459,8 +449,7 @@ void autoframing_instance::video_render(gs_effect_t* effect) #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG ::streamfx::obs::gs::debug_marker profiler0{::streamfx::obs::gs::debug_color_source, "StreamFX Auto-Framing"}; - ::streamfx::obs::gs::debug_marker profiler0_0{::streamfx::obs::gs::debug_color_gray, "'%s' on '%s'", - obs_source_get_name(_self), obs_source_get_name(parent)}; + ::streamfx::obs::gs::debug_marker profiler0_0{::streamfx::obs::gs::debug_color_gray, "'%s' on '%s'", obs_source_get_name(_self), obs_source_get_name(parent)}; #endif if (_dirty) { @@ -528,43 +517,29 @@ void autoframing_instance::video_render(gs_effect_t* effect) for (auto kv : _predicted_elements) { // Tracked Area (Red) - _gfx_debug->draw_rectangle(kv.first->pos.x - kv.first->size.x / 2.f, - kv.first->pos.y - kv.first->size.y / 2.f, kv.first->size.x, kv.first->size.y, - true, 0x7E0000FF); + _gfx_debug->draw_rectangle(kv.first->pos.x - kv.first->size.x / 2.f, kv.first->pos.y - kv.first->size.y / 2.f, kv.first->size.x, kv.first->size.y, true, 0x7E0000FF); // Velocity Arrow (Black) - _gfx_debug->draw_arrow(kv.first->pos.x, kv.first->pos.y, kv.first->pos.x + kv.first->vel.x, - kv.first->pos.y + kv.first->vel.y, 0., 0x7E000000); + _gfx_debug->draw_arrow(kv.first->pos.x, kv.first->pos.y, kv.first->pos.x + kv.first->vel.x, kv.first->pos.y + kv.first->vel.y, 0., 0x7E000000); // Predicted Area (Orange) - _gfx_debug->draw_rectangle(kv.second->mp_pos.x - kv.first->size.x / 2.f, - kv.second->mp_pos.y - kv.first->size.y / 2.f, kv.first->size.x, - kv.first->size.y, true, 0x7E007EFF); + _gfx_debug->draw_rectangle(kv.second->mp_pos.x - kv.first->size.x / 2.f, kv.second->mp_pos.y - kv.first->size.y / 2.f, kv.first->size.x, kv.first->size.y, true, 0x7E007EFF); // Filtered Area (Yellow) - _gfx_debug->draw_rectangle(kv.second->filter_pos_x.get() - kv.first->size.x / 2.f, - kv.second->filter_pos_y.get() - kv.first->size.y / 2.f, kv.first->size.x, - kv.first->size.y, true, 0x7E00FFFF); + _gfx_debug->draw_rectangle(kv.second->filter_pos_x.get() - kv.first->size.x / 2.f, kv.second->filter_pos_y.get() - kv.first->size.y / 2.f, kv.first->size.x, kv.first->size.y, true, 0x7E00FFFF); // Offset Filtered Area (Blue) - _gfx_debug->draw_rectangle(kv.second->offset_pos.x - kv.first->size.x 
/ 2.f, - kv.second->offset_pos.y - kv.first->size.y / 2.f, kv.first->size.x, - kv.first->size.y, true, 0x7EFF0000); + _gfx_debug->draw_rectangle(kv.second->offset_pos.x - kv.first->size.x / 2.f, kv.second->offset_pos.y - kv.first->size.y / 2.f, kv.first->size.x, kv.first->size.y, true, 0x7EFF0000); // Padded Offset Filtered Area (Cyan) - _gfx_debug->draw_rectangle(kv.second->offset_pos.x - kv.second->pad_size.x / 2.f, - kv.second->offset_pos.y - kv.second->pad_size.y / 2.f, kv.second->pad_size.x, - kv.second->pad_size.y, true, 0x7EFFFF00); + _gfx_debug->draw_rectangle(kv.second->offset_pos.x - kv.second->pad_size.x / 2.f, kv.second->offset_pos.y - kv.second->pad_size.y / 2.f, kv.second->pad_size.x, kv.second->pad_size.y, true, 0x7EFFFF00); // Aspect-Ratio-Corrected Padded Offset Filtered Area (Green) - _gfx_debug->draw_rectangle(kv.second->offset_pos.x - kv.second->aspected_size.x / 2.f, - kv.second->offset_pos.y - kv.second->aspected_size.y / 2.f, - kv.second->aspected_size.x, kv.second->aspected_size.y, true, 0x7E00FF00); + _gfx_debug->draw_rectangle(kv.second->offset_pos.x - kv.second->aspected_size.x / 2.f, kv.second->offset_pos.y - kv.second->aspected_size.y / 2.f, kv.second->aspected_size.x, kv.second->aspected_size.y, true, 0x7E00FF00); } // Final Region (White) - _gfx_debug->draw_rectangle(_frame_pos.x - _frame_size.x / 2.f, _frame_pos.y - _frame_size.y / 2.f, - _frame_size.x, _frame_size.y, true, 0x7EFFFFFF); + _gfx_debug->draw_rectangle(_frame_pos.x - _frame_size.x / 2.f, _frame_pos.y - _frame_size.y / 2.f, _frame_size.x, _frame_size.y, true, 0x7EFFFFFF); } else { float x0 = (_frame_pos.x - _frame_size.x / 2.f) / static_cast(_size.first); float x1 = (_frame_pos.x + _frame_size.x / 2.f) / static_cast(_size.first); @@ -606,8 +581,7 @@ void autoframing_instance::video_render(gs_effect_t* effect) gs_draw(GS_TRISTRIP, 0, 4); } } else { - gs_effect_set_texture(gs_effect_get_param_by_name(effect, "image"), - _input->get_texture()->get_object()); + gs_effect_set_texture(gs_effect_get_param_by_name(effect, "image"), _input->get_texture()->get_object()); while (gs_effect_loop(effect, "Draw")) { gs_draw(GS_TRISTRIP, 0, 4); @@ -657,10 +631,8 @@ void streamfx::filter::autoframing::autoframing_instance::tracking_tick(float se if (iter == _predicted_elements.end()) { pred = std::make_shared(); _predicted_elements.insert_or_assign(trck, pred); - pred->filter_pos_x = {_motion_smoothing_kalman_pnc, _motion_smoothing_kalman_mnc, ST_KALMAN_EEC, - trck->pos.x}; - pred->filter_pos_y = {_motion_smoothing_kalman_pnc, _motion_smoothing_kalman_mnc, ST_KALMAN_EEC, - trck->pos.y}; + pred->filter_pos_x = {_motion_smoothing_kalman_pnc, _motion_smoothing_kalman_mnc, ST_KALMAN_EEC, trck->pos.x}; + pred->filter_pos_y = {_motion_smoothing_kalman_pnc, _motion_smoothing_kalman_mnc, ST_KALMAN_EEC, trck->pos.y}; } else { pred = iter->second; } @@ -800,9 +772,7 @@ void streamfx::filter::autoframing::autoframing_instance::tracking_tick(float se } { // Aspect Ratio correction is a three step process: - float aspect = _frame_aspect_ratio > 0. - ? _frame_aspect_ratio - : (static_cast(_size.first) / static_cast(_size.second)); + float aspect = _frame_aspect_ratio > 0. ? _frame_aspect_ratio : (static_cast(_size.first) / static_cast(_size.second)); { // 1. Adjust aspect ratio so that all elements end up contained. float frame_aspect = _frame_size.x / _frame_size.y; @@ -816,12 +786,12 @@ void streamfx::filter::autoframing::autoframing_instance::tracking_tick(float se // 2. 
Limit the size of the frame to the allowed region, and adjust it so it's inside the frame. // This will move the center, which might not be a wanted side effect. vec4 rect; - rect.x = std::clamp(_frame_pos.x - _frame_size.x / 2.f, 0.f, static_cast(_size.first)); - rect.z = std::clamp(_frame_pos.x + _frame_size.x / 2.f, 0.f, static_cast(_size.first)); - rect.y = std::clamp(_frame_pos.y - _frame_size.y / 2.f, 0.f, static_cast(_size.second)); - rect.w = std::clamp(_frame_pos.y + _frame_size.y / 2.f, 0.f, static_cast(_size.second)); - _frame_pos.x = (rect.x + rect.z) / 2.f; - _frame_pos.y = (rect.y + rect.w) / 2.f; + rect.x = std::clamp(_frame_pos.x - _frame_size.x / 2.f, 0.f, static_cast(_size.first)); + rect.z = std::clamp(_frame_pos.x + _frame_size.x / 2.f, 0.f, static_cast(_size.first)); + rect.y = std::clamp(_frame_pos.y - _frame_size.y / 2.f, 0.f, static_cast(_size.second)); + rect.w = std::clamp(_frame_pos.y + _frame_size.y / 2.f, 0.f, static_cast(_size.second)); + _frame_pos.x = (rect.x + rect.z) / 2.f; + _frame_pos.y = (rect.y + rect.w) / 2.f; _frame_size.x = (rect.z - rect.x); _frame_size.y = (rect.w - rect.y); @@ -858,8 +828,7 @@ void streamfx::filter::autoframing::autoframing_instance::switch_provider(tracki // - Doesn't guarantee that the task is properly killed off. // Log information. - D_LOG_INFO("Instance '%s' is switching provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(_provider), - cstring(provider)); + D_LOG_INFO("Instance '%s' is switching provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(_provider), cstring(provider)); // If there is an ongoing task to switch provider, cancel it. if (_provider_task) { @@ -879,8 +848,7 @@ void streamfx::filter::autoframing::autoframing_instance::switch_provider(tracki _provider = provider; // Then spawn a new task to switch provider. - _provider_task = streamfx::threadpool()->push( - std::bind(&autoframing_instance::task_switch_provider, this, std::placeholders::_1), spd); + _provider_task = streamfx::threadpool()->push(std::bind(&autoframing_instance::task_switch_provider, this, std::placeholders::_1), spd); } void streamfx::filter::autoframing::autoframing_instance::task_switch_provider(util::threadpool::task_data_t data) @@ -917,8 +885,7 @@ void streamfx::filter::autoframing::autoframing_instance::task_switch_provider(u } // Log information. - D_LOG_INFO("Instance '%s' switched provider from '%s' to '%s'.", obs_source_get_name(_self), - cstring(spd->provider), cstring(_provider)); + D_LOG_INFO("Instance '%s' switched provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(spd->provider), cstring(_provider)); _provider_ready = true; } catch (std::exception const& ex) { @@ -945,8 +912,7 @@ void streamfx::filter::autoframing::autoframing_instance::nvar_facedetection_pro } // Frames may not move more than this distance. - float max_dst = - sqrtf(static_cast(_size.first * _size.first) + static_cast(_size.second * _size.second)) * 0.667f; + float max_dst = sqrtf(static_cast(_size.first * _size.first) + static_cast(_size.second * _size.second)) * 0.667f; max_dst *= 1.f / (1.f - _track_frequency); // Fine-tune this? // Process the current frame (if requested). 
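(Editor's illustrative sketch, not part of the patch: the clamp-and-recenter step in the hunk above keeps a center+size frame inside the source bounds by clamping the rectangle edges and then deriving a new center and size from the clamped edges. The `frame` struct and `clamp_frame` name below are hypothetical simplifications of StreamFX's vec2/vec4 usage.)

	#include <algorithm>

	struct frame { float cx, cy, w, h; }; // center position and size (hypothetical helper type)

	// Clamp a centered rectangle to [0, width] x [0, height], then recompute the
	// center and size from the clamped edges. As noted in the patch comment, this
	// can move the center as a side effect.
	frame clamp_frame(frame f, float width, float height)
	{
		float x0 = std::clamp(f.cx - f.w / 2.f, 0.f, width);
		float x1 = std::clamp(f.cx + f.w / 2.f, 0.f, width);
		float y0 = std::clamp(f.cy - f.h / 2.f, 0.f, height);
		float y1 = std::clamp(f.cy + f.h / 2.f, 0.f, height);
		return {(x0 + x1) / 2.f, (y0 + y1) / 2.f, x1 - x0, y1 - y0};
	}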
@@ -1133,8 +1099,7 @@ obs_properties_t* autoframing_factory::get_properties2(autoframing_instance* dat #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), autoframing_factory::on_manual_open, - nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), autoframing_factory::on_manual_open, nullptr); } #endif @@ -1143,18 +1108,14 @@ obs_properties_t* autoframing_factory::get_properties2(autoframing_instance* dat obs_properties_add_group(pr, ST_I18N_TRACKING, D_TRANSLATE(ST_I18N_TRACKING), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_TRACKING_MODE, D_TRANSLATE(ST_I18N_TRACKING_MODE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_TRACKING_MODE, D_TRANSLATE(ST_I18N_TRACKING_MODE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback(p, modified_provider); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_FRAMING_MODE_SOLO), - static_cast(tracking_mode::SOLO)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_FRAMING_MODE_GROUP), - static_cast(tracking_mode::GROUP)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_FRAMING_MODE_SOLO), static_cast(tracking_mode::SOLO)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_FRAMING_MODE_GROUP), static_cast(tracking_mode::GROUP)); } { - auto p = obs_properties_add_text(grp, ST_KEY_TRACKING_FREQUENCY, D_TRANSLATE(ST_I18N_TRACKING_FREQUENCY), - OBS_TEXT_DEFAULT); + auto p = obs_properties_add_text(grp, ST_KEY_TRACKING_FREQUENCY, D_TRANSLATE(ST_I18N_TRACKING_FREQUENCY), OBS_TEXT_DEFAULT); } } @@ -1163,14 +1124,12 @@ obs_properties_t* autoframing_factory::get_properties2(autoframing_instance* dat obs_properties_add_group(pr, ST_I18N_MOTION, D_TRANSLATE(ST_I18N_MOTION), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_float_slider(grp, ST_KEY_MOTION_SMOOTHING, - D_TRANSLATE(ST_I18N_MOTION_SMOOTHING), 0.0, 100.0, 0.01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_MOTION_SMOOTHING, D_TRANSLATE(ST_I18N_MOTION_SMOOTHING), 0.0, 100.0, 0.01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_MOTION_PREDICTION, - D_TRANSLATE(ST_I18N_MOTION_PREDICTION), 0.0, 500.0, 0.01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_MOTION_PREDICTION, D_TRANSLATE(ST_I18N_MOTION_PREDICTION), 0.0, 500.0, 0.01); obs_property_float_set_suffix(p, " %"); } } @@ -1180,15 +1139,13 @@ obs_properties_t* autoframing_factory::get_properties2(autoframing_instance* dat obs_properties_add_group(pr, ST_I18N_FRAMING, D_TRANSLATE(ST_I18N_FRAMING), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_float_slider(grp, ST_KEY_FRAMING_STABILITY, - D_TRANSLATE(ST_I18N_FRAMING_STABILITY), 0.0, 100.0, 0.01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_FRAMING_STABILITY, D_TRANSLATE(ST_I18N_FRAMING_STABILITY), 0.0, 100.0, 0.01); obs_property_float_set_suffix(p, " %"); } { auto grp2 = obs_properties_create(); - obs_properties_add_group(grp, ST_KEY_FRAMING_PADDING, D_TRANSLATE(ST_I18N_FRAMING_PADDING), - OBS_GROUP_NORMAL, grp2); + obs_properties_add_group(grp, ST_KEY_FRAMING_PADDING, D_TRANSLATE(ST_I18N_FRAMING_PADDING), OBS_GROUP_NORMAL, grp2); { auto p = obs_properties_add_text(grp2, ST_KEY_FRAMING_PADDING ".X", "X", OBS_TEXT_DEFAULT); @@ -1200,8 +1157,7 @@ obs_properties_t* autoframing_factory::get_properties2(autoframing_instance* dat { auto grp2 = obs_properties_create(); - obs_properties_add_group(grp, ST_KEY_FRAMING_OFFSET, 
D_TRANSLATE(ST_I18N_FRAMING_OFFSET), OBS_GROUP_NORMAL, - grp2); + obs_properties_add_group(grp, ST_KEY_FRAMING_OFFSET, D_TRANSLATE(ST_I18N_FRAMING_OFFSET), OBS_GROUP_NORMAL, grp2); { auto p = obs_properties_add_text(grp2, ST_KEY_FRAMING_OFFSET ".X", "X", OBS_TEXT_DEFAULT); @@ -1212,8 +1168,7 @@ obs_properties_t* autoframing_factory::get_properties2(autoframing_instance* dat } { - auto p = obs_properties_add_list(grp, ST_KEY_FRAMING_ASPECTRATIO, D_TRANSLATE(ST_I18N_FRAMING_ASPECTRATIO), - OBS_COMBO_TYPE_EDITABLE, OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_FRAMING_ASPECTRATIO, D_TRANSLATE(ST_I18N_FRAMING_ASPECTRATIO), OBS_COMBO_TYPE_EDITABLE, OBS_COMBO_FORMAT_STRING); obs_property_list_add_string(p, "None", ""); obs_property_list_add_string(p, "1:1", "1:1"); @@ -1255,14 +1210,11 @@ obs_properties_t* autoframing_factory::get_properties2(autoframing_instance* dat obs_properties_add_group(pr, S_ADVANCED, D_TRANSLATE(S_ADVANCED), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_ADVANCED_PROVIDER, D_TRANSLATE(ST_I18N_ADVANCED_PROVIDER), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_ADVANCED_PROVIDER, D_TRANSLATE(ST_I18N_ADVANCED_PROVIDER), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback(p, modified_provider); - obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), - static_cast(tracking_provider::AUTOMATIC)); + obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), static_cast(tracking_provider::AUTOMATIC)); #ifdef ENABLE_FILTER_AUTOFRAMING_NVIDIA - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_ADVANCED_PROVIDER_NVIDIA_FACEDETECTION), - static_cast(tracking_provider::NVIDIA_FACEDETECTION)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_ADVANCED_PROVIDER_NVIDIA_FACEDETECTION), static_cast(tracking_provider::NVIDIA_FACEDETECTION)); #endif } @@ -1273,8 +1225,7 @@ obs_properties_t* autoframing_factory::get_properties2(autoframing_instance* dat } #ifdef ENABLE_FRONTEND -bool streamfx::filter::autoframing::autoframing_factory::on_manual_open(obs_properties_t* props, - obs_property_t* property, void* data) +bool streamfx::filter::autoframing::autoframing_factory::on_manual_open(obs_properties_t* props, obs_property_t* property, void* data) { streamfx::open_url(HELP_URL); return false; diff --git a/source/filters/filter-autoframing.hpp b/source/filters/filter-autoframing.hpp index 87fab045..1dba7b6c 100644 --- a/source/filters/filter-autoframing.hpp +++ b/source/filters/filter-autoframing.hpp @@ -144,8 +144,7 @@ namespace streamfx::filter::autoframing { #endif }; - class autoframing_factory : public obs::source_factory { + class autoframing_factory : public obs::source_factory { #ifdef ENABLE_FILTER_AUTOFRAMING_NVIDIA bool _nvidia_available; std::shared_ptr<::streamfx::nvidia::cuda::obs> _nvcuda; diff --git a/source/filters/filter-blur.cpp b/source/filters/filter-blur.cpp index 7d43b4f9..64b9f117 100644 --- a/source/filters/filter-blur.cpp +++ b/source/filters/filter-blur.cpp @@ -111,11 +111,7 @@ struct local_blur_subtype_t { }; static std::map list_of_types = { - {"box", {&::streamfx::gfx::blur::box_factory::get, S_BLUR_TYPE_BOX}}, - {"box_linear", {&::streamfx::gfx::blur::box_linear_factory::get, S_BLUR_TYPE_BOX_LINEAR}}, - {"gaussian", {&::streamfx::gfx::blur::gaussian_factory::get, S_BLUR_TYPE_GAUSSIAN}}, - {"gaussian_linear", {&::streamfx::gfx::blur::gaussian_linear_factory::get, S_BLUR_TYPE_GAUSSIAN_LINEAR}}, - {"dual_filtering", 
{&::streamfx::gfx::blur::dual_filtering_factory::get, S_BLUR_TYPE_DUALFILTERING}}, + {"box", {&::streamfx::gfx::blur::box_factory::get, S_BLUR_TYPE_BOX}}, {"box_linear", {&::streamfx::gfx::blur::box_linear_factory::get, S_BLUR_TYPE_BOX_LINEAR}}, {"gaussian", {&::streamfx::gfx::blur::gaussian_factory::get, S_BLUR_TYPE_GAUSSIAN}}, {"gaussian_linear", {&::streamfx::gfx::blur::gaussian_linear_factory::get, S_BLUR_TYPE_GAUSSIAN_LINEAR}}, {"dual_filtering", {&::streamfx::gfx::blur::dual_filtering_factory::get, S_BLUR_TYPE_DUALFILTERING}}, }; static std::map list_of_subtypes = { {"area", {::streamfx::gfx::blur::type::Area, S_BLUR_SUBTYPE_AREA}}, @@ -124,9 +120,7 @@ static std::map list_of_subtypes = { {"zoom", {::streamfx::gfx::blur::type::Zoom, S_BLUR_SUBTYPE_ZOOM}}, }; -blur_instance::blur_instance(obs_data_t* settings, obs_source_t* self) - : obs::source_instance(settings, self), _gfx_util(::streamfx::gfx::util::get()), _source_rendered(false), - _output_rendered(false) +blur_instance::blur_instance(obs_data_t* settings, obs_source_t* self) : obs::source_instance(settings, self), _gfx_util(::streamfx::gfx::util::get()), _source_rendered(false), _output_rendered(false) { { auto gctx = streamfx::obs::gs::context(); @@ -151,8 +145,7 @@ blur_instance::blur_instance(obs_data_t* settings, obs_source_t* self) blur_instance::~blur_instance() {} -bool blur_instance::apply_mask_parameters(streamfx::obs::gs::effect effect, gs_texture_t* original_texture, - gs_texture_t* blurred_texture) +bool blur_instance::apply_mask_parameters(streamfx::obs::gs::effect effect, gs_texture_t* original_texture, gs_texture_t* blurred_texture) { if (effect.has_parameter("image_orig")) { effect.get_parameter("image_orig").set_texture(original_texture); @@ -297,14 +290,13 @@ void blur_instance::update(obs_data_t* settings) _mask.type = static_cast(obs_data_get_int(settings, ST_KEY_MASK_TYPE)); switch (_mask.type) { case mask_type::Region: - _mask.region.left = float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_LEFT) / 100.0); - _mask.region.top = float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_TOP) / 100.0); - _mask.region.right = 1.0f - float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_RIGHT) / 100.0); - _mask.region.bottom = 1.0f - float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_BOTTOM) / 100.0); - _mask.region.feather = float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_FEATHER) / 100.0); - _mask.region.feather_shift = - float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_FEATHER_SHIFT) / 100.0); - _mask.region.invert = obs_data_get_bool(settings, ST_KEY_MASK_REGION_INVERT); + _mask.region.left = float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_LEFT) / 100.0); + _mask.region.top = float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_TOP) / 100.0); + _mask.region.right = 1.0f - float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_RIGHT) / 100.0); + _mask.region.bottom = 1.0f - float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_BOTTOM) / 100.0); + _mask.region.feather = float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_FEATHER) / 100.0); + _mask.region.feather_shift = float_t(obs_data_get_double(settings, ST_KEY_MASK_REGION_FEATHER_SHIFT) / 100.0); + _mask.region.invert = obs_data_get_bool(settings, ST_KEY_MASK_REGION_INVERT); break; case mask_type::Image: _mask.image.path = obs_data_get_string(settings, ST_KEY_MASK_IMAGE); @@ -335,13 +327,11 @@ void blur_instance::video_tick(float) } else { _blur->set_step_scale(1.0, 1.0); } - if ((_blur->get_type() == 
::streamfx::gfx::blur::type::Directional) - || (_blur->get_type() == ::streamfx::gfx::blur::type::Rotational)) { + if ((_blur->get_type() == ::streamfx::gfx::blur::type::Directional) || (_blur->get_type() == ::streamfx::gfx::blur::type::Rotational)) { auto obj = std::dynamic_pointer_cast<::streamfx::gfx::blur::base_angle>(_blur); obj->set_angle(_blur_angle); } - if ((_blur->get_type() == ::streamfx::gfx::blur::type::Zoom) - || (_blur->get_type() == ::streamfx::gfx::blur::type::Rotational)) { + if ((_blur->get_type() == ::streamfx::gfx::blur::type::Zoom) || (_blur->get_type() == ::streamfx::gfx::blur::type::Rotational)) { auto obj = std::dynamic_pointer_cast<::streamfx::gfx::blur::base_center>(_blur); obj->set_center(_blur_center.first, _blur_center.second); } @@ -354,20 +344,17 @@ void blur_instance::video_tick(float) _mask.image.texture = std::make_shared(_mask.image.path); _mask.image.path_old = _mask.image.path; } catch (...) { - DLOG_ERROR(" Instance '%s' failed to load image '%s'.", obs_source_get_name(_self), - _mask.image.path.c_str()); + DLOG_ERROR(" Instance '%s' failed to load image '%s'.", obs_source_get_name(_self), _mask.image.path.c_str()); } } } else if (_mask.type == mask_type::Source) { if (_mask.source.name_old != _mask.source.name) { try { - _mask.source.source_texture = std::make_shared( - ::streamfx::obs::source{_mask.source.name}, ::streamfx::obs::source{_self, false}); - _mask.source.is_scene = (obs_scene_from_source(_mask.source.source_texture->get_object()) != nullptr); - _mask.source.name_old = _mask.source.name; + _mask.source.source_texture = std::make_shared(::streamfx::obs::source{_mask.source.name}, ::streamfx::obs::source{_self, false}); + _mask.source.is_scene = (obs_scene_from_source(_mask.source.source_texture->get_object()) != nullptr); + _mask.source.name_old = _mask.source.name; } catch (...) 
{ - DLOG_ERROR(" Instance '%s' failed to grab source '%s'.", obs_source_get_name(_self), - _mask.source.name.c_str()); + DLOG_ERROR(" Instance '%s' failed to grab source '%s'.", obs_source_get_name(_self), _mask.source.name.c_str()); } } } @@ -391,8 +378,7 @@ void blur_instance::video_render(gs_effect_t* effect) } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Blur '%s'", - obs_source_get_name(_self)}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Blur '%s'", obs_source_get_name(_self)}; #endif if (!_source_rendered) { @@ -517,8 +503,7 @@ void blur_instance::video_render(gs_effect_t* effect) } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdm{streamfx::obs::gs::debug_color_capture, "Capture '%s'", - obs_source_get_name(_mask.source.source_texture->get_object())}; + streamfx::obs::gs::debug_marker gdm{streamfx::obs::gs::debug_color_capture, "Capture '%s'", obs_source_get_name(_mask.source.source_texture->get_object())}; #endif this->_mask.source.texture = this->_mask.source.source_texture->render(source_width, source_height); @@ -697,25 +682,19 @@ bool modified_properties(void*, obs_properties_t* props, obs_property* prop, obs // Blur Sub-Type { - bool has_angle_support = (subtype_found->second.type == ::streamfx::gfx::blur::type::Directional) - || (subtype_found->second.type == ::streamfx::gfx::blur::type::Rotational); - bool has_center_support = (subtype_found->second.type == ::streamfx::gfx::blur::type::Rotational) - || (subtype_found->second.type == ::streamfx::gfx::blur::type::Zoom); + bool has_angle_support = (subtype_found->second.type == ::streamfx::gfx::blur::type::Directional) || (subtype_found->second.type == ::streamfx::gfx::blur::type::Rotational); + bool has_center_support = (subtype_found->second.type == ::streamfx::gfx::blur::type::Rotational) || (subtype_found->second.type == ::streamfx::gfx::blur::type::Zoom); bool has_stepscale_support = type_found->second.fn().is_step_scale_supported(subtype_found->second.type); bool show_scaling = obs_data_get_bool(settings, ST_KEY_STEPSCALE) && has_stepscale_support; /// Size p = obs_properties_get(props, ST_KEY_SIZE); - obs_property_float_set_limits(p, type_found->second.fn().get_min_size(subtype_found->second.type), - type_found->second.fn().get_max_size(subtype_found->second.type), - type_found->second.fn().get_step_size(subtype_found->second.type)); + obs_property_float_set_limits(p, type_found->second.fn().get_min_size(subtype_found->second.type), type_found->second.fn().get_max_size(subtype_found->second.type), type_found->second.fn().get_step_size(subtype_found->second.type)); /// Angle p = obs_properties_get(props, ST_KEY_ANGLE); obs_property_set_visible(p, has_angle_support); - obs_property_float_set_limits(p, type_found->second.fn().get_min_angle(subtype_found->second.type), - type_found->second.fn().get_max_angle(subtype_found->second.type), - type_found->second.fn().get_step_angle(subtype_found->second.type)); + obs_property_float_set_limits(p, type_found->second.fn().get_min_angle(subtype_found->second.type), type_found->second.fn().get_max_angle(subtype_found->second.type), type_found->second.fn().get_step_angle(subtype_found->second.type)); /// Center, Radius obs_property_set_visible(obs_properties_get(props, ST_KEY_CENTER_X), has_center_support); @@ -725,14 +704,10 @@ bool modified_properties(void*, obs_properties_t* props, 
obs_property* prop, obs obs_property_set_visible(obs_properties_get(props, ST_KEY_STEPSCALE), has_stepscale_support); p = obs_properties_get(props, ST_KEY_STEPSCALE_X); obs_property_set_visible(p, show_scaling); - obs_property_float_set_limits(p, type_found->second.fn().get_min_step_scale_x(subtype_found->second.type), - type_found->second.fn().get_max_step_scale_x(subtype_found->second.type), - type_found->second.fn().get_step_step_scale_x(subtype_found->second.type)); + obs_property_float_set_limits(p, type_found->second.fn().get_min_step_scale_x(subtype_found->second.type), type_found->second.fn().get_max_step_scale_x(subtype_found->second.type), type_found->second.fn().get_step_step_scale_x(subtype_found->second.type)); p = obs_properties_get(props, ST_KEY_STEPSCALE_Y); obs_property_set_visible(p, show_scaling); - obs_property_float_set_limits(p, type_found->second.fn().get_min_step_scale_x(subtype_found->second.type), - type_found->second.fn().get_max_step_scale_x(subtype_found->second.type), - type_found->second.fn().get_step_step_scale_x(subtype_found->second.type)); + obs_property_float_set_limits(p, type_found->second.fn().get_min_step_scale_x(subtype_found->second.type), type_found->second.fn().get_max_step_scale_x(subtype_found->second.type), type_found->second.fn().get_step_step_scale_x(subtype_found->second.type)); } { // Masking @@ -771,15 +746,13 @@ obs_properties_t* blur_factory::get_properties2(blur_instance* data) #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::filter::blur::blur_factory::on_manual_open, nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::filter::blur::blur_factory::on_manual_open, nullptr); } #endif // Blur Type and Sub-Type { - p = obs_properties_add_list(pr, ST_KEY_TYPE, D_TRANSLATE(ST_I18N_TYPE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + p = obs_properties_add_list(pr, ST_KEY_TYPE, D_TRANSLATE(ST_I18N_TYPE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_set_modified_callback2(p, modified_properties, this); obs_property_list_add_string(p, D_TRANSLATE(S_BLUR_TYPE_BOX), "box"); obs_property_list_add_string(p, D_TRANSLATE(S_BLUR_TYPE_BOX_LINEAR), "box_linear"); @@ -787,8 +760,7 @@ obs_properties_t* blur_factory::get_properties2(blur_instance* data) obs_property_list_add_string(p, D_TRANSLATE(S_BLUR_TYPE_GAUSSIAN_LINEAR), "gaussian_linear"); obs_property_list_add_string(p, D_TRANSLATE(S_BLUR_TYPE_DUALFILTERING), "dual_filtering"); - p = obs_properties_add_list(pr, ST_KEY_SUBTYPE, D_TRANSLATE(ST_I18N_SUBTYPE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + p = obs_properties_add_list(pr, ST_KEY_SUBTYPE, D_TRANSLATE(ST_I18N_SUBTYPE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_set_modified_callback2(p, modified_properties, this); obs_property_list_add_string(p, D_TRANSLATE(S_BLUR_SUBTYPE_AREA), "area"); obs_property_list_add_string(p, D_TRANSLATE(S_BLUR_SUBTYPE_DIRECTIONAL), "directional"); @@ -805,47 +777,35 @@ obs_properties_t* blur_factory::get_properties2(blur_instance* data) p = obs_properties_add_bool(pr, ST_KEY_STEPSCALE, D_TRANSLATE(ST_I18N_STEPSCALE)); obs_property_set_modified_callback2(p, modified_properties, this); - p = obs_properties_add_float_slider(pr, ST_KEY_STEPSCALE_X, D_TRANSLATE(ST_I18N_STEPSCALE_X), 0.0, 1000.0, - 0.01); - p = obs_properties_add_float_slider(pr, ST_KEY_STEPSCALE_Y, D_TRANSLATE(ST_I18N_STEPSCALE_Y), 0.0, 1000.0, - 0.01); + p = obs_properties_add_float_slider(pr, 
ST_KEY_STEPSCALE_X, D_TRANSLATE(ST_I18N_STEPSCALE_X), 0.0, 1000.0, 0.01); + p = obs_properties_add_float_slider(pr, ST_KEY_STEPSCALE_Y, D_TRANSLATE(ST_I18N_STEPSCALE_Y), 0.0, 1000.0, 0.01); } // Masking { p = obs_properties_add_bool(pr, ST_KEY_MASK, D_TRANSLATE(ST_I18N_MASK)); obs_property_set_modified_callback2(p, modified_properties, this); - p = obs_properties_add_list(pr, ST_KEY_MASK_TYPE, D_TRANSLATE(ST_I18N_MASK_TYPE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + p = obs_properties_add_list(pr, ST_KEY_MASK_TYPE, D_TRANSLATE(ST_I18N_MASK_TYPE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback2(p, modified_properties, this); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_MASK_TYPE_REGION), static_cast(mask_type::Region)); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_MASK_TYPE_IMAGE), static_cast(mask_type::Image)); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_MASK_TYPE_SOURCE), static_cast(mask_type::Source)); /// Region - p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_LEFT, D_TRANSLATE(ST_I18N_MASK_REGION_LEFT), 0.0, - 100.0, 0.01); - p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_TOP, D_TRANSLATE(ST_I18N_MASK_REGION_TOP), 0.0, - 100.0, 0.01); - p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_RIGHT, D_TRANSLATE(ST_I18N_MASK_REGION_RIGHT), 0.0, - 100.0, 0.01); - p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_BOTTOM, D_TRANSLATE(ST_I18N_MASK_REGION_BOTTOM), 0.0, - 100.0, 0.01); - p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_FEATHER, D_TRANSLATE(ST_I18N_MASK_REGION_FEATHER), - 0.0, 50.0, 0.01); - p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_FEATHER_SHIFT, - D_TRANSLATE(ST_I18N_MASK_REGION_FEATHER_SHIFT), -100.0, 100.0, 0.01); + p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_LEFT, D_TRANSLATE(ST_I18N_MASK_REGION_LEFT), 0.0, 100.0, 0.01); + p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_TOP, D_TRANSLATE(ST_I18N_MASK_REGION_TOP), 0.0, 100.0, 0.01); + p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_RIGHT, D_TRANSLATE(ST_I18N_MASK_REGION_RIGHT), 0.0, 100.0, 0.01); + p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_BOTTOM, D_TRANSLATE(ST_I18N_MASK_REGION_BOTTOM), 0.0, 100.0, 0.01); + p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_FEATHER, D_TRANSLATE(ST_I18N_MASK_REGION_FEATHER), 0.0, 50.0, 0.01); + p = obs_properties_add_float_slider(pr, ST_KEY_MASK_REGION_FEATHER_SHIFT, D_TRANSLATE(ST_I18N_MASK_REGION_FEATHER_SHIFT), -100.0, 100.0, 0.01); p = obs_properties_add_bool(pr, ST_KEY_MASK_REGION_INVERT, D_TRANSLATE(ST_I18N_MASK_REGION_INVERT)); /// Image { - std::string filter = - translate_string("%s (%s);;* (*.*)", D_TRANSLATE(S_FILETYPE_IMAGES), S_FILEFILTERS_TEXTURE); + std::string filter = translate_string("%s (%s);;* (*.*)", D_TRANSLATE(S_FILETYPE_IMAGES), S_FILEFILTERS_TEXTURE); _translation_cache.push_back(filter); - p = obs_properties_add_path(pr, ST_KEY_MASK_IMAGE, D_TRANSLATE(ST_I18N_MASK_IMAGE), OBS_PATH_FILE, - _translation_cache.back().c_str(), nullptr); + p = obs_properties_add_path(pr, ST_KEY_MASK_IMAGE, D_TRANSLATE(ST_I18N_MASK_IMAGE), OBS_PATH_FILE, _translation_cache.back().c_str(), nullptr); } /// Source - p = obs_properties_add_list(pr, ST_KEY_MASK_SOURCE, D_TRANSLATE(ST_I18N_MASK_SOURCE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + p = obs_properties_add_list(pr, ST_KEY_MASK_SOURCE, D_TRANSLATE(ST_I18N_MASK_SOURCE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_list_add_string(p, "", 
""); obs::source_tracker::get()->enumerate( [&p](std::string name, ::streamfx::obs::source) { @@ -863,8 +823,7 @@ obs_properties_t* blur_factory::get_properties2(blur_instance* data) /// Shared p = obs_properties_add_color(pr, ST_KEY_MASK_COLOR, D_TRANSLATE(ST_I18N_MASK_COLOR)); p = obs_properties_add_float_slider(pr, ST_KEY_MASK_ALPHA, D_TRANSLATE(ST_I18N_MASK_ALPHA), 0.0, 100.0, 0.1); - p = obs_properties_add_float_slider(pr, ST_KEY_MASK_MULTIPLIER, D_TRANSLATE(ST_I18N_MASK_MULTIPLIER), 0.0, 10.0, - 0.01); + p = obs_properties_add_float_slider(pr, ST_KEY_MASK_MULTIPLIER, D_TRANSLATE(ST_I18N_MASK_MULTIPLIER), 0.0, 10.0, 0.01); } return pr; diff --git a/source/filters/filter-blur.hpp b/source/filters/filter-blur.hpp index 975bd978..692c10d4 100644 --- a/source/filters/filter-blur.hpp +++ b/source/filters/filter-blur.hpp @@ -97,8 +97,7 @@ namespace streamfx::filter::blur { virtual void video_render(gs_effect_t* effect) override; private: - bool apply_mask_parameters(streamfx::obs::gs::effect effect, gs_texture_t* original_texture, - gs_texture_t* blurred_texture); + bool apply_mask_parameters(streamfx::obs::gs::effect effect, gs_texture_t* original_texture, gs_texture_t* blurred_texture); }; class blur_factory : public obs::source_factory { diff --git a/source/filters/filter-color-grade.cpp b/source/filters/filter-color-grade.cpp index 0a19f89e..6cf3bfd7 100644 --- a/source/filters/filter-color-grade.cpp +++ b/source/filters/filter-color-grade.cpp @@ -113,12 +113,7 @@ static constexpr std::string_view HELP_URL = "https://github.com/Xaymar/obs-Stre color_grade_instance::~color_grade_instance() {} -color_grade_instance::color_grade_instance(obs_data_t* data, obs_source_t* self) - : obs::source_instance(data, self), _effect(), _gfx_util(::streamfx::gfx::util::get()), _lift(), _gamma(), _gain(), - _offset(), _tint_detection(), _tint_luma(), _tint_exponent(), _tint_low(), _tint_mid(), _tint_hig(), - _correction(), _lut_enabled(true), _lut_depth(), _ccache_rt(), _ccache_texture(), _ccache_fresh(false), - _lut_initialized(false), _lut_dirty(true), _lut_producer(), _lut_consumer(), _lut_rt(), _lut_texture(), - _cache_rt(), _cache_texture(), _cache_fresh(false) +color_grade_instance::color_grade_instance(obs_data_t* data, obs_source_t* self) : obs::source_instance(data, self), _effect(), _gfx_util(::streamfx::gfx::util::get()), _lift(), _gamma(), _gain(), _offset(), _tint_detection(), _tint_luma(), _tint_exponent(), _tint_low(), _tint_mid(), _tint_hig(), _correction(), _lut_enabled(true), _lut_depth(), _ccache_rt(), _ccache_texture(), _ccache_fresh(false), _lut_initialized(false), _lut_dirty(true), _lut_producer(), _lut_consumer(), _lut_rt(), _lut_texture(), _cache_rt(), _cache_texture(), _cache_fresh(false) { { auto gctx = streamfx::obs::gs::context(); @@ -357,8 +352,7 @@ void color_grade_instance::video_render(gs_effect_t* shader) } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Color Grading '%s'", - obs_source_get_name(_self)}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Color Grading '%s'", obs_source_get_name(_self)}; #endif // TODO: Optimize this once (https://github.com/obsproject/obs-studio/pull/4199) is merged. @@ -367,8 +361,7 @@ void color_grade_instance::video_render(gs_effect_t* shader) // 1. Capture the filter/source rendered above this. 
if (!_ccache_fresh || !_ccache_texture) { #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_cache, "Cache '%s'", - obs_source_get_name(target)}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_cache, "Cache '%s'", obs_source_get_name(target)}; #endif // If the input cache render target doesn't exist, create it. if (!_ccache_rt) { @@ -554,8 +547,7 @@ void color_grade_instance::video_render(gs_effect_t* shader) // Draw the render cache. while (gs_effect_loop(shader, "Draw")) { - gs_effect_set_texture(gs_effect_get_param_by_name(shader, "image"), - _cache_texture ? _cache_texture->get_object() : nullptr); + gs_effect_set_texture(gs_effect_get_param_by_name(shader, "image"), _cache_texture ? _cache_texture->get_object() : nullptr); gs_draw_sprite(nullptr, 0, width, height); } } @@ -623,8 +615,7 @@ obs_properties_t* color_grade_factory::get_properties2(color_grade_instance* dat #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::filter::color_grade::color_grade_factory::on_manual_open, nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::filter::color_grade::color_grade_factory::on_manual_open, nullptr); } #endif @@ -633,23 +624,19 @@ obs_properties_t* color_grade_factory::get_properties2(color_grade_instance* dat obs_properties_add_group(pr, ST_KEY_LIFT, D_TRANSLATE(ST_I18N_LIFT), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_float_slider(grp, ST_KEY_LIFT_(ST_RED), D_TRANSLATE(ST_I18N_LIFT_(ST_RED)), - -1000., 100., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_LIFT_(ST_RED), D_TRANSLATE(ST_I18N_LIFT_(ST_RED)), -1000., 100., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_LIFT_(ST_GREEN), D_TRANSLATE(ST_I18N_LIFT_(ST_GREEN)), - -1000., 100., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_LIFT_(ST_GREEN), D_TRANSLATE(ST_I18N_LIFT_(ST_GREEN)), -1000., 100., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_LIFT_(ST_BLUE), D_TRANSLATE(ST_I18N_LIFT_(ST_BLUE)), - -1000., 100., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_LIFT_(ST_BLUE), D_TRANSLATE(ST_I18N_LIFT_(ST_BLUE)), -1000., 100., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_LIFT_(ST_ALL), D_TRANSLATE(ST_I18N_LIFT_(ST_ALL)), - -1000., 100., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_LIFT_(ST_ALL), D_TRANSLATE(ST_I18N_LIFT_(ST_ALL)), -1000., 100., .01); obs_property_float_set_suffix(p, " %"); } } @@ -659,23 +646,19 @@ obs_properties_t* color_grade_factory::get_properties2(color_grade_instance* dat obs_properties_add_group(pr, ST_KEY_GAMMA, D_TRANSLATE(ST_I18N_GAMMA), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_float_slider(grp, ST_KEY_GAMMA_(ST_RED), D_TRANSLATE(ST_I18N_GAMMA_(ST_RED)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_GAMMA_(ST_RED), D_TRANSLATE(ST_I18N_GAMMA_(ST_RED)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_GAMMA_(ST_GREEN), - D_TRANSLATE(ST_I18N_GAMMA_(ST_GREEN)), -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_GAMMA_(ST_GREEN), D_TRANSLATE(ST_I18N_GAMMA_(ST_GREEN)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); 
} { - auto p = obs_properties_add_float_slider(grp, ST_KEY_GAMMA_(ST_BLUE), D_TRANSLATE(ST_I18N_GAMMA_(ST_BLUE)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_GAMMA_(ST_BLUE), D_TRANSLATE(ST_I18N_GAMMA_(ST_BLUE)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_GAMMA_(ST_ALL), D_TRANSLATE(ST_I18N_GAMMA_(ST_ALL)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_GAMMA_(ST_ALL), D_TRANSLATE(ST_I18N_GAMMA_(ST_ALL)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } } @@ -685,23 +668,19 @@ obs_properties_t* color_grade_factory::get_properties2(color_grade_instance* dat obs_properties_add_group(pr, ST_KEY_GAIN, D_TRANSLATE(ST_I18N_GAIN), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_float_slider(grp, ST_KEY_GAIN_(ST_RED), D_TRANSLATE(ST_I18N_GAIN_(ST_RED)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_GAIN_(ST_RED), D_TRANSLATE(ST_I18N_GAIN_(ST_RED)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_GAIN_(ST_GREEN), D_TRANSLATE(ST_I18N_GAIN_(ST_GREEN)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_GAIN_(ST_GREEN), D_TRANSLATE(ST_I18N_GAIN_(ST_GREEN)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_GAIN_(ST_BLUE), D_TRANSLATE(ST_I18N_GAIN_(ST_BLUE)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_GAIN_(ST_BLUE), D_TRANSLATE(ST_I18N_GAIN_(ST_BLUE)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_GAIN_(ST_ALL), D_TRANSLATE(ST_I18N_GAIN_(ST_ALL)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_GAIN_(ST_ALL), D_TRANSLATE(ST_I18N_GAIN_(ST_ALL)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } } @@ -711,23 +690,19 @@ obs_properties_t* color_grade_factory::get_properties2(color_grade_instance* dat obs_properties_add_group(pr, ST_KEY_OFFSET, D_TRANSLATE(ST_I18N_OFFSET), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_float_slider(grp, ST_KEY_OFFSET_(ST_RED), D_TRANSLATE(ST_I18N_OFFSET_(ST_RED)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_OFFSET_(ST_RED), D_TRANSLATE(ST_I18N_OFFSET_(ST_RED)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_OFFSET_(ST_GREEN), - D_TRANSLATE(ST_I18N_OFFSET_(ST_GREEN)), -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_OFFSET_(ST_GREEN), D_TRANSLATE(ST_I18N_OFFSET_(ST_GREEN)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_OFFSET_(ST_BLUE), - D_TRANSLATE(ST_I18N_OFFSET_(ST_BLUE)), -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_OFFSET_(ST_BLUE), D_TRANSLATE(ST_I18N_OFFSET_(ST_BLUE)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_OFFSET_(ST_ALL), D_TRANSLATE(ST_I18N_OFFSET_(ST_ALL)), - -1000., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_OFFSET_(ST_ALL), D_TRANSLATE(ST_I18N_OFFSET_(ST_ALL)), -1000., 1000., .01); obs_property_float_set_suffix(p, " %"); } } @@ -737,50 +712,41 @@ obs_properties_t* 
color_grade_factory::get_properties2(color_grade_instance* dat obs_properties_add_group(pr, ST_KEY_TINT, D_TRANSLATE(ST_I18N_TINT), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_LOW, ST_RED), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_LOW, ST_RED)), 0, 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_LOW, ST_RED), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_LOW, ST_RED)), 0, 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_LOW, ST_GREEN), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_LOW, ST_GREEN)), 0, 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_LOW, ST_GREEN), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_LOW, ST_GREEN)), 0, 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_LOW, ST_BLUE), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_LOW, ST_BLUE)), 0, 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_LOW, ST_BLUE), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_LOW, ST_BLUE)), 0, 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_MID, ST_RED), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_MID, ST_RED)), 0, 1000., 0.01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_MID, ST_RED), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_MID, ST_RED)), 0, 1000., 0.01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_MID, ST_GREEN), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_MID, ST_GREEN)), 0, 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_MID, ST_GREEN), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_MID, ST_GREEN)), 0, 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_MID, ST_BLUE), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_MID, ST_BLUE)), 0, 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_MID, ST_BLUE), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_MID, ST_BLUE)), 0, 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_HIGH, ST_RED), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_HIGH, ST_RED)), 0, 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_HIGH, ST_RED), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_HIGH, ST_RED)), 0, 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_HIGH, ST_GREEN), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_HIGH, ST_GREEN)), 0, 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_HIGH, ST_GREEN), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_HIGH, ST_GREEN)), 0, 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_HIGH, ST_BLUE), - D_TRANSLATE(ST_I18N_TINT_(ST_TONE_HIGH, ST_BLUE)), 0, 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_TINT_(ST_TONE_HIGH, ST_BLUE), D_TRANSLATE(ST_I18N_TINT_(ST_TONE_HIGH, ST_BLUE)), 0, 1000., .01); obs_property_float_set_suffix(p, " %"); } } @@ -790,23 +756,19 @@ obs_properties_t* color_grade_factory::get_properties2(color_grade_instance* dat obs_properties_add_group(pr, ST_KEY_CORRECTION, D_TRANSLATE(ST_I18N_CORRECTION), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_float_slider(grp, 
ST_KEY_CORRECTION_(ST_HUE), - D_TRANSLATE(ST_I18N_CORRECTION_(ST_HUE)), -180., 180., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_CORRECTION_(ST_HUE), D_TRANSLATE(ST_I18N_CORRECTION_(ST_HUE)), -180., 180., .01); obs_property_float_set_suffix(p, " °"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_CORRECTION_(ST_SATURATION), - D_TRANSLATE(ST_I18N_CORRECTION_(ST_SATURATION)), 0., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_CORRECTION_(ST_SATURATION), D_TRANSLATE(ST_I18N_CORRECTION_(ST_SATURATION)), 0., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_CORRECTION_(ST_LIGHTNESS), - D_TRANSLATE(ST_I18N_CORRECTION_(ST_LIGHTNESS)), 0., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_CORRECTION_(ST_LIGHTNESS), D_TRANSLATE(ST_I18N_CORRECTION_(ST_LIGHTNESS)), 0., 1000., .01); obs_property_float_set_suffix(p, " %"); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_CORRECTION_(ST_CONTRAST), - D_TRANSLATE(ST_I18N_CORRECTION_(ST_CONTRAST)), 0., 1000., .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_CORRECTION_(ST_CONTRAST), D_TRANSLATE(ST_I18N_CORRECTION_(ST_CONTRAST)), 0., 1000., .01); obs_property_float_set_suffix(p, " %"); } } @@ -816,25 +778,16 @@ obs_properties_t* color_grade_factory::get_properties2(color_grade_instance* dat obs_properties_add_group(pr, S_ADVANCED, D_TRANSLATE(S_ADVANCED), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_TINT_MODE, D_TRANSLATE(ST_I18N_TINT_MODE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); - std::pair els[] = {{ST_I18N_TINT_MODE_(ST_MODE_LINEAR), luma_mode::Linear}, - {ST_I18N_TINT_MODE_(ST_MODE_EXP), luma_mode::Exp}, - {ST_I18N_TINT_MODE_(ST_MODE_EXP2), luma_mode::Exp2}, - {ST_I18N_TINT_MODE_(ST_MODE_LOG), luma_mode::Log}, - {ST_I18N_TINT_MODE_(ST_MODE_LOG10), luma_mode::Log10}}; + auto p = obs_properties_add_list(grp, ST_KEY_TINT_MODE, D_TRANSLATE(ST_I18N_TINT_MODE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + std::pair els[] = {{ST_I18N_TINT_MODE_(ST_MODE_LINEAR), luma_mode::Linear}, {ST_I18N_TINT_MODE_(ST_MODE_EXP), luma_mode::Exp}, {ST_I18N_TINT_MODE_(ST_MODE_EXP2), luma_mode::Exp2}, {ST_I18N_TINT_MODE_(ST_MODE_LOG), luma_mode::Log}, {ST_I18N_TINT_MODE_(ST_MODE_LOG10), luma_mode::Log10}}; for (auto kv : els) { obs_property_list_add_int(p, D_TRANSLATE(kv.first), static_cast(kv.second)); } } { - auto p = obs_properties_add_list(grp, ST_KEY_TINT_DETECTION, D_TRANSLATE(ST_I18N_TINT_DETECTION), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); - std::pair els[] = { - {ST_I18N_TINT_DETECTION_(ST_DETECTION_HSV), detection_mode::HSV}, - {ST_I18N_TINT_DETECTION_(ST_DETECTION_HSL), detection_mode::HSL}, - {ST_I18N_TINT_DETECTION_(ST_DETECTION_YUV_SDR), detection_mode::YUV_SDR}}; + auto p = obs_properties_add_list(grp, ST_KEY_TINT_DETECTION, D_TRANSLATE(ST_I18N_TINT_DETECTION), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + std::pair els[] = {{ST_I18N_TINT_DETECTION_(ST_DETECTION_HSV), detection_mode::HSV}, {ST_I18N_TINT_DETECTION_(ST_DETECTION_HSL), detection_mode::HSL}, {ST_I18N_TINT_DETECTION_(ST_DETECTION_YUV_SDR), detection_mode::YUV_SDR}}; for (auto kv : els) { obs_property_list_add_int(p, D_TRANSLATE(kv.first), static_cast(kv.second)); } @@ -843,15 +796,9 @@ obs_properties_t* color_grade_factory::get_properties2(color_grade_instance* dat obs_properties_add_float_slider(grp, ST_KEY_TINT_EXPONENT, D_TRANSLATE(ST_I18N_TINT_EXPONENT), 0., 10., .01); { - auto p = 
obs_properties_add_list(grp, ST_KEY_RENDERMODE, D_TRANSLATE(ST_I18N_RENDERMODE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_RENDERMODE, D_TRANSLATE(ST_I18N_RENDERMODE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); std::pair els[] = { - {S_STATE_AUTOMATIC, -1}, - {ST_I18N_RENDERMODE_DIRECT, 0}, - {ST_I18N_RENDERMODE_LUT_2BIT, static_cast(streamfx::gfx::lut::color_depth::_2)}, - {ST_I18N_RENDERMODE_LUT_4BIT, static_cast(streamfx::gfx::lut::color_depth::_4)}, - {ST_I18N_RENDERMODE_LUT_6BIT, static_cast(streamfx::gfx::lut::color_depth::_6)}, - {ST_I18N_RENDERMODE_LUT_8BIT, static_cast(streamfx::gfx::lut::color_depth::_8)}, + {S_STATE_AUTOMATIC, -1}, {ST_I18N_RENDERMODE_DIRECT, 0}, {ST_I18N_RENDERMODE_LUT_2BIT, static_cast(streamfx::gfx::lut::color_depth::_2)}, {ST_I18N_RENDERMODE_LUT_4BIT, static_cast(streamfx::gfx::lut::color_depth::_4)}, {ST_I18N_RENDERMODE_LUT_6BIT, static_cast(streamfx::gfx::lut::color_depth::_6)}, {ST_I18N_RENDERMODE_LUT_8BIT, static_cast(streamfx::gfx::lut::color_depth::_8)}, //{ST_RENDERMODE_LUT_10BIT, static_cast(gfx::lut::color_depth::_10)}, }; for (auto kv : els) { diff --git a/source/filters/filter-color-grade.hpp b/source/filters/filter-color-grade.hpp index 53082067..21e2ff13 100644 --- a/source/filters/filter-color-grade.hpp +++ b/source/filters/filter-color-grade.hpp @@ -87,8 +87,7 @@ namespace streamfx::filter::color_grade { virtual void video_render(gs_effect_t* effect) override; }; - class color_grade_factory : public obs::source_factory { + class color_grade_factory : public obs::source_factory { public: color_grade_factory(); virtual ~color_grade_factory(); diff --git a/source/filters/filter-denoising.cpp b/source/filters/filter-denoising.cpp index 72754e2a..ae6d9765 100644 --- a/source/filters/filter-denoising.cpp +++ b/source/filters/filter-denoising.cpp @@ -74,8 +74,7 @@ std::string streamfx::filter::denoising::string(denoising_provider provider) denoising_instance::denoising_instance(obs_data_t* data, obs_source_t* self) : obs::source_instance(data, self), - _size(1, 1), _provider(denoising_provider::INVALID), _provider_ui(denoising_provider::INVALID), - _provider_ready(false), _provider_lock(), _provider_task(), _input(), _output() + _size(1, 1), _provider(denoising_provider::INVALID), _provider_ui(denoising_provider::INVALID), _provider_ready(false), _provider_lock(), _provider_task(), _input(), _output() { D_LOG_DEBUG("Initializating... (Addr: 0x%" PRIuPTR ")", this); @@ -88,8 +87,7 @@ denoising_instance::denoising_instance(obs_data_t* data, obs_source_t* self) _output = _input->get_texture(); // Load the required effect. 
- _standard_effect = - std::make_shared<::streamfx::obs::gs::effect>(::streamfx::data_file_path("effects/standard.effect")); + _standard_effect = std::make_shared<::streamfx::obs::gs::effect>(::streamfx::data_file_path("effects/standard.effect")); // Create Samplers _channel0_sampler = std::make_shared<::streamfx::obs::gs::sampler>(); @@ -246,8 +244,7 @@ void denoising_instance::video_render(gs_effect_t* effect) #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG ::streamfx::obs::gs::debug_marker profiler0{::streamfx::obs::gs::debug_color_source, "StreamFX Denoising"}; - ::streamfx::obs::gs::debug_marker profiler0_0{::streamfx::obs::gs::debug_color_gray, "'%s' on '%s'", - obs_source_get_name(_self), obs_source_get_name(parent)}; + ::streamfx::obs::gs::debug_marker profiler0_0{::streamfx::obs::gs::debug_color_gray, "'%s' on '%s'", obs_source_get_name(_self), obs_source_get_name(parent)}; #endif if (_dirty) { // Lock the provider from being changed. @@ -368,8 +365,7 @@ void streamfx::filter::denoising::denoising_instance::switch_provider(denoising_ // - Doesn't guarantee that the task is properly killed off. // Log information. - D_LOG_INFO("Instance '%s' is switching provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(_provider), - cstring(provider)); + D_LOG_INFO("Instance '%s' is switching provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(_provider), cstring(provider)); // If there is an existing task, attempt to cancel it. if (_provider_task) { @@ -389,8 +385,7 @@ void streamfx::filter::denoising::denoising_instance::switch_provider(denoising_ _provider = provider; // Then spawn a new task to switch provider. - _provider_task = streamfx::threadpool()->push( - std::bind(&denoising_instance::task_switch_provider, this, std::placeholders::_1), spd); + _provider_task = streamfx::threadpool()->push(std::bind(&denoising_instance::task_switch_provider, this, std::placeholders::_1), spd); } void streamfx::filter::denoising::denoising_instance::task_switch_provider(util::threadpool::task_data_t data) @@ -427,8 +422,7 @@ void streamfx::filter::denoising::denoising_instance::task_switch_provider(util: } // Log information. 
- D_LOG_INFO("Instance '%s' switched provider from '%s' to '%s'.", obs_source_get_name(_self), - cstring(spd->provider), cstring(_provider)); + D_LOG_INFO("Instance '%s' switched provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(spd->provider), cstring(_provider)); _provider_ready = true; } catch (std::exception const& ex) { @@ -470,13 +464,10 @@ void streamfx::filter::denoising::denoising_instance::nvvfx_denoising_process() void streamfx::filter::denoising::denoising_instance::nvvfx_denoising_properties(obs_properties_t* props) { obs_properties_t* grp = obs_properties_create(); - obs_properties_add_group(props, ST_KEY_NVIDIA_DENOISING, D_TRANSLATE(ST_I18N_NVIDIA_DENOISING), OBS_GROUP_NORMAL, - grp); + obs_properties_add_group(props, ST_KEY_NVIDIA_DENOISING, D_TRANSLATE(ST_I18N_NVIDIA_DENOISING), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_NVIDIA_DENOISING_STRENGTH, - D_TRANSLATE(ST_I18N_NVIDIA_DENOISING_STRENGTH), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_NVIDIA_DENOISING_STRENGTH, D_TRANSLATE(ST_I18N_NVIDIA_DENOISING_STRENGTH), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_NVIDIA_DENOISING_STRENGTH_WEAK), 0); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_NVIDIA_DENOISING_STRENGTH_STRONG), 1); } @@ -487,8 +478,7 @@ void streamfx::filter::denoising::denoising_instance::nvvfx_denoising_update(obs if (!_nvidia_fx) return; - _nvidia_fx->set_strength( - static_cast(obs_data_get_int(data, ST_KEY_NVIDIA_DENOISING_STRENGTH) == 0 ? 0. : 1.)); + _nvidia_fx->set_strength(static_cast(obs_data_get_int(data, ST_KEY_NVIDIA_DENOISING_STRENGTH) == 0 ? 0. : 1.)); } #endif @@ -577,8 +567,7 @@ obs_properties_t* denoising_factory::get_properties2(denoising_instance* data) #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), denoising_factory::on_manual_open, - nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), denoising_factory::on_manual_open, nullptr); } #endif @@ -591,13 +580,10 @@ obs_properties_t* denoising_factory::get_properties2(denoising_instance* data) obs_properties_add_group(pr, S_ADVANCED, D_TRANSLATE(S_ADVANCED), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_PROVIDER, D_TRANSLATE(ST_I18N_PROVIDER), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_PROVIDER, D_TRANSLATE(ST_I18N_PROVIDER), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback(p, modified_provider); - obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), - static_cast(denoising_provider::AUTOMATIC)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_PROVIDER_NVIDIA_DENOISING), - static_cast(denoising_provider::NVIDIA_DENOISING)); + obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), static_cast(denoising_provider::AUTOMATIC)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_PROVIDER_NVIDIA_DENOISING), static_cast(denoising_provider::NVIDIA_DENOISING)); } } diff --git a/source/filters/filter-denoising.hpp b/source/filters/filter-denoising.hpp index 90fa7844..bdb7aedb 100644 --- a/source/filters/filter-denoising.hpp +++ b/source/filters/filter-denoising.hpp @@ -81,8 +81,7 @@ namespace streamfx::filter::denoising { #endif }; - class denoising_factory : public obs::source_factory<::streamfx::filter::denoising::denoising_factory, - ::streamfx::filter::denoising::denoising_instance> { + class 
denoising_factory : public obs::source_factory<::streamfx::filter::denoising::denoising_factory, ::streamfx::filter::denoising::denoising_instance> { #ifdef ENABLE_FILTER_DENOISING_NVIDIA bool _nvidia_available; std::shared_ptr<::streamfx::nvidia::cuda::obs> _nvcuda; diff --git a/source/filters/filter-dynamic-mask.cpp b/source/filters/filter-dynamic-mask.cpp index 23a09022..7ec9c02b 100644 --- a/source/filters/filter-dynamic-mask.cpp +++ b/source/filters/filter-dynamic-mask.cpp @@ -88,37 +88,37 @@ std::shared_ptr data::get() std::lock_guard lock(instance_lock); auto instance = weak_instance.lock(); if (!instance) { - instance = std::shared_ptr{new streamfx::filter::dynamic_mask::data()}; + instance = std::shared_ptr{new streamfx::filter::dynamic_mask::data()}; weak_instance = instance; } return instance; } dynamic_mask_instance::dynamic_mask_instance(obs_data_t* settings, obs_source_t* self) - : obs::source_instance(settings, self), // + : obs::source_instance(settings, self), // _data(streamfx::filter::dynamic_mask::data::get()), // - _gfx_util(::streamfx::gfx::util::get()), // - _translation_map(), // - _input(), // - _input_child(), // - _input_vs(), // - _input_ac(), // - _have_base(false), // - _base_rt(), // - _base_tex(), // - _base_color_space(GS_CS_SRGB), // - _base_color_format(GS_RGBA), // - _have_input(false), // - _input_rt(), // - _input_tex(), // - _input_color_space(GS_CS_SRGB), // - _input_color_format(GS_RGBA), // - _have_final(false), // - _final_rt(), // - _final_tex(), // - _channels(), // - _precalc(), // - _debug_texture(-1) // + _gfx_util(::streamfx::gfx::util::get()), // + _translation_map(), // + _input(), // + _input_child(), // + _input_vs(), // + _input_ac(), // + _have_base(false), // + _base_rt(), // + _base_tex(), // + _base_color_space(GS_CS_SRGB), // + _base_color_format(GS_RGBA), // + _have_input(false), // + _input_rt(), // + _input_tex(), // + _input_color_space(GS_CS_SRGB), // + _input_color_format(GS_RGBA), // + _have_final(false), // + _final_rt(), // + _final_tex(), // + _channels(), // + _precalc(), // + _debug_texture(-1) // { update(settings); } @@ -157,12 +157,12 @@ void dynamic_mask_instance::update(obs_data_t* settings) } } - std::string chv_key = std::string(ST_KEY_CHANNEL_VALUE) + "." + kv1.second; - found->second.value = static_cast(obs_data_get_double(settings, chv_key.c_str())); + std::string chv_key = std::string(ST_KEY_CHANNEL_VALUE) + "." + kv1.second; + found->second.value = static_cast(obs_data_get_double(settings, chv_key.c_str())); _precalc.base.ptr[static_cast(kv1.first)] = found->second.value; - std::string chm_key = std::string(ST_KEY_CHANNEL_MULTIPLIER) + "." + kv1.second; - found->second.scale = static_cast(obs_data_get_double(settings, chm_key.c_str())); + std::string chm_key = std::string(ST_KEY_CHANNEL_MULTIPLIER) + "." + kv1.second; + found->second.scale = static_cast(obs_data_get_double(settings, chm_key.c_str())); _precalc.scale.ptr[static_cast(kv1.first)] = found->second.scale; vec4* ch = &_precalc.matrix.x; @@ -184,10 +184,9 @@ void dynamic_mask_instance::update(obs_data_t* settings) } for (auto kv2 : channel_translations) { - std::string ab_key = std::string(ST_KEY_CHANNEL_INPUT) + "." + kv1.second + "." + kv2.second; - found->second.values.ptr[static_cast(kv2.first)] = - static_cast(obs_data_get_double(settings, ab_key.c_str())); - ch->ptr[static_cast(kv2.first)] = found->second.values.ptr[static_cast(kv2.first)]; + std::string ab_key = std::string(ST_KEY_CHANNEL_INPUT) + "." + kv1.second + "." 
+ kv2.second; + found->second.values.ptr[static_cast(kv2.first)] = static_cast(obs_data_get_double(settings, ab_key.c_str())); + ch->ptr[static_cast(kv2.first)] = found->second.values.ptr[static_cast(kv2.first)]; } } @@ -219,8 +218,7 @@ void dynamic_mask_instance::save(obs_data_t* settings) for (auto kv2 : channel_translations) { std::string ab_key = std::string(ST_KEY_CHANNEL_INPUT) + "." + kv1.second + "." + kv2.second; - obs_data_set_double(settings, ab_key.c_str(), - static_cast(found->second.values.ptr[static_cast(kv2.first)])); + obs_data_set_double(settings, ab_key.c_str(), static_cast(found->second.values.ptr[static_cast(kv2.first)])); } } } @@ -236,8 +234,7 @@ void dynamic_mask_instance::video_tick(float time) _have_base = false; std::array preferred_formats = {GS_CS_SRGB}; - _base_color_space = obs_source_get_color_space(obs_filter_get_target(_self), preferred_formats.size(), - preferred_formats.data()); + _base_color_space = obs_source_get_color_space(obs_filter_get_target(_self), preferred_formats.size(), preferred_formats.data()); switch (_base_color_space) { case GS_CS_SRGB: _base_color_format = GS_RGBA; @@ -262,7 +259,7 @@ void dynamic_mask_instance::video_tick(float time) _have_input = false; std::array preferred_formats = {GS_CS_SRGB}; - _input_color_space = obs_source_get_color_space(input, preferred_formats.size(), preferred_formats.data()); + _input_color_space = obs_source_get_color_space(input, preferred_formats.size(), preferred_formats.data()); switch (_input_color_space) { case GS_CS_SRGB: _input_color_format = GS_RGBA; @@ -300,8 +297,7 @@ void dynamic_mask_instance::video_render(gs_effect_t* in_effect) auto input = _input.lock(); #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Dynamic Mask '%s' on '%s'", - obs_source_get_name(_self), obs_source_get_name(obs_filter_get_parent(_self))}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Dynamic Mask '%s' on '%s'", obs_source_get_name(_self), obs_source_get_name(obs_filter_get_parent(_self))}; #endif // If there's some issue acquiring information, skip rendering entirely. @@ -329,8 +325,7 @@ void dynamic_mask_instance::video_render(gs_effect_t* in_effect) gs_enable_framebuffer_srgb(false); // Begin rendering the source with a certain color space. - if (obs_source_process_filter_begin_with_color_space(_self, _base_color_format, _base_color_space, - OBS_ALLOW_DIRECT_RENDERING)) { + if (obs_source_process_filter_begin_with_color_space(_self, _base_color_format, _base_color_space, OBS_ALLOW_DIRECT_RENDERING)) { try { { auto op = _base_rt->render(width, height, _base_color_space); @@ -402,8 +397,7 @@ void dynamic_mask_instance::video_render(gs_effect_t* in_effect) _input_color_space = _base_color_space; } else { #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdm{streamfx::obs::gs::debug_color_source, "Input '%s'", - input.name().data()}; + streamfx::obs::gs::debug_marker gdm{streamfx::obs::gs::debug_color_source, "Input '%s'", input.name().data()}; #endif // Ensure the Render Target matches the expected format. if (!_input_rt || (_input_rt->get_color_format() != _input_color_format)) { @@ -714,8 +708,7 @@ void dynamic_mask_factory::get_defaults2(obs_data_t* data) obs_data_set_default_double(data, (std::string(ST_KEY_CHANNEL_VALUE) + "." + kv.second).c_str(), 1.0); obs_data_set_default_double(data, (std::string(ST_KEY_CHANNEL_MULTIPLIER) + "." 
+ kv.second).c_str(), 1.0); for (auto kv2 : channel_translations) { - obs_data_set_default_double( - data, (std::string(ST_KEY_CHANNEL_INPUT) + "." + kv.second + "." + kv2.second).c_str(), 0.0); + obs_data_set_default_double(data, (std::string(ST_KEY_CHANNEL_INPUT) + "." + kv.second + "." + kv2.second).c_str(), 0.0); } } obs_data_set_default_int(data, ST_KEY_DEBUG_TEXTURE, -1); @@ -730,14 +723,12 @@ obs_properties_t* dynamic_mask_factory::get_properties2(dynamic_mask_instance* d #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(props, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::filter::dynamic_mask::dynamic_mask_factory::on_manual_open, nullptr); + obs_properties_add_button2(props, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::filter::dynamic_mask::dynamic_mask_factory::on_manual_open, nullptr); } #endif { // Input - p = obs_properties_add_list(props, ST_KEY_INPUT, D_TRANSLATE(ST_I18N_INPUT), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + p = obs_properties_add_list(props, ST_KEY_INPUT, D_TRANSLATE(ST_I18N_INPUT), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_list_add_string(p, "", ""); obs::source_tracker::get()->enumerate( [&p](std::string name, ::streamfx::obs::source) { @@ -764,8 +755,7 @@ obs_properties_t* dynamic_mask_factory::get_properties2(dynamic_mask_instance* d { _translation_cache.push_back(translate_string(D_TRANSLATE(ST_I18N_CHANNEL_VALUE), D_TRANSLATE(pri_ch))); std::string buf = std::string(ST_KEY_CHANNEL_VALUE) + "." + pri_ch; - p = obs_properties_add_float_slider(grp, buf.c_str(), _translation_cache.back().c_str(), -100.0, 100.0, - 0.01); + p = obs_properties_add_float_slider(grp, buf.c_str(), _translation_cache.back().c_str(), -100.0, 100.0, 0.01); obs_property_set_long_description(p, _translation_cache.back().c_str()); } @@ -773,25 +763,21 @@ obs_properties_t* dynamic_mask_factory::get_properties2(dynamic_mask_instance* d for (auto sec_ch : sec_chs) { _translation_cache.push_back(translate_string(D_TRANSLATE(ST_I18N_CHANNEL_INPUT), D_TRANSLATE(sec_ch))); std::string buf = std::string(ST_KEY_CHANNEL_INPUT) + "." + pri_ch + "." + sec_ch; - p = obs_properties_add_float_slider(grp, buf.c_str(), _translation_cache.back().c_str(), -100.0, 100.0, - 0.01); + p = obs_properties_add_float_slider(grp, buf.c_str(), _translation_cache.back().c_str(), -100.0, 100.0, 0.01); obs_property_set_long_description(p, _translation_cache.back().c_str()); } { - _translation_cache.push_back( - translate_string(D_TRANSLATE(ST_I18N_CHANNEL_MULTIPLIER), D_TRANSLATE(pri_ch))); + _translation_cache.push_back(translate_string(D_TRANSLATE(ST_I18N_CHANNEL_MULTIPLIER), D_TRANSLATE(pri_ch))); std::string buf = std::string(ST_KEY_CHANNEL_MULTIPLIER) + "." + pri_ch; - p = obs_properties_add_float_slider(grp, buf.c_str(), _translation_cache.back().c_str(), -100.0, 100.0, - 0.01); + p = obs_properties_add_float_slider(grp, buf.c_str(), _translation_cache.back().c_str(), -100.0, 100.0, 0.01); obs_property_set_long_description(p, _translation_cache.back().c_str()); } { _translation_cache.push_back(translate_string(D_TRANSLATE(ST_I18N_CHANNEL), D_TRANSLATE(pri_ch))); std::string buf = std::string(ST_KEY_CHANNEL) + "." 
+ pri_ch; - obs_properties_add_group(props, buf.c_str(), _translation_cache.back().c_str(), - obs_group_type::OBS_GROUP_NORMAL, grp); + obs_properties_add_group(props, buf.c_str(), _translation_cache.back().c_str(), obs_group_type::OBS_GROUP_NORMAL, grp); } } @@ -800,8 +786,7 @@ obs_properties_t* dynamic_mask_factory::get_properties2(dynamic_mask_instance* d obs_properties_add_group(props, "Debug", D_TRANSLATE(S_ADVANCED), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_DEBUG_TEXTURE, D_TRANSLATE(ST_I18N_DEBUG_TEXTURE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_DEBUG_TEXTURE, D_TRANSLATE(ST_I18N_DEBUG_TEXTURE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_DISABLED), -1); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_DEBUG_TEXTURE_BASE), 0); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_DEBUG_TEXTURE_INPUT), 1); diff --git a/source/filters/filter-dynamic-mask.hpp b/source/filters/filter-dynamic-mask.hpp index 2cdf65bb..ceba4954 100644 --- a/source/filters/filter-dynamic-mask.hpp +++ b/source/filters/filter-dynamic-mask.hpp @@ -109,8 +109,7 @@ namespace streamfx::filter::dynamic_mask { void release(); }; - class dynamic_mask_factory : public obs::source_factory { + class dynamic_mask_factory : public obs::source_factory { std::list _translation_cache; public: diff --git a/source/filters/filter-sdf-effects.cpp b/source/filters/filter-sdf-effects.cpp index 16ed4858..1546abd3 100644 --- a/source/filters/filter-sdf-effects.cpp +++ b/source/filters/filter-sdf-effects.cpp @@ -102,15 +102,7 @@ using namespace streamfx::filter::sdf_effects; static constexpr std::string_view HELP_URL = "https://github.com/Xaymar/obs-StreamFX/wiki/Filter-SDF-Effects"; -sdf_effects_instance::sdf_effects_instance(obs_data_t* settings, obs_source_t* self) - : obs::source_instance(settings, self), _gfx_util(::streamfx::gfx::util::get()), _source_rendered(false), - _sdf_scale(1.0), _sdf_threshold(), _output_rendered(false), _inner_shadow(false), _inner_shadow_color(), - _inner_shadow_range_min(), _inner_shadow_range_max(), _inner_shadow_offset_x(), _inner_shadow_offset_y(), - _outer_shadow(false), _outer_shadow_color(), _outer_shadow_range_min(), _outer_shadow_range_max(), - _outer_shadow_offset_x(), _outer_shadow_offset_y(), _inner_glow(false), _inner_glow_color(), _inner_glow_width(), - _inner_glow_sharpness(), _inner_glow_sharpness_inv(), _outer_glow(false), _outer_glow_color(), - _outer_glow_width(), _outer_glow_sharpness(), _outer_glow_sharpness_inv(), _outline(false), _outline_color(), - _outline_width(), _outline_offset(), _outline_sharpness(), _outline_sharpness_inv() +sdf_effects_instance::sdf_effects_instance(obs_data_t* settings, obs_source_t* self) : obs::source_instance(settings, self), _gfx_util(::streamfx::gfx::util::get()), _source_rendered(false), _sdf_scale(1.0), _sdf_threshold(), _output_rendered(false), _inner_shadow(false), _inner_shadow_color(), _inner_shadow_range_min(), _inner_shadow_range_max(), _inner_shadow_offset_x(), _inner_shadow_offset_y(), _outer_shadow(false), _outer_shadow_color(), _outer_shadow_range_min(), _outer_shadow_range_max(), _outer_shadow_offset_x(), _outer_shadow_offset_y(), _inner_glow(false), _inner_glow_color(), _inner_glow_width(), _inner_glow_sharpness(), _inner_glow_sharpness_inv(), _outer_glow(false), _outer_glow_color(), _outer_glow_width(), _outer_glow_sharpness(), _outer_glow_sharpness_inv(), _outline(false), _outline_color(), 
_outline_width(), _outline_offset(), _outline_sharpness(), _outline_sharpness_inv() { { auto gctx = streamfx::obs::gs::context(); @@ -121,8 +113,7 @@ sdf_effects_instance::sdf_effects_instance(obs_data_t* settings, obs_source_t* s _sdf_read = std::make_shared(GS_RGBA32F, GS_ZS_NONE); _output_rt = std::make_shared(GS_RGBA, GS_ZS_NONE); - std::shared_ptr initialize_rts[] = {_source_rt, _sdf_write, _sdf_read, - _output_rt}; + std::shared_ptr initialize_rts[] = {_source_rt, _sdf_write, _sdf_read, _output_rt}; for (auto rt : initialize_rts) { auto op = rt->render(1, 1); gs_clear(GS_CLEAR_COLOR | GS_CLEAR_DEPTH, &transparent, 0, 0); @@ -158,9 +149,7 @@ void sdf_effects_instance::migrate(obs_data_t* data, uint64_t version) {} void sdf_effects_instance::update(obs_data_t* data) { { - _outer_shadow = - obs_data_get_bool(data, ST_KEY_SHADOW_OUTER) - && (obs_data_get_double(data, ST_KEY_SHADOW_OUTER_ALPHA) >= std::numeric_limits::epsilon()); + _outer_shadow = obs_data_get_bool(data, ST_KEY_SHADOW_OUTER) && (obs_data_get_double(data, ST_KEY_SHADOW_OUTER_ALPHA) >= std::numeric_limits::epsilon()); { struct cs { uint8_t r, g, b, a; @@ -183,9 +172,7 @@ void sdf_effects_instance::update(obs_data_t* data) } { - _inner_shadow = - obs_data_get_bool(data, ST_KEY_SHADOW_INNER) - && (obs_data_get_double(data, ST_KEY_SHADOW_INNER_ALPHA) >= std::numeric_limits::epsilon()); + _inner_shadow = obs_data_get_bool(data, ST_KEY_SHADOW_INNER) && (obs_data_get_double(data, ST_KEY_SHADOW_INNER_ALPHA) >= std::numeric_limits::epsilon()); { struct cs { uint8_t r, g, b, a; @@ -208,9 +195,7 @@ void sdf_effects_instance::update(obs_data_t* data) } { - _outer_glow = - obs_data_get_bool(data, ST_KEY_GLOW_OUTER) - && (obs_data_get_double(data, ST_KEY_GLOW_OUTER_ALPHA) >= std::numeric_limits::epsilon()); + _outer_glow = obs_data_get_bool(data, ST_KEY_GLOW_OUTER) && (obs_data_get_double(data, ST_KEY_GLOW_OUTER_ALPHA) >= std::numeric_limits::epsilon()); { struct cs { uint8_t r, g, b, a; @@ -235,9 +220,7 @@ void sdf_effects_instance::update(obs_data_t* data) } { - _inner_glow = - obs_data_get_bool(data, ST_KEY_GLOW_INNER) - && (obs_data_get_double(data, ST_KEY_GLOW_INNER_ALPHA) >= std::numeric_limits::epsilon()); + _inner_glow = obs_data_get_bool(data, ST_KEY_GLOW_INNER) && (obs_data_get_double(data, ST_KEY_GLOW_INNER_ALPHA) >= std::numeric_limits::epsilon()); { struct cs { uint8_t r, g, b, a; @@ -262,8 +245,7 @@ void sdf_effects_instance::update(obs_data_t* data) } { - _outline = obs_data_get_bool(data, ST_KEY_OUTLINE) - && (obs_data_get_double(data, ST_KEY_OUTLINE_ALPHA) >= std::numeric_limits::epsilon()); + _outline = obs_data_get_bool(data, ST_KEY_OUTLINE) && (obs_data_get_double(data, ST_KEY_OUTLINE_ALPHA) >= std::numeric_limits::epsilon()); { struct cs { uint8_t r, g, b, a; @@ -315,8 +297,7 @@ void sdf_effects_instance::video_render(gs_effect_t* effect) } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "SDF Effects '%s' on '%s'", - obs_source_get_name(_self), obs_source_get_name(obs_filter_get_parent(_self))}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "SDF Effects '%s' on '%s'", obs_source_get_name(_self), obs_source_get_name(obs_filter_get_parent(_self))}; #endif auto gctx = streamfx::obs::gs::context(); @@ -383,8 +364,7 @@ void sdf_effects_instance::video_render(gs_effect_t* effect) { #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker 
gdm{streamfx::obs::gs::debug_color_convert, - "Update Distance Field"}; + streamfx::obs::gs::debug_marker gdm{streamfx::obs::gs::debug_color_convert, "Update Distance Field"}; #endif auto op = _sdf_write->render(uint32_t(sdfW), uint32_t(sdfH)); @@ -467,8 +447,7 @@ void sdf_effects_instance::video_render(gs_effect_t* effect) _sdf_consumer_effect.get_parameter("pShadowColor").set_float4(_outer_shadow_color); _sdf_consumer_effect.get_parameter("pShadowMin").set_float(_outer_shadow_range_min); _sdf_consumer_effect.get_parameter("pShadowMax").set_float(_outer_shadow_range_max); - _sdf_consumer_effect.get_parameter("pShadowOffset") - .set_float2(_outer_shadow_offset_x / float_t(baseW), _outer_shadow_offset_y / float_t(baseH)); + _sdf_consumer_effect.get_parameter("pShadowOffset").set_float2(_outer_shadow_offset_x / float_t(baseW), _outer_shadow_offset_y / float_t(baseH)); while (gs_effect_loop(_sdf_consumer_effect.get_object(), "ShadowOuter")) { _gfx_util->draw_fullscreen_triangle(); } @@ -480,8 +459,7 @@ void sdf_effects_instance::video_render(gs_effect_t* effect) _sdf_consumer_effect.get_parameter("pShadowColor").set_float4(_inner_shadow_color); _sdf_consumer_effect.get_parameter("pShadowMin").set_float(_inner_shadow_range_min); _sdf_consumer_effect.get_parameter("pShadowMax").set_float(_inner_shadow_range_max); - _sdf_consumer_effect.get_parameter("pShadowOffset") - .set_float2(_inner_shadow_offset_x / float_t(baseW), _inner_shadow_offset_y / float_t(baseH)); + _sdf_consumer_effect.get_parameter("pShadowOffset").set_float2(_inner_shadow_offset_x / float_t(baseW), _inner_shadow_offset_y / float_t(baseH)); while (gs_effect_loop(_sdf_consumer_effect.get_object(), "ShadowInner")) { _gfx_util->draw_fullscreen_triangle(); } @@ -618,8 +596,7 @@ obs_properties_t* sdf_effects_factory::get_properties2(sdf_effects_instance* dat #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(prs, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::filter::sdf_effects::sdf_effects_factory::on_manual_open, nullptr); + obs_properties_add_button2(prs, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::filter::sdf_effects::sdf_effects_factory::on_manual_open, nullptr); } #endif @@ -627,34 +604,24 @@ obs_properties_t* sdf_effects_factory::get_properties2(sdf_effects_instance* dat auto pr = obs_properties_create(); obs_properties_add_group(prs, ST_KEY_SHADOW_OUTER, D_TRANSLATE(ST_I18N_SHADOW_OUTER), OBS_GROUP_CHECKABLE, pr); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_RANGE_MINIMUM, - D_TRANSLATE(ST_I18N_SHADOW_OUTER_RANGE_MINIMUM), -16.0, 16.0, 0.01); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_RANGE_MAXIMUM, - D_TRANSLATE(ST_I18N_SHADOW_OUTER_RANGE_MAXIMUM), -16.0, 16.0, 0.01); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_OFFSET_X, D_TRANSLATE(ST_I18N_SHADOW_OUTER_OFFSET_X), - -100.0, 100.0, 0.01); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_OFFSET_Y, D_TRANSLATE(ST_I18N_SHADOW_OUTER_OFFSET_Y), - -100.0, 100.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_RANGE_MINIMUM, D_TRANSLATE(ST_I18N_SHADOW_OUTER_RANGE_MINIMUM), -16.0, 16.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_RANGE_MAXIMUM, D_TRANSLATE(ST_I18N_SHADOW_OUTER_RANGE_MAXIMUM), -16.0, 16.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_OFFSET_X, D_TRANSLATE(ST_I18N_SHADOW_OUTER_OFFSET_X), -100.0, 100.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_OFFSET_Y, D_TRANSLATE(ST_I18N_SHADOW_OUTER_OFFSET_Y), -100.0, 100.0, 0.01); 
obs_properties_add_color(pr, ST_KEY_SHADOW_OUTER_COLOR, D_TRANSLATE(ST_I18N_SHADOW_OUTER_COLOR)); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_ALPHA, D_TRANSLATE(ST_I18N_SHADOW_OUTER_ALPHA), 0.0, - 100.0, 0.1); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_OUTER_ALPHA, D_TRANSLATE(ST_I18N_SHADOW_OUTER_ALPHA), 0.0, 100.0, 0.1); } { // Shadow Inner auto pr = obs_properties_create(); obs_properties_add_group(prs, ST_KEY_SHADOW_INNER, D_TRANSLATE(ST_I18N_SHADOW_INNER), OBS_GROUP_CHECKABLE, pr); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_RANGE_MINIMUM, - D_TRANSLATE(ST_I18N_SHADOW_INNER_RANGE_MINIMUM), -16.0, 16.0, 0.01); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_RANGE_MAXIMUM, - D_TRANSLATE(ST_I18N_SHADOW_INNER_RANGE_MAXIMUM), -16.0, 16.0, 0.01); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_OFFSET_X, D_TRANSLATE(ST_I18N_SHADOW_INNER_OFFSET_X), - -100.0, 100.0, 0.01); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_OFFSET_Y, D_TRANSLATE(ST_I18N_SHADOW_INNER_OFFSET_Y), - -100.0, 100.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_RANGE_MINIMUM, D_TRANSLATE(ST_I18N_SHADOW_INNER_RANGE_MINIMUM), -16.0, 16.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_RANGE_MAXIMUM, D_TRANSLATE(ST_I18N_SHADOW_INNER_RANGE_MAXIMUM), -16.0, 16.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_OFFSET_X, D_TRANSLATE(ST_I18N_SHADOW_INNER_OFFSET_X), -100.0, 100.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_OFFSET_Y, D_TRANSLATE(ST_I18N_SHADOW_INNER_OFFSET_Y), -100.0, 100.0, 0.01); obs_properties_add_color(pr, ST_KEY_SHADOW_INNER_COLOR, D_TRANSLATE(ST_I18N_SHADOW_INNER_COLOR)); - obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_ALPHA, D_TRANSLATE(ST_I18N_SHADOW_INNER_ALPHA), 0.0, - 100.0, 0.1); + obs_properties_add_float_slider(pr, ST_KEY_SHADOW_INNER_ALPHA, D_TRANSLATE(ST_I18N_SHADOW_INNER_ALPHA), 0.0, 100.0, 0.1); } { // Glow Outer @@ -662,12 +629,9 @@ obs_properties_t* sdf_effects_factory::get_properties2(sdf_effects_instance* dat obs_properties_add_group(prs, ST_KEY_GLOW_OUTER, D_TRANSLATE(ST_I18N_GLOW_OUTER), OBS_GROUP_CHECKABLE, pr); obs_properties_add_color(pr, ST_KEY_GLOW_OUTER_COLOR, D_TRANSLATE(ST_I18N_GLOW_OUTER_COLOR)); - obs_properties_add_float_slider(pr, ST_KEY_GLOW_OUTER_ALPHA, D_TRANSLATE(ST_I18N_GLOW_OUTER_ALPHA), 0.0, 100.0, - 0.1); - obs_properties_add_float_slider(pr, ST_KEY_GLOW_OUTER_WIDTH, D_TRANSLATE(ST_I18N_GLOW_OUTER_WIDTH), 0.0, 16.0, - 0.01); - obs_properties_add_float_slider(pr, ST_KEY_GLOW_OUTER_SHARPNESS, D_TRANSLATE(ST_I18N_GLOW_OUTER_SHARPNESS), - 0.00, 100.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_GLOW_OUTER_ALPHA, D_TRANSLATE(ST_I18N_GLOW_OUTER_ALPHA), 0.0, 100.0, 0.1); + obs_properties_add_float_slider(pr, ST_KEY_GLOW_OUTER_WIDTH, D_TRANSLATE(ST_I18N_GLOW_OUTER_WIDTH), 0.0, 16.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_GLOW_OUTER_SHARPNESS, D_TRANSLATE(ST_I18N_GLOW_OUTER_SHARPNESS), 0.00, 100.0, 0.01); } { // Glow Inner @@ -675,12 +639,9 @@ obs_properties_t* sdf_effects_factory::get_properties2(sdf_effects_instance* dat obs_properties_add_group(prs, ST_KEY_GLOW_INNER, D_TRANSLATE(ST_I18N_GLOW_INNER), OBS_GROUP_CHECKABLE, pr); obs_properties_add_color(pr, ST_KEY_GLOW_INNER_COLOR, D_TRANSLATE(ST_I18N_GLOW_INNER_COLOR)); - obs_properties_add_float_slider(pr, ST_KEY_GLOW_INNER_ALPHA, D_TRANSLATE(ST_I18N_GLOW_INNER_ALPHA), 0.0, 100.0, - 0.1); - obs_properties_add_float_slider(pr, ST_KEY_GLOW_INNER_WIDTH, 
D_TRANSLATE(ST_I18N_GLOW_INNER_WIDTH), 0.0, 16.0, - 0.01); - obs_properties_add_float_slider(pr, ST_KEY_GLOW_INNER_SHARPNESS, D_TRANSLATE(ST_I18N_GLOW_INNER_SHARPNESS), - 0.00, 100.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_GLOW_INNER_ALPHA, D_TRANSLATE(ST_I18N_GLOW_INNER_ALPHA), 0.0, 100.0, 0.1); + obs_properties_add_float_slider(pr, ST_KEY_GLOW_INNER_WIDTH, D_TRANSLATE(ST_I18N_GLOW_INNER_WIDTH), 0.0, 16.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_GLOW_INNER_SHARPNESS, D_TRANSLATE(ST_I18N_GLOW_INNER_SHARPNESS), 0.00, 100.0, 0.01); } { // Outline @@ -692,11 +653,9 @@ obs_properties_t* sdf_effects_factory::get_properties2(sdf_effects_instance* dat obs_properties_add_float_slider(pr, ST_KEY_OUTLINE_WIDTH, D_TRANSLATE(ST_I18N_OUTLINE_WIDTH), 0.0, 16.0, 0.01); - obs_properties_add_float_slider(pr, ST_KEY_OUTLINE_OFFSET, D_TRANSLATE(ST_I18N_OUTLINE_OFFSET), -16.0, 16.0, - 0.01); + obs_properties_add_float_slider(pr, ST_KEY_OUTLINE_OFFSET, D_TRANSLATE(ST_I18N_OUTLINE_OFFSET), -16.0, 16.0, 0.01); - obs_properties_add_float_slider(pr, ST_KEY_OUTLINE_SHARPNESS, D_TRANSLATE(ST_I18N_OUTLINE_SHARPNESS), 0.00, - 100.0, 0.01); + obs_properties_add_float_slider(pr, ST_KEY_OUTLINE_SHARPNESS, D_TRANSLATE(ST_I18N_OUTLINE_SHARPNESS), 0.00, 100.0, 0.01); } { // Advanced Options diff --git a/source/filters/filter-sdf-effects.hpp b/source/filters/filter-sdf-effects.hpp index 08d157ef..3d2ebd3b 100644 --- a/source/filters/filter-sdf-effects.hpp +++ b/source/filters/filter-sdf-effects.hpp @@ -80,8 +80,7 @@ namespace streamfx::filter::sdf_effects { virtual void video_render(gs_effect_t*) override; }; - class sdf_effects_factory : public obs::source_factory { + class sdf_effects_factory : public obs::source_factory { public: sdf_effects_factory(); virtual ~sdf_effects_factory(); diff --git a/source/filters/filter-shader.cpp b/source/filters/filter-shader.cpp index f9d8a027..af8ca44a 100644 --- a/source/filters/filter-shader.cpp +++ b/source/filters/filter-shader.cpp @@ -30,8 +30,7 @@ using namespace streamfx::filter::shader; -static constexpr std::string_view HELP_URL = - "https://github.com/Xaymar/obs-StreamFX/wiki/Source-Filter-Transition-Shader"; +static constexpr std::string_view HELP_URL = "https://github.com/Xaymar/obs-StreamFX/wiki/Source-Filter-Transition-Shader"; shader_instance::shader_instance(obs_data_t* data, obs_source_t* self) : obs::source_instance(data, self) { @@ -93,9 +92,7 @@ void shader_instance::video_render(gs_effect_t* effect) } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Shader Filter '%s' on '%s'", - obs_source_get_name(_self), - obs_source_get_name(obs_filter_get_parent(_self))}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Shader Filter '%s' on '%s'", obs_source_get_name(_self), obs_source_get_name(obs_filter_get_parent(_self))}; #endif { @@ -193,8 +190,7 @@ obs_properties_t* shader_factory::get_properties2(shader::shader_instance* data) #ifdef ENABLE_FRONTEND { - auto p = obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::filter::shader::shader_factory::on_manual_open, nullptr); + auto p = obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::filter::shader::shader_factory::on_manual_open, nullptr); } #endif diff --git a/source/filters/filter-transform.cpp b/source/filters/filter-transform.cpp index 8031820b..bb49143a 100644 --- a/source/filters/filter-transform.cpp 
+++ b/source/filters/filter-transform.cpp @@ -93,10 +93,7 @@ enum RotationOrder : int64_t { ZYX = 5, }; -transform_instance::transform_instance(obs_data_t* data, obs_source_t* context) - : obs::source_instance(data, context), _gfx_util(::streamfx::gfx::util::get()), _camera_mode(), _camera_fov(), - _params(), _corners(), _standard_effect(), _transform_effect(), _sampler(), _cache_rendered(), _mipmap_enabled(), - _source_rendered(), _source_size(), _update_mesh(true) +transform_instance::transform_instance(obs_data_t* data, obs_source_t* context) : obs::source_instance(data, context), _gfx_util(::streamfx::gfx::util::get()), _camera_mode(), _camera_fov(), _params(), _corners(), _standard_effect(), _transform_effect(), _sampler(), _cache_rendered(), _mipmap_enabled(), _source_rendered(), _source_size(), _update_mesh(true) { { auto gctx = obs::gs::context(); @@ -228,10 +225,7 @@ void transform_instance::update(obs_data_t* settings) } { // Corners std::pair opts[] = { - {ST_KEY_CORNERS_TOPLEFT "X", _corners.tl.x}, {ST_KEY_CORNERS_TOPLEFT "Y", _corners.tl.y}, - {ST_KEY_CORNERS_TOPRIGHT "X", _corners.tr.x}, {ST_KEY_CORNERS_TOPRIGHT "Y", _corners.tr.y}, - {ST_KEY_CORNERS_BOTTOMLEFT "X", _corners.bl.x}, {ST_KEY_CORNERS_BOTTOMLEFT "Y", _corners.bl.y}, - {ST_KEY_CORNERS_BOTTOMRIGHT "X", _corners.br.x}, {ST_KEY_CORNERS_BOTTOMRIGHT "Y", _corners.br.y}, + {ST_KEY_CORNERS_TOPLEFT "X", _corners.tl.x}, {ST_KEY_CORNERS_TOPLEFT "Y", _corners.tl.y}, {ST_KEY_CORNERS_TOPRIGHT "X", _corners.tr.x}, {ST_KEY_CORNERS_TOPRIGHT "Y", _corners.tr.y}, {ST_KEY_CORNERS_BOTTOMLEFT "X", _corners.bl.x}, {ST_KEY_CORNERS_BOTTOMLEFT "Y", _corners.bl.y}, {ST_KEY_CORNERS_BOTTOMRIGHT "X", _corners.br.x}, {ST_KEY_CORNERS_BOTTOMRIGHT "Y", _corners.br.y}, }; for (auto opt : opts) { opt.second = static_cast(obs_data_get_double(settings, opt.first.c_str()) / 100.0); @@ -377,15 +371,13 @@ void transform_instance::video_render(gs_effect_t* effect) if (!effect) effect = default_effect; - if (!base_width || !base_height || !parent || !target || !_standard_effect - || !_transform_effect) { // Skip if something is wrong. + if (!base_width || !base_height || !parent || !target || !_standard_effect || !_transform_effect) { // Skip if something is wrong. 
obs_source_skip_video_filter(_self); return; } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "3D Transform '%s' on '%s'", - obs_source_get_name(_self), obs_source_get_name(obs_filter_get_parent(_self))}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "3D Transform '%s' on '%s'", obs_source_get_name(_self), obs_source_get_name(obs_filter_get_parent(_self))}; #endif uint32_t cache_width = base_width; @@ -394,19 +386,13 @@ void transform_instance::video_render(gs_effect_t* effect) if (_mipmap_enabled) { double_t aspect = double_t(base_width) / double_t(base_height); double_t aspect2 = 1.0 / aspect; - cache_width = - std::clamp(uint32_t(pow(2, streamfx::util::math::get_power_of_two_exponent_ceil(cache_width))), 1u, 16384u); - cache_height = std::clamp(uint32_t(pow(2, streamfx::util::math::get_power_of_two_exponent_ceil(cache_height))), - 1u, 16384u); + cache_width = std::clamp(uint32_t(pow(2, streamfx::util::math::get_power_of_two_exponent_ceil(cache_width))), 1u, 16384u); + cache_height = std::clamp(uint32_t(pow(2, streamfx::util::math::get_power_of_two_exponent_ceil(cache_height))), 1u, 16384u); if (aspect > 1.0) { - cache_height = std::clamp( - uint32_t(pow(2, streamfx::util::math::get_power_of_two_exponent_ceil(uint64_t(cache_width * aspect2)))), - 1u, 16384u); + cache_height = std::clamp(uint32_t(pow(2, streamfx::util::math::get_power_of_two_exponent_ceil(uint64_t(cache_width * aspect2)))), 1u, 16384u); } else if (aspect < 1.0) { - cache_width = std::clamp( - uint32_t(pow(2, streamfx::util::math::get_power_of_two_exponent_ceil(uint64_t(cache_height * aspect)))), - 1u, 16384u); + cache_width = std::clamp(uint32_t(pow(2, streamfx::util::math::get_power_of_two_exponent_ceil(uint64_t(cache_height * aspect)))), 1u, 16384u); } } @@ -455,16 +441,13 @@ void transform_instance::video_render(gs_effect_t* effect) streamfx::obs::gs::debug_marker gdm{streamfx::obs::gs::debug_color_convert, "Mipmap"}; #endif - if (!_mipmap_texture || (_mipmap_texture->get_width() != cache_width) - || (_mipmap_texture->get_height() != cache_height)) { + if (!_mipmap_texture || (_mipmap_texture->get_width() != cache_width) || (_mipmap_texture->get_height() != cache_height)) { #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG streamfx::obs::gs::debug_marker gdr{streamfx::obs::gs::debug_color_allocate, "Allocate Mipmapped Texture"}; #endif std::size_t mip_levels = _mipmapper.calculate_max_mip_level(cache_width, cache_height); - _mipmap_texture = std::make_shared(cache_width, cache_height, GS_RGBA, - static_cast(mip_levels), nullptr, - streamfx::obs::gs::texture::flags::None); + _mipmap_texture = std::make_shared(cache_width, cache_height, GS_RGBA, static_cast(mip_levels), nullptr, streamfx::obs::gs::texture::flags::None); } _mipmapper.rebuild(_cache_texture, _mipmap_texture); @@ -513,11 +496,8 @@ void transform_instance::video_render(gs_effect_t* effect) if (_camera_mode != transform_mode::CORNER_PIN) { gs_load_vertexbuffer(_vertex_buffer->update(false)); gs_load_indexbuffer(nullptr); - if (auto v = _standard_effect.get_parameter("InputA"); - v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Texture) { - v.set_texture(_mipmap_enabled - ? (_mipmap_texture ? 
_mipmap_texture->get_object() : _cache_texture->get_object()) - : _cache_texture->get_object()); + if (auto v = _standard_effect.get_parameter("InputA"); v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Texture) { + v.set_texture(_mipmap_enabled ? (_mipmap_texture ? _mipmap_texture->get_object() : _cache_texture->get_object()) : _cache_texture->get_object()); v.set_sampler(_sampler.get_object()); } while (gs_effect_loop(_standard_effect.get_object(), "Draw")) { @@ -527,27 +507,20 @@ void transform_instance::video_render(gs_effect_t* effect) } else { gs_load_vertexbuffer(nullptr); gs_load_indexbuffer(nullptr); - if (auto v = _transform_effect.get_parameter("InputA"); - v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Texture) { - v.set_texture(_mipmap_enabled - ? (_mipmap_texture ? _mipmap_texture->get_object() : _cache_texture->get_object()) - : _cache_texture->get_object()); + if (auto v = _transform_effect.get_parameter("InputA"); v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Texture) { + v.set_texture(_mipmap_enabled ? (_mipmap_texture ? _mipmap_texture->get_object() : _cache_texture->get_object()) : _cache_texture->get_object()); v.set_sampler(_sampler.get_object()); } - if (auto v = _transform_effect.get_parameter("CornerTL"); - v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Float2) { + if (auto v = _transform_effect.get_parameter("CornerTL"); v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Float2) { v.set_float2(_corners.tl); } - if (auto v = _transform_effect.get_parameter("CornerTR"); - v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Float2) { + if (auto v = _transform_effect.get_parameter("CornerTR"); v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Float2) { v.set_float2(_corners.tr); } - if (auto v = _transform_effect.get_parameter("CornerBL"); - v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Float2) { + if (auto v = _transform_effect.get_parameter("CornerBL"); v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Float2) { v.set_float2(_corners.bl); } - if (auto v = _transform_effect.get_parameter("CornerBR"); - v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Float2) { + if (auto v = _transform_effect.get_parameter("CornerBR"); v.get_type() == ::streamfx::obs::gs::effect_parameter::type::Float2) { v.set_float2(_corners.br); } while (gs_effect_loop(_transform_effect.get_object(), "CornerPin")) { @@ -652,8 +625,7 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::filter::transform::transform_factory::on_manual_open, nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::filter::transform::transform_factory::on_manual_open, nullptr); } #endif @@ -662,19 +634,14 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) auto grp = obs_properties_create(); { // Projection Mode - auto p = obs_properties_add_list(grp, ST_KEY_CAMERA_MODE, D_TRANSLATE(ST_I18N_CAMERA_MODE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_CAMERA_MODE_CORNER_PIN), - static_cast(transform_mode::CORNER_PIN)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_CAMERA_MODE_ORTHOGRAPHIC), - static_cast(transform_mode::ORTHOGRAPHIC)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_CAMERA_MODE_PERSPECTIVE), - 
static_cast(transform_mode::PERSPECTIVE)); + auto p = obs_properties_add_list(grp, ST_KEY_CAMERA_MODE, D_TRANSLATE(ST_I18N_CAMERA_MODE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_CAMERA_MODE_CORNER_PIN), static_cast(transform_mode::CORNER_PIN)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_CAMERA_MODE_ORTHOGRAPHIC), static_cast(transform_mode::ORTHOGRAPHIC)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_CAMERA_MODE_PERSPECTIVE), static_cast(transform_mode::PERSPECTIVE)); obs_property_set_modified_callback(p, modified_camera_mode); } { // Field Of View - auto p = obs_properties_add_float_slider(grp, ST_KEY_CAMERA_FIELDOFVIEW, - D_TRANSLATE(ST_I18N_CAMERA_FIELDOFVIEW), 1.0, 179.0, 0.01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_CAMERA_FIELDOFVIEW, D_TRANSLATE(ST_I18N_CAMERA_FIELDOFVIEW), 1.0, 179.0, 0.01); } obs_properties_add_group(pr, ST_I18N_CAMERA, D_TRANSLATE(ST_I18N_CAMERA), OBS_GROUP_NORMAL, grp); @@ -692,9 +659,7 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) {ST_KEY_POSITION_Z, "Z"}, }; for (const auto& opt : opts) { - auto p = obs_properties_add_float(grp, opt.first.c_str(), D_TRANSLATE(opt.second.c_str()), - std::numeric_limits::lowest(), - std::numeric_limits::max(), 0.01); + auto p = obs_properties_add_float(grp, opt.first.c_str(), D_TRANSLATE(opt.second.c_str()), std::numeric_limits::lowest(), std::numeric_limits::max(), 0.01); } obs_properties_add_group(pr, ST_I18N_POSITION, D_TRANSLATE(ST_I18N_POSITION), OBS_GROUP_NORMAL, grp); @@ -708,8 +673,7 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) {ST_KEY_ROTATION_Z, D_TRANSLATE(ST_I18N_ROTATION ".Z")}, }; for (const auto& opt : opts) { - auto p = obs_properties_add_float_slider(grp, opt.first.c_str(), D_TRANSLATE(opt.second.c_str()), - -180.0, 180.0, 0.01); + auto p = obs_properties_add_float_slider(grp, opt.first.c_str(), D_TRANSLATE(opt.second.c_str()), -180.0, 180.0, 0.01); obs_property_float_set_suffix(p, "° Deg"); } @@ -737,8 +701,7 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) {ST_KEY_SHEAR_Y, "Y"}, }; for (const auto& opt : opts) { - auto p = - obs_properties_add_float_slider(grp, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); + auto p = obs_properties_add_float_slider(grp, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); obs_property_float_set_suffix(p, "%"); } @@ -757,13 +720,11 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) {ST_KEY_CORNERS_TOPLEFT "Y", "Y"}, }; for (auto& opt : opts) { - auto p = - obs_properties_add_float_slider(grp2, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); + auto p = obs_properties_add_float_slider(grp2, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); obs_property_float_set_suffix(p, "%"); } - obs_properties_add_group(grp, ST_I18N_CORNERS_TOPLEFT, D_TRANSLATE(ST_I18N_CORNERS_TOPLEFT), - OBS_GROUP_NORMAL, grp2); + obs_properties_add_group(grp, ST_I18N_CORNERS_TOPLEFT, D_TRANSLATE(ST_I18N_CORNERS_TOPLEFT), OBS_GROUP_NORMAL, grp2); } { // Top Right auto grp2 = obs_properties_create(); @@ -773,13 +734,11 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) {ST_KEY_CORNERS_TOPRIGHT "Y", "Y"}, }; for (auto& opt : opts) { - auto p = - obs_properties_add_float_slider(grp2, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); + auto p = obs_properties_add_float_slider(grp2, opt.first.c_str(), 
opt.second.c_str(), -200.0, 200.0, 0.01); obs_property_float_set_suffix(p, "%"); } - obs_properties_add_group(grp, ST_I18N_CORNERS_TOPRIGHT, D_TRANSLATE(ST_I18N_CORNERS_TOPRIGHT), - OBS_GROUP_NORMAL, grp2); + obs_properties_add_group(grp, ST_I18N_CORNERS_TOPRIGHT, D_TRANSLATE(ST_I18N_CORNERS_TOPRIGHT), OBS_GROUP_NORMAL, grp2); } { // Bottom Left auto grp2 = obs_properties_create(); @@ -789,13 +748,11 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) {ST_KEY_CORNERS_BOTTOMLEFT "Y", "Y"}, }; for (auto& opt : opts) { - auto p = - obs_properties_add_float_slider(grp2, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); + auto p = obs_properties_add_float_slider(grp2, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); obs_property_float_set_suffix(p, "%"); } - obs_properties_add_group(grp, ST_I18N_CORNERS_BOTTOMLEFT, D_TRANSLATE(ST_I18N_CORNERS_BOTTOMLEFT), - OBS_GROUP_NORMAL, grp2); + obs_properties_add_group(grp, ST_I18N_CORNERS_BOTTOMLEFT, D_TRANSLATE(ST_I18N_CORNERS_BOTTOMLEFT), OBS_GROUP_NORMAL, grp2); } { // Bottom Right auto grp2 = obs_properties_create(); @@ -805,13 +762,11 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) {ST_KEY_CORNERS_BOTTOMRIGHT "Y", "Y"}, }; for (auto& opt : opts) { - auto p = - obs_properties_add_float_slider(grp2, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); + auto p = obs_properties_add_float_slider(grp2, opt.first.c_str(), opt.second.c_str(), -200.0, 200.0, 0.01); obs_property_float_set_suffix(p, "%"); } - obs_properties_add_group(grp, ST_I18N_CORNERS_BOTTOMRIGHT, D_TRANSLATE(ST_I18N_CORNERS_BOTTOMRIGHT), - OBS_GROUP_NORMAL, grp2); + obs_properties_add_group(grp, ST_I18N_CORNERS_BOTTOMRIGHT, D_TRANSLATE(ST_I18N_CORNERS_BOTTOMRIGHT), OBS_GROUP_NORMAL, grp2); } obs_properties_add_group(pr, ST_I18N_CORNERS, D_TRANSLATE(ST_I18N_CORNERS), OBS_GROUP_NORMAL, grp); @@ -826,8 +781,7 @@ obs_properties_t* transform_factory::get_properties2(transform_instance* data) } { // Order - auto p = obs_properties_add_list(grp, ST_KEY_ROTATION_ORDER, D_TRANSLATE(ST_I18N_ROTATION_ORDER), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_ROTATION_ORDER, D_TRANSLATE(ST_I18N_ROTATION_ORDER), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_ROTATION_ORDER_XYZ), RotationOrder::XYZ); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_ROTATION_ORDER_XZY), RotationOrder::XZY); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_ROTATION_ORDER_YXZ), RotationOrder::YXZ); diff --git a/source/filters/filter-transform.hpp b/source/filters/filter-transform.hpp index fc57c535..a994cc54 100644 --- a/source/filters/filter-transform.hpp +++ b/source/filters/filter-transform.hpp @@ -80,8 +80,7 @@ namespace streamfx::filter::transform { virtual void video_render(gs_effect_t*) override; }; - class transform_factory - : public obs::source_factory { + class transform_factory : public obs::source_factory { public: transform_factory(); virtual ~transform_factory() override; diff --git a/source/filters/filter-upscaling.cpp b/source/filters/filter-upscaling.cpp index d471fdaa..4c443147 100644 --- a/source/filters/filter-upscaling.cpp +++ b/source/filters/filter-upscaling.cpp @@ -76,10 +76,7 @@ std::string streamfx::filter::upscaling::string(upscaling_provider provider) //------------------------------------------------------------------------------ // Instance 
//------------------------------------------------------------------------------ -upscaling_instance::upscaling_instance(obs_data_t* data, obs_source_t* self) - : obs::source_instance(data, self), _in_size(1, 1), _out_size(1, 1), _provider(upscaling_provider::INVALID), - _provider_ui(upscaling_provider::INVALID), _provider_ready(false), _provider_lock(), _provider_task(), _input(), - _output(), _dirty(false) +upscaling_instance::upscaling_instance(obs_data_t* data, obs_source_t* self) : obs::source_instance(data, self), _in_size(1, 1), _out_size(1, 1), _provider(upscaling_provider::INVALID), _provider_ui(upscaling_provider::INVALID), _provider_ready(false), _provider_lock(), _provider_task(), _input(), _output(), _dirty(false) { D_LOG_DEBUG("Initializating... (Addr: 0x%" PRIuPTR ")", this); @@ -92,8 +89,7 @@ upscaling_instance::upscaling_instance(obs_data_t* data, obs_source_t* self) _output = _input->get_texture(); // Load the required effect. - _standard_effect = - std::make_shared<::streamfx::obs::gs::effect>(::streamfx::data_file_path("effects/standard.effect")); + _standard_effect = std::make_shared<::streamfx::obs::gs::effect>(::streamfx::data_file_path("effects/standard.effect")); // Create Samplers _channel0_sampler = std::make_shared<::streamfx::obs::gs::sampler>(); @@ -246,8 +242,7 @@ void upscaling_instance::video_render(gs_effect_t* effect) #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG ::streamfx::obs::gs::debug_marker profiler0{::streamfx::obs::gs::debug_color_source, "StreamFX Upscaling"}; - ::streamfx::obs::gs::debug_marker profiler0_0{::streamfx::obs::gs::debug_color_gray, "'%s' on '%s'", - obs_source_get_name(_self), obs_source_get_name(parent)}; + ::streamfx::obs::gs::debug_marker profiler0_0{::streamfx::obs::gs::debug_color_gray, "'%s' on '%s'", obs_source_get_name(_self), obs_source_get_name(parent)}; #endif if (_dirty) { @@ -353,8 +348,7 @@ void streamfx::filter::upscaling::upscaling_instance::switch_provider(upscaling_ // - Doesn't guarantee that the task is properly killed off. // Log information. - D_LOG_INFO("Instance '%s' is switching provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(_provider), - cstring(provider)); + D_LOG_INFO("Instance '%s' is switching provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(_provider), cstring(provider)); // If there is an existing task, attempt to cancel it. if (_provider_task) { @@ -374,8 +368,7 @@ void streamfx::filter::upscaling::upscaling_instance::switch_provider(upscaling_ _provider = provider; // Then spawn a new task to switch provider. - _provider_task = streamfx::threadpool()->push( - std::bind(&upscaling_instance::task_switch_provider, this, std::placeholders::_1), spd); + _provider_task = streamfx::threadpool()->push(std::bind(&upscaling_instance::task_switch_provider, this, std::placeholders::_1), spd); } void streamfx::filter::upscaling::upscaling_instance::task_switch_provider(util::threadpool::task_data_t data) @@ -417,8 +410,7 @@ void streamfx::filter::upscaling::upscaling_instance::task_switch_provider(util: } // Log information. - D_LOG_INFO("Instance '%s' switched provider from '%s' to '%s'.", obs_source_get_name(_self), - cstring(spd->provider), cstring(_provider)); + D_LOG_INFO("Instance '%s' switched provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(spd->provider), cstring(_provider)); // 5. Set the new provider as valid. 
_provider_ready = true; @@ -462,20 +454,16 @@ void streamfx::filter::upscaling::upscaling_instance::nvvfxsr_process() void streamfx::filter::upscaling::upscaling_instance::nvvfxsr_properties(obs_properties_t* props) { obs_properties_t* grp = obs_properties_create(); - obs_properties_add_group(props, ST_KEY_NVIDIA_SUPERRES, D_TRANSLATE(ST_I18N_NVIDIA_SUPERRES), OBS_GROUP_NORMAL, - grp); + obs_properties_add_group(props, ST_KEY_NVIDIA_SUPERRES, D_TRANSLATE(ST_I18N_NVIDIA_SUPERRES), OBS_GROUP_NORMAL, grp); { - auto p = - obs_properties_add_list(grp, ST_KEY_NVIDIA_SUPERRES_STRENGTH, D_TRANSLATE(ST_I18N_NVIDIA_SUPERRES_STRENGTH), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_NVIDIA_SUPERRES_STRENGTH, D_TRANSLATE(ST_I18N_NVIDIA_SUPERRES_STRENGTH), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_NVIDIA_SUPERRES_STRENGTH_WEAK), 0); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_NVIDIA_SUPERRES_STRENGTH_STRONG), 1); } { - auto p = obs_properties_add_float_slider(grp, ST_KEY_NVIDIA_SUPERRES_SCALE, - D_TRANSLATE(ST_I18N_NVIDIA_SUPERRES_SCALE), 100.00, 400.00, .01); + auto p = obs_properties_add_float_slider(grp, ST_KEY_NVIDIA_SUPERRES_SCALE, D_TRANSLATE(ST_I18N_NVIDIA_SUPERRES_SCALE), 100.00, 400.00, .01); obs_property_float_set_suffix(p, " %"); } } @@ -485,8 +473,7 @@ void streamfx::filter::upscaling::upscaling_instance::nvvfxsr_update(obs_data_t* if (!_nvidia_fx) return; - _nvidia_fx->set_strength( - static_cast(obs_data_get_int(data, ST_KEY_NVIDIA_SUPERRES_STRENGTH) == 0 ? 0. : 1.)); + _nvidia_fx->set_strength(static_cast(obs_data_get_int(data, ST_KEY_NVIDIA_SUPERRES_STRENGTH) == 0 ? 0. : 1.)); _nvidia_fx->set_scale(static_cast(obs_data_get_double(data, ST_KEY_NVIDIA_SUPERRES_SCALE) / 100.)); } @@ -577,8 +564,7 @@ obs_properties_t* upscaling_factory::get_properties2(upscaling_instance* data) #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), upscaling_factory::on_manual_open, - nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), upscaling_factory::on_manual_open, nullptr); } #endif @@ -591,13 +577,10 @@ obs_properties_t* upscaling_factory::get_properties2(upscaling_instance* data) obs_properties_add_group(pr, S_ADVANCED, D_TRANSLATE(S_ADVANCED), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_PROVIDER, D_TRANSLATE(ST_I18N_PROVIDER), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_PROVIDER, D_TRANSLATE(ST_I18N_PROVIDER), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback(p, modified_provider); - obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), - static_cast(upscaling_provider::AUTOMATIC)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_PROVIDER_NVIDIA_SUPERRES), - static_cast(upscaling_provider::NVIDIA_SUPERRESOLUTION)); + obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), static_cast(upscaling_provider::AUTOMATIC)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_PROVIDER_NVIDIA_SUPERRES), static_cast(upscaling_provider::NVIDIA_SUPERRESOLUTION)); } } diff --git a/source/filters/filter-upscaling.hpp b/source/filters/filter-upscaling.hpp index abc31894..aac5b54b 100644 --- a/source/filters/filter-upscaling.hpp +++ b/source/filters/filter-upscaling.hpp @@ -82,9 +82,7 @@ namespace streamfx::filter::upscaling { #endif }; - class upscaling_factory - : public 
::streamfx::obs::source_factory<::streamfx::filter::upscaling::upscaling_factory, - ::streamfx::filter::upscaling::upscaling_instance> { + class upscaling_factory : public ::streamfx::obs::source_factory<::streamfx::filter::upscaling::upscaling_factory, ::streamfx::filter::upscaling::upscaling_instance> { #ifdef ENABLE_FILTER_UPSCALING_NVIDIA bool _nvidia_available; std::shared_ptr<::streamfx::nvidia::cuda::obs> _nvcuda; diff --git a/source/filters/filter-virtual-greenscreen.cpp b/source/filters/filter-virtual-greenscreen.cpp index d6a4a319..772c3207 100644 --- a/source/filters/filter-virtual-greenscreen.cpp +++ b/source/filters/filter-virtual-greenscreen.cpp @@ -77,9 +77,7 @@ std::string streamfx::filter::virtual_greenscreen::string(virtual_greenscreen_pr virtual_greenscreen_instance::virtual_greenscreen_instance(obs_data_t* data, obs_source_t* self) : obs::source_instance(data, self), - _size(1, 1), _provider(virtual_greenscreen_provider::INVALID), - _provider_ui(virtual_greenscreen_provider::INVALID), _provider_ready(false), _provider_lock(), _provider_task(), - _effect(), _channel0_sampler(), _channel1_sampler(), _input(), _output_color(), _output_alpha(), _dirty(true) + _size(1, 1), _provider(virtual_greenscreen_provider::INVALID), _provider_ui(virtual_greenscreen_provider::INVALID), _provider_ready(false), _provider_lock(), _provider_task(), _effect(), _channel0_sampler(), _channel1_sampler(), _input(), _output_color(), _output_alpha(), _dirty(true) { D_LOG_DEBUG("Initializating... (Addr: 0x%" PRIuPTR ")", this); @@ -155,8 +153,7 @@ void virtual_greenscreen_instance::migrate(obs_data_t* data, uint64_t version) { void virtual_greenscreen_instance::update(obs_data_t* data) { // Check if the user changed which Denoising provider we use. - virtual_greenscreen_provider provider = - static_cast(obs_data_get_int(data, ST_KEY_PROVIDER)); + virtual_greenscreen_provider provider = static_cast(obs_data_get_int(data, ST_KEY_PROVIDER)); if (provider == virtual_greenscreen_provider::AUTOMATIC) { provider = virtual_greenscreen_factory::get()->find_ideal_provider(); } @@ -252,10 +249,8 @@ void virtual_greenscreen_instance::video_render(gs_effect_t* effect) } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler0{::streamfx::obs::gs::debug_color_source, - "StreamFX Virtual Green-Screen"}; - ::streamfx::obs::gs::debug_marker profiler0_0{::streamfx::obs::gs::debug_color_gray, "'%s' on '%s'", - obs_source_get_name(_self), obs_source_get_name(parent)}; + ::streamfx::obs::gs::debug_marker profiler0{::streamfx::obs::gs::debug_color_source, "StreamFX Virtual Green-Screen"}; + ::streamfx::obs::gs::debug_marker profiler0_0{::streamfx::obs::gs::debug_color_gray, "'%s' on '%s'", obs_source_get_name(_self), obs_source_get_name(parent)}; #endif if (_dirty) { @@ -349,8 +344,7 @@ struct switch_provider_data_t { virtual_greenscreen_provider provider; }; -void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::switch_provider( - virtual_greenscreen_provider provider) +void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::switch_provider(virtual_greenscreen_provider provider) { std::unique_lock ul(_provider_lock); @@ -364,8 +358,7 @@ void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::switch // - Doesn't guarantee that the task is properly killed off. // Log information. 
- D_LOG_INFO("Instance '%s' is switching provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(_provider), - cstring(provider)); + D_LOG_INFO("Instance '%s' is switching provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(_provider), cstring(provider)); // If there is an existing task, attempt to cancel it. if (_provider_task) { @@ -385,12 +378,10 @@ void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::switch _provider = provider; // Then spawn a new task to switch provider. - _provider_task = streamfx::threadpool()->push( - std::bind(&virtual_greenscreen_instance::task_switch_provider, this, std::placeholders::_1), spd); + _provider_task = streamfx::threadpool()->push(std::bind(&virtual_greenscreen_instance::task_switch_provider, this, std::placeholders::_1), spd); } -void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::task_switch_provider( - util::threadpool::task_data_t data) +void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::task_switch_provider(util::threadpool::task_data_t data) { std::shared_ptr spd = std::static_pointer_cast(data); @@ -429,8 +420,7 @@ void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::task_s } // Log information. - D_LOG_INFO("Instance '%s' switched provider from '%s' to '%s'.", obs_source_get_name(_self), - cstring(spd->provider), cstring(_provider)); + D_LOG_INFO("Instance '%s' switched provider from '%s' to '%s'.", obs_source_get_name(_self), cstring(spd->provider), cstring(_provider)); // Set the new provider as valid. _provider_ready = true; @@ -460,8 +450,7 @@ void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::nvvfxg _nvidia_fx->size(_size); } -void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::nvvfxgs_process( - std::shared_ptr<::streamfx::obs::gs::texture>& color, std::shared_ptr<::streamfx::obs::gs::texture>& alpha) +void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::nvvfxgs_process(std::shared_ptr<::streamfx::obs::gs::texture>& color, std::shared_ptr<::streamfx::obs::gs::texture>& alpha) { if (!_nvidia_fx) { return; @@ -474,17 +463,12 @@ void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::nvvfxg void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::nvvfxgs_properties(obs_properties_t* props) { obs_properties_t* grp = obs_properties_create(); - obs_properties_add_group(props, ST_KEY_NVIDIA_GREENSCREEN, D_TRANSLATE(ST_I18N_NVIDIA_GREENSCREEN), - OBS_GROUP_NORMAL, grp); + obs_properties_add_group(props, ST_KEY_NVIDIA_GREENSCREEN, D_TRANSLATE(ST_I18N_NVIDIA_GREENSCREEN), OBS_GROUP_NORMAL, grp); { - auto p = - obs_properties_add_list(grp, ST_KEY_NVIDIA_GREENSCREEN_MODE, D_TRANSLATE(ST_I18N_NVIDIA_GREENSCREEN_MODE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_NVIDIA_GREENSCREEN_MODE_PERFORMANCE), - static_cast(::streamfx::nvidia::vfx::greenscreen_mode::PERFORMANCE)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_NVIDIA_GREENSCREEN_MODE_QUALITY), - static_cast(::streamfx::nvidia::vfx::greenscreen_mode::QUALITY)); + auto p = obs_properties_add_list(grp, ST_KEY_NVIDIA_GREENSCREEN_MODE, D_TRANSLATE(ST_I18N_NVIDIA_GREENSCREEN_MODE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_NVIDIA_GREENSCREEN_MODE_PERFORMANCE), static_cast(::streamfx::nvidia::vfx::greenscreen_mode::PERFORMANCE)); + obs_property_list_add_int(p, 
D_TRANSLATE(ST_I18N_NVIDIA_GREENSCREEN_MODE_QUALITY), static_cast(::streamfx::nvidia::vfx::greenscreen_mode::QUALITY)); } } @@ -493,8 +477,7 @@ void streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance::nvvfxg if (!_nvidia_fx) return; - _nvidia_fx->set_mode( - static_cast<::streamfx::nvidia::vfx::greenscreen_mode>(obs_data_get_int(data, ST_KEY_NVIDIA_GREENSCREEN_MODE))); + _nvidia_fx->set_mode(static_cast<::streamfx::nvidia::vfx::greenscreen_mode>(obs_data_get_int(data, ST_KEY_NVIDIA_GREENSCREEN_MODE))); } #endif @@ -557,8 +540,7 @@ void virtual_greenscreen_factory::get_defaults2(obs_data_t* data) obs_data_set_default_int(data, ST_KEY_PROVIDER, static_cast(virtual_greenscreen_provider::AUTOMATIC)); #ifdef ENABLE_FILTER_VIRTUAL_GREENSCREEN_NVIDIA - obs_data_set_default_int(data, ST_KEY_NVIDIA_GREENSCREEN_MODE, - static_cast(::streamfx::nvidia::vfx::greenscreen_mode::QUALITY)); + obs_data_set_default_int(data, ST_KEY_NVIDIA_GREENSCREEN_MODE, static_cast(::streamfx::nvidia::vfx::greenscreen_mode::QUALITY)); #endif } @@ -581,8 +563,7 @@ obs_properties_t* virtual_greenscreen_factory::get_properties2(virtual_greenscre #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - virtual_greenscreen_factory::on_manual_open, nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), virtual_greenscreen_factory::on_manual_open, nullptr); } #endif @@ -595,13 +576,10 @@ obs_properties_t* virtual_greenscreen_factory::get_properties2(virtual_greenscre obs_properties_add_group(pr, S_ADVANCED, D_TRANSLATE(S_ADVANCED), OBS_GROUP_NORMAL, grp); { - auto p = obs_properties_add_list(grp, ST_KEY_PROVIDER, D_TRANSLATE(ST_I18N_PROVIDER), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(grp, ST_KEY_PROVIDER, D_TRANSLATE(ST_I18N_PROVIDER), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback(p, modified_provider); - obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), - static_cast(virtual_greenscreen_provider::AUTOMATIC)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_PROVIDER_NVIDIA_GREENSCREEN), - static_cast(virtual_greenscreen_provider::NVIDIA_GREENSCREEN)); + obs_property_list_add_int(p, D_TRANSLATE(S_STATE_AUTOMATIC), static_cast(virtual_greenscreen_provider::AUTOMATIC)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_PROVIDER_NVIDIA_GREENSCREEN), static_cast(virtual_greenscreen_provider::NVIDIA_GREENSCREEN)); } } @@ -624,8 +602,7 @@ bool virtual_greenscreen_factory::on_manual_open(obs_properties_t* props, obs_pr } #endif -bool streamfx::filter::virtual_greenscreen::virtual_greenscreen_factory::is_provider_available( - virtual_greenscreen_provider provider) +bool streamfx::filter::virtual_greenscreen::virtual_greenscreen_factory::is_provider_available(virtual_greenscreen_provider provider) { switch (provider) { #ifdef ENABLE_FILTER_VIRTUAL_GREENSCREEN_NVIDIA diff --git a/source/filters/filter-virtual-greenscreen.hpp b/source/filters/filter-virtual-greenscreen.hpp index 51849320..d253b4fd 100644 --- a/source/filters/filter-virtual-greenscreen.hpp +++ b/source/filters/filter-virtual-greenscreen.hpp @@ -76,16 +76,13 @@ namespace streamfx::filter::virtual_greenscreen { void nvvfxgs_load(); void nvvfxgs_unload(); void nvvfxgs_size(); - void nvvfxgs_process(std::shared_ptr<::streamfx::obs::gs::texture>& color, - std::shared_ptr<::streamfx::obs::gs::texture>& alpha); + void nvvfxgs_process(std::shared_ptr<::streamfx::obs::gs::texture>& color, 
std::shared_ptr<::streamfx::obs::gs::texture>& alpha); void nvvfxgs_properties(obs_properties_t* props); void nvvfxgs_update(obs_data_t* data); #endif }; - class virtual_greenscreen_factory : public ::streamfx::obs::source_factory< - ::streamfx::filter::virtual_greenscreen::virtual_greenscreen_factory, - ::streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance> { + class virtual_greenscreen_factory : public ::streamfx::obs::source_factory<::streamfx::filter::virtual_greenscreen::virtual_greenscreen_factory, ::streamfx::filter::virtual_greenscreen::virtual_greenscreen_instance> { #ifdef ENABLE_FILTER_VIRTUAL_GREENSCREEN_NVIDIA bool _nvidia_available; std::shared_ptr<::streamfx::nvidia::cuda::obs> _nvcuda; diff --git a/source/gfx/blur/gfx-blur-box-linear.cpp b/source/gfx/blur/gfx-blur-box-linear.cpp index a11a8e0f..f094997e 100644 --- a/source/gfx/blur/gfx-blur-box-linear.cpp +++ b/source/gfx/blur/gfx-blur-box-linear.cpp @@ -60,8 +60,7 @@ bool streamfx::gfx::blur::box_linear_factory::is_type_supported(::streamfx::gfx: } } -std::shared_ptr<::streamfx::gfx::blur::base> - streamfx::gfx::blur::box_linear_factory::create(::streamfx::gfx::blur::type type) +std::shared_ptr<::streamfx::gfx::blur::base> streamfx::gfx::blur::box_linear_factory::create(::streamfx::gfx::blur::type type) { switch (type) { case ::streamfx::gfx::blur::type::Area: @@ -174,8 +173,7 @@ std::shared_ptr<::streamfx::gfx::blur::box_linear_data> streamfx::gfx::blur::box return instance; } -streamfx::gfx::blur::box_linear::box_linear() - : _data(::streamfx::gfx::blur::box_linear_factory::get().data()), _size(1.), _step_scale({1., 1.}) +streamfx::gfx::blur::box_linear::box_linear() : _data(::streamfx::gfx::blur::box_linear_factory::get().data()), _size(1.), _step_scale({1., 1.}) { _rendertarget = std::make_shared<::streamfx::obs::gs::rendertarget>(GS_RGBA, GS_ZS_NONE); _rendertarget2 = std::make_shared<::streamfx::obs::gs::rendertarget>(GS_RGBA, GS_ZS_NONE); @@ -325,8 +323,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::box_linear_di auto gctx = streamfx::obs::gs::context(); #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - auto gdmp = - streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_azure_radiance, "Box Linear Directional Blur"); + auto gdmp = streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_azure_radiance, "Box Linear Directional Blur"); #endif float_t width = float_t(_input_texture->get_width()); @@ -349,8 +346,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::box_linear_di streamfx::obs::gs::effect effect = _data->get_effect(); if (effect) { effect.get_parameter("pImage").set_texture(_input_texture); - effect.get_parameter("pImageTexel") - .set_float2(float_t(1. / width * cos(_angle)), float_t(1.f / height * sin(_angle))); + effect.get_parameter("pImageTexel").set_float2(float_t(1. 
/ width * cos(_angle)), float_t(1.f / height * sin(_angle))); effect.get_parameter("pStepScale").set_float2(float_t(_step_scale.first), float_t(_step_scale.second)); effect.get_parameter("pSize").set_float(float_t(_size)); effect.get_parameter("pSizeInverseMul").set_float(float_t(1.0f / (float_t(_size) * 2.0f + 1.0f))); diff --git a/source/gfx/blur/gfx-blur-box-linear.hpp b/source/gfx/blur/gfx-blur-box-linear.hpp index bd7ac1ac..09c1e741 100644 --- a/source/gfx/blur/gfx-blur-box-linear.hpp +++ b/source/gfx/blur/gfx-blur-box-linear.hpp @@ -105,8 +105,7 @@ namespace streamfx::gfx { virtual std::shared_ptr<::streamfx::obs::gs::texture> get() override; }; - class box_linear_directional : public ::streamfx::gfx::blur::box_linear, - public ::streamfx::gfx::blur::base_angle { + class box_linear_directional : public ::streamfx::gfx::blur::box_linear, public ::streamfx::gfx::blur::base_angle { double_t _angle; public: diff --git a/source/gfx/blur/gfx-blur-box.cpp b/source/gfx/blur/gfx-blur-box.cpp index b397e403..9c243f78 100644 --- a/source/gfx/blur/gfx-blur-box.cpp +++ b/source/gfx/blur/gfx-blur-box.cpp @@ -71,8 +71,7 @@ std::shared_ptr<::streamfx::gfx::blur::base> streamfx::gfx::blur::box_factory::c case ::streamfx::gfx::blur::type::Area: return std::make_shared<::streamfx::gfx::blur::box>(); case ::streamfx::gfx::blur::type::Directional: - return std::static_pointer_cast<::streamfx::gfx::blur::box>( - std::make_shared<::streamfx::gfx::blur::box_directional>()); + return std::static_pointer_cast<::streamfx::gfx::blur::box>(std::make_shared<::streamfx::gfx::blur::box_directional>()); case ::streamfx::gfx::blur::type::Rotational: return std::make_shared<::streamfx::gfx::blur::box_rotational>(); case ::streamfx::gfx::blur::type::Zoom: @@ -183,8 +182,7 @@ std::shared_ptr<::streamfx::gfx::blur::box_data> streamfx::gfx::blur::box_factor return instance; } -streamfx::gfx::blur::box::box() - : _data(::streamfx::gfx::blur::box_factory::get().data()), _size(1.), _step_scale({1., 1.}) +streamfx::gfx::blur::box::box() : _data(::streamfx::gfx::blur::box_factory::get().data()), _size(1.), _step_scale({1., 1.}) { auto gctx = streamfx::obs::gs::context(); _rendertarget = std::make_shared<::streamfx::obs::gs::rendertarget>(GS_RGBA, GS_ZS_NONE); @@ -358,8 +356,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::box_direction streamfx::obs::gs::effect effect = _data->get_effect(); if (effect) { effect.get_parameter("pImage").set_texture(_input_texture); - effect.get_parameter("pImageTexel") - .set_float2(float_t(1. / width * cos(_angle)), float_t(1.f / height * sin(_angle))); + effect.get_parameter("pImageTexel").set_float2(float_t(1. 
/ width * cos(_angle)), float_t(1.f / height * sin(_angle))); effect.get_parameter("pStepScale").set_float2(float_t(_step_scale.first), float_t(_step_scale.second)); effect.get_parameter("pSize").set_float(float_t(_size)); effect.get_parameter("pSizeInverseMul").set_float(float_t(1.0f / (float_t(_size) * 2.0f + 1.0f))); diff --git a/source/gfx/blur/gfx-blur-box.hpp b/source/gfx/blur/gfx-blur-box.hpp index 3be31911..5508a45e 100644 --- a/source/gfx/blur/gfx-blur-box.hpp +++ b/source/gfx/blur/gfx-blur-box.hpp @@ -119,9 +119,7 @@ namespace streamfx::gfx { virtual std::shared_ptr<::streamfx::obs::gs::texture> render() override; }; - class box_rotational : public ::streamfx::gfx::blur::box, - public ::streamfx::gfx::blur::base_angle, - public ::streamfx::gfx::blur::base_center { + class box_rotational : public ::streamfx::gfx::blur::box, public ::streamfx::gfx::blur::base_angle, public ::streamfx::gfx::blur::base_center { std::pair _center; double_t _angle; diff --git a/source/gfx/blur/gfx-blur-dual-filtering.cpp b/source/gfx/blur/gfx-blur-dual-filtering.cpp index 6725264d..8ca9a70b 100644 --- a/source/gfx/blur/gfx-blur-dual-filtering.cpp +++ b/source/gfx/blur/gfx-blur-dual-filtering.cpp @@ -76,8 +76,7 @@ bool streamfx::gfx::blur::dual_filtering_factory::is_type_supported(::streamfx:: } } -std::shared_ptr<::streamfx::gfx::blur::base> - streamfx::gfx::blur::dual_filtering_factory::create(::streamfx::gfx::blur::type type) +std::shared_ptr<::streamfx::gfx::blur::base> streamfx::gfx::blur::dual_filtering_factory::create(::streamfx::gfx::blur::type type) { switch (type) { case ::streamfx::gfx::blur::type::Area: @@ -169,8 +168,7 @@ std::shared_ptr<::streamfx::gfx::blur::dual_filtering_data> streamfx::gfx::blur: return instance; } -streamfx::gfx::blur::dual_filtering::dual_filtering() - : _data(::streamfx::gfx::blur::dual_filtering_factory::get().data()), _size(0), _iterations(0) +streamfx::gfx::blur::dual_filtering::dual_filtering() : _data(::streamfx::gfx::blur::dual_filtering_factory::get().data()), _size(0), _iterations(0) { auto gctx = streamfx::obs::gs::context(); _rts.resize(ST_MAX_LEVELS + 1); @@ -267,8 +265,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::dual_filterin // Apply effect.get_parameter("pImage").set_texture(tex); effect.get_parameter("pImageSize").set_float2(static_cast(owidth), static_cast(oheight)); - effect.get_parameter("pImageTexel") - .set_float2(0.5f / static_cast(owidth), 0.5f / static_cast(oheight)); + effect.get_parameter("pImageTexel").set_float2(0.5f / static_cast(owidth), 0.5f / static_cast(oheight)); { auto op = _rts[n]->render(owidth, oheight); @@ -297,8 +294,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::dual_filterin // Apply effect.get_parameter("pImage").set_texture(tex); effect.get_parameter("pImageSize").set_float2(static_cast(iwidth), static_cast(iheight)); - effect.get_parameter("pImageTexel") - .set_float2(0.5f / static_cast(iwidth), 0.5f / static_cast(iheight)); + effect.get_parameter("pImageTexel").set_float2(0.5f / static_cast(iwidth), 0.5f / static_cast(iheight)); { auto op = _rts[n - 1]->render(owidth, oheight); diff --git a/source/gfx/blur/gfx-blur-gaussian-linear.cpp b/source/gfx/blur/gfx-blur-gaussian-linear.cpp index 1d3a8fd7..b7447349 100644 --- a/source/gfx/blur/gfx-blur-gaussian-linear.cpp +++ b/source/gfx/blur/gfx-blur-gaussian-linear.cpp @@ -45,8 +45,7 @@ streamfx::gfx::blur::gaussian_linear_data::gaussian_linear_data() : _gfx_util(:: // Find actual kernel width. 
for (double_t h = ST_SEARCH_DENSITY; h < ST_SEARCH_RANGE; h += ST_SEARCH_DENSITY) { - if (streamfx::util::math::gaussian(double_t(kernel_size + ST_SEARCH_EXTENSION), h) - > ST_SEARCH_THRESHOLD) { + if (streamfx::util::math::gaussian(double_t(kernel_size + ST_SEARCH_EXTENSION), h) > ST_SEARCH_THRESHOLD) { actual_width = h; break; } @@ -109,15 +108,13 @@ bool streamfx::gfx::blur::gaussian_linear_factory::is_type_supported(::streamfx: } } -std::shared_ptr<::streamfx::gfx::blur::base> - streamfx::gfx::blur::gaussian_linear_factory::create(::streamfx::gfx::blur::type v) +std::shared_ptr<::streamfx::gfx::blur::base> streamfx::gfx::blur::gaussian_linear_factory::create(::streamfx::gfx::blur::type v) { switch (v) { case ::streamfx::gfx::blur::type::Area: return std::make_shared<::streamfx::gfx::blur::gaussian_linear>(); case ::streamfx::gfx::blur::type::Directional: - return std::static_pointer_cast<::streamfx::gfx::blur::gaussian_linear>( - std::make_shared<::streamfx::gfx::blur::gaussian_linear_directional>()); + return std::static_pointer_cast<::streamfx::gfx::blur::gaussian_linear>(std::make_shared<::streamfx::gfx::blur::gaussian_linear_directional>()); default: throw std::runtime_error("Invalid type."); } @@ -224,8 +221,7 @@ std::shared_ptr<::streamfx::gfx::blur::gaussian_linear_data> streamfx::gfx::blur return instance; } -streamfx::gfx::blur::gaussian_linear::gaussian_linear() - : _data(::streamfx::gfx::blur::gaussian_linear_factory::get().data()), _size(1.), _step_scale({1., 1.}) +streamfx::gfx::blur::gaussian_linear::gaussian_linear() : _data(::streamfx::gfx::blur::gaussian_linear_factory::get().data()), _size(1.), _step_scale({1., 1.}) { auto gctx = streamfx::obs::gs::context(); @@ -391,8 +387,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::gaussian_line auto gctx = streamfx::obs::gs::context(); #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - auto gdmp = streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_azure_radiance, - "Gaussian Linear Directional Blur"); + auto gdmp = streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_azure_radiance, "Gaussian Linear Directional Blur"); #endif streamfx::obs::gs::effect effect = _data->get_effect(); @@ -420,8 +415,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::gaussian_line gs_stencil_op(GS_STENCIL_BOTH, GS_ZERO, GS_ZERO, GS_ZERO); effect.get_parameter("pImage").set_texture(_input_texture); - effect.get_parameter("pImageTexel") - .set_float2(float_t(1.f / width * cos(_angle)), float_t(1.f / height * sin(_angle))); + effect.get_parameter("pImageTexel").set_float2(float_t(1.f / width * cos(_angle)), float_t(1.f / height * sin(_angle))); effect.get_parameter("pStepScale").set_float2(float_t(_step_scale.first), float_t(_step_scale.second)); effect.get_parameter("pSize").set_float(float_t(_size)); effect.get_parameter("pKernel").set_value(kernel.data(), ST_MAX_KERNEL_SIZE); diff --git a/source/gfx/blur/gfx-blur-gaussian-linear.hpp b/source/gfx/blur/gfx-blur-gaussian-linear.hpp index 697186bb..50a8db29 100644 --- a/source/gfx/blur/gfx-blur-gaussian-linear.hpp +++ b/source/gfx/blur/gfx-blur-gaussian-linear.hpp @@ -114,8 +114,7 @@ namespace streamfx::gfx { virtual std::shared_ptr<::streamfx::obs::gs::texture> get() override; }; - class gaussian_linear_directional : public ::streamfx::gfx::blur::gaussian_linear, - public ::streamfx::gfx::blur::base_angle { + class gaussian_linear_directional : public ::streamfx::gfx::blur::gaussian_linear, public 
::streamfx::gfx::blur::base_angle { double_t _angle; public: diff --git a/source/gfx/blur/gfx-blur-gaussian.cpp b/source/gfx/blur/gfx-blur-gaussian.cpp index 91f27316..16c5f347 100644 --- a/source/gfx/blur/gfx-blur-gaussian.cpp +++ b/source/gfx/blur/gfx-blur-gaussian.cpp @@ -134,15 +134,13 @@ bool streamfx::gfx::blur::gaussian_factory::is_type_supported(::streamfx::gfx::b } } -std::shared_ptr<::streamfx::gfx::blur::base> - streamfx::gfx::blur::gaussian_factory::create(::streamfx::gfx::blur::type v) +std::shared_ptr<::streamfx::gfx::blur::base> streamfx::gfx::blur::gaussian_factory::create(::streamfx::gfx::blur::type v) { switch (v) { case ::streamfx::gfx::blur::type::Area: return std::make_shared<::streamfx::gfx::blur::gaussian>(); case ::streamfx::gfx::blur::type::Directional: - return std::static_pointer_cast<::streamfx::gfx::blur::gaussian>( - std::make_shared<::streamfx::gfx::blur::gaussian_directional>()); + return std::static_pointer_cast<::streamfx::gfx::blur::gaussian>(std::make_shared<::streamfx::gfx::blur::gaussian_directional>()); case ::streamfx::gfx::blur::type::Rotational: return std::make_shared<::streamfx::gfx::blur::gaussian_rotational>(); case ::streamfx::gfx::blur::type::Zoom: @@ -253,8 +251,7 @@ std::shared_ptr<::streamfx::gfx::blur::gaussian_data> streamfx::gfx::blur::gauss return instance; } -streamfx::gfx::blur::gaussian::gaussian() - : _data(::streamfx::gfx::blur::gaussian_factory::get().data()), _size(1.), _step_scale({1., 1.}) +streamfx::gfx::blur::gaussian::gaussian() : _data(::streamfx::gfx::blur::gaussian_factory::get().data()), _size(1.), _step_scale({1., 1.}) { auto gctx = streamfx::obs::gs::context(); _rendertarget = std::make_shared(GS_RGBA, GS_ZS_NONE); @@ -419,8 +416,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::gaussian_dire auto gctx = streamfx::obs::gs::context(); #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - auto gdmp = - streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_azure_radiance, "Gaussian Directional Blur"); + auto gdmp = streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_azure_radiance, "Gaussian Directional Blur"); #endif streamfx::obs::gs::effect effect = _data->get_effect(); @@ -448,8 +444,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::gaussian_dire gs_stencil_op(GS_STENCIL_BOTH, GS_ZERO, GS_ZERO, GS_ZERO); effect.get_parameter("pImage").set_texture(_input_texture); - effect.get_parameter("pImageTexel") - .set_float2(float_t(1.f / width * cos(m_angle)), float_t(1.f / height * sin(m_angle))); + effect.get_parameter("pImageTexel").set_float2(float_t(1.f / width * cos(m_angle)), float_t(1.f / height * sin(m_angle))); effect.get_parameter("pStepScale").set_float2(float_t(_step_scale.first), float_t(_step_scale.second)); effect.get_parameter("pSize").set_float(float_t(_size * ST_OVERSAMPLE_MULTIPLIER)); effect.get_parameter("pKernel").set_value(kernel.data(), ST_KERNEL_SIZE); @@ -477,8 +472,7 @@ std::shared_ptr<::streamfx::obs::gs::texture> streamfx::gfx::blur::gaussian_rota auto gctx = streamfx::obs::gs::context(); #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - auto gdmp = - streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_azure_radiance, "Gaussian Rotational Blur"); + auto gdmp = streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_azure_radiance, "Gaussian Rotational Blur"); #endif streamfx::obs::gs::effect effect = _data->get_effect(); diff --git a/source/gfx/blur/gfx-blur-gaussian.hpp 
b/source/gfx/blur/gfx-blur-gaussian.hpp index bb7c9083..654dc91c 100644 --- a/source/gfx/blur/gfx-blur-gaussian.hpp +++ b/source/gfx/blur/gfx-blur-gaussian.hpp @@ -129,9 +129,7 @@ namespace streamfx::gfx { virtual std::shared_ptr<::streamfx::obs::gs::texture> render() override; }; - class gaussian_rotational : public ::streamfx::gfx::blur::gaussian, - public ::streamfx::gfx::blur::base_angle, - public ::streamfx::gfx::blur::base_center { + class gaussian_rotational : public ::streamfx::gfx::blur::gaussian, public ::streamfx::gfx::blur::base_angle, public ::streamfx::gfx::blur::base_center { std::pair m_center; double_t m_angle; diff --git a/source/gfx/gfx-mipmapper.cpp b/source/gfx/gfx-mipmapper.cpp index e935d796..6974f98a 100644 --- a/source/gfx/gfx-mipmapper.cpp +++ b/source/gfx/gfx-mipmapper.cpp @@ -27,16 +27,14 @@ struct d3d_info { ID3D11Resource* target = nullptr; }; -void d3d_initialize(d3d_info& info, std::shared_ptr source, - std::shared_ptr target) +void d3d_initialize(d3d_info& info, std::shared_ptr source, std::shared_ptr target) { info.target = reinterpret_cast(gs_texture_get_obj(target->get_object())); info.device = reinterpret_cast(gs_get_device_obj()); info.device->GetImmediateContext(&info.context); } -void d3d_copy_subregion(d3d_info& info, std::shared_ptr source, uint32_t mip_level, - uint32_t width, uint32_t height) +void d3d_copy_subregion(d3d_info& info, std::shared_ptr source, uint32_t mip_level, uint32_t width, uint32_t height) { D3D11_BOX box = {0, 0, 0, width, height, 1}; auto source_ref = reinterpret_cast(gs_texture_get_obj(source->get_object())); @@ -107,8 +105,7 @@ std::string opengl_translate_framebuffer_status(GLenum error) throw std::runtime_error(sstr.str()); \ } -void opengl_initialize(opengl_info& info, std::shared_ptr source, - std::shared_ptr target) +void opengl_initialize(opengl_info& info, std::shared_ptr source, std::shared_ptr target) { info.target = *reinterpret_cast(gs_texture_get_obj(target->get_object())); @@ -120,8 +117,7 @@ void opengl_finalize(opengl_info& info) glDeleteFramebuffers(1, &info.fbo); } -void opengl_copy_subregion(opengl_info& info, std::shared_ptr source, uint32_t mip_level, - uint32_t width, uint32_t height) +void opengl_copy_subregion(opengl_info& info, std::shared_ptr source, uint32_t mip_level, uint32_t width, uint32_t height) { GLuint source_ref = *reinterpret_cast(gs_texture_get_obj(source->get_object())); @@ -134,11 +130,8 @@ void opengl_copy_subregion(opengl_info& info, std::shared_ptr Texture Unit 1 glActiveTexture(GL_TEXTURE1); @@ -147,8 +140,7 @@ void opengl_copy_subregion(opengl_info& info, std::shared_ptr(mip_level), 0, 0, 0, 0, static_cast(width), - static_cast(height)); + glCopyTexSubImage2D(GL_TEXTURE_2D, static_cast(mip_level), 0, 0, 0, 0, static_cast(width), static_cast(height)); D_OPENGL_CHECK_ERROR("glCopyTexSubImage2D(GL_TEXTURE_2D, mip_level, 0, 0, 0, 0, width, height);"); // Target -/-> Texture Unit 1 @@ -192,12 +184,10 @@ streamfx::gfx::mipmapper::mipmapper() : _gfx_util(::streamfx::gfx::util::get()) uint32_t streamfx::gfx::mipmapper::calculate_max_mip_level(uint32_t width, uint32_t height) { - return static_cast( - 1 + std::lroundl(floor(log2(std::max(static_cast(width), static_cast(height)))))); + return static_cast(1 + std::lroundl(floor(log2(std::max(static_cast(width), static_cast(height)))))); } -void streamfx::gfx::mipmapper::rebuild(std::shared_ptr source, - std::shared_ptr target) +void streamfx::gfx::mipmapper::rebuild(std::shared_ptr source, std::shared_ptr target) { { // Validate arguments 
and structure. if (!source || !target) @@ -250,8 +240,7 @@ void streamfx::gfx::mipmapper::rebuild(std::shared_ptr(width >> mip, 1); diff --git a/source/gfx/gfx-mipmapper.hpp b/source/gfx/gfx-mipmapper.hpp index a016742e..ec37c013 100644 --- a/source/gfx/gfx-mipmapper.hpp +++ b/source/gfx/gfx-mipmapper.hpp @@ -34,7 +34,6 @@ namespace streamfx::gfx { uint32_t calculate_max_mip_level(uint32_t width, uint32_t height); - void rebuild(std::shared_ptr source, - std::shared_ptr target); + void rebuild(std::shared_ptr source, std::shared_ptr target); }; } // namespace streamfx::gfx diff --git a/source/gfx/gfx-source-texture.cpp b/source/gfx/gfx-source-texture.cpp index 70c78e00..1b961962 100644 --- a/source/gfx/gfx-source-texture.cpp +++ b/source/gfx/gfx-source-texture.cpp @@ -17,8 +17,7 @@ streamfx::gfx::source_texture::~source_texture() } } -streamfx::gfx::source_texture::source_texture(streamfx::obs::source child, streamfx::obs::source parent) - : _parent(parent), _child(child) +streamfx::gfx::source_texture::source_texture(streamfx::obs::source child, streamfx::obs::source parent) : _parent(parent), _child(child) { // Verify that 'child' and 'parent' exist. if (!_child || !_parent) { @@ -70,8 +69,7 @@ std::shared_ptr streamfx::gfx::source_texture::rende if (_child) { #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - auto cctr = streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_capture, "gfx::source_texture '%s'", - obs_source_get_name(_child.get())); + auto cctr = streamfx::obs::gs::debug_marker(streamfx::obs::gs::debug_color_capture, "gfx::source_texture '%s'", obs_source_get_name(_child.get())); #endif auto op = _rt->render(static_cast(width), static_cast(height)); vec4 black; diff --git a/source/gfx/gfx-util.cpp b/source/gfx/gfx-util.cpp index 0220175d..ea78f353 100644 --- a/source/gfx/gfx-util.cpp +++ b/source/gfx/gfx-util.cpp @@ -111,8 +111,7 @@ void streamfx::gfx::util::draw_line(float x, float y, float x2, float y2, uint32 gs_load_vertexbuffer(nullptr); } -void streamfx::gfx::util::draw_arrow(float x, float y, float x2, float y2, float w /*= 0.*/, - uint32_t color /*= 0xFFFFFFFF*/) +void streamfx::gfx::util::draw_arrow(float x, float y, float x2, float y2, float w /*= 0.*/, uint32_t color /*= 0xFFFFFFFF*/) { obs::gs::context gctx{}; @@ -179,8 +178,7 @@ void streamfx::gfx::util::draw_arrow(float x, float y, float x2, float y2, float gs_load_vertexbuffer(nullptr); } -void streamfx::gfx::util::draw_rectangle(float x, float y, float w, float h, bool frame, - uint32_t color /*= 0xFFFFFFFF*/) +void streamfx::gfx::util::draw_rectangle(float x, float y, float w, float h, bool frame, uint32_t color /*= 0xFFFFFFFF*/) { obs::gs::context gctx{}; diff --git a/source/gfx/lut/gfx-lut-consumer.cpp b/source/gfx/lut/gfx-lut-consumer.cpp index b57a33c0..31b34b9b 100644 --- a/source/gfx/lut/gfx-lut-consumer.cpp +++ b/source/gfx/lut/gfx-lut-consumer.cpp @@ -15,9 +15,7 @@ streamfx::gfx::lut::consumer::consumer() streamfx::gfx::lut::consumer::~consumer() = default; -std::shared_ptr - streamfx::gfx::lut::consumer::prepare(streamfx::gfx::lut::color_depth depth, - std::shared_ptr lut) +std::shared_ptr streamfx::gfx::lut::consumer::prepare(streamfx::gfx::lut::color_depth depth, std::shared_ptr lut) { auto gctx = streamfx::obs::gs::context(); @@ -47,9 +45,7 @@ std::shared_ptr return effect; } -void streamfx::gfx::lut::consumer::consume(streamfx::gfx::lut::color_depth depth, - std::shared_ptr lut, - std::shared_ptr texture) +void 
streamfx::gfx::lut::consumer::consume(streamfx::gfx::lut::color_depth depth, std::shared_ptr lut, std::shared_ptr texture) { auto gctx = streamfx::obs::gs::context(); diff --git a/source/gfx/lut/gfx-lut-consumer.hpp b/source/gfx/lut/gfx-lut-consumer.hpp index 86a0f302..76fb7c10 100644 --- a/source/gfx/lut/gfx-lut-consumer.hpp +++ b/source/gfx/lut/gfx-lut-consumer.hpp @@ -19,10 +19,8 @@ namespace streamfx::gfx::lut { consumer(); ~consumer(); - std::shared_ptr prepare(streamfx::gfx::lut::color_depth depth, - std::shared_ptr lut); + std::shared_ptr prepare(streamfx::gfx::lut::color_depth depth, std::shared_ptr lut); - void consume(streamfx::gfx::lut::color_depth depth, std::shared_ptr lut, - std::shared_ptr texture); + void consume(streamfx::gfx::lut::color_depth depth, std::shared_ptr lut, std::shared_ptr texture); }; } // namespace streamfx::gfx::lut diff --git a/source/gfx/shader/gfx-shader-param-basic.cpp b/source/gfx/shader/gfx-shader-param-basic.cpp index ffc3349a..7ee30b2a 100644 --- a/source/gfx/shader/gfx-shader-param-basic.cpp +++ b/source/gfx/shader/gfx-shader-param-basic.cpp @@ -22,8 +22,7 @@ static const std::string_view _annotation_scale = "scale"; static const std::string_view _annotation_enum_entry = "enum_%zu"; static const std::string_view _annotation_enum_entry_name = "enum_%zu_name"; -inline bool get_annotation_string(streamfx::obs::gs::effect_parameter param, std::string_view anno_name, - std::string& out) +inline bool get_annotation_string(streamfx::obs::gs::effect_parameter param, std::string_view anno_name, std::string& out) { if (!param) return false; @@ -68,10 +67,7 @@ streamfx::gfx::shader::basic_field_type streamfx::gfx::shader::get_field_type_fr return basic_field_type::Input; } -streamfx::gfx::shader::basic_parameter::basic_parameter(streamfx::gfx::shader::shader* parent, - streamfx::obs::gs::effect_parameter param, std::string prefix) - : parameter(parent, param, prefix), _field_type(basic_field_type::Input), _suffix(), _keys(), _names(), _min(), - _max(), _step(), _values() +streamfx::gfx::shader::basic_parameter::basic_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix) : parameter(parent, param, prefix), _field_type(basic_field_type::Input), _suffix(), _keys(), _names(), _min(), _max(), _step(), _values() { char string_buffer[256]; @@ -91,8 +87,7 @@ streamfx::gfx::shader::basic_parameter::basic_parameter(streamfx::gfx::shader::s for (std::size_t idx = 0; idx < get_size(); idx++) { snprintf(string_buffer, sizeof(string_buffer), "[%" PRId32 "]", static_cast(idx)); _names[idx] = std::string(string_buffer, string_buffer + strnlen(string_buffer, sizeof(string_buffer))); - snprintf(string_buffer, sizeof(string_buffer), "%s[%" PRId32 "]", get_key().data(), - static_cast(idx)); + snprintf(string_buffer, sizeof(string_buffer), "%s[%" PRId32 "]", get_key().data(), static_cast(idx)); _keys[idx] = std::string(string_buffer, string_buffer + strnlen(string_buffer, sizeof(string_buffer))); } } @@ -122,13 +117,11 @@ streamfx::gfx::shader::basic_parameter::basic_parameter(streamfx::gfx::shader::s } // Value must be given, name is optional. 
- if (auto eanno = get_parameter().get_annotation(key_value); - eanno && (get_type_from_effect_type(eanno.get_type()) == get_type())) { + if (auto eanno = get_parameter().get_annotation(key_value); eanno && (get_type_from_effect_type(eanno.get_type()) == get_type())) { basic_enum_data entry; load_parameter_data(eanno, entry.data); - if (auto nanno = get_parameter().get_annotation(key_name); - nanno && (nanno.get_type() == streamfx::obs::gs::effect_parameter::type::String)) { + if (auto nanno = get_parameter().get_annotation(key_name); nanno && (nanno.get_type() == streamfx::obs::gs::effect_parameter::type::String)) { entry.name = nanno.get_default_string(); } else { entry.name = "Unnamed Entry"; @@ -148,15 +141,12 @@ streamfx::gfx::shader::basic_parameter::basic_parameter(streamfx::gfx::shader::s streamfx::gfx::shader::basic_parameter::~basic_parameter() {} -void streamfx::gfx::shader::basic_parameter::load_parameter_data(streamfx::obs::gs::effect_parameter parameter, - basic_data& data) +void streamfx::gfx::shader::basic_parameter::load_parameter_data(streamfx::obs::gs::effect_parameter parameter, basic_data& data) { parameter.get_default_value(&data.i32, 1); } -streamfx::gfx::shader::bool_parameter::bool_parameter(streamfx::gfx::shader::shader* parent, - streamfx::obs::gs::effect_parameter param, std::string prefix) - : basic_parameter(parent, param, prefix) +streamfx::gfx::shader::bool_parameter::bool_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix) : basic_parameter(parent, param, prefix) { _min.resize(0); _max.resize(0); @@ -183,8 +173,7 @@ void streamfx::gfx::shader::bool_parameter::properties(obs_properties_t* props, // TODO: Support for bool[] if (get_size() == 1) { - auto p = obs_properties_add_list(props, get_key().data(), get_name().data(), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(props, get_key().data(), get_name().data(), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); if (has_description()) obs_property_set_long_description(p, get_description().data()); obs_property_list_add_int(p, D_TRANSLATE(S_STATE_DISABLED), 0); @@ -208,9 +197,7 @@ void streamfx::gfx::shader::bool_parameter::assign() get_parameter().set_value(_data.data(), _data.size()); } -streamfx::gfx::shader::float_parameter::float_parameter(streamfx::gfx::shader::shader* parent, - streamfx::obs::gs::effect_parameter param, std::string prefix) - : basic_parameter(parent, param, prefix) +streamfx::gfx::shader::float_parameter::float_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix) : basic_parameter(parent, param, prefix) { _data.resize(get_size()); @@ -258,10 +245,7 @@ void streamfx::gfx::shader::float_parameter::defaults(obs_data_t* settings) } } -static inline obs_property_t* build_float_property(streamfx::gfx::shader::basic_field_type ft, obs_properties_t* props, - const char* key, const char* name, float_t min, float_t max, - float_t step, - std::list edata) +static inline obs_property_t* build_float_property(streamfx::gfx::shader::basic_field_type ft, obs_properties_t* props, const char* key, const char* name, float_t min, float_t max, float_t step, std::list edata) { switch (ft) { case streamfx::gfx::shader::basic_field_type::Enum: { @@ -287,15 +271,13 @@ void streamfx::gfx::shader::float_parameter::properties(obs_properties_t* props, obs_properties_t* pr = props; if (get_size() > 1) { pr = obs_properties_create(); - auto p = 
obs_properties_add_group(props, get_key().data(), has_name() ? get_name().data() : get_key().data(), - OBS_GROUP_NORMAL, pr); + auto p = obs_properties_add_group(props, get_key().data(), has_name() ? get_name().data() : get_key().data(), OBS_GROUP_NORMAL, pr); if (has_description()) obs_property_set_long_description(p, get_description().data()); } for (std::size_t idx = 0; idx < get_size(); idx++) { - auto p = build_float_property(field_type(), pr, key_at(idx).data(), name_at(idx).data(), _min[idx].f32, - _max[idx].f32, _step[idx].f32, _values); + auto p = build_float_property(field_type(), pr, key_at(idx).data(), name_at(idx).data(), _min[idx].f32, _max[idx].f32, _step[idx].f32, _values); if (has_description()) obs_property_set_long_description(p, get_description().data()); obs_property_float_set_suffix(p, suffix().data()); @@ -316,9 +298,7 @@ void streamfx::gfx::shader::float_parameter::assign() get_parameter().set_value(_data.data(), get_size()); } -static inline obs_property_t* build_int_property(streamfx::gfx::shader::basic_field_type ft, obs_properties_t* props, - const char* key, const char* name, int32_t min, int32_t max, - int32_t step, std::list edata) +static inline obs_property_t* build_int_property(streamfx::gfx::shader::basic_field_type ft, obs_properties_t* props, const char* key, const char* name, int32_t min, int32_t max, int32_t step, std::list edata) { switch (ft) { case streamfx::gfx::shader::basic_field_type::Enum: { @@ -336,9 +316,7 @@ static inline obs_property_t* build_int_property(streamfx::gfx::shader::basic_fi } } -streamfx::gfx::shader::int_parameter::int_parameter(streamfx::gfx::shader::shader* parent, - streamfx::obs::gs::effect_parameter param, std::string prefix) - : basic_parameter(parent, param, prefix) +streamfx::gfx::shader::int_parameter::int_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix) : basic_parameter(parent, param, prefix) { _data.resize(get_size()); @@ -393,15 +371,13 @@ void streamfx::gfx::shader::int_parameter::properties(obs_properties_t* props, o obs_properties_t* pr = props; if (get_size() > 1) { pr = obs_properties_create(); - auto p = obs_properties_add_group(props, get_key().data(), has_name() ? get_name().data() : get_key().data(), - OBS_GROUP_NORMAL, pr); + auto p = obs_properties_add_group(props, get_key().data(), has_name() ? 
get_name().data() : get_key().data(), OBS_GROUP_NORMAL, pr); if (has_description()) obs_property_set_long_description(p, get_description().data()); } for (std::size_t idx = 0; idx < get_size(); idx++) { - auto p = build_int_property(field_type(), pr, key_at(idx).data(), name_at(idx).data(), _min[idx].i32, - _max[idx].i32, _step[idx].i32, _values); + auto p = build_int_property(field_type(), pr, key_at(idx).data(), name_at(idx).data(), _min[idx].i32, _max[idx].i32, _step[idx].i32, _values); if (has_description()) obs_property_set_long_description(p, get_description().data()); obs_property_int_set_suffix(p, suffix().data()); diff --git a/source/gfx/shader/gfx-shader-param-basic.hpp b/source/gfx/shader/gfx-shader-param-basic.hpp index 81197991..3d228058 100644 --- a/source/gfx/shader/gfx-shader-param-basic.hpp +++ b/source/gfx/shader/gfx-shader-param-basic.hpp @@ -53,8 +53,7 @@ namespace streamfx::gfx { std::list _values; public: - basic_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, - std::string prefix); + basic_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix); virtual ~basic_parameter(); virtual void load_parameter_data(streamfx::obs::gs::effect_parameter parameter, basic_data& data); @@ -90,8 +89,7 @@ namespace streamfx::gfx { std::vector _data; public: - bool_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, - std::string prefix); + bool_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix); virtual ~bool_parameter(); void defaults(obs_data_t* settings) override; @@ -107,8 +105,7 @@ namespace streamfx::gfx { std::vector _data; public: - float_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, - std::string prefix); + float_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix); virtual ~float_parameter(); void defaults(obs_data_t* settings) override; @@ -124,8 +121,7 @@ namespace streamfx::gfx { std::vector _data; public: - int_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, - std::string prefix); + int_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix); virtual ~int_parameter(); void defaults(obs_data_t* settings) override; diff --git a/source/gfx/shader/gfx-shader-param-texture.cpp b/source/gfx/shader/gfx-shader-param-texture.cpp index 68f5d450..4be81497 100644 --- a/source/gfx/shader/gfx-shader-param-texture.cpp +++ b/source/gfx/shader/gfx-shader-param-texture.cpp @@ -59,13 +59,7 @@ streamfx::gfx::shader::texture_field_type streamfx::gfx::shader::get_texture_fie return texture_field_type::Input; } -streamfx::gfx::shader::texture_parameter::texture_parameter(streamfx::gfx::shader::shader* parent, - streamfx::obs::gs::effect_parameter param, - std::string prefix) - : parameter(parent, param, prefix), _field_type(texture_field_type::Input), _keys(), _values(), - _type(texture_type::File), _active(false), _visible(false), _dirty(true), - _dirty_ts(std::chrono::high_resolution_clock::now()), _file_path(), _file_texture(), _source_name(), _source(), - _source_child(), _source_active(), _source_visible(), _source_rendertarget() +streamfx::gfx::shader::texture_parameter::texture_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix) 
: parameter(parent, param, prefix), _field_type(texture_field_type::Input), _keys(), _values(), _type(texture_type::File), _active(false), _visible(false), _dirty(true), _dirty_ts(std::chrono::high_resolution_clock::now()), _file_path(), _file_texture(), _source_name(), _source(), _source_child(), _source_active(), _source_visible(), _source_rendertarget() { char string_buffer[256]; @@ -107,15 +101,12 @@ streamfx::gfx::shader::texture_parameter::texture_parameter(streamfx::gfx::shade } // Value must be given, name is optional. - if (auto eanno = get_parameter().get_annotation(key_value); - eanno - && (get_type_from_effect_type(eanno.get_type()) == streamfx::gfx::shader::parameter_type::String)) { + if (auto eanno = get_parameter().get_annotation(key_value); eanno && (get_type_from_effect_type(eanno.get_type()) == streamfx::gfx::shader::parameter_type::String)) { texture_enum_data entry; entry.data.file = std::filesystem::path(eanno.get_default_string()); - if (auto nanno = get_parameter().get_annotation(key_name); - nanno && (nanno.get_type() == streamfx::obs::gs::effect_parameter::type::String)) { + if (auto nanno = get_parameter().get_annotation(key_name); nanno && (nanno.get_type() == streamfx::obs::gs::effect_parameter::type::String)) { entry.name = nanno.get_default_string(); } else { entry.name = "Unnamed Entry"; @@ -152,8 +143,7 @@ void streamfx::gfx::shader::texture_parameter::defaults(obs_data_t* settings) } } -bool streamfx::gfx::shader::texture_parameter::modified_type(void* priv, obs_properties_t* props, obs_property_t*, - obs_data_t* settings) +bool streamfx::gfx::shader::texture_parameter::modified_type(void* priv, obs_properties_t* props, obs_property_t*, obs_data_t* settings) { auto self = reinterpret_cast(priv); if (self->field_type() == texture_field_type::Input) { @@ -171,23 +161,20 @@ void streamfx::gfx::shader::texture_parameter::properties(obs_properties_t* prop return; if (field_type() == texture_field_type::Enum) { - auto p = obs_properties_add_list(props, get_key().data(), has_name() ? get_name().data() : get_key().data(), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(props, get_key().data(), has_name() ? get_name().data() : get_key().data(), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); for (auto v : _values) { obs_property_list_add_string(p, v.name.c_str(), v.data.file.generic_u8string().c_str()); } } else { obs_properties_t* pr = obs_properties_create(); { - auto p = obs_properties_add_group(props, get_key().data(), - has_name() ? get_name().data() : get_key().data(), OBS_GROUP_NORMAL, pr); + auto p = obs_properties_add_group(props, get_key().data(), has_name() ? get_name().data() : get_key().data(), OBS_GROUP_NORMAL, pr); if (has_description()) obs_property_set_long_description(p, get_description().data()); } { - auto p = obs_properties_add_list(pr, _keys[0].c_str(), D_TRANSLATE(ST_I18N_TYPE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_INT); + auto p = obs_properties_add_list(pr, _keys[0].c_str(), D_TRANSLATE(ST_I18N_TYPE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); obs_property_set_modified_callback2(p, modified_type, this); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_TYPE_FILE), static_cast(texture_type::File)); obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_TYPE_SOURCE), static_cast(texture_type::Source)); @@ -195,13 +182,11 @@ void streamfx::gfx::shader::texture_parameter::properties(obs_properties_t* prop { // ToDo: Filter and Default Path. 
- auto p = obs_properties_add_path(pr, _keys[1].c_str(), D_TRANSLATE(ST_I18N_FILE), OBS_PATH_FILE, "* (*.*)", - nullptr); + auto p = obs_properties_add_path(pr, _keys[1].c_str(), D_TRANSLATE(ST_I18N_FILE), OBS_PATH_FILE, "* (*.*)", nullptr); } { - auto p = obs_properties_add_list(pr, _keys[2].c_str(), D_TRANSLATE(ST_I18N_SOURCE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(pr, _keys[2].c_str(), D_TRANSLATE(ST_I18N_SOURCE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_list_add_string(p, "", ""); obs::source_tracker::get()->enumerate( [&p](std::string name, ::streamfx::obs::source) { @@ -281,11 +266,9 @@ void streamfx::gfx::shader::texture_parameter::assign() _source_rendertarget.reset(); _file_texture.reset(); - if (((field_type() == texture_field_type::Input) && (_type == texture_type::File)) - || (field_type() == texture_field_type::Enum)) { + if (((field_type() == texture_field_type::Input) && (_type == texture_type::File)) || (field_type() == texture_field_type::Enum)) { if (!_file_path.empty()) { - _file_texture = std::make_shared( - streamfx::util::platform::native_to_utf8(_file_path).generic_u8string().c_str()); + _file_texture = std::make_shared(streamfx::util::platform::native_to_utf8(_file_path).generic_u8string().c_str()); } } else if ((field_type() == texture_field_type::Input) && (_type == texture_type::Source)) { // Try and grab the source itself. @@ -330,10 +313,8 @@ void streamfx::gfx::shader::texture_parameter::assign() if ((_type == texture_type::Source) && (_active || _visible) && _source_rendertarget) { auto source = _source.lock(); #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_capture, "Parameter '%s'", - get_key().data()}; - ::streamfx::obs::gs::debug_marker profiler2{::streamfx::obs::gs::debug_color_capture, "Capture '%s'", - source.name().data()}; + ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_capture, "Parameter '%s'", get_key().data()}; + ::streamfx::obs::gs::debug_marker profiler2{::streamfx::obs::gs::debug_color_capture, "Capture '%s'", source.name().data()}; #endif uint32_t width = source.width(); uint32_t height = source.height(); diff --git a/source/gfx/shader/gfx-shader-param-texture.hpp b/source/gfx/shader/gfx-shader-param-texture.hpp index 4fcd4d61..5d750416 100644 --- a/source/gfx/shader/gfx-shader-param-texture.hpp +++ b/source/gfx/shader/gfx-shader-param-texture.hpp @@ -74,8 +74,7 @@ namespace streamfx::gfx { std::shared_ptr _source_rendertarget; public: - texture_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, - std::string prefix); + texture_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix); virtual ~texture_parameter(); void defaults(obs_data_t* settings) override; diff --git a/source/gfx/shader/gfx-shader-param.cpp b/source/gfx/shader/gfx-shader-param.cpp index c8220d24..d2ee4fc8 100644 --- a/source/gfx/shader/gfx-shader-param.cpp +++ b/source/gfx/shader/gfx-shader-param.cpp @@ -23,8 +23,7 @@ typedef streamfx::obs::gs::effect_parameter::type eptype; -streamfx::gfx::shader::parameter_type - streamfx::gfx::shader::get_type_from_effect_type(streamfx::obs::gs::effect_parameter::type type) +streamfx::gfx::shader::parameter_type streamfx::gfx::shader::get_type_from_effect_type(streamfx::obs::gs::effect_parameter::type type) { switch (type) { case eptype::Boolean: @@ 
-104,10 +103,7 @@ streamfx::gfx::shader::parameter_type streamfx::gfx::shader::get_type_from_strin throw std::invalid_argument("Invalid parameter type string."); } -streamfx::gfx::shader::parameter::parameter(streamfx::gfx::shader::shader* parent, - streamfx::obs::gs::effect_parameter param, std::string key_prefix) - : _parent(parent), _param(param), _order(0), _key(_param.get_name()), _visible(true), _automatic(false), - _name(_key), _description() +streamfx::gfx::shader::parameter::parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string key_prefix) : _parent(parent), _param(param), _order(0), _key(_param.get_name()), _visible(true), _automatic(false), _name(_key), _description() { { std::stringstream ss; @@ -175,9 +171,7 @@ void streamfx::gfx::shader::parameter::visible(bool visible) {} void streamfx::gfx::shader::parameter::active(bool active) {} -std::shared_ptr - streamfx::gfx::shader::parameter::make_parameter(streamfx::gfx::shader::shader* parent, - streamfx::obs::gs::effect_parameter param, std::string prefix) +std::shared_ptr streamfx::gfx::shader::parameter::make_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix) { if (!parent || !param) { throw std::runtime_error("Bad call to make_parameter. This is a bug in the plugin."); diff --git a/source/gfx/shader/gfx-shader-param.hpp b/source/gfx/shader/gfx-shader-param.hpp index 1d9843d6..6141ddc1 100644 --- a/source/gfx/shader/gfx-shader-param.hpp +++ b/source/gfx/shader/gfx-shader-param.hpp @@ -65,8 +65,7 @@ namespace streamfx::gfx { std::string _description; protected: - parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, - std::string key_prefix); + parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string key_prefix); virtual ~parameter(){}; public: @@ -144,9 +143,7 @@ namespace streamfx::gfx { } public: - static std::shared_ptr make_parameter(streamfx::gfx::shader::shader* parent, - streamfx::obs::gs::effect_parameter param, - std::string prefix); + static std::shared_ptr make_parameter(streamfx::gfx::shader::shader* parent, streamfx::obs::gs::effect_parameter param, std::string prefix); }; } // namespace shader } // namespace streamfx::gfx diff --git a/source/gfx/shader/gfx-shader.cpp b/source/gfx/shader/gfx-shader.cpp index 3ecd30b6..2fa1c191 100644 --- a/source/gfx/shader/gfx-shader.cpp +++ b/source/gfx/shader/gfx-shader.cpp @@ -49,8 +49,7 @@ streamfx::gfx::shader::shader::shader(obs_source_t* self, shader_mode mode) // Initialize random values. 
_random.seed(static_cast(_random_seed)); for (size_t idx = 0; idx < 16; idx++) { - _random_values[idx] = - static_cast(static_cast(_random()) / static_cast(_random.max())); + _random_values[idx] = static_cast(static_cast(_random()) / static_cast(_random.max())); } } @@ -91,8 +90,7 @@ bool streamfx::gfx::shader::shader::is_technique_different(std::string_view tech return false; } -bool streamfx::gfx::shader::shader::load_shader(const std::filesystem::path& file, std::string_view tech, - bool& shader_dirty, bool& param_dirty) +bool streamfx::gfx::shader::shader::load_shader(const std::filesystem::path& file, std::string_view tech, bool& shader_dirty, bool& param_dirty) { try { if (!std::filesystem::exists(file)) @@ -112,8 +110,7 @@ bool streamfx::gfx::shader::shader::load_shader(const std::filesystem::path& fil // Update Params if (param_dirty) { - auto settings = - std::shared_ptr(obs_source_get_settings(_self), [](obs_data_t* p) { obs_data_release(p); }); + auto settings = std::shared_ptr(obs_source_get_settings(_self), [](obs_data_t* p) { obs_data_release(p); }); bool have_valid_tech = false; for (std::size_t idx = 0; idx < _shader.count_techniques(); idx++) { @@ -136,8 +133,7 @@ bool streamfx::gfx::shader::shader::load_shader(const std::filesystem::path& fil auto etech = _shader.get_technique(_shader_tech); for (std::size_t idx = 0; idx < etech.count_passes(); idx++) { auto pass = etech.get_pass(idx); - auto fetch_params = [&](std::size_t count, - std::function get_func) { + auto fetch_params = [&](std::size_t count, std::function get_func) { for (std::size_t vidx = 0; vidx < count; vidx++) { auto el = get_func(vidx); if (!el) @@ -198,21 +194,15 @@ void streamfx::gfx::shader::shader::properties(obs_properties_t* pr) } else { path = streamfx::data_file_path("examples/").u8string(); } - auto p = obs_properties_add_path(grp, ST_KEY_SHADER_FILE, D_TRANSLATE(ST_I18N_SHADER_FILE), OBS_PATH_FILE, - "*.*", path.c_str()); + auto p = obs_properties_add_path(grp, ST_KEY_SHADER_FILE, D_TRANSLATE(ST_I18N_SHADER_FILE), OBS_PATH_FILE, "*.*", path.c_str()); } { - auto p = obs_properties_add_list(grp, ST_KEY_SHADER_TECHNIQUE, D_TRANSLATE(ST_I18N_SHADER_TECHNIQUE), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); + auto p = obs_properties_add_list(grp, ST_KEY_SHADER_TECHNIQUE, D_TRANSLATE(ST_I18N_SHADER_TECHNIQUE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); } { obs_properties_add_button2( - grp, ST_KEY_REFRESH, D_TRANSLATE(ST_I18N_REFRESH), - [](obs_properties_t* props, obs_property_t* prop, void* priv) { - return reinterpret_cast(priv)->on_refresh_properties(props, prop); - }, - this); + grp, ST_KEY_REFRESH, D_TRANSLATE(ST_I18N_REFRESH), [](obs_properties_t* props, obs_property_t* prop, void* priv) { return reinterpret_cast(priv)->on_refresh_properties(props, prop); }, this); } if (_mode != shader_mode::Transition) { @@ -220,18 +210,15 @@ void streamfx::gfx::shader::shader::properties(obs_properties_t* pr) obs_properties_add_group(grp, ST_KEY_SHADER_SIZE, D_TRANSLATE(ST_I18N_SHADER_SIZE), OBS_GROUP_NORMAL, grp2); { - auto p = obs_properties_add_text(grp2, ST_KEY_SHADER_SIZE_WIDTH, D_TRANSLATE(ST_I18N_SHADER_SIZE_WIDTH), - OBS_TEXT_DEFAULT); + auto p = obs_properties_add_text(grp2, ST_KEY_SHADER_SIZE_WIDTH, D_TRANSLATE(ST_I18N_SHADER_SIZE_WIDTH), OBS_TEXT_DEFAULT); } { - auto p = obs_properties_add_text(grp2, ST_KEY_SHADER_SIZE_HEIGHT, - D_TRANSLATE(ST_I18N_SHADER_SIZE_HEIGHT), OBS_TEXT_DEFAULT); + auto p = obs_properties_add_text(grp2, ST_KEY_SHADER_SIZE_HEIGHT, 
D_TRANSLATE(ST_I18N_SHADER_SIZE_HEIGHT), OBS_TEXT_DEFAULT); } } { - auto p = obs_properties_add_int_slider(grp, ST_KEY_SHADER_SEED, D_TRANSLATE(ST_I18N_SHADER_SEED), - std::numeric_limits::min(), std::numeric_limits::max(), 1); + auto p = obs_properties_add_int_slider(grp, ST_KEY_SHADER_SEED, D_TRANSLATE(ST_I18N_SHADER_SEED), std::numeric_limits::min(), std::numeric_limits::max(), 1); } } { @@ -277,8 +264,7 @@ bool streamfx::gfx::shader::shader::on_refresh_properties(obs_properties_t* prop return true; } -bool streamfx::gfx::shader::shader::on_shader_or_technique_modified(obs_properties_t* props, obs_property_t* prop, - obs_data_t* data) +bool streamfx::gfx::shader::shader::on_shader_or_technique_modified(obs_properties_t* props, obs_property_t* prop, obs_data_t* data) { bool shader_dirty = false; bool param_dirty = false; @@ -357,8 +343,7 @@ void streamfx::gfx::shader::shader::update(obs_data_t* data) _random_seed = seed; _random.seed(static_cast(_random_seed)); for (size_t idx = 0; idx < 16; idx++) { - _random_values[idx] = - static_cast(static_cast(_random()) / static_cast(_random.max())); + _random_values[idx] = static_cast(static_cast(_random()) / static_cast(_random.max())); } } @@ -437,8 +422,7 @@ bool streamfx::gfx::shader::shader::tick(float_t time) // Recreate Per-Activation-Random values. for (size_t idx = 0; idx < 8; idx++) { - _random_values[8 + idx] = - static_cast(static_cast(_random()) / static_cast(_random.max())); + _random_values[8 + idx] = static_cast(static_cast(_random()) / static_cast(_random.max())); } // Flag Render Target as outdated. @@ -460,17 +444,14 @@ void streamfx::gfx::shader::shader::prepare_render() // float4 Time: (Time in Seconds), (Time in Current Second), (Time in Seconds only), (Random Value) if (streamfx::obs::gs::effect_parameter el = _shader.get_parameter("Time"); el != nullptr) { if (el.get_type() == streamfx::obs::gs::effect_parameter::type::Float4) { - el.set_float4( - _time, _time_loop, static_cast(_loops), - static_cast(static_cast(_random()) / static_cast(_random.max()))); + el.set_float4(_time, _time_loop, static_cast(_loops), static_cast(static_cast(_random()) / static_cast(_random.max()))); } } // float4 ViewSize: (Width), (Height), (1.0 / Width), (1.0 / Height) if (auto el = _shader.get_parameter("ViewSize"); el != nullptr) { if (el.get_type() == streamfx::obs::gs::effect_parameter::type::Float4) { - el.set_float4(static_cast(width()), static_cast(height()), - 1.0f / static_cast(width()), 1.0f / static_cast(height())); + el.set_float4(static_cast(width()), static_cast(height()), 1.0f / static_cast(width()), 1.0f / static_cast(height())); } } @@ -635,8 +616,7 @@ void streamfx::gfx::shader::shader::set_active(bool active) // Recreate Per-Activation-Random values. 
 		for (size_t idx = 0; idx < 4; idx++) {
-			_random_values[4 + idx] =
-				static_cast(static_cast(_random()) / static_cast(_random.max()));
+			_random_values[4 + idx] = static_cast(static_cast(_random()) / static_cast(_random.max()));
 		}
 	}
diff --git a/source/gfx/shader/gfx-shader.hpp b/source/gfx/shader/gfx-shader.hpp
index 5239f278..da63fc5e 100644
--- a/source/gfx/shader/gfx-shader.hpp
+++ b/source/gfx/shader/gfx-shader.hpp
@@ -67,7 +67,7 @@ namespace streamfx::gfx {
 		int32_t _loops;
 		std::mt19937_64 _random;
 		int32_t _random_seed;
-		float_t _random_values[16]; // 0..4 Per-Instance-Random, 4..8 Per-Activation-Random 9..15 Per-Frame-Random
+		float_t _random_values[16]; // 0..4 Per-Instance-Random, 4..8 Per-Activation-Random 9..15 Per-Frame-Random
 
 		// Rendering
 		bool _rt_up_to_date;
@@ -81,8 +81,7 @@ namespace streamfx::gfx {
 
 		bool is_technique_different(std::string_view tech);
 
-		bool load_shader(const std::filesystem::path& file, std::string_view tech, bool& shader_dirty,
-			bool& param_dirty);
+		bool load_shader(const std::filesystem::path& file, std::string_view tech, bool& shader_dirty, bool& param_dirty);
 
 		static void defaults(obs_data_t* data);
 
diff --git a/source/nvidia/ar/nvidia-ar-facedetection.cpp b/source/nvidia/ar/nvidia-ar-facedetection.cpp
index 3f2e83e5..20e8c4db 100644
--- a/source/nvidia/ar/nvidia-ar-facedetection.cpp
+++ b/source/nvidia/ar/nvidia-ar-facedetection.cpp
@@ -36,9 +36,7 @@ streamfx::nvidia::ar::facedetection::~facedetection()
 	D_LOG_DEBUG("Finalizing... (Addr: 0x%" PRIuPTR ")", this);
 }
 
-streamfx::nvidia::ar::facedetection::facedetection()
-	: feature(FEATURE_FACE_DETECTION), _input(), _source(), _tmp(), _rects(), _rects_confidence(), _bboxes(),
-	  _dirty(true)
+streamfx::nvidia::ar::facedetection::facedetection() : feature(FEATURE_FACE_DETECTION), _input(), _source(), _tmp(), _rects(), _rects_confidence(), _bboxes(), _dirty(true)
 {
 	D_LOG_DEBUG("Initializing... (Addr: 0x%" PRIuPTR ")", this);
 
@@ -85,8 +83,7 @@ void ar::facedetection::set_tracking_limit(size_t v)
 	_bboxes.current = 0;
 
 	// Update feature.
-	if (auto err = set_object(P_NVAR_OUTPUT "BoundingBoxes", reinterpret_cast(&_bboxes), sizeof(bounds_t));
-		err != cv::result::SUCCESS) {
+	if (auto err = set_object(P_NVAR_OUTPUT "BoundingBoxes", reinterpret_cast(&_bboxes), sizeof(bounds_t)); err != cv::result::SUCCESS) {
 		throw cv::exception("BoundingBoxes", err);
 	}
 	if (auto err = set(P_NVAR_OUTPUT "BoundingBoxesConfidence", _rects_confidence); err != cv::result::SUCCESS) {
@@ -129,11 +126,8 @@ void ar::facedetection::process(std::shared_ptr<::streamfx::obs::gs::texture> in
 #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG
 		::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, "Copy Input -> Source"};
 #endif
-		if (auto res = _nvcv->NvCVImage_Transfer(_input->get_image(), _source->get_image(), 1.f,
-			_nvcuda->get_stream()->get(), _tmp->get_image());
-			res != ::streamfx::nvidia::cv::result::SUCCESS) {
-			D_LOG_ERROR("Failed to transfer input to processing source due to error: %s",
-				_nvcv->NvCV_GetErrorStringFromCode(res));
+		if (auto res = _nvcv->NvCVImage_Transfer(_input->get_image(), _source->get_image(), 1.f, _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) {
+			D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", _nvcv->NvCV_GetErrorStringFromCode(res));
 			throw std::runtime_error("Transfer failed.");
 		}
 	}
@@ -176,9 +170,7 @@ void ar::facedetection::resize(uint32_t width, uint32_t height)
 	auto cctx = ::streamfx::nvidia::cuda::obs::get()->get_context()->enter();
 
 	if (!_tmp) {
-		_tmp = std::make_shared<::streamfx::nvidia::cv::image>(
-			width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8,
-			::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1);
+		_tmp = std::make_shared<::streamfx::nvidia::cv::image>(width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1);
 	}
 
 	if (!_input || (width != _input->get_texture()->get_width()) || (height != _input->get_texture()->get_height())) {
@@ -194,9 +186,7 @@
 	if (_source) {
 		_source->resize(width, height);
 	} else {
-		_source = std::make_shared<::streamfx::nvidia::cv::image>(
-			width, height, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::UINT8,
-			::streamfx::nvidia::cv::component_layout::INTERLEAVED, ::streamfx::nvidia::cv::memory_location::GPU, 1);
+		_source = std::make_shared<::streamfx::nvidia::cv::image>(width, height, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, ::streamfx::nvidia::cv::memory_location::GPU, 1);
 	}
 
 	if (auto err = set(P_NVAR_INPUT "Image", _source); err != cv::result::SUCCESS) {
diff --git a/source/nvidia/ar/nvidia-ar-feature.cpp b/source/nvidia/ar/nvidia-ar-feature.cpp
index 1d5a6cdc..fece13c9 100644
--- a/source/nvidia/ar/nvidia-ar-feature.cpp
+++ b/source/nvidia/ar/nvidia-ar-feature.cpp
@@ -26,9 +26,7 @@ streamfx::nvidia::ar::feature::~feature()
 	D_LOG_DEBUG("Finalizing... (Addr: 0x%" PRIuPTR ")", this);
 }
 
-streamfx::nvidia::ar::feature::feature(feature_t feature)
-	: _nvcuda(::streamfx::nvidia::cuda::obs::get()), _nvcv(::streamfx::nvidia::cv::cv::get()),
-	  _nvar(::streamfx::nvidia::ar::ar::get()), _fx()
+streamfx::nvidia::ar::feature::feature(feature_t feature) : _nvcuda(::streamfx::nvidia::cuda::obs::get()), _nvcv(::streamfx::nvidia::cv::cv::get()), _nvar(::streamfx::nvidia::ar::ar::get()), _fx()
 {
 	D_LOG_DEBUG("Initializating... (Addr: 0x%" PRIuPTR ")", this);
 	auto gctx = ::streamfx::obs::gs::context();
@@ -39,8 +37,7 @@ streamfx::nvidia::ar::feature::feature(feature_t feature)
 	if (cv::result res = _nvar->NvAR_Create(feature, &handle); res != cv::result::SUCCESS) {
 		throw cv::exception("Failed to create feature.", res);
 	}
-	_fx =
-		std::shared_ptr(handle, [this](::streamfx::nvidia::ar::handle_t handle) { _nvar->NvAR_Destroy(handle); });
+	_fx = std::shared_ptr(handle, [this](::streamfx::nvidia::ar::handle_t handle) { _nvar->NvAR_Destroy(handle); });
 
 	// Set CUDA stream and model directory.
 	set(P_NVAR_CONFIG "CUDAStream", _nvcuda->get_stream());
diff --git a/source/nvidia/ar/nvidia-ar.cpp b/source/nvidia/ar/nvidia-ar.cpp
index ed4d37ef..b1da7dd0 100644
--- a/source/nvidia/ar/nvidia-ar.cpp
+++ b/source/nvidia/ar/nvidia-ar.cpp
@@ -127,15 +127,11 @@ streamfx::nvidia::ar::ar::ar() : _library(), _model_path()
 			std::string error;
 			{
 				LPWSTR str;
-				FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER
-					| FORMAT_MESSAGE_IGNORE_INSERTS,
-					nullptr, ec, MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US),
-					reinterpret_cast(&str), 0, nullptr);
+				FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_IGNORE_INSERTS, nullptr, ec, MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US), reinterpret_cast(&str), 0, nullptr);
 				error = ::streamfx::util::platform::native_to_utf8(std::wstring(str));
 				LocalFree(str);
 			}
 
-			D_LOG_WARNING("Failed to add '%'s to the library loader paths with error: %s (Code %" PRIu32 ")",
-				sdk_path.string().c_str(), error.c_str(), ec);
+			D_LOG_WARNING("Failed to add '%'s to the library loader paths with error: %s (Code %" PRIu32 ")", sdk_path.string().c_str(), error.c_str(), ec);
 		}
 #endif
diff --git a/source/nvidia/ar/nvidia-ar.hpp b/source/nvidia/ar/nvidia-ar.hpp
index f66b87ca..63cb5398 100644
--- a/source/nvidia/ar/nvidia-ar.hpp
+++ b/source/nvidia/ar/nvidia-ar.hpp
@@ -152,16 +152,12 @@ namespace streamfx::nvidia::ar {
 		P_NVAR_DEFINE_FUNCTION(NvAR_SetF64, handle_t ptr, parameter_t parameter, double value);
 		P_NVAR_DEFINE_FUNCTION(NvAR_GetString, handle_t ptr, parameter_t parameter, const char** value);
 		P_NVAR_DEFINE_FUNCTION(NvAR_SetString, handle_t ptr, parameter_t parameter, const char* value);
-		P_NVAR_DEFINE_FUNCTION(NvAR_GetCudaStream, handle_t ptr, parameter_t parameter,
-			::streamfx::nvidia::cuda::stream_t* value);
-		P_NVAR_DEFINE_FUNCTION(NvAR_SetCudaStream, handle_t ptr, parameter_t parameter,
-			::streamfx::nvidia::cuda::stream_t value);
+		P_NVAR_DEFINE_FUNCTION(NvAR_GetCudaStream, handle_t ptr, parameter_t parameter, ::streamfx::nvidia::cuda::stream_t* value);
+		P_NVAR_DEFINE_FUNCTION(NvAR_SetCudaStream, handle_t ptr, parameter_t parameter, ::streamfx::nvidia::cuda::stream_t value);
 		P_NVAR_DEFINE_FUNCTION(NvAR_GetObject, handle_t ptr, parameter_t parameter, object_t* value, uint32_t size);
 		P_NVAR_DEFINE_FUNCTION(NvAR_SetObject, handle_t ptr, parameter_t parameter, object_t value, uint32_t size);
-		P_NVAR_DEFINE_FUNCTION(NvAR_GetF32Array, handle_t ptr, parameter_t parameter, const float** values,
int32_t* size); - P_NVAR_DEFINE_FUNCTION(NvAR_SetF32Array, handle_t ptr, parameter_t parameter, const float* values, - int32_t size); + P_NVAR_DEFINE_FUNCTION(NvAR_GetF32Array, handle_t ptr, parameter_t parameter, const float** values, int32_t* size); + P_NVAR_DEFINE_FUNCTION(NvAR_SetF32Array, handle_t ptr, parameter_t parameter, const float* values, int32_t size); public: static std::shared_ptr<::streamfx::nvidia::ar::ar> get(); diff --git a/source/nvidia/cuda/nvidia-cuda-context.cpp b/source/nvidia/cuda/nvidia-cuda-context.cpp index d089fa7e..154d7169 100644 --- a/source/nvidia/cuda/nvidia-cuda-context.cpp +++ b/source/nvidia/cuda/nvidia-cuda-context.cpp @@ -43,8 +43,7 @@ streamfx::nvidia::cuda::context::~context() } } -streamfx::nvidia::cuda::context::context() - : _cuda(::streamfx::nvidia::cuda::cuda::get()), _ctx(), _has_device(false), _device() +streamfx::nvidia::cuda::context::context() : _cuda(::streamfx::nvidia::cuda::cuda::get()), _ctx(), _has_device(false), _device() { D_LOG_DEBUG("Initializating... (Addr: 0x%" PRIuPTR ")", this); } @@ -94,10 +93,7 @@ streamfx::nvidia::cuda::context::context(ID3D11Device* device) : context() _cuda->cuDeviceGetUuid(&device_uuid, _device); } - D_LOG_INFO("Initialized CUDA on device '%s' (%08" PRIx32 "-%04" PRIx16 "-%04" PRIx16 "-%04" PRIx16 "-%04" PRIx16 - "%08" PRIx32 ", %08" PRIx64 ", %" PRIu32 ").", - device_name.c_str(), device_uuid.uuid.a, device_uuid.uuid.b, device_uuid.uuid.c, device_uuid.uuid.d, - device_uuid.uuid.e, device_uuid.uuid.f, device_luid.luid, device_luid_mask); + D_LOG_INFO("Initialized CUDA on device '%s' (%08" PRIx32 "-%04" PRIx16 "-%04" PRIx16 "-%04" PRIx16 "-%04" PRIx16 "%08" PRIx32 ", %08" PRIx64 ", %" PRIu32 ").", device_name.c_str(), device_uuid.uuid.a, device_uuid.uuid.b, device_uuid.uuid.c, device_uuid.uuid.d, device_uuid.uuid.e, device_uuid.uuid.f, device_luid.luid, device_luid_mask); _has_device = true; } diff --git a/source/nvidia/cuda/nvidia-cuda-gs-texture.cpp b/source/nvidia/cuda/nvidia-cuda-gs-texture.cpp index c8c06574..72bbc7d4 100644 --- a/source/nvidia/cuda/nvidia-cuda-gs-texture.cpp +++ b/source/nvidia/cuda/nvidia-cuda-gs-texture.cpp @@ -29,8 +29,7 @@ streamfx::nvidia::cuda::gstexture::~gstexture() _cuda->cuGraphicsUnregisterResource(_resource); } -streamfx::nvidia::cuda::gstexture::gstexture(std::shared_ptr texture) - : _cuda(::streamfx::nvidia::cuda::cuda::get()), _texture(texture), _resource(), _is_mapped(false), _pointer() +streamfx::nvidia::cuda::gstexture::gstexture(std::shared_ptr texture) : _cuda(::streamfx::nvidia::cuda::cuda::get()), _texture(texture), _resource(), _is_mapped(false), _pointer() { D_LOG_DEBUG("Initializating... (Addr: 0x%" PRIuPTR ")", this); @@ -69,8 +68,7 @@ streamfx::nvidia::cuda::gstexture::gstexture(std::shared_ptr stream) +streamfx::nvidia::cuda::array_t streamfx::nvidia::cuda::gstexture::map(std::shared_ptr stream) { if (_is_mapped) { return _pointer; diff --git a/source/nvidia/cuda/nvidia-cuda-memory.cpp b/source/nvidia/cuda/nvidia-cuda-memory.cpp index 075bcf7e..bb530824 100644 --- a/source/nvidia/cuda/nvidia-cuda-memory.cpp +++ b/source/nvidia/cuda/nvidia-cuda-memory.cpp @@ -30,8 +30,7 @@ streamfx::nvidia::cuda::memory::~memory() _cuda->cuMemFree(_pointer); } -streamfx::nvidia::cuda::memory::memory(size_t size) - : _cuda(::streamfx::nvidia::cuda::cuda::get()), _pointer(), _size(size) +streamfx::nvidia::cuda::memory::memory(size_t size) : _cuda(::streamfx::nvidia::cuda::cuda::get()), _pointer(), _size(size) { D_LOG_DEBUG("Initializating... 
(Addr: 0x%" PRIuPTR ")", this); diff --git a/source/nvidia/cuda/nvidia-cuda-obs.cpp b/source/nvidia/cuda/nvidia-cuda-obs.cpp index b5a002d5..47ebc0c5 100644 --- a/source/nvidia/cuda/nvidia-cuda-obs.cpp +++ b/source/nvidia/cuda/nvidia-cuda-obs.cpp @@ -44,8 +44,7 @@ streamfx::nvidia::cuda::obs::obs() : _cuda(::streamfx::nvidia::cuda::cuda::get() // Create Context #ifdef WIN32 if (gs_get_device_type() == GS_DEVICE_DIRECT3D_11) { - _context = - std::make_shared<::streamfx::nvidia::cuda::context>(reinterpret_cast(gs_get_device_obj())); + _context = std::make_shared<::streamfx::nvidia::cuda::context>(reinterpret_cast(gs_get_device_obj())); } #endif if (gs_get_device_type() == GS_DEVICE_OPENGL) { diff --git a/source/nvidia/cuda/nvidia-cuda-stream.cpp b/source/nvidia/cuda/nvidia-cuda-stream.cpp index c5f3136a..a80b782f 100644 --- a/source/nvidia/cuda/nvidia-cuda-stream.cpp +++ b/source/nvidia/cuda/nvidia-cuda-stream.cpp @@ -30,8 +30,7 @@ streamfx::nvidia::cuda::stream::~stream() _cuda->cuStreamDestroy(_stream); } -streamfx::nvidia::cuda::stream::stream(::streamfx::nvidia::cuda::stream_flags flags, int32_t priority) - : _cuda(::streamfx::nvidia::cuda::cuda::get()) +streamfx::nvidia::cuda::stream::stream(::streamfx::nvidia::cuda::stream_flags flags, int32_t priority) : _cuda(::streamfx::nvidia::cuda::cuda::get()) { D_LOG_DEBUG("Initializating... (Addr: 0x%" PRIuPTR ")", this); diff --git a/source/nvidia/cuda/nvidia-cuda-stream.hpp b/source/nvidia/cuda/nvidia-cuda-stream.hpp index d553410f..28541455 100644 --- a/source/nvidia/cuda/nvidia-cuda-stream.hpp +++ b/source/nvidia/cuda/nvidia-cuda-stream.hpp @@ -16,8 +16,7 @@ namespace streamfx::nvidia::cuda { public: ~stream(); - stream(::streamfx::nvidia::cuda::stream_flags flags = ::streamfx::nvidia::cuda::stream_flags::DEFAULT, - int32_t priority = 0); + stream(::streamfx::nvidia::cuda::stream_flags flags = ::streamfx::nvidia::cuda::stream_flags::DEFAULT, int32_t priority = 0); ::streamfx::nvidia::cuda::stream_t get(); diff --git a/source/nvidia/cuda/nvidia-cuda.hpp b/source/nvidia/cuda/nvidia-cuda.hpp index 47bc04b8..0e2b3cde 100644 --- a/source/nvidia/cuda/nvidia-cuda.hpp +++ b/source/nvidia/cuda/nvidia-cuda.hpp @@ -229,21 +229,17 @@ namespace streamfx::nvidia::cuda { // Memory Management P_CUDA_DEFINE_FUNCTION(cuArrayGetDescriptor, array_descriptor_v2_t* pArrayDescripter, array_t array); P_CUDA_DEFINE_FUNCTION(cuMemAlloc, device_ptr_t* ptr, std::size_t bytes); - P_CUDA_DEFINE_FUNCTION(cuMemAllocPitch, device_ptr_t* ptr, std::size_t* pitch, std::size_t width_in_bytes, - std::size_t height, uint32_t element_size_bytes); + P_CUDA_DEFINE_FUNCTION(cuMemAllocPitch, device_ptr_t* ptr, std::size_t* pitch, std::size_t width_in_bytes, std::size_t height, uint32_t element_size_bytes); P_CUDA_DEFINE_FUNCTION(cuMemFree, device_ptr_t ptr); P_CUDA_DEFINE_FUNCTION(cuMemHostGetDevicePointer, device_ptr_t* devptr, void* ptr, uint32_t flags); P_CUDA_DEFINE_FUNCTION(cuMemcpy, device_ptr_t dst, device_ptr_t src, std::size_t bytes); P_CUDA_DEFINE_FUNCTION(cuMemcpy2D, const memcpy2d_v2_t* copy); P_CUDA_DEFINE_FUNCTION(cuMemcpy2DAsync, const memcpy2d_v2_t* copy, stream_t stream); - P_CUDA_DEFINE_FUNCTION(cuMemcpyAtoA, array_t dst, std::size_t dstOffset, array_t src, std::size_t srcOffset, - std::size_t byteCount); - P_CUDA_DEFINE_FUNCTION(cuMemcpyAtoD, device_ptr_t dst, array_t src, std::size_t srcOffset, - std::size_t byteCount); + P_CUDA_DEFINE_FUNCTION(cuMemcpyAtoA, array_t dst, std::size_t dstOffset, array_t src, std::size_t srcOffset, std::size_t byteCount); + 
P_CUDA_DEFINE_FUNCTION(cuMemcpyAtoD, device_ptr_t dst, array_t src, std::size_t srcOffset, std::size_t byteCount); P_CUDA_DEFINE_FUNCTION(cuMemcpyAtoH, void* dst, array_t src, std::size_t srcOffset, std::size_t byteCount); P_CUDA_DEFINE_FUNCTION(cuMemcpyAtoHAsync, void* dst, array_t src, std::size_t srcOffset, std::size_t byteCount); - P_CUDA_DEFINE_FUNCTION(cuMemcpyDtoA, array_t dst, std::size_t dstOffset, device_ptr_t src, - std::size_t byteCount); + P_CUDA_DEFINE_FUNCTION(cuMemcpyDtoA, array_t dst, std::size_t dstOffset, device_ptr_t src, std::size_t byteCount); P_CUDA_DEFINE_FUNCTION(cuMemcpyDtoD, device_ptr_t dst, array_t srcArray, std::size_t byteCount); P_CUDA_DEFINE_FUNCTION(cuMemcpyDtoH, void* dst, array_t src, std::size_t byteCount); P_CUDA_DEFINE_FUNCTION(cuMemcpyDtoHAsync, void* dst, array_t src, std::size_t byteCount); @@ -303,10 +299,8 @@ namespace streamfx::nvidia::cuda { // Graphics Interoperability P_CUDA_DEFINE_FUNCTION(cuGraphicsMapResources, uint32_t count, graphics_resource_t* resources, stream_t stream); - P_CUDA_DEFINE_FUNCTION(cuGraphicsSubResourceGetMappedArray, array_t* array, graphics_resource_t resource, - uint32_t index, uint32_t level); - P_CUDA_DEFINE_FUNCTION(cuGraphicsUnmapResources, uint32_t count, graphics_resource_t* resources, - stream_t stream); + P_CUDA_DEFINE_FUNCTION(cuGraphicsSubResourceGetMappedArray, array_t* array, graphics_resource_t resource, uint32_t index, uint32_t level); + P_CUDA_DEFINE_FUNCTION(cuGraphicsUnmapResources, uint32_t count, graphics_resource_t* resources, stream_t stream); P_CUDA_DEFINE_FUNCTION(cuGraphicsUnregisterResource, graphics_resource_t resource); // Driver Entry Point Access @@ -330,13 +324,11 @@ namespace streamfx::nvidia::cuda { // Direct3D10 Interoperability P_CUDA_DEFINE_FUNCTION(cuD3D10GetDevice, device_t* device, IDXGIAdapter* adapter); - P_CUDA_DEFINE_FUNCTION(cuGraphicsD3D10RegisterResource, graphics_resource_t* resource, - ID3D10Resource* d3dresource, uint32_t flags); + P_CUDA_DEFINE_FUNCTION(cuGraphicsD3D10RegisterResource, graphics_resource_t* resource, ID3D10Resource* d3dresource, uint32_t flags); // Direct3D11 Interoperability P_CUDA_DEFINE_FUNCTION(cuD3D11GetDevice, device_t* device, IDXGIAdapter* adapter); - P_CUDA_DEFINE_FUNCTION(cuGraphicsD3D11RegisterResource, graphics_resource_t* resource, - ID3D11Resource* d3dresource, uint32_t flags); + P_CUDA_DEFINE_FUNCTION(cuGraphicsD3D11RegisterResource, graphics_resource_t* resource, ID3D11Resource* d3dresource, uint32_t flags); #endif public: static std::shared_ptr<::streamfx::nvidia::cuda::cuda> get(); diff --git a/source/nvidia/cv/nvidia-cv-image.cpp b/source/nvidia/cv/nvidia-cv-image.cpp index e433ff4a..2c3561c9 100644 --- a/source/nvidia/cv/nvidia-cv-image.cpp +++ b/source/nvidia/cv/nvidia-cv-image.cpp @@ -41,31 +41,23 @@ image::image() : _cv(::streamfx::nvidia::cv::cv::get()), _image(), _alignment(1) memset(&_image, sizeof(_image), 0); } -image::image(uint32_t width, uint32_t height, pixel_format pix_fmt, component_type cmp_type, - component_layout cmp_layout, memory_location location, uint32_t alignment) - : image() +image::image(uint32_t width, uint32_t height, pixel_format pix_fmt, component_type cmp_type, component_layout cmp_layout, memory_location location, uint32_t alignment) : image() { auto gctx = ::streamfx::obs::gs::context(); auto cctx = ::streamfx::nvidia::cuda::obs::get()->get_context()->enter(); _alignment = alignment; - if (auto res = _cv->NvCVImage_Alloc(&_image, width, height, pix_fmt, cmp_type, static_cast(cmp_layout), - 
static_cast(location), _alignment); - res != result::SUCCESS) { + if (auto res = _cv->NvCVImage_Alloc(&_image, width, height, pix_fmt, cmp_type, static_cast(cmp_layout), static_cast(location), _alignment); res != result::SUCCESS) { throw std::runtime_error(_cv->NvCV_GetErrorStringFromCode(res)); } } -void streamfx::nvidia::cv::image::reallocate(uint32_t width, uint32_t height, pixel_format pix_fmt, - component_type cmp_type, component_layout cmp_layout, - memory_location location, uint32_t alignment) +void streamfx::nvidia::cv::image::reallocate(uint32_t width, uint32_t height, pixel_format pix_fmt, component_type cmp_type, component_layout cmp_layout, memory_location location, uint32_t alignment) { auto gctx = ::streamfx::obs::gs::context(); auto cctx = ::streamfx::nvidia::cuda::obs::get()->get_context()->enter(); - if (auto res = _cv->NvCVImage_Realloc(&_image, width, height, pix_fmt, cmp_type, static_cast(cmp_layout), - static_cast(location), alignment); - res != result::SUCCESS) { + if (auto res = _cv->NvCVImage_Realloc(&_image, width, height, pix_fmt, cmp_type, static_cast(cmp_layout), static_cast(location), alignment); res != result::SUCCESS) { throw std::runtime_error(_cv->NvCV_GetErrorStringFromCode(res)); } _alignment = alignment; @@ -73,8 +65,7 @@ void streamfx::nvidia::cv::image::reallocate(uint32_t width, uint32_t height, pi void streamfx::nvidia::cv::image::resize(uint32_t width, uint32_t height) { - reallocate(width, height, _image.pxl_format, _image.comp_type, static_cast(_image.comp_layout), - static_cast(_image.mem_location), _alignment); + reallocate(width, height, _image.pxl_format, _image.comp_type, static_cast(_image.comp_layout), static_cast(_image.mem_location), _alignment); } streamfx::nvidia::cv::image_t* streamfx::nvidia::cv::image::get_image() diff --git a/source/nvidia/cv/nvidia-cv-image.hpp b/source/nvidia/cv/nvidia-cv-image.hpp index 02271082..799cc884 100644 --- a/source/nvidia/cv/nvidia-cv-image.hpp +++ b/source/nvidia/cv/nvidia-cv-image.hpp @@ -28,11 +28,9 @@ namespace streamfx::nvidia::cv { image(); public: - image(uint32_t width, uint32_t height, pixel_format pix_fmt, component_type cmp_type, - component_layout cmp_layout, memory_location location, uint32_t alignment); + image(uint32_t width, uint32_t height, pixel_format pix_fmt, component_type cmp_type, component_layout cmp_layout, memory_location location, uint32_t alignment); - virtual void reallocate(uint32_t width, uint32_t height, pixel_format pix_fmt, component_type cmp_type, - component_layout cmp_layout, memory_location location, uint32_t alignment); + virtual void reallocate(uint32_t width, uint32_t height, pixel_format pix_fmt, component_type cmp_type, component_layout cmp_layout, memory_location location, uint32_t alignment); virtual void resize(uint32_t width, uint32_t height); diff --git a/source/nvidia/cv/nvidia-cv-texture.cpp b/source/nvidia/cv/nvidia-cv-texture.cpp index 1a9bd52d..935d1876 100644 --- a/source/nvidia/cv/nvidia-cv-texture.cpp +++ b/source/nvidia/cv/nvidia-cv-texture.cpp @@ -46,8 +46,7 @@ texture::texture(uint32_t width, uint32_t height, gs_color_format pix_fmt) auto cctx = ::streamfx::nvidia::cuda::obs::get()->get_context()->enter(); // Allocate a new Texture - _texture = std::make_shared<::streamfx::obs::gs::texture>(width, height, pix_fmt, 1, nullptr, - ::streamfx::obs::gs::texture::flags::None); + _texture = std::make_shared<::streamfx::obs::gs::texture>(width, height, pix_fmt, 1, nullptr, ::streamfx::obs::gs::texture::flags::None); alloc(); } @@ -60,8 +59,7 @@ void 
texture::resize(uint32_t width, uint32_t height) // Allocate a new Texture free(); - _texture = std::make_shared<::streamfx::obs::gs::texture>(width, height, _texture->get_color_format(), 1, nullptr, - ::streamfx::obs::gs::texture::flags::None); + _texture = std::make_shared<::streamfx::obs::gs::texture>(width, height, _texture->get_color_format(), 1, nullptr, ::streamfx::obs::gs::texture::flags::None); alloc(); } @@ -77,16 +75,12 @@ void streamfx::nvidia::cv::texture::alloc() auto nvobs = ::streamfx::nvidia::cuda::obs::get(); // Allocate any relevant CV buffers and Map it. - if (auto res = _cv->NvCVImage_InitFromD3D11Texture( - &_image, reinterpret_cast<ID3D11Texture2D*>(gs_texture_get_obj(_texture->get_object()))); - res != result::SUCCESS) { - D_LOG_ERROR("Object 0x%" PRIxPTR " failed NvCVImage_InitFromD3D11Texture call with error: %s", this, - _cv->NvCV_GetErrorStringFromCode(res)); + if (auto res = _cv->NvCVImage_InitFromD3D11Texture(&_image, reinterpret_cast<ID3D11Texture2D*>(gs_texture_get_obj(_texture->get_object()))); res != result::SUCCESS) { + D_LOG_ERROR("Object 0x%" PRIxPTR " failed NvCVImage_InitFromD3D11Texture call with error: %s", this, _cv->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("NvCVImage_InitFromD3D11Texture"); } if (auto res = _cv->NvCVImage_MapResource(&_image, nvobs->get_stream()->get()); res != result::SUCCESS) { - D_LOG_ERROR("Object 0x%" PRIxPTR " failed NvCVImage_MapResource call with error: %s", this, - _cv->NvCV_GetErrorStringFromCode(res)); + D_LOG_ERROR("Object 0x%" PRIxPTR " failed NvCVImage_MapResource call with error: %s", this, _cv->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("NvCVImage_MapResource"); } } @@ -99,8 +93,7 @@ void streamfx::nvidia::cv::texture::free() // Unmap and deallocate any relevant CV buffers.
if (auto res = _cv->NvCVImage_UnmapResource(&_image, nvobs->get_stream()->get()); res != result::SUCCESS) { - D_LOG_ERROR("Object 0x%" PRIxPTR " failed NvCVImage_UnmapResource call with error: %s", this, - _cv->NvCV_GetErrorStringFromCode(res)); + D_LOG_ERROR("Object 0x%" PRIxPTR " failed NvCVImage_UnmapResource call with error: %s", this, _cv->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("NvCVImage_UnmapResource"); } _cv->NvCVImage_Dealloc(&_image); diff --git a/source/nvidia/cv/nvidia-cv.cpp b/source/nvidia/cv/nvidia-cv.cpp index 6a109efc..0f35e137 100644 --- a/source/nvidia/cv/nvidia-cv.cpp +++ b/source/nvidia/cv/nvidia-cv.cpp @@ -84,8 +84,7 @@ streamfx::nvidia::cv::cv::cv() env_size = GetEnvironmentVariableW(ST_ENV_NVIDIA_VIDEO_EFFECTS_SDK_PATH, nullptr, 0); if (env_size > 0) { buffer.resize(static_cast(env_size) + 1); - env_size = GetEnvironmentVariableW(ST_ENV_NVIDIA_VIDEO_EFFECTS_SDK_PATH, buffer.data(), - static_cast(buffer.size())); + env_size = GetEnvironmentVariableW(ST_ENV_NVIDIA_VIDEO_EFFECTS_SDK_PATH, buffer.data(), static_cast(buffer.size())); vfx_sdk_path = std::wstring(buffer.data(), buffer.size()); } else { PWSTR str = nullptr; @@ -113,8 +112,7 @@ streamfx::nvidia::cv::cv::cv() env_size = GetEnvironmentVariableW(ST_ENV_NVIDIA_AR_SDK_PATH, nullptr, 0); if (env_size > 0) { buffer.resize(static_cast(env_size) + 1); - env_size = - GetEnvironmentVariableW(ST_ENV_NVIDIA_AR_SDK_PATH, buffer.data(), static_cast(buffer.size())); + env_size = GetEnvironmentVariableW(ST_ENV_NVIDIA_AR_SDK_PATH, buffer.data(), static_cast(buffer.size())); ar_sdk_path = std::wstring(buffer.data(), buffer.size()); } else { PWSTR str = nullptr; @@ -153,15 +151,11 @@ streamfx::nvidia::cv::cv::cv() std::string error; { LPWSTR str; - FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER - | FORMAT_MESSAGE_IGNORE_INSERTS, - nullptr, ec, MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US), - reinterpret_cast(&str), 0, nullptr); + FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_IGNORE_INSERTS, nullptr, ec, MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US), reinterpret_cast(&str), 0, nullptr); error = ::streamfx::util::platform::native_to_utf8(std::wstring(str)); LocalFree(str); } - D_LOG_WARNING("Failed to add '%'s to the library loader paths with error: %s (Code %" PRIu32 ")", - vfx_sdk_path.string().c_str(), error.c_str(), ec); + D_LOG_WARNING("Failed to add '%'s to the library loader paths with error: %s (Code %" PRIu32 ")", vfx_sdk_path.string().c_str(), error.c_str(), ec); } #endif diff --git a/source/nvidia/cv/nvidia-cv.hpp b/source/nvidia/cv/nvidia-cv.hpp index 31e468c7..41bfdf9c 100644 --- a/source/nvidia/cv/nvidia-cv.hpp +++ b/source/nvidia/cv/nvidia-cv.hpp @@ -201,69 +201,36 @@ namespace streamfx::nvidia::cv { cv(); public: - NVCVI_DEFINE_FUNCTION(NvCVImage_Init, image_t* image, uint32_t width, uint32_t height, uint32_t pitch, - void* pixels, pixel_format format, component_type comp_type, component_layout comp_layout, - memory_location mem_location); - NVCVI_DEFINE_FUNCTION(NvCVImage_InitView, image_t* sub_image, image_t* image, int32_t x, int32_t y, - uint32_t width, uint32_t height); - NVCVI_DEFINE_FUNCTION(NvCVImage_Alloc, image_t* image, uint32_t width, uint32_t height, pixel_format format, - component_type comp_type, uint32_t comp_layout, uint32_t mem_location, - uint32_t alignment); - NVCVI_DEFINE_FUNCTION(NvCVImage_Realloc, image_t* image, uint32_t width, uint32_t height, pixel_format format, - component_type comp_type, 
uint32_t comp_layout, uint32_t mem_location, - uint32_t alignment); + NVCVI_DEFINE_FUNCTION(NvCVImage_Init, image_t* image, uint32_t width, uint32_t height, uint32_t pitch, void* pixels, pixel_format format, component_type comp_type, component_layout comp_layout, memory_location mem_location); + NVCVI_DEFINE_FUNCTION(NvCVImage_InitView, image_t* sub_image, image_t* image, int32_t x, int32_t y, uint32_t width, uint32_t height); + NVCVI_DEFINE_FUNCTION(NvCVImage_Alloc, image_t* image, uint32_t width, uint32_t height, pixel_format format, component_type comp_type, uint32_t comp_layout, uint32_t mem_location, uint32_t alignment); + NVCVI_DEFINE_FUNCTION(NvCVImage_Realloc, image_t* image, uint32_t width, uint32_t height, pixel_format format, component_type comp_type, uint32_t comp_layout, uint32_t mem_location, uint32_t alignment); NVCVI_DEFINE_FUNCTION_EX(void, NvCVImage_Dealloc, image_t* image); - NVCVI_DEFINE_FUNCTION(NvCVImage_Create, uint32_t width, uint32_t height, pixel_format format, - component_type comp_type, component_layout comp_layout, memory_location mem_location, - uint32_t alignment, image_t** image); + NVCVI_DEFINE_FUNCTION(NvCVImage_Create, uint32_t width, uint32_t height, pixel_format format, component_type comp_type, component_layout comp_layout, memory_location mem_location, uint32_t alignment, image_t** image); NVCVI_DEFINE_FUNCTION_EX(void, NvCVImage_Destroy, image_t* image); - NVCVI_DEFINE_FUNCTION_EX(void, NvCVImage_ComponentOffsets, pixel_format format, int32_t* red_offset, - int32_t* green_offset, int32_t* blue_offset, int32_t* alpha_offset, int32_t* y_offset); - NVCVI_DEFINE_FUNCTION(NvCVImage_Transfer, const image_t* source, image_t* destination, float scale, - ::streamfx::nvidia::cuda::stream_t stream, image_t* buffer); - NVCVI_DEFINE_FUNCTION(NvCVImage_TransferRect, const image_t* source, const rect* source_rect, - image_t* destination, const point* destination_point, float scale, - ::streamfx::nvidia::cuda::stream_t stream, image_t* buffer); - NVCVI_DEFINE_FUNCTION(NvCVImage_TransferFromYUV, const void* y, int32_t yPixBytes, int32_t yPitch, - const void* u, const void* v, int32_t uvPixBytes, int32_t uvPitch, pixel_format yuvFormat, - component_type yuvType, color_information yuvColorSpace, memory_location yuvMemSpace, - image_t* destination, const rect* destination_area, float scale, - ::streamfx::nvidia::cuda::stream_t stream, image_t* tmp); - NVCVI_DEFINE_FUNCTION(NvCVImage_TransferToYUV, const image_t* source, const rect* source_area, - const void* y, int32_t yPixBytes, int32_t yPitch, const void* u, const void* v, - int uvPixBytes, int32_t uvPitch, pixel_format yuvFormat, component_type yuvType, - color_information yuvColorSpace, memory_location yuvMemSpace, float scale, - ::streamfx::nvidia::cuda::stream_t stream, image_t* tmp); + NVCVI_DEFINE_FUNCTION_EX(void, NvCVImage_ComponentOffsets, pixel_format format, int32_t* red_offset, int32_t* green_offset, int32_t* blue_offset, int32_t* alpha_offset, int32_t* y_offset); + NVCVI_DEFINE_FUNCTION(NvCVImage_Transfer, const image_t* source, image_t* destination, float scale, ::streamfx::nvidia::cuda::stream_t stream, image_t* buffer); + NVCVI_DEFINE_FUNCTION(NvCVImage_TransferRect, const image_t* source, const rect* source_rect, image_t* destination, const point* destination_point, float scale, ::streamfx::nvidia::cuda::stream_t stream, image_t* buffer); + NVCVI_DEFINE_FUNCTION(NvCVImage_TransferFromYUV, const void* y, int32_t yPixBytes, int32_t yPitch, const void* u, const void* v, int32_t uvPixBytes, int32_t 
uvPitch, pixel_format yuvFormat, component_type yuvType, color_information yuvColorSpace, memory_location yuvMemSpace, image_t* destination, const rect* destination_area, float scale, ::streamfx::nvidia::cuda::stream_t stream, image_t* tmp); + NVCVI_DEFINE_FUNCTION(NvCVImage_TransferToYUV, const image_t* source, const rect* source_area, const void* y, int32_t yPixBytes, int32_t yPitch, const void* u, const void* v, int uvPixBytes, int32_t uvPitch, pixel_format yuvFormat, component_type yuvType, color_information yuvColorSpace, memory_location yuvMemSpace, float scale, ::streamfx::nvidia::cuda::stream_t stream, image_t* tmp); NVCVI_DEFINE_FUNCTION(NvCVImage_MapResource, image_t* image, ::streamfx::nvidia::cuda::stream_t stream); NVCVI_DEFINE_FUNCTION(NvCVImage_UnmapResource, image_t* image, ::streamfx::nvidia::cuda::stream_t stream); - NVCVI_DEFINE_FUNCTION(NvCVImage_Composite, const image_t* foreground, const image_t* background, - const image_t* matte, image_t* destination, ::streamfx::nvidia::cuda::stream_t stream); - NVCVI_DEFINE_FUNCTION(NvCVImage_CompositeRect, const image_t* foreground, - const point foreground_origin, const image_t* background, - const point background_origin, const image_t* matte, uint32_t mode, - image_t* destination, const point destination_origin, - ::streamfx::nvidia::cuda::stream_t stream); - NVCVI_DEFINE_FUNCTION(NvCVImage_CompositeOverConstant, const image_t* source, const image_t* matte, - const uint8_t background_color[3], image_t* destination); + NVCVI_DEFINE_FUNCTION(NvCVImage_Composite, const image_t* foreground, const image_t* background, const image_t* matte, image_t* destination, ::streamfx::nvidia::cuda::stream_t stream); + NVCVI_DEFINE_FUNCTION(NvCVImage_CompositeRect, const image_t* foreground, const point foreground_origin, const image_t* background, const point background_origin, const image_t* matte, uint32_t mode, image_t* destination, const point destination_origin, ::streamfx::nvidia::cuda::stream_t stream); + NVCVI_DEFINE_FUNCTION(NvCVImage_CompositeOverConstant, const image_t* source, const image_t* matte, const uint8_t background_color[3], image_t* destination); NVCVI_DEFINE_FUNCTION(NvCVImage_FlipY, const image_t* source, image_t* destination); - NVCVI_DEFINE_FUNCTION(NvCVImage_GetYUVPointers, image_t* image, uint8_t** y, uint8_t** u, uint8_t** v, - int32_t* y_pixel_bytes, int32_t* c_pixel_bytes, int32_t* y_row_bytes, - int32_t* c_row_bytes); + NVCVI_DEFINE_FUNCTION(NvCVImage_GetYUVPointers, image_t* image, uint8_t** y, uint8_t** u, uint8_t** v, int32_t* y_pixel_bytes, int32_t* c_pixel_bytes, int32_t* y_row_bytes, int32_t* c_row_bytes); NVCVI_DEFINE_FUNCTION_EX(const char*, NvCV_GetErrorStringFromCode, result code); #ifdef WIN32 NVCVI_DEFINE_FUNCTION(NvCVImage_InitFromD3D11Texture, image_t* image, struct ID3D11Texture2D* texture); - NVCVI_DEFINE_FUNCTION(NvCVImage_ToD3DFormat, pixel_format format, component_type comp_type, - component_layout comp_layout, DXGI_FORMAT* dxgi_format); - NVCVI_DEFINE_FUNCTION(NvCVImage_FromD3DFormat, DXGI_FORMAT d3dFormat, pixel_format* format, - component_type* comp_type, component_layout* comp_layout); + NVCVI_DEFINE_FUNCTION(NvCVImage_ToD3DFormat, pixel_format format, component_type comp_type, component_layout comp_layout, DXGI_FORMAT* dxgi_format); + NVCVI_DEFINE_FUNCTION(NvCVImage_FromD3DFormat, DXGI_FORMAT d3dFormat, pixel_format* format, component_type* comp_type, component_layout* comp_layout); #ifdef __dxgicommon_h__ - NVCVI_DEFINE_FUNCTION(NvCVImage_ToD3DColorSpace, color_information nvcvColorSpace, 
- DXGI_COLOR_SPACE_TYPE* pD3dColorSpace); - NVCVI_DEFINE_FUNCTION(NvCVImage_FromD3DColorSpace, DXGI_COLOR_SPACE_TYPE d3dColorSpace, - color_information* pNvcvColorSpace); + NVCVI_DEFINE_FUNCTION(NvCVImage_ToD3DColorSpace, color_information nvcvColorSpace, DXGI_COLOR_SPACE_TYPE* pD3dColorSpace); + NVCVI_DEFINE_FUNCTION(NvCVImage_FromD3DColorSpace, DXGI_COLOR_SPACE_TYPE d3dColorSpace, color_information* pNvcvColorSpace); #endif #endif diff --git a/source/nvidia/vfx/nvidia-vfx-denoising.cpp b/source/nvidia/vfx/nvidia-vfx-denoising.cpp index 2c6ab51c..caecfa30 100644 --- a/source/nvidia/vfx/nvidia-vfx-denoising.cpp +++ b/source/nvidia/vfx/nvidia-vfx-denoising.cpp @@ -44,9 +44,7 @@ streamfx::nvidia::vfx::denoising::~denoising() _tmp.reset(); } -streamfx::nvidia::vfx::denoising::denoising() - : effect(EFFECT_DENOISING), _dirty(true), _input(), _convert_to_fp32(), _source(), _destination(), _convert_to_u8(), - _output(), _tmp(), _state(0), _state_size(0), _strength(1.) +streamfx::nvidia::vfx::denoising::denoising() : effect(EFFECT_DENOISING), _dirty(true), _input(), _convert_to_fp32(), _source(), _destination(), _convert_to_u8(), _output(), _tmp(), _state(0), _state_size(0), _strength(1.) { // Enter Graphics and CUDA context. auto gctx = ::streamfx::obs::gs::context(); @@ -93,19 +91,16 @@ void streamfx::nvidia::vfx::denoising::size(std::pair& size) // Dominant Width double ar = static_cast(size.second) / static_cast(size.first); size.first = std::clamp(size.first, min_width, max_width); - size.second = std::clamp(static_cast(std::lround(static_cast(size.first) * ar)), - min_height, max_height); + size.second = std::clamp(static_cast(std::lround(static_cast(size.first) * ar)), min_height, max_height); } else { // Dominant Height double ar = static_cast(size.first) / static_cast(size.second); size.second = std::clamp(size.second, min_height, max_height); - size.first = std::clamp(static_cast(std::lround(static_cast(size.second) * ar)), - min_width, max_width); + size.first = std::clamp(static_cast(std::lround(static_cast(size.second) * ar)), min_width, max_width); } } -std::shared_ptr<::streamfx::obs::gs::texture> - streamfx::nvidia::vfx::denoising::process(std::shared_ptr<::streamfx::obs::gs::texture> in) +std::shared_ptr<::streamfx::obs::gs::texture> streamfx::nvidia::vfx::denoising::process(std::shared_ptr<::streamfx::obs::gs::texture> in) { // Enter Graphics and CUDA context. 
auto gctx = ::streamfx::obs::gs::context(); @@ -132,14 +127,10 @@ std::shared_ptr<::streamfx::obs::gs::texture> { // Convert Input to Source format #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, - "Convert Input -> Source"}; + ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, "Convert Input -> Source"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_input->get_image(), _convert_to_fp32->get_image(), 1.f / 255.f, - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_input->get_image(), _convert_to_fp32->get_image(), 1.f / 255.f, _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } @@ -148,11 +139,8 @@ std::shared_ptr<::streamfx::obs::gs::texture> #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, "Copy Input -> Source"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_convert_to_fp32->get_image(), _source->get_image(), 1.f, - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_convert_to_fp32->get_image(), _source->get_image(), 1.f, _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } @@ -169,28 +157,20 @@ std::shared_ptr<::streamfx::obs::gs::texture> { // Convert Destination to Output format #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, - "Convert Destination -> Output"}; + ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, "Convert Destination -> Output"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_destination->get_image(), _convert_to_u8->get_image(), 255.f, - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_destination->get_image(), _convert_to_u8->get_image(), 255.f, _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } { // Copy destination to output. 
#if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, - "Copy Destination -> Output"}; + ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, "Copy Destination -> Output"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_convert_to_u8->get_image(), _output->get_image(), 1., - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_convert_to_u8->get_image(), _output->get_image(), 1., _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } @@ -205,9 +185,7 @@ void streamfx::nvidia::vfx::denoising::resize(uint32_t width, uint32_t height) auto cctx = ::streamfx::nvidia::cuda::obs::get()->get_context()->enter(); if (!_tmp) { - _tmp = std::make_shared<::streamfx::nvidia::cv::image>( - width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8, - ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); + _tmp = std::make_shared<::streamfx::nvidia::cv::image>(width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } if (!_input || (_input->get_image()->width != width) || (_input->get_image()->height != height)) { @@ -218,14 +196,11 @@ void streamfx::nvidia::vfx::denoising::resize(uint32_t width, uint32_t height) } } - if (!_convert_to_fp32 || (_convert_to_fp32->get_image()->width != width) - || (_convert_to_fp32->get_image()->height != height)) { + if (!_convert_to_fp32 || (_convert_to_fp32->get_image()->width != width) || (_convert_to_fp32->get_image()->height != height)) { if (_convert_to_fp32) { _convert_to_fp32->resize(width, height); } else { - _convert_to_fp32 = std::make_shared<::streamfx::nvidia::cv::image>( - width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::FP32, - ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); + _convert_to_fp32 = std::make_shared<::streamfx::nvidia::cv::image>(width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } } @@ -233,13 +208,10 @@ void streamfx::nvidia::vfx::denoising::resize(uint32_t width, uint32_t height) if (_source) { _source->resize(width, height); } else { - _source = std::make_shared<::streamfx::nvidia::cv::image>( - width, height, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::FP32, - ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); + _source = std::make_shared<::streamfx::nvidia::cv::image>(width, height, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } - if (auto res = 
set(::streamfx::nvidia::vfx::PARAMETER_INPUT_IMAGE_0, _source); - res != ::streamfx::nvidia::cv::result::SUCCESS) { + if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_INPUT_IMAGE_0, _source); res != ::streamfx::nvidia::cv::result::SUCCESS) { D_LOG_ERROR("Failed to set input image due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); _source.reset(); throw std::runtime_error("SetImage failed."); @@ -252,13 +224,10 @@ void streamfx::nvidia::vfx::denoising::resize(uint32_t width, uint32_t height) if (_destination) { _destination->resize(width, height); } else { - _destination = std::make_shared<::streamfx::nvidia::cv::image>( - width, height, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::FP32, - ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); + _destination = std::make_shared<::streamfx::nvidia::cv::image>(width, height, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } - if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_OUTPUT_IMAGE_0, _destination); - res != ::streamfx::nvidia::cv::result::SUCCESS) { + if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_OUTPUT_IMAGE_0, _destination); res != ::streamfx::nvidia::cv::result::SUCCESS) { D_LOG_ERROR("Failed to set output image due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); _destination.reset(); throw std::runtime_error("SetImage failed."); @@ -267,15 +236,11 @@ void streamfx::nvidia::vfx::denoising::resize(uint32_t width, uint32_t height) _dirty = true; } - if (!_convert_to_u8 || (_convert_to_u8->get_image()->width != width) - || (_convert_to_u8->get_image()->height != height)) { + if (!_convert_to_u8 || (_convert_to_u8->get_image()->width != width) || (_convert_to_u8->get_image()->height != height)) { if (_convert_to_u8) { _convert_to_u8->resize(width, height); } else { - _convert_to_u8 = std::make_shared<::streamfx::nvidia::cv::image>( - width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, - ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, - ::streamfx::nvidia::cv::memory_location::GPU, 1); + _convert_to_u8 = std::make_shared<::streamfx::nvidia::cv::image>(width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, ::streamfx::nvidia::cv::memory_location::GPU, 1); } } @@ -297,9 +262,7 @@ void streamfx::nvidia::vfx::denoising::resize(uint32_t width, uint32_t height) _nvcuda->get_cuda()->cuMemsetD8(_state, 0, _state_size); _states[0] = reinterpret_cast(_state); - if (auto res = _nvvfx->NvVFX_SetObject(_fx.get(), ::streamfx::nvidia::vfx::PARAMETER_STATE, - reinterpret_cast(_states)); - res != ::streamfx::nvidia::cv::result::SUCCESS) { + if (auto res = _nvvfx->NvVFX_SetObject(_fx.get(), ::streamfx::nvidia::vfx::PARAMETER_STATE, reinterpret_cast(_states)); res != ::streamfx::nvidia::cv::result::SUCCESS) { D_LOG_ERROR("Failed to set state due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("SetObject failed."); } diff --git a/source/nvidia/vfx/nvidia-vfx-effect.cpp b/source/nvidia/vfx/nvidia-vfx-effect.cpp index 525302c3..d06f2724 100644 --- a/source/nvidia/vfx/nvidia-vfx-effect.cpp +++ b/source/nvidia/vfx/nvidia-vfx-effect.cpp @@ -37,8 +37,7 @@ streamfx::nvidia::vfx::effect::~effect() 
_nvcuda.reset(); } -streamfx::nvidia::vfx::effect::effect(effect_t effect) - : _nvcuda(cuda::obs::get()), _nvcvi(cv::cv::get()), _nvvfx(vfx::vfx::get()), _fx() +streamfx::nvidia::vfx::effect::effect(effect_t effect) : _nvcuda(cuda::obs::get()), _nvcvi(cv::cv::get()), _nvvfx(vfx::vfx::get()), _fx() { auto gctx = ::streamfx::obs::gs::context(); auto cctx = cuda::obs::get()->get_context()->enter(); diff --git a/source/nvidia/vfx/nvidia-vfx-greenscreen.cpp b/source/nvidia/vfx/nvidia-vfx-greenscreen.cpp index c2be92c3..f789a572 100644 --- a/source/nvidia/vfx/nvidia-vfx-greenscreen.cpp +++ b/source/nvidia/vfx/nvidia-vfx-greenscreen.cpp @@ -44,8 +44,7 @@ streamfx::nvidia::vfx::greenscreen::~greenscreen() _buffer.clear(); } -streamfx::nvidia::vfx::greenscreen::greenscreen() - : effect(EFFECT_GREEN_SCREEN), _dirty(true), _input(), _source(), _destination(), _output(), _tmp() +streamfx::nvidia::vfx::greenscreen::greenscreen() : effect(EFFECT_GREEN_SCREEN), _dirty(true), _input(), _source(), _destination(), _output(), _tmp() { // Enter Contexts. auto gctx = ::streamfx::obs::gs::context(); @@ -66,16 +65,14 @@ void streamfx::nvidia::vfx::greenscreen::size(std::pair& siz // Calculate Size if (size.first > size.second) { // Dominant Width - double ar = static_cast(size.second) / static_cast(size.first); - size.first = std::max(size.first, min_width); - size.second = - std::max(static_cast(std::lround(static_cast(size.first) * ar)), min_height); + double ar = static_cast(size.second) / static_cast(size.first); + size.first = std::max(size.first, min_width); + size.second = std::max(static_cast(std::lround(static_cast(size.first) * ar)), min_height); } else { // Dominant Height double ar = static_cast(size.first) / static_cast(size.second); size.second = std::max(size.second, min_height); - size.first = - std::max(static_cast(std::lround(static_cast(size.second) * ar)), min_width); + size.first = std::max(static_cast(std::lround(static_cast(size.second) * ar)), min_width); } } @@ -85,8 +82,7 @@ void streamfx::nvidia::vfx::greenscreen::set_mode(greenscreen_mode mode) _dirty = true; } -std::shared_ptr - streamfx::nvidia::vfx::greenscreen::process(std::shared_ptr<::streamfx::obs::gs::texture> in) +std::shared_ptr streamfx::nvidia::vfx::greenscreen::process(std::shared_ptr<::streamfx::obs::gs::texture> in) { // Enter Graphics and CUDA context. auto gctx = ::streamfx::obs::gs::context(); @@ -122,11 +118,8 @@ std::shared_ptr #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, "Copy Input -> Source"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_input->get_image(), _source->get_image(), 1.f, - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_input->get_image(), _source->get_image(), 1.f, _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } @@ -143,14 +136,10 @@ std::shared_ptr { // Copy destination to output. 
#if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, - "Copy Destination -> Output"}; + ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, "Copy Destination -> Output"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_destination->get_image(), _output->get_image(), 1., - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_destination->get_image(), _output->get_image(), 1., _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } @@ -179,18 +168,14 @@ void streamfx::nvidia::vfx::greenscreen::resize(uint32_t width, uint32_t height) size(in_size); if (!_tmp) { - _tmp = std::make_shared<::streamfx::nvidia::cv::image>( - width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8, - ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); + _tmp = std::make_shared<::streamfx::nvidia::cv::image>(width, height, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } - if (!_input || (in_size.first != _input->get_texture()->get_width()) - || (in_size.second != _input->get_texture()->get_height())) { + if (!_input || (in_size.first != _input->get_texture()->get_width()) || (in_size.second != _input->get_texture()->get_height())) { { _buffer.clear(); for (size_t idx = 0; idx < LATENCY_BUFFER; idx++) { - auto el = std::make_shared<::streamfx::obs::gs::texture>(width, height, GS_RGBA_UNORM, 1, nullptr, - ::streamfx::obs::gs::texture::flags::None); + auto el = std::make_shared<::streamfx::obs::gs::texture>(width, height, GS_RGBA_UNORM, 1, nullptr, ::streamfx::obs::gs::texture::flags::None); _buffer.push_back(el); } } @@ -204,15 +189,11 @@ void streamfx::nvidia::vfx::greenscreen::resize(uint32_t width, uint32_t height) _dirty = true; } - if (!_source || (in_size.first != _source->get_image()->width) - || (in_size.second != _source->get_image()->height)) { + if (!_source || (in_size.first != _source->get_image()->width) || (in_size.second != _source->get_image()->height)) { if (_source) { _source->resize(in_size.first, in_size.second); } else { - _source = std::make_shared<::streamfx::nvidia::cv::image>( - in_size.first, in_size.second, ::streamfx::nvidia::cv::pixel_format::BGR, - ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, - ::streamfx::nvidia::cv::memory_location::GPU, 1); + _source = std::make_shared<::streamfx::nvidia::cv::image>(in_size.first, in_size.second, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, ::streamfx::nvidia::cv::memory_location::GPU, 1); } if (auto v = set(PARAMETER_INPUT_IMAGE_0, _source); v != ::streamfx::nvidia::cv::result::SUCCESS) { @@ -222,15 +203,11 @@ void streamfx::nvidia::vfx::greenscreen::resize(uint32_t width, uint32_t height) _dirty 
= true; } - if (!_destination || (in_size.first != _destination->get_image()->width) - || (in_size.second != _destination->get_image()->height)) { + if (!_destination || (in_size.first != _destination->get_image()->width) || (in_size.second != _destination->get_image()->height)) { if (_destination) { _destination->resize(in_size.first, in_size.second); } else { - _destination = std::make_shared<::streamfx::nvidia::cv::image>( - in_size.first, in_size.second, ::streamfx::nvidia::cv::pixel_format::A, - ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, - ::streamfx::nvidia::cv::memory_location::GPU, 1); + _destination = std::make_shared<::streamfx::nvidia::cv::image>(in_size.first, in_size.second, ::streamfx::nvidia::cv::pixel_format::A, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, ::streamfx::nvidia::cv::memory_location::GPU, 1); } if (auto v = set(PARAMETER_OUTPUT_IMAGE_0, _destination); v != ::streamfx::nvidia::cv::result::SUCCESS) { @@ -240,8 +217,7 @@ void streamfx::nvidia::vfx::greenscreen::resize(uint32_t width, uint32_t height) _dirty = true; } - if (!_output || (in_size.first != _output->get_texture()->get_width()) - || (in_size.second != _output->get_texture()->get_height())) { + if (!_output || (in_size.first != _output->get_texture()->get_width()) || (in_size.second != _output->get_texture()->get_height())) { if (_output) { _output->resize(in_size.first, in_size.second); } else { diff --git a/source/nvidia/vfx/nvidia-vfx-superresolution.cpp b/source/nvidia/vfx/nvidia-vfx-superresolution.cpp index f46ab7d1..7b7fa8f4 100644 --- a/source/nvidia/vfx/nvidia-vfx-superresolution.cpp +++ b/source/nvidia/vfx/nvidia-vfx-superresolution.cpp @@ -74,10 +74,7 @@ streamfx::nvidia::vfx::superresolution::~superresolution() _tmp.reset(); } -streamfx::nvidia::vfx::superresolution::superresolution() - : effect(EFFECT_SUPERRESOLUTION), _dirty(true), _input(), _convert_to_fp32(), _source(), _destination(), - _convert_to_u8(), _output(), _tmp(), _strength(1.), _scale(1.5), _cache_input_size(), _cache_output_size(), - _cache_scale() +streamfx::nvidia::vfx::superresolution::superresolution() : effect(EFFECT_SUPERRESOLUTION), _dirty(true), _input(), _convert_to_fp32(), _source(), _destination(), _convert_to_u8(), _output(), _tmp(), _strength(1.), _scale(1.5), _cache_input_size(), _cache_output_size(), _cache_scale() { // Enter Graphics and CUDA context. auto gctx = ::streamfx::obs::gs::context(); @@ -105,8 +102,7 @@ void streamfx::nvidia::vfx::superresolution::set_strength(float strength) uint32_t value = (_strength >= .5f) ? 1u : 0u; auto gctx = ::streamfx::obs::gs::context(); auto cctx = ::streamfx::nvidia::cuda::obs::get()->get_context()->enter(); - if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_STRENGTH, value); - res != ::streamfx::nvidia::cv::result::SUCCESS) { + if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_STRENGTH, value); res != ::streamfx::nvidia::cv::result::SUCCESS) { D_LOG_ERROR("Failed to set '%s' to %lu.", ::streamfx::nvidia::vfx::PARAMETER_STRENGTH, value); }; } @@ -137,13 +133,10 @@ float streamfx::nvidia::vfx::superresolution::scale() return _scale; } -void streamfx::nvidia::vfx::superresolution::size(std::pair const& size, - std::pair& input_size, - std::pair& output_size) +void streamfx::nvidia::vfx::superresolution::size(std::pair const& size, std::pair& input_size, std::pair& output_size) { // Check if the size has actually changed at all. 
- if ((input_size.first == _cache_input_size.first) && (input_size.second == _cache_input_size.second) - && (_scale == _cache_scale)) { + if ((input_size.first == _cache_input_size.first) && (input_size.second == _cache_input_size.second) && (_scale == _cache_scale)) { input_size = _cache_input_size; output_size = _cache_output_size; _scale = _cache_scale; @@ -175,14 +168,12 @@ void streamfx::nvidia::vfx::superresolution::size(std::pair // Dominant Width double ar = static_cast(input_size.second) / static_cast(input_size.first); input_size.first = std::clamp(input_size.first, min_width, max_width); - input_size.second = std::clamp( - static_cast(std::lround(static_cast(input_size.first) * ar)), min_height, max_height); + input_size.second = std::clamp(static_cast(std::lround(static_cast(input_size.first) * ar)), min_height, max_height); } else { // Dominant Height double ar = static_cast(input_size.first) / static_cast(input_size.second); input_size.second = std::clamp(input_size.second, min_height, max_height); - input_size.first = std::clamp( - static_cast(std::lround(static_cast(input_size.second) * ar)), min_width, max_width); + input_size.first = std::clamp(static_cast(std::lround(static_cast(input_size.second) * ar)), min_width, max_width); } // Calculate Output Size. @@ -192,8 +183,7 @@ void streamfx::nvidia::vfx::superresolution::size(std::pair // Verify that this is a valid scale factor. float width_mul = (static_cast(output_size.first) / static_cast(input_size.first)); float height_mul = (static_cast(output_size.second) / static_cast(input_size.second)); - if (!::streamfx::util::math::is_close(width_mul, _scale, 0.00001) - || !::streamfx::util::math::is_close(height_mul, _scale, 0.00001)) { + if (!::streamfx::util::math::is_close(width_mul, _scale, 0.00001) || !::streamfx::util::math::is_close(height_mul, _scale, 0.00001)) { size_t scale_idx = find_closest_scale_factor_index(_scale); if (scale_idx < supported_scale_factors.size()) { _scale = supported_scale_factors[scale_idx + 1]; @@ -207,8 +197,7 @@ void streamfx::nvidia::vfx::superresolution::size(std::pair _cache_scale = _scale; } -std::shared_ptr<::streamfx::obs::gs::texture> - streamfx::nvidia::vfx::superresolution::process(std::shared_ptr<::streamfx::obs::gs::texture> in) +std::shared_ptr<::streamfx::obs::gs::texture> streamfx::nvidia::vfx::superresolution::process(std::shared_ptr<::streamfx::obs::gs::texture> in) { // Enter Graphics and CUDA context. 
auto gctx = ::streamfx::obs::gs::context(); @@ -235,14 +224,10 @@ std::shared_ptr<::streamfx::obs::gs::texture> { // Convert Input to Source format #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, - "Convert Input -> Source"}; + ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, "Convert Input -> Source"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_input->get_image(), _convert_to_fp32->get_image(), 1.f, - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_input->get_image(), _convert_to_fp32->get_image(), 1.f, _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } @@ -251,11 +236,8 @@ std::shared_ptr<::streamfx::obs::gs::texture> #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, "Copy Input -> Source"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_convert_to_fp32->get_image(), _source->get_image(), 1.f, - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_convert_to_fp32->get_image(), _source->get_image(), 1.f, _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer input to processing source due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } @@ -272,28 +254,20 @@ std::shared_ptr<::streamfx::obs::gs::texture> { // Convert Destination to Output format #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, - "Convert Destination -> Output"}; + ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_convert, "Convert Destination -> Output"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_destination->get_image(), _convert_to_u8->get_image(), 1.f, - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_destination->get_image(), _convert_to_u8->get_image(), 1.f, _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } { // Copy destination to output. 
#if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, - "Copy Destination -> Output"}; + ::streamfx::obs::gs::debug_marker profiler1{::streamfx::obs::gs::debug_color_copy, "Copy Destination -> Output"}; #endif - if (auto res = _nvcvi->NvCVImage_Transfer(_convert_to_u8->get_image(), _output->get_image(), 1., - _nvcuda->get_stream()->get(), _tmp->get_image()); - res != ::streamfx::nvidia::cv::result::SUCCESS) { - D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", - _nvcvi->NvCV_GetErrorStringFromCode(res)); + if (auto res = _nvcvi->NvCVImage_Transfer(_convert_to_u8->get_image(), _output->get_image(), 1., _nvcuda->get_stream()->get(), _tmp->get_image()); res != ::streamfx::nvidia::cv::result::SUCCESS) { + D_LOG_ERROR("Failed to transfer processing result to output due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("Transfer failed."); } } @@ -311,47 +285,33 @@ void streamfx::nvidia::vfx::superresolution::resize(uint32_t width, uint32_t hei this->size(_cache_input_size, _cache_input_size, _cache_output_size); if (!_tmp) { - _tmp = std::make_shared<::streamfx::nvidia::cv::image>( - _cache_output_size.first, _cache_output_size.second, ::streamfx::nvidia::cv::pixel_format::RGBA, - ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::PLANAR, - ::streamfx::nvidia::cv::memory_location::GPU, 1); + _tmp = std::make_shared<::streamfx::nvidia::cv::image>(_cache_output_size.first, _cache_output_size.second, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } - if (!_input || (_input->get_image()->width != _cache_input_size.first) - || (_input->get_image()->height != _cache_input_size.second)) { + if (!_input || (_input->get_image()->width != _cache_input_size.first) || (_input->get_image()->height != _cache_input_size.second)) { if (_input) { _input->resize(_cache_input_size.first, _cache_input_size.second); } else { - _input = std::make_shared<::streamfx::nvidia::cv::texture>(_cache_input_size.first, - _cache_input_size.second, GS_RGBA_UNORM); + _input = std::make_shared<::streamfx::nvidia::cv::texture>(_cache_input_size.first, _cache_input_size.second, GS_RGBA_UNORM); } } - if (!_convert_to_fp32 || (_convert_to_fp32->get_image()->width != _cache_input_size.first) - || (_convert_to_fp32->get_image()->height != _cache_input_size.second)) { + if (!_convert_to_fp32 || (_convert_to_fp32->get_image()->width != _cache_input_size.first) || (_convert_to_fp32->get_image()->height != _cache_input_size.second)) { if (_convert_to_fp32) { _convert_to_fp32->resize(_cache_input_size.first, _cache_input_size.second); } else { - _convert_to_fp32 = std::make_shared<::streamfx::nvidia::cv::image>( - _cache_input_size.first, _cache_input_size.second, ::streamfx::nvidia::cv::pixel_format::RGBA, - ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, - ::streamfx::nvidia::cv::memory_location::GPU, 1); + _convert_to_fp32 = std::make_shared<::streamfx::nvidia::cv::image>(_cache_input_size.first, _cache_input_size.second, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } } - if (!_source || 
(_source->get_image()->width != _cache_input_size.first) - || (_source->get_image()->height != _cache_input_size.second)) { + if (!_source || (_source->get_image()->width != _cache_input_size.first) || (_source->get_image()->height != _cache_input_size.second)) { if (_source) { _source->resize(_cache_input_size.first, _cache_input_size.second); } else { - _source = std::make_shared<::streamfx::nvidia::cv::image>( - _cache_input_size.first, _cache_input_size.second, ::streamfx::nvidia::cv::pixel_format::BGR, - ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, - ::streamfx::nvidia::cv::memory_location::GPU, 1); + _source = std::make_shared<::streamfx::nvidia::cv::image>(_cache_input_size.first, _cache_input_size.second, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } - if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_INPUT_IMAGE_0, _source); - res != ::streamfx::nvidia::cv::result::SUCCESS) { + if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_INPUT_IMAGE_0, _source); res != ::streamfx::nvidia::cv::result::SUCCESS) { D_LOG_ERROR("Failed to set input image due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("SetImage failed."); } @@ -359,19 +319,14 @@ void streamfx::nvidia::vfx::superresolution::resize(uint32_t width, uint32_t hei _dirty = true; } - if (!_destination || (_destination->get_image()->width != _cache_output_size.first) - || (_destination->get_image()->height != _cache_output_size.second)) { + if (!_destination || (_destination->get_image()->width != _cache_output_size.first) || (_destination->get_image()->height != _cache_output_size.second)) { if (_destination) { _destination->resize(_cache_output_size.first, _cache_output_size.second); } else { - _destination = std::make_shared<::streamfx::nvidia::cv::image>( - _cache_output_size.first, _cache_output_size.second, ::streamfx::nvidia::cv::pixel_format::BGR, - ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, - ::streamfx::nvidia::cv::memory_location::GPU, 1); + _destination = std::make_shared<::streamfx::nvidia::cv::image>(_cache_output_size.first, _cache_output_size.second, ::streamfx::nvidia::cv::pixel_format::BGR, ::streamfx::nvidia::cv::component_type::FP32, ::streamfx::nvidia::cv::component_layout::PLANAR, ::streamfx::nvidia::cv::memory_location::GPU, 1); } - if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_OUTPUT_IMAGE_0, _destination); - res != ::streamfx::nvidia::cv::result::SUCCESS) { + if (auto res = set(::streamfx::nvidia::vfx::PARAMETER_OUTPUT_IMAGE_0, _destination); res != ::streamfx::nvidia::cv::result::SUCCESS) { D_LOG_ERROR("Failed to set output image due to error: %s", _nvcvi->NvCV_GetErrorStringFromCode(res)); throw std::runtime_error("SetImage failed."); } @@ -379,25 +334,19 @@ void streamfx::nvidia::vfx::superresolution::resize(uint32_t width, uint32_t hei _dirty = true; } - if (!_convert_to_u8 || (_convert_to_u8->get_image()->width != _cache_output_size.first) - || (_convert_to_u8->get_image()->height != _cache_output_size.second)) { + if (!_convert_to_u8 || (_convert_to_u8->get_image()->width != _cache_output_size.first) || (_convert_to_u8->get_image()->height != _cache_output_size.second)) { if (_convert_to_u8) { _convert_to_u8->resize(_cache_output_size.first, _cache_output_size.second); } else { - _convert_to_u8 = 
std::make_shared<::streamfx::nvidia::cv::image>( - _cache_output_size.first, _cache_output_size.second, ::streamfx::nvidia::cv::pixel_format::RGBA, - ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, - ::streamfx::nvidia::cv::memory_location::GPU, 1); + _convert_to_u8 = std::make_shared<::streamfx::nvidia::cv::image>(_cache_output_size.first, _cache_output_size.second, ::streamfx::nvidia::cv::pixel_format::RGBA, ::streamfx::nvidia::cv::component_type::UINT8, ::streamfx::nvidia::cv::component_layout::INTERLEAVED, ::streamfx::nvidia::cv::memory_location::GPU, 1); } } - if (!_output || (_output->get_image()->width != _cache_output_size.first) - || (_output->get_image()->height != _cache_output_size.second)) { + if (!_output || (_output->get_image()->width != _cache_output_size.first) || (_output->get_image()->height != _cache_output_size.second)) { if (_output) { _output->resize(_cache_output_size.first, _cache_output_size.second); } else { - _output = std::make_shared<::streamfx::nvidia::cv::texture>(_cache_output_size.first, - _cache_output_size.second, GS_RGBA_UNORM); + _output = std::make_shared<::streamfx::nvidia::cv::texture>(_cache_output_size.first, _cache_output_size.second, GS_RGBA_UNORM); } } } diff --git a/source/nvidia/vfx/nvidia-vfx-superresolution.hpp b/source/nvidia/vfx/nvidia-vfx-superresolution.hpp index 56cedac0..4cc1828c 100644 --- a/source/nvidia/vfx/nvidia-vfx-superresolution.hpp +++ b/source/nvidia/vfx/nvidia-vfx-superresolution.hpp @@ -40,8 +40,7 @@ namespace streamfx::nvidia::vfx { void set_scale(float scale); float scale(); - void size(std::pair const& size, std::pair& input_size, - std::pair& output_size); + void size(std::pair const& size, std::pair& input_size, std::pair& output_size); std::shared_ptr<::streamfx::obs::gs::texture> process(std::shared_ptr<::streamfx::obs::gs::texture> in); diff --git a/source/nvidia/vfx/nvidia-vfx.cpp b/source/nvidia/vfx/nvidia-vfx.cpp index fe536373..e5f3b586 100644 --- a/source/nvidia/vfx/nvidia-vfx.cpp +++ b/source/nvidia/vfx/nvidia-vfx.cpp @@ -83,8 +83,7 @@ streamfx::nvidia::vfx::vfx::vfx() env_size = GetEnvironmentVariableW(ST_ENV_NVIDIA_VIDEO_EFFECTS_SDK_PATH, nullptr, 0); if (env_size > 0) { buffer.resize(static_cast(env_size) + 1); - env_size = GetEnvironmentVariableW(ST_ENV_NVIDIA_VIDEO_EFFECTS_SDK_PATH, buffer.data(), - static_cast(buffer.size())); + env_size = GetEnvironmentVariableW(ST_ENV_NVIDIA_VIDEO_EFFECTS_SDK_PATH, buffer.data(), static_cast(buffer.size())); sdk_path = std::wstring(buffer.data(), buffer.size()); } else { PWSTR str = nullptr; @@ -118,15 +117,11 @@ streamfx::nvidia::vfx::vfx::vfx() std::string error; { LPWSTR str; - FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER - | FORMAT_MESSAGE_IGNORE_INSERTS, - nullptr, ec, MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US), - reinterpret_cast(&str), 0, nullptr); + FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_IGNORE_INSERTS, nullptr, ec, MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US), reinterpret_cast(&str), 0, nullptr); error = ::streamfx::util::platform::native_to_utf8(std::wstring(str)); LocalFree(str); } - D_LOG_WARNING("Failed to add '%'s to the library loader paths with error: %s (Code %" PRIu32 ")", - sdk_path.string().c_str(), error.c_str(), ec); + D_LOG_WARNING("Failed to add '%'s to the library loader paths with error: %s (Code %" PRIu32 ")", sdk_path.string().c_str(), error.c_str(), ec); } #endif diff --git 
a/source/nvidia/vfx/nvidia-vfx.hpp b/source/nvidia/vfx/nvidia-vfx.hpp index a7b84e23..5b9562c2 100644 --- a/source/nvidia/vfx/nvidia-vfx.hpp +++ b/source/nvidia/vfx/nvidia-vfx.hpp @@ -70,10 +70,8 @@ namespace streamfx::nvidia::vfx { NVVFX_DEFINE_FUNCTION(NvVFX_SetF64, handle_t effect, parameter_t paramName, double val); NVVFX_DEFINE_FUNCTION(NvVFX_SetU64, handle_t effect, parameter_t paramName, uint64_t val); NVVFX_DEFINE_FUNCTION(NvVFX_SetObject, handle_t effect, parameter_t paramName, void* ptr); - NVVFX_DEFINE_FUNCTION(NvVFX_SetCudaStream, handle_t effect, parameter_t paramName, - ::streamfx::nvidia::cuda::stream_t stream); - NVVFX_DEFINE_FUNCTION(NvVFX_SetImage, handle_t effect, parameter_t paramName, - ::streamfx::nvidia::cv::image_t* im); + NVVFX_DEFINE_FUNCTION(NvVFX_SetCudaStream, handle_t effect, parameter_t paramName, ::streamfx::nvidia::cuda::stream_t stream); + NVVFX_DEFINE_FUNCTION(NvVFX_SetImage, handle_t effect, parameter_t paramName, ::streamfx::nvidia::cv::image_t* im); NVVFX_DEFINE_FUNCTION(NvVFX_SetString, handle_t effect, parameter_t paramName, const char* str); NVVFX_DEFINE_FUNCTION(NvVFX_GetU32, handle_t effect, parameter_t paramName, uint32_t* val); NVVFX_DEFINE_FUNCTION(NvVFX_GetS32, handle_t effect, parameter_t paramName, int32_t* val); @@ -81,10 +79,8 @@ namespace streamfx::nvidia::vfx { NVVFX_DEFINE_FUNCTION(NvVFX_GetF64, handle_t effect, parameter_t paramName, double* val); NVVFX_DEFINE_FUNCTION(NvVFX_GetU64, handle_t effect, parameter_t paramName, uint64_t* val); NVVFX_DEFINE_FUNCTION(NvVFX_GetObject, handle_t effect, parameter_t paramName, void** ptr); - NVVFX_DEFINE_FUNCTION(NvVFX_GetCudaStream, handle_t effect, parameter_t paramName, - ::streamfx::nvidia::cuda::stream_t stream); - NVVFX_DEFINE_FUNCTION(NvVFX_GetImage, handle_t effect, parameter_t paramName, - ::streamfx::nvidia::cv::image_t* im); + NVVFX_DEFINE_FUNCTION(NvVFX_GetCudaStream, handle_t effect, parameter_t paramName, ::streamfx::nvidia::cuda::stream_t stream); + NVVFX_DEFINE_FUNCTION(NvVFX_GetImage, handle_t effect, parameter_t paramName, ::streamfx::nvidia::cv::image_t* im); NVVFX_DEFINE_FUNCTION(NvVFX_GetString, handle_t effect, parameter_t paramName, const char** str); NVVFX_DEFINE_FUNCTION(NvVFX_Run, handle_t effect, int32_t async); NVVFX_DEFINE_FUNCTION(NvVFX_Load, handle_t effect); diff --git a/source/obs/browser/obs-browser-panel.hpp b/source/obs/browser/obs-browser-panel.hpp index 29539b8d..62997848 100644 --- a/source/obs/browser/obs-browser-panel.hpp +++ b/source/obs/browser/obs-browser-panel.hpp @@ -52,11 +52,9 @@ namespace streamfx::obs { virtual bool initialized(void) = 0; virtual bool wait_for_browser_init(void) = 0; - virtual QCefWidget* create_widget(QWidget* parent, const std::string& url, - QCefCookieManager* cookie_manager = nullptr) = 0; + virtual QCefWidget* create_widget(QWidget* parent, const std::string& url, QCefCookieManager* cookie_manager = nullptr) = 0; - virtual QCefCookieManager* create_cookie_manager(const std::string& storage_path, - bool persist_session_cookies = false) = 0; + virtual QCefCookieManager* create_cookie_manager(const std::string& storage_path, bool persist_session_cookies = false) = 0; virtual BPtr get_cookie_path(const std::string& storage_path) = 0; diff --git a/source/obs/gs/gs-effect-parameter.cpp b/source/obs/gs/gs-effect-parameter.cpp index 19509e5a..9131250c 100644 --- a/source/obs/gs/gs-effect-parameter.cpp +++ b/source/obs/gs/gs-effect-parameter.cpp @@ -17,32 +17,27 @@ extern "C" { #include "warning-enable.hpp" } 
-streamfx::obs::gs::effect_parameter::effect_parameter() - : _effect_parent(nullptr), _pass_parent(nullptr), _param_parent(nullptr) +streamfx::obs::gs::effect_parameter::effect_parameter() : _effect_parent(nullptr), _pass_parent(nullptr), _param_parent(nullptr) { reset(); } -streamfx::obs::gs::effect_parameter::effect_parameter(gs_eparam_t* param) - : _effect_parent(nullptr), _pass_parent(nullptr), _param_parent(nullptr) +streamfx::obs::gs::effect_parameter::effect_parameter(gs_eparam_t* param) : _effect_parent(nullptr), _pass_parent(nullptr), _param_parent(nullptr) { reset(param, [](void*) {}); } -streamfx::obs::gs::effect_parameter::effect_parameter(gs_eparam_t* param, std::shared_ptr parent) - : effect_parameter(param) +streamfx::obs::gs::effect_parameter::effect_parameter(gs_eparam_t* param, std::shared_ptr parent) : effect_parameter(param) { _effect_parent = std::move(parent); } -streamfx::obs::gs::effect_parameter::effect_parameter(gs_eparam_t* param, std::shared_ptr parent) - : effect_parameter(param) +streamfx::obs::gs::effect_parameter::effect_parameter(gs_eparam_t* param, std::shared_ptr parent) : effect_parameter(param) { _pass_parent = std::move(parent); } -streamfx::obs::gs::effect_parameter::effect_parameter(gs_eparam_t* param, std::shared_ptr parent) - : effect_parameter(param) +streamfx::obs::gs::effect_parameter::effect_parameter(gs_eparam_t* param, std::shared_ptr parent) : effect_parameter(param) { _param_parent = std::move(parent); } diff --git a/source/obs/gs/gs-effect-pass.cpp b/source/obs/gs/gs-effect-pass.cpp index 980a8adb..8bf6c478 100644 --- a/source/obs/gs/gs-effect-pass.cpp +++ b/source/obs/gs/gs-effect-pass.cpp @@ -57,8 +57,7 @@ bool streamfx::obs::gs::effect_pass::has_vertex_parameter(std::string_view name) return (get_vertex_parameter(name) != nullptr); } -bool streamfx::obs::gs::effect_pass::has_vertex_parameter(std::string_view name, - streamfx::obs::gs::effect_parameter::type type) +bool streamfx::obs::gs::effect_pass::has_vertex_parameter(std::string_view name, streamfx::obs::gs::effect_parameter::type type) { if (auto el = get_vertex_parameter(name); el != nullptr) { return el.get_type() == type; @@ -94,8 +93,7 @@ bool streamfx::obs::gs::effect_pass::has_pixel_parameter(std::string_view name) return (get_pixel_parameter(name) != nullptr); } -bool streamfx::obs::gs::effect_pass::has_pixel_parameter(std::string_view name, - streamfx::obs::gs::effect_parameter::type type) +bool streamfx::obs::gs::effect_pass::has_pixel_parameter(std::string_view name, streamfx::obs::gs::effect_parameter::type type) { if (auto el = get_pixel_parameter(name); el != nullptr) { return el.get_type() == type; diff --git a/source/obs/gs/gs-effect-pass.hpp b/source/obs/gs/gs-effect-pass.hpp index 5b7253ea..6c79e742 100644 --- a/source/obs/gs/gs-effect-pass.hpp +++ b/source/obs/gs/gs-effect-pass.hpp @@ -25,12 +25,12 @@ namespace streamfx::obs::gs { streamfx::obs::gs::effect_parameter get_vertex_parameter(std::size_t idx); streamfx::obs::gs::effect_parameter get_vertex_parameter(std::string_view name); bool has_vertex_parameter(std::string_view name); - bool has_vertex_parameter(std::string_view name, streamfx::obs::gs::effect_parameter::type type); + bool has_vertex_parameter(std::string_view name, streamfx::obs::gs::effect_parameter::type type); std::size_t count_pixel_parameters(); streamfx::obs::gs::effect_parameter get_pixel_parameter(std::size_t idx); streamfx::obs::gs::effect_parameter get_pixel_parameter(std::string_view name); bool has_pixel_parameter(std::string_view 
name); - bool has_pixel_parameter(std::string_view name, streamfx::obs::gs::effect_parameter::type type); + bool has_pixel_parameter(std::string_view name, streamfx::obs::gs::effect_parameter::type type); }; } // namespace streamfx::obs::gs diff --git a/source/obs/gs/gs-effect-technique.cpp b/source/obs/gs/gs-effect-technique.cpp index bd6673e0..8d8e8621 100644 --- a/source/obs/gs/gs-effect-technique.cpp +++ b/source/obs/gs/gs-effect-technique.cpp @@ -16,8 +16,7 @@ extern "C" { #include "warning-enable.hpp" } -streamfx::obs::gs::effect_technique::effect_technique(gs_technique_t* technique, std::shared_ptr parent) - : _parent(parent) +streamfx::obs::gs::effect_technique::effect_technique(gs_technique_t* technique, std::shared_ptr parent) : _parent(parent) { reset(technique, [](void*) {}); } diff --git a/source/obs/gs/gs-effect.cpp b/source/obs/gs/gs-effect.cpp index ed3078b0..4894ff4e 100644 --- a/source/obs/gs/gs-effect.cpp +++ b/source/obs/gs/gs-effect.cpp @@ -93,17 +93,13 @@ streamfx::obs::gs::effect::effect(std::string_view code, std::string_view name) gs_effect_t* effect = gs_effect_create(code.data(), name.data(), &error_buffer); if (!effect) { - throw error_buffer ? std::runtime_error(error_buffer) - : std::runtime_error("Unknown error during effect compile."); + throw error_buffer ? std::runtime_error(error_buffer) : std::runtime_error("Unknown error during effect compile."); } reset(effect, [](gs_effect_t* ptr) { gs_effect_destroy(ptr); }); } -streamfx::obs::gs::effect::effect(std::filesystem::path file) - : effect(load_file_as_code(file), - streamfx::util::platform::utf8_to_native(std::filesystem::absolute(file)).generic_u8string()) -{} +streamfx::obs::gs::effect::effect(std::filesystem::path file) : effect(load_file_as_code(file), streamfx::util::platform::utf8_to_native(std::filesystem::absolute(file)).generic_u8string()) {} streamfx::obs::gs::effect::~effect() { diff --git a/source/obs/gs/gs-indexbuffer.cpp b/source/obs/gs/gs-indexbuffer.cpp index 17c8343c..c1b2c968 100644 --- a/source/obs/gs/gs-indexbuffer.cpp +++ b/source/obs/gs/gs-indexbuffer.cpp @@ -24,8 +24,7 @@ streamfx::obs::gs::index_buffer::index_buffer(index_buffer& other) : index_buffe std::copy(other.begin(), other.end(), this->end()); } -streamfx::obs::gs::index_buffer::index_buffer(std::vector& other) - : index_buffer(static_cast(other.size())) +streamfx::obs::gs::index_buffer::index_buffer(std::vector& other) : index_buffer(static_cast(other.size())) { std::copy(other.begin(), other.end(), this->end()); } diff --git a/source/obs/gs/gs-rendertarget.cpp b/source/obs/gs/gs-rendertarget.cpp index 51c4e456..69eae603 100644 --- a/source/obs/gs/gs-rendertarget.cpp +++ b/source/obs/gs/gs-rendertarget.cpp @@ -16,8 +16,7 @@ streamfx::obs::gs::rendertarget::~rendertarget() gs_texrender_destroy(_render_target); } -streamfx::obs::gs::rendertarget::rendertarget(gs_color_format colorFormat, gs_zstencil_format zsFormat) - : _color_format(colorFormat), _zstencil_format(zsFormat) +streamfx::obs::gs::rendertarget::rendertarget(gs_color_format colorFormat, gs_zstencil_format zsFormat) : _color_format(colorFormat), _zstencil_format(zsFormat) { _is_being_rendered = false; auto gctx = streamfx::obs::gs::context(); @@ -32,8 +31,7 @@ streamfx::obs::gs::rendertarget_op streamfx::obs::gs::rendertarget::render(uint3 return {this, width, height}; } -streamfx::obs::gs::rendertarget_op streamfx::obs::gs::rendertarget::render(uint32_t width, uint32_t height, - gs_color_space cs) +streamfx::obs::gs::rendertarget_op 
streamfx::obs::gs::rendertarget::render(uint32_t width, uint32_t height, gs_color_space cs) { return {this, width, height, cs}; } @@ -75,9 +73,7 @@ gs_zstencil_format streamfx::obs::gs::rendertarget::get_zstencil_format() return _zstencil_format; } -streamfx::obs::gs::rendertarget_op::rendertarget_op(streamfx::obs::gs::rendertarget* rt, uint32_t width, - uint32_t height) - : parent(rt) +streamfx::obs::gs::rendertarget_op::rendertarget_op(streamfx::obs::gs::rendertarget* rt, uint32_t width, uint32_t height) : parent(rt) { if (parent == nullptr) throw std::invalid_argument("rt"); @@ -92,9 +88,7 @@ streamfx::obs::gs::rendertarget_op::rendertarget_op(streamfx::obs::gs::rendertar parent->_is_being_rendered = true; } -streamfx::obs::gs::rendertarget_op::rendertarget_op(streamfx::obs::gs::rendertarget* rt, uint32_t width, - uint32_t height, gs_color_space cs) - : parent(rt) +streamfx::obs::gs::rendertarget_op::rendertarget_op(streamfx::obs::gs::rendertarget* rt, uint32_t width, uint32_t height, gs_color_space cs) : parent(rt) { if (parent == nullptr) throw std::invalid_argument("rt"); diff --git a/source/obs/gs/gs-sampler.cpp b/source/obs/gs/gs-sampler.cpp index 44d52be7..c5eb9438 100644 --- a/source/obs/gs/gs-sampler.cpp +++ b/source/obs/gs/gs-sampler.cpp @@ -85,8 +85,7 @@ void streamfx::obs::gs::sampler::set_border_color(uint32_t v) void streamfx::obs::gs::sampler::set_border_color(uint8_t r, uint8_t g, uint8_t b, uint8_t a) { _dirty = true; - _sampler_info.border_color = (static_cast(a) << 24) | (static_cast(r) << 16) - | (static_cast(g) << 8) | static_cast(b); + _sampler_info.border_color = (static_cast(a) << 24) | (static_cast(r) << 16) | (static_cast(g) << 8) | static_cast(b); } uint32_t streamfx::obs::gs::sampler::get_border_color() diff --git a/source/obs/gs/gs-texture.cpp b/source/obs/gs/gs-texture.cpp index 6f3cb205..332e00f9 100644 --- a/source/obs/gs/gs-texture.cpp +++ b/source/obs/gs/gs-texture.cpp @@ -25,8 +25,7 @@ static uint32_t decode_flags(streamfx::obs::gs::texture::flags texture_flags) return flags; } -streamfx::obs::gs::texture::texture(uint32_t width, uint32_t height, gs_color_format format, uint32_t mip_levels, - const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags) +streamfx::obs::gs::texture::texture(uint32_t width, uint32_t height, gs_color_format format, uint32_t mip_levels, const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags) { if (width == 0) throw std::logic_error("width must be at least 1"); @@ -52,9 +51,7 @@ streamfx::obs::gs::texture::texture(uint32_t width, uint32_t height, gs_color_fo _type = type::Normal; } -streamfx::obs::gs::texture::texture(uint32_t width, uint32_t height, uint32_t depth, gs_color_format format, - uint32_t mip_levels, const uint8_t** mip_data, - streamfx::obs::gs::texture::flags texture_flags) +streamfx::obs::gs::texture::texture(uint32_t width, uint32_t height, uint32_t depth, gs_color_format format, uint32_t mip_levels, const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags) { if (width == 0) throw std::logic_error("width must be at least 1"); @@ -66,18 +63,14 @@ streamfx::obs::gs::texture::texture(uint32_t width, uint32_t height, uint32_t de throw std::logic_error("mip_levels must be at least 1"); if (mip_levels > 1 || ((texture_flags & flags::BuildMipMaps) == flags::BuildMipMaps)) { - bool isPOT = - (streamfx::util::math::is_equal(pow(2, static_cast(floor(log(width) / log(2)))), width) - && streamfx::util::math::is_equal(pow(2, static_cast(floor(log(height) / log(2)))), 
height) - && streamfx::util::math::is_equal(pow(2, static_cast(floor(log(depth) / log(2)))), depth)); + bool isPOT = (streamfx::util::math::is_equal(pow(2, static_cast(floor(log(width) / log(2)))), width) && streamfx::util::math::is_equal(pow(2, static_cast(floor(log(height) / log(2)))), height) && streamfx::util::math::is_equal(pow(2, static_cast(floor(log(depth) / log(2)))), depth)); if (!isPOT) throw std::logic_error("mip mapping requires power of two dimensions"); } { auto gctx = streamfx::obs::gs::context(); - _texture = - gs_voltexture_create(width, height, depth, format, mip_levels, mip_data, decode_flags(texture_flags)); + _texture = gs_voltexture_create(width, height, depth, format, mip_levels, mip_data, decode_flags(texture_flags)); } if (!_texture) @@ -86,8 +79,7 @@ streamfx::obs::gs::texture::texture(uint32_t width, uint32_t height, uint32_t de _type = type::Volume; } -streamfx::obs::gs::texture::texture(uint32_t size, gs_color_format format, uint32_t mip_levels, - const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags) +streamfx::obs::gs::texture::texture(uint32_t size, gs_color_format format, uint32_t mip_levels, const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags) { if (size == 0) throw std::logic_error("size must be at least 1"); diff --git a/source/obs/gs/gs-texture.hpp b/source/obs/gs/gs-texture.hpp index 4f466d5e..aba7b6b8 100644 --- a/source/obs/gs/gs-texture.hpp +++ b/source/obs/gs/gs-texture.hpp @@ -36,8 +36,7 @@ namespace streamfx::obs::gs { * \param mip_data Texture data including mipmaps * \param texture_flags Texture Flags */ - texture(uint32_t width, uint32_t height, gs_color_format format, uint32_t mip_levels, const uint8_t** mip_data, - streamfx::obs::gs::texture::flags texture_flags); + texture(uint32_t width, uint32_t height, gs_color_format format, uint32_t mip_levels, const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags); /*! * \brief Create a 3D Texture @@ -50,8 +49,7 @@ namespace streamfx::obs::gs { * \param mip_data Texture data including mipmaps * \param texture_flags Texture Flags */ - texture(uint32_t width, uint32_t height, uint32_t depth, gs_color_format format, uint32_t mip_levels, - const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags); + texture(uint32_t width, uint32_t height, uint32_t depth, gs_color_format format, uint32_t mip_levels, const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags); /*! * \brief Create a Cube Texture @@ -62,8 +60,7 @@ namespace streamfx::obs::gs { * \param mip_data Texture data including mipmaps * \param texture_flags Texture Flags */ - texture(uint32_t size, gs_color_format format, uint32_t mip_levels, const uint8_t** mip_data, - streamfx::obs::gs::texture::flags texture_flags); + texture(uint32_t size, gs_color_format format, uint32_t mip_levels, const uint8_t** mip_data, streamfx::obs::gs::texture::flags texture_flags); /*! 
* \brief Load a texture from a file diff --git a/source/obs/gs/gs-vertex.cpp b/source/obs/gs/gs-vertex.cpp index 89ac20be..851e0e30 100644 --- a/source/obs/gs/gs-vertex.cpp +++ b/source/obs/gs/gs-vertex.cpp @@ -8,11 +8,9 @@ #include #include "warning-enable.hpp" -streamfx::obs::gs::vertex::vertex() - : position(nullptr), normal(nullptr), tangent(nullptr), color(nullptr), _has_store(true), _store(nullptr) +streamfx::obs::gs::vertex::vertex() : position(nullptr), normal(nullptr), tangent(nullptr), color(nullptr), _has_store(true), _store(nullptr) { - _store = - streamfx::util::malloc_aligned(16, sizeof(vec3) * 3 + sizeof(uint32_t) + sizeof(vec4) * MAXIMUM_UVW_LAYERS); + _store = streamfx::util::malloc_aligned(16, sizeof(vec3) * 3 + sizeof(uint32_t) + sizeof(vec4) * MAXIMUM_UVW_LAYERS); std::size_t offset = 0; @@ -41,8 +39,7 @@ streamfx::obs::gs::vertex::~vertex() } } -streamfx::obs::gs::vertex::vertex(vec3* p, vec3* n, vec3* t, uint32_t* col, vec4* uvs[MAXIMUM_UVW_LAYERS]) - : position(p), normal(n), tangent(t), color(col), _has_store(false) +streamfx::obs::gs::vertex::vertex(vec3* p, vec3* n, vec3* t, uint32_t* col, vec4* uvs[MAXIMUM_UVW_LAYERS]) : position(p), normal(n), tangent(t), color(col), _has_store(false) { if (uvs != nullptr) { for (std::size_t idx = 0; idx < MAXIMUM_UVW_LAYERS; idx++) { diff --git a/source/obs/gs/gs-vertexbuffer.cpp b/source/obs/gs/gs-vertexbuffer.cpp index 405a2940..0fb5e5ab 100644 --- a/source/obs/gs/gs-vertexbuffer.cpp +++ b/source/obs/gs/gs-vertexbuffer.cpp @@ -39,12 +39,10 @@ void streamfx::obs::gs::vertex_buffer::initialize(uint32_t capacity, uint8_t lay if (_layers == 0) { _data->tvarray = nullptr; } else { - _data->tvarray = _uv_layers = - static_cast<gs_tvertarray*>(streamfx::util::malloc_aligned(16, sizeof(gs_tvertarray) * _layers)); + _data->tvarray = _uv_layers = static_cast<gs_tvertarray*>(streamfx::util::malloc_aligned(16, sizeof(gs_tvertarray) * _layers)); for (uint8_t n = 0; n < _layers; n++) { - _uv_layers[n].array = _uvs[n] = - static_cast<vec4*>(streamfx::util::malloc_aligned(16, sizeof(vec4) * _capacity)); - _uv_layers[n].width = 4; + _uv_layers[n].array = _uvs[n] = static_cast<vec4*>(streamfx::util::malloc_aligned(16, sizeof(vec4) * _capacity)); + _uv_layers[n].width = 4; memset(_uvs[n], 0, sizeof(vec4) * _capacity); } } @@ -52,18 +50,17 @@ void streamfx::obs::gs::vertex_buffer::initialize(uint32_t capacity, uint8_t lay // Allocate actual GPU vertex buffer. { auto gctx = streamfx::obs::gs::context(); - _buffer = decltype(_buffer)(gs_vertexbuffer_create(_data.get(), GS_DYNAMIC | GS_DUP_BUFFER), - [this](gs_vertbuffer_t* v) { - try { - auto gctx = streamfx::obs::gs::context(); - gs_vertexbuffer_destroy(v); - } catch (...) { - if (obs_get_version() < MAKE_SEMANTIC_VERSION(26, 0, 0)) { - // Fixes a memory leak with OBS Studio versions older than 26.x. - gs_vbdata_destroy(_obs_data); - } - } - }); + _buffer = decltype(_buffer)(gs_vertexbuffer_create(_data.get(), GS_DYNAMIC | GS_DUP_BUFFER), [this](gs_vertbuffer_t* v) { + try { + auto gctx = streamfx::obs::gs::context(); + gs_vertexbuffer_destroy(v); + } catch (...) { + if (obs_get_version() < MAKE_SEMANTIC_VERSION(26, 0, 0)) { + // Fixes a memory leak with OBS Studio versions older than 26.x. 
+ gs_vbdata_destroy(_obs_data); + } + } + }); _obs_data = gs_vertexbuffer_get_data(_buffer.get()); } @@ -146,8 +143,7 @@ streamfx::obs::gs::vertex_buffer::vertex_buffer(gs_vertbuffer_t* vb) } } -streamfx::obs::gs::vertex_buffer::vertex_buffer(vertex_buffer const& other) - : vertex_buffer(other._capacity, other._layers) +streamfx::obs::gs::vertex_buffer::vertex_buffer(vertex_buffer const& other) : vertex_buffer(other._capacity, other._layers) { // Copy Constructor memcpy(_positions, other._positions, _capacity * sizeof(vec3)); memcpy(_normals, other._normals, _capacity * sizeof(vec3)); diff --git a/source/obs/obs-encoder-factory.hpp b/source/obs/obs-encoder-factory.hpp index 8a6f7a75..205cbc97 100644 --- a/source/obs/obs-encoder-factory.hpp +++ b/source/obs/obs-encoder-factory.hpp @@ -44,8 +44,7 @@ namespace streamfx::obs { return false; }; - virtual bool encode_video(uint32_t handle, int64_t pts, uint64_t lock_key, uint64_t* next_key, - struct encoder_packet* packet, bool* received_packet) + virtual bool encode_video(uint32_t handle, int64_t pts, uint64_t lock_key, uint64_t* next_key, struct encoder_packet* packet, bool* received_packet) { return false; }; @@ -118,10 +117,9 @@ namespace streamfx::obs { _info.encode_texture = _encode_texture; memcpy(&_info_fallback, &_info, sizeof(obs_encoder_info)); - _info_fallback_id = std::string(_info.id) + "_sw"; - _info_fallback.id = _info_fallback_id.c_str(); - _info_fallback.caps = - (_info_fallback.caps & ~OBS_ENCODER_CAP_PASS_TEXTURE) | OBS_ENCODER_CAP_DEPRECATED; + _info_fallback_id = std::string(_info.id) + "_sw"; + _info_fallback.id = _info_fallback_id.c_str(); + _info_fallback.caps = (_info_fallback.caps & ~OBS_ENCODER_CAP_PASS_TEXTURE) | OBS_ENCODER_CAP_DEPRECATED; _info_fallback.create = _create; _info_fallback.encode_texture = nullptr; obs_register_encoder(&_info_fallback); @@ -218,8 +216,7 @@ namespace streamfx::obs { } } - static bool _properties_migrate_settings(void* priv, obs_properties_t*, obs_property_t* p, - obs_data_t* settings) noexcept + static bool _properties_migrate_settings(void* priv, obs_properties_t*, obs_property_t* p, obs_data_t* settings) noexcept { try { obs_property_set_visible(p, false); @@ -238,13 +235,10 @@ namespace streamfx::obs { { try { if (type_data) { - auto props = - reinterpret_cast(type_data)->get_properties2(reinterpret_cast(data)); + auto props = reinterpret_cast(type_data)->get_properties2(reinterpret_cast(data)); { // Support for permanent settings migration. 
- auto p = obs_properties_add_int( - props, S_VERSION, "If you can see this, something went horribly wrong.", - std::numeric_limits::lowest(), std::numeric_limits::max(), 1); + auto p = obs_properties_add_int(props, S_VERSION, "If you can see this, something went horribly wrong.", std::numeric_limits::lowest(), std::numeric_limits::max(), 1); obs_property_set_modified_callback2(p, _properties_migrate_settings, type_data); } @@ -291,8 +285,7 @@ namespace streamfx::obs { } } - static bool _encode(void* data, struct encoder_frame* frame, struct encoder_packet* packet, - bool* received_packet) noexcept + static bool _encode(void* data, struct encoder_frame* frame, struct encoder_packet* packet, bool* received_packet) noexcept { try { if (data) @@ -307,13 +300,11 @@ namespace streamfx::obs { } } - static bool _encode_texture(void* data, uint32_t handle, int64_t pts, uint64_t lock_key, uint64_t* next_key, - struct encoder_packet* packet, bool* received_packet) noexcept + static bool _encode_texture(void* data, uint32_t handle, int64_t pts, uint64_t lock_key, uint64_t* next_key, struct encoder_packet* packet, bool* received_packet) noexcept { try { if (data) - return reinterpret_cast(data)->encode_video(handle, pts, lock_key, next_key, - packet, received_packet); + return reinterpret_cast(data)->encode_video(handle, pts, lock_key, next_key, packet, received_packet); return false; } catch (const std::exception& ex) { DLOG_ERROR("Unexpected exception in function '%s': %s.", __FUNCTION_NAME__, ex.what()); diff --git a/source/obs/obs-source-active-child.hpp b/source/obs/obs-source-active-child.hpp index 268aff8a..641d5420 100644 --- a/source/obs/obs-source-active-child.hpp +++ b/source/obs/obs-source-active-child.hpp @@ -22,8 +22,7 @@ namespace streamfx::obs { obs_source_remove_active_child(parent, child); } } - source_active_child(::streamfx::obs::source const& parent, ::streamfx::obs::source const& child) - : _parent(parent), _child(child) + source_active_child(::streamfx::obs::source const& parent, ::streamfx::obs::source const& child) : _parent(parent), _child(child) { if (::streamfx::obs::tools::source_find_source(child, parent)) { throw std::runtime_error("Child contains Parent"); diff --git a/source/obs/obs-source-active-reference.hpp b/source/obs/obs-source-active-reference.hpp index facbf920..d7faa324 100644 --- a/source/obs/obs-source-active-reference.hpp +++ b/source/obs/obs-source-active-reference.hpp @@ -25,8 +25,7 @@ namespace streamfx::obs { } public: - static FORCE_INLINE std::shared_ptr - add_active_reference(::streamfx::obs::source& source) + static FORCE_INLINE std::shared_ptr add_active_reference(::streamfx::obs::source& source) { return std::make_shared(source); } diff --git a/source/obs/obs-source-factory.hpp b/source/obs/obs-source-factory.hpp index e10ef0b1..91f83a85 100644 --- a/source/obs/obs-source-factory.hpp +++ b/source/obs/obs-source-factory.hpp @@ -302,13 +302,11 @@ namespace streamfx::obs { } } - static bool _audio_render(void* data, uint64_t* ts_out, struct obs_source_audio_mix* audio_output, - uint32_t mixers, std::size_t channels, std::size_t sample_rate) noexcept + static bool _audio_render(void* data, uint64_t* ts_out, struct obs_source_audio_mix* audio_output, uint32_t mixers, std::size_t channels, std::size_t sample_rate) noexcept { try { if (data) - return reinterpret_cast<_instance*>(data)->audio_render(ts_out, audio_output, mixers, channels, - sample_rate); + return reinterpret_cast<_instance*>(data)->audio_render(ts_out, audio_output, mixers, channels, 
sample_rate); return false; } catch (const std::exception& ex) { DLOG_ERROR("Unexpected exception in function '%s': %s.", __FUNCTION_NAME__, ex.what()); @@ -319,8 +317,7 @@ namespace streamfx::obs { } } - static bool _audio_mix(void* data, uint64_t* ts_out, struct audio_output_data* audio_output, - std::size_t channels, std::size_t sample_rate) noexcept + static bool _audio_mix(void* data, uint64_t* ts_out, struct audio_output_data* audio_output, std::size_t channels, std::size_t sample_rate) noexcept { try { if (data) @@ -583,8 +580,7 @@ namespace streamfx::obs { } } - static void _mouse_click(void* data, const obs_mouse_event* event, int32_t type, bool mouse_up, - uint32_t click_count) noexcept + static void _mouse_click(void* data, const obs_mouse_event* event, int32_t type, bool mouse_up, uint32_t click_count) noexcept { try { if (data) @@ -869,14 +865,12 @@ namespace streamfx::obs { return audio; } - virtual bool audio_render(uint64_t* ts_out, struct obs_source_audio_mix* audio_output, uint32_t mixers, - std::size_t channels, std::size_t sample_rate) + virtual bool audio_render(uint64_t* ts_out, struct obs_source_audio_mix* audio_output, uint32_t mixers, std::size_t channels, std::size_t sample_rate) { return false; } - virtual bool audio_mix(uint64_t* ts_out, struct audio_output_data* audio_output, std::size_t channels, - std::size_t sample_rate) + virtual bool audio_mix(uint64_t* ts_out, struct audio_output_data* audio_output, std::size_t channels, std::size_t sample_rate) { return false; } diff --git a/source/obs/obs-source-showing-reference.hpp b/source/obs/obs-source-showing-reference.hpp index 791dc580..c28f82eb 100644 --- a/source/obs/obs-source-showing-reference.hpp +++ b/source/obs/obs-source-showing-reference.hpp @@ -26,8 +26,7 @@ namespace streamfx::obs { } public: - static FORCE_INLINE std::shared_ptr - add_showing_reference(::streamfx::obs::source& source) + static FORCE_INLINE std::shared_ptr add_showing_reference(::streamfx::obs::source& source) { return std::make_shared(source); } diff --git a/source/obs/obs-source-tracker.cpp b/source/obs/obs-source-tracker.cpp index b0c04334..278327f6 100644 --- a/source/obs/obs-source-tracker.cpp +++ b/source/obs/obs-source-tracker.cpp @@ -132,8 +132,7 @@ void streamfx::obs::source_tracker::remove_source(obs_source_t* source) } } -void streamfx::obs::source_tracker::rename_source(std::string_view old_name, std::string_view new_name, - obs_source_t* source) +void streamfx::obs::source_tracker::rename_source(std::string_view old_name, std::string_view new_name, obs_source_t* source) { if (old_name == new_name) { throw std::runtime_error("New and old name are identical."); diff --git a/source/obs/obs-source.hpp b/source/obs/obs-source.hpp index 3727212e..defdc68e 100644 --- a/source/obs/obs-source.hpp +++ b/source/obs/obs-source.hpp @@ -26,8 +26,7 @@ namespace streamfx::obs { * @param source The source object to reference. * @param add_reference Should we increment the reference counter (duplicate ownership) or leave as it is (transfer ownership)? */ - FORCE_INLINE source(obs_source_t* source, bool duplicate_reference = false, bool take_ownership = true) - : _is_owner(take_ownership) + FORCE_INLINE source(obs_source_t* source, bool duplicate_reference = false, bool take_ownership = true) : _is_owner(take_ownership) { if (duplicate_reference) { _ref = obs_source_get_ref(source); @@ -49,8 +48,7 @@ namespace streamfx::obs { * * Attention: May fail. 
*/ - FORCE_INLINE source(std::string_view id, std::string_view name, obs_data_t* settings, obs_data_t* hotkeys) - : _is_owner(true) + FORCE_INLINE source(std::string_view id, std::string_view name, obs_data_t* settings, obs_data_t* hotkeys) : _is_owner(true) { _ref = obs_source_create(id.data(), name.data(), settings, hotkeys); if (!_ref) { @@ -541,8 +539,7 @@ namespace streamfx::obs { * * EXPORT void obs_source_process_filter_tech_end(obs_source_t *filter, gs_effect_t *effect, uint32_t width, uint32_t height, const char *tech_name); */ - FORCE_INLINE void process_filter_tech_end(gs_effect_t* effect, uint32_t width, uint32_t height, - std::string_view tech_name) + FORCE_INLINE void process_filter_tech_end(gs_effect_t* effect, uint32_t width, uint32_t height, std::string_view tech_name) { obs_source_process_filter_tech_end(_ref, effect, width, height, tech_name.data()); }; @@ -605,7 +602,6 @@ namespace streamfx::obs { } public /* ToDo */: - /** * * EXPORT obs_missing_files_t* obs_source_get_missing_files(const obs_source_t *source); diff --git a/source/obs/obs-weak-source.hpp b/source/obs/obs-weak-source.hpp index cb3e988c..d6046703 100644 --- a/source/obs/obs-weak-source.hpp +++ b/source/obs/obs-weak-source.hpp @@ -67,8 +67,7 @@ namespace streamfx::obs { */ FORCE_INLINE weak_source(std::string_view name) { - std::shared_ptr<obs_source_t> ref{obs_get_source_by_name(name.data()), - [](obs_source_t* v) { obs_source_release(v); }}; + std::shared_ptr<obs_source_t> ref{obs_get_source_by_name(name.data()), [](obs_source_t* v) { obs_source_release(v); }}; if (!ref) { throw std::invalid_argument("Parameter 'name' does not define an valid source."); } diff --git a/source/sources/source-mirror.cpp b/source/sources/source-mirror.cpp index bd7501a9..bbf49d9a 100644 --- a/source/sources/source-mirror.cpp +++ b/source/sources/source-mirror.cpp @@ -85,9 +85,7 @@ mirror_audio_data::mirror_audio_data(const audio_data* audio, speaker_layout lay } } -mirror_instance::mirror_instance(obs_data_t* settings, obs_source_t* self) - : obs::source_instance(settings, self), _source(), _source_child(), _signal_rename(), _audio_enabled(false), - _audio_layout(SPEAKERS_UNKNOWN) +mirror_instance::mirror_instance(obs_data_t* settings, obs_source_t* self) : obs::source_instance(settings, self), _source(), _source_child(), _signal_rename(), _audio_enabled(false), _audio_layout(SPEAKERS_UNKNOWN) { update(settings); } @@ -152,8 +150,7 @@ void mirror_instance::video_render(gs_effect_t* effect) return; #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Source Mirror '%s' for '%s'", - obs_source_get_name(_self), obs_source_get_name(_source.get())}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Source Mirror '%s' for '%s'", obs_source_get_name(_self), obs_source_get_name(_source.get())}; #endif _source_size.first = obs_source_get_width(_source.get()); @@ -197,8 +194,7 @@ void mirror_instance::acquire(std::string source_name) // Listen to any audio the source spews out. if (_audio_enabled) { _signal_audio = std::make_shared(_source); - _signal_audio->event.add(std::bind(&mirror_instance::on_audio, this, std::placeholders::_1, - std::placeholders::_2, std::placeholders::_3)); + _signal_audio->event.add(std::bind(&mirror_instance::on_audio, this, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3)); } } catch (...) 
{ release(); @@ -322,14 +318,12 @@ obs_properties_t* mirror_factory::get_properties2(mirror_instance* data) #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::source::mirror::mirror_factory::on_manual_open, nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::source::mirror::mirror_factory::on_manual_open, nullptr); } #endif { - p = obs_properties_add_list(pr, ST_KEY_SOURCE, D_TRANSLATE(ST_I18N_SOURCE), OBS_COMBO_TYPE_LIST, - OBS_COMBO_FORMAT_STRING); + p = obs_properties_add_list(pr, ST_KEY_SOURCE, D_TRANSLATE(ST_I18N_SOURCE), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING); obs_property_set_modified_callback(p, modified_properties); obs_property_list_add_string(p, "", ""); @@ -357,24 +351,15 @@ obs_properties_t* mirror_factory::get_properties2(mirror_instance* data) } { - p = obs_properties_add_list(pr, ST_KEY_SOURCE_AUDIO_LAYOUT, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT), - OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Unknown)), - static_cast(SPEAKERS_UNKNOWN)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Mono)), - static_cast(SPEAKERS_MONO)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Stereo)), - static_cast(SPEAKERS_STEREO)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(StereoLFE)), - static_cast(SPEAKERS_2POINT1)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Quadraphonic)), - static_cast(SPEAKERS_4POINT0)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(QuadraphonicLFE)), - static_cast(SPEAKERS_4POINT1)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Surround)), - static_cast(SPEAKERS_5POINT1)); - obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(FullSurround)), - static_cast(SPEAKERS_7POINT1)); + p = obs_properties_add_list(pr, ST_KEY_SOURCE_AUDIO_LAYOUT, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Unknown)), static_cast(SPEAKERS_UNKNOWN)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Mono)), static_cast(SPEAKERS_MONO)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Stereo)), static_cast(SPEAKERS_STEREO)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(StereoLFE)), static_cast(SPEAKERS_2POINT1)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Quadraphonic)), static_cast(SPEAKERS_4POINT0)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(QuadraphonicLFE)), static_cast(SPEAKERS_4POINT1)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(Surround)), static_cast(SPEAKERS_5POINT1)); + obs_property_list_add_int(p, D_TRANSLATE(ST_I18N_SOURCE_AUDIO_LAYOUT_(FullSurround)), static_cast(SPEAKERS_7POINT1)); } return pr; diff --git a/source/sources/source-shader.cpp b/source/sources/source-shader.cpp index f647468f..6083a295 100644 --- a/source/sources/source-shader.cpp +++ b/source/sources/source-shader.cpp @@ -30,8 +30,7 @@ using namespace streamfx::source::shader; -static constexpr std::string_view HELP_URL = - "https://github.com/Xaymar/obs-StreamFX/wiki/Source-Filter-Transition-Shader"; +static constexpr std::string_view HELP_URL = 
"https://github.com/Xaymar/obs-StreamFX/wiki/Source-Filter-Transition-Shader"; shader_instance::shader_instance(obs_data_t* data, obs_source_t* self) : obs::source_instance(data, self), _fx() { @@ -87,8 +86,7 @@ void shader_instance::video_render(gs_effect_t* effect) } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Shader Source '%s'", - obs_source_get_name(_self)}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Shader Source '%s'", obs_source_get_name(_self)}; #endif _fx->prepare_render(); @@ -146,8 +144,7 @@ obs_properties_t* shader_factory::get_properties2(shader_instance* data) #ifdef ENABLE_FRONTEND { - obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::source::shader::shader_factory::on_manual_open, nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::source::shader::shader_factory::on_manual_open, nullptr); } #endif diff --git a/source/transitions/transition-shader.cpp b/source/transitions/transition-shader.cpp index ba25f8c6..763d2711 100644 --- a/source/transitions/transition-shader.cpp +++ b/source/transitions/transition-shader.cpp @@ -30,8 +30,7 @@ using namespace streamfx::transition::shader; -static constexpr std::string_view HELP_URL = - "https://github.com/Xaymar/obs-StreamFX/wiki/Source-Filter-Transition-Shader"; +static constexpr std::string_view HELP_URL = "https://github.com/Xaymar/obs-StreamFX/wiki/Source-Filter-Transition-Shader"; shader_instance::shader_instance(obs_data_t* data, obs_source_t* self) : obs::source_instance(data, self) { @@ -88,14 +87,10 @@ void shader_instance::video_render(gs_effect_t* effect) } #if defined(ENABLE_PROFILING) && !defined(D_PLATFORM_MAC) && _DEBUG - streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Shader Transition '%s'", - obs_source_get_name(_self)}; + streamfx::obs::gs::debug_marker gdmp{streamfx::obs::gs::debug_color_source, "Shader Transition '%s'", obs_source_get_name(_self)}; #endif - obs_transition_video_render(_self, - [](void* data, gs_texture_t* a, gs_texture_t* b, float t, uint32_t cx, uint32_t cy) { - reinterpret_cast(data)->transition_render(a, b, t, cx, cy); - }); + obs_transition_video_render(_self, [](void* data, gs_texture_t* a, gs_texture_t* b, float t, uint32_t cx, uint32_t cy) { reinterpret_cast(data)->transition_render(a, b, t, cx, cy); }); } void shader_instance::transition_render(gs_texture_t* a, gs_texture_t* b, float_t t, uint32_t cx, uint32_t cy) @@ -108,12 +103,10 @@ void shader_instance::transition_render(gs_texture_t* a, gs_texture_t* b, float_ _fx->render(nullptr); } -bool shader_instance::audio_render(uint64_t* ts_out, obs_source_audio_mix* audio_output, uint32_t mixers, - std::size_t channels, std::size_t sample_rate) +bool shader_instance::audio_render(uint64_t* ts_out, obs_source_audio_mix* audio_output, uint32_t mixers, std::size_t channels, std::size_t sample_rate) { return obs_transition_audio_render( - _self, ts_out, audio_output, mixers, channels, sample_rate, [](void*, float_t t) { return 1.0f - t; }, - [](void*, float_t t) { return t; }); + _self, ts_out, audio_output, mixers, channels, sample_rate, [](void*, float_t t) { return 1.0f - t; }, [](void*, float_t t) { return t; }); } void shader_instance::transition_start() @@ -158,8 +151,7 @@ obs_properties_t* shader_factory::get_properties2(shader::shader_instance* data) #ifdef ENABLE_FRONTEND { - 
obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), - streamfx::transition::shader::shader_factory::on_manual_open, nullptr); + obs_properties_add_button2(pr, S_MANUAL_OPEN, D_TRANSLATE(S_MANUAL_OPEN), streamfx::transition::shader::shader_factory::on_manual_open, nullptr); } #endif diff --git a/source/transitions/transition-shader.hpp b/source/transitions/transition-shader.hpp index 6063dd1a..0d19dfdc 100644 --- a/source/transitions/transition-shader.hpp +++ b/source/transitions/transition-shader.hpp @@ -30,15 +30,13 @@ namespace streamfx::transition::shader { void transition_render(gs_texture_t* a, gs_texture_t* b, float_t t, uint32_t cx, uint32_t cy); - virtual bool audio_render(uint64_t* ts_out, struct obs_source_audio_mix* audio_output, uint32_t mixers, - std::size_t channels, std::size_t sample_rate) override; + virtual bool audio_render(uint64_t* ts_out, struct obs_source_audio_mix* audio_output, uint32_t mixers, std::size_t channels, std::size_t sample_rate) override; virtual void transition_start() override; virtual void transition_stop() override; }; - class shader_factory : public obs::source_factory { + class shader_factory : public obs::source_factory { public: shader_factory(); virtual ~shader_factory(); diff --git a/source/ui/ui-about-entry.cpp b/source/ui/ui-about-entry.cpp index 62104457..35f93527 100644 --- a/source/ui/ui-about-entry.cpp +++ b/source/ui/ui-about-entry.cpp @@ -8,8 +8,7 @@ constexpr std::string_view i18n_role_contributor = "UI.About.Role.Contributor"; constexpr std::string_view i18n_role_translator = "UI.About.Role.Translator"; constexpr std::string_view i18n_role_supporter = "UI.About.Role.Supporter"; -streamfx::ui::about_entry::about_entry(QWidget* parent, const streamfx::ui::about::entry& entry) - : QWidget(parent), _link() +streamfx::ui::about_entry::about_entry(QWidget* parent, const streamfx::ui::about::entry& entry) : QWidget(parent), _link() { setupUi(this); diff --git a/source/ui/ui-about.cpp b/source/ui/ui-about.cpp index bb29aecc..633a622f 100644 --- a/source/ui/ui-about.cpp +++ b/source/ui/ui-about.cpp @@ -73,8 +73,7 @@ streamfx::ui::about::about() : QDialog(reinterpret_cast(obs_frontend_g auto kvs = iter->items(); for (auto kv : kvs) { D_LOG_DEBUG(" '%s' => '%s'", kv.key().c_str(), kv.value().get<std::string>().c_str()); - entries.push_back( - ui::about::entry{kv.key(), role_type::CONTRIBUTOR, "", kv.value().get<std::string>()}); + entries.push_back(ui::about::entry{kv.key(), role_type::CONTRIBUTOR, "", kv.value().get<std::string>()}); } } if (auto iter = data.find("translator"); iter != data.end()) { @@ -92,8 +91,7 @@ streamfx::ui::about::about() : QDialog(reinterpret_cast(obs_frontend_g auto kvs = iter2->items(); for (auto kv : kvs) { D_LOG_DEBUG(" '%s' => '%s'", kv.key().c_str(), kv.value().get<std::string>().c_str()); - entries.push_back( - ui::about::entry{kv.key(), role_type::SUPPORTER, "GitHub", kv.value().get<std::string>()}); + entries.push_back(ui::about::entry{kv.key(), role_type::SUPPORTER, "GitHub", kv.value().get<std::string>()}); } } if (auto iter2 = data2.find("patreon"); iter2 != data2.end()) { @@ -101,8 +99,7 @@ streamfx::ui::about::about() : QDialog(reinterpret_cast(obs_frontend_g auto kvs = iter2->items(); for (auto kv : kvs) { D_LOG_DEBUG(" '%s' => '%s'", kv.key().c_str(), kv.value().get<std::string>().c_str()); - entries.push_back( - ui::about::entry{kv.key(), role_type::SUPPORTER, "Patreon", kv.value().get<std::string>()}); + entries.push_back(ui::about::entry{kv.key(), role_type::SUPPORTER, "Patreon", kv.value().get<std::string>()}); } } } diff --git a/source/ui/ui-updater.cpp b/source/ui/ui-updater.cpp index 
3107c5ec..4b9e2c1a 100644 --- a/source/ui/ui-updater.cpp +++ b/source/ui/ui-updater.cpp @@ -83,10 +83,7 @@ void streamfx::ui::updater_dialog::on_cancel() reject(); } -streamfx::ui::updater::updater(QMenu* menu) - : _updater(), _dialog(nullptr), _gdpr(nullptr), _cfu(nullptr), _cfu_auto(nullptr), _channel(nullptr), - _channel_menu(nullptr), _channel_stable(nullptr), _channel_candidate(nullptr), _channel_beta(nullptr), - _channel_alpha(nullptr), _channel_group(nullptr) +streamfx::ui::updater::updater(QMenu* menu) : _updater(), _dialog(nullptr), _gdpr(nullptr), _cfu(nullptr), _cfu_auto(nullptr), _channel(nullptr), _channel_menu(nullptr), _channel_stable(nullptr), _channel_candidate(nullptr), _channel_beta(nullptr), _channel_alpha(nullptr), _channel_group(nullptr) { // Create dialog. _dialog = new updater_dialog(); @@ -134,23 +131,16 @@ streamfx::ui::updater::updater(QMenu* menu) } // Connect internal signals. - connect(this, &streamfx::ui::updater::autoupdate_changed, this, &streamfx::ui::updater::on_autoupdate_changed, - Qt::QueuedConnection); - connect(this, &streamfx::ui::updater::channel_changed, this, &streamfx::ui::updater::on_channel_changed, - Qt::QueuedConnection); - connect(this, &streamfx::ui::updater::update_detected, this, &streamfx::ui::updater::on_update_detected, - Qt::QueuedConnection); - connect(this, &streamfx::ui::updater::check_active, this, &streamfx::ui::updater::on_check_active, - Qt::QueuedConnection); + connect(this, &streamfx::ui::updater::autoupdate_changed, this, &streamfx::ui::updater::on_autoupdate_changed, Qt::QueuedConnection); + connect(this, &streamfx::ui::updater::channel_changed, this, &streamfx::ui::updater::on_channel_changed, Qt::QueuedConnection); + connect(this, &streamfx::ui::updater::update_detected, this, &streamfx::ui::updater::on_update_detected, Qt::QueuedConnection); + connect(this, &streamfx::ui::updater::check_active, this, &streamfx::ui::updater::on_check_active, Qt::QueuedConnection); { // Retrieve the updater object and listen to it. _updater = streamfx::updater::instance(); - _updater->events.automation_changed.add(std::bind(&streamfx::ui::updater::on_updater_automation_changed, this, - std::placeholders::_1, std::placeholders::_2)); - _updater->events.channel_changed.add(std::bind(&streamfx::ui::updater::on_updater_channel_changed, this, - std::placeholders::_1, std::placeholders::_2)); - _updater->events.refreshed.add( - std::bind(&streamfx::ui::updater::on_updater_refreshed, this, std::placeholders::_1)); + _updater->events.automation_changed.add(std::bind(&streamfx::ui::updater::on_updater_automation_changed, this, std::placeholders::_1, std::placeholders::_2)); + _updater->events.channel_changed.add(std::bind(&streamfx::ui::updater::on_updater_channel_changed, this, std::placeholders::_1, std::placeholders::_2)); + _updater->events.refreshed.add(std::bind(&streamfx::ui::updater::on_updater_refreshed, this, std::placeholders::_1)); // Sync with updater information. 
 		emit autoupdate_changed(_updater->is_automated());
diff --git a/source/ui/ui.cpp b/source/ui/ui.cpp
index ffe74fa1..8b23878b 100644
--- a/source/ui/ui.cpp
+++ b/source/ui/ui.cpp
@@ -278,8 +278,7 @@ streamfx::ui::translator::translator(QObject* parent) {}
 
 streamfx::ui::translator::~translator() {}
 
-QString streamfx::ui::translator::translate(const char* context, const char* sourceText, const char* disambiguation,
-		int n) const
+QString streamfx::ui::translator::translate(const char* context, const char* sourceText, const char* disambiguation, int n) const
 {
 	if (sourceText) {
 		std::string_view sourceView{sourceText};
diff --git a/source/ui/ui.hpp b/source/ui/ui.hpp
index 071c40e7..380195fb 100644
--- a/source/ui/ui.hpp
+++ b/source/ui/ui.hpp
@@ -75,8 +75,7 @@ namespace streamfx::ui {
 		translator(QObject* parent = nullptr);
 		~translator();
 
-		virtual QString translate(const char* context, const char* sourceText, const char* disambiguation = nullptr,
-				int n = -1) const override;
+		virtual QString translate(const char* context, const char* sourceText, const char* disambiguation = nullptr, int n = -1) const override;
 	};
 
 } // namespace streamfx::ui
diff --git a/source/updater.cpp b/source/updater.cpp
index fcca3652..0a2293dc 100644
--- a/source/updater.cpp
+++ b/source/updater.cpp
@@ -77,9 +77,7 @@ void streamfx::from_json(const nlohmann::json& json, version_stage& stage)
 	stage = stage_from_string(json.get());
 }
 
-streamfx::version_info::version_info()
-	: major(0), minor(0), patch(0), tweak(0), stage(version_stage::STABLE), url(""), name("")
-{}
+streamfx::version_info::version_info() : major(0), minor(0), patch(0), tweak(0), stage(version_stage::STABLE), url(""), name("") {}
 
 streamfx::version_info::version_info(const std::string text) : version_info()
 {
@@ -89,11 +87,9 @@ streamfx::version_info::version_info(const std::string text) : version_info()
 	// 0.0.0b0 (Testing)
 	// 0.0.0c0 (Testing)
 	// 0.0.0_0 (Development)
-	static const std::regex re_version(
-		"([0-9]+)\\.([0-9]+)\\.([0-9]+)(([\\._abc]{1,1})([0-9]+|)|)(-g([0-9a-fA-F]{8,8})|)");
-	std::smatch matches;
-	if (std::regex_match(text, matches, re_version,
-			std::regex_constants::match_any | std::regex_constants::match_continuous)) {
+	static const std::regex re_version("([0-9]+)\\.([0-9]+)\\.([0-9]+)(([\\._abc]{1,1})([0-9]+|)|)(-g([0-9a-fA-F]{8,8})|)");
+	std::smatch matches;
+	if (std::regex_match(text, matches, re_version, std::regex_constants::match_any | std::regex_constants::match_continuous)) {
 		major = static_cast(strtoul(matches[1].str().c_str(), nullptr, 10));
 		minor = static_cast(strtoul(matches[2].str().c_str(), nullptr, 10));
 		patch = static_cast(strtoul(matches[3].str().c_str(), nullptr, 10));
@@ -217,8 +213,7 @@ streamfx::version_info::operator std::string()
 	std::vector buffer(25, 0);
 	if (stage != version_stage::STABLE) {
 		auto types = stage_to_string(stage);
-		int len = snprintf(buffer.data(), buffer.size(), "%" PRIu16 ".%" PRIu16 ".%" PRIu16 "%.1s%" PRIu16, major,
-				minor, patch, types.data(), tweak);
+		int len = snprintf(buffer.data(), buffer.size(), "%" PRIu16 ".%" PRIu16 ".%" PRIu16 "%.1s%" PRIu16, major, minor, patch, types.data(), tweak);
 		return std::string(buffer.data(), buffer.data() + len);
 	} else {
 		int len = snprintf(buffer.data(), buffer.size(), "%" PRIu16 ".%" PRIu16 ".%" PRIu16, major, minor, patch);
@@ -230,8 +225,7 @@ void streamfx::updater::task(streamfx::util::threadpool::task_data_t)
 {
 	try {
 		auto query_fn = [](std::vector& buffer) {
-			static constexpr std::string_view ST_API_URL =
-				"https://api.github.com/repos/Xaymar/obs-StreamFX/releases?per_page=25&page=1";
+			static constexpr std::string_view ST_API_URL = "https://api.github.com/repos/Xaymar/obs-StreamFX/releases?per_page=25&page=1";
 
 			streamfx::util::curl curl;
 			size_t buffer_offset = 0;
@@ -242,7 +236,7 @@ void streamfx::updater::task(streamfx::util::threadpool::task_data_t)
 
 			// Set up request.
 			curl.set_option(CURLOPT_HTTPGET, true); // GET
-			curl.set_option(CURLOPT_POST, false);   // Not POST
+			curl.set_option(CURLOPT_POST, false); // Not POST
 			curl.set_option(CURLOPT_URL, ST_API_URL);
 			curl.set_option(CURLOPT_TIMEOUT, 30); // 10s until we fail.
 
@@ -344,10 +338,8 @@ void streamfx::updater::task(streamfx::util::threadpool::task_data_t)
 
 		// Print all update information to the log file.
 		D_LOG_INFO("Current Version: %s", static_cast(_current_info).c_str());
-		D_LOG_INFO("Latest Stable Version: %s",
-				static_cast(get_update_info(version_stage::STABLE)).c_str());
-		D_LOG_INFO("Latest Candidate Version: %s",
-				static_cast(get_update_info(version_stage::CANDIDATE)).c_str());
+		D_LOG_INFO("Latest Stable Version: %s", static_cast(get_update_info(version_stage::STABLE)).c_str());
+		D_LOG_INFO("Latest Candidate Version: %s", static_cast(get_update_info(version_stage::CANDIDATE)).c_str());
 		D_LOG_INFO("Latest Beta Version: %s", static_cast(get_update_info(version_stage::BETA)).c_str());
 		D_LOG_INFO("Latest Alpha Version: %s", static_cast(get_update_info(version_stage::ALPHA)).c_str());
 		if (is_update_available()) {
@@ -368,7 +360,7 @@ bool streamfx::updater::can_check()
 #ifdef _DEBUG
 	return true;
 #else
-	auto now = std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch());
+	auto now = std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch());
 	auto threshold = (_lastcheckedat + std::chrono::minutes(10));
 	return (now > threshold);
 #endif
@@ -493,8 +485,7 @@ void streamfx::updater::refresh()
 	std::lock_guard lock(_lock);
 
 	// Update last checked time.
-	_lastcheckedat =
-		std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch());
+	_lastcheckedat = std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch());
 	save();
 
 	// Spawn a new task.
diff --git a/source/updater.hpp b/source/updater.hpp
index 5cd4405f..33b83a13 100644
--- a/source/updater.hpp
+++ b/source/updater.hpp
@@ -16,10 +16,10 @@ namespace streamfx {
 
 	enum class version_stage : uint8_t {
-		STABLE,    // A.B.C
+		STABLE, // A.B.C
 		CANDIDATE, // A.B.CcD
-		BETA,      // A.B.CbD
-		ALPHA,     // A.B.CaD
+		BETA, // A.B.CbD
+		ALPHA, // A.B.CaD
 	};
 
 	version_stage stage_from_string(std::string_view str);
 	std::string_view stage_to_string(version_stage t);
diff --git a/source/util/util-curl.cpp b/source/util/util-curl.cpp
index 638542e4..e954b794 100644
--- a/source/util/util-curl.cpp
+++ b/source/util/util-curl.cpp
@@ -9,8 +9,7 @@
 #include
 #include "warning-enable.hpp"
 
-int32_t streamfx::util::curl::debug_helper(CURL* handle, curl_infotype type, char* data, size_t size,
-		streamfx::util::curl* self)
+int32_t streamfx::util::curl::debug_helper(CURL* handle, curl_infotype type, char* data, size_t size, streamfx::util::curl* self)
 {
 	if (self->_debug_callback) {
 		self->_debug_callback(handle, type, data, size);
@@ -18,8 +17,7 @@ int32_t streamfx::util::curl::debug_helper(CURL* handle, curl_infotype type, cha
 #ifdef _DEBUG_CURL
 	std::stringstream hd;
 	for (size_t n = 0; n < size; n++) {
-		hd << std::uppercase << std::setfill('0') << std::setw(2) << std::hex << static_cast(data[n])
-		   << " ";
+		hd << std::uppercase << std::setfill('0') << std::setw(2) << std::hex << static_cast(data[n]) << " ";
 		if (n % 16 == 15) {
 			hd << "\n ";
 		}
@@ -72,12 +70,10 @@ size_t streamfx::util::curl::write_helper(void* ptr, size_t size, size_t count,
 	}
 }
 
-int32_t streamfx::util::curl::xferinfo_callback(streamfx::util::curl* self, curl_off_t dlt, curl_off_t dln,
-		curl_off_t ult, curl_off_t uln)
+int32_t streamfx::util::curl::xferinfo_callback(streamfx::util::curl* self, curl_off_t dlt, curl_off_t dln, curl_off_t ult, curl_off_t uln)
 {
 	if (self->_xferinfo_callback) {
-		return self->_xferinfo_callback(static_cast(dlt), static_cast(dln),
-				static_cast(ult), static_cast(uln));
+		return self->_xferinfo_callback(static_cast(dlt), static_cast(dln), static_cast(ult), static_cast(uln));
 	} else {
 		return 0;
 	}
diff --git a/source/util/util-curl.hpp b/source/util/util-curl.hpp
index f243de4c..0c0c74ee 100644
--- a/source/util/util-curl.hpp
+++ b/source/util/util-curl.hpp
@@ -32,8 +32,7 @@ namespace streamfx::util {
 		curl_debug_callback_t _debug_callback;
 		std::map _headers;
 
-		static int32_t debug_helper(CURL* handle, curl_infotype type, char* data, size_t size,
-				streamfx::util::curl* userptr);
+		static int32_t debug_helper(CURL* handle, curl_infotype type, char* data, size_t size, streamfx::util::curl* userptr);
 		static size_t read_helper(void*, size_t, size_t, streamfx::util::curl*);
 		static size_t write_helper(void*, size_t, size_t, streamfx::util::curl*);
 		static int32_t xferinfo_callback(streamfx::util::curl*, curl_off_t, curl_off_t, curl_off_t, curl_off_t);
diff --git a/source/util/util-event.hpp b/source/util/util-event.hpp
index 05dc8980..c90f6cc4 100644
--- a/source/util/util-event.hpp
+++ b/source/util/util-event.hpp
@@ -44,7 +44,6 @@ namespace streamfx::util {
 		}
 
 		public /* operators */:
-
 		/* Copy Operator */
 		event<_args...>& operator=(const event<_args...>&) = delete;
 
@@ -78,7 +77,6 @@ namespace streamfx::util {
 		}
 
 		public /* functions: listeners */:
-
 		/** Add a new listener to the event.
 		 * @param listener A listener bound with std::bind or a std::function.
 		 */
@@ -147,7 +145,6 @@ namespace streamfx::util {
 		}
 
 		public /* callbacks */:
-
 		void set_listen_callback(std::function cb)
 		{
 			std::lock_guard lg(_lock);
diff --git a/source/util/util-library.cpp b/source/util/util-library.cpp
index 3ee65db7..7c733e9c 100644
--- a/source/util/util-library.cpp
+++ b/source/util/util-library.cpp
@@ -36,8 +36,7 @@ streamfx::util::library::library(std::filesystem::path file) : _library(nullptr)
 		DWORD error = GetLastError();
 		if (error != ERROR_PROC_NOT_FOUND) {
 			PSTR message = NULL;
-			FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS | FORMAT_MESSAGE_ALLOCATE_BUFFER,
-					NULL, error, MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US), (LPSTR)&message, 0, NULL);
+			FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS | FORMAT_MESSAGE_ALLOCATE_BUFFER, NULL, error, MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US), (LPSTR)&message, 0, NULL);
 			if (message) {
 				ex = message;
 				LocalFree(message);
diff --git a/source/util/util-logging.hpp b/source/util/util-logging.hpp
index ae373d6c..5cfb189f 100644
--- a/source/util/util-logging.hpp
+++ b/source/util/util-logging.hpp
@@ -36,8 +36,8 @@ namespace streamfx::util::logging {
 
 	enum class level {
 		LEVEL_DEBUG, // Debug information, which is not necessary to know at runtime.
-		LEVEL_INFO,  // Runtime information, which may or may not be needed for support.
-		LEVEL_WARN,  // Warnings, which should be respected and fixed.
+		LEVEL_INFO, // Runtime information, which may or may not be needed for support.
+		LEVEL_WARN, // Warnings, which should be respected and fixed.
 		LEVEL_ERROR, // Errors that must be fixed.
 	};
 
diff --git a/source/util/util-platform.cpp b/source/util/util-platform.cpp
index e85abcaf..a3e2b5a0 100644
--- a/source/util/util-platform.cpp
+++ b/source/util/util-platform.cpp
@@ -28,8 +28,7 @@ std::string streamfx::util::platform::native_to_utf8(std::wstring const& v)
 {
 	std::vector buffer((v.length() + 1) * 4, 0);
 
-	int res = WideCharToMultiByte(CP_UTF8, 0, v.c_str(), static_cast(v.length()), buffer.data(),
-			static_cast(buffer.size()), nullptr, nullptr);
+	int res = WideCharToMultiByte(CP_UTF8, 0, v.c_str(), static_cast(v.length()), buffer.data(), static_cast(buffer.size()), nullptr, nullptr);
 	if (res == 0) {
 		D_LOG_WARNING("Failed to convert '%ls' to UTF-8 format.", v.c_str());
 		throw std::runtime_error("Failed to convert Windows-native to UTF-8.");
@@ -49,8 +48,7 @@ std::wstring streamfx::util::platform::utf8_to_native(std::string const& v)
 {
 	std::vector buffer(v.length() + 1, 0);
 
-	int res = MultiByteToWideChar(CP_UTF8, 0, v.c_str(), static_cast(v.length()), buffer.data(),
-			static_cast(buffer.size()));
+	int res = MultiByteToWideChar(CP_UTF8, 0, v.c_str(), static_cast(v.length()), buffer.data(), static_cast(buffer.size()));
 	if (res == 0) {
 		D_LOG_WARNING("Failed to convert '%s' to native format.", v.c_str());
 		throw std::runtime_error("Failed to convert UTF-8 to Windows-native.");
diff --git a/source/util/util-threadpool.cpp b/source/util/util-threadpool.cpp
index 2850d39a..0bde3f88 100644
--- a/source/util/util-threadpool.cpp
+++ b/source/util/util-threadpool.cpp
@@ -34,9 +34,7 @@
 #define D_LOG_DEBUG(...) P_LOG_DEBUG(ST_PREFIX __VA_ARGS__)
 #endif
 
-streamfx::util::threadpool::task::task(task_callback_t callback, task_data_t data)
-	: _callback(callback), _data(data), _lock(), _status_changed(), _cancelled(false), _completed(false), _failed(false)
-{}
+streamfx::util::threadpool::task::task(task_callback_t callback, task_data_t data) : _callback(callback), _data(data), _lock(), _status_changed(), _cancelled(false), _completed(false), _failed(false) {}
 
 streamfx::util::threadpool::task::~task() {}
 
@@ -85,8 +83,7 @@ void streamfx::util::threadpool::task::wait()
 {
 	std::unique_lock ul(_lock);
 	if (!_cancelled && !_completed && !_failed) {
-		_status_changed.wait(ul,
-				[this]() { return this->is_completed() || this->is_cancelled() || this->has_failed(); });
+		_status_changed.wait(ul, [this]() { return this->is_completed() || this->is_cancelled() || this->has_failed(); });
 	}
 }
 
@@ -122,15 +119,13 @@ streamfx::util::threadpool::threadpool::~threadpool()
 	}
 }
 
-streamfx::util::threadpool::threadpool::threadpool(size_t minimum, size_t maximum)
-	: _limits{minimum, maximum}, _workers_lock(), _worker_count(0), _workers(), _tasks_lock(), _tasks_cv(), _tasks()
+streamfx::util::threadpool::threadpool::threadpool(size_t minimum, size_t maximum) : _limits{minimum, maximum}, _workers_lock(), _worker_count(0), _workers(), _tasks_lock(), _tasks_cv(), _tasks()
 {
 	// Spawn the minimum number of threads.
 	spawn(_limits.first);
 }
 
-std::shared_ptr
-	streamfx::util::threadpool::threadpool::push(task_callback_t callback, task_data_t data /*= nullptr*/)
+std::shared_ptr streamfx::util::threadpool::threadpool::push(task_callback_t callback, task_data_t data /*= nullptr*/)
 {
 	std::lock_guard lg(_tasks_lock);
 	constexpr size_t threshold = 3;
@@ -168,8 +163,7 @@ void streamfx::util::threadpool::threadpool::spawn(size_t count)
 			wi->thread.detach();
 			_workers.emplace_back(wi);
 			++_worker_count;
-			D_LOG_DEBUG("Spawning new worker thread (%zu < %zu < %zu).", _limits.first, _worker_count.load(),
-					_limits.second);
+			D_LOG_DEBUG("Spawning new worker thread (%zu < %zu < %zu).", _limits.first, _worker_count.load(), _limits.second);
 		}
 	}
 
@@ -188,8 +182,7 @@ bool streamfx::util::threadpool::threadpool::die(std::shared_ptr wi
 			_last_worker_death = now;
 			--_worker_count;
 			_workers.remove(wi);
-			D_LOG_DEBUG("Terminated idle worker thread (%zu < %zu < %zu).", _limits.first, _worker_count.load(),
-					_limits.second);
+			D_LOG_DEBUG("Terminated idle worker thread (%zu < %zu < %zu).", _limits.first, _worker_count.load(), _limits.second);
 		}
 	}
 
@@ -218,10 +211,7 @@ void streamfx::util::threadpool::threadpool::work(std::shared_ptr w
 
 		// Is there any work available right now?
 		if (_tasks.size() == 0) { // If not:
			// Block this thread until it is notified of a change.
-			_tasks_cv.wait_until(
-				ul,
-				std::chrono::time_point(std::chrono::high_resolution_clock::now() + std::chrono::milliseconds(250)),
-				[this, wi]() { return wi->stop || _tasks.size() > 0; });
+			_tasks_cv.wait_until(ul, std::chrono::time_point(std::chrono::high_resolution_clock::now() + std::chrono::milliseconds(250)), [this, wi]() { return wi->stop || _tasks.size() > 0; });
 
 		// If we were asked to stop, skip everything.
diff --git a/source/util/utility.hpp b/source/util/utility.hpp
index ccd82537..0b63c260 100644
--- a/source/util/utility.hpp
+++ b/source/util/utility.hpp
@@ -22,11 +22,11 @@ extern "C" {
 }
 
 // Constants
-#define S_PI 3.1415926535897932384626433832795        // PI = pi
-#define S_PI2 6.283185307179586476925286766559        // 2PI = 2 * pi
+#define S_PI 3.1415926535897932384626433832795 // PI = pi
+#define S_PI2 6.283185307179586476925286766559 // 2PI = 2 * pi
 #define S_PI2_SQROOT 2.506628274631000502415765284811 // sqrt(2 * pi)
-#define S_RAD 57.295779513082320876798154814105       // 180/pi
-#define S_DEG 0.01745329251994329576923690768489      // pi/180
+#define S_RAD 57.295779513082320876798154814105 // 180/pi
+#define S_DEG 0.01745329251994329576923690768489 // pi/180
 #define D_DEG_TO_RAD(x) (x * S_DEG)
 #define D_RAD_TO_DEG(x) (x * S_RAD)
 
@@ -160,8 +160,7 @@ namespace streamfx::util {
 	template
 	inline bool is_equal(T target, C value)
 	{
-		return (target > (value - std::numeric_limits::epsilon()))
-			   && (target < (value + std::numeric_limits::epsilon()));
+		return (target > (value - std::numeric_limits::epsilon())) && (target < (value + std::numeric_limits::epsilon()));
 	}
 
 	template
@@ -218,21 +217,14 @@ namespace streamfx::util {
 		T _k_kalman_gain;
 
 		public:
-		kalman1D()
-			: _q_process_noise_covariance(0), _r_measurement_noise_covariance(0), _x_value_of_interest(0),
-			  _p_estimation_error_covariance(0), _k_kalman_gain(0.0)
-		{}
-		kalman1D(T pnc, T mnc, T eec, T value)
-			: _q_process_noise_covariance(pnc), _r_measurement_noise_covariance(mnc), _x_value_of_interest(value),
-			  _p_estimation_error_covariance(eec), _k_kalman_gain(0.0)
-		{}
+		kalman1D() : _q_process_noise_covariance(0), _r_measurement_noise_covariance(0), _x_value_of_interest(0), _p_estimation_error_covariance(0), _k_kalman_gain(0.0) {}
+		kalman1D(T pnc, T mnc, T eec, T value) : _q_process_noise_covariance(pnc), _r_measurement_noise_covariance(mnc), _x_value_of_interest(value), _p_estimation_error_covariance(eec), _k_kalman_gain(0.0) {}
 		~kalman1D() = default;
 
 		T filter(T measurement)
 		{
 			_p_estimation_error_covariance += _q_process_noise_covariance;
-			_k_kalman_gain =
-				_p_estimation_error_covariance / (_p_estimation_error_covariance + _r_measurement_noise_covariance);
+			_k_kalman_gain = _p_estimation_error_covariance / (_p_estimation_error_covariance + _r_measurement_noise_covariance);
 			_x_value_of_interest += _k_kalman_gain * (measurement - _x_value_of_interest);
 			_p_estimation_error_covariance = (1 - _k_kalman_gain) * _p_estimation_error_covariance;
 			return _x_value_of_interest;