libobs: Add deinterlacing API functions

Adds deinterlacing API functions.  Both standard and 2x variants are
supported.  Deinterlacing is set via obs_source_set_deinterlace_mode and
obs_source_set_deinterlace_field_order.

This was implemented into the core itself because deinterlacing should
happen before effect filters are processed, but after async filters are
processed.  If this were added as a filter, there is the possibility
that a different filter is processed before deinterlacing, which could
mess with the result.  It was also a bit easier to implement this way
due to the fact that deinterlacing may need to have access to the
previous async frame.

Effects were split into separate files to reduce load time (especially
for yadif shaders which take a significant amount of time to compile).
This commit is contained in:
jp9000 2016-03-15 20:39:36 -07:00
parent 11d9a8f3e4
commit 07c644c581
15 changed files with 1025 additions and 5 deletions

View file

@ -273,6 +273,7 @@ set(libobs_libobs_SOURCES
obs-encoder.c
obs-service.c
obs-source.c
obs-source-deinterlace.c
obs-source-transition.c
obs-output.c
obs-output-delay.c

View file

@ -0,0 +1,292 @@
/*
* Copyright (c) 2015 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
// Deinterlacing base effect: shared uniforms, sampler state, and vertex
// layout used by all the deinterlace_*.effect technique wrappers.
uniform float4x4 ViewProj;
// Current async video frame.
uniform texture2d image;
// YUV -> RGB conversion matrix, used by the DrawMatrix techniques.
uniform float4x4 color_matrix;
uniform float3 color_range_min = {0.0, 0.0, 0.0};
uniform float3 color_range_max = {1.0, 1.0, 1.0};
// Previous async video frame (for temporal filtering).
uniform texture2d previous_image;
// Frame size in pixels; used to convert UVs to integer texel coordinates.
uniform float2 dimensions;
// Set from the source's deinterlace_top_first flag (1 = top field first).
uniform int field_order;
// True when rendering the second output frame of a "2x" (double-rate) mode.
uniform bool frame2;
sampler_state textureSampler {
Filter = Linear;
AddressU = Clamp;
AddressV = Clamp;
};
struct VertData {
float4 pos : POSITION;
float2 uv : TEXCOORD0;
};
// Builds a Texture2D.Load coordinate: texel offset by (x, y), mip level 0.
int3 select(int2 texel, int x, int y)
{
return int3(texel + int2(x, y), 0);
}
// Fetches from the previous frame at an (x, y) offset from 'texel'.
float4 load_at_prev(int2 texel, int x, int y)
{
return previous_image.Load(select(texel, x, y));
}
// Fetches from the current frame at an (x, y) offset from 'texel'.
float4 load_at_image(int2 texel, int x, int y)
{
return image.Load(select(texel, x, y));
}
// Fetches from the current (field == 0) or previous (field != 0) frame.
float4 load_at(int2 texel, int x, int y, int field)
{
if(field == 0)
return load_at_image(texel, x, y);
else
return load_at_prev(texel, x, y);
}
// YADIF edge-directed interpolation helpers.
//
// YADIF_UPDATE: per channel, keeps the lowest directional score seen so
// far and records the corresponding diagonal prediction.
// YADIF_CHECK_ONE / YADIF_CHECK: score a candidate direction 'level' by
// summing absolute differences along that diagonal between the rows
// above and below the missing line, then update via YADIF_UPDATE.
//
// NOTE: YADIF_UPDATE deliberately expands to an *unbalanced* '{'; the
// matching '}' is supplied at each use site inside YADIF_CHECK (the
// trailing '}' after each YADIF_CHECK_ONE call).  Do not insert lines
// inside these macro bodies -- the line continuations are significant.
#define YADIF_UPDATE(c, level) \
if(score.c < spatial_score.c) \
{ \
spatial_score.c = score.c; \
spatial_pred.c = (load_at(texel, level, -1, field) + load_at(texel, -level, 1, field)).c / 2; \
#define YADIF_CHECK_ONE(level, c) \
{ \
float4 score = abs(load_at(texel, -1 + level, 1, field) - load_at(texel, -1 - level, -1, field)) + \
abs(load_at(texel, level, 1, field) - load_at(texel, -level, -1, field)) + \
abs(load_at(texel, 1 + level, 1, field) - load_at(texel, 1 - level, -1, field)); \
YADIF_UPDATE(c, level) } \
}
#define YADIF_CHECK(level) \
{ \
float4 score = abs(load_at(texel, -1 + level, 1, field) - load_at(texel, -1 - level, -1, field)) + \
abs(load_at(texel, level, 1, field) - load_at(texel, -level, -1, field)) + \
abs(load_at(texel, 1 + level, 1, field) - load_at(texel, 1 - level, -1, field)); \
YADIF_UPDATE(r, level) YADIF_CHECK_ONE(level * 2, r) } \
YADIF_UPDATE(g, level) YADIF_CHECK_ONE(level * 2, g) } \
YADIF_UPDATE(b, level) YADIF_CHECK_ONE(level * 2, b) } \
YADIF_UPDATE(a, level) YADIF_CHECK_ONE(level * 2, a) } \
}
// YADIF deinterlacing for one texel.  Rows whose parity matches 'field'
// are passed through unchanged; missing rows are reconstructed by
// picking the lowest-scoring edge direction and clamping the spatial
// prediction by the local temporal difference.  mode0 enables the extra
// two-rows-away check (yadif "mode 0"); otherwise the cheaper clamp
// ("mode 2") is used.
float4 texel_at_yadif(int2 texel, int field, bool mode0)
{
if((texel.y % 2) == field)
return load_at(texel, 0, 0, field);
// Average of previous and current frames at the given offset.
#define YADIF_AVG(x_off, y_off) ((load_at_prev(texel, x_off, y_off) + load_at_image(texel, x_off, y_off))/2)
// c/e: rows above/below in the chosen field; d: temporal average here.
float4 c = load_at(texel, 0, 1, field),
d = YADIF_AVG(0, 0),
e = load_at(texel, 0, -1, field);
// Temporal activity around this texel; bounds the spatial prediction.
float4 temporal_diff0 = (abs(load_at_prev(texel, 0, 0) - load_at_image(texel, 0, 0))) / 2,
temporal_diff1 = (abs(load_at_prev(texel, 0, 1) - c) + abs(load_at_prev(texel, 0, -1) - e)) / 2,
temporal_diff2 = (abs(load_at_image(texel, 0, 1) - c) + abs(load_at_image(texel, 0, -1) - e)) / 2,
diff = max(temporal_diff0, max(temporal_diff1, temporal_diff2));
// Start with the plain vertical average, then try diagonal directions.
float4 spatial_pred = (c + e) / 2,
spatial_score = abs(load_at(texel, -1, 1, field) - load_at(texel, -1, -1, field)) +
abs(c - e) +
abs(load_at(texel, 1, 1, field) - load_at(texel, 1, -1, field)) - 1;
YADIF_CHECK(-1)
YADIF_CHECK(1)
if (mode0) {
// Mode 0: widen the temporal bound using rows two above/below.
float4 b = YADIF_AVG(0, 2),
f = YADIF_AVG(0, -2);
float4 max_ = max(d - e, max(d - c, min(b - c, f - e))),
min_ = min(d - e, min(d - c, max(b - c, f - e)));
diff = max(diff, max(min_, -max_));
} else {
diff = max(diff, max(min(d - e, d - c), -max(d - e, d - c)));
}
// Clamp each channel of the prediction to d +/- diff.  Do not insert
// lines inside this macro body (line continuations are significant).
#define YADIF_SPATIAL(c) \
{ \
if(spatial_pred.c > d.c + diff.c) \
spatial_pred.c = d.c + diff.c; \
else if(spatial_pred.c < d.c - diff.c) \
spatial_pred.c = d.c - diff.c; \
}
YADIF_SPATIAL(r)
YADIF_SPATIAL(g)
YADIF_SPATIAL(b)
YADIF_SPATIAL(a)
return spatial_pred;
}
// Double-rate YADIF: the second output frame of each pair samples the
// opposite field, so both fields of the input get rendered.
float4 texel_at_yadif_2x(int2 texel, int field, bool mode0)
{
	if (frame2)
		field = 1 - field;
	return texel_at_yadif(texel, field, mode0);
}
// Discard: line-doubles the selected field of the current frame.
// (texel.y + field) % 2 is 0 on rows that are kept and 1 on rows that
// instead sample their neighbor one row down.
float4 texel_at_discard(int2 texel, int field)
{
return load_at_image(texel, 0, (texel.y + field) % 2);
}
// Double-rate discard ("retro"): second output frame uses the other field.
float4 texel_at_discard_2x(int2 texel, int field)
{
field = frame2 ? (1 - field) : field;
return texel_at_discard(texel, field);
}
// Blend: averages each row of the current frame with the row below it.
// 'field' is intentionally unused here.
float4 texel_at_blend(int2 texel, int field)
{
return (load_at_image(texel, 0, 0) + load_at_image(texel, 0, 1)) / 2;
}
// Double-rate blend: the first output frame blends the current frame with
// the previous frame's neighboring row; the second blends within the
// current frame only.
float4 texel_at_blend_2x(int2 texel, int field)
{
if (!frame2)
return (load_at_image(texel, 0, 0) +
load_at_prev(texel, 0, 1)) / 2;
else
return (load_at_image(texel, 0, 0) +
load_at_image(texel, 0, 1)) / 2;
}
// Linear: keeps rows of the selected field and interpolates the missing
// rows from the lines directly above and below.
float4 texel_at_linear(int2 texel, int field)
{
if ((texel.y % 2) == field)
return load_at_image(texel, 0, 0);
return (load_at_image(texel, 0, -1) + load_at_image(texel, 0, 1)) / 2;
}
// Double-rate linear: second output frame interpolates the other field.
float4 texel_at_linear_2x(int2 texel, int field)
{
field = frame2 ? (1 - field) : field;
return texel_at_linear(texel, field);
}
// Yadif/discard hybrid: averages yadif mode-0 with the discard result.
float4 texel_at_yadif_discard(int2 texel, int field)
{
return (texel_at_yadif(texel, field, true) + texel_at_discard(texel, field)) / 2;
}
// Double-rate yadif/discard hybrid.
float4 texel_at_yadif_discard_2x(int2 texel, int field)
{
field = frame2 ? (1 - field) : field;
return (texel_at_yadif(texel, field, true) + texel_at_discard(texel, field)) / 2;
}
// Converts normalized UV coordinates to integer texel coordinates.
int2 pixel_uv(float2 uv)
{
return int2(uv * dimensions);
}
// Pixel-shader entry points.  Each pairs one deinterlacer with the
// configured field order; the "_2x" variants render at double rate
// (driven by the 'frame2' uniform).
float4 PSYadifMode0RGBA(VertData v_in) : TARGET
{
return texel_at_yadif(pixel_uv(v_in.uv), field_order, true);
}
float4 PSYadifMode0RGBA_2x(VertData v_in) : TARGET
{
return texel_at_yadif_2x(pixel_uv(v_in.uv), field_order, true);
}
float4 PSYadifMode2RGBA(VertData v_in) : TARGET
{
return texel_at_yadif(pixel_uv(v_in.uv), field_order, false);
}
float4 PSYadifMode2RGBA_2x(VertData v_in) : TARGET
{
return texel_at_yadif_2x(pixel_uv(v_in.uv), field_order, false);
}
float4 PSYadifDiscardRGBA(VertData v_in) : TARGET
{
return texel_at_yadif_discard(pixel_uv(v_in.uv), field_order);
}
float4 PSYadifDiscardRGBA_2x(VertData v_in) : TARGET
{
return texel_at_yadif_discard_2x(pixel_uv(v_in.uv), field_order);
}
float4 PSLinearRGBA(VertData v_in) : TARGET
{
return texel_at_linear(pixel_uv(v_in.uv), field_order);
}
float4 PSLinearRGBA_2x(VertData v_in) : TARGET
{
return texel_at_linear_2x(pixel_uv(v_in.uv), field_order);
}
float4 PSDiscardRGBA(VertData v_in) : TARGET
{
return texel_at_discard(pixel_uv(v_in.uv), field_order);
}
float4 PSDiscardRGBA_2x(VertData v_in) : TARGET
{
return texel_at_discard_2x(pixel_uv(v_in.uv), field_order);
}
float4 PSBlendRGBA(VertData v_in) : TARGET
{
return texel_at_blend(pixel_uv(v_in.uv), field_order);
}
float4 PSBlendRGBA_2x(VertData v_in) : TARGET
{
return texel_at_blend_2x(pixel_uv(v_in.uv), field_order);
}
// Standard pass-through vertex shader.
VertData VSDefault(VertData v_in)
{
VertData vert_out;
vert_out.pos = mul(float4(v_in.pos.xyz, 1.0), ViewProj);
vert_out.uv = v_in.uv;
return vert_out;
}
// TECHNIQUE(rgba_ps, matrix_ps): instantiates the two techniques each
// wrapper effect file exposes:
//   Draw       - direct RGBA path using rgba_ps
//   DrawMatrix - defines matrix_ps, which runs rgba_ps, clamps to the
//                configured color range, and applies color_matrix
// Do not insert lines inside the macro body (line continuations).
#define TECHNIQUE(rgba_ps, matrix_ps) \
technique Draw \
{ \
pass \
{ \
vertex_shader = VSDefault(v_in); \
pixel_shader = rgba_ps(v_in); \
} \
} \
float4 matrix_ps(VertData v_in) : TARGET \
{ \
float4 yuv = rgba_ps(v_in); \
yuv.xyz = clamp(yuv.xyz, color_range_min, color_range_max); \
return saturate(mul(float4(yuv.xyz, 1.0), color_matrix)); \
} \
\
technique DrawMatrix \
{ \
pass \
{ \
vertex_shader = VSDefault(v_in); \
pixel_shader = matrix_ps(v_in); \
} \
}

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
// Blend deinterlacer: averages the two fields of the current frame.
TECHNIQUE(PSBlendRGBA, PSBlendMatrix);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
// Double-rate blend deinterlacer techniques (Draw / DrawMatrix).
TECHNIQUE(PSBlendRGBA_2x, PSBlendMatrix_2x);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
// Discard (line-doubling) deinterlacer techniques (Draw / DrawMatrix).
TECHNIQUE(PSDiscardRGBA, PSDiscardMatrix);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
// Double-rate discard ("retro") deinterlacer techniques.
TECHNIQUE(PSDiscardRGBA_2x, PSDiscardMatrix_2x);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
// Linear-interpolation deinterlacer techniques (Draw / DrawMatrix).
TECHNIQUE(PSLinearRGBA, PSLinearMatrix);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
/* Double-rate linear deinterlacer techniques.  The matrix shader name is
 * normalized to PSLinearMatrix_2x to match the PS<Mode>Matrix_2x naming
 * used by every other wrapper effect (was the garbled
 * "PSLinearxMatrixA_2x"; the name is defined here by TECHNIQUE, so this
 * is a consistency fix with no behavior change). */
TECHNIQUE(PSLinearRGBA_2x, PSLinearMatrix_2x);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
// YADIF (mode 0) deinterlacer techniques (Draw / DrawMatrix).
TECHNIQUE(PSYadifMode0RGBA, PSYadifMode0Matrix);

View file

@ -0,0 +1,21 @@
/*
* Copyright (c) 2016 Ruwen Hahn <palana@stunned.de>
* John R. Bradley <jrb@turrettech.com>
* Hugh Bailey "Jim" <obs.jim@gmail.com>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "deinterlace_base.effect"
// Double-rate YADIF (mode 0) deinterlacer techniques.
TECHNIQUE(PSYadifMode0RGBA_2x, PSYadifMode0Matrix_2x);

View file

@ -262,6 +262,15 @@ struct obs_core_video {
enum obs_scale_type scale_type;
gs_texture_t *transparent_texture;
gs_effect_t *deinterlace_discard_effect;
gs_effect_t *deinterlace_discard_2x_effect;
gs_effect_t *deinterlace_linear_effect;
gs_effect_t *deinterlace_linear_2x_effect;
gs_effect_t *deinterlace_blend_effect;
gs_effect_t *deinterlace_blend_2x_effect;
gs_effect_t *deinterlace_yadif_effect;
gs_effect_t *deinterlace_yadif_2x_effect;
};
struct obs_core_audio {
@ -591,6 +600,18 @@ struct obs_source {
uint32_t async_convert_width;
uint32_t async_convert_height;
/* async video deinterlacing */
uint64_t deinterlace_offset;
uint64_t deinterlace_frame_ts;
gs_effect_t *deinterlace_effect;
struct obs_source_frame *prev_async_frame;
gs_texture_t *async_prev_texture;
gs_texrender_t *async_prev_texrender;
uint32_t deinterlace_half_duration;
enum obs_deinterlace_mode deinterlace_mode;
bool deinterlace_top_first;
bool deinterlace_rendered;
/* filters */
struct obs_source *filter_parent;
struct obs_source *filter_target;
@ -718,6 +739,12 @@ extern bool set_async_texture_size(struct obs_source *source,
extern void remove_async_frame(obs_source_t *source,
struct obs_source_frame *frame);
extern void set_deinterlace_texture_size(obs_source_t *source);
extern void deinterlace_process_last_frame(obs_source_t *source,
uint64_t sys_time);
extern void deinterlace_update_async_video(obs_source_t *source);
extern void deinterlace_render(obs_source_t *s);
/* ------------------------------------------------------------------------- */
/* outputs */

View file

@ -0,0 +1,443 @@
/******************************************************************************
Copyright (C) 2016 by Hugh Bailey <obs.jim@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
#include "obs-internal.h"
/* Advances the async frame queue so the frame closest to the current
 * playback position is at the front, flagging the frame just before it
 * with prev_frame = true so the deinterlacer gets a current/previous
 * pair.  Returns true when frames are ready to be consumed by
 * deinterlace_get_closest_frames(). */
static bool ready_deinterlace_frames(obs_source_t *source, uint64_t sys_time)
{
struct obs_source_frame *next_frame = source->async_frames.array[0];
struct obs_source_frame *prev_frame = NULL;
struct obs_source_frame *frame = NULL;
uint64_t sys_offset = sys_time - source->last_sys_timestamp;
uint64_t frame_time = next_frame->timestamp;
uint64_t frame_offset = 0;
size_t idx = 1;
/* Unbuffered: keep only the two newest frames and use them directly. */
if ((source->flags & OBS_SOURCE_FLAG_UNBUFFERED) != 0) {
while (source->async_frames.num > 2) {
da_erase(source->async_frames, 0);
remove_async_frame(source, next_frame);
next_frame = source->async_frames.array[0];
}
if (source->async_frames.num == 2)
source->async_frames.array[0]->prev_frame = true;
source->deinterlace_offset = 0;
return true;
}
/* account for timestamp invalidation */
if (frame_out_of_bounds(source, frame_time)) {
source->last_frame_ts = next_frame->timestamp;
source->deinterlace_offset = 0;
return true;
} else {
frame_offset = frame_time - source->last_frame_ts;
source->last_frame_ts += sys_offset;
}
while (source->last_frame_ts > next_frame->timestamp) {
/* this tries to reduce the needless frame duplication, also
* helps smooth out async rendering to frame boundaries. In
* other words, tries to keep the framerate as smooth as
* possible */
/* 2000000 ns = 2 ms tolerance before skipping ahead */
if ((source->last_frame_ts - next_frame->timestamp) < 2000000)
break;
if (prev_frame) {
da_erase(source->async_frames, 0);
remove_async_frame(source, prev_frame);
}
/* Keep at least one pair queued; when down to two frames, stop
* advancing unless a fresh pair can still be formed. */
if (source->async_frames.num <= 2) {
bool exit = true;
if (prev_frame) {
prev_frame->prev_frame = true;
} else if (!frame && source->async_frames.num == 2) {
exit = false;
}
if (exit) {
source->deinterlace_offset = 0;
return true;
}
}
if (frame)
idx = 2;
else
idx = 1;
prev_frame = frame;
frame = next_frame;
next_frame = source->async_frames.array[idx];
/* more timestamp checking and compensating */
if ((next_frame->timestamp - frame_time) > MAX_TS_VAR) {
source->last_frame_ts =
next_frame->timestamp - frame_offset;
source->deinterlace_offset = 0;
}
frame_time = next_frame->timestamp;
frame_offset = frame_time - source->last_frame_ts;
}
if (prev_frame)
prev_frame->prev_frame = true;
return frame != NULL;
}
/* True when the source has not yet displayed any frame (no last frame
 * timestamp recorded).  As a side effect, flags the oldest queued frame
 * as the "previous" frame when at least two frames are buffered. */
static inline bool first_frame(obs_source_t *s)
{
	bool no_frame_shown = (s->last_frame_ts == 0);

	if (no_frame_shown && s->async_frames.num >= 2)
		s->async_frames.array[0]->prev_frame = true;

	return no_frame_shown;
}
/* Absolute difference between two unsigned 64-bit timestamps. */
static inline uint64_t uint64_diff(uint64_t ts1, uint64_t ts2)
{
	return (ts1 > ts2) ? (ts1 - ts2) : (ts2 - ts1);
}
/* Takes the best current/previous frame pair off the async queue,
 * computes the half-frame duration used by the 2x modes, and maintains
 * the frame-timestamp-to-video-clock offset used to schedule the second
 * field.  Caller must hold async_mutex. */
static inline void deinterlace_get_closest_frames(obs_source_t *s,
uint64_t sys_time)
{
const struct video_output_info *info;
uint64_t half_interval;
if (!s->async_frames.num)
return;
info = video_output_get_info(obs->video.video);
/* half of one output frame interval, in nanoseconds */
half_interval = (uint64_t)info->fps_den * 500000000ULL /
(uint64_t)info->fps_num;
if (first_frame(s) || ready_deinterlace_frames(s, sys_time)) {
uint64_t offset;
s->prev_async_frame = NULL;
s->cur_async_frame = s->async_frames.array[0];
da_erase(s->async_frames, 0);
/* If the front frame was flagged as a previous frame, the next
 * one becomes current; the pair's spacing gives the half
 * duration for double-rate rendering. */
if (s->cur_async_frame->prev_frame) {
s->prev_async_frame = s->cur_async_frame;
s->cur_async_frame = s->async_frames.array[0];
da_erase(s->async_frames, 0);
s->deinterlace_half_duration = (uint32_t)
((s->cur_async_frame->timestamp -
s->prev_async_frame->timestamp) / 2);
} else {
s->deinterlace_half_duration = (uint32_t)
((s->cur_async_frame->timestamp -
s->deinterlace_frame_ts) / 2);
}
if (!s->last_frame_ts)
s->last_frame_ts = s->cur_async_frame->timestamp;
s->deinterlace_frame_ts = s->cur_async_frame->timestamp;
/* Re-anchor the offset only when it has drifted more than half
 * a frame interval from the current video clock. */
offset = obs->video.video_time - s->deinterlace_frame_ts;
if (!s->deinterlace_offset) {
s->deinterlace_offset = offset;
} else {
uint64_t offset_diff = uint64_diff(
s->deinterlace_offset, offset);
if (offset_diff > half_interval)
s->deinterlace_offset = offset;
}
}
}
/* Releases the frame pair consumed during the previous tick, then pulls
 * the next closest current/previous pair from the async queue. */
void deinterlace_process_last_frame(obs_source_t *s, uint64_t sys_time)
{
	struct obs_source_frame **consumed[] = {
		&s->prev_async_frame,
		&s->cur_async_frame,
	};

	for (size_t i = 0; i < 2; i++) {
		if (*consumed[i]) {
			remove_async_frame(s, *consumed[i]);
			*consumed[i] = NULL;
		}
	}

	deinterlace_get_closest_frames(s, sys_time);
}
/* (Re)creates the previous-frame texture to match the source's current
 * async texture configuration.  With GPU conversion active, also creates
 * the texrender used for format conversion of the previous frame.
 * NOTE(review): the non-conversion branch leaves async_prev_texrender
 * NULL -- presumably not needed on that path; confirm against
 * deinterlace_render()/deinterlace_update_async_video(). */
void set_deinterlace_texture_size(obs_source_t *source)
{
if (source->async_gpu_conversion) {
source->async_prev_texrender =
gs_texrender_create(GS_BGRX, GS_ZS_NONE);
source->async_prev_texture = gs_texture_create(
source->async_convert_width,
source->async_convert_height,
source->async_texture_format,
1, NULL, GS_DYNAMIC);
} else {
enum gs_color_format format = convert_video_format(
source->async_format);
source->async_prev_texture = gs_texture_create(
source->async_width, source->async_height,
format, 1, NULL, GS_DYNAMIC);
}
}
/* Detaches and returns the queued previous frame, adding a reference the
 * caller owns (release with obs_source_release_frame).  *updated reports
 * whether a new current frame arrived this tick.  Synchronized via
 * async_mutex. */
static inline struct obs_source_frame *get_prev_frame(obs_source_t *source,
bool *updated)
{
struct obs_source_frame *frame = NULL;
pthread_mutex_lock(&source->async_mutex);
*updated = source->cur_async_frame != NULL;
frame = source->prev_async_frame;
source->prev_async_frame = NULL;
if (frame)
os_atomic_inc_long(&frame->refs);
pthread_mutex_unlock(&source->async_mutex);
return frame;
}
/* Uploads the previous frame into async_prev_texture, at most once per
 * output frame (guarded by deinterlace_rendered).  If there is no
 * distinct previous frame but a new current frame arrived, the
 * current/previous textures (and texrenders) are swapped instead, so the
 * old current frame becomes the previous one. */
void deinterlace_update_async_video(obs_source_t *source)
{
struct obs_source_frame *frame;
bool updated;
if (source->deinterlace_rendered)
return;
frame = get_prev_frame(source, &updated);
source->deinterlace_rendered = true;
if (frame)
frame = filter_async_video(source, frame);
if (frame) {
if (set_async_texture_size(source, frame)) {
update_async_texture(source, frame,
source->async_prev_texture,
source->async_prev_texrender);
}
obs_source_release_frame(source, frame);
} else if (updated) { /* swap cur/prev if no previous texture */
gs_texture_t *prev_tex = source->async_prev_texture;
source->async_prev_texture = source->async_texture;
source->async_texture = prev_tex;
if (source->async_texrender) {
gs_texrender_t *prev = source->async_prev_texrender;
source->async_prev_texrender = source->async_texrender;
source->async_texrender = prev;
}
}
}
/* Maps a deinterlace mode to its effect, loading and caching the shader
 * in obs->video on first use.  Returns NULL for
 * OBS_DEINTERLACE_MODE_DISABLE (or an unrecognized mode). */
static inline gs_effect_t *get_effect(enum obs_deinterlace_mode mode)
{
switch (mode) {
case OBS_DEINTERLACE_MODE_DISABLE: return NULL;
case OBS_DEINTERLACE_MODE_DISCARD:
return obs_load_effect(&obs->video.deinterlace_discard_effect,
"deinterlace_discard.effect");
/* "retro" reuses the double-rate discard shader */
case OBS_DEINTERLACE_MODE_RETRO:
return obs_load_effect(&obs->video.deinterlace_discard_2x_effect,
"deinterlace_discard_2x.effect");
case OBS_DEINTERLACE_MODE_BLEND:
return obs_load_effect(&obs->video.deinterlace_blend_effect,
"deinterlace_blend.effect");
case OBS_DEINTERLACE_MODE_BLEND_2X:
return obs_load_effect(&obs->video.deinterlace_blend_2x_effect,
"deinterlace_blend_2x.effect");
case OBS_DEINTERLACE_MODE_LINEAR:
return obs_load_effect(&obs->video.deinterlace_linear_effect,
"deinterlace_linear.effect");
case OBS_DEINTERLACE_MODE_LINEAR_2X:
return obs_load_effect(&obs->video.deinterlace_linear_2x_effect,
"deinterlace_linear_2x.effect");
case OBS_DEINTERLACE_MODE_YADIF:
return obs_load_effect(&obs->video.deinterlace_yadif_effect,
"deinterlace_yadif.effect");
case OBS_DEINTERLACE_MODE_YADIF_2X:
return obs_load_effect(&obs->video.deinterlace_yadif_2x_effect,
"deinterlace_yadif_2x.effect");
}
return NULL;
}
#define TWOX_TOLERANCE 1000000
/* Renders the source's current async frame deinterlaced with the effect
 * selected for it.  Requires both the current and previous frame
 * textures to be valid; silently does nothing otherwise.  Fix: the inner
 * byte-count variable previously shadowed the outer 'struct vec2 size'
 * (-Wshadow); it is renamed to range_size. */
void deinterlace_render(obs_source_t *s)
{
	gs_effect_t *effect = s->deinterlace_effect;
	uint64_t frame2_ts;
	gs_eparam_t *image = gs_effect_get_param_by_name(effect, "image");
	gs_eparam_t *prev = gs_effect_get_param_by_name(effect,
			"previous_image");
	gs_eparam_t *field = gs_effect_get_param_by_name(effect, "field_order");
	gs_eparam_t *frame2 = gs_effect_get_param_by_name(effect, "frame2");
	gs_eparam_t *dimensions = gs_effect_get_param_by_name(effect,
			"dimensions");
	struct vec2 size = {(float)s->async_width, (float)s->async_height};
	bool yuv = format_is_yuv(s->async_format);
	bool limited_range = yuv && !s->async_full_range;
	const char *tech = yuv ? "DrawMatrix" : "Draw";
	gs_texture_t *cur_tex = s->async_texrender ?
		gs_texrender_get_texture(s->async_texrender) :
		s->async_texture;
	gs_texture_t *prev_tex = s->async_prev_texrender ?
		gs_texrender_get_texture(s->async_prev_texrender) :
		s->async_prev_texture;

	if (!cur_tex || !prev_tex || !s->async_width || !s->async_height)
		return;

	gs_effect_set_texture(image, cur_tex);
	gs_effect_set_texture(prev, prev_tex);
	gs_effect_set_int(field, s->deinterlace_top_first);
	gs_effect_set_vec2(dimensions, &size);

	if (yuv) {
		gs_eparam_t *color_matrix = gs_effect_get_param_by_name(
				effect, "color_matrix");
		gs_effect_set_val(color_matrix, s->async_color_matrix,
				sizeof(float) * 16);
	}
	if (limited_range) {
		/* renamed from 'size' to avoid shadowing the vec2 above */
		const size_t range_size = sizeof(float) * 3;
		gs_eparam_t *color_range_min = gs_effect_get_param_by_name(
				effect, "color_range_min");
		gs_eparam_t *color_range_max = gs_effect_get_param_by_name(
				effect, "color_range_max");
		gs_effect_set_val(color_range_min, s->async_color_range_min,
				range_size);
		gs_effect_set_val(color_range_max, s->async_color_range_max,
				range_size);
	}

	/* The "2x" shader variants render the second field once enough of
	 * the frame interval has elapsed (within TWOX_TOLERANCE). */
	frame2_ts = s->deinterlace_frame_ts + s->deinterlace_offset +
		s->deinterlace_half_duration - TWOX_TOLERANCE;
	gs_effect_set_bool(frame2, obs->video.video_time >= frame2_ts);

	while (gs_effect_loop(effect, tech))
		gs_draw_sprite(NULL, 0, s->async_width, s->async_height);
}
/* Switches deinterlacing on: allocates the previous-frame texture when
 * the frame format and size are already known, selects the effect for
 * 'mode', and drops any stale previous frame under async_mutex. */
static void enable_deinterlacing(obs_source_t *source,
enum obs_deinterlace_mode mode)
{
obs_enter_graphics();
if (source->async_format != VIDEO_FORMAT_NONE &&
source->async_width != 0 &&
source->async_height != 0)
set_deinterlace_texture_size(source);
source->deinterlace_mode = mode;
source->deinterlace_effect = get_effect(mode);
pthread_mutex_lock(&source->async_mutex);
if (source->prev_async_frame) {
remove_async_frame(source, source->prev_async_frame);
source->prev_async_frame = NULL;
}
pthread_mutex_unlock(&source->async_mutex);
obs_leave_graphics();
}
/* Switches deinterlacing off: resets the mode and frees the
 * previous-frame GPU objects (must happen inside the graphics context). */
static void disable_deinterlacing(obs_source_t *source)
{
	obs_enter_graphics();

	source->deinterlace_mode = OBS_DEINTERLACE_MODE_DISABLE;

	gs_texture_destroy(source->async_prev_texture);
	source->async_prev_texture = NULL;

	gs_texrender_destroy(source->async_prev_texrender);
	source->async_prev_texrender = NULL;

	obs_leave_graphics();
}
/* Public API: changes the deinterlace mode for a source, allocating or
 * freeing the extra previous-frame GPU resources as needed.  No-op when
 * the mode is unchanged or the source is invalid. */
void obs_source_set_deinterlace_mode(obs_source_t *source,
enum obs_deinterlace_mode mode)
{
if (!obs_source_valid(source, "obs_source_set_deinterlace_mode"))
return;
if (source->deinterlace_mode == mode)
return;
if (source->deinterlace_mode == OBS_DEINTERLACE_MODE_DISABLE) {
enable_deinterlacing(source, mode);
} else if (mode == OBS_DEINTERLACE_MODE_DISABLE) {
disable_deinterlacing(source);
} else {
/* switching between two active modes only swaps the effect */
obs_enter_graphics();
source->deinterlace_mode = mode;
source->deinterlace_effect = get_effect(mode);
obs_leave_graphics();
}
}
/* Public API: returns the source's current deinterlace mode, or
 * OBS_DEINTERLACE_MODE_DISABLE when the source pointer is invalid.
 * Fix: the validity check previously logged the setter's name
 * ("obs_source_set_deinterlace_mode") -- copy/paste error. */
enum obs_deinterlace_mode obs_source_get_deinterlace_mode(
		const obs_source_t *source)
{
	return obs_source_valid(source, "obs_source_get_deinterlace_mode") ?
		source->deinterlace_mode : OBS_DEINTERLACE_MODE_DISABLE;
}
/* Public API: sets which field (top or bottom) is treated as first when
 * deinterlacing this source. */
void obs_source_set_deinterlace_field_order(obs_source_t *source,
		enum obs_deinterlace_field_order field_order)
{
	bool top_first;

	if (!obs_source_valid(source, "obs_source_set_deinterlace_field_order"))
		return;

	top_first = (field_order == OBS_DEINTERLACE_FIELD_ORDER_TOP);
	source->deinterlace_top_first = top_first;
}
/* Public API: returns the configured field order, defaulting to TOP when
 * the source pointer is invalid.  Fix: the validity check previously
 * logged the setter's name ("obs_source_set_deinterlace_field_order") --
 * copy/paste error. */
enum obs_deinterlace_field_order obs_source_get_deinterlace_field_order(
		const obs_source_t *source)
{
	if (!obs_source_valid(source, "obs_source_get_deinterlace_field_order"))
		return OBS_DEINTERLACE_FIELD_ORDER_TOP;

	return source->deinterlace_top_first
		? OBS_DEINTERLACE_FIELD_ORDER_TOP
		: OBS_DEINTERLACE_FIELD_ORDER_BOTTOM;
}

View file

@ -34,6 +34,11 @@ static inline bool data_valid(const struct obs_source *source, const char *f)
return obs_source_valid(source, f) && source->context.data;
}
/* True when any deinterlace mode is active for the source. */
static inline bool deinterlacing_enabled(const struct obs_source *source)
{
return source->deinterlace_mode != OBS_DEINTERLACE_MODE_DISABLE;
}
const struct obs_source_info *get_source_info(const char *id)
{
for (size_t i = 0; i < obs->source_types.num; i++) {
@ -109,6 +114,12 @@ static void allocate_audio_output_buffer(struct obs_source *source)
}
}
/* True when the source outputs async video (all OBS_SOURCE_ASYNC_VIDEO
 * flag bits set). */
static inline bool is_async_video_source(const struct obs_source *source)
{
return (source->info.output_flags & OBS_SOURCE_ASYNC_VIDEO) ==
OBS_SOURCE_ASYNC_VIDEO;
}
static inline bool is_audio_source(const struct obs_source *source)
{
return source->info.output_flags & OBS_SOURCE_AUDIO;
@ -119,6 +130,8 @@ static inline bool is_composite_source(const struct obs_source *source)
return source->info.output_flags & OBS_SOURCE_COMPOSITE;
}
extern char *find_libobs_data_file(const char *file);
/* internal initialization */
bool obs_source_init(struct obs_source *source)
{
@ -173,6 +186,7 @@ bool obs_source_init(struct obs_source *source)
}
source->control = bzalloc(sizeof(obs_weak_source_t));
source->deinterlace_top_first = true;
source->control->source = source;
source->audio_mixers = 0xF;
@ -483,8 +497,12 @@ void obs_source_destroy(struct obs_source *source)
gs_enter_context(obs->video.graphics);
if (source->async_texrender)
gs_texrender_destroy(source->async_texrender);
if (source->async_prev_texrender)
gs_texrender_destroy(source->async_prev_texrender);
if (source->async_texture)
gs_texture_destroy(source->async_texture);
if (source->async_prev_texture)
gs_texture_destroy(source->async_prev_texture);
if (source->filter_texrender)
gs_texrender_destroy(source->filter_texrender);
gs_leave_context();
@ -903,12 +921,20 @@ void obs_source_video_tick(obs_source_t *source, float seconds)
uint64_t sys_time = obs->video.video_time;
pthread_mutex_lock(&source->async_mutex);
if (source->cur_async_frame) {
remove_async_frame(source, source->cur_async_frame);
source->cur_async_frame = NULL;
if (deinterlacing_enabled(source)) {
deinterlace_process_last_frame(source, sys_time);
} else {
if (source->cur_async_frame) {
remove_async_frame(source,
source->cur_async_frame);
source->cur_async_frame = NULL;
}
source->cur_async_frame = get_closest_frame(source,
sys_time);
}
source->cur_async_frame = get_closest_frame(source, sys_time);
source->last_sys_timestamp = sys_time;
pthread_mutex_unlock(&source->async_mutex);
}
@ -948,6 +974,7 @@ void obs_source_video_tick(obs_source_t *source, float seconds)
source->info.video_tick(source->context.data, seconds);
source->async_rendered = false;
source->deinterlace_rendered = false;
}
/* unless the value is 3+ hours worth of frames, this won't overflow */
@ -1291,8 +1318,13 @@ bool set_async_texture_size(struct obs_source *source,
source->async_format = frame->format;
gs_texture_destroy(source->async_texture);
gs_texture_destroy(source->async_prev_texture);
gs_texrender_destroy(source->async_texrender);
gs_texrender_destroy(source->async_prev_texrender);
source->async_texture = NULL;
source->async_prev_texture = NULL;
source->async_texrender = NULL;
source->async_prev_texrender = NULL;
if (cur != CONVERT_NONE && init_gpu_conversion(source, frame)) {
source->async_gpu_conversion = true;
@ -1316,6 +1348,9 @@ bool set_async_texture_size(struct obs_source *source,
format, 1, NULL, GS_DYNAMIC);
}
if (deinterlacing_enabled(source))
set_deinterlace_texture_size(source);
return !!source->async_texture;
}
@ -1627,8 +1662,11 @@ static inline void render_video(obs_source_t *source)
if (source->info.type == OBS_SOURCE_TYPE_INPUT &&
(source->info.output_flags & OBS_SOURCE_ASYNC) != 0 &&
!source->rendering_filter)
!source->rendering_filter) {
if (deinterlacing_enabled(source))
deinterlace_update_async_video(source);
obs_source_update_async_video(source);
}
if (!source->context.data || !source->enabled) {
if (source->filter_parent)
@ -1645,6 +1683,9 @@ static inline void render_video(obs_source_t *source)
else if (source->filter_target)
obs_source_video_render(source->filter_target);
else if (deinterlacing_enabled(source))
deinterlace_render(source);
else
obs_source_render_async_video(source);
}
@ -2077,6 +2118,7 @@ static inline void free_async_cache(struct obs_source *source)
da_resize(source->async_cache, 0);
da_resize(source->async_frames, 0);
source->cur_async_frame = NULL;
source->prev_async_frame = NULL;
}
#define MAX_UNUSED_FRAME_DURATION 5
@ -2355,6 +2397,9 @@ void obs_source_output_audio(obs_source_t *source,
void remove_async_frame(obs_source_t *source, struct obs_source_frame *frame)
{
if (frame)
frame->prev_frame = false;
for (size_t i = 0; i < source->async_cache.num; i++) {
struct async_frame *f = &source->async_cache.array[i];
@ -2735,6 +2780,8 @@ void obs_source_skip_video_filter(obs_source_t *filter)
obs_source_default_render(target);
else if (target->info.video_render)
obs_source_main_render(target);
else if (deinterlacing_enabled(target))
deinterlace_render(target);
else
obs_source_render_async_video(target);

View file

@ -1412,6 +1412,8 @@ static obs_source_t *obs_load_source_type(obs_data_t *source_data)
int64_t sync;
uint32_t flags;
uint32_t mixers;
int di_order;
int di_mode;
source = obs_source_create(id, name, settings, hotkeys);
@ -1455,6 +1457,14 @@ static obs_source_t *obs_load_source_type(obs_data_t *source_data)
obs_source_set_push_to_talk_delay(source,
obs_data_get_int(source_data, "push-to-talk-delay"));
di_mode = (int)obs_data_get_int(source_data, "deinterlace_mode");
obs_source_set_deinterlace_mode(source,
(enum obs_deinterlace_mode)di_mode);
di_order = (int)obs_data_get_int(source_data, "deinterlace_field_order");
obs_source_set_deinterlace_field_order(source,
(enum obs_deinterlace_field_order)di_order);
if (filters) {
size_t count = obs_data_array_count(filters);
@ -1551,6 +1561,9 @@ obs_data_t *obs_save_source(obs_source_t *source)
uint64_t ptm_delay = obs_source_get_push_to_mute_delay(source);
bool push_to_talk= obs_source_push_to_talk_enabled(source);
uint64_t ptt_delay = obs_source_get_push_to_talk_delay(source);
int di_mode = (int)obs_source_get_deinterlace_mode(source);
int di_order =
(int)obs_source_get_deinterlace_field_order(source);
obs_source_save(source);
hotkeys = obs_hotkeys_save_source(source);
@ -1575,6 +1588,8 @@ obs_data_t *obs_save_source(obs_source_t *source)
obs_data_set_bool (source_data, "push-to-talk", push_to_talk);
obs_data_set_int (source_data, "push-to-talk-delay", ptt_delay);
obs_data_set_obj (source_data, "hotkeys", hotkey_data);
obs_data_set_int (source_data, "deinterlace_mode", di_mode);
obs_data_set_int (source_data, "deinterlace_field_order", di_order);
if (source->info.type == OBS_SOURCE_TYPE_TRANSITION)
obs_transition_save(source, source_data);

View file

@ -232,6 +232,7 @@ struct obs_source_frame {
/* used internally by libobs */
volatile long refs;
bool prev_frame;
};
/* ------------------------------------------------------------------------- */
@ -878,6 +879,32 @@ EXPORT void obs_source_add_audio_capture_callback(obs_source_t *source,
EXPORT void obs_source_remove_audio_capture_callback(obs_source_t *source,
obs_source_audio_capture_t callback, void *param);
enum obs_deinterlace_mode {
OBS_DEINTERLACE_MODE_DISABLE,
OBS_DEINTERLACE_MODE_DISCARD,
OBS_DEINTERLACE_MODE_RETRO,
OBS_DEINTERLACE_MODE_BLEND,
OBS_DEINTERLACE_MODE_BLEND_2X,
OBS_DEINTERLACE_MODE_LINEAR,
OBS_DEINTERLACE_MODE_LINEAR_2X,
OBS_DEINTERLACE_MODE_YADIF,
OBS_DEINTERLACE_MODE_YADIF_2X
};
/**
 * Field order used while deinterlacing, set via
 * obs_source_set_deinterlace_field_order().  Top-first is the default
 * (sources initialize deinterlace_top_first to true).
 */
enum obs_deinterlace_field_order {
	OBS_DEINTERLACE_FIELD_ORDER_TOP,   /**< top field first */
	OBS_DEINTERLACE_FIELD_ORDER_BOTTOM /**< bottom field first */
};
/** Sets the deinterlacing mode for an (async video) source.
 *  OBS_DEINTERLACE_MODE_DISABLE turns deinterlacing off. */
EXPORT void obs_source_set_deinterlace_mode(obs_source_t *source,
enum obs_deinterlace_mode mode);

/** Gets the current deinterlacing mode of a source. */
EXPORT enum obs_deinterlace_mode obs_source_get_deinterlace_mode(
const obs_source_t *source);

/** Sets the field order used when deinterlacing the source. */
EXPORT void obs_source_set_deinterlace_field_order(obs_source_t *source,
enum obs_deinterlace_field_order field_order);

/** Gets the field order used when deinterlacing the source.
 *  Returns OBS_DEINTERLACE_FIELD_ORDER_TOP on an invalid source. */
EXPORT enum obs_deinterlace_field_order obs_source_get_deinterlace_field_order(
const obs_source_t *source);
/* ------------------------------------------------------------------------- */
/* Functions used by sources */