Compare commits

...

5 commits

6 changed files with 248 additions and 383 deletions

View file

@ -46,6 +46,9 @@ typedef struct av_capture {
pthread_mutex_t mutex;
OBSAVCaptureColorSpace configuredColorSpace;
OBSAVCaptureVideoRange configuredFourCC;
void *settings;
void *source;
bool isFastPath;
@ -58,7 +61,7 @@ typedef struct av_capture {
/// C struct for sample buffer validity checks in capture callback
typedef struct av_capture_info {
OBSAVCaptureColorSpace colorSpace;
OBSAVCaptureVideoRange videoRange;
FourCharCode fourCC;
bool isValid;
} OBSAVCaptureVideoInfo;
@ -70,7 +73,7 @@ typedef struct av_capture_info {
///
/// Devices can be configured either via [presets](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc) (usually 3 quality-based presets in addition to resolution based presets). The resolution defined by the preset does not necessarily switch the actual device to the same resolution, instead the device is automatically switched to the best possible resolution and the [CMSampleBuffer](https://developer.apple.com/documentation/coremedia/cmsamplebuffer?language=objc) provided via [AVCaptureVideoDataOutput](https://developer.apple.com/documentation/avfoundation/avcapturevideodataoutput?language=objc) will be resized accordingly. If necessary the actual frame will be pillar-boxed to fit into a widescreen sample buffer in an attempt to fit the content into it.
///
/// Alternatively, devices can be configured manually by specifying resolution, frame-rate, color format and color space. If a device was **not** configured via a preset originally, the size of the [CMSampleBuffer](https://developer.apple.com/documentation/coremedia/cmsamplebuffer?language=objc) will be adjusted to the selected resolution.
/// Alternatively, devices can be configured manually by specifying a particular [AVCaptureDeviceFormat](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat?language=objc) representing a specific combination of resolution, frame-rate, color format and color space supported by the device. If a device was **not** configured via a preset originally, the size of the [CMSampleBuffer](https://developer.apple.com/documentation/coremedia/cmsamplebuffer?language=objc) will be adjusted to the selected resolution.
///
/// > Important: If a preset was configured before, the resolution of the last valid preset-based buffer will be retained and the frame will be fit into it with the selected resolution.
///
@ -144,11 +147,6 @@ typedef struct av_capture_info {
#pragma mark - OBS Settings Helpers
/// Reads source dimensions from user settings and converts them into a [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct for convenience when interacting with the [CoreMediaIO](https://developer.apple.com/documentation/coremediaio?language=objc) framework.
/// - Parameter settings: Pointer to settings struct used by ``libobs``
/// - Returns: [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct with resolution from user settings
+ (CMVideoDimensions)dimensionsFromSettings:(void *)settings;
/// Reads a C-character pointer from user settings and converts it into an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
/// - Parameters:
/// - settings: Pointer to user settings struct used by ``libobs``
@ -164,6 +162,11 @@ typedef struct av_capture_info {
/// - Returns: New [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance created from user setting if setting represented a valid C character pointer.
+ (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting withDefault:(NSString *)defaultValue;
/// Generates an NSString representing the name of the warning to display in the properties window for macOS system effects that are active on a particular `AVCaptureDevice`.
/// - Parameter device: The [AVCaptureDevice](https://developer.apple.com/documentation/avfoundation/avcapturedevice?language=objc) to generate an effects warning string for.
/// - Returns: `nil` if there are no effects active on the device. If effects are found, returns a new [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance containing the `libobs` key used to retrieve the appropriate localized warning string.
+ (NSString *)effectsWarningForDevice:(AVCaptureDevice *)device;
#pragma mark - Format Conversion Helpers
/// Converts a FourCC-based color format identifier into a human-readable string represented as an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
@ -235,6 +238,16 @@ typedef struct av_capture_info {
+ (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format withRange:(enum video_range_type)videoRange;
/// Generates a string describing an array of frame rate ranges.
/// - Parameter ranges: [NSArray](https://developer.apple.com/documentation/foundation/nsarray?language=objc) of [AVFrameRateRange](https://developer.apple.com/documentation/avfoundation/avframeraterange?language=objc), such as might be provided by an `AVCaptureDeviceFormat` instance's [videoSupportedFrameRateRanges](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat/1387592-videosupportedframerateranges) property.
/// - Returns: A new [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance that describes the frame rate ranges.
+ (NSString *)frameRateDescription:(NSArray<AVFrameRateRange *> *)ranges;
/// Generates a general localized description of an [AVCaptureDeviceFormat](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat?language=objc) suitable for format selection in the source properties window.
/// - Parameter format: An [AVCaptureDeviceFormat](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat?language=objc) format instance.
/// - Returns: A new [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance containing the localized description of the format.
+ (NSString *)descriptionFromFormat:(AVCaptureDeviceFormat *)format;
/// Converts a [CMFormatDescription](https://developer.apple.com/documentation/coremedia/cmformatdescription?language=objc) into a ``libobs``-based color space value
/// - Parameter description: A [CMFormatDescription](https://developer.apple.com/documentation/coremedia/cmformatdescription?language=objc) media format descriptor
/// - Returns: A ``libobs``-based color space value
@ -336,6 +349,50 @@ typedef struct av_capture_info {
#pragma mark - Static helper functions
/// Compare two `AVCaptureDeviceFormat`s for purposes of sorting in the properties window.
///
/// Formats are ranked by width, then total pixel area, then height, then maximum supported
/// frame rate, then pixel format FourCC, then color space. The final result is inverted so
/// that "larger" formats sort first (descending order).
static NSComparator const OBSAVCaptureDeviceFormatCompare =
    ^NSComparisonResult(AVCaptureDeviceFormat *lhs, AVCaptureDeviceFormat *rhs) {
        CMVideoDimensions leftSize = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription);
        CMVideoDimensions rightSize = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription);

        // Sort keys in priority order; evaluated pairwise until one pair differs.
        // NOTE(review): `firstObject.maxFrameRate` yields 0.0 when a format reports no
        // frame rate ranges (message to nil), matching the original comparator.
        NSArray<NSNumber *> *leftKeys = @[
            @(leftSize.width), @(leftSize.width * leftSize.height), @(leftSize.height),
            @(lhs.videoSupportedFrameRateRanges.firstObject.maxFrameRate),
            @(CMFormatDescriptionGetMediaSubType(lhs.formatDescription)),
            @([OBSAVCapture colorspaceFromDescription:lhs.formatDescription])
        ];
        NSArray<NSNumber *> *rightKeys = @[
            @(rightSize.width), @(rightSize.width * rightSize.height), @(rightSize.height),
            @(rhs.videoSupportedFrameRateRanges.firstObject.maxFrameRate),
            @(CMFormatDescriptionGetMediaSubType(rhs.formatDescription)),
            @([OBSAVCapture colorspaceFromDescription:rhs.formatDescription])
        ];

        NSComparisonResult ordering = NSOrderedSame;
        for (NSUInteger keyIndex = 0; keyIndex < leftKeys.count && ordering == NSOrderedSame; keyIndex++) {
            ordering = [leftKeys[keyIndex] compare:rightKeys[keyIndex]];
        }

        // Invert the natural ordering to produce a descending sort.
        switch (ordering) {
            case NSOrderedAscending:
                return NSOrderedDescending;
            case NSOrderedDescending:
                return NSOrderedAscending;
            default:
                return NSOrderedSame;
        }
    };
/// Clamp an unsigned 64-bit integer value to the specified minimum and maximum values
static inline UInt64 clamp_Uint(UInt64 value, UInt64 min, UInt64 max)
{

View file

@ -229,24 +229,6 @@
return NO;
}
if (@available(macOS 12.0, *)) {
if (device.portraitEffectActive) {
[self AVCaptureLog:LOG_WARNING withFormat:@"Portrait effect is active on selected device"];
}
}
if (@available(macOS 12.3, *)) {
if (device.centerStageActive) {
[self AVCaptureLog:LOG_WARNING withFormat:@"Center Stage effect is active on selected device"];
}
}
if (@available(macOS 13.0, *)) {
if (device.studioLightActive) {
[self AVCaptureLog:LOG_WARNING withFormat:@"Studio Light effect is active on selected device"];
}
}
[self.session beginConfiguration];
if ([self.session canAddInput:deviceInput]) {
@ -428,45 +410,7 @@
- (BOOL)configureSession:(NSError *__autoreleasing *)error
{
int videoRange;
int colorSpace;
FourCharCode inputFourCC;
if (!self.isFastPath) {
videoRange = (int) obs_data_get_int(self.captureInfo->settings, "video_range");
if (![OBSAVCapture isValidVideoRange:videoRange]) {
[self AVCaptureLog:LOG_WARNING withFormat:@"Unsupported video range: %d", videoRange];
return NO;
}
int inputFormat;
inputFormat = (int) obs_data_get_int(self.captureInfo->settings, "input_format");
inputFourCC = [OBSAVCapture fourCharCodeFromFormat:inputFormat withRange:videoRange];
colorSpace = (int) obs_data_get_int(self.captureInfo->settings, "color_space");
if (![OBSAVCapture isValidColorspace:colorSpace]) {
[self AVCaptureLog:LOG_DEBUG withFormat:@"Unsupported color space: %d", colorSpace];
return NO;
}
} else {
int inputFormat;
CMFormatDescriptionRef formatDescription = self.deviceInput.device.activeFormat.formatDescription;
inputFormat = (int) obs_data_get_int(self.captureInfo->settings, "input_format");
inputFourCC = [OBSAVCapture fourCharCodeFromFormat:inputFormat withRange:VIDEO_RANGE_DEFAULT];
colorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
videoRange = ([OBSAVCapture isFullRangeFormat:inputFourCC]) ? VIDEO_RANGE_FULL : VIDEO_RANGE_PARTIAL;
}
CMVideoDimensions dimensions = [OBSAVCapture dimensionsFromSettings:self.captureInfo->settings];
if (dimensions.width == 0 || dimensions.height == 0) {
[self AVCaptureLog:LOG_DEBUG withFormat:@"No valid resolution found in settings"];
return NO;
}
//todo: see if we need to handle 'migration' from old settings here (or else in plugin-properties)
struct media_frames_per_second fps;
if (!obs_data_get_frames_per_second(self.captureInfo->settings, "frame_rate", &fps, NULL)) {
[self AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
@ -475,34 +419,39 @@
CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};
const char *selectedFormat = obs_data_get_string(self.captureInfo->settings, "supported_format");
NSString *selectedFormatNSString = [NSString stringWithCString:selectedFormat encoding:NSUTF8StringEncoding];
AVCaptureDeviceFormat *format = nil;
FourCharCode subtype;
OBSAVCaptureColorSpace colorSpace;
bool fpsSupported = false;
for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(formatCandidate.formatDescription);
if (!(formatDimensions.width == dimensions.width) || !(formatDimensions.height == dimensions.height)) {
continue;
if ([selectedFormatNSString isEqualToString:[OBSAVCapture descriptionFromFormat:formatCandidate]]) {
CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
FourCharCode formatFourCC = CMFormatDescriptionGetMediaSubType(formatDescription);
format = formatCandidate;
subtype = formatFourCC;
colorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
}
for (AVFrameRateRange *range in formatCandidate.videoSupportedFrameRateRanges) {
if (CMTimeCompare(range.maxFrameDuration, time) >= 0 && CMTimeCompare(range.minFrameDuration, time) <= 0) {
CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
FourCharCode formatFourCC = CMFormatDescriptionGetMediaSubType(formatDescription);
if (inputFourCC == formatFourCC) {
format = formatCandidate;
inputFourCC = formatFourCC;
break;
}
}
}
if (format) {
break;
}
}
if (!format) {
[self AVCaptureLog:LOG_WARNING withFormat:@"Selected format '%@' not found on device", selectedFormatNSString];
return NO;
}
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
if (CMTimeCompare(range.maxFrameDuration, time) >= 0 && CMTimeCompare(range.minFrameDuration, time) <= 0) {
fpsSupported = true;
}
}
if (!fpsSupported) {
[self AVCaptureLog:LOG_WARNING withFormat:@"Frame rate is not supported: %g FPS (%u/%u)",
media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator];
return NO;
@ -513,30 +462,25 @@
self.isDeviceLocked = [self.deviceInput.device lockForConfiguration:error];
if (!self.isDeviceLocked) {
[self AVCaptureLog:LOG_WARNING withFormat:@"Could not lock devie for configuration"];
[self AVCaptureLog:LOG_WARNING withFormat:@"Could not lock device for configuration"];
return NO;
}
[self AVCaptureLog:LOG_INFO
withFormat:@"Capturing '%@' (%@):\n"
" Resolution : %ux%u\n"
" FPS : %g (%u/%u)\n"
" Frame Interval : %g\u00a0s\n"
" Input Format : %@\n"
" Requested Color Space : %@ (%d)\n"
" Requested Video Range : %@ (%d)\n"
" Using Format : %@",
self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID, dimensions.width,
dimensions.height, media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator,
media_frames_per_second_to_frame_interval(fps), [OBSAVCapture stringFromSubType:inputFourCC],
[OBSAVCapture stringFromColorspace:colorSpace], colorSpace,
[OBSAVCapture stringFromVideoRange:videoRange], videoRange, format.description];
[self AVCaptureLog:LOG_INFO withFormat:@"Capturing '%@' (%@):\n"
" Using Format : %@"
" FPS : %g (%u/%u)\n"
" Frame Interval : %g\u00a0s\n",
self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID,
selectedFormatNSString, media_frames_per_second_to_fps(fps), fps.numerator,
fps.denominator, media_frames_per_second_to_frame_interval(fps)];
OBSAVCaptureVideoInfo newInfo = {.colorSpace = _videoInfo.colorSpace,
.videoRange = _videoInfo.videoRange,
.fourCC = _videoInfo.fourCC,
.isValid = false};
self.videoInfo = newInfo;
self.captureInfo->configuredColorSpace = colorSpace;
self.captureInfo->configuredFourCC = subtype;
self.isPresetBased = NO;
@ -649,33 +593,6 @@
#pragma mark - OBS Settings Helpers
/// Reads the "resolution" user setting — a JSON object with "width" and "height" keys —
/// and converts it into a [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct.
/// - Parameter settings: Pointer to settings struct used by ``libobs``
/// - Returns: The parsed dimensions, or a zeroed struct if the setting is missing,
///   not valid JSON, or lacks a non-zero width or height.
+ (CMVideoDimensions)dimensionsFromSettings:(void *)settings
{
    CMVideoDimensions fallback = {0};

    NSString *resolutionJSON = [OBSAVCapture stringFromSettings:settings withSetting:@"resolution"];
    NSData *resolutionData = [resolutionJSON dataUsingEncoding:NSUTF8StringEncoding];
    // error is intentionally ignored; an unparseable setting simply yields the fallback.
    NSDictionary *resolution = [NSJSONSerialization JSONObjectWithData:resolutionData options:0 error:nil];

    if (resolution.count == 0) {
        return fallback;
    }

    NSInteger parsedWidth = [resolution[@"width"] intValue];
    NSInteger parsedHeight = [resolution[@"height"] intValue];

    if (!parsedWidth || !parsedHeight) {
        return fallback;
    }

    CMVideoDimensions dimensions = {.width = (int32_t) clamp_Uint(parsedWidth, 0, UINT32_MAX),
                                    .height = (int32_t) clamp_Uint(parsedHeight, 0, UINT32_MAX)};
    return dimensions;
}
+ (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting
{
return [OBSAVCapture stringFromSettings:settings withSetting:setting withDefault:@""];
@ -700,21 +617,70 @@
return result;
}
/// Generates the `libobs` localization key for the warning to display in the properties
/// window for macOS system effects that are active on a particular `AVCaptureDevice`.
/// - Parameter device: The [AVCaptureDevice](https://developer.apple.com/documentation/avfoundation/avcapturedevice?language=objc) to check for active effects.
/// - Returns: `nil` if no effects are active; the key for the single active effect when
///   exactly one is found; `Warning.Effect.Multiple` when more than one is active.
+ (NSString *)effectsWarningForDevice:(AVCaptureDevice *)device
{
    NSMutableArray<NSString *> *activeEffectKeys = [NSMutableArray array];

    if (@available(macOS 12.0, *)) {
        if (device.portraitEffectActive) {
            [activeEffectKeys addObject:@"Warning.Effect.Portrait"];
        }
    }

    if (@available(macOS 12.3, *)) {
        if (device.centerStageActive) {
            [activeEffectKeys addObject:@"Warning.Effect.CenterStage"];
        }
    }

    if (@available(macOS 13.0, *)) {
        if (device.studioLightActive) {
            [activeEffectKeys addObject:@"Warning.Effect.StudioLight"];
        }
    }

    if (@available(macOS 14.0, *)) {
        // `reactionEffectGesturesEnabled` is a class property rather than an instance property.
        // In practice all of these behave like class properties — as of macOS 15, toggling an
        // effect for one device toggles it for every device in the same application — but the
        // warning is still surfaced per device, since the properties window is scoped to the
        // device the user is configuring.
        if (AVCaptureDevice.reactionEffectGesturesEnabled) {
            [activeEffectKeys addObject:@"Warning.Effect.Reactions"];
        }
    }

#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 150000
    if (@available(macOS 15.0, *)) {
        if (device.backgroundReplacementActive) {
            [activeEffectKeys addObject:@"Warning.Effect.BackgroundReplacement"];
        }
    }
#endif

    if (activeEffectKeys.count > 1) {
        return @"Warning.Effect.Multiple";
    }

    // lastObject is nil for an empty array, signalling "no active effects" to the caller.
    return activeEffectKeys.lastObject;
}
#pragma mark - Format Conversion Helpers
+ (NSString *)stringFromSubType:(FourCharCode)subtype
{
switch (subtype) {
case kCVPixelFormatType_422YpCbCr8:
return @"UYVY - 422YpCbCr8";
return @"UYVY (2vuy)";
case kCVPixelFormatType_422YpCbCr8_yuvs:
return @"YUY2 - 422YpCbCr8_yuvs";
return @"YUY2 (yuvs)";
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
return @"NV12 (420v)";
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
return @"NV12 - 420YpCbCr8BiPlanar";
return @"NV12 (420f)";
case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
return @"P010 (xf20)";
case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
return @"P010 - 420YpCbCr10BiPlanar";
return @"P010 (x420)";
case kCVPixelFormatType_32ARGB:
return @"ARGB - 32ARGB";
case kCVPixelFormatType_32BGRA:
@ -946,6 +912,39 @@
return [OBSAVCapture fourCharCodeFromFormat:format withRange:VIDEO_RANGE_PARTIAL];
}
/// Generates a string describing an array of frame rate ranges.
///
/// Each range is rendered as a single value (e.g. "30.00") when its minimum and maximum
/// frame rates are equal, or as a span (e.g. "1.00-60.00") otherwise. Ranges are joined
/// with ", " and the result is suffixed with " fps".
///
/// - Parameter ranges: [NSArray](https://developer.apple.com/documentation/foundation/nsarray?language=objc) of [AVFrameRateRange](https://developer.apple.com/documentation/avfoundation/avframeraterange?language=objc), such as might be provided by an `AVCaptureDeviceFormat` instance's [videoSupportedFrameRateRanges](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat/1387592-videosupportedframerateranges) property.
/// - Returns: A new [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance that describes the frame rate ranges.
+ (NSString *)frameRateDescription:(NSArray<AVFrameRateRange *> *)ranges
{
    NSMutableArray<NSString *> *rangeDescriptions = [NSMutableArray arrayWithCapacity:ranges.count];

    for (AVFrameRateRange *range in ranges) {
        if (range.minFrameRate == range.maxFrameRate) {
            [rangeDescriptions addObject:[NSString stringWithFormat:@"%.2f", range.minFrameRate]];
        } else {
            [rangeDescriptions
                addObject:[NSString stringWithFormat:@"%.2f-%.2f", range.minFrameRate, range.maxFrameRate]];
        }
    }

    // Join with ", " between every pair of entries. The previous hand-rolled counter
    // (`index++` followed by `index < ranges.count - 1`) was off by one and never inserted
    // a separator between the final two ranges — for exactly two ranges, no separator at all.
    return [[rangeDescriptions componentsJoinedByString:@", "] stringByAppendingString:@" fps"];
}
/// Generates a general description of an [AVCaptureDeviceFormat](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat?language=objc) suitable for format
/// selection in the source properties window.
///
/// The description combines resolution, supported frame rate ranges, color space, and
/// pixel format, e.g. "1920x1080 - 30.00 fps - Rec. 709 - NV12 (420v)".
///
/// - Parameter format: An [AVCaptureDeviceFormat](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat?language=objc) format instance.
/// - Returns: A new [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance containing the description of the format.
+ (NSString *)descriptionFromFormat:(AVCaptureDeviceFormat *)format
{
    CMFormatDescriptionRef mediaFormat = format.formatDescription;
    CMVideoDimensions size = CMVideoFormatDescriptionGetDimensions(mediaFormat);

    NSString *pixelFormatName = [OBSAVCapture stringFromSubType:CMFormatDescriptionGetMediaSubType(mediaFormat)];
    NSString *colorspaceName =
        [OBSAVCapture stringFromColorspace:[OBSAVCapture colorspaceFromDescription:mediaFormat]];
    NSString *frameRates = [OBSAVCapture frameRateDescription:format.videoSupportedFrameRateRanges];

    return [NSString stringWithFormat:@"%dx%d - %@ - %@ - %@", size.width, size.height, frameRates,
                                      colorspaceName, pixelFormatName];
}
+ (OBSAVCaptureColorSpace)colorspaceFromDescription:(CMFormatDescriptionRef)description
{
CFPropertyListRef matrix = CMFormatDescriptionGetExtension(description, kCMFormatDescriptionExtension_YCbCrMatrix);
@ -1104,7 +1103,7 @@
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(description);
OBSAVCaptureVideoInfo newInfo = {.videoRange = _videoInfo.videoRange,
OBSAVCaptureVideoInfo newInfo = {.fourCC = _videoInfo.fourCC,
.colorSpace = _videoInfo.colorSpace,
.isValid = false};
@ -1196,10 +1195,9 @@
[OBSAVCapture colorspaceFromDescription:description];
OBSAVCaptureVideoRange sampleBufferRangeType = isSampleBufferFullRange ? VIDEO_RANGE_FULL
: VIDEO_RANGE_PARTIAL;
BOOL isColorSpaceMatching = NO;
SInt64 configuredColorSpace = obs_data_get_int(_captureInfo->settings, "color_space");
SInt64 configuredColorSpace = _captureInfo->configuredColorSpace;
if (usePreset) {
isColorSpaceMatching = sampleBufferColorSpace == _videoInfo.colorSpace;
@ -1207,17 +1205,16 @@
isColorSpaceMatching = configuredColorSpace == _videoInfo.colorSpace;
}
BOOL isVideoRangeMatching = NO;
SInt64 configuredVideoRangeType = obs_data_get_int(_captureInfo->settings, "video_range");
BOOL isFourCCMatching = NO;
SInt64 configuredFourCC = _captureInfo->configuredFourCC;
if (usePreset) {
isVideoRangeMatching = sampleBufferRangeType == _videoInfo.videoRange;
isFourCCMatching = mediaSubType == _videoInfo.fourCC;
} else {
isVideoRangeMatching = configuredVideoRangeType == _videoInfo.videoRange;
isSampleBufferFullRange = configuredVideoRangeType == VIDEO_RANGE_FULL;
isFourCCMatching = configuredFourCC == _videoInfo.fourCC;
}
if (isColorSpaceMatching && isVideoRangeMatching) {
if (isColorSpaceMatching && isFourCCMatching) {
newInfo.isValid = true;
} else {
frame->full_range = isSampleBufferFullRange;
@ -1233,7 +1230,7 @@
newInfo.isValid = false;
} else {
newInfo.colorSpace = sampleBufferColorSpace;
newInfo.videoRange = sampleBufferRangeType;
newInfo.fourCC = mediaSubType;
newInfo.isValid = true;
}
}

View file

@ -15,3 +15,9 @@ Auto="Auto"
Unknown="Unknown (%1)"
EnableAudio="Enable audio if supported by device"
Resolution="Resolution"
Warning.Effect.Portrait="Portrait system effect is active on the selected device"
Warning.Effect.CenterStage="Center Stage system effect is active on the selected device"
Warning.Effect.StudioLight="Studio Light system effect is active on the selected device"
Warning.Effect.Reactions="Reactions system effect is active on the selected device"
Warning.Effect.BackgroundReplacement="Background Replacement system effect is active on the selected device"
Warning.Effect.Multiple="Multiple system effects are active on the selected device"

View file

@ -79,42 +79,52 @@ static obs_properties_t *av_capture_properties(void *av_capture)
{
OBSAVCapture *capture = (__bridge OBSAVCapture *) (av_capture);
OBSAVCaptureInfo *capture_info = capture.captureInfo;
AVCaptureDevice *device = capture.deviceInput.device;
NSString *effectsWarningKey = [OBSAVCapture effectsWarningForDevice:device];
bool has_effects = effectsWarningKey != nil;
const char *effects_warning_string;
if (has_effects) {
effects_warning_string = obs_module_text(effectsWarningKey.UTF8String);
}
obs_properties_t *properties = obs_properties_create();
// Create Properties
obs_property_t *device_list = obs_properties_add_list(properties, "device", obs_module_text("Device"),
OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
obs_property_t *effects_warning;
if (has_effects) {
effects_warning = obs_properties_add_text(properties, "effects_warning", effects_warning_string, OBS_TEXT_INFO);
obs_property_text_set_info_type(effects_warning, OBS_TEXT_INFO_WARNING);
}
obs_property_t *use_preset = obs_properties_add_bool(properties, "use_preset", obs_module_text("UsePreset"));
obs_property_t *preset_list = obs_properties_add_list(properties, "preset", obs_module_text("Preset"),
OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
obs_property_t *resolutions = obs_properties_add_list(properties, "resolution", obs_module_text("Resolution"),
OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
obs_property_t *supported_formats = obs_properties_add_list(
properties, "supported_format", obs_module_text("InputFormat"), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
obs_property_t *use_buffering = obs_properties_add_bool(properties, "buffering", obs_module_text("Buffering"));
obs_property_t *frame_rates = obs_properties_add_frame_rate(properties, "frame_rate", obs_module_text("FrameRate"));
obs_property_t *input_format = obs_properties_add_list(properties, "input_format", obs_module_text("InputFormat"),
OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
obs_property_t *color_space = obs_properties_add_list(properties, "color_space", obs_module_text("ColorSpace"),
OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
obs_property_t *video_range = obs_properties_add_list(properties, "video_range", obs_module_text("VideoRange"),
OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
if (capture_info) {
bool isFastPath = capture_info->isFastPath;
// Add Property Visibility and Callbacks
configure_property(device_list, true, true, properties_changed, capture);
if (has_effects) {
configure_property(effects_warning, true, has_effects, NULL, NULL);
}
configure_property(use_preset, !isFastPath, !isFastPath, (!isFastPath) ? properties_changed_use_preset : NULL,
capture);
configure_property(preset_list, !isFastPath, !isFastPath, (!isFastPath) ? properties_changed_preset : NULL,
capture);
configure_property(resolutions, isFastPath, isFastPath, NULL, NULL);
configure_property(supported_formats, true, true, properties_changed, capture);
configure_property(use_buffering, !isFastPath, !isFastPath, NULL, NULL);
configure_property(frame_rates, isFastPath, isFastPath, NULL, NULL);
configure_property(color_space, !isFastPath, !isFastPath, NULL, NULL);
configure_property(video_range, !isFastPath, !isFastPath, NULL, NULL);
configure_property(input_format, true, true, NULL, NULL);
}
return properties;

View file

@ -67,12 +67,9 @@ bool properties_update_device(OBSAVCapture *capture, obs_property_t *property, o
/// Updates available values for all properties required in manual device configuration.
///
/// Properties updated by this call include:
/// * Resolutions
/// * Device formats
/// * Frame rates and frame rate ranges
/// * Color formats
/// * Color range
///
/// In CoreMediaIO color format, resolution and frame rate ranges are always coupled into a single format, while color range is always contained in the color format. The formats are thus compiled and de-duplicated to create a selection of all properties.
/// * Effects warnings
///
/// Frame rate ranges will be limited to ranges only available for a specific combination of resolution and color format.
///

View file

@ -135,7 +135,7 @@ bool properties_changed_use_preset(OBSAVCapture *capture, obs_properties_t *prop
properties_changed_preset(capture, properties, preset_list, settings);
}
const char *update_properties[5] = {"resolution", "frame_rate", "color_space", "video_range", "input_format"};
const char *update_properties[2] = {"frame_rate", "supported_format"};
size_t number_of_properties = sizeof(update_properties) / sizeof(update_properties[0]);
@ -273,253 +273,51 @@ bool properties_update_device(OBSAVCapture *capture __unused, obs_property_t *pr
bool properties_update_config(OBSAVCapture *capture, obs_properties_t *properties, obs_data_t *settings)
{
//todo: see if we need to handle 'migration' of old settings here (or else in OBSAVCapture)
AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:[OBSAVCapture stringFromSettings:settings
withSetting:@"device"]];
obs_property_t *prop_resolution = obs_properties_get(properties, "resolution");
obs_property_t *prop_framerate = obs_properties_get(properties, "frame_rate");
obs_property_t *prop_format = obs_properties_get(properties, "supported_format");
obs_property_t *prop_effects_warning = obs_properties_get(properties, "effects_warning");
obs_property_list_clear(prop_resolution);
obs_property_frame_rate_clear(prop_framerate);
obs_property_t *prop_input_format = NULL;
obs_property_t *prop_color_space = NULL;
obs_property_t *prop_video_range = NULL;
prop_input_format = obs_properties_get(properties, "input_format");
obs_property_list_clear(prop_input_format);
if (!capture.isFastPath) {
prop_color_space = obs_properties_get(properties, "color_space");
prop_video_range = obs_properties_get(properties, "video_range");
obs_property_list_clear(prop_video_range);
obs_property_list_clear(prop_color_space);
}
CMVideoDimensions resolution = [OBSAVCapture dimensionsFromSettings:settings];
if (resolution.width == 0 || resolution.height == 0) {
[capture AVCaptureLog:LOG_DEBUG withFormat:@"No valid resolution found in settings"];
}
obs_property_list_clear(prop_format);
obs_property_list_clear(prop_effects_warning);
struct media_frames_per_second fps;
if (!obs_data_get_frames_per_second(settings, "frame_rate", &fps, NULL)) {
[capture AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
}
CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};
const char *selectedFormatData = obs_data_get_string(settings, "supported_format");
NSString *selectedFormatString = [NSString stringWithCString:selectedFormatData encoding:NSUTF8StringEncoding];
int input_format = 0;
int color_space = 0;
int video_range = 0;
NSMutableArray *inputFormats = NULL;
NSMutableArray *colorSpaces = NULL;
NSMutableArray *videoRanges = NULL;
input_format = (int) obs_data_get_int(settings, "input_format");
inputFormats = [[NSMutableArray alloc] init];
if (!capture.isFastPath) {
color_space = (int) obs_data_get_int(settings, "color_space");
video_range = (int) obs_data_get_int(settings, "video_range");
colorSpaces = [[NSMutableArray alloc] init];
videoRanges = [[NSMutableArray alloc] init];
}
NSMutableArray *resolutions = [[NSMutableArray alloc] init];
NSMutableArray *frameRates = [[NSMutableArray alloc] init];
BOOL hasFoundResolution = NO;
BOOL hasFoundFramerate = NO;
BOOL hasFoundInputFormat = NO;
BOOL hasFoundColorSpace = capture.isFastPath;
BOOL hasFoundVideoRange = capture.isFastPath;
CFPropertyListRef priorColorPrimary = @"";
if (device) {
// Iterate over all formats reported by the device and gather them for property lists
for (AVCaptureDeviceFormat *format in device.formats) {
if (!capture.isFastPath) {
FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
for (AVCaptureDeviceFormat *format in
[device.formats sortedArrayUsingComparator:OBSAVCaptureDeviceFormatCompare]) {
NSString *enumeratedFormatString = [OBSAVCapture descriptionFromFormat:format];
obs_property_list_add_string(prop_format, enumeratedFormatString.UTF8String,
enumeratedFormatString.UTF8String);
if ([selectedFormatString isEqualToString:enumeratedFormatString]) {
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
struct media_frames_per_second min_fps = {
.numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX),
.denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX)};
struct media_frames_per_second max_fps = {
.numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX),
.denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX)};
NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
int device_format = [OBSAVCapture formatFromSubtype:formatSubType];
int device_range;
const char *range_description;
if ([OBSAVCapture isFullRangeFormat:formatSubType]) {
device_range = VIDEO_RANGE_FULL;
range_description = av_capture_get_text("VideoRange.Full");
} else {
device_range = VIDEO_RANGE_PARTIAL;
range_description = av_capture_get_text("VideoRange.Partial");
}
if (!hasFoundInputFormat && input_format == device_format) {
hasFoundInputFormat = YES;
}
if (!hasFoundVideoRange && video_range == device_range) {
hasFoundVideoRange = YES;
}
if (![inputFormats containsObject:@(formatSubType)]) {
obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, device_format);
[inputFormats addObject:@(formatSubType)];
}
if (![videoRanges containsObject:@(range_description)]) {
obs_property_list_add_int(prop_video_range, range_description, device_range);
[videoRanges addObject:@(range_description)];
}
int device_color_space = [OBSAVCapture colorspaceFromDescription:format.formatDescription];
if (![colorSpaces containsObject:@(device_color_space)]) {
obs_property_list_add_int(prop_color_space,
[OBSAVCapture stringFromColorspace:device_color_space].UTF8String,
device_color_space);
[colorSpaces addObject:@(device_color_space)];
}
if (!hasFoundColorSpace && device_color_space == color_space) {
hasFoundColorSpace = YES;
}
} else {
FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
int device_format = [OBSAVCapture formatFromSubtype:formatSubType];
if (!hasFoundInputFormat && input_format == device_format) {
hasFoundInputFormat = YES;
}
if (![inputFormats containsObject:@(formatSubType)]) {
obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, device_format);
[inputFormats addObject:@(formatSubType)];
}
}
CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
NSDictionary *resolutionData =
@{@"width": @(formatDimensions.width),
@"height": @(formatDimensions.height)};
if (![resolutions containsObject:resolutionData]) {
[resolutions addObject:resolutionData];
}
if (!hasFoundResolution && formatDimensions.width == resolution.width &&
formatDimensions.height == resolution.height) {
hasFoundResolution = YES;
}
// Only iterate over available framerates if input format, color space, and resolution are matching
if (hasFoundInputFormat && hasFoundColorSpace && hasFoundResolution) {
CFComparisonResult isColorPrimaryMatch = kCFCompareEqualTo;
CFPropertyListRef colorPrimary = CMFormatDescriptionGetExtension(
format.formatDescription, kCMFormatDescriptionExtension_ColorPrimaries);
if (colorPrimary) {
isColorPrimaryMatch = CFStringCompare(colorPrimary, priorColorPrimary, 0);
}
if (isColorPrimaryMatch != kCFCompareEqualTo || !hasFoundFramerate) {
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges.reverseObjectEnumerator) {
FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
int device_format = [OBSAVCapture formatFromSubtype:formatSubType];
if (input_format == device_format) {
struct media_frames_per_second min_fps = {
.numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX),
.denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX)};
struct media_frames_per_second max_fps = {
.numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX),
.denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX)};
if (![frameRates containsObject:range]) {
obs_property_frame_rate_fps_range_add(prop_framerate, min_fps, max_fps);
[frameRates addObject:range];
}
if (!hasFoundFramerate && CMTimeCompare(range.maxFrameDuration, time) >= 0 &&
CMTimeCompare(range.minFrameDuration, time) <= 0) {
hasFoundFramerate = YES;
}
}
if (![frameRates containsObject:range]) {
obs_property_frame_rate_fps_range_add(prop_framerate, min_fps, max_fps);
[frameRates addObject:range];
}
priorColorPrimary = colorPrimary;
}
}
}
// Add resolutions in reverse order (formats reported by macOS are sorted with lowest resolution first)
for (NSDictionary *resolutionData in resolutions.reverseObjectEnumerator) {
NSError *error;
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:resolutionData options:0 error:&error];
int width = [[resolutionData objectForKey:@"width"] intValue];
int height = [[resolutionData objectForKey:@"height"] intValue];
obs_property_list_add_string(
prop_resolution, [NSString stringWithFormat:@"%dx%d", width, height].UTF8String,
[[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding].UTF8String);
}
// Add currently selected values in disabled state if they are not supported by the device
size_t index;
FourCharCode formatSubType = [OBSAVCapture fourCharCodeFromFormat:input_format withRange:video_range];
if (!hasFoundInputFormat) {
NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
index = obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, input_format);
obs_property_list_item_disable(prop_input_format, index, true);
}
if (!capture.isFastPath) {
if (!hasFoundVideoRange) {
int device_range;
const char *range_description;
if ([OBSAVCapture isFullRangeFormat:formatSubType]) {
device_range = VIDEO_RANGE_FULL;
range_description = av_capture_get_text("VideoRange.Full");
} else {
device_range = VIDEO_RANGE_PARTIAL;
range_description = av_capture_get_text("VideoRange.Partial");
}
index = obs_property_list_add_int(prop_video_range, range_description, device_range);
obs_property_list_item_disable(prop_video_range, index, true);
}
if (!hasFoundColorSpace) {
index = obs_property_list_add_int(
prop_color_space, [OBSAVCapture stringFromColorspace:color_space].UTF8String, color_space);
obs_property_list_item_disable(prop_color_space, index, true);
}
}
if (!hasFoundResolution) {
NSDictionary *resolutionData = @{@"width": @(resolution.width), @"height": @(resolution.height)};
NSError *error;
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:resolutionData options:0 error:&error];
index = obs_property_list_add_string(
prop_resolution, [NSString stringWithFormat:@"%dx%d", resolution.width, resolution.height].UTF8String,
[[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding].UTF8String);
obs_property_list_item_disable(prop_resolution, index, true);
}
}
return true;
}