mac-capture: Refactor macOS Screen Capture

This commit is contained in:
jcm 2023-06-25 14:47:38 -05:00 committed by Patrick Heyer
parent 0a610dcc7a
commit ce5f391c73
5 changed files with 449 additions and 428 deletions

View file

@ -11,6 +11,8 @@ target_sources(
CGDisplayStream.h
mac-audio.c
mac-display-capture.m
mac-sck-common.h
mac-sck-common.m
mac-screen-capture.m
mac-window-capture.m
plugin-main.c

View file

@ -0,0 +1,80 @@
#include <AvailabilityMacros.h>
#include <Cocoa/Cocoa.h>
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 120300 // __MAC_12_3
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
#include <stdlib.h>
#include <obs-module.h>
#include <util/threading.h>
#include <pthread.h>
#include <IOSurface/IOSurface.h>
#include <ScreenCaptureKit/ScreenCaptureKit.h>
#include <CoreMedia/CMSampleBuffer.h>
#include <CoreVideo/CVPixelBuffer.h>
#define MACCAP_LOG(level, msg, ...) blog(level, "[ mac-screencapture ]: " msg, ##__VA_ARGS__)
#define MACCAP_ERR(msg, ...) MACCAP_LOG(LOG_ERROR, msg, ##__VA_ARGS__)
// Which kind of ScreenCaptureKit stream a capture source is running.
typedef enum {
    ScreenCaptureDisplayStream = 0,
    ScreenCaptureWindowStream = 1,
    ScreenCaptureApplicationStream = 2,
} ScreenCaptureStreamType;

// Receives SCStream output (video/audio sample buffers) and lifecycle
// callbacks and forwards them to the owning screen_capture instance.
@interface ScreenCaptureDelegate : NSObject <SCStreamOutput, SCStreamDelegate>
@property struct screen_capture *sc;
@end

// Shared state for a ScreenCaptureKit-based capture source.
struct screen_capture {
    obs_source_t *source;

    gs_effect_t *effect;
    gs_texture_t *tex;
    NSRect frame; // current capture frame size in pixels

    bool hide_cursor;
    bool hide_obs;
    bool show_hidden_windows;
    bool show_empty_names;

    SCStream *disp;
    SCStreamConfiguration *stream_properties;
    SCShareableContent *shareable_content;
    ScreenCaptureDelegate *capture_delegate;

    os_event_t *disp_finished;
    os_event_t *stream_start_completed;
    os_sem_t *shareable_content_available; // serializes access to shareable_content
    IOSurfaceRef current, prev;
    pthread_mutex_t mutex; // guards current/frame updates from the stream callback

    ScreenCaptureStreamType capture_type;
    CGDirectDisplayID display;
    CGWindowID window;
    NSString *application_id;
};

// Returns true when ScreenCaptureKit capture is usable on this OS version.
bool is_screen_capture_available(void);

// Asynchronously refreshes sc->shareable_content (see mac-sck-common.m).
void screen_capture_build_content_list(struct screen_capture *sc, bool display_capture);
// Property-list builders; each waits on/posts shareable_content_available.
bool build_display_list(struct screen_capture *sc, obs_properties_t *props);
bool build_window_list(struct screen_capture *sc, obs_properties_t *props);
bool build_application_list(struct screen_capture *sc, obs_properties_t *props);

// Sample-buffer handlers invoked from the ScreenCaptureDelegate callbacks.
void screen_stream_video_update(struct screen_capture *sc, CMSampleBufferRef sample_buffer);
void screen_stream_audio_update(struct screen_capture *sc, CMSampleBufferRef sample_buffer);
#pragma clang diagnostic pop
#endif

View file

@ -0,0 +1,339 @@
#include "mac-sck-common.h"
// ScreenCaptureKit-based capture needs macOS 12.5 or newer.
bool is_screen_capture_available(void)
{
    if (@available(macOS 12.5, *))
        return true;

    return false;
}
#pragma mark - ScreenCaptureDelegate

@implementation ScreenCaptureDelegate

// SCStreamOutput: dispatches each incoming sample buffer to the matching
// C-level handler on the owning screen_capture.
- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type
{
    if (self.sc != NULL) {
        if (type == SCStreamOutputTypeScreen) {
            screen_stream_video_update(self.sc, sampleBuffer);
        }
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 130000
        else if (@available(macOS 13.0, *)) {
            if (type == SCStreamOutputTypeAudio) {
                screen_stream_audio_update(self.sc, sampleBuffer);
            }
        }
#endif
    }
}

// SCStreamDelegate: logs the reason the stream stopped.
- (void)stream:(SCStream *)stream didStopWithError:(NSError *)error
{
    NSString *errorMessage;
    switch (error.code) {
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 130000
        case SCStreamErrorUserStopped:
            errorMessage = @"User stopped stream.";
            break;
#endif
        case SCStreamErrorNoCaptureSource:
            // Fixed double negative ("no capture source was not found").
            errorMessage = @"Stream stopped as no capture source was found.";
            break;
        default:
            // Cast NSInteger explicitly for the %ld conversion.
            errorMessage = [NSString stringWithFormat:@"Stream stopped with error %ld (\"%s\")", (long) error.code,
                                                      error.localizedDescription.UTF8String];
            break;
    }

    // Previously only error.domain was logged and errorMessage was computed
    // but never used; include both so the log explains the stop reason.
    MACCAP_LOG(LOG_WARNING, "%s: %s", error.domain.UTF8String, errorMessage.UTF8String);
}

@end
#pragma mark - obs_properties
// Kicks off an asynchronous refresh of sc->shareable_content via
// ScreenCaptureKit. Takes the shareable_content_available semaphore, releases
// the previous content list (this file uses manual retain/release), and the
// completion handler re-posts the semaphore once the new list has been
// retained (or the query failed).
void screen_capture_build_content_list(struct screen_capture *sc, bool display_capture)
{
    typedef void (^shareable_content_callback)(SCShareableContent *, NSError *);
    shareable_content_callback new_content_received = ^void(SCShareableContent *shareable_content, NSError *error) {
        if (error == nil && sc->shareable_content_available != NULL) {
            // Retain: the handler's argument is not owned by this block.
            sc->shareable_content = [shareable_content retain];
        } else {
#ifdef DEBUG
            MACCAP_ERR("screen_capture_properties: Failed to get shareable content with error %s\n",
                       [[error localizedFailureReason] cStringUsingEncoding:NSUTF8StringEncoding]);
#endif
            MACCAP_LOG(LOG_WARNING, "Unable to get list of available applications or windows. "
                                    "Please check if OBS has necessary screen capture permissions.");
            // NOTE(review): on failure sc->shareable_content still points at
            // the object released below — confirm readers tolerate that.
        }
        // Unblocks whichever caller is waiting on the content list.
        os_sem_post(sc->shareable_content_available);
    };

    // Serialize access to sc->shareable_content; re-posted by the handler.
    os_sem_wait(sc->shareable_content_available);
    [sc->shareable_content release];

    // Display capture lists all windows; otherwise honor the user's
    // "show hidden windows" preference.
    BOOL onScreenWindowsOnly = (display_capture) ? NO : !sc->show_hidden_windows;
    [SCShareableContent getShareableContentExcludingDesktopWindows:YES onScreenWindowsOnly:onScreenWindowsOnly
                                                 completionHandler:new_content_received];
}
// Populates the "display_uuid" list property with one entry per shareable
// display that has a matching attached NSScreen (needed for its localized
// name and frame). Holds the shareable-content semaphore for the duration.
// Always returns true (obs property-modified callback convention).
bool build_display_list(struct screen_capture *sc, obs_properties_t *props)
{
    os_sem_wait(sc->shareable_content_available);

    obs_property_t *display_list = obs_properties_get(props, "display_uuid");
    obs_property_list_clear(display_list);

    for (SCDisplay *display in sc->shareable_content.displays) {
        // Locate the NSScreen backing this SCDisplay.
        NSScreen *display_screen = nil;
        for (NSScreen *screen in NSScreen.screens) {
            NSNumber *screen_num = screen.deviceDescription[@"NSScreenNumber"];
            CGDirectDisplayID screen_display_id = (CGDirectDisplayID) screen_num.intValue;
            if (screen_display_id == display.displayID) {
                display_screen = screen;
                break;
            }
        }
        if (!display_screen) {
            continue;
        }

        char dimension_buffer[4][12] = {};
        char name_buffer[256] = {};
        // Use each element's own sizeof (the original reused element 0's).
        snprintf(dimension_buffer[0], sizeof(dimension_buffer[0]), "%u", (uint32_t) display_screen.frame.size.width);
        snprintf(dimension_buffer[1], sizeof(dimension_buffer[1]), "%u", (uint32_t) display_screen.frame.size.height);
        snprintf(dimension_buffer[2], sizeof(dimension_buffer[2]), "%d", (int32_t) display_screen.frame.origin.x);
        snprintf(dimension_buffer[3], sizeof(dimension_buffer[3]), "%d", (int32_t) display_screen.frame.origin.y);

        // "Name: WxH @ X,Y"
        snprintf(name_buffer, sizeof(name_buffer), "%.200s: %.12sx%.12s @ %.12s,%.12s",
                 display_screen.localizedName.UTF8String, dimension_buffer[0], dimension_buffer[1],
                 dimension_buffer[2], dimension_buffer[3]);

        CFUUIDRef display_uuid = CGDisplayCreateUUIDFromDisplayID(display.displayID);
        CFStringRef uuid_string = CFUUIDCreateString(kCFAllocatorDefault, display_uuid);

        // CFStringGetCStringPtr may legitimately return NULL (it only works
        // when the backing store is already a compatible C string); fall back
        // to copying via CFStringGetCString so NULL is never passed to obs.
        char uuid_buffer[64] = {};
        const char *uuid_cstr = CFStringGetCStringPtr(uuid_string, kCFStringEncodingUTF8);
        if (uuid_cstr == NULL &&
            CFStringGetCString(uuid_string, uuid_buffer, sizeof(uuid_buffer), kCFStringEncodingUTF8)) {
            uuid_cstr = uuid_buffer;
        }
        obs_property_list_add_string(display_list, name_buffer, uuid_cstr);

        CFRelease(uuid_string);
        CFRelease(display_uuid);
    }

    os_sem_post(sc->shareable_content_available);
    return true;
}
// Fills the "window" list property with "[App] Title" entries for every
// shareable window, filtered and sorted for presentation. Holds the
// shareable-content semaphore for the duration; always returns true.
bool build_window_list(struct screen_capture *sc, obs_properties_t *props)
{
    os_sem_wait(sc->shareable_content_available);

    obs_property_t *list = obs_properties_get(props, "window");
    obs_property_list_clear(list);

    // Unless the user opted into empty names, require both a non-empty
    // application name and a non-empty window title.
    NSPredicate *visible_predicate =
        [NSPredicate predicateWithBlock:^BOOL(SCWindow *window, NSDictionary *bindings __unused) {
            if (sc->show_empty_names)
                return YES;
            NSString *owner_name = window.owningApplication.applicationName;
            return (owner_name.length > 0) && (window.title.length > 0);
        }];

    NSArray<SCWindow *> *candidates =
        [sc->shareable_content.windows filteredArrayUsingPredicate:visible_predicate];

    // Order by application name, then window title (both case-insensitive).
    NSArray<SCWindow *> *ordered =
        [candidates sortedArrayUsingComparator:^NSComparisonResult(SCWindow *lhs, SCWindow *rhs) {
            NSComparisonResult by_app = [lhs.owningApplication.applicationName
                compare:rhs.owningApplication.applicationName
                options:NSCaseInsensitiveSearch];
            if (by_app != NSOrderedSame)
                return by_app;
            return [lhs.title compare:rhs.title options:NSCaseInsensitiveSearch];
        }];

    for (SCWindow *window in ordered) {
        NSString *label =
            [NSString stringWithFormat:@"[%@] %@", window.owningApplication.applicationName, window.title];
        obs_property_list_add_int(list, label.UTF8String, window.windowID);
    }

    os_sem_post(sc->shareable_content_available);
    return true;
}
// Fills the "application" list property with every shareable application
// that has a non-empty name, sorted case-insensitively by name. Holds the
// shareable-content semaphore for the duration; always returns true.
bool build_application_list(struct screen_capture *sc, obs_properties_t *props)
{
    os_sem_wait(sc->shareable_content_available);

    obs_property_t *list = obs_properties_get(props, "application");
    obs_property_list_clear(list);

    // Drop applications without a display name.
    NSPredicate *named_predicate =
        [NSPredicate predicateWithBlock:^BOOL(SCRunningApplication *app, NSDictionary *bindings __unused) {
            return app.applicationName.length > 0;
        }];
    NSArray<SCRunningApplication *> *named =
        [sc->shareable_content.applications filteredArrayUsingPredicate:named_predicate];

    NSArray<SCRunningApplication *> *ordered = [named
        sortedArrayUsingComparator:^NSComparisonResult(SCRunningApplication *lhs, SCRunningApplication *rhs) {
            return [lhs.applicationName compare:rhs.applicationName options:NSCaseInsensitiveSearch];
        }];

    for (SCRunningApplication *app in ordered) {
        obs_property_list_add_string(list, app.applicationName.UTF8String, app.bundleIdentifier.UTF8String);
    }

    os_sem_post(sc->shareable_content_available);
    return true;
}
#pragma mark - audio/video
// Handles one video sample buffer from ScreenCaptureKit: extracts per-frame
// window geometry metadata (window streams only), swaps the source's current
// IOSurface under the mutex, and pushes an updated stream configuration when
// the captured size changed.
void screen_stream_video_update(struct screen_capture *sc, CMSampleBufferRef sample_buffer)
{
    bool frame_detail_errored = false;
    float scale_factor = 1.0f;
    CGRect window_rect = {};

    // Window streams attach metadata (content rect + scale factors) needed
    // to size the output; display streams skip this whole section.
    CFArrayRef attachments_array = CMSampleBufferGetSampleAttachmentsArray(sample_buffer, false);
    if (sc->capture_type == ScreenCaptureWindowStream && attachments_array != NULL &&
        CFArrayGetCount(attachments_array) > 0) {
        CFDictionaryRef attachments_dict = CFArrayGetValueAtIndex(attachments_array, 0);
        if (attachments_dict != NULL) {
            CFTypeRef frame_scale_factor = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoScaleFactor);
            if (frame_scale_factor != NULL) {
                Boolean result = CFNumberGetValue((CFNumberRef) frame_scale_factor, kCFNumberFloatType, &scale_factor);
                if (result == false) {
                    // Fall back to 1.0 and remember that metadata was bad.
                    scale_factor = 1.0f;
                    frame_detail_errored = true;
                }
            }

            CFTypeRef content_rect_dict = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoContentRect);
            CFTypeRef content_scale_factor = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoContentScale);
            if ((content_rect_dict != NULL) && (content_scale_factor != NULL)) {
                CGRect content_rect = {};
                float points_to_pixels = 0.0f;

                Boolean result =
                    CGRectMakeWithDictionaryRepresentation((__bridge CFDictionaryRef) content_rect_dict, &content_rect);
                if (result == false) {
                    content_rect = CGRectZero;
                    frame_detail_errored = true;
                }
                result = CFNumberGetValue((CFNumberRef) content_scale_factor, kCFNumberFloatType, &points_to_pixels);
                if (result == false) {
                    points_to_pixels = 1.0f;
                    frame_detail_errored = true;
                }

                // Convert the content rect from points to pixels.
                window_rect.origin = content_rect.origin;
                window_rect.size.width = content_rect.size.width / points_to_pixels * scale_factor;
                window_rect.size.height = content_rect.size.height / points_to_pixels * scale_factor;
            }
        }
    }

    CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);

    CVPixelBufferLockBaseAddress(image_buffer, 0);
    IOSurfaceRef frame_surface = CVPixelBufferGetIOSurface(image_buffer);
    CVPixelBufferUnlockBaseAddress(image_buffer, 0);

    IOSurfaceRef prev_current = NULL;

    if (frame_surface && !pthread_mutex_lock(&sc->mutex)) {
        bool needs_to_update_properties = false;

        // Skip size updates for frames whose metadata failed to parse.
        if (!frame_detail_errored) {
            if (sc->capture_type == ScreenCaptureWindowStream) {
                if ((sc->frame.size.width != window_rect.size.width) ||
                    (sc->frame.size.height != window_rect.size.height)) {
                    sc->frame.size.width = window_rect.size.width;
                    sc->frame.size.height = window_rect.size.height;
                    needs_to_update_properties = true;
                }
            } else {
                // Non-window streams derive the size from the pixel buffer.
                size_t width = CVPixelBufferGetWidth(image_buffer);
                size_t height = CVPixelBufferGetHeight(image_buffer);
                if ((sc->frame.size.width != width) || (sc->frame.size.height != height)) {
                    sc->frame.size.width = width;
                    sc->frame.size.height = height;
                    needs_to_update_properties = true;
                }
            }
        }

        // Ask ScreenCaptureKit to match the new output size asynchronously;
        // only the failure path is reported.
        if (needs_to_update_properties) {
            [sc->stream_properties setWidth:(size_t) sc->frame.size.width];
            [sc->stream_properties setHeight:(size_t) sc->frame.size.height];
            [sc->disp updateConfiguration:sc->stream_properties completionHandler:^(NSError *_Nullable error) {
                if (error) {
                    MACCAP_ERR("screen_stream_video_update: Failed to update stream properties with error %s\n",
                               [[error localizedFailureReason] cStringUsingEncoding:NSUTF8StringEncoding]);
                }
            }];
        }

        // Swap in the new surface while holding the mutex; retain it and bump
        // its use count so consumers of sc->current can keep using it.
        prev_current = sc->current;
        sc->current = frame_surface;
        CFRetain(sc->current);
        IOSurfaceIncrementUseCount(sc->current);

        pthread_mutex_unlock(&sc->mutex);
    }

    // Drop our references to the displaced surface outside the lock.
    if (prev_current) {
        IOSurfaceDecrementUseCount(prev_current);
        CFRelease(prev_current);
    }
}
// Converts a ScreenCaptureKit audio sample buffer into planar-float OBS audio
// and forwards it to the source. The data is assumed to be one contiguous
// plane per channel, split evenly across the block buffer.
void screen_stream_audio_update(struct screen_capture *sc, CMSampleBufferRef sample_buffer)
{
    CMFormatDescriptionRef format_description = CMSampleBufferGetFormatDescription(sample_buffer);
    const AudioStreamBasicDescription *audio_description =
        CMAudioFormatDescriptionGetStreamBasicDescription(format_description);
    // Guard: the description accessor returns NULL for non-audio/invalid
    // format descriptions.
    if (audio_description == NULL) {
        MACCAP_ERR("screen_stream_audio_update: Sample buffer has no audio stream description\n");
        return;
    }
    if (audio_description->mChannelsPerFrame < 1) {
        // %u: mChannelsPerFrame is unsigned (UInt32).
        MACCAP_ERR(
            "screen_stream_audio_update: Received sample buffer has less than 1 channel per frame (mChannelsPerFrame set to '%u')\n",
            audio_description->mChannelsPerFrame);
        return;
    }
    // Guard: avoid dividing by zero below when the frame size is unset.
    if (audio_description->mBytesPerFrame == 0) {
        MACCAP_ERR("screen_stream_audio_update: Received sample buffer with zero mBytesPerFrame\n");
        return;
    }

    char *_Nullable bytes = NULL;
    CMBlockBufferRef data_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
    if (data_buffer == NULL) {
        return;
    }
    size_t data_buffer_length = CMBlockBufferGetDataLength(data_buffer);
    // Check the status and pointer instead of assuming success; a failed call
    // would otherwise hand OBS a NULL (plus offset) data pointer.
    if (CMBlockBufferGetDataPointer(data_buffer, 0, &data_buffer_length, NULL, &bytes) != kCMBlockBufferNoErr ||
        bytes == NULL) {
        MACCAP_ERR("screen_stream_audio_update: Failed to get audio data pointer\n");
        return;
    }

    CMTime presentation_time = CMSampleBufferGetOutputPresentationTimeStamp(sample_buffer);

    struct obs_source_audio audio_data = {};
    // One plane per channel: the buffer is split into equal channel slices.
    for (uint32_t channel_idx = 0; channel_idx < audio_description->mChannelsPerFrame; ++channel_idx) {
        uint32_t offset = (uint32_t) (data_buffer_length / audio_description->mChannelsPerFrame) * channel_idx;
        audio_data.data[channel_idx] = (uint8_t *) bytes + offset;
    }
    audio_data.frames =
        (uint32_t) (data_buffer_length / audio_description->mBytesPerFrame / audio_description->mChannelsPerFrame);
    audio_data.speakers = audio_description->mChannelsPerFrame;
    audio_data.samples_per_sec = (uint32_t) audio_description->mSampleRate;
    // Presentation time (seconds) converted to the nanosecond clock OBS uses.
    audio_data.timestamp = (uint64_t) (CMTimeGetSeconds(presentation_time) * NSEC_PER_SEC);
    audio_data.format = AUDIO_FORMAT_FLOAT_PLANAR;
    obs_source_output_audio(sc->source, &audio_data);
}

View file

@ -1,75 +1,4 @@
#include <AvailabilityMacros.h>
#include <Cocoa/Cocoa.h>
bool is_screen_capture_available(void)
{
if (@available(macOS 12.5, *)) {
return true;
} else {
return false;
}
}
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 120300 // __MAC_12_3
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
#include <stdlib.h>
#include <obs-module.h>
#include <util/threading.h>
#include <pthread.h>
#include <IOSurface/IOSurface.h>
#include <ScreenCaptureKit/ScreenCaptureKit.h>
#include <CoreMedia/CMSampleBuffer.h>
#include <CoreVideo/CVPixelBuffer.h>
#define MACCAP_LOG(level, msg, ...) blog(level, "[ mac-screencapture ]: " msg, ##__VA_ARGS__)
#define MACCAP_ERR(msg, ...) MACCAP_LOG(LOG_ERROR, msg, ##__VA_ARGS__)
typedef enum {
ScreenCaptureDisplayStream = 0,
ScreenCaptureWindowStream = 1,
ScreenCaptureApplicationStream = 2,
} ScreenCaptureStreamType;
@interface ScreenCaptureDelegate : NSObject <SCStreamOutput, SCStreamDelegate>
@property struct screen_capture *sc;
@end
struct screen_capture {
obs_source_t *source;
gs_effect_t *effect;
gs_texture_t *tex;
NSRect frame;
bool hide_cursor;
bool hide_obs;
bool show_hidden_windows;
bool show_empty_names;
SCStream *disp;
SCStreamConfiguration *stream_properties;
SCShareableContent *shareable_content;
ScreenCaptureDelegate *capture_delegate;
os_event_t *disp_finished;
os_event_t *stream_start_completed;
os_sem_t *shareable_content_available;
IOSurfaceRef current, prev;
pthread_mutex_t mutex;
ScreenCaptureStreamType capture_type;
CGDirectDisplayID display;
CGWindowID window;
NSString *application_id;
};
#pragma mark -
#include "mac-sck-common.h"
static void destroy_screen_stream(struct screen_capture *sc)
{
@ -115,7 +44,7 @@ static void destroy_screen_stream(struct screen_capture *sc)
os_event_destroy(sc->stream_start_completed);
}
static void screen_capture_destroy(void *data)
static void sck_video_capture_destroy(void *data)
{
struct screen_capture *sc = data;
@ -144,144 +73,6 @@ static void screen_capture_destroy(void *data)
bfree(sc);
}
static inline void screen_stream_video_update(struct screen_capture *sc, CMSampleBufferRef sample_buffer)
{
bool frame_detail_errored = false;
float scale_factor = 1.0f;
CGRect window_rect = {};
CFArrayRef attachments_array = CMSampleBufferGetSampleAttachmentsArray(sample_buffer, false);
if (sc->capture_type == ScreenCaptureWindowStream && attachments_array != NULL &&
CFArrayGetCount(attachments_array) > 0) {
CFDictionaryRef attachments_dict = CFArrayGetValueAtIndex(attachments_array, 0);
if (attachments_dict != NULL) {
CFTypeRef frame_scale_factor = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoScaleFactor);
if (frame_scale_factor != NULL) {
Boolean result = CFNumberGetValue((CFNumberRef) frame_scale_factor, kCFNumberFloatType, &scale_factor);
if (result == false) {
scale_factor = 1.0f;
frame_detail_errored = true;
}
}
CFTypeRef content_rect_dict = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoContentRect);
CFTypeRef content_scale_factor = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoContentScale);
if ((content_rect_dict != NULL) && (content_scale_factor != NULL)) {
CGRect content_rect = {};
float points_to_pixels = 0.0f;
Boolean result =
CGRectMakeWithDictionaryRepresentation((__bridge CFDictionaryRef) content_rect_dict, &content_rect);
if (result == false) {
content_rect = CGRectZero;
frame_detail_errored = true;
}
result = CFNumberGetValue((CFNumberRef) content_scale_factor, kCFNumberFloatType, &points_to_pixels);
if (result == false) {
points_to_pixels = 1.0f;
frame_detail_errored = true;
}
window_rect.origin = content_rect.origin;
window_rect.size.width = content_rect.size.width / points_to_pixels * scale_factor;
window_rect.size.height = content_rect.size.height / points_to_pixels * scale_factor;
}
}
}
CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
CVPixelBufferLockBaseAddress(image_buffer, 0);
IOSurfaceRef frame_surface = CVPixelBufferGetIOSurface(image_buffer);
CVPixelBufferUnlockBaseAddress(image_buffer, 0);
IOSurfaceRef prev_current = NULL;
if (frame_surface && !pthread_mutex_lock(&sc->mutex)) {
bool needs_to_update_properties = false;
if (!frame_detail_errored) {
if (sc->capture_type == ScreenCaptureWindowStream) {
if ((sc->frame.size.width != window_rect.size.width) ||
(sc->frame.size.height != window_rect.size.height)) {
sc->frame.size.width = window_rect.size.width;
sc->frame.size.height = window_rect.size.height;
needs_to_update_properties = true;
}
} else {
size_t width = CVPixelBufferGetWidth(image_buffer);
size_t height = CVPixelBufferGetHeight(image_buffer);
if ((sc->frame.size.width != width) || (sc->frame.size.height != height)) {
sc->frame.size.width = width;
sc->frame.size.height = height;
needs_to_update_properties = true;
}
}
}
if (needs_to_update_properties) {
[sc->stream_properties setWidth:(size_t) sc->frame.size.width];
[sc->stream_properties setHeight:(size_t) sc->frame.size.height];
[sc->disp updateConfiguration:sc->stream_properties completionHandler:^(NSError *_Nullable error) {
if (error) {
MACCAP_ERR("screen_stream_video_update: Failed to update stream properties with error %s\n",
[[error localizedFailureReason] cStringUsingEncoding:NSUTF8StringEncoding]);
}
}];
}
prev_current = sc->current;
sc->current = frame_surface;
CFRetain(sc->current);
IOSurfaceIncrementUseCount(sc->current);
pthread_mutex_unlock(&sc->mutex);
}
if (prev_current) {
IOSurfaceDecrementUseCount(prev_current);
CFRelease(prev_current);
}
}
static inline void screen_stream_audio_update(struct screen_capture *sc, CMSampleBufferRef sample_buffer)
{
CMFormatDescriptionRef format_description = CMSampleBufferGetFormatDescription(sample_buffer);
const AudioStreamBasicDescription *audio_description =
CMAudioFormatDescriptionGetStreamBasicDescription(format_description);
if (audio_description->mChannelsPerFrame < 1) {
MACCAP_ERR(
"screen_stream_audio_update: Received sample buffer has less than 1 channel per frame (mChannelsPerFrame set to '%d')\n",
audio_description->mChannelsPerFrame);
return;
}
char *_Nullable bytes = NULL;
CMBlockBufferRef data_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
size_t data_buffer_length = CMBlockBufferGetDataLength(data_buffer);
CMBlockBufferGetDataPointer(data_buffer, 0, &data_buffer_length, NULL, &bytes);
CMTime presentation_time = CMSampleBufferGetOutputPresentationTimeStamp(sample_buffer);
struct obs_source_audio audio_data = {};
for (uint32_t channel_idx = 0; channel_idx < audio_description->mChannelsPerFrame; ++channel_idx) {
uint32_t offset = (uint32_t) (data_buffer_length / audio_description->mChannelsPerFrame) * channel_idx;
audio_data.data[channel_idx] = (uint8_t *) bytes + offset;
}
audio_data.frames =
(uint32_t) (data_buffer_length / audio_description->mBytesPerFrame / audio_description->mChannelsPerFrame);
audio_data.speakers = audio_description->mChannelsPerFrame;
audio_data.samples_per_sec = (uint32_t) audio_description->mSampleRate;
audio_data.timestamp = (uint64_t) (CMTimeGetSeconds(presentation_time) * NSEC_PER_SEC);
audio_data.format = AUDIO_FORMAT_FLOAT_PLANAR;
obs_source_output_audio(sc->source, &audio_data);
}
static bool init_screen_stream(struct screen_capture *sc)
{
SCContentFilter *content_filter;
@ -453,31 +244,7 @@ static bool init_screen_stream(struct screen_capture *sc)
return did_stream_start;
}
static void screen_capture_build_content_list(struct screen_capture *sc, bool display_capture)
{
typedef void (^shareable_content_callback)(SCShareableContent *, NSError *);
shareable_content_callback new_content_received = ^void(SCShareableContent *shareable_content, NSError *error) {
if (error == nil && sc->shareable_content_available != NULL) {
sc->shareable_content = [shareable_content retain];
} else {
#ifdef DEBUG
MACCAP_ERR("screen_capture_properties: Failed to get shareable content with error %s\n",
[[error localizedFailureReason] cStringUsingEncoding:NSUTF8StringEncoding]);
#endif
MACCAP_LOG(LOG_WARNING, "Unable to get list of available applications or windows. "
"Please check if OBS has necessary screen capture permissions.");
}
os_sem_post(sc->shareable_content_available);
};
os_sem_wait(sc->shareable_content_available);
[sc->shareable_content release];
BOOL onScreenWindowsOnly = (display_capture) ? NO : !sc->show_hidden_windows;
[SCShareableContent getShareableContentExcludingDesktopWindows:YES onScreenWindowsOnly:onScreenWindowsOnly
completionHandler:new_content_received];
}
static void *screen_capture_create(obs_data_t *settings, obs_source_t *source)
static void *sck_video_capture_create(obs_data_t *settings, obs_source_t *source)
{
struct screen_capture *sc = bzalloc(sizeof(struct screen_capture));
@ -531,11 +298,11 @@ static void *screen_capture_create(obs_data_t *settings, obs_source_t *source)
fail:
obs_leave_graphics();
screen_capture_destroy(sc);
sck_video_capture_destroy(sc);
return NULL;
}
static void screen_capture_video_tick(void *data, float seconds __unused)
static void sck_video_capture_tick(void *data, float seconds __unused)
{
struct screen_capture *sc = data;
@ -567,7 +334,7 @@ static void screen_capture_video_tick(void *data, float seconds __unused)
}
}
static void screen_capture_video_render(void *data, gs_effect_t *effect __unused)
static void sck_video_capture_render(void *data, gs_effect_t *effect __unused)
{
struct screen_capture *sc = data;
@ -586,7 +353,7 @@ static void screen_capture_video_render(void *data, gs_effect_t *effect __unused
gs_enable_framebuffer_srgb(previous);
}
static const char *screen_capture_getname(void *unused __unused)
static const char *sck_video_capture_getname(void *unused __unused)
{
if (@available(macOS 13.0, *))
return obs_module_text("SCK.Name");
@ -594,21 +361,21 @@ static const char *screen_capture_getname(void *unused __unused)
return obs_module_text("SCK.Name.Beta");
}
static uint32_t screen_capture_getwidth(void *data)
static uint32_t sck_video_capture_getwidth(void *data)
{
struct screen_capture *sc = data;
return (uint32_t) sc->frame.size.width;
}
static uint32_t screen_capture_getheight(void *data)
static uint32_t sck_video_capture_getheight(void *data)
{
struct screen_capture *sc = data;
return (uint32_t) sc->frame.size.height;
}
static void screen_capture_defaults(obs_data_t *settings)
static void sck_video_capture_defaults(obs_data_t *settings)
{
CGDirectDisplayID initial_display = 0;
{
@ -636,7 +403,7 @@ static void screen_capture_defaults(obs_data_t *settings)
obs_data_set_default_bool(settings, "show_hidden_windows", false);
}
static void screen_capture_update(void *data, obs_data_t *settings)
static void sck_video_capture_update(void *data, obs_data_t *settings)
{
struct screen_capture *sc = data;
@ -703,126 +470,6 @@ static void screen_capture_update(void *data, obs_data_t *settings)
#pragma mark - obs_properties
static bool build_display_list(struct screen_capture *sc, obs_properties_t *props)
{
os_sem_wait(sc->shareable_content_available);
obs_property_t *display_list = obs_properties_get(props, "display_uuid");
obs_property_list_clear(display_list);
for (SCDisplay *display in sc->shareable_content.displays) {
NSScreen *display_screen = nil;
for (NSScreen *screen in NSScreen.screens) {
NSNumber *screen_num = screen.deviceDescription[@"NSScreenNumber"];
CGDirectDisplayID screen_display_id = (CGDirectDisplayID) screen_num.intValue;
if (screen_display_id == display.displayID) {
display_screen = screen;
break;
}
}
if (!display_screen) {
continue;
}
char dimension_buffer[4][12] = {};
char name_buffer[256] = {};
snprintf(dimension_buffer[0], sizeof(dimension_buffer[0]), "%u", (uint32_t) display_screen.frame.size.width);
snprintf(dimension_buffer[1], sizeof(dimension_buffer[0]), "%u", (uint32_t) display_screen.frame.size.height);
snprintf(dimension_buffer[2], sizeof(dimension_buffer[0]), "%d", (int32_t) display_screen.frame.origin.x);
snprintf(dimension_buffer[3], sizeof(dimension_buffer[0]), "%d", (int32_t) display_screen.frame.origin.y);
snprintf(name_buffer, sizeof(name_buffer), "%.200s: %.12sx%.12s @ %.12s,%.12s",
display_screen.localizedName.UTF8String, dimension_buffer[0], dimension_buffer[1], dimension_buffer[2],
dimension_buffer[3]);
CFUUIDRef display_uuid = CGDisplayCreateUUIDFromDisplayID(display.displayID);
CFStringRef uuid_string = CFUUIDCreateString(kCFAllocatorDefault, display_uuid);
obs_property_list_add_string(display_list, name_buffer,
CFStringGetCStringPtr(uuid_string, kCFStringEncodingUTF8));
CFRelease(uuid_string);
CFRelease(display_uuid);
}
os_sem_post(sc->shareable_content_available);
return true;
}
static bool build_window_list(struct screen_capture *sc, obs_properties_t *props)
{
os_sem_wait(sc->shareable_content_available);
obs_property_t *window_list = obs_properties_get(props, "window");
obs_property_list_clear(window_list);
NSPredicate *filteredWindowPredicate =
[NSPredicate predicateWithBlock:^BOOL(SCWindow *window, NSDictionary *bindings __unused) {
NSString *app_name = window.owningApplication.applicationName;
NSString *title = window.title;
if (!sc->show_empty_names) {
return (app_name.length > 0) && (title.length > 0);
} else {
return YES;
}
}];
NSArray<SCWindow *> *filteredWindows;
filteredWindows = [sc->shareable_content.windows filteredArrayUsingPredicate:filteredWindowPredicate];
NSArray<SCWindow *> *sortedWindows;
sortedWindows = [filteredWindows sortedArrayUsingComparator:^NSComparisonResult(SCWindow *window, SCWindow *other) {
NSComparisonResult appNameCmp = [window.owningApplication.applicationName
compare:other.owningApplication.applicationName
options:NSCaseInsensitiveSearch];
if (appNameCmp == NSOrderedAscending) {
return NSOrderedAscending;
} else if (appNameCmp == NSOrderedSame) {
return [window.title compare:other.title options:NSCaseInsensitiveSearch];
} else {
return NSOrderedDescending;
}
}];
for (SCWindow *window in sortedWindows) {
NSString *app_name = window.owningApplication.applicationName;
NSString *title = window.title;
const char *list_text = [[NSString stringWithFormat:@"[%@] %@", app_name, title] UTF8String];
obs_property_list_add_int(window_list, list_text, window.windowID);
}
os_sem_post(sc->shareable_content_available);
return true;
}
static bool build_application_list(struct screen_capture *sc, obs_properties_t *props)
{
os_sem_wait(sc->shareable_content_available);
obs_property_t *application_list = obs_properties_get(props, "application");
obs_property_list_clear(application_list);
NSArray<SCRunningApplication *> *filteredApplications;
filteredApplications = [sc->shareable_content.applications
filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(SCRunningApplication *app,
NSDictionary *bindings __unused) {
return app.applicationName.length > 0;
}]];
NSArray<SCRunningApplication *> *sortedApplications;
sortedApplications = [filteredApplications
sortedArrayUsingComparator:^NSComparisonResult(SCRunningApplication *app, SCRunningApplication *other) {
return [app.applicationName compare:other.applicationName options:NSCaseInsensitiveSearch];
}];
for (SCRunningApplication *application in sortedApplications) {
const char *name = [application.applicationName UTF8String];
const char *bundle_id = [application.bundleIdentifier UTF8String];
obs_property_list_add_string(application_list, name, bundle_id);
}
os_sem_post(sc->shareable_content_available);
return true;
}
static bool content_settings_changed(void *data, obs_properties_t *props, obs_property_t *list __unused,
obs_data_t *settings)
{
@ -894,7 +541,7 @@ static bool content_settings_changed(void *data, obs_properties_t *props, obs_pr
return true;
}
static obs_properties_t *screen_capture_properties(void *data)
static obs_properties_t *sck_video_capture_properties(void *data)
{
struct screen_capture *sc = data;
@ -999,8 +646,8 @@ static obs_properties_t *screen_capture_properties(void *data)
return props;
}
enum gs_color_space screen_capture_video_get_color_space(void *data, size_t count,
const enum gs_color_space *preferred_spaces)
enum gs_color_space sck_video_capture_get_color_space(void *data, size_t count,
const enum gs_color_space *preferred_spaces)
{
UNUSED_PARAMETER(data);
@ -1024,72 +671,25 @@ enum gs_color_space screen_capture_video_get_color_space(void *data, size_t coun
#pragma mark - obs_source_info
struct obs_source_info screen_capture_info = {
struct obs_source_info sck_video_capture_info = {
.id = "screen_capture",
.type = OBS_SOURCE_TYPE_INPUT,
.get_name = screen_capture_getname,
.get_name = sck_video_capture_getname,
.create = screen_capture_create,
.destroy = screen_capture_destroy,
.create = sck_video_capture_create,
.destroy = sck_video_capture_destroy,
.output_flags = OBS_SOURCE_VIDEO | OBS_SOURCE_CUSTOM_DRAW | OBS_SOURCE_DO_NOT_DUPLICATE | OBS_SOURCE_SRGB |
OBS_SOURCE_AUDIO,
.video_tick = screen_capture_video_tick,
.video_render = screen_capture_video_render,
.video_tick = sck_video_capture_tick,
.video_render = sck_video_capture_render,
.get_width = screen_capture_getwidth,
.get_height = screen_capture_getheight,
.get_width = sck_video_capture_getwidth,
.get_height = sck_video_capture_getheight,
.get_defaults = screen_capture_defaults,
.get_properties = screen_capture_properties,
.update = screen_capture_update,
.get_defaults = sck_video_capture_defaults,
.get_properties = sck_video_capture_properties,
.update = sck_video_capture_update,
.icon_type = OBS_ICON_TYPE_DESKTOP_CAPTURE,
.video_get_color_space = screen_capture_video_get_color_space,
.video_get_color_space = sck_video_capture_get_color_space,
};
#pragma mark - ScreenCaptureDelegate
@implementation ScreenCaptureDelegate
- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type
{
if (self.sc != NULL) {
if (type == SCStreamOutputTypeScreen) {
screen_stream_video_update(self.sc, sampleBuffer);
}
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 130000
else if (@available(macOS 13.0, *)) {
if (type == SCStreamOutputTypeAudio) {
screen_stream_audio_update(self.sc, sampleBuffer);
}
}
#endif
}
}
- (void)stream:(SCStream *)stream didStopWithError:(NSError *)error
{
NSString *errorMessage;
switch (error.code) {
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 130000
case SCStreamErrorUserStopped:
errorMessage = @"User stopped stream.";
break;
#endif
case SCStreamErrorNoCaptureSource:
errorMessage = @"Stream stopped as no capture source was not found.";
break;
default:
errorMessage = [NSString stringWithFormat:@"Stream stopped with error %ld (\"%s\")", error.code,
error.localizedDescription.UTF8String];
break;
}
MACCAP_LOG(LOG_WARNING, "%s", error.domain.UTF8String);
}
@end
// "-Wunguarded-availability-new"
#pragma clang diagnostic pop
#endif

View file

@ -20,8 +20,8 @@ bool obs_module_load(void)
obs_register_source(&coreaudio_output_capture_info);
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 120300 // __MAC_12_3
if (is_screen_capture_available()) {
extern struct obs_source_info screen_capture_info;
obs_register_source(&screen_capture_info);
extern struct obs_source_info sck_video_capture_info;
obs_register_source(&sck_video_capture_info);
if (__builtin_available(macOS 13.0, *)) {
display_capture_info.output_flags |=
OBS_SOURCE_DEPRECATED;