diff --git a/camera/camera_driver.c b/camera/camera_driver.c
index 260d8740a1..342b8ae94b 100644
--- a/camera/camera_driver.c
+++ b/camera/camera_driver.c
@@ -58,6 +58,9 @@ const camera_driver_t *camera_drivers[] = {
#ifdef ANDROID
&camera_android,
#endif
+#ifdef HAVE_AVF
+ &camera_avfoundation,
+#endif
#ifdef HAVE_FFMPEG
&camera_ffmpeg,
#endif
diff --git a/camera/drivers/avfoundation.m b/camera/drivers/avfoundation.m
new file mode 100644
index 0000000000..05f03043b5
--- /dev/null
+++ b/camera/drivers/avfoundation.m
@@ -0,0 +1,725 @@
+/* RetroArch - A frontend for libretro.
+ * Copyright (C) 2025 - Joseph Mattiello
+ *
+ * RetroArch is free software: you can redistribute it and/or modify it under the terms
+ * of the GNU General Public License as published by the Free Software Found-
+ * ation, either version 3 of the License, or (at your option) any later version.
+ *
+ * RetroArch is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+ * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ * PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along with RetroArch.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#import <AVFoundation/AVFoundation.h>
+#include "../camera/camera_driver.h"
+#include "../verbosity.h"
/// For image scaling and color space DSP
#import <Accelerate/Accelerate.h>
+#if TARGET_OS_IOS
/// For camera rotation detection
#import <UIKit/UIKit.h>
+#endif
+
+// TODO: Add an API to retroarch to allow selection of camera
+#ifndef CAMERA_PREFER_FRONTFACING
+#define CAMERA_PREFER_FRONTFACING 1 /// Default to front camera
+#endif
+
+#ifndef CAMERA_MIRROR_FRONT_CAMERA
+#define CAMERA_MIRROR_FRONT_CAMERA 1
+#endif
+
/**
 * Singleton wrapper around an AVCaptureSession. Converts incoming camera
 * frames to RGBA and writes them into frameBuffer for the driver to poll.
 *
 * Must conform to AVCaptureVideoDataOutputSampleBufferDelegate: the class
 * implements captureOutput:didOutputSampleBuffer:fromConnection: and is
 * installed via -[AVCaptureVideoDataOutput setSampleBufferDelegate:queue:]
 * (the conformance declaration was missing from the original interface).
 */
@interface AVCameraManager : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
/// Capture pipeline objects.
@property (strong, nonatomic) AVCaptureSession *session;
@property (strong, nonatomic) AVCaptureDeviceInput *input;
@property (strong, nonatomic) AVCaptureVideoDataOutput *output;
/// Destination pixel buffer (width * height RGBA pixels). Owned by the
/// driver (malloc/free in avfoundation_init/avfoundation_free), not by
/// this object.
@property (assign) uint32_t *frameBuffer;
@property (assign) size_t width;
@property (assign) size_t height;

- (bool)setupCameraSession;
@end
+
+@implementation AVCameraManager
+
/// Returns the process-wide camera manager, created exactly once.
+ (AVCameraManager *)sharedInstance {
    static AVCameraManager *shared = nil;
    static dispatch_once_t once;

    dispatch_once(&once, ^{
        shared = [[AVCameraManager alloc] init];
    });

    return shared;
}
+
/// Checks (and, when undetermined, prompts for) camera permission, then
/// invokes the completion block with the result. When the user is prompted,
/// the completion runs asynchronously on AVFoundation's callback queue;
/// otherwise it runs synchronously on the caller's thread.
- (void)requestCameraAuthorizationWithCompletion:(void (^)(BOOL granted))completion {
    RARCH_LOG("[Camera]: Checking camera authorization status\n");

    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];

    if (status == AVAuthorizationStatusAuthorized) {
        RARCH_LOG("[Camera]: Camera access already authorized\n");
        completion(YES);
        return;
    }

    if (status == AVAuthorizationStatusNotDetermined) {
        RARCH_LOG("[Camera]: Requesting camera authorization...\n");
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                 completionHandler:^(BOOL granted) {
            RARCH_LOG("[Camera]: Authorization %s\n", granted ? "granted" : "denied");
            completion(granted);
        }];
        return;
    }

    /* Denied, restricted, or an unknown status: report failure. */
    if (status == AVAuthorizationStatusDenied)
        RARCH_ERR("[Camera]: Camera access denied by user\n");
    else if (status == AVAuthorizationStatusRestricted)
        RARCH_ERR("[Camera]: Camera access restricted (parental controls?)\n");
    else
        RARCH_ERR("[Camera]: Unknown authorization status\n");
    completion(NO);
}
+
/// AVCaptureVideoDataOutputSampleBufferDelegate callback.
/// Pipeline per frame: convert the capture format to RGBA, rotate/mirror
/// for the platform and camera orientation, scale with aspect-fill, then
/// center-crop into self.frameBuffer for avfoundation_poll() to consume.
/// Runs on the main queue (set in setupCameraSession), which serializes
/// writes to frameBuffer with the poll path — assumes poll() also runs on
/// the main thread; TODO confirm.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    @autoreleasepool {
        /* Nothing to do until the driver has allocated a destination buffer. */
        if (!self.frameBuffer)
            return;

        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (!imageBuffer) {
            RARCH_ERR("[Camera]: Failed to get image buffer\n");
            return;
        }

        /* Base addresses are only valid while the pixel buffer is locked;
           every exit path below must unlock. */
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        size_t sourceWidth = CVPixelBufferGetWidth(imageBuffer);
        size_t sourceHeight = CVPixelBufferGetHeight(imageBuffer);
        OSType pixelFormat = CVPixelBufferGetPixelFormatType(imageBuffer);

#ifdef DEBUG
        RARCH_LOG("[Camera]: Processing frame %zux%zu format: %u\n", sourceWidth, sourceHeight, (unsigned int)pixelFormat);
#endif
        // Create intermediate buffer for full-size converted image.
        // NOTE(review): three temporaries are malloc'd/free'd on every frame
        // (intermediate, rotated, scaled); consider caching them per size if
        // this shows up in profiles.
        uint32_t *intermediateBuffer = (uint32_t*)malloc(sourceWidth * sourceHeight * 4);
        if (!intermediateBuffer) {
            RARCH_ERR("[Camera]: Failed to allocate intermediate buffer\n");
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return;
        }

        vImage_Buffer srcBuffer = {}, intermediateVBuffer = {}, dstBuffer = {};
        vImage_Error err = kvImageNoError;

        // Setup intermediate buffer
        intermediateVBuffer.data = intermediateBuffer;
        intermediateVBuffer.width = sourceWidth;
        intermediateVBuffer.height = sourceHeight;
        intermediateVBuffer.rowBytes = sourceWidth * 4;

        // Setup destination buffer (dstBuffer is not passed to vImage below;
        // the final copy into frameBuffer is done manually after cropping)
        dstBuffer.data = self.frameBuffer;
        dstBuffer.width = self.width;
        dstBuffer.height = self.height;
        dstBuffer.rowBytes = self.width * 4;

        // Convert source format to RGBA
        // NOTE(review): byte order {R,G,B,A} is produced here; confirm this
        // matches what the frontend expects from the raw framebuffer callback.
        switch (pixelFormat) {
            case kCVPixelFormatType_32BGRA: {
                srcBuffer.data = CVPixelBufferGetBaseAddress(imageBuffer);
                srcBuffer.width = sourceWidth;
                srcBuffer.height = sourceHeight;
                srcBuffer.rowBytes = CVPixelBufferGetBytesPerRow(imageBuffer);

                uint8_t permuteMap[4] = {2, 1, 0, 3}; // BGRA -> RGBA
                err = vImagePermuteChannels_ARGB8888(&srcBuffer, &intermediateVBuffer, permuteMap, kvImageNoFlags);
                break;
            }

            case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
            case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: {
                // YUV to RGB conversion (bi-planar: Y plane + interleaved CbCr plane)
                vImage_Buffer srcY = {}, srcCbCr = {};

                srcY.data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
                srcY.width = sourceWidth;
                srcY.height = sourceHeight;
                srcY.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

                /* 4:2:0 chroma plane is half resolution in both dimensions. */
                srcCbCr.data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
                srcCbCr.width = sourceWidth / 2;
                srcCbCr.height = sourceHeight / 2;
                srcCbCr.rowBytes = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);

                vImage_YpCbCrToARGB info;
                vImage_YpCbCrPixelRange pixelRange =
                    (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) ?
                    (vImage_YpCbCrPixelRange){16, 128, 235, 240} : // Video range
                    (vImage_YpCbCrPixelRange){0, 128, 255, 255};   // Full range

                err = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                                                    &pixelRange,
                                                                    &info,
                                                                    kvImage420Yp8_CbCr8,
                                                                    kvImageARGB8888,
                                                                    kvImageNoFlags);

                if (err == kvImageNoError) {
                    err = vImageConvert_420Yp8_CbCr8ToARGB8888(&srcY,
                                                               &srcCbCr,
                                                               &intermediateVBuffer,
                                                               &info,
                                                               NULL,
                                                               255,
                                                               kvImageNoFlags);
                }
                break;
            }

            default:
                RARCH_ERR("[Camera]: Unsupported pixel format: %u\n", (unsigned int)pixelFormat);
                free(intermediateBuffer);
                CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
                return;
        }

        if (err != kvImageNoError) {
            RARCH_ERR("[Camera]: Error converting color format: %ld\n", err);
            free(intermediateBuffer);
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return;
        }

        // Determine rotation based on platform and camera type
#if TARGET_OS_OSX
        int rotationDegrees = 0;  // macOS: frames already arrive upright
        bool shouldMirror = true; // mirror to match typical webcam preview
#else
        int rotationDegrees = 180; // Default 180-degree rotation for most cases
        bool shouldMirror = false;

        /// For camera rotation detection
        // NOTE(review): UIDevice.orientation is iOS-only and UIKit is only
        // imported under TARGET_OS_IOS, but this branch also covers tvOS —
        // verify tvOS builds if HAVE_AVF is ever enabled there.
        UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
        if (orientation == UIDeviceOrientationPortrait ||
            orientation == UIDeviceOrientationPortraitUpsideDown) {
            // In portrait mode, adjust rotation based on camera type
            if (self.input.device.position == AVCaptureDevicePositionFront) {
                rotationDegrees = 270;
#if CAMERA_MIRROR_FRONT_CAMERA
                // TODO: Add an API to retroarch to allow for mirroring of front camera
                shouldMirror = true; // Mirror front camera
#endif
                RARCH_LOG("[Camera]: Using 270-degree rotation with mirroring for front camera in portrait mode\n");
            }
        }
#endif

        // Rotate image
        vImage_Buffer rotatedBuffer = {};
        rotatedBuffer.data = malloc(sourceWidth * sourceHeight * 4);
        if (!rotatedBuffer.data) {
            RARCH_ERR("[Camera]: Failed to allocate rotation buffer\n");
            free(intermediateBuffer);
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return;
        }

        // Set dimensions based on rotation angle (90/270 swap width and height)
        if (rotationDegrees == 90 || rotationDegrees == 270) {
            rotatedBuffer.width = sourceHeight;
            rotatedBuffer.height = sourceWidth;
        } else {
            rotatedBuffer.width = sourceWidth;
            rotatedBuffer.height = sourceHeight;
        }
        rotatedBuffer.rowBytes = rotatedBuffer.width * 4;

        const Pixel_8888 backgroundColor = {0, 0, 0, 255};

        /* vImage rotation constant: 0/1/2/3 = 0/90/180/270 degrees. */
        err = vImageRotate90_ARGB8888(&intermediateVBuffer,
                                      &rotatedBuffer,
                                      rotationDegrees / 90,
                                      backgroundColor,
                                      kvImageNoFlags);

        if (err != kvImageNoError) {
            RARCH_ERR("[Camera]: Error rotating image: %ld\n", err);
            free(rotatedBuffer.data);
            free(intermediateBuffer);
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return;
        }

        // Mirror the image if needed
        if (shouldMirror) {
            vImage_Buffer mirroredBuffer = {};
            mirroredBuffer.data = malloc(rotatedBuffer.height * rotatedBuffer.rowBytes);
            if (!mirroredBuffer.data) {
                RARCH_ERR("[Camera]: Failed to allocate mirror buffer\n");
                free(rotatedBuffer.data);
                free(intermediateBuffer);
                CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
                return;
            }

            mirroredBuffer.width = rotatedBuffer.width;
            mirroredBuffer.height = rotatedBuffer.height;
            mirroredBuffer.rowBytes = rotatedBuffer.rowBytes;

            err = vImageHorizontalReflect_ARGB8888(&rotatedBuffer, &mirroredBuffer, kvImageNoFlags);

            if (err == kvImageNoError) {
                // Free rotated buffer and use mirrored buffer for scaling
                free(rotatedBuffer.data);
                rotatedBuffer = mirroredBuffer;
            } else {
                /* Best effort: log and fall through with the unmirrored image. */
                RARCH_ERR("[Camera]: Error mirroring image: %ld\n", err);
                free(mirroredBuffer.data);
            }
        }

        // Calculate aspect fill scaling: scale so the image covers the whole
        // destination, then crop the overflow (never letterbox).
        float sourceAspect = (float)rotatedBuffer.width / rotatedBuffer.height;
        float targetAspect = (float)self.width / self.height;

        vImage_Buffer scaledBuffer = {};
        size_t scaledWidth, scaledHeight;

        if (sourceAspect > targetAspect) {
            // Source is wider - scale to match height
            scaledHeight = self.height;
            scaledWidth = (size_t)(self.height * sourceAspect);
        } else {
            // Source is taller - scale to match width
            scaledWidth = self.width;
            scaledHeight = (size_t)(self.width / sourceAspect);
        }

        RARCH_LOG("[Camera]: Aspect fill scaling from %zux%zu to %zux%zu\n",
                  rotatedBuffer.width, rotatedBuffer.height, scaledWidth, scaledHeight);

        scaledBuffer.data = malloc(scaledWidth * scaledHeight * 4);
        if (!scaledBuffer.data) {
            RARCH_ERR("[Camera]: Failed to allocate scaled buffer\n");
            free(rotatedBuffer.data);
            free(intermediateBuffer);
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return;
        }

        scaledBuffer.width = scaledWidth;
        scaledBuffer.height = scaledHeight;
        scaledBuffer.rowBytes = scaledWidth * 4;

        // Scale maintaining aspect ratio
        err = vImageScale_ARGB8888(&rotatedBuffer, &scaledBuffer, NULL, kvImageHighQualityResampling);

        if (err != kvImageNoError) {
            RARCH_ERR("[Camera]: Error scaling image: %ld\n", err);
            free(scaledBuffer.data);
            free(rotatedBuffer.data);
            free(intermediateBuffer);
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return;
        }

        // Center crop the scaled image into the destination buffer
        // (aspect-fill guarantees scaled dimensions >= destination dimensions)
        size_t xOffset = (scaledWidth > self.width) ? (scaledWidth - self.width) / 2 : 0;
        size_t yOffset = (scaledHeight > self.height) ? (scaledHeight - self.height) / 2 : 0;

        // Copy the centered portion to the destination buffer row by row
        uint32_t *srcPtr = (uint32_t *)scaledBuffer.data;
        uint32_t *dstPtr = (uint32_t *)self.frameBuffer;

        for (size_t y = 0; y < self.height; y++) {
            memcpy(dstPtr + y * self.width,
                   srcPtr + (y + yOffset) * scaledWidth + xOffset,
                   self.width * 4);
        }

        // Clean up
        free(scaledBuffer.data);
        free(rotatedBuffer.data);
        free(intermediateBuffer);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    } // End of autorelease pool
}
+
/// Picks the capture device to use. Prefers a front-facing camera when
/// CAMERA_PREFER_FRONTFACING is nonzero; falls back to the first device
/// found. Returns nil when no camera is available.
- (AVCaptureDevice *)selectCameraDevice {
    RARCH_LOG("[Camera]: Selecting camera device\n");

    NSArray *devices;

#if TARGET_OS_OSX
    // On macOS, use default discovery method
    // Could probably do the same as iOS but need to test.
    // NOTE(review): devicesWithMediaType: is deprecated on modern macOS;
    // AVCaptureDeviceDiscoverySession works there too — worth unifying.
    devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
#else
    // On iOS/tvOS use modern discovery session
    NSArray *deviceTypes;
    if (@available(iOS 17.0, *)) {
        // AVCaptureDeviceTypeExternal is only available from iOS 17.
        deviceTypes = @[
            AVCaptureDeviceTypeExternal,
            AVCaptureDeviceTypeBuiltInWideAngleCamera,
            AVCaptureDeviceTypeBuiltInTelephotoCamera,
            AVCaptureDeviceTypeBuiltInUltraWideCamera,
            // AVCaptureDeviceTypeBuiltInDualCamera,
            // AVCaptureDeviceTypeBuiltInDualWideCamera,
            // AVCaptureDeviceTypeBuiltInTripleCamera,
            // AVCaptureDeviceTypeBuiltInTrueDepthCamera,
            // AVCaptureDeviceTypeBuiltInLiDARDepthCamera,
            // AVCaptureDeviceTypeContinuityCamera,
        ];
    } else {
        deviceTypes = @[
            AVCaptureDeviceTypeBuiltInWideAngleCamera,
            AVCaptureDeviceTypeBuiltInTelephotoCamera,
            AVCaptureDeviceTypeBuiltInUltraWideCamera,
            // AVCaptureDeviceTypeBuiltInDualCamera,
            // AVCaptureDeviceTypeBuiltInDualWideCamera,
            // AVCaptureDeviceTypeBuiltInTripleCamera,
            // AVCaptureDeviceTypeBuiltInTrueDepthCamera,
            // AVCaptureDeviceTypeBuiltInLiDARDepthCamera,
            // AVCaptureDeviceTypeContinuityCamera,
        ];
    }
    AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:deviceTypes
                              mediaType:AVMediaTypeVideo
                               position:AVCaptureDevicePositionUnspecified];

    devices = discoverySession.devices;
#endif

    if (devices.count == 0) {
        RARCH_ERR("[Camera]: No camera devices found\n");
        return nil;
    }

    // Log available devices
    for (AVCaptureDevice *device in devices) {
        RARCH_LOG("[Camera]: Found device: %s - Position: %d\n",
                  [device.localizedName UTF8String],
                  (int)device.position);
    }

#if TARGET_OS_OSX
    // macOS: Just use the first available camera if only one exists
    if (devices.count == 1) {
        RARCH_LOG("[Camera]: Using only available camera: %s\n",
                  [devices.firstObject.localizedName UTF8String]);
        return devices.firstObject;
    }

    // Try to match by name for built-in cameras: pick the first device whose
    // "front-facing-ness" (FaceTime/Front in the name) matches the preference.
    for (AVCaptureDevice *device in devices) {
        BOOL isFrontFacing = [device.localizedName containsString:@"FaceTime"] ||
                             [device.localizedName containsString:@"Front"];
        if (CAMERA_PREFER_FRONTFACING == isFrontFacing) {
            RARCH_LOG("[Camera]: Selected macOS camera: %s\n",
                      [device.localizedName UTF8String]);
            return device;
        }
    }
#else
    // iOS: Use position property
    AVCaptureDevicePosition preferredPosition = CAMERA_PREFER_FRONTFACING ?
        AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;

    // Try to find preferred camera
    for (AVCaptureDevice *device in devices) {
        if (device.position == preferredPosition) {
            RARCH_LOG("[Camera]: Selected iOS camera position: %d\n",
                      (int)preferredPosition);
            return device;
        }
    }
#endif

    // Fallback to first available camera
    RARCH_LOG("[Camera]: Using fallback camera: %s\n",
              [devices.firstObject.localizedName UTF8String]);
    return devices.firstObject;
}
+
/// Builds the capture session: selects a device, wires up its input, and
/// attaches a BGRA video-data output whose sample-buffer delegate is self
/// (delivered on the main queue). Returns false on any failure.
///
/// Fixes vs. original: the input-creation result is now checked via the
/// returned object rather than the error pointer (Cocoa convention), and a
/// failing canAddInput:/canAddOutput: is now treated as an error instead of
/// being silently ignored while still returning true.
- (bool)setupCameraSession {
    // Initialize capture session
    self.session = [[AVCaptureSession alloc] init];

    // Get camera device
    AVCaptureDevice *device = [self selectCameraDevice];
    if (!device) {
        RARCH_ERR("[Camera]: No camera device found\n");
        return false;
    }

    // Create device input; check the returned object, not the error pointer.
    NSError *error = nil;
    self.input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!self.input) {
        RARCH_ERR("[Camera]: Failed to create device input: %s\n",
                  [error.localizedDescription UTF8String]);
        return false;
    }

    if (![self.session canAddInput:self.input]) {
        RARCH_ERR("[Camera]: Cannot add camera input to session\n");
        return false;
    }
    [self.session addInput:self.input];
    RARCH_LOG("[Camera]: Added camera input to session\n");

    // Create and configure video output; request BGRA so the delegate's
    // fast 32BGRA conversion path is used.
    self.output = [[AVCaptureVideoDataOutput alloc] init];
    self.output.videoSettings = @{
        (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)
    };
    // Main queue: serializes frameBuffer writes with the poll path.
    [self.output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    if (![self.session canAddOutput:self.output]) {
        RARCH_ERR("[Camera]: Cannot add video output to session\n");
        return false;
    }
    [self.session addOutput:self.output];
    RARCH_LOG("[Camera]: Added video output to session\n");

    return true;
}
+
+@end
+
/* Driver handle returned by avfoundation_init() and passed back into every
 * other driver entry point. */
typedef struct
{
    AVCameraManager *manager; /* shared singleton doing the actual capture */
    unsigned width;           /* requested output width in pixels */
    unsigned height;          /* requested output height in pixels */
} avfoundation_t;
+
/**
 * Fills buffer (width * height 32-bit pixels) with eight vertical color
 * bars — used as a test pattern while the camera is not delivering frames.
 *
 * Fixes vs. original: a width not divisible by 8 produced colorIndex > 7
 * (out-of-bounds read of colors[]), and width < 8 made barWidth zero
 * (division by zero). Remainder pixels now reuse the last bar and narrow
 * buffers fall back to one pixel per bar.
 *
 * @param buffer destination pixel buffer, width * height elements
 * @param width  buffer width in pixels
 * @param height buffer height in pixels
 */
static void generateColorBars(uint32_t *buffer, size_t width, size_t height) {
    static const uint32_t colors[] = {
        0xFFFFFFFF, // White
        0xFFFFFF00, // Yellow
        0xFF00FFFF, // Cyan
        0xFF00FF00, // Green
        0xFFFF00FF, // Magenta
        0xFFFF0000, // Red
        0xFF0000FF, // Blue
        0xFF000000  // Black
    };
    const size_t numColors = sizeof(colors) / sizeof(colors[0]);

    size_t barWidth = width / numColors;
    if (barWidth == 0)
        barWidth = 1; /* width < 8: one pixel per bar, avoid division by zero */

    for (size_t y = 0; y < height; y++) {
        for (size_t x = 0; x < width; x++) {
            size_t colorIndex = x / barWidth;
            if (colorIndex >= numColors)
                colorIndex = numColors - 1; /* remainder pixels use the last bar */
            buffer[y * width + x] = colors[colorIndex];
        }
    }
}
+
/**
 * camera_driver_t::init — allocates the driver handle, checks camera
 * authorization, allocates the RGBA frame buffer, and starts the capture
 * session (always on the main thread, as AVFoundation setup expects).
 * Returns NULL on failure.
 *
 * BUGFIX vs. original: the main-thread path called free(avf) from inside
 * the asynchronous requestAccessForMediaType: completion handler while this
 * function continued to use 'avf' and returned it to the caller — a
 * use-after-free (and potential double free on avfoundation_free). The
 * status is now queried synchronously; the permission prompt, when needed,
 * is fired without freeing anything, and frames simply start flowing once
 * (and if) the user grants access.
 */
static void *avfoundation_init(const char *device, uint64_t caps,
      unsigned width, unsigned height)
{
    RARCH_LOG("[Camera]: Initializing AVFoundation camera %ux%u\n", width, height);

    avfoundation_t *avf = (avfoundation_t*)calloc(1, sizeof(avfoundation_t));
    if (!avf) {
        RARCH_ERR("[Camera]: Failed to allocate avfoundation_t\n");
        return NULL;
    }

    avf->manager = [AVCameraManager sharedInstance];
    avf->width = width;
    avf->height = height;
    avf->manager.width = width;
    avf->manager.height = height;

    /* Query authorization synchronously (on the main thread if we are not
       already there). */
    AVAuthorizationStatus status;
    if ([NSThread isMainThread]) {
        status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    } else {
        __block AVAuthorizationStatus blockStatus;
        dispatch_sync(dispatch_get_main_queue(), ^{
            blockStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        });
        status = blockStatus;
    }

    if (status == AVAuthorizationStatusNotDetermined) {
        /* First run: trigger the system prompt but keep initializing; the
           delegate will only start writing frames once access is granted,
           and poll() serves color bars in the meantime. Never free 'avf'
           from this handler — init returns it to the caller below. */
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                 completionHandler:^(BOOL granted) {
            RARCH_LOG("[Camera]: Authorization %s\n", granted ? "granted" : "denied");
        }];
    } else if (status != AVAuthorizationStatusAuthorized) {
        RARCH_ERR("[Camera]: Camera access not authorized (status: %d)\n", (int)status);
        free(avf);
        return NULL;
    }

    // Allocate frame buffer (RGBA, zero-initialized)
    avf->manager.frameBuffer = (uint32_t*)calloc(width * height, sizeof(uint32_t));
    if (!avf->manager.frameBuffer) {
        RARCH_ERR("[Camera]: Failed to allocate frame buffer\n");
        free(avf);
        return NULL;
    }

    // Initialize capture session on main thread
    __block bool setupSuccess = false;

    if ([NSThread isMainThread]) {
        @autoreleasepool {
            setupSuccess = [avf->manager setupCameraSession];
            if (setupSuccess) {
                [avf->manager.session startRunning];
                RARCH_LOG("[Camera]: Started camera session\n");
            }
        }
    } else {
        dispatch_sync(dispatch_get_main_queue(), ^{
            @autoreleasepool {
                setupSuccess = [avf->manager setupCameraSession];
                if (setupSuccess) {
                    [avf->manager.session startRunning];
                    RARCH_LOG("[Camera]: Started camera session\n");
                }
            }
        });
    }

    if (!setupSuccess || !avf->manager.session.isRunning) {
        if (!setupSuccess)
            RARCH_ERR("[Camera]: Failed to setup camera\n");
        else
            RARCH_ERR("[Camera]: Failed to start camera session\n");
        /* Clear the shared manager's pointer before freeing so the delegate
           can never write through a dangling buffer. */
        free(avf->manager.frameBuffer);
        avf->manager.frameBuffer = NULL;
        free(avf);
        return NULL;
    }

    RARCH_LOG("[Camera]: AVFoundation camera initialized and started successfully\n");
    return avf;
}
+
/* camera_driver_t::free — stops the capture session and releases the
 * driver handle plus its frame buffer. Safe to call with NULL. */
static void avfoundation_free(void *data)
{
    avfoundation_t *avf = (avfoundation_t*)data;

    if (!avf)
        return;

    RARCH_LOG("[Camera]: Freeing AVFoundation camera\n");

    if (avf->manager.session)
        [avf->manager.session stopRunning];

    /* Detach the buffer from the shared manager before releasing it. */
    uint32_t *buffer = avf->manager.frameBuffer;
    if (buffer) {
        avf->manager.frameBuffer = NULL;
        free(buffer);
    }

    free(avf);
    RARCH_LOG("[Camera]: AVFoundation camera freed\n");
}
+
/* camera_driver_t::start — (re)starts the capture session.
 * NOTE(review): startRunning is dispatched to a background queue and we
 * only wait a fixed 100 ms before sampling isRunning, so this can return
 * false even though the session comes up a moment later (poll() then falls
 * back to color bars until it does). A bounded wait loop, or starting
 * synchronously off-thread, would make the return value reliable. */
static bool avfoundation_start(void *data)
{
    avfoundation_t *avf = (avfoundation_t*)data;
    if (!avf || !avf->manager.session) {
        RARCH_ERR("[Camera]: Cannot start - invalid data\n");
        return false;
    }

    RARCH_LOG("[Camera]: Starting AVFoundation camera\n");

    /* startRunning blocks, so keep it off the calling (likely UI) thread. */
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [avf->manager.session startRunning];
        RARCH_LOG("[Camera]: Camera session started on background thread\n");
    });

    // Give the session a moment to start
    usleep(100000); // 100ms

    bool isRunning = avf->manager.session.isRunning;
    RARCH_LOG("[Camera]: Camera session running: %s\n", isRunning ? "YES" : "NO");
    return isRunning;
}
+
/* camera_driver_t::stop — stops the capture session asynchronously on a
 * background queue (stopRunning blocks). Safe to call with NULL. */
static void avfoundation_stop(void *data)
{
    avfoundation_t *avf = (avfoundation_t*)data;

    if (!avf)
        return;
    if (!avf->manager.session)
        return;

    RARCH_LOG("[Camera]: Stopping AVFoundation camera\n");

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [avf->manager.session stopRunning];
        RARCH_LOG("[Camera]: Camera session stopped on background thread\n");
    });
}
+
/* camera_driver_t::poll — hands the most recent converted frame to the
 * core via frame_raw_cb (pitch is width * 4 bytes). While the session is
 * not running (e.g. permission still pending) a color-bar test pattern is
 * delivered instead. frame_gl_cb is unused; only raw frames are supported. */
static bool avfoundation_poll(void *data,
      retro_camera_frame_raw_framebuffer_t frame_raw_cb,
      retro_camera_frame_opengl_texture_t frame_gl_cb)
{
    avfoundation_t *avf = (avfoundation_t*)data;

    if (!avf || !frame_raw_cb) {
        RARCH_ERR("[Camera]: Cannot poll - invalid data or callback\n");
        return false;
    }

    if (avf->manager.session.isRunning) {
#ifdef DEBUG
        RARCH_LOG("[Camera]: Delivering camera frame\n");
#endif
        frame_raw_cb(avf->manager.frameBuffer, avf->width, avf->height, avf->width * 4);
        return true;
    }

    /* Fallback: synthesize a color-bar frame. */
    RARCH_LOG("[Camera]: Camera not running, generating color bars\n");
    uint32_t *bars = (uint32_t*)calloc(avf->width * avf->height, sizeof(uint32_t));
    if (!bars)
        return false;

    generateColorBars(bars, avf->width, avf->height);
    frame_raw_cb(bars, avf->width, avf->height, avf->width * 4);
    free(bars);
    return true;
}
+
/* AVFoundation camera driver vtable, registered in camera_drivers[]
 * (camera/camera_driver.c) under HAVE_AVF. */
camera_driver_t camera_avfoundation = {
    avfoundation_init,
    avfoundation_free,
    avfoundation_start,
    avfoundation_stop,
    avfoundation_poll,
    "avfoundation" /* ident */
};
diff --git a/griffin/griffin_objc.m b/griffin/griffin_objc.m
index c6ce469920..c670919e65 100644
--- a/griffin/griffin_objc.m
+++ b/griffin/griffin_objc.m
@@ -65,6 +65,10 @@
#include "../location/drivers/corelocation.m"
#endif
+#ifdef HAVE_AVF
+#include "../camera/drivers/avfoundation.m"
+#endif
+
#if defined(HAVE_DISCORD)
#include "../deps/discord-rpc/src/discord_register_osx.m"
#endif
diff --git a/pkg/apple/BaseConfig.xcconfig b/pkg/apple/BaseConfig.xcconfig
index c570f53e1d..2ba4deadbc 100644
--- a/pkg/apple/BaseConfig.xcconfig
+++ b/pkg/apple/BaseConfig.xcconfig
@@ -91,6 +91,7 @@ OTHER_CFLAGS[arch=x86_64] = $(inherited) -DHAVE_SSE
OTHER_CFLAGS[arch=arm64*] = $(inherited) -D__ARM_NEON__ -DHAVE_NEON
OTHER_CFLAGS[sdk=macosx*] = $(inherited) -DGL_SILENCE_DEPRECATION
+OTHER_CFLAGS[sdk=macosx*] = $(inherited) -DHAVE_AVF
OTHER_CFLAGS[sdk=macosx*] = $(inherited) -DHAVE_COMMAND
OTHER_CFLAGS[sdk=macosx*] = $(inherited) -DHAVE_COREAUDIO3
OTHER_CFLAGS[sdk=macosx*] = $(inherited) -DHAVE_COREMIDI
@@ -124,6 +125,7 @@ OTHER_CFLAGS[sdk=iphonesimulator*] = $(inherited) $(OTHER_CFLAGS_IOS_TVOS_SHARE)
OTHER_CFLAGS[sdk=appletvos*] = $(inherited) $(OTHER_CFLAGS_IOS_TVOS_SHARE)
OTHER_CFLAGS[sdk=appletvsimulator*] = $(inherited) $(OTHER_CFLAGS_IOS_TVOS_SHARE)
+OTHER_CFLAGS_IOS = $(inherited) -DHAVE_AVF
OTHER_CFLAGS_IOS = $(inherited) -DHAVE_COREMIDI
OTHER_CFLAGS_IOS = $(inherited) -DHAVE_COREMOTION
OTHER_CFLAGS_IOS = $(inherited) -DHAVE_IOS_CUSTOMKEYBOARD
diff --git a/pkg/apple/RetroArch_Metal.xcodeproj/project.pbxproj b/pkg/apple/RetroArch_Metal.xcodeproj/project.pbxproj
index 89a4aff5b4..19549c5663 100644
--- a/pkg/apple/RetroArch_Metal.xcodeproj/project.pbxproj
+++ b/pkg/apple/RetroArch_Metal.xcodeproj/project.pbxproj
@@ -38,6 +38,9 @@
05A8E23C20A63CF50084ABDA /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05A8E23B20A63CF50084ABDA /* QuartzCore.framework */; };
05D7753520A567A400646447 /* griffin_cpp.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 05D7753320A5678300646447 /* griffin_cpp.cpp */; };
05D7753720A567A700646447 /* griffin_glslang.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 05D7753420A5678400646447 /* griffin_glslang.cpp */; };
+ 0703E3292D76B6ED00D7B9BE /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0703E3282D76B6ED00D7B9BE /* CoreMedia.framework */; };
+ 0703E32A2D76B6F400D7B9BE /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0703E3282D76B6ED00D7B9BE /* CoreMedia.framework */; };
+ 0703E32B2D76B6FA00D7B9BE /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0703E3282D76B6ED00D7B9BE /* CoreMedia.framework */; };
07097FFB2D60F4C80021608F /* CoreMIDI.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 07097FFA2D60F4C80021608F /* CoreMIDI.framework */; };
07097FFC2D60F4D00021608F /* CoreMIDI.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 07097FFA2D60F4C80021608F /* CoreMIDI.framework */; };
07097FFD2D60F4D60021608F /* CoreMIDI.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 07097FFA2D60F4C80021608F /* CoreMIDI.framework */; };
@@ -68,6 +71,9 @@
0720995929B1258C001642BB /* IOKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 84DD5EB41A89E737007336C1 /* IOKit.framework */; };
072976DD296284F600D6E00C /* OpenGL.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 072976DC296284F600D6E00C /* OpenGL.framework */; };
0746953A2997393000CCB7BD /* GameController.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 074695362995C03900CCB7BD /* GameController.framework */; };
+ 074A924B2D76B1850084364A /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 074A924A2D76B1850084364A /* Accelerate.framework */; };
+ 074A924C2D76B18D0084364A /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 074A924A2D76B1850084364A /* Accelerate.framework */; };
+ 074A924D2D76B19A0084364A /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 074A924A2D76B1850084364A /* Accelerate.framework */; };
075650252C488918004C5E7E /* CloudKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 075650242C488918004C5E7E /* CloudKit.framework */; };
076512622D64E99200E1F6BE /* CoreLocation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 076512612D64E99200E1F6BE /* CoreLocation.framework */; };
076512632D64E99A00E1F6BE /* CoreLocation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 076512612D64E99200E1F6BE /* CoreLocation.framework */; };
@@ -449,12 +455,14 @@
05F2873F20F2BEEA00632D47 /* task_content.c */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.c; path = task_content.c; sourceTree = "<group>"; };
05F2874020F2BEEA00632D47 /* task_http.c */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.c; path = task_http.c; sourceTree = "<group>"; };
05F2874120F2BEEA00632D47 /* task_patch.c */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.c; path = task_patch.c; sourceTree = "<group>"; };
+ 0703E3282D76B6ED00D7B9BE /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
07097FFA2D60F4C80021608F /* CoreMIDI.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMIDI.framework; path = System/Library/Frameworks/CoreMIDI.framework; sourceTree = SDKROOT; };
070A883F2A4E7A1B003161C0 /* OpenAL.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenAL.framework; path = System/Library/Frameworks/OpenAL.framework; sourceTree = SDKROOT; };
071051BE2BEFEFBA009C29D8 /* Info_AppStore.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; name = Info_AppStore.plist; path = OSX/Info_AppStore.plist; sourceTree = "<group>"; };
0720996029B1258C001642BB /* RetroArch.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = RetroArch.app; sourceTree = BUILT_PRODUCTS_DIR; };
072976DC296284F600D6E00C /* OpenGL.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGL.framework; path = System/Library/Frameworks/OpenGL.framework; sourceTree = SDKROOT; };
074695362995C03900CCB7BD /* GameController.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = GameController.framework; path = System/Library/Frameworks/GameController.framework; sourceTree = SDKROOT; };
+ 074A924A2D76B1850084364A /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
075650242C488918004C5E7E /* CloudKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CloudKit.framework; path = System/Library/Frameworks/CloudKit.framework; sourceTree = SDKROOT; };
075650262C48B417004C5E7E /* RetroArchCI.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = RetroArchCI.entitlements; sourceTree = "<group>"; };
076512612D64E99200E1F6BE /* CoreLocation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreLocation.framework; path = System/Library/Frameworks/CoreLocation.framework; sourceTree = SDKROOT; };
@@ -525,6 +533,7 @@
0720994C29B1258C001642BB /* OpenGL.framework in Frameworks */,
0720994D29B1258C001642BB /* GameController.framework in Frameworks */,
0720994F29B1258C001642BB /* AVFoundation.framework in Frameworks */,
+ 0703E3292D76B6ED00D7B9BE /* CoreMedia.framework in Frameworks */,
0720995029B1258C001642BB /* QuartzCore.framework in Frameworks */,
0720995129B1258C001642BB /* IOSurface.framework in Frameworks */,
0720995229B1258C001642BB /* CoreHaptics.framework in Frameworks */,
@@ -536,6 +545,7 @@
076512642D64E9A000E1F6BE /* CoreLocation.framework in Frameworks */,
0720995629B1258C001642BB /* CoreAudio.framework in Frameworks */,
0720995729B1258C001642BB /* AudioUnit.framework in Frameworks */,
+ 074A924D2D76B19A0084364A /* Accelerate.framework in Frameworks */,
0720995829B1258C001642BB /* AppKit.framework in Frameworks */,
0720995929B1258C001642BB /* IOKit.framework in Frameworks */,
);
@@ -548,6 +558,8 @@
07F2BBD32BE83A4700FD1295 /* AudioToolbox.framework in Frameworks */,
07F2BBD42BE83A4700FD1295 /* OpenGL.framework in Frameworks */,
07F2BBD52BE83A4700FD1295 /* GameController.framework in Frameworks */,
+ 074A924C2D76B18D0084364A /* Accelerate.framework in Frameworks */,
+ 0703E32B2D76B6FA00D7B9BE /* CoreMedia.framework in Frameworks */,
07F2BBD72BE83A4700FD1295 /* AVFoundation.framework in Frameworks */,
07F2BBD82BE83A4700FD1295 /* QuartzCore.framework in Frameworks */,
07F2BBD92BE83A4700FD1295 /* IOSurface.framework in Frameworks */,
@@ -573,6 +585,8 @@
D27C508C2228362700113BC0 /* AudioToolbox.framework in Frameworks */,
072976DD296284F600D6E00C /* OpenGL.framework in Frameworks */,
0746953A2997393000CCB7BD /* GameController.framework in Frameworks */,
+ 074A924B2D76B1850084364A /* Accelerate.framework in Frameworks */,
+ 0703E32A2D76B6F400D7B9BE /* CoreMedia.framework in Frameworks */,
D27C508B2228361D00113BC0 /* AVFoundation.framework in Frameworks */,
05A8E23C20A63CF50084ABDA /* QuartzCore.framework in Frameworks */,
05A8E23A20A63CED0084ABDA /* IOSurface.framework in Frameworks */,
@@ -1259,6 +1273,8 @@
29B97323FDCFA39411CA2CEA /* Frameworks */ = {
isa = PBXGroup;
children = (
+ 0703E3282D76B6ED00D7B9BE /* CoreMedia.framework */,
+ 074A924A2D76B1850084364A /* Accelerate.framework */,
076512612D64E99200E1F6BE /* CoreLocation.framework */,
07097FFA2D60F4C80021608F /* CoreMIDI.framework */,
075650242C488918004C5E7E /* CloudKit.framework */,