Skip to content

Commit

Permalink
Fix iOS camera permission request and code formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
j20001970 committed May 15, 2024
1 parent c3f2bef commit 4dcf184
Show file tree
Hide file tree
Showing 2 changed files with 87 additions and 76 deletions.
1 change: 1 addition & 0 deletions GDMP/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -37,5 +37,6 @@ refresh_compile_commands(
--crosstool_top=https://external:android/crosstool
--cpu=arm64-v8a
""",
"//GDMP/io:camera_helper_ios": "--config=ios",
},
)
162 changes: 86 additions & 76 deletions GDMP/io/camera_helper_ios.mm
Original file line number Diff line number Diff line change
@@ -1,109 +1,119 @@
#include "camera_helper.h"

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

#include "mediapipe/gpu/gpu_buffer.h"

#include "GDMP/framework/image.h"

@class OutputDelegate;
@interface OutputDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@interface OutputDelegate
: NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic) MediaPipeCameraHelper *camera_helper;
@end

@implementation OutputDelegate
- (instancetype)init:(MediaPipeCameraHelper *)camera_helper {
self = [super init];
self.camera_helper = camera_helper;
self = [super init];
self.camera_helper = camera_helper;
return self;
}
- (void)captureOutput:(AVCaptureOutput*)captureOutput
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection*)connection {
CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
Ref<MediaPipeImage> image = memnew(MediaPipeImage(imageBuffer));
self.camera_helper->emit_signal("new_frame", image);
fromConnection:(AVCaptureConnection *)connection {
CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
Ref<MediaPipeImage> image = memnew(MediaPipeImage(imageBuffer));
self.camera_helper->emit_signal("new_frame", image);
}
@end

class CameraHelperIOS : public CameraHelperImpl {
private:
AVCaptureDeviceInput* videoDeviceInput;
AVCaptureSession* session;
AVCaptureVideoDataOutput* videoDataOutput;
OutputDelegate *delegate;
dispatch_queue_t delegateQueue;
private:
AVCaptureSession *session;
AVCaptureVideoDataOutput *videoDataOutput;
OutputDelegate *delegate;

public:
CameraHelperIOS(MediaPipeCameraHelper *camera_helper) : CameraHelperImpl(camera_helper) {
dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
delegateQueue = dispatch_queue_create(NULL, qosAttribute);
delegate = [[OutputDelegate alloc] init:camera_helper];
}
public:
CameraHelperIOS(MediaPipeCameraHelper *camera_helper)
: CameraHelperImpl(camera_helper) {
delegate = [[OutputDelegate alloc] init:camera_helper];
}

~CameraHelperIOS() = default;
~CameraHelperIOS() = default;

// Returns true when the user has already authorized camera (video) access.
bool permission_granted() {
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
return status == AVAuthorizationStatusAuthorized;
}
// Returns true when the user has already authorized camera (video) access.
bool permission_granted() {
  // Anything other than explicit authorization (denied, restricted,
  // not-yet-determined) counts as "not granted".
  return [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] ==
         AVAuthorizationStatusAuthorized;
}

// Prompts the user for camera access. The result is delivered
// asynchronously by emitting the "permission_result" signal on
// `camera_helper` with the granted flag.
void request_permission(MediaPipeCameraHelper *camera_helper) {
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
completionHandler:^(BOOL granted) {
camera_helper->emit_signal("permission_result", granted);
}];
}
// Prompts the user for camera access. The result is delivered
// asynchronously by emitting the "permission_result" signal with the
// granted flag.
void request_permission() {
  // Name the completion handler for readability; it forwards the user's
  // answer back through the camera helper's signal.
  void (^onPermissionResult)(BOOL) = ^(BOOL granted) {
    camera_helper->emit_signal("permission_result", granted);
  };
  [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                           completionHandler:onPermissionResult];
}

// Builds and starts an AVFoundation capture session.
// `index` selects the camera position: 0 = front, anything else = back.
// NOTE(review): `size` is never read; the preset is fixed at 640x480.
void start(int index, Vector2 size) {
ERR_FAIL_COND(delegate == nil);
session = [[AVCaptureSession alloc] init];
AVCaptureDevicePosition position;
if(index == 0) {
position = AVCaptureDevicePositionFront;
}
else {
position = AVCaptureDevicePositionBack;
}
// Look up the built-in wide-angle camera at the requested position,
// falling back to the system default video device.
// NOTE(review): `[deviceDiscoverySession devices]` returns an array, which
// is likely non-nil even when empty — confirm the fallback ever triggers.
AVCaptureDeviceType deviceType = AVCaptureDeviceTypeBuiltInWideAngleCamera;
AVCaptureDeviceDiscoverySession* deviceDiscoverySession = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:@[deviceType]
mediaType:AVMediaTypeVideo
position:position];
AVCaptureDevice* videoDevice =
[deviceDiscoverySession devices]
? deviceDiscoverySession.devices.firstObject
: [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// NOTE(review): `error` is never checked after this call; a nil input
// would be added to the session silently.
NSError* error = nil;
videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
[session addInput:videoDeviceInput];
// Deliver 32BGRA frames to the delegate; late frames are dropped rather
// than queued so the preview stays live.
videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
videoDataOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
[videoDataOutput setSampleBufferDelegate:delegate queue:delegateQueue];
[session addOutput:videoDataOutput];
session.sessionPreset = AVCaptureSessionPreset640x480;
// Match the video orientation to the device orientation and mirror the
// front camera, as users expect from a selfie view.
// NOTE(review): UIDeviceOrientation and AVCaptureVideoOrientation enums
// are not identical (face-up/face-down have no video equivalent) — the
// raw cast may produce an unexpected orientation; confirm.
AVCaptureConnection* connection = [videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
connection.videoOrientation = (AVCaptureVideoOrientation)UIDevice.currentDevice.orientation;
if (position == AVCaptureDevicePositionFront) {
connection.videoMirrored = YES;
}
[session startRunning];
}

// Builds and starts an AVFoundation capture session.
// `index` selects the camera position: 0 = front, anything else = back.
// NOTE(review): `size` is never read; the preset is fixed at 640x480.
void start(int index, Vector2 size) {
ERR_FAIL_COND(delegate == nil);
session = [[AVCaptureSession alloc] init];
AVCaptureDevicePosition position;
if (index == 0) {
position = AVCaptureDevicePositionFront;
} else {
position = AVCaptureDevicePositionBack;
}
// Look up the built-in wide-angle camera at the requested position,
// falling back to the system default video device.
AVCaptureDeviceType deviceType = AVCaptureDeviceTypeBuiltInWideAngleCamera;
AVCaptureDeviceDiscoverySession *deviceDiscoverySession =
[AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:@[ deviceType ]
mediaType:AVMediaTypeVideo
position:position];
AVCaptureDevice *videoDevice =
[deviceDiscoverySession devices]
? deviceDiscoverySession.devices.firstObject
: [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Bail out early if the device input could not be created (e.g. the
// camera is unavailable or permission was denied).
NSError *error = nil;
AVCaptureDeviceInput *videoDeviceInput =
[AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error) {
ERR_PRINT(error.description.UTF8String);
return;
}
[session addInput:videoDeviceInput];
// Deliver 32BGRA frames to the delegate; late frames are dropped rather
// than queued so the preview stays live.
videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
videoDataOutput.videoSettings =
@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
// Frames are delivered on a dedicated serial queue at user-interactive
// QoS so capture callbacks are not starved by background work.
dispatch_queue_attr_t qosAttribute =
dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL,
QOS_CLASS_USER_INTERACTIVE, 0);
dispatch_queue_t delegateQueue = dispatch_queue_create(NULL, qosAttribute);
[videoDataOutput setSampleBufferDelegate:delegate queue:delegateQueue];
[session addOutput:videoDataOutput];
session.sessionPreset = AVCaptureSessionPreset640x480;
// Match the video orientation to the device orientation and mirror the
// front camera, as users expect from a selfie view.
// NOTE(review): UIDeviceOrientation and AVCaptureVideoOrientation enums
// are not identical (face-up/face-down have no video equivalent) — the
// raw cast may produce an unexpected orientation; confirm.
AVCaptureConnection *connection =
[videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
connection.videoOrientation =
(AVCaptureVideoOrientation)UIDevice.currentDevice.orientation;
if (position == AVCaptureDevicePositionFront) {
connection.videoMirrored = YES;
}
[session startRunning];
}

// Stops the capture session, if any, and releases it.
void close() {
if(session != nil) {
[session stopRunning];
session = nil;
}
}
// Stops the capture session, if any, and releases it.
void close() {
  // Nothing to do when no session was ever started (or it was already
  // closed).
  if (session == nil) {
    return;
  }
  [session stopRunning];
  session = nil;
}
};

MediaPipeCameraHelper::MediaPipeCameraHelper() {
impl = std::make_unique<CameraHelperIOS>(this);
impl = std::make_unique<CameraHelperIOS>(this);
}

MediaPipeCameraHelper::~MediaPipeCameraHelper() = default;

0 comments on commit 4dcf184

Please sign in to comment.