
AVFoundation (Objective-C, iOS 16): captureOutput not being called


I have an issue with the captureOutput method not being called during the session. I tried adding NSLog to it, but it seems the method isn't called. I also tried searching for a solution, but didn't find anything.

#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
#import <UIKit/UIImage.h>

@interface Cam : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
-(void)captureOutput:(AVCaptureOutput *)output
       didOutputSampleBuffer:(CMSampleBufferRef)buffer
       fromConnection:(AVCaptureConnection *)connection;
@end

@interface Cam ()
{
}

-(BOOL)start:(int)deviceIndex;
-(void)stop;
-(NSData *)getFrame;
@end

@implementation Cam

Cam *cam;
CVImageBufferRef head;
AVCaptureSession *session;
int count;

-(id)init
{
    self = [super init];
    head = nil;
    count = 0;
    return self;
}

-(void)dealloc
{
    @synchronized (self)
    {
        if (head != nil)
        {
            CFRelease(head);
        }
    }
}

-(BOOL)start:(int)deviceIndex
{
    int index;
    NSArray *devices;
    AVCaptureDeviceDiscoverySession *discoverySession;

    AVCaptureDevice *device;
    AVCaptureDeviceInput *input;
    AVCaptureVideoDataOutput *output;
    NSError *error;
    dispatch_queue_t queue;

    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetMedium;

    discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                        mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified];
    devices = discoverySession.devices;
    index = deviceIndex;

    if (index < 0 || index >= [devices count])
    {
        log_debug("* Didn't open machine (%d)n", index);
        return NO;
    }

    device = devices[index];
    input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];

    if (!input)
    {
        log_debug("* Didn't seize enter (%s)n", [error.localizedDescription UTF8String]);
        return NO;
    }

    [session addInput:input];
    output = [[AVCaptureVideoDataOutput alloc] init];
    [session addOutput:output];

    queue = dispatch_queue_create("cam_queue", NULL);
    [output setAlwaysDiscardsLateVideoFrames:YES];
    [output setVideoSettings:@{(NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
    [output setSampleBufferDelegate:self queue:queue];
    [session startRunning];

    return YES;
}

-(void)stop
{
    [session stopRunning];
}

-(NSData *)getFrame
{
    int timer;

    CIImage *ciImage;
    CIContext *temporaryContext;
    CGImageRef videoImage;
    UIImage *uiImage;
    NSData *frame;

    for (timer = 0; timer < 500; timer++)
    {
        if (count > 5)
        {
            break;
        }

        usleep(10000);
    }

    @synchronized (self)
    {
        if (head == nil)
        {
            log_debug("* Head is by some means nil (rely: %d)n", rely);
            return nil;
        }

        ciImage = [[CIImage imageWithCVPixelBuffer:head] imageByApplyingOrientation:6];
        temporaryContext = [CIContext contextWithOptions:nil];
        videoImage = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0,
                                        CVPixelBufferGetHeight(head),
                                        CVPixelBufferGetWidth(head))];
        uiImage = [[UIImage alloc] initWithCGImage:videoImage];
        frame = UIImageJPEGRepresentation(uiImage, 1.0);
        CGImageRelease(videoImage);

        return frame;
    }

    return nil;
}

-(void)captureOutput:(AVCaptureOutput *)output
       didOutputSampleBuffer:(CMSampleBufferRef)buffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef frame;
    CVImageBufferRef prev;

    frame = CMSampleBufferGetImageBuffer(buffer);
    CFRetain(frame);

    @synchronized (self)
    {
        prev = head;
        head = frame;
        count++;
    }

    if (prev != nil)
    {
        CFRelease(prev);
    }
}

@end
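
The NSLog I mentioned was simply added at the top of the delegate method, roughly like this (the exact message doesn't matter); it never prints:

-(void)captureOutput:(AVCaptureOutput *)output
       didOutputSampleBuffer:(CMSampleBufferRef)buffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"captureOutput called");   // never appears in the console
    ...
}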

So, what I do is basically:

...
NSData *frame;

@autoreleasepool
{
    cam = [[Cam alloc] init];

    if ([cam start:camID])
    {
        frame = [cam getFrame];
        if (frame == nil)
            log_debug("* Frame is somehow nil?\n");
    }
    else
    {
        cam = nil;
    }
}
...

As a result, I get "* Frame is somehow nil?" and "* Head is somehow nil (count: 0)".
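
If it helps, I can also log the session state right after [session startRunning] inside start: (just a sketch of what I would add, I haven't captured its output here):

    NSLog(@"running: %d, inputs: %lu, outputs: %lu, connections: %lu",
          session.running,
          (unsigned long)session.inputs.count,
          (unsigned long)session.outputs.count,
          (unsigned long)output.connections.count);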

I don't know what else I can do or fix. Please help me with this issue.

Thanks in advance.
