AVF 8 - Media capture
- Note: an NSCameraUsageDescription entry is required (Info.plist) before camera access
- iOS cameras may offer more capabilities than Mac cameras
// Class extension: private capture state. Conforms to the movie-file
// recording delegate so this object receives recording-finished callbacks.
@interface Capture ()<AVCaptureFileOutputRecordingDelegate>
// Session connecting inputs (camera/mic) to outputs (still image, movie file).
@property (strong, nonatomic) AVCaptureSession *captureSession;
// Currently attached camera input; weak because the session retains it.
@property (weak, nonatomic) AVCaptureDeviceInput *activeVideoInput;
// NOTE(review): AVCaptureStillImageOutput is deprecated in favor of
// AVCapturePhotoOutput on modern SDKs; kept to match existing call sites.
@property (strong, nonatomic) AVCaptureStillImageOutput *imageOutput;
@property (strong, nonatomic) AVCaptureMovieFileOutput *movieOutput;
// Destination of the in-flight recording; cleared when recording finishes.
@property (strong, nonatomic) NSURL *outputURL;
// Host window whose contentView receives the preview layer.
// NOTE(review): a strong window reference can form a cycle if the window
// (indirectly) owns this object — confirm ownership before changing.
@property (nonatomic, strong) NSWindow *window;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewViewLayer;
@end
@implementation Capture
// Entry point: builds the session, attaches the preview layer, starts the
// session, then records a short demo clip (start at +3s, stop at +5s).
- (void)setup{
NSError *error = nil;
if (![self setupSession:&error]) {
// The previous version passed nil and ignored the result, so any
// configuration failure was silent; surface it and stop instead.
NSLog(@"setupSession failed: %@", error);
return;
}
[self setupPreviewLayer];
[self startSession];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(3 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[self startRecording];
});
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[self stopRecording];
});
}
// Builds the capture session: video + audio device inputs, a JPEG still-image
// output, and a QuickTime movie-file output.
// Returns NO when a device input could not be created (*error is filled in by
// AVFoundation) or when the video input could not be attached to the session.
// NOTE(review): AVCaptureStillImageOutput is deprecated in favor of
// AVCapturePhotoOutput; kept here to preserve the existing interface.
- (BOOL)setupSession:(NSError **)error {
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
// Default video device (built-in camera).
AVCaptureDevice *videoDevice =
[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *videoInput =
[AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
if (!videoInput) {
return NO;
}
if ([self.captureSession canAddInput:videoInput]) {
[self.captureSession addInput:videoInput];
self.activeVideoInput = videoInput;
} else {
// Previously this failure was silent and still returned YES, leaving
// activeVideoInput nil; a session without video is useless here.
NSLog(@"can not add video input");
return NO;
}
// Default audio device (built-in microphone).
AVCaptureDevice *audioDevice =
[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput =
[AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
if (!audioInput) {
return NO;
}
if ([self.captureSession canAddInput:audioInput]) {
[self.captureSession addInput:audioInput];
} else {
// Missing audio is non-fatal: video-only recording still works.
NSLog(@"can not add audio input");
}
// Still-image output configured to produce JPEG data.
self.imageOutput = [[AVCaptureStillImageOutput alloc] init];
self.imageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
if ([self.captureSession canAddOutput:self.imageOutput]) {
[self.captureSession addOutput:self.imageOutput];
}else{
NSLog(@"can not add image output");
}
// QuickTime movie-file output used by start/stopRecording.
self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([self.captureSession canAddOutput:self.movieOutput]) {
[self.captureSession addOutput:self.movieOutput];
}else{
NSLog(@"can not add movie output");
}
return YES;
}
// Attaches a live camera preview layer to the window's content view.
// On macOS, NSView is not layer-backed by default, so contentView.layer is
// nil and addSublayer: silently does nothing; wantsLayer = YES fixes that.
- (void)setupPreviewLayer{
self.window.contentView.wantsLayer = YES;
self.previewViewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:self.captureSession];
self.previewViewLayer.frame = NSMakeRect(10, 10, 300, 300);
[self.previewViewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[self.window.contentView.layer addSublayer:self.previewViewLayer];
}
// Starts the capture session off the main thread (startRunning blocks until
// the session is up). Does nothing when the session is already running.
- (void)startSession {
if ([self.captureSession isRunning]) {
return;
}
dispatch_async([self globalQueue], ^{
[self.captureSession startRunning];
});
}
// Stops the capture session off the main thread (stopRunning also blocks).
// Does nothing when the session is not running.
- (void)stopSession {
if (![self.captureSession isRunning]) {
return;
}
dispatch_async([self globalQueue], ^{
[self.captureSession stopRunning];
});
}
// Serial queue used for session start/stop work. The previous global
// concurrent queue gave no ordering guarantee, so a quick start-then-stop
// could execute stopRunning before startRunning; a dedicated serial queue
// preserves submission order. Interface (name/return type) is unchanged.
- (dispatch_queue_t)globalQueue {
static dispatch_queue_t sessionQueue;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
sessionQueue = dispatch_queue_create("com.capture.sessionQueue", DISPATCH_QUEUE_SERIAL);
});
return sessionQueue;
}
#pragma mark - Image Capture Methods
// Captures one still frame from the active video connection and hands the
// JPEG data off to writeImageToAssetsLibrary:. Bails out with a log message
// if there is no video connection (e.g. the session never started), since
// passing a nil connection to the capture call raises an exception.
- (void)captureStillImage {
AVCaptureConnection *connection =
[self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
if (!connection) {
NSLog(@"no video connection for still image capture");
return;
}
// Explicitly typed block (was `id`) so the compiler checks the signature.
void (^handler)(CMSampleBufferRef, NSError *) =
^(CMSampleBufferRef sampleBuffer, NSError *error) {
if (sampleBuffer != NULL) {
NSData *imageData =
[AVCaptureStillImageOutput
jpegStillImageNSDataRepresentation:sampleBuffer];
[self writeImageToAssetsLibrary:imageData];
} else {
NSLog(@"NULL sampleBuffer: %@", [error localizedDescription]);
}
};
[self.imageOutput captureStillImageAsynchronouslyFromConnection:connection
completionHandler:handler];
}
#pragma mark - 保存图片
// Saves captured image data to the user's Desktop and reveals it in Finder.
// The data is JPEG (imageOutput is configured with AVVideoCodecJPEG), so the
// file now gets a .jpg extension — it was previously mislabeled .png.
- (void)writeImageToAssetsLibrary:(NSData *)imageData {
// %lu + cast: NSUInteger must not be formatted with %d.
NSLog(@"imageData : %lu", (unsigned long)imageData.length);
NSString *directory = NSSearchPathForDirectoriesInDomains(NSDesktopDirectory, NSUserDomainMask, YES).firstObject;
NSTimeInterval currentTime= [[NSDate date] timeIntervalSince1970];
NSString *path = [NSString stringWithFormat:@"%@/%f.jpg",directory,currentTime];
// Check the write result instead of assuming success.
if ([imageData writeToFile:path atomically:YES]) {
[[NSWorkspace sharedWorkspace] selectFile:path inFileViewerRootedAtPath:nil];
} else {
NSLog(@"failed to write image to %@", path);
}
}
#pragma mark - Video Capture Methods
// YES while the movie output has a recording in flight.
- (BOOL)isRecording {
return [self.movieOutput isRecording];
}
// Begins a movie recording to a fresh Desktop URL; the delegate callback
// fires when the file is finalized. No-op when already recording.
- (void)startRecording {
if ([self isRecording]) {
return;
}
// The previous version also fetched the movie output's video connection and
// the active camera here but never used either; both lookups were removed.
self.outputURL = [self uniqueURL];
[self.movieOutput startRecordingToOutputFileURL:self.outputURL recordingDelegate:self];
}
// Duration of the movie recorded so far, as reported by the movie output.
- (CMTime)recordedDuration {
return [self.movieOutput recordedDuration];
}
// Builds a timestamped .mov file URL on the user's Desktop. Uniqueness comes
// from the sub-second timestamp in the file name.
- (NSURL *)uniqueURL {
NSString *desktopDirectory = NSSearchPathForDirectoriesInDomains(NSDesktopDirectory, NSUserDomainMask, YES).firstObject;
NSString *fileName = [NSString stringWithFormat:@"%f.mov", [[NSDate date] timeIntervalSince1970]];
return [[NSURL fileURLWithPath:desktopDirectory] URLByAppendingPathComponent:fileName];
}
// Ends the in-flight recording, if any; file finalization is reported via
// the AVCaptureFileOutputRecordingDelegate callback.
- (void)stopRecording {
if (![self isRecording]) {
return;
}
[self.movieOutput stopRecording];
}
#pragma mark - AVCaptureFileOutputRecordingDelegate
// Delegate callback: the movie file has been finalized. On success the file
// is revealed in Finder; on failure the error is logged. outputURL is cleared
// either way so the next recording starts clean.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error {
NSLog(@"didFinishRecordingToOutputFileAtURL : %@",outputFileURL);
if (error) {
NSLog(@"mediaCaptureFailedWithError : %@",error);
} else {
// Use the URL AVFoundation hands us rather than a copy of the stored
// property; it is authoritative even if outputURL changed meanwhile.
[self writeVideoToAssetsLibrary:outputFileURL];
}
self.outputURL = nil;
}
// Post-processing for a finished recording: kick off thumbnail generation,
// reveal the movie file in Finder, and log its location.
- (void)writeVideoToAssetsLibrary:(NSURL *)videoURL {
[self generateThumbnailForVideoAtURL:videoURL];
NSString *moviePath = videoURL.path;
[[NSWorkspace sharedWorkspace] selectFile:moviePath inFileViewerRootedAtPath:nil];
NSLog(@"videoURL : %@",videoURL);
}