How to implement video recording in iOS with AVCaptureSession

This article shows how to use AVCaptureSession to implement video recording on iOS. The approach is practical and comes up often, so the full, annotated code is shared below; hopefully you will pick up something useful from it.
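Note that neither sample below deals with permissions: the app needs camera and microphone access before the capture session will produce any data, and on iOS 10 and later the NSCameraUsageDescription and NSMicrophoneUsageDescription keys must be present in Info.plist. A minimal sketch of an authorization check (my addition, not part of the original code) could look like this:

//Check and, if necessary, request camera access before starting the session. 
//The same pattern applies to AVMediaTypeAudio for the microphone. 
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]; 
if (status == AVAuthorizationStatusNotDetermined) { 
  [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) { 
    if (!granted) { 
      NSLog(@"Camera access was denied"); 
    } 
  }]; 
} else if (status != AVAuthorizationStatusAuthorized) { 
  NSLog(@"Camera access is not available"); 
} 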


The complete implementation is as follows:

#import "RecordingVideoViewController.h" 
#import <AVFoundation/AVFoundation.h> 
#import <AssetsLibrary/AssetsLibrary.h> 
#import <Masonry/Masonry.h> //assumed: Masonry provides the mas_makeConstraints layout calls used below 
 
@interface RecordingVideoViewController () <AVCaptureFileOutputRecordingDelegate> 
 
//Session: coordinates data flow between the input and output devices 
@property (strong,nonatomic) AVCaptureSession  *captureSession; 
//Device inputs: obtain data from an AVCaptureDevice 
@property (strong,nonatomic) AVCaptureDeviceInput  *videoCaptureDeviceInput; 
@property (strong,nonatomic) AVCaptureDeviceInput  *audioCaptureDeviceInput; 
//Movie file output 
@property (strong,nonatomic) AVCaptureMovieFileOutput  *captureMovieFileOutput; 
//Camera preview layer 
@property (strong,nonatomic) AVCaptureVideoPreviewLayer  *captureVideoPreviewLayer; 
 
//Container for the custom UI controls 
@property (strong,nonatomic) UIView  *viewContainer; 
//Focus indicator 
@property (strong,nonatomic) UIImageView  *focusCursor; 
//Recording duration label 
@property (strong,nonatomic) UILabel  *timeLabel; 
//Switch between front and back camera 
@property (strong,nonatomic) UIButton  *switchCameraBtn; 
//Change zoom factor 
@property (strong,nonatomic) UIButton  *scaleBtn; 
//Timer for the recording duration 
@property (strong,nonatomic) NSTimer  *timer; 
 
 
@end 
 
@implementation RecordingVideoViewController { 
 @private 
  NSInteger _num; 
  CGFloat _kCameraScale; 
} 
 
 
- (UIView *)viewContainer { 
  if (!_viewContainer) { 
    _viewContainer = [[UIView alloc] initWithFrame:[UIScreen mainScreen].bounds]; 
     
    UIButton *takeButton = [UIButton buttonWithType:UIButtonTypeCustom]; 
    takeButton.backgroundColor = [UIColor redColor]; 
    [takeButton setTitle:@"start" forState:UIControlStateNormal]; 
    [takeButton addTarget:self action:@selector(takeButtonClick:) forControlEvents:UIControlEventTouchUpInside]; 
     
   
    _timeLabel = [[UILabel alloc] init]; 
    _timeLabel.textColor = [UIColor redColor]; 
    _timeLabel.textAlignment = NSTextAlignmentCenter; 
    _timeLabel.font = [UIFont boldSystemFontOfSize:20]; 
    _timeLabel.text = @"00:00"; 
     
     
    _switchCameraBtn = [UIButton buttonWithType:UIButtonTypeCustom]; 
    [_switchCameraBtn setTitle:@"switch" forState:UIControlStateNormal]; 
    _switchCameraBtn.backgroundColor = [UIColor redColor]; 
    [_switchCameraBtn addTarget:self action:@selector(switchCameraBtnClick) forControlEvents:UIControlEventTouchUpInside]; 
     
    _scaleBtn = [UIButton buttonWithType:UIButtonTypeCustom]; 
    [_scaleBtn setTitle:@"1X" forState:UIControlStateNormal]; 
    _scaleBtn.backgroundColor = [UIColor redColor]; 
    [_scaleBtn addTarget:self action:@selector(scaleBtnClick:) forControlEvents:UIControlEventTouchUpInside]; 
     
    [_viewContainer addSubview:takeButton]; 
    [_viewContainer addSubview:_timeLabel]; 
    [_viewContainer addSubview:_scaleBtn]; 
    [_viewContainer addSubview:_switchCameraBtn]; 
    [takeButton mas_makeConstraints:^(MASConstraintMaker *make) { 
      make.size.mas_equalTo(CGSizeMake(60, 40)); 
      make.centerX.mas_equalTo(_viewContainer); 
      make.bottom.mas_equalTo(_viewContainer).offset(-64); 
    }]; 
    [_timeLabel mas_makeConstraints:^(MASConstraintMaker *make) { 
      make.centerX.mas_equalTo(_viewContainer); 
      make.height.mas_equalTo(@30); 
      make.top.mas_equalTo(_viewContainer); 
    }]; 
    [_scaleBtn mas_makeConstraints:^(MASConstraintMaker *make) { 
      make.size.mas_equalTo(CGSizeMake(60, 40)); 
      make.left.mas_equalTo(_viewContainer).offset(10); 
      make.top.mas_equalTo(_viewContainer); 
    }]; 
    [_switchCameraBtn mas_makeConstraints:^(MASConstraintMaker *make) { 
      make.size.mas_equalTo(CGSizeMake(60, 40)); 
      make.top.mas_equalTo(_viewContainer); 
      make.right.mas_equalTo(_viewContainer).offset(-10); 
    }]; 
     
    _focusCursor = [[UIImageView alloc] init]; 
    //kBorder is a project-specific macro; it draws a 1 pt yellow border around the focus cursor 
    kBorder(_focusCursor, 1, [UIColor yellowColor]); 
    _focusCursor.alpha = 0; 
    [_viewContainer addSubview:self.focusCursor]; 
    [_focusCursor mas_makeConstraints:^(MASConstraintMaker *make) { 
      make.size.mas_equalTo(CGSizeMake(40, 40)); 
      make.center.mas_equalTo(_viewContainer); 
    }]; 
 
  } 
  return _viewContainer; 
} 
 
- (void)viewDidLoad { 
  [super viewDidLoad]; 
   
  self.title = @"Video Recording"; 
  _kCameraScale = 1.0f; 
  //Initialize the capture session 
  _captureSession = [[AVCaptureSession alloc] init]; 
  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) { 
    _captureSession.sessionPreset = AVCaptureSessionPreset1280x720; 
  } 
   
   
  NSError *error = nil; 
 
  //Get the video input device 
  AVCaptureDevice *videoCaptureDevice = [self cameraDeviceWithPosition:(AVCaptureDevicePositionBack)]; 
  if (!videoCaptureDevice) { 
    NSLog(@"获取后置摄像头失败!"); 
    return; 
  } 
  _videoCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoCaptureDevice error:&error]; 
  if (error) { 
    NSLog(@"取得视频设备输入对象时出错"); 
    return; 
  } 
   
   
  //Get the audio input device 
  AVCaptureDevice *audioCatureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject]; 
  _audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCatureDevice error:&error]; 
  if (error) { 
    NSLog(@"取得音频设备输入对象时出错"); 
    return; 
  } 
   
  //Initialize the movie file output 
  _captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; 
   
  //Add the device inputs to the session 
  if ([_captureSession canAddInput:_videoCaptureDeviceInput]) { 
    [_captureSession addInput:_videoCaptureDeviceInput]; 
    [_captureSession addInput:_audioCaptureDeviceInput]; 
  } 
   
  //Add the device output to the session 
  if ([_captureSession canAddOutput:_captureMovieFileOutput]) { 
    [_captureSession addOutput:_captureMovieFileOutput]; 
     
    //Video stabilization: the video connection only exists after both the input 
    //and the output have been added to the session 
    AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 
    if ([captureConnection isVideoStabilizationSupported]) { 
      captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; 
    } 
  } 
   
   
  //Create the video preview layer 
  _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession]; 
  self.viewContainer.layer.masksToBounds = YES; 
  _captureVideoPreviewLayer.frame = self.viewContainer.bounds; 
  _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; 
  [self.view.layer addSublayer:_captureVideoPreviewLayer]; 
   
  //Show the custom controls 
  [self.view addSubview:self.viewContainer]; 
   
  //Add the tap-to-focus gesture 
  UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapScreen:)]; 
  [self.viewContainer addGestureRecognizer:tapGesture]; 
   
} 
 
-(void)viewDidAppear:(BOOL)animated{ 
  [super viewDidAppear:animated]; 
  [self.captureSession startRunning]; 
} 
 
-(void)viewDidDisappear:(BOOL)animated{ 
  [super viewDidDisappear:animated]; 
  [self.captureSession stopRunning]; 
  [self.timer invalidate]; 
  self.timer = nil; 
} 
 
- (void)viewWillDisappear:(BOOL)animated { 
  [super viewWillDisappear:animated]; 
  [self.captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(1, 1)]; 
} 
 
- (void)didReceiveMemoryWarning { 
  [super didReceiveMemoryWarning]; 
} 
 
//Start and stop recording 
- (void)takeButtonClick:(UIButton *)sender { 
  if ([self.captureMovieFileOutput isRecording]) { 
    [self.captureMovieFileOutput stopRecording]; 
     
    [self.navigationController popViewControllerAnimated:YES]; 
     
  } else { 
    AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 
    captureConnection.videoOrientation = [self.captureVideoPreviewLayer connection].videoOrientation; 
     
    NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Movie.mov"]; 
    NSLog(@"%@",filePath); 
    [self.captureMovieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:filePath] recordingDelegate:self]; 
     
     
    self.switchCameraBtn.hidden = YES; 
     
    sender.backgroundColor = [UIColor greenColor]; 
    [sender setTitle:@"stop" forState:UIControlStateNormal]; 
     
    self.timer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(timeAction) userInfo:nil repeats:YES]; 
    [self.timer setFireDate:[NSDate distantPast]]; 
  } 
} 
 
//Switch between front and back camera 
- (void)switchCameraBtnClick { 
  AVCaptureDevicePosition currentPosition = self.videoCaptureDeviceInput.device.position; 
  AVCaptureDevicePosition toPosition; 
  if (currentPosition == AVCaptureDevicePositionUnspecified || 
    currentPosition == AVCaptureDevicePositionFront) { 
    toPosition = AVCaptureDevicePositionBack; 
  } else { 
    toPosition = AVCaptureDevicePositionFront; 
  } 
   
  AVCaptureDevice *toCapturDevice = [self cameraDeviceWithPosition:toPosition]; 
  if (!toCapturDevice) { 
    NSLog(@"获取要切换的设备失败"); 
    return; 
  } 
   
  NSError *error = nil; 
  AVCaptureDeviceInput *toVideoDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toCapturDevice error:&error]; 
  if (error) { 
    NSLog(@"获取要切换的设备输入失败"); 
    return; 
  } 
   
  //Begin the session configuration changes 
  [self.captureSession beginConfiguration]; 
   
  [self.captureSession removeInput:self.videoCaptureDeviceInput]; 
  if ([self.captureSession canAddInput:toVideoDeviceInput]) { 
    [self.captureSession addInput:toVideoDeviceInput]; 
     
    self.videoCaptureDeviceInput = toVideoDeviceInput; 
  } 
  //Commit the session configuration 
  [self.captureSession commitConfiguration]; 
} 
 
 
//Tap gesture handler 
- (void)tapScreen:(UITapGestureRecognizer *)tap { 
  CGPoint point = [tap locationInView:self.viewContainer]; 
   
  //Convert the UI point to a camera point of interest 
  CGPoint cameraPoint = [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point]; 
 
  //Animate the focus cursor 
  self.focusCursor.center = point; 
  self.focusCursor.transform = CGAffineTransformMakeScale(1.5, 1.5); 
  self.focusCursor.alpha = 1.0f; 
  [UIView animateWithDuration:1 animations:^{ 
    self.focusCursor.transform = CGAffineTransformIdentity; 
  } completion:^(BOOL finished) { 
    self.focusCursor.alpha = 0.0f; 
 
  }]; 
   
  //Set the focus point 
  [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint]; 
 
} 
 
 
/**Set the focus point*/ 
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{ 
   
  AVCaptureDevice *captureDevice= [self.videoCaptureDeviceInput device]; 
  NSError *error = nil; 
  //Device properties must be changed between lockForConfiguration: and unlockForConfiguration 
  if ([captureDevice lockForConfiguration:&error]) { 
     
    if ([captureDevice isFocusModeSupported:focusMode]) { 
      [captureDevice setFocusMode:focusMode]; 
    } 
    if ([captureDevice isFocusPointOfInterestSupported]) { 
      [captureDevice setFocusPointOfInterest:point]; 
    } 
    //    //Exposure 
    //    if ([captureDevice isExposureModeSupported:exposureMode]) { 
    //      [captureDevice setExposureMode:exposureMode]; 
    //    } 
    //    if ([captureDevice isExposurePointOfInterestSupported]) { 
    //      [captureDevice setExposurePointOfInterest:point]; 
    //    } 
    //    //Flash mode 
    //    if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) { 
    //      [captureDevice setFlashMode:AVCaptureFlashModeAuto]; 
    //    } 
     
    //Unlock 
    [captureDevice unlockForConfiguration]; 
     
  }else{ 
    NSLog(@"设置设备属性过程发生错误,错误信息:%@",error.localizedDescription); 
  } 
} 
 
 
 
//Adjust the zoom factor 
-(void)scaleBtnClick:(UIButton *)sender 
{ 
  _kCameraScale += 0.5; 
  if(_kCameraScale > 3.0) { 
    _kCameraScale = 1.0; 
  } 
  //Change the zoom factor 
  AVCaptureDevice *videoDevice = self.videoCaptureDeviceInput.device; 
  NSError *error = nil; 
  if ([videoDevice lockForConfiguration:&error]) { 
     
    [videoDevice setVideoZoomFactor:_kCameraScale]; 
     
    [videoDevice unlockForConfiguration]; 
     
    [sender setTitle:[NSString stringWithFormat:@"%lgX",_kCameraScale] forState:UIControlStateNormal]; 
 
    [CATransaction begin]; 
    [CATransaction setAnimationDuration:0.25]; 
    [self.captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(_kCameraScale, _kCameraScale)]; 
    [CATransaction commit]; 
     
  } else { 
    NSLog(@"修改设备属性失败!") 
  } 
} 
 
 
 
#pragma mark -------- AVCaptureFileOutputRecordingDelegate ---------- 
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections { 
  NSLog(@"开始录制"); 
} 
 
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error { 
  NSLog(@"录制结束"); 
  ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init]; 
  [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) { 
    if (error) { 
      NSLog(@"保存视频到相簿过程中发生错误,错误信息:%@",error.localizedDescription); 
    } 
  }]; 
} 
 
//Recording timer callback 
- (void)timeAction { 
  self.timeLabel.text = [NSString stringWithFormat:@"%.2ld:%.2ld",_num/60,_num%60]; 
  _num ++; 
} 
 
 
/**Get the camera at the specified position*/ 
- (AVCaptureDevice *)cameraDeviceWithPosition:(AVCaptureDevicePosition )position{ 
  NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
  for (AVCaptureDevice *camera in cameras) { 
    if ([camera position] == position) { 
      return camera; 
    } 
  } 
  return nil; 
} 
  
@end
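
The delegate method above saves the recording with ALAssetsLibrary, which was deprecated in iOS 9. On newer systems the same result can be achieved with the Photos framework; the following is a minimal sketch under that assumption (saveVideoToPhotos: is a hypothetical helper, not part of the original sample):

#import <Photos/Photos.h> 
 
//Hypothetical helper: save the movie file written by AVCaptureMovieFileOutput 
//to the photo library using the Photos framework instead of ALAssetsLibrary. 
- (void)saveVideoToPhotos:(NSURL *)outputFileURL { 
  [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{ 
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL]; 
  } completionHandler:^(BOOL success, NSError *error) { 
    if (!success) { 
      NSLog(@"Failed to save the video to the photo library: %@",error.localizedDescription); 
    } 
  }]; 
} 

It could be called from -captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: in place of the ALAssetsLibrary call.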

Reference code (a fuller variant of the same approach that also covers device notifications, background saving, and screen-rotation handling):

#import "VideoTestViewController.h" 
#import <AVFoundation/AVFoundation.h> 
#import <AssetsLibrary/AssetsLibrary.h> 
 
typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice); 
 
@interface VideoTestViewController () <AVCaptureFileOutputRecordingDelegate> //movie file output delegate 
 
@property (strong,nonatomic) AVCaptureSession *captureSession;//coordinates data flow between the input and output devices 
@property (strong,nonatomic) AVCaptureDeviceInput *captureDeviceInput;//obtains data from an AVCaptureDevice 
@property (strong,nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;//movie file output 
@property (strong,nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;//camera preview layer 
 
@property (assign,nonatomic) BOOL enableRotation;//whether rotation is allowed (rotation is disabled while recording) 
@property (assign,nonatomic) CGRect lastBounds;//bounds before rotation 
@property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;//background task identifier 
@property (strong,nonatomic) UIView *viewContainer; 
@property (strong,nonatomic) UIButton *takeButton;//record button 
@property (strong,nonatomic) UIImageView *focusCursor; //focus cursor 
 
 
@end 
 
@implementation VideoTestViewController 
 
#pragma mark - View controller lifecycle 
- (void)viewDidLoad { 
  [super viewDidLoad]; 
} 
 
-(void)viewWillAppear:(BOOL)animated{ 
  [super viewWillAppear:animated]; 
   
  //Initialize the session 
  _captureSession=[[AVCaptureSession alloc]init]; 
  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {//set the resolution 
    _captureSession.sessionPreset=AVCaptureSessionPreset1280x720; 
  } 
  //Get the input device 
  AVCaptureDevice *captureDevice=[self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];//get the back camera 
  if (!captureDevice) { 
    NSLog(@"There was a problem getting the back camera."); 
    return; 
  } 
  //Add an audio input device 
  AVCaptureDevice *audioCaptureDevice=[[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject]; 
   
   
  NSError *error=nil; 
  //Create the device input object from the input device to obtain the input data 
  _captureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:captureDevice error:&error]; 
  if (error) { 
    NSLog(@"取得设备输入对象时出错,错误原因:%@",error.localizedDescription); 
    return; 
  } 
  AVCaptureDeviceInput *audioCaptureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:audioCaptureDevice error:&error]; 
  if (error) { 
    NSLog(@"取得设备输入对象时出错,错误原因:%@",error.localizedDescription); 
    return; 
  } 
  //Initialize the movie file output to obtain the output data 
  _captureMovieFileOutput=[[AVCaptureMovieFileOutput alloc]init]; 
   
  //Add the device inputs to the session 
  if ([_captureSession canAddInput:_captureDeviceInput]) { 
    [_captureSession addInput:_captureDeviceInput]; 
    [_captureSession addInput:audioCaptureDeviceInput]; 
  } 
   
  //Add the device output to the session 
  if ([_captureSession canAddOutput:_captureMovieFileOutput]) { 
    [_captureSession addOutput:_captureMovieFileOutput]; 
    //Video stabilization: the connection only exists once the output has been added to the session 
    AVCaptureConnection *captureConnection=[_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 
    if ([captureConnection isVideoStabilizationSupported]) { 
      captureConnection.preferredVideoStabilizationMode=AVCaptureVideoStabilizationModeAuto; 
    } 
  } 
   
  //Create the video preview layer to show the live camera feed 
  _captureVideoPreviewLayer=[[AVCaptureVideoPreviewLayer alloc]initWithSession:self.captureSession]; 
   
  CALayer *layer=self.viewContainer.layer; 
  layer.masksToBounds=YES; 
   
  _captureVideoPreviewLayer.frame=layer.bounds; 
  _captureVideoPreviewLayer.videoGravity=AVLayerVideoGravityResizeAspectFill;//fill mode 
  //Add the video preview layer to the view hierarchy 
  //[layer addSublayer:_captureVideoPreviewLayer]; 
  [layer insertSublayer:_captureVideoPreviewLayer below:self.focusCursor.layer]; 
   
  _enableRotation=YES; 
  [self addNotificationToCaptureDevice:captureDevice]; 
  [self addTapGestureRecognizer]; 
} 
 
-(void)viewDidAppear:(BOOL)animated{ 
  [super viewDidAppear:animated]; 
  [self.captureSession startRunning]; 
} 
 
-(void)viewDidDisappear:(BOOL)animated{ 
  [super viewDidDisappear:animated]; 
  [self.captureSession stopRunning]; 
} 
 
- (void)didReceiveMemoryWarning { 
  [super didReceiveMemoryWarning]; 
} 
 
-(BOOL)shouldAutorotate{ 
  return self.enableRotation; 
} 
 
////Adjust the video preview layer orientation when the screen rotates 
//-(void)willTransitionToTraitCollection:(UITraitCollection *)newCollection withTransitionCoordinator:(id)coordinator{ 
//  [super willTransitionToTraitCollection:newCollection withTransitionCoordinator:coordinator]; 
////  NSLog(@"%i,%i",newCollection.verticalSizeClass,newCollection.horizontalSizeClass); 
//  UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation]; 
//  NSLog(@"%i",orientation); 
//  AVCaptureConnection *captureConnection=[self.captureVideoPreviewLayer connection]; 
//  captureConnection.videoOrientation=orientation; 
// 
//} 
//Adjust the video preview layer orientation when the screen rotates 
-(void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration{ 
  AVCaptureConnection *captureConnection=[self.captureVideoPreviewLayer connection]; 
  captureConnection.videoOrientation=(AVCaptureVideoOrientation)toInterfaceOrientation; 
} 
//Reset the preview layer size after rotation 
-(void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation{ 
  _captureVideoPreviewLayer.frame=self.viewContainer.bounds; 
} 
 
-(void)dealloc{ 
  [self removeNotification]; 
} 
#pragma mark - UI actions 
#pragma mark Video recording 
- (void)takeButtonClick:(UIButton *)sender { 
  //Get the connection from the movie file output 
  AVCaptureConnection *captureConnection=[self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 
  //Check whether the output is already recording 
  if (![self.captureMovieFileOutput isRecording]) { 
    self.enableRotation=NO; 
    //If multitasking is supported, begin a background task 
    if ([[UIDevice currentDevice] isMultitaskingSupported]) { 
      self.backgroundTaskIdentifier=[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil]; 
    } 
    //Keep the video orientation consistent with the preview layer 
    captureConnection.videoOrientation=[self.captureVideoPreviewLayer connection].videoOrientation; 
    NSString *outputFilePath=[NSTemporaryDirectory() stringByAppendingPathComponent:@"myMovie.mov"]; 
    NSLog(@"save path is :%@",outputFilePath); 
    NSURL *fileUrl=[NSURL fileURLWithPath:outputFilePath]; 
    [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self]; 
  } 
  else{ 
    [self.captureMovieFileOutput stopRecording];//stop recording 
  } 
} 
#pragma mark Switch between front and back camera 
- (void)toggleButtonClick:(UIButton *)sender { 
  AVCaptureDevice *currentDevice=[self.captureDeviceInput device]; 
  AVCaptureDevicePosition currentPosition=[currentDevice position]; 
  [self removeNotificationFromCaptureDevice:currentDevice]; 
  AVCaptureDevice *toChangeDevice; 
  AVCaptureDevicePosition toChangePosition=AVCaptureDevicePositionFront; 
  if (currentPosition==AVCaptureDevicePositionUnspecified||currentPosition==AVCaptureDevicePositionFront) { 
    toChangePosition=AVCaptureDevicePositionBack; 
  } 
  toChangeDevice=[self getCameraDeviceWithPosition:toChangePosition]; 
  [self addNotificationToCaptureDevice:toChangeDevice]; 
  //Create the device input for the camera being switched to 
  AVCaptureDeviceInput *toChangeDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:toChangeDevice error:nil]; 
   
  //Call beginConfiguration before changing the session configuration and commitConfiguration when done 
  [self.captureSession beginConfiguration]; 
  //Remove the old input 
  [self.captureSession removeInput:self.captureDeviceInput]; 
  //Add the new input 
  if ([self.captureSession canAddInput:toChangeDeviceInput]) { 
    [self.captureSession addInput:toChangeDeviceInput]; 
    self.captureDeviceInput=toChangeDeviceInput; 
  } 
  //Commit the session configuration 
  [self.captureSession commitConfiguration]; 
   
} 
 
#pragma mark - Movie file output delegate 
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{ 
  NSLog(@"Recording started..."); 
} 
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{ 
  NSLog(@"Recording finished."); 
  //After recording finishes, save the video to the photo album from the background task 
  self.enableRotation=YES; 
  UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier=self.backgroundTaskIdentifier; 
  self.backgroundTaskIdentifier=UIBackgroundTaskInvalid; 
  ALAssetsLibrary *assetsLibrary=[[ALAssetsLibrary alloc]init]; 
  [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) { 
    if (error) { 
      NSLog(@"保存视频到相簿过程中发生错误,错误信息:%@",error.localizedDescription); 
    } 
    if (lastBackgroundTaskIdentifier!=UIBackgroundTaskInvalid) { 
      [[UIApplication sharedApplication] endBackgroundTask:lastBackgroundTaskIdentifier]; 
    } 
    NSLog(@"成功保存视频到相簿."); 
  }]; 
   
} 
 
#pragma mark - Notifications 
/** 
 * Add notifications for the capture device 
 */ 
-(void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice{ 
  //Note: subject-area-change notifications require enabling monitoring on the device first 
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) { 
    captureDevice.subjectAreaChangeMonitoringEnabled=YES; 
  }]; 
  NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter]; 
  //The subject area changed 
  [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice]; 
} 
-(void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice{ 
  NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter]; 
  [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice]; 
} 
/** 
 * Remove all notifications 
 */ 
-(void)removeNotification{ 
  NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter]; 
  [notificationCenter removeObserver:self]; 
} 
 
-(void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession{ 
  NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter]; 
  //Session runtime error 
  [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession]; 
} 
 
/** 
 * Device connected 
 * 
 * @param notification the notification object 
 */ 
-(void)deviceConnected:(NSNotification *)notification{ 
  NSLog(@"设备已连接..."); 
} 
/** 
 * Device disconnected 
 * 
 * @param notification the notification object 
 */ 
-(void)deviceDisconnected:(NSNotification *)notification{ 
  NSLog(@"设备已断开."); 
} 
/** 
 * Subject area changed 
 * 
 * @param notification the notification object 
 */ 
-(void)areaChange:(NSNotification *)notification{ 
  NSLog(@"捕获区域改变..."); 
} 
 
/** 
 * Session runtime error 
 * 
 * @param notification the notification object 
 */ 
-(void)sessionRuntimeError:(NSNotification *)notification{ 
  NSLog(@"会话发生错误."); 
} 
 
#pragma mark - Private methods 
 
/** 
 * Get the camera at the specified position 
 * 
 * @param position camera position 
 * 
 * @return camera device 
 */ 
-(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{ 
  NSArray *cameras= [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
  for (AVCaptureDevice *camera in cameras) { 
    if ([camera position]==position) { 
      return camera; 
    } 
  } 
  return nil; 
} 
 
/** 
 * Common helper for changing device properties 
 * 
 * @param propertyChange block that performs the property change 
 */ 
-(void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{ 
  AVCaptureDevice *captureDevice= [self.captureDeviceInput device]; 
  NSError *error; 
  //Note: call lockForConfiguration: before changing device properties and unlockForConfiguration afterwards 
  if ([captureDevice lockForConfiguration:&error]) { 
    propertyChange(captureDevice); 
    [captureDevice unlockForConfiguration]; 
  }else{ 
    NSLog(@"设置设备属性过程发生错误,错误信息:%@",error.localizedDescription); 
  } 
} 
 
/** 
 * Set the flash mode 
 * 
 * @param flashMode flash mode 
 */ 
-(void)setFlashMode:(AVCaptureFlashMode )flashMode{ 
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) { 
    if ([captureDevice isFlashModeSupported:flashMode]) { 
      [captureDevice setFlashMode:flashMode]; 
    } 
  }]; 
} 
/** 
 * Set the focus mode 
 * 
 * @param focusMode focus mode 
 */ 
-(void)setFocusMode:(AVCaptureFocusMode )focusMode{ 
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) { 
    if ([captureDevice isFocusModeSupported:focusMode]) { 
      [captureDevice setFocusMode:focusMode]; 
    } 
  }]; 
} 
/** 
 * Set the exposure mode 
 * 
 * @param exposureMode exposure mode 
 */ 
-(void)setExposureMode:(AVCaptureExposureMode)exposureMode{ 
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) { 
    if ([captureDevice isExposureModeSupported:exposureMode]) { 
      [captureDevice setExposureMode:exposureMode]; 
    } 
  }]; 
} 
/** 
 * Set the focus point 
 * 
 * @param point focus point 
 */ 
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{ 
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) { 
    if ([captureDevice isFocusModeSupported:focusMode]) { 
      [captureDevice setFocusMode:focusMode]; 
    } 
    if ([captureDevice isFocusPointOfInterestSupported]) { 
      [captureDevice setFocusPointOfInterest:point]; 
    } 
    if ([captureDevice isExposureModeSupported:exposureMode]) { 
      [captureDevice setExposureMode:exposureMode]; 
    } 
    if ([captureDevice isExposurePointOfInterestSupported]) { 
      [captureDevice setExposurePointOfInterest:point]; 
    } 
  }]; 
} 
 
/** 
 * Add a tap gesture; tapping focuses the camera 
 */ 
-(void)addTapGestureRecognizer{ 
  UITapGestureRecognizer *tapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapScreen:)]; 
  [self.viewContainer addGestureRecognizer:tapGesture]; 
} 
-(void)tapScreen:(UITapGestureRecognizer *)tapGesture{ 
  CGPoint point= [tapGesture locationInView:self.viewContainer]; 
  //Convert the UI coordinate into a camera coordinate 
  CGPoint cameraPoint= [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point]; 
  [self setFocusCursorWithPoint:point]; 
  [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint]; 
} 
 
/** 
 * Set the focus cursor position 
 * 
 * @param point cursor position 
 */ 
-(void)setFocusCursorWithPoint:(CGPoint)point{ 
  self.focusCursor.center=point; 
  self.focusCursor.transform=CGAffineTransformMakeScale(1.5, 1.5); 
  self.focusCursor.alpha=1.0; 
  [UIView animateWithDuration:1.0 animations:^{ 
    self.focusCursor.transform=CGAffineTransformIdentity; 
  } completion:^(BOOL finished) { 
    self.focusCursor.alpha=0; 
  }]; 
} 
@end
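
A final note on the reference code: devicesWithMediaType:, used by both getCameraDeviceWithPosition: and cameraDeviceWithPosition: above, was deprecated in iOS 10. On newer systems the same lookup can be done with AVCaptureDeviceDiscoverySession; a rough, assumption-level equivalent (not from the original article) looks like this:

//iOS 10+ replacement for the devicesWithMediaType:-based camera lookup 
- (AVCaptureDevice *)cameraDeviceWithPosition:(AVCaptureDevicePosition)position { 
  AVCaptureDeviceDiscoverySession *discoverySession = 
    [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] 
                                                           mediaType:AVMediaTypeVideo 
                                                            position:position]; 
  //Returns nil if no built-in wide-angle camera exists at the requested position 
  return discoverySession.devices.firstObject; 
} 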

That covers how to implement video recording in iOS with AVCaptureSession. Some of these points are likely to come up in everyday work, and hopefully this article has helped you learn something new.

