IOS8.1打开摄像头问题,紧急求助
在开发视频程序的过程中,打开摄像头并做预览及录像时,ios7及以下,下面的代码是正常的
但是在IOS8.1上时,该代码无法预览,获取不到视频流,显示一个灰色的窗口,没有任何视频信息,
通过真机调试时,未发现异常抛出
是否是ios8.1对这块做了调整,能否告知应用代码要做哪些调整?
#pragma mark - 录像
-(AVCaptureVideoPreviewLayer *)recordVideo
{
if (_isRecordVideo == NO)
return nil;
self.nFrame = 0;
[self initCameraSession];//20140604
//preview
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[[AVCaptureVideoPreviewLayer alloc] initWithSession:cameraSession] autorelease];
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[captureVideoPreviewLayer setFrame:CGRectMake(0, 0, 320, 320)];
[cameraSession startRunning];
return captureVideoPreviewLayer;
}
-(void)initCameraSession
{
cameraSession.sessionPreset = AVCaptureSessionPresetLow;
// Add inputs and outputs.
NSArray *devices = [AVCaptureDevice devices];
AVCaptureDevice *frontCamera = nil;
AVCaptureDevice *backCamera = nil;
for (AVCaptureDevice *device in devices)
{
if ([device hasMediaType:AVMediaTypeVideo])
{
if ([device position] == AVCaptureDevicePositionBack)
{
backCamera = device;
}
else
{
frontCamera = device;
}
}
}
if (frontCamera == nil)
{
//todo alert warning.
return;
}
if (backCamera == nil) {
return;
}
[cameraSession beginConfiguration];
if (_deviceInput) {
[cameraSession removeInput:_deviceInput];
}
if (self.carmeraMode) {
_deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil] ;
}else
{
_deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:nil] ;
}
if (!_deviceInput)
{
return;
}
if ([cameraSession canAddInput:_deviceInput])
{
[cameraSession addInput:_deviceInput];
}
else
{
return;
}
if (fileDataOutput) {
[cameraSession removeOutput:fileDataOutput];
}
fileDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[fileDataOutput setAlwaysDiscardsLateVideoFrames:YES];
NSDictionary * settings = [[NSDictionary alloc] initWithObjectsAndKeys:
//[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt:240], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithInt:320], (id)kCVPixelBufferHeightKey,
nil];
[fileDataOutput setVideoSettings:settings];
[settings release];
/*We create a serial queue to handle the processing of our frames*/
dispatch_queue_t queue = dispatch_queue_create("com.wly.kge.queue", NULL);
[fileDataOutput setSampleBufferDelegate:self queue:queue];
[cameraSession addOutput:fileDataOutput];
dispatch_release(queue);
[[Utility sharedUtility] deleteFileAtPaht:[[[Utility sharedUtility]getDocumentsDirectory] stringByAppendingString:PATH_RecordingVideo]];
if (videoWriter) {
videoWriter = nil;
}
videoWriter = [[AVAssetWriter alloc]initWithURL:[NSURL fileURLWithPath:[[[Utility sharedUtility]getDocumentsDirectory] stringByAppendingString:PATH_RecordingVideo]]
fileType:AVFileTypeQuickTimeMovie
error:nil];
NSParameterAssert(videoWriter);
NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:128.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:240], AVVideoWidthKey,
[NSNumber numberWithInt:320], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
if (videoWriterInput) {
videoWriterInput = nil;
}
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
for (AVCaptureConnection * connection in fileDataOutput.connections)
{
if ([connection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
[connection setVideoOrientation:orientation];
}
if ([connection isVideoMirroringSupported])
{
if (self.carmeraMode) {
connection.videoMirrored = NO;
}else
{
connection.videoMirrored = YES;
}//zsx 20140605
// connection.videoMirrored = YES;//zsx 20140605
}
}
[cameraSession commitConfiguration];
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if( !CMSampleBufferDataIsReady(sampleBuffer) )
{
NSLog( @"sample buffer is not ready. Skipping sample" );
return;
}
if( _isPause == NO )
{
lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if( (self.nFrame == 0) && (videoWriter.status != AVAssetWriterStatusWriting) )
{
@try {
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:lastSampleTime];
self.nFrame = 1;
}
@catch (NSException *exception) {
//donothing
NSLog(@"videoWriter startWriting err!");
}
@finally {
//donothing
}
}
if( captureOutput == fileDataOutput )
{
if( videoWriter.status > AVAssetWriterStatusWriting )
{
if( videoWriter.status == AVAssetWriterStatusFailed )
{
NSLog(@"Error(%d): %@",self.nFrame, videoWriter.error);
//self.nFrame = -1;
//[self setRecordVideo:YES];
}
return;
}
if( ![videoWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to video input,status = %d,frame = %d",videoWriter.status,self.nFrame);
self.nFrame++;
}
}
}