Swift 3中的AVCaptureStillImageOutput与AVCapturePhotoOutput的区别

17

我正在尝试将相机视图简单地放入我的视图控制器中。

我在顶部导入了AVFoundation,并让视图控制器遵循了 UIImagePickerControllerDelegate 和 UINavigationControllerDelegate 这两个协议。

然而,每当我尝试使用AVCaptureStillImageOutput时,Xcode告诉我它在iOS10中已被弃用,并且我应该使用AVCapturePhotoOutput。这完全没问题,但是一旦我想在 AVCapturePhotoOutput 上调用 stillImageOutput.outputSettings,就发现 outputSettings 属性本身并不存在。因此,我必须继续使用 AVCaptureStillImageOutput 才能使其正常工作,但是我会收到多个警告,因为这个类在iOS10中已被弃用。

我搜寻了很久,但真的找不到解决方法。非常感谢您的帮助。我正在学习,所以任何解释都会很好!下面是代码。

import UIKit
import AVFoundation

class CameraView: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    // Capture pipeline (Swift 3 / iOS 9-era API; AVCaptureStillImageOutput
    // is deprecated as of iOS 10 — see the accepted answer for the
    // AVCapturePhotoOutput replacement).
    var captureSession : AVCaptureSession?
    var stillImageOutput : AVCaptureStillImageOutput?
    var previewLayer : AVCaptureVideoPreviewLayer?

    @IBOutlet var cameraView: UIView!

    /// Builds the capture pipeline each time the view is about to appear:
    /// back camera -> session -> JPEG still-image output -> live preview layer.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        captureSession = AVCaptureSession()
        captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080

        // Default video device (the back camera on an iPhone).
        let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        do {
            // `try` (not `try!`) so a failure reaches the catch below instead
            // of crashing; the unused local NSError is gone — Swift's do/catch
            // replaces the old inout-error pattern.
            let input = try AVCaptureDeviceInput(device: backCamera)

            // Bug fix: the original compared `captureSession?.canAddInput(input) != nil`,
            // which only tests that the session exists (Bool? vs nil), not that
            // the input can actually be added. Compare against `true` instead.
            if captureSession?.canAddInput(input) == true {
                captureSession?.addInput(input)

                let output = AVCaptureStillImageOutput()
                output.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
                stillImageOutput = output

                // Same `!= nil` bug fixed for the output check.
                if captureSession?.canAddOutput(output) == true {
                    captureSession?.addOutput(output)

                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                    previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
                    previewLayer?.connection.videoOrientation = .portrait
                    // Without a frame the layer is zero-sized and nothing shows.
                    previewLayer?.frame = cameraView.bounds
                    if let previewLayer = previewLayer {
                        cameraView.layer.addSublayer(previewLayer)
                    }
                    captureSession?.startRunning()
                }
            }
        } catch {
            // Surface the failure instead of silently swallowing it.
            print("Could not create camera input: \(error)")
        }
    }
}
3个回答

17

这是我的完整实现

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {

    // Capture pipeline: the session owns the camera input, cameraOutput
    // produces photos, previewLayer renders the live feed.
    // (Note: "captureSesssion" is misspelled in the original answer; kept as-is.)
    var captureSesssion : AVCaptureSession!
    var cameraOutput : AVCapturePhotoOutput!
    var previewLayer : AVCaptureVideoPreviewLayer!

    @IBOutlet weak var capturedImage: UIImageView!
    @IBOutlet weak var previewView: UIView!

    /// Configures the session with the photo preset, wires the default video
    /// device as input and AVCapturePhotoOutput as output, then starts the
    /// live preview inside `previewView`.
    override func viewDidLoad() {
        super.viewDidLoad()
        captureSesssion = AVCaptureSession()
        captureSesssion.sessionPreset = AVCaptureSessionPresetPhoto
        cameraOutput = AVCapturePhotoOutput()

        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        if let input = try? AVCaptureDeviceInput(device: device) {
            if captureSesssion.canAddInput(input) {
                captureSesssion.addInput(input)
                if captureSesssion.canAddOutput(cameraOutput) {
                    captureSesssion.addOutput(cameraOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSesssion)
                    previewLayer.frame = previewView.bounds
                    previewView.layer.addSublayer(previewLayer)
                    captureSesssion.startRunning()
                }
            } else {
                print("issue here : captureSesssion.canAddInput")
            }
        } else {
            print("some problem here")
        }
    }

    // Take picture button
    // Requests a capture with a 160x160 preview image; the result arrives
    // asynchronously in the capture(_:didFinishProcessingPhotoSampleBuffer:...)
    // delegate callback below.
    @IBAction func didPressTakePhoto(_ sender: UIButton) {
        let settings = AVCapturePhotoSettings()
        // Force-unwrap is safe per Apple docs: at least one preview pixel
        // format is always available once the output is attached to a session.
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: 160,
            kCVPixelBufferHeightKey as String: 160
        ]
        settings.previewPhotoFormat = previewFormat
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    // callBack from take picture
    // AVCapturePhotoCaptureDelegate method (Swift 3 signature — this exact
    // signature is required for AVFoundation to invoke it). Converts the JPEG
    // sample buffer into a UIImage and shows it in `capturedImage`.
    func capture(_ captureOutput: AVCapturePhotoOutput,  didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?,  previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings:  AVCaptureResolvedPhotoSettings, bracketSettings:   AVCaptureBracketedStillImageSettings?, error: Error?) {

        // NOTE(review): the error is only logged — execution deliberately
        // continues, since the buffers may still be nil-checked below.
        if let error = error {
            print("error occure : \(error.localizedDescription)")
        }

        if  let sampleBuffer = photoSampleBuffer,
            let previewBuffer = previewPhotoSampleBuffer,
            let dataImage =  AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer:  sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print(UIImage(data: dataImage)?.size as Any)

            // Build a CGImage from the JPEG bytes; `.right` orientation
            // compensates for the camera sensor's native landscape rotation.
            let dataProvider = CGDataProvider(data: dataImage as CFData)
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)

            self.capturedImage.image = image
        } else {
            print("some error here")
        }
    }

    // This method you can use somewhere you need to know camera permission   state
    // Checks the current camera authorization status; prompts for access if
    // not yet determined, and shows an alert if the user previously denied it.
    func askPermission() {
        print("here")

        let cameraPermissionStatus =  AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)

        switch cameraPermissionStatus {
        case .authorized:
            print("Already Authorized")

        case .denied:
            print("denied")

            let alert = UIAlertController(title: "Sorry :(" , message: "But could you please grant permission for camera within device settings",  preferredStyle: .alert)
            let action = UIAlertAction(title: "Ok", style: .cancel,  handler: nil)
            alert.addAction(action)
            present(alert, animated: true, completion: nil)

        case .restricted:
            print("restricted")

        default:
            // .notDetermined: ask the system permission dialog. The completion
            // handler runs on an arbitrary queue, hence the main-queue hops
            // for any UI work; [weak self] avoids retaining the controller.
            AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) {
                [weak self]
                (granted :Bool) -> Void in

                if granted == true {
                    // User granted
                    print("User granted")
                    DispatchQueue.main.async() {
                        // Do smth that you need in main thread   
                    } 
                } else {
                    // User Rejected
                    print("User Rejected")

                    DispatchQueue.main.async() {
                        let alert = UIAlertController(title: "WHY?" , message: "Camera it is the main feature of our application", preferredStyle: .alert)
                        let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                        alert.addAction(action)
                        self?.present(alert, animated: true, completion: nil)  
                    }
                }
            }
        }
    }
}

8
AVCaptureStillImageOutput被弃用意味着你可以在iOS 10中继续使用,但是:
- 苹果不保证它会在iOS 10之后的版本中一直可用。 - 随着iOS 10及以后版本加入新的硬件和软件功能,你将无法访问所有功能。例如,你可以设置AVCaptureStillImageOutput以获得广色域,但使用AVCapturePhotoOutput更容易实现广色域。而对于RAW捕捉或Live Photos,则只有AVCapturePhotoOutput能胜任。
如果您在它已被弃用的情况下仍然愿意使用,请注意:您的问题并不在于 outputSettings 被删除了——它仍然存在。
对于beta 6及以上版本需注意的一点(虽然在这里并不是一个问题)是:在Swift 3中,没有明确键和值类型的NSDictionary API会转换为[AnyHashable: Any],而你可能会在字典中使用的Foundation或CoreFoundation类型不再自动桥接到Swift类型。(关于beta 6字典转换的其他问题可能会指引你正确的方向。)
然而,对于设置 outputSettings 这件事,我没有遇到任何编译错误——无论是使用你的完整代码,还是只提取与那一行相关的核心部分:
// The deprecated AVCaptureStillImageOutput still exposes `outputSettings`;
// this assignment compiles with only a deprecation warning, not an error.
var stillImageOutput : AVCaptureStillImageOutput?
stillImageOutput = AVCaptureStillImageOutput()
stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

我只看到有关弃用的警告。


-2

我写Objective-C代码是因为Aleksey Timoshchenko的答案是正确的。

只是为了帮助其他人。

// Class extension: private outlets and AV capture objects.
// NOTE(review): `self.viewModel`, `CameraGalleryViewCell`, and the
// collection-view data-source/delegate conformances used below are not
// declared in this extension — presumably declared elsewhere; verify.
@interface CameraGalleryViewController ()

@property (weak, nonatomic) IBOutlet UIView *viewCamera;

@property (weak, nonatomic) IBOutlet UICollectionView *collectionView;

// Capture pipeline: session owns the input, cameraOutput produces photos,
// previewLayer renders the live feed inside `viewCamera`.
@property (strong, nonatomic) AVCaptureSession *session;
@property (strong, nonatomic) AVCapturePhotoOutput *cameraOutput;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;

@end

@implementation CameraGalleryViewController

#pragma mark - Lifecycle

// ==================================================================================
// Lifecycle

- (void) viewDidLoad {
    [super viewDidLoad];

    [self.viewModel viewModelDidLoad];
}

- (void) viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];

}

// Camera setup is deferred to viewDidAppear so the view hierarchy (and
// therefore the preview layer's frame) is final before the session starts.
- (void) viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];

    [self initVars];
}

// Re-orients the preview layer after a rotation finishes.
- (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator {
    [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator];

    [coordinator animateAlongsideTransition:^(id<UIViewControllerTransitionCoordinatorContext>  _Nonnull context) {

    } completion:^(id<UIViewControllerTransitionCoordinatorContext>  _Nonnull context) {
        [self changeOrientation];
    }];
}

#pragma mark - IBActions

// ==================================================================================
// IBActions


// Requests a photo capture with a 1024x768 preview; the result arrives
// asynchronously in the capture delegate callback at the bottom of the file.
- (IBAction)takePhoto:(UIButton *)sender {
    AVCapturePhotoSettings *settings = [[AVCapturePhotoSettings alloc] init];
    NSNumber *previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.firstObject;

    NSString *formatTypeKey = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSString *widthKey = (NSString *)kCVPixelBufferWidthKey;
    NSString *heightKey = (NSString *)kCVPixelBufferHeightKey;

    NSDictionary *previewFormat = @{formatTypeKey:previewPixelType,
                                    widthKey:@1024,
                                    heightKey:@768
                                    };

    settings.previewPhotoFormat = previewFormat;
    [self.cameraOutput capturePhotoWithSettings:settings delegate:self];
}


#pragma mark - Public methods

// ==================================================================================
// Public methods

- (void) setupView {
    [self.collectionView reloadData];
}

#pragma mark - Private methods

// ==================================================================================
// Private methods

// Registers the gallery cell, then builds the capture pipeline:
// default video device -> session (photo preset) -> AVCapturePhotoOutput,
// with an aspect-fill preview layer added to `viewCamera`.
- (void) initVars {
    [self.collectionView registerNib:[CameraGalleryViewCell cellNib] forCellWithReuseIdentifier:[CameraGalleryViewCell cellId]];
    self.collectionView.dataSource = self;
    self.collectionView.delegate = self;

    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetPhoto];
    self.cameraOutput = [[AVCapturePhotoOutput alloc] init];

    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];

    // NOTE(review): `error` is never inspected — a nil deviceInput fails the
    // canAddInput: check silently.
    if ([self.session canAddInput:deviceInput]) {
        [self.session addInput:deviceInput];

        if ([self.session canAddOutput:self.cameraOutput]) {
            [self.session addOutput:self.cameraOutput];

            self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
            [self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

            self.previewLayer.frame = CGRectMake(0,0, self.view.bounds.size.width, self.viewCamera.bounds.size.height);
            [self.viewCamera.layer addSublayer:self.previewLayer];

            [self changeOrientation];

            [self.session startRunning];
        }
    }
}

// Maps the current interface orientation onto the preview layer's video
// orientation so the live feed is never rotated relative to the screen.
- (void) changeOrientation {

    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;

    // Portrait vs. landscape is inferred from the screen bounds; the status
    // bar orientation then disambiguates which of the two variants applies.
    CGRect size = [UIScreen mainScreen].bounds;
    if (size.size.height > size.size.width) {
        if (orientation == UIInterfaceOrientationPortrait) {
            self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        } else {
            self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
        }
    } else {
        if (orientation == UIInterfaceOrientationLandscapeRight) {
            self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
        } else {
            self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
        }
    }
}

#pragma mark - CollectionView delegate

// ==================================================================================
// CollectionView delegate

- (NSInteger) collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section {
    NSInteger numItems = [self.viewModel imageListCount];

    // Hide the gallery entirely while no photo has been taken yet.
    self.collectionView.hidden = !(numItems > 0);

    return numItems;
}

- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath {

    CameraGalleryViewCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:[CameraGalleryViewCell cellId] forIndexPath:indexPath];

    [cell imageForImageView:[self.viewModel imageFromListWithIndex:indexPath.row]];

    return cell;
}

#pragma mark - Camera delegate

// ==================================================================================
// Camera delegate

// AVCapturePhotoCaptureDelegate callback (iOS 10-era signature — this exact
// selector is required for AVFoundation to invoke it). Converts the JPEG
// sample buffer to a UIImage and hands it to the view model.
- (void)  captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings error:(NSError *)error {
    if (error) {
        return;
    }

    if (photoSampleBuffer && previewPhotoSampleBuffer) {
        NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
        [self.viewModel addImageToListAndRefresh:[UIImage imageWithData:imageData]];
    }
}

@end

网页内容由 Stack Overflow 提供,点击下方的原文链接可以查看英文原文。