当AVCaptureVideoPreviewLayer子视图添加时,UIView忽略大小约束

你好,周一愉快!

我试图建立一个自定义相机,外观与Instagram类似。

首先,我使用Storyboard添加一个UIView,垂直和水平居中放置UIView,并通过故事板将宽度和高度设置为200。

然后,在我的代码中,我将AVCaptureVideoPreviewLayer添加到我称为cameraView UIView,然后将AVCaptureVideoPreviewLayer的框架设置为我的UIView框架。

问题:看起来UIView的宽度和高度约束被忽略。 相机预览应该位于视图的中心,但不是。 我在这里做错了什么? 我想这与预览图层有关。 或者,也许我需要使用UIImageView而不是UIView?

任何输入将不胜感激! 谢谢!

下面是截图,看看它的样子......注意:突出显示图片以查看截图边缘的位置,对此很抱歉。

在这里输入图像描述

我的代码:

extension String {
func stripCharactersInSet(chars: [Character]) -> String {
    return String(filter(self) {find(chars, $0) == nil})
}
}

class MyCameraViewController: UIViewController {

let session = AVCaptureSession()
var captureDevice: AVCaptureDevice?
var previewLayer: AVCaptureVideoPreviewLayer?
var stillImageOutput = AVCaptureStillImageOutput()
var imageData: NSData!

@IBOutlet weak var capturePhotoButton: UIButton!
@IBOutlet weak var flashButton: UIButton!
@IBOutlet weak var cameraView: UIView!

override func prefersStatusBarHidden() -> Bool {
    return true
}

override func viewDidLoad() {
    super.viewDidLoad()
    //flashButton.hidden = true

    if(session.canSetSessionPreset(AVCaptureSessionPresetHigh)) {
        session.sessionPreset = AVCaptureSessionPresetHigh
    } else {
        println("Cannot Set session Preset to AVCaptureSessionPresetPhoto")
    }

    let devices = AVCaptureDevice.devices()
    for device in devices {
        if(device.hasMediaType(AVMediaTypeVideo)){
            if(device.position == AVCaptureDevicePosition.Front){
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()

                }
            }
        }
    }
}

func setCaptureDevice() {
    let devices = AVCaptureDevice.devices()
    for device in devices {
        if(device.hasMediaType(AVMediaTypeVideo)){
            if(device.position == AVCaptureDevicePosition.Back){
                captureDevice = device as? AVCaptureDevice

            }
        }
    }

}

@IBAction func flashButtonPressed(sender: UIButton) {
    if captureDevice!.hasFlash {
        if captureDevice!.isFlashModeSupported(AVCaptureFlashMode.On) {
            if (captureDevice!.lockForConfiguration(nil)) {
                if (captureDevice!.flashActive) {
                    captureDevice!.flashMode = AVCaptureFlashMode.Off
                    flashButton.setTitle("Flash Off", forState: UIControlState.Normal)
                } else {
                    captureDevice!.flashMode = AVCaptureFlashMode.On
                    flashButton.setTitle("Flash On", forState: UIControlState.Normal)
                }
            }
            captureDevice!.unlockForConfiguration()
        }
    }
}

@IBAction func switchCamera(sender: UIButton) {
    let currentCameraInput: AVCaptureInput = session.inputs[0] as! AVCaptureInput
    session.removeInput(currentCameraInput)

    let newCamera: AVCaptureDevice?
    if(captureDevice!.position == AVCaptureDevicePosition.Back){
        println("Setting new camera with Front")
        flashButton.hidden = true
        newCamera = self.cameraWithPosition(AVCaptureDevicePosition.Front)
    } else {
        println("Setting new camera with Back")
        flashButton.hidden = false
        newCamera = self.cameraWithPosition(AVCaptureDevicePosition.Back)
    }

    let newVideoInput = AVCaptureDeviceInput(device: newCamera!, error: nil)
    if(newVideoInput != nil) {
        session.addInput(newVideoInput)
    } else {
        println("Error creating capture device input")
    }
    captureDevice! = newCamera!
    session.commitConfiguration()

}

func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice {
    let devices = AVCaptureDevice.devices()
    for device in devices {
            if(device.position == position){
                return device as! AVCaptureDevice
            }
    }
    return AVCaptureDevice()
}

func beginSession() {
    if(captureDevice!.isFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus)) {
            captureDevice?.focusMode = AVCaptureFocusMode.ContinuousAutoFocus
        }

    var err : NSError? = nil
    //Add Input, which is my captureDevice
    session.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
    if err != nil {
        println("error: (err?.localizedDescription)")
    }
    previewLayer = AVCaptureVideoPreviewLayer(session: session)



    //self.view.layer.addSublayer(previewLayer)
    cameraView.layer.addSublayer(previewLayer)

    //previewLayer?.frame = self.view.layer.frame
    previewLayer?.frame = cameraView.layer.frame

    session.startRunning()
}

struct imageViewStruct {
    static var image: UIImage?
}

@IBAction func shotPress(sender: UIButton) {
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    if session.canAddOutput(stillImageOutput) {
        session.addOutput(stillImageOutput)
    }
    var videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo)

    if videoConnection != nil {
        stillImageOutput.captureStillImageAsynchronouslyFromConnection(stillImageOutput.connectionWithMediaType(AVMediaTypeVideo))
            { (imageDataSampleBuffer, error) -> Void in
                self.imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
                var dataProvider = CGDataProviderCreateWithCFData(self.imageData)
                var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, kCGRenderingIntentDefault)
                var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Up)

                imageViewStruct.image = image

                //self.uploadPhoto(image!)

                self.presentPhotoEditViewController(image!)                    
        }}

}

func setupPhotoEditor() {
    //Remove existing camera stuff
    previewLayer?.removeFromSuperlayer()
}

func presentPhotoEditViewController(imageToSend: UIImage) {
    let vc = self.storyboard?.instantiateViewControllerWithIdentifier("photoEditor") as! PhotoEditViewController
    self.presentViewController(vc, animated: true, completion: nil)
}

func uploadPhoto(image: UIImage) {
    let imageData = UIImagePNGRepresentation(image)
    let imageFile = PFFile(name:"image.png", data:imageData)

    var userPhoto = PFObject(className:getStringForVenue())
    userPhoto["imageFile"] = imageFile
    userPhoto.saveInBackground()
}

func getStringForVenue() -> String {

    let initialVenueString = LocViewController.variables.selectedVenue
    let chars: [Character] = ["'",",",":"," "]

    println(initialVenueString.stripCharactersInSet(chars))

    return initialVenueString.stripCharactersInSet(chars)
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

}

你的方法bigSession()应该有一些AVCaptureVideoPreviewLayer的设置:setVideoGravity和setFrame。

OBJ-C

[previewLayer setVideoGravity: AVLayerVideoGravityResizeAspectFill];
[previewLayer setFrame:self.cameraView.layer.bounds];

这会将您的视频纵横比视图设置为适合您指定的视图图层框架。

链接地址: http://www.djcxy.com/p/74271.html

上一篇: UIView ignoring size constraints when AVCaptureVideoPreviewLayer subview added

下一篇: custom annotation view for maps