我制作了一个实时翻译应用,它通过用户的相机识别物体并进行翻译。在我的iPhone 6s上运行良好,并且在任何模拟器上都不会崩溃,但当我在iPhone 6上运行时,只要我尝试切换到相机画面,应用就会崩溃。苹果公司也表示在iPad上也会崩溃。
某些设备是否不支持Vision API,还是我的代码有问题?
import UIKit
import AVKit
import Vision

// Shared translation output shown in the label below.
// NOTE(review): `language` / `language2` are globals defined elsewhere
// (presumably set on a settings screen) — confirm before refactoring.
var lang = ""
var lang2 = ""

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate {

    @IBAction func screenshotB(_ sender: Any) {
        // screenshot camera screen view
    }

    @IBOutlet weak var screenshotBOutlet: UIButton!
    @IBOutlet weak var swirlyGuy: UIActivityIndicatorView!
    @IBOutlet weak var title1: UILabel!
    @IBOutlet weak var settingsButtonOutlet: UIButton!
    @IBOutlet weak var launchScreen: UIViewX!
    @IBOutlet weak var launchScreenLogo: UIImageView!

    /// Loaded once and reused. The original code built a fresh `VNCoreMLModel`
    /// inside `captureOutput` for every camera frame, which is very expensive.
    private lazy var visionModel: VNCoreMLModel? = try? VNCoreMLModel(for: Resnet50().model)

    func stopSwirlyGuy() {
        swirlyGuy.stopAnimating()
    }

    /// Semi-transparent overlay label that shows the classification result.
    let identifierLabel: UILabel = {
        let label = UILabel()
        label.backgroundColor = UIColor(red: 0, green: 0, blue: 0, alpha: 0.4)
        label.textColor = .white
        label.textAlignment = .center
        label.translatesAutoresizingMaskIntoConstraints = false
        return label
    }()

    @IBAction func prepareForUnwind(segue: UIStoryboardSegue) {
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        launchScreen.alpha = 1
        launchScreenLogo.alpha = 1
        swirlyGuy.startAnimating()

        // Start up the camera.
        let captureSession = AVCaptureSession()
        // FIX: `.hd4K3840x2160` is only available on devices whose camera can
        // record 4K (iPhone 6s and later). Unconditionally assigning an
        // unsupported preset raises an exception — this is exactly the crash
        // seen on the iPhone 6 and on iPads, while the 6s and the Simulator
        // are unaffected. Always ask the session first and fall back.
        if captureSession.canSetSessionPreset(.hd4K3840x2160) {
            captureSession.sessionPreset = .hd4K3840x2160
        } else {
            captureSession.sessionPreset = .high
        }

        guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        // Guard the add calls as well: adding an unsupported input/output
        // also raises an exception instead of failing gracefully.
        guard captureSession.canAddInput(input) else { return }
        captureSession.addInput(input)
        captureSession.startRunning()

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        guard captureSession.canAddOutput(dataOutput) else { return }
        captureSession.addOutput(dataOutput)

        setupIdentifierConfidenceLabel()
        setupSettingsButton()
        setupTitle()
        setupSwirlyGuy()
        setupScreenshot()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Fade out the in-app "launch screen" once the camera view is up.
        UIView.animate(withDuration: 1.5) {
            self.launchScreen.alpha = 0
            self.launchScreenLogo.alpha = 0
        }
    }

    // NOTE(review): these outlets are normally already in the view hierarchy;
    // re-adding them here only moves them above the camera preview layer.
    fileprivate func setupSettingsButton() {
        view.addSubview(settingsButtonOutlet)
    }

    fileprivate func setupScreenshot() {
        view.addSubview(screenshotBOutlet)
    }

    fileprivate func setupSwirlyGuy() {
        view.addSubview(swirlyGuy)
    }

    fileprivate func setupTitle() {
        view.addSubview(title1)
    }

    /// Pins the result label to the bottom edge, 100 pt tall, full width.
    fileprivate func setupIdentifierConfidenceLabel() {
        view.addSubview(identifierLabel)
        identifierLabel.bottomAnchor.constraint(equalTo: view.bottomAnchor).isActive = true
        identifierLabel.leftAnchor.constraint(equalTo: view.leftAnchor).isActive = true
        identifierLabel.rightAnchor.constraint(equalTo: view.rightAnchor).isActive = true
        identifierLabel.heightAnchor.constraint(equalToConstant: 100).isActive = true
        identifierLabel.numberOfLines = 0
    }

    /// Classifies every camera frame with Resnet50 and shows the top result,
    /// translated per the selected languages, in `identifierLabel`.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        // Reuse the cached model instead of rebuilding it per frame.
        guard let model = visionModel else { return }

        let request = VNCoreMLRequest(model: model) { (finishedReq, err) in
            // perhaps check the err
            guard let results = finishedReq.results as? [VNClassificationObservation] else { return }
            guard let firstObservation = results.first else { return }
            print(firstObservation.identifier, firstObservation.confidence)

            let x = (firstObservation.confidence)
            // Round the confidence to 4 decimal places for display.
            let y = (x * 10000).rounded() / 10000
            let z = (firstObservation.identifier)
            let s = (self.translateSpanish(object1: firstObservation.identifier))
            let f = (self.translateFrench(object1: firstObservation.identifier))

            if language == "English" {
                lang = z
            } else if language == "Spanish" {
                lang = s
            } else {
                lang = f
            }

            if language2 == "Spanish" {
                lang2 = s
            } else if language2 == "English" {
                lang2 = z
            } else {
                lang2 = f
            }

            // UI updates must happen on the main queue; the sample-buffer
            // delegate runs on the "videoQueue" dispatch queue.
            DispatchQueue.main.async {
                self.identifierLabel.text = "\(lang)" + " = " + "\(lang2) \n \(y * 100)% accuracy"
                self.stopSwirlyGuy()
            }
        }
        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }
    // Translation functions omitted for brevity
这是视图控制器的代码,它将切换到主屏幕,在那里进行相机画面和Vision处理。
import UIKit
class FirstLaunchViewController: UIViewController {

    // MARK: - Outlets

    @IBOutlet weak var title1: UILabelX!
    @IBOutlet weak var logo1: UIImageView!
    @IBOutlet weak var description1: UILabel!
    @IBOutlet weak var buttonOutlet: UIButtonX!
    @IBOutlet weak var initialBackground: UIViewX!
    @IBOutlet weak var initialLogo: UIImageView!

    /// Records that first launch is done, then segues to the main screen.
    @IBAction func toVC(_ sender: Any) {
        UserDefaults.standard.set(false, forKey: "name")
        performSegue(withIdentifier: "toMain", sender: self)
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Splash artwork starts fully visible…
        initialLogo.alpha = 1
        initialBackground.alpha = 1
        // …while all onboarding content starts hidden.
        for element in [title1, logo1, description1, buttonOutlet] as [UIView?] {
            element?.alpha = 0
        }
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Fade the splash away, then kick off the intro animation chain.
        UIView.animate(withDuration: 1.5, animations: {
            self.initialLogo.alpha = 0
            self.initialBackground.alpha = 0
        }, completion: { _ in
            self.initialBackgroundGone()
        })
    }

    // MARK: - Intro animation chain (each step reveals one element)

    func initialBackgroundGone() {
        UIView.animate(withDuration: 1.5,
                       animations: { self.title1.alpha = 1 },
                       completion: { _ in self.showBackgroundAgain() })
    }

    func showBackgroundAgain() {
        UIView.animate(withDuration: 1.3,
                       animations: { self.logo1.alpha = 1 },
                       completion: { _ in self.showTitle() })
    }

    func showTitle() {
        UIView.animate(withDuration: 1.5,
                       animations: { self.description1.alpha = 1 },
                       completion: { _ in self.showEverythingElse() })
    }

    func showEverythingElse() {
        UIView.animate(withDuration: 3.5) {
            self.buttonOutlet.alpha = 1
        }
    }
}
回答:
问题不在 Vision API,而在这一行:`captureSession.sessionPreset = .hd4K3840x2160`。4K 预设只有配备支持 4K 录制摄像头的设备(iPhone 6s 及更新机型)才可用;在 iPhone 6 和大多数 iPad 上设置不受支持的 sessionPreset 会抛出异常,导致应用在切换到相机画面时立即崩溃,而模拟器和 iPhone 6s 不受影响。正确做法是在赋值前先调用 `captureSession.canSetSessionPreset(.hd4K3840x2160)` 检查,不支持时回退到 `.high`。同理,调用 `addInput` / `addOutput` 之前也应先用 `canAddInput` / `canAddOutput` 检查。另外建议把 `VNCoreMLModel(for: Resnet50().model)` 提取为属性只创建一次,而不是在每一帧的 `captureOutput` 里重新构建。