// // ScannerViewController.swift // scanner // // Created by Alva on 2020/5/25. // Copyright © 2020 Alva. All rights reserved. // import SwiftUI import AVFoundation var SCREENWidth = UIScreen.main.bounds.size.width var SCREENHeight = UIScreen.main.bounds.size.height let QRCodeWidth = Double(min(SCREENWidth, SCREENHeight)) / 1.5 let RATIO = 0.45 enum ArgumentsEnum: String { case title = "SCAN_TITLE" case laserColor = "LASER_COLOR" case titleColor = "TITLE_COLOR" case playBeep = "KEY_PLAY_BEEP" case scanWidth = "SCAN_WIDTH" case scanHeight = "SCAN_HEIGHT" case promptMessage = "PROMPT_MESSAGE" case permissionDeniedMessage = "PERMISSION_DENIED_MESSAGE" case confirmText = "MESSAGE_CONFIRM_TEXT" case cancelText = "MESSAGE_CANCEL_TEXT" func getKeyValue(dictionary: NSDictionary) -> T? { guard let result = dictionary[self.rawValue] as? T else { return nil } return result } } protocol ScannerDelegate: class { func didScanWithResult(code: String) func didFailWithErrorCode(code: String) } class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate { @IBOutlet private var camera: UIView! private var session: AVCaptureSession? = nil private var top = 0.0 var bundle: Bundle? = nil var currentOrientation = UIInterfaceOrientationMask.portrait weak var delegate: ScannerDelegate? var arguments: NSDictionary = [:] var laserColor: UIColor = UIColor.clear var promptMessage: String? var permissionDeniedText: String = "Your privacy settings seem to prevent us from accessing your camera for barcode scanning. You can fix it by doing this, touch the OK button below to open the Settings and then turn the Camera on." var confirmText: String = "OK" var cancelText: String = "Cancel" var windowOrientation: UIInterfaceOrientation { if #available(iOS 13.0, *) { return view.window?.windowScene?.interfaceOrientation ?? .unknown } else { return UIApplication.shared.statusBarOrientation } } required init?(coder aDecoder: NSCoder) { super.init(coder: aDecoder) } init() { super.init(nibName: nil, bundle: nil) let mainBundle = Bundle(for: type(of: self)) let url = mainBundle.url(forResource: "FlutterScannerBundle", withExtension: "bundle") if let url = url { bundle = Bundle(url: url) } if(bundle == nil) { return } getCurrentOrientation() } override func viewDidLoad() { super.viewDidLoad() initArguments() setupNavigationBar() } override func viewWillAppear(_ animated: Bool) { super.viewWillAppear(animated) checkAuthorization() } override var shouldAutorotate: Bool { get { return false } } override var supportedInterfaceOrientations: UIInterfaceOrientationMask { get { return currentOrientation } } override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) { session?.stopRunning() SCREENWidth = UIScreen.main.bounds.size.width SCREENHeight = UIScreen.main.bounds.size.height checkAuthorization() } func getCurrentOrientation(){ switch UIDevice.current.orientation { case UIDeviceOrientation.faceDown: break case UIDeviceOrientation.unknown: break case UIDeviceOrientation.portrait: break case UIDeviceOrientation.portraitUpsideDown: break case UIDeviceOrientation.faceUp: break case UIDeviceOrientation.landscapeLeft: currentOrientation = UIInterfaceOrientationMask.landscapeRight break case UIDeviceOrientation.landscapeRight: currentOrientation = UIInterfaceOrientationMask.landscapeLeft break @unknown default: break } } func initArguments() { SCREENWidth = UIScreen.main.bounds.size.width SCREENHeight = UIScreen.main.bounds.size.height if let titleHex: String = ArgumentsEnum.title.getKeyValue(dictionary: arguments) { self.title = titleHex } if let laserColorHex: String = ArgumentsEnum.laserColor.getKeyValue(dictionary: arguments) { laserColor = UIColor(hexString: laserColorHex) ?? UIColor.clear } if let promptMessageHex: String = ArgumentsEnum.promptMessage.getKeyValue(dictionary: arguments) { promptMessage = promptMessageHex } if let permissionDeniedTextHex: String = ArgumentsEnum.permissionDeniedMessage.getKeyValue(dictionary: arguments) { permissionDeniedText = permissionDeniedTextHex } if let confirmTextHex: String = ArgumentsEnum.confirmText.getKeyValue(dictionary: arguments) { confirmText = confirmTextHex } if let cancelTextHex: String = ArgumentsEnum.cancelText.getKeyValue(dictionary: arguments) { cancelText = cancelTextHex } } func checkAuthorization(){ /* Check the video authorization status. Video access is required and audio access is optional. If the user denies audio access, AVCam won't record audio during movie recording. */ switch AVCaptureDevice.authorizationStatus(for: .video) { case .authorized: self.setupMaskView() self.beginScanning() break case .notDetermined: AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in if !granted { self.permissionDenied() } self.setupMaskView() self.beginScanning() }) default: self.permissionDenied() } } func permissionDenied(){ DispatchQueue.main.async { let alertController = UIAlertController(title: self.title, message: self.permissionDeniedText, preferredStyle: .alert) let confirmAction = UIAlertAction(title: self.confirmText, style: .default) { (action) in if let url = URL(string: UIApplication.openSettingsURLString) { if #available(iOS 10, *) { UIApplication.shared.open(url, options: [:], completionHandler: { (success) in self.dismiss(animated: true, completion: nil) }) } else { UIApplication.shared.openURL(url) self.dismiss(animated: true, completion: nil) } } } let cancelAction = UIAlertAction(title: self.cancelText, style: .default) { (action) in self.dismiss(animated: true, completion: nil) } alertController.addAction(confirmAction) alertController.addAction(cancelAction) self.present(alertController, animated: true) } } func setupMaskView() { UINib.init(nibName: "FlutterQrScanner", bundle: bundle!).instantiate(withOwner: self, options: nil) var scanY = Double(SCREENHeight) - top - QRCodeWidth scanY = scanY * RATIO let frame = CGRect(x: (Double(SCREENWidth) - QRCodeWidth) / 2.0, y: scanY, width: QRCodeWidth, height: QRCodeWidth) let backgroundView = UIView(frame: UIScreen.main.bounds) backgroundView.backgroundColor = UIColor.init(red: 0, green: 0, blue: 0, alpha: 0.6) camera.addSubview(backgroundView) let maskLayer = CAShapeLayer() maskLayer.fillRule = CAShapeLayerFillRule.evenOdd // fill rule let basicPath = UIBezierPath(rect: UIScreen.main.bounds) // basic let maskPath = UIBezierPath(roundedRect: frame, cornerRadius: 15) basicPath.append(maskPath) // recover maskLayer.path = basicPath.cgPath backgroundView.layer.mask = maskLayer let scanBorder = BorderCanvas(frame: frame, border: laserColor) camera.addSubview(scanBorder) let scanWindow = UIView(frame: frame) scanWindow.clipsToBounds = true camera.addSubview(scanWindow) let winMaskLayer = CAShapeLayer() // fill rule winMaskLayer.fillRule = CAShapeLayerFillRule.evenOdd let winFrame = CGRect(x: 0, y: 0, width: QRCodeWidth, height: QRCodeWidth) let winBasicPath = UIBezierPath(rect: winFrame) let winMaskPath = UIBezierPath(rect: winFrame) let winMaskPath2 = UIBezierPath(roundedRect: winFrame, cornerRadius: 15) winBasicPath.append(winMaskPath) winBasicPath.append(winMaskPath2) winMaskLayer.path = winBasicPath.cgPath scanWindow.layer.mask = winMaskLayer //scan window animation let scanNetImageViewH = scanWindow.frame.size.height let scanNetImageViewW = scanWindow.frame.size.width let scanNetImageView = UIImageView(image: UIImage.init(named: "scannet", in: bundle!, compatibleWith: nil)) scanNetImageView.frame = CGRect(x: 0, y: -scanNetImageViewH, width: scanNetImageViewW, height: scanNetImageViewH) let scanNetAnimation = CABasicAnimation(keyPath: "transform.translation.y") scanNetAnimation.byValue = NSNumber(value: QRCodeWidth) scanNetAnimation.duration = 1.5 scanNetAnimation.repeatCount = MAXFLOAT scanNetImageView.layer.add(scanNetAnimation, forKey: "animation") scanWindow.addSubview(scanNetImageView) let promptSize = CGSize(width: Double(SCREENWidth) - 30, height: 0) let promptRect = (promptMessage ?? "").boundingRect(with: promptSize, options: NSStringDrawingOptions.usesLineFragmentOrigin, attributes: nil , context: nil) let promptLabel = UILabel(frame: CGRect(x: 15, y: scanY + QRCodeWidth + 30, width: Double(SCREENWidth) - 30, height: Double(promptRect.size.height))) promptLabel.textColor = UIColor.gray promptLabel.text = promptMessage promptLabel.textAlignment = NSTextAlignment.center camera.addSubview(promptLabel) } func setupNavigationBar(){ let navHeight = self.navigationController?.navigationBar.bounds.height ?? 20 let backButton = UIButton(frame: CGRect(x: 0, y: 0, width: navHeight, height: navHeight)) backButton.setImage(UIImage.init(named: "arrow_left", in: bundle!, compatibleWith: nil), for: .normal) backButton.setTitle("", for: .normal) backButton.setTitleColor(backButton.tintColor, for: .normal) backButton.addTarget(self, action: #selector(backButtonPressed), for: .touchUpInside) backButton.imageView?.contentMode = .scaleAspectFit self.navigationItem.leftBarButtonItem = UIBarButtonItem(customView: backButton) self.navigationController?.navigationBar.setBackgroundImage(UIImage(), for: .default) self.navigationController?.navigationBar.shadowImage = UIImage() self.navigationController?.navigationBar.isTranslucent = true self.navigationController?.navigationBar.titleTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.white, NSAttributedString.Key.font: UIFont.systemFont(ofSize:18)] } func beginScanning() { //get device let device = AVCaptureDevice.default(for: .video) //create device input var input: AVCaptureDeviceInput? = nil do { if let device = device { input = try AVCaptureDeviceInput(device: device) } } catch { delegate?.didFailWithErrorCode(code: "") return } if(input == nil){ delegate?.didFailWithErrorCode(code: "") return } //create device output let output = AVCaptureMetadataOutput() let xx = (Double(SCREENHeight) - QRCodeWidth - top) * RATIO let x = xx / Double(SCREENHeight) let yy = (Double(SCREENWidth) - QRCodeWidth) / 2.0 let y = yy / Double(SCREENWidth) let width = QRCodeWidth / Double(SCREENHeight) let height = QRCodeWidth / Double(SCREENWidth) output.rectOfInterest = CGRect(x: x, y: y, width: width, height: height) output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main) session = AVCaptureSession() session!.sessionPreset = .high if session?.canAddInput(input!) ?? false { session!.addInput(input!) session!.addOutput(output) //code data type output.metadataObjectTypes = [ .qr, .ean13, .ean8, .code128 ] let layer = AVCaptureVideoPreviewLayer(session: session!) layer.frame = CGRect(x: 0, y: 0, width: CGFloat(SCREENWidth), height: SCREENHeight - CGFloat(top)) layer.videoGravity = .resizeAspectFill camera.layer.insertSublayer(layer, at: 0) DispatchQueue.main.async { /* Dispatch video streaming to the main queue because AVCaptureVideoPreviewLayer is the backing layer for PreviewView. You can manipulate UIView only on the main thread. Note: As an exception to the above rule, it's not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation. Use the window scene's orientation as the initial video orientation. Subsequent orientation changes are handled by CameraViewController.viewWillTransition(to:with:). */ var initialVideoOrientation: AVCaptureVideoOrientation = .portrait if self.windowOrientation != .unknown { if let videoOrientation = AVCaptureVideoOrientation(rawValue: self.windowOrientation.rawValue) { initialVideoOrientation = videoOrientation } } layer.connection?.videoOrientation = initialVideoOrientation } //start session!.startRunning() } else { print("Couldn't add video device input to the session.") session!.commitConfiguration() return } } func metadataOutput(_ captureOutput: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) { if metadataObjects.count > 0 { //code result data let metadataObject = metadataObjects[0] as? AVMetadataMachineReadableCodeObject delegate?.didScanWithResult(code: metadataObject?.stringValue ?? "") self.dismiss(animated: true, completion: nil) } } @objc func backButtonPressed() { delegate?.didFailWithErrorCode(code: "canceled") self.dismiss(animated: true, completion: nil) } override func viewWillDisappear(_ animated: Bool) { super.viewWillDisappear(animated) self.session?.stopRunning() } } class BorderCanvas: UIView { var border: UIColor = UIColor.clear override init(frame: CGRect) { super.init(frame: frame) self.backgroundColor = UIColor.clear } convenience init(frame: CGRect, border: UIColor?) { self.init(frame: frame) self.border = border ?? UIColor.clear } required init?(coder aDecoder: NSCoder) { fatalError("init(coder:) has not been implemented") } override func draw(_ rect: CGRect) { let pathRect = self.bounds.insetBy(dx: 1, dy: 1) let path = UIBezierPath(roundedRect: pathRect, cornerRadius: 15) path.lineWidth = 3 UIColor.clear.setFill() self.border.setStroke() path.fill() path.stroke() let maskLayer = CAShapeLayer() maskLayer.fillRule = CAShapeLayerFillRule.evenOdd // fill rule let basicPath = UIBezierPath(rect: CGRect(x: 0, y: 0, width: QRCodeWidth, height: QRCodeWidth)) // basic let maskPath = UIBezierPath(rect: CGRect(x: QRCodeWidth / 6, y: 0, width: QRCodeWidth * 2 / 3, height: QRCodeWidth)) let maskPath2 = UIBezierPath(rect: CGRect(x: 0, y: QRCodeWidth / 6, width: QRCodeWidth, height: QRCodeWidth * 2 / 3)) basicPath.append(maskPath) // recover basicPath.append(maskPath2) // recover maskLayer.path = basicPath.cgPath layer.mask = maskLayer } } extension AVCaptureVideoOrientation { init?(deviceOrientation: UIDeviceOrientation) { switch deviceOrientation { case .portrait: self = .portrait case .portraitUpsideDown: self = .portraitUpsideDown case .landscapeLeft: self = .landscapeRight case .landscapeRight: self = .landscapeLeft default: return nil } } init?(interfaceOrientation: UIInterfaceOrientation) { switch interfaceOrientation { case .portrait: self = .portrait case .portraitUpsideDown: self = .portraitUpsideDown case .landscapeLeft: self = .landscapeLeft case .landscapeRight: self = .landscapeRight default: return nil } } } extension UIColor { convenience init(rgba: Int) { self.init( red: CGFloat((rgba & 0x00FF0000) >> 16) / 255.0, green: CGFloat((rgba & 0x0000FF00) >> 8) / 255.0, blue: CGFloat(rgba & 0x000000FF) / 255.0, alpha: CGFloat((rgba & 0xFF000000) >> 24) / 255.0 ) } convenience init?(hexString: String) { var chars = Array(hexString.hasPrefix("#") ? hexString.dropFirst() : hexString[...]) let red, green, blue, alpha: CGFloat switch chars.count { case 3: chars = chars.flatMap { [$0, $0] } fallthrough case 6: chars = ["F","F"] + chars fallthrough case 8: alpha = CGFloat(strtoul(String(chars[0...1]), nil, 16)) / 255 red = CGFloat(strtoul(String(chars[2...3]), nil, 16)) / 255 green = CGFloat(strtoul(String(chars[4...5]), nil, 16)) / 255 blue = CGFloat(strtoul(String(chars[6...7]), nil, 16)) / 255 default: return nil } self.init(red: red, green: green, blue: blue, alpha: alpha) } }