lnlink

iOS app for connecting to Lightning nodes
git clone git://jb55.com/lnlink

QRScan.swift (3711B)


//
//  QRScan.swift
//  lightninglink
//
//  Created by William Casarin on 2022-02-05.
//

import AVFoundation
import UIKit
import SwiftUI

// SwiftUI wrapper around the UIKit scanner view controller. The `found`
// callback receives the decoded string from the first QR code scanned.
struct QRScanner: UIViewControllerRepresentable {
    var found: (String) -> Void

    init(found: @escaping (String) -> Void) {
        self.found = found
    }

    func makeUIViewController(context: Context) -> some UIViewController {
        return ScannerViewController().onScan(self.found)
    }

    func updateUIViewController(_ uiViewController: UIViewControllerType, context: Context) {
    }
}

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
    var captureSession: AVCaptureSession!
    var previewLayer: AVCaptureVideoPreviewLayer!
    var completion: ((String) -> Void)? = nil

    // Register the callback to invoke when a QR code is decoded.
    public func onScan(_ found: @escaping (String) -> Void) -> ScannerViewController {
        self.completion = found
        return self
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        view.backgroundColor = UIColor.black
        captureSession = AVCaptureSession()

        // Bail out silently if no camera is available (e.g. the simulator).
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
        let videoInput: AVCaptureDeviceInput

        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return
        }

        if (captureSession.canAddInput(videoInput)) {
            captureSession.addInput(videoInput)
        } else {
            failed()
            return
        }

        // Metadata output decodes QR codes and delivers them to the delegate.
        let metadataOutput = AVCaptureMetadataOutput()

        if (captureSession.canAddOutput(metadataOutput)) {
            captureSession.addOutput(metadataOutput)

            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr]
        } else {
            failed()
            return
        }

        // Show the camera feed full-screen behind the scan overlay.
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    func failed() {
        let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
        ac.addAction(UIAlertAction(title: "OK", style: .default))
        present(ac, animated: true)
        captureSession = nil
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        if (captureSession?.isRunning == false) {
            captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        if (captureSession?.isRunning == true) {
            captureSession.stopRunning()
        }
    }

    // Delegate callback: stop the session, vibrate, hand the decoded string
    // to the completion handler, then dismiss the scanner.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        captureSession.stopRunning()

        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            completion?(stringValue)
        }

        dismiss(animated: true)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }
}