commit 828bdd12ccb2da3697ba1f5c76d0af0133750888
parent d7126171524f564c42f556858d83ec495b9d2271
Author: William Casarin <jb55@jb55.com>
Date: Fri, 25 Feb 2022 13:12:47 -0800
fix buggy qrcode scanner

Replace the hand-rolled QRScan.swift scanner with a vendored copy of Paul
Hudson's CodeScanner (CodeScanner.swift, ScannerCoordinator.swift,
ScannerViewController.swift). The new scanner starts and stops the capture
session off the main queue, handles interface-orientation changes,
tap-to-focus, the torch, gallery import, and a simulator fallback.
ContentView now drives CodeScannerView directly and resets the sheet/alert
state before presenting the clipboard-invoice alert.
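For reference, this is roughly how the vendored view is driven from SwiftUI.
A minimal sketch only: `ScannerSheet` and `handleScan` are illustrative names
and are not part of this commit.

    import SwiftUI
    import AVFoundation

    struct ScannerSheet: View {
        var body: some View {
            // Scan QR codes only; the completion receives a Result<ScanResult, ScanError>.
            CodeScannerView(codeTypes: [.qr]) { result in
                switch result {
                case .success(let scan):
                    handleScan(scan.string)   // e.g. strip a "lightning:" prefix, then parse
                case .failure:
                    break                     // dismiss or surface the ScanError
                }
            }
        }

        // Illustrative helper, not part of this commit.
        func handleScan(_ code: String) {
            var invstr = code
            if code.starts(with: "lightning:") {
                invstr = String(code.dropFirst("lightning:".count))
            }
            // hand `invstr` off to invoice parsing / the payment flow
            print(invstr)
        }
    }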
Signed-off-by: William Casarin <jb55@jb55.com>
Diffstat:
6 files changed, 536 insertions(+), 140 deletions(-)
diff --git a/lightninglink.xcodeproj/project.pbxproj b/lightninglink.xcodeproj/project.pbxproj
@@ -7,7 +7,6 @@
objects = {
/* Begin PBXBuildFile section */
- 4C0359FB27AEE86600FF92CE /* QRScan.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C0359FA27AEE86600FF92CE /* QRScan.swift */; };
4C035A0027AEF90000FF92CE /* PayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C0359FF27AEF90000FF92CE /* PayView.swift */; };
4C035A0427AEFD2F00FF92CE /* Invoice.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C035A0327AEFD2F00FF92CE /* Invoice.swift */; };
4C641D192788FF2F002A36C9 /* lightninglinkApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C641D182788FF2F002A36C9 /* lightninglinkApp.swift */; };
@@ -24,6 +23,9 @@
4C873FD527A6EF3F008C972C /* LNSocket.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C873FD427A6EF3F008C972C /* LNSocket.swift */; };
4C873FD727A6F1F5008C972C /* RPC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C873FD627A6F1F5008C972C /* RPC.swift */; };
4C8B289327B44EAF00DF3372 /* LNLink.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C8B289227B44EAF00DF3372 /* LNLink.swift */; };
+ 4CCB0E2627C979F30026461C /* CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CCB0E2227C979F30026461C /* CodeScanner.swift */; };
+ 4CCB0E2727C979F30026461C /* ScannerCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CCB0E2327C979F30026461C /* ScannerCoordinator.swift */; };
+ 4CCB0E2827C979F30026461C /* ScannerViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CCB0E2427C979F30026461C /* ScannerViewController.swift */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@@ -44,7 +46,6 @@
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
- 4C0359FA27AEE86600FF92CE /* QRScan.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = QRScan.swift; sourceTree = "<group>"; };
4C0359FE27AEEE8500FF92CE /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
4C0359FF27AEF90000FF92CE /* PayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PayView.swift; sourceTree = "<group>"; };
4C035A0327AEFD2F00FF92CE /* Invoice.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Invoice.swift; sourceTree = "<group>"; };
@@ -69,6 +70,9 @@
4C873FD427A6EF3F008C972C /* LNSocket.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LNSocket.swift; sourceTree = "<group>"; };
4C873FD627A6F1F5008C972C /* RPC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RPC.swift; sourceTree = "<group>"; };
4C8B289227B44EAF00DF3372 /* LNLink.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LNLink.swift; sourceTree = "<group>"; };
+ 4CCB0E2227C979F30026461C /* CodeScanner.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CodeScanner.swift; sourceTree = "<group>"; };
+ 4CCB0E2327C979F30026461C /* ScannerCoordinator.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ScannerCoordinator.swift; sourceTree = "<group>"; };
+ 4CCB0E2427C979F30026461C /* ScannerViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ScannerViewController.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -125,6 +129,7 @@
4C641D172788FF2F002A36C9 /* lightninglink */ = {
isa = PBXGroup;
children = (
+ 4CCB0E1F27C979F30026461C /* CodeScanner */,
4C0359FE27AEEE8500FF92CE /* Info.plist */,
4C873FD427A6EF3F008C972C /* LNSocket.swift */,
4C641D182788FF2F002A36C9 /* lightninglinkApp.swift */,
@@ -132,7 +137,6 @@
4C641D1C2788FF30002A36C9 /* Assets.xcassets */,
4C641D1E2788FF30002A36C9 /* Preview Content */,
4C873FD627A6F1F5008C972C /* RPC.swift */,
- 4C0359FA27AEE86600FF92CE /* QRScan.swift */,
4C0359FF27AEF90000FF92CE /* PayView.swift */,
4C035A0327AEFD2F00FF92CE /* Invoice.swift */,
4C8B289227B44EAF00DF3372 /* LNLink.swift */,
@@ -186,6 +190,16 @@
name = Frameworks;
sourceTree = "<group>";
};
+ 4CCB0E1F27C979F30026461C /* CodeScanner */ = {
+ isa = PBXGroup;
+ children = (
+ 4CCB0E2227C979F30026461C /* CodeScanner.swift */,
+ 4CCB0E2327C979F30026461C /* ScannerCoordinator.swift */,
+ 4CCB0E2427C979F30026461C /* ScannerViewController.swift */,
+ );
+ path = CodeScanner;
+ sourceTree = "<group>";
+ };
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
@@ -321,11 +335,13 @@
4C873FD527A6EF3F008C972C /* LNSocket.swift in Sources */,
4C641D1B2788FF2F002A36C9 /* ContentView.swift in Sources */,
4C8B289327B44EAF00DF3372 /* LNLink.swift in Sources */,
+ 4CCB0E2727C979F30026461C /* ScannerCoordinator.swift in Sources */,
4C641D492789083E002A36C9 /* lightninglink.c in Sources */,
+ 4CCB0E2627C979F30026461C /* CodeScanner.swift in Sources */,
+ 4CCB0E2827C979F30026461C /* ScannerViewController.swift in Sources */,
4C035A0427AEFD2F00FF92CE /* Invoice.swift in Sources */,
4C641D192788FF2F002A36C9 /* lightninglinkApp.swift in Sources */,
4C873FD727A6F1F5008C972C /* RPC.swift in Sources */,
- 4C0359FB27AEE86600FF92CE /* QRScan.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
diff --git a/lightninglink/CodeScanner/CodeScanner.swift b/lightninglink/CodeScanner/CodeScanner.swift
@@ -0,0 +1,114 @@
+//
+// CodeScanner.swift
+// https://github.com/twostraws/CodeScanner
+//
+// Created by Paul Hudson on 14/12/2021.
+// Copyright © 2021 Paul Hudson. All rights reserved.
+//
+
+import AVFoundation
+import SwiftUI
+
+/// An enum describing the ways CodeScannerView can hit scanning problems.
+public enum ScanError: Error {
+ /// The camera could not be accessed.
+ case badInput
+
+ /// The camera was not capable of scanning the requested codes.
+ case badOutput
+
+ /// Initialization failed.
+ case initError(_ error: Error)
+}
+
+/// The result from a successful scan: the string that was scanned, and also the type of data that was found.
+/// The type is useful for times when you've asked to scan several different code types at the same time, because
+/// it will report the exact code type that was found.
+public struct ScanResult {
+ /// The contents of the code.
+ public let string: String
+
+ /// The type of code that was matched.
+ public let type: AVMetadataObject.ObjectType
+}
+
+/// The operating mode for CodeScannerView.
+public enum ScanMode {
+ /// Scan exactly one code, then stop.
+ case once
+
+ /// Scan each code no more than once.
+ case oncePerCode
+
+ /// Keep scanning all codes until dismissed.
+ case continuous
+}
+
+/// A SwiftUI view that is able to scan barcodes, QR codes, and more, and send back what was found.
+/// To use, set `codeTypes` to be an array of things to scan for, e.g. `[.qr]`, and set `completion` to
+/// a closure that will be called when scanning has finished. The closure receives a `Result` containing either a `ScanResult` or a `ScanError`.
+/// For testing inside the simulator, set the `simulatedData` property to some test data you want to send back.
+public struct CodeScannerView: UIViewControllerRepresentable {
+
+ public let codeTypes: [AVMetadataObject.ObjectType]
+ public let scanMode: ScanMode
+ public let scanInterval: Double
+ public let showViewfinder: Bool
+ public var simulatedData = ""
+ public var shouldVibrateOnSuccess: Bool
+ public var isTorchOn: Bool
+ public var isGalleryPresented: Binding<Bool>
+ public var videoCaptureDevice: AVCaptureDevice?
+ public var completion: (Result<ScanResult, ScanError>) -> Void
+
+ public init(
+ codeTypes: [AVMetadataObject.ObjectType],
+ scanMode: ScanMode = .once,
+ scanInterval: Double = 2.0,
+ showViewfinder: Bool = false,
+ simulatedData: String = "",
+ shouldVibrateOnSuccess: Bool = true,
+ isTorchOn: Bool = false,
+ isGalleryPresented: Binding<Bool> = .constant(false),
+ videoCaptureDevice: AVCaptureDevice? = AVCaptureDevice.default(for: .video),
+ completion: @escaping (Result<ScanResult, ScanError>) -> Void
+ ) {
+ self.codeTypes = codeTypes
+ self.scanMode = scanMode
+ self.showViewfinder = showViewfinder
+ self.scanInterval = scanInterval
+ self.simulatedData = simulatedData
+ self.shouldVibrateOnSuccess = shouldVibrateOnSuccess
+ self.isTorchOn = isTorchOn
+ self.isGalleryPresented = isGalleryPresented
+ self.videoCaptureDevice = videoCaptureDevice
+ self.completion = completion
+ }
+
+ public func makeCoordinator() -> ScannerCoordinator {
+ ScannerCoordinator(parent: self)
+ }
+
+ public func makeUIViewController(context: Context) -> ScannerViewController {
+ let viewController = ScannerViewController(showViewfinder: showViewfinder)
+ viewController.delegate = context.coordinator
+ return viewController
+ }
+
+ public func updateUIViewController(_ uiViewController: ScannerViewController, context: Context) {
+ uiViewController.updateViewController(
+ isTorchOn: isTorchOn,
+ isGalleryPresented: isGalleryPresented.wrappedValue
+ )
+ }
+
+}
+
+@available(macCatalyst 14.0, *)
+struct CodeScannerView_Previews: PreviewProvider {
+ static var previews: some View {
+ CodeScannerView(codeTypes: [.qr]) { result in
+ // do nothing
+ }
+ }
+}
diff --git a/lightninglink/CodeScanner/ScannerCoordinator.swift b/lightninglink/CodeScanner/ScannerCoordinator.swift
@@ -0,0 +1,75 @@
+//
+// CodeScanner.swift
+// https://github.com/twostraws/CodeScanner
+//
+// Created by Paul Hudson on 14/12/2021.
+// Copyright © 2021 Paul Hudson. All rights reserved.
+//
+
+import AVFoundation
+import SwiftUI
+
+extension CodeScannerView {
+ @available(macCatalyst 14.0, *)
+ public class ScannerCoordinator: NSObject, AVCaptureMetadataOutputObjectsDelegate {
+ var parent: CodeScannerView
+ var codesFound = Set<String>()
+ var didFinishScanning = false
+ var lastTime = Date(timeIntervalSince1970: 0)
+
+ init(parent: CodeScannerView) {
+ self.parent = parent
+ }
+
+ public func reset() {
+ codesFound.removeAll()
+ didFinishScanning = false
+ lastTime = Date(timeIntervalSince1970: 0)
+ }
+
+ public func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
+ if let metadataObject = metadataObjects.first {
+ guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
+ guard let stringValue = readableObject.stringValue else { return }
+ guard didFinishScanning == false else { return }
+ let result = ScanResult(string: stringValue, type: readableObject.type)
+
+ switch parent.scanMode {
+ case .once:
+ found(result)
+ // make sure we only trigger scan once per use
+ didFinishScanning = true
+
+ case .oncePerCode:
+ if !codesFound.contains(stringValue) {
+ codesFound.insert(stringValue)
+ found(result)
+ }
+
+ case .continuous:
+ if isPastScanInterval() {
+ found(result)
+ }
+ }
+ }
+ }
+
+ func isPastScanInterval() -> Bool {
+ Date().timeIntervalSince(lastTime) >= parent.scanInterval
+ }
+
+ func found(_ result: ScanResult) {
+ lastTime = Date()
+
+ if parent.shouldVibrateOnSuccess {
+ AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
+ }
+
+ parent.completion(.success(result))
+ }
+
+ func didFail(reason: ScanError) {
+ parent.completion(.failure(reason))
+ }
+ }
+}
diff --git a/lightninglink/CodeScanner/ScannerViewController.swift b/lightninglink/CodeScanner/ScannerViewController.swift
@@ -0,0 +1,300 @@
+//
+// CodeScanner.swift
+// https://github.com/twostraws/CodeScanner
+//
+// Created by Paul Hudson on 14/12/2021.
+// Copyright © 2021 Paul Hudson. All rights reserved.
+//
+
+import AVFoundation
+import UIKit
+
+extension CodeScannerView {
+
+ @available(macCatalyst 14.0, *)
+ public class ScannerViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
+
+ var delegate: ScannerCoordinator?
+ private let showViewfinder: Bool
+
+ private var isGalleryShowing: Bool = false {
+ didSet {
+ // Update binding
+ if delegate?.parent.isGalleryPresented.wrappedValue != isGalleryShowing {
+ delegate?.parent.isGalleryPresented.wrappedValue = isGalleryShowing
+ }
+ }
+ }
+
+ public init(showViewfinder: Bool = false) {
+ self.showViewfinder = showViewfinder
+ super.init(nibName: nil, bundle: nil)
+ }
+
+ required init?(coder: NSCoder) {
+ self.showViewfinder = false
+ super.init(coder: coder)
+ }
+
+ func openGallery() {
+ isGalleryShowing = true
+ let imagePicker = UIImagePickerController()
+ imagePicker.delegate = self
+ present(imagePicker, animated: true, completion: nil)
+ }
+
+ @objc func openGalleryFromButton(_ sender: UIButton) {
+ openGallery()
+ }
+
+ public func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
+ isGalleryShowing = false
+
+ if let qrcodeImg = info[.originalImage] as? UIImage {
+ let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])!
+ let ciImage = CIImage(image:qrcodeImg)!
+ var qrCodeLink = ""
+
+ let features = detector.features(in: ciImage)
+
+ for feature in features as! [CIQRCodeFeature] {
+ qrCodeLink += feature.messageString!
+ }
+
+ if qrCodeLink == "" {
+ delegate?.didFail(reason: .badOutput)
+ } else {
+ let result = ScanResult(string: qrCodeLink, type: .qr)
+ delegate?.found(result)
+ }
+ } else {
+ print("Something went wrong")
+ }
+
+ dismiss(animated: true, completion: nil)
+ }
+
+ public func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
+ isGalleryShowing = false
+ }
+
+ #if targetEnvironment(simulator)
+ override public func loadView() {
+ view = UIView()
+ view.isUserInteractionEnabled = true
+
+ let label = UILabel()
+ label.translatesAutoresizingMaskIntoConstraints = false
+ label.numberOfLines = 0
+ label.text = "You're running in the simulator, which means the camera isn't available. Tap anywhere to send back some simulated data."
+ label.textAlignment = .center
+
+ let button = UIButton()
+ button.translatesAutoresizingMaskIntoConstraints = false
+ button.setTitle("Select a custom image", for: .normal)
+ button.setTitleColor(UIColor.systemBlue, for: .normal)
+ button.setTitleColor(UIColor.gray, for: .highlighted)
+ button.addTarget(self, action: #selector(openGalleryFromButton), for: .touchUpInside)
+
+ let stackView = UIStackView()
+ stackView.translatesAutoresizingMaskIntoConstraints = false
+ stackView.axis = .vertical
+ stackView.spacing = 50
+ stackView.addArrangedSubview(label)
+ stackView.addArrangedSubview(button)
+
+ view.addSubview(stackView)
+
+ NSLayoutConstraint.activate([
+ button.heightAnchor.constraint(equalToConstant: 50),
+ stackView.leadingAnchor.constraint(equalTo: view.layoutMarginsGuide.leadingAnchor),
+ stackView.trailingAnchor.constraint(equalTo: view.layoutMarginsGuide.trailingAnchor),
+ stackView.centerYAnchor.constraint(equalTo: view.centerYAnchor)
+ ])
+ }
+
+ override public func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
+ guard let simulatedData = delegate?.parent.simulatedData else {
+ print("Simulated Data Not Provided!")
+ return
+ }
+
+ // Send back their simulated data, as if it were one of the types they were scanning for
+ let result = ScanResult(string: simulatedData, type: delegate?.parent.codeTypes.first ?? .qr)
+ delegate?.found(result)
+ }
+
+ #else
+
+ var captureSession: AVCaptureSession!
+ var previewLayer: AVCaptureVideoPreviewLayer!
+ let fallbackVideoCaptureDevice = AVCaptureDevice.default(for: .video)
+
+ private lazy var viewFinder: UIImageView? = {
+ guard let image = UIImage(named: "viewfinder", in: .main, with: nil) else {
+ return nil
+ }
+
+ let imageView = UIImageView(image: image)
+ imageView.translatesAutoresizingMaskIntoConstraints = false
+ return imageView
+ }()
+
+ override public func viewDidLoad() {
+ super.viewDidLoad()
+
+ NotificationCenter.default.addObserver(self,
+ selector: #selector(updateOrientation),
+ name: Notification.Name("UIDeviceOrientationDidChangeNotification"),
+ object: nil)
+
+ view.backgroundColor = UIColor.black
+ captureSession = AVCaptureSession()
+
+ guard let videoCaptureDevice = delegate?.parent.videoCaptureDevice ?? fallbackVideoCaptureDevice else {
+ return
+ }
+
+ let videoInput: AVCaptureDeviceInput
+
+ do {
+ videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
+ } catch {
+ delegate?.didFail(reason: .initError(error))
+ return
+ }
+
+ if (captureSession.canAddInput(videoInput)) {
+ captureSession.addInput(videoInput)
+ } else {
+ delegate?.didFail(reason: .badInput)
+ return
+ }
+
+ let metadataOutput = AVCaptureMetadataOutput()
+
+ if (captureSession.canAddOutput(metadataOutput)) {
+ captureSession.addOutput(metadataOutput)
+
+ metadataOutput.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
+ metadataOutput.metadataObjectTypes = delegate?.parent.codeTypes
+ } else {
+ delegate?.didFail(reason: .badOutput)
+ return
+ }
+ }
+
+ override public func viewWillLayoutSubviews() {
+ previewLayer?.frame = view.layer.bounds
+ }
+
+ @objc func updateOrientation() {
+ guard let orientation = view.window?.windowScene?.interfaceOrientation else { return }
+ guard let connection = captureSession.connections.last, connection.isVideoOrientationSupported else { return }
+ connection.videoOrientation = AVCaptureVideoOrientation(rawValue: orientation.rawValue) ?? .portrait
+ }
+
+ override public func viewDidAppear(_ animated: Bool) {
+ super.viewDidAppear(animated)
+ updateOrientation()
+ }
+
+ override public func viewWillAppear(_ animated: Bool) {
+ super.viewWillAppear(animated)
+
+ if previewLayer == nil {
+ previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
+ }
+
+ previewLayer.frame = view.layer.bounds
+ previewLayer.videoGravity = .resizeAspectFill
+ view.layer.addSublayer(previewLayer)
+ addviewfinder()
+
+ delegate?.reset()
+
+ if (captureSession?.isRunning == false) {
+ DispatchQueue.global(qos: .userInitiated).async {
+ self.captureSession.startRunning()
+ }
+ }
+ }
+
+ private func addviewfinder() {
+ guard showViewfinder, let imageView = viewFinder else { return }
+
+ view.addSubview(imageView)
+
+ NSLayoutConstraint.activate([
+ imageView.centerYAnchor.constraint(equalTo: view.centerYAnchor),
+ imageView.centerXAnchor.constraint(equalTo: view.centerXAnchor),
+ imageView.widthAnchor.constraint(equalToConstant: 200),
+ imageView.heightAnchor.constraint(equalToConstant: 200),
+ ])
+ }
+
+ override public func viewDidDisappear(_ animated: Bool) {
+ super.viewDidDisappear(animated)
+
+ if (captureSession?.isRunning == true) {
+ DispatchQueue.global(qos: .userInitiated).async {
+ self.captureSession.stopRunning()
+ }
+ }
+
+ NotificationCenter.default.removeObserver(self)
+ }
+
+ override public var prefersStatusBarHidden: Bool {
+ true
+ }
+
+ override public var supportedInterfaceOrientations: UIInterfaceOrientationMask {
+ .all
+ }
+
+ /** Touch the screen for autofocus */
+ public override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
+ guard touches.first?.view == view,
+ let touchPoint = touches.first,
+ let device = delegate?.parent.videoCaptureDevice ?? fallbackVideoCaptureDevice
+ else { return }
+
+ let videoView = view
+ let screenSize = videoView!.bounds.size
+ let xPoint = touchPoint.location(in: videoView).y / screenSize.height
+ let yPoint = 1.0 - touchPoint.location(in: videoView).x / screenSize.width
+ let focusPoint = CGPoint(x: xPoint, y: yPoint)
+
+ do {
+ try device.lockForConfiguration()
+ } catch {
+ return
+ }
+
+ // Focus on the tapped point, and keep continuous focus and exposure so the point stays sharp as the device moves closer
+ device.focusPointOfInterest = focusPoint
+ device.focusMode = .continuousAutoFocus
+ device.exposurePointOfInterest = focusPoint
+ device.exposureMode = AVCaptureDevice.ExposureMode.continuousAutoExposure
+ device.unlockForConfiguration()
+ }
+
+ #endif
+
+ func updateViewController(isTorchOn: Bool, isGalleryPresented: Bool) {
+ if let backCamera = AVCaptureDevice.default(for: AVMediaType.video),
+ backCamera.hasTorch
+ {
+ try? backCamera.lockForConfiguration()
+ backCamera.torchMode = isTorchOn ? .on : .off
+ backCamera.unlockForConfiguration()
+ }
+
+ if isGalleryPresented && !isGalleryShowing {
+ openGallery()
+ }
+ }
+
+ }
+}
diff --git a/lightninglink/ContentView.swift b/lightninglink/ContentView.swift
@@ -6,6 +6,7 @@
//
import SwiftUI
+import AVFoundation
extension Notification.Name {
static var sentPayment: Notification.Name {
@@ -60,6 +61,8 @@ struct Funds {
}
}
+let SCAN_TYPES: [AVMetadataObject.ObjectType] = [.qr]
+
struct ContentView: View {
@State private var info: GetInfo
@State private var active_sheet: ActiveSheet?
@@ -107,8 +110,9 @@ struct ContentView: View {
return
}
- self.has_alert = true
+ self.active_sheet = nil
self.active_alert = .pay(amt, inv)
+ self.has_alert = true
}
var body: some View {
@@ -136,34 +140,43 @@ struct ContentView: View {
.padding()
}
}
- .alert("Use invoice in clipboard?", isPresented: $has_alert, presenting: active_alert, actions: { alert in
+ .alert("Use invoice in clipboard?", isPresented: $has_alert, presenting: active_alert) { alert in
Button("Use QR") {
+ self.has_alert = false
self.active_sheet = .qr
}
Button("Yes") {
+ self.has_alert = false
self.active_alert = nil
switch alert {
case .pay(let amt, let inv):
self.active_sheet = .pay(amt, inv)
}
}
- }, message: { alert in
- Text("There is an invoice in your clipboard, should we use that for payment?")
- })
+ }
.sheet(item: $active_sheet) { sheet in
switch sheet {
case .qr:
- QRScanner() { code in
- var invstr: String = code
- if code.starts(with: "lightning:") {
- let index = code.index(code.startIndex, offsetBy: 10)
- invstr = String(code[index...])
- }
- let m_parsed = parseInvoiceAmount(invstr)
- guard let parsed = m_parsed else {
+ CodeScannerView(codeTypes: SCAN_TYPES) { res in
+ switch res {
+ case .success(let scan_res):
+ let code = scan_res.string
+ var invstr: String = code
+ if code.starts(with: "lightning:") {
+ let index = code.index(code.startIndex, offsetBy: 10)
+ invstr = String(code[index...])
+ }
+ let m_parsed = parseInvoiceAmount(invstr)
+ guard let parsed = m_parsed else {
+ return
+ }
+ self.active_sheet = .pay(parsed, invstr)
+
+ case .failure:
+ self.active_sheet = nil
return
}
- self.active_sheet = .pay(parsed, invstr)
+
}
case .pay(let amt, let raw):
diff --git a/lightninglink/QRScan.swift b/lightninglink/QRScan.swift
@@ -1,122 +0,0 @@
-//
-// QRScan.swift
-// lightninglink
-//
-// Created by William Casarin on 2022-02-05.
-//
-
-import AVFoundation
-import UIKit
-import SwiftUI
-
-struct QRScanner: UIViewControllerRepresentable {
- var found: (String) -> Void
-
- init(found: @escaping (String) -> Void) {
- self.found = found
- }
-
- func makeUIViewController(context: Context) -> some UIViewController {
- return ScannerViewController().onScan(self.found)
- }
-
- func updateUIViewController(_ uiViewController: UIViewControllerType, context: Context) {
- }
-}
-
-class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
- var captureSession: AVCaptureSession!
- var previewLayer: AVCaptureVideoPreviewLayer!
- var completion: ((String) -> Void)? = nil
-
- public func onScan(_ found: @escaping (String) -> Void) -> ScannerViewController {
- self.completion = found
- return self
- }
-
- override func viewDidLoad() {
- super.viewDidLoad()
-
- view.backgroundColor = UIColor.black
- captureSession = AVCaptureSession()
-
- guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
- let videoInput: AVCaptureDeviceInput
-
- do {
- videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
- } catch {
- return
- }
-
- if (captureSession.canAddInput(videoInput)) {
- captureSession.addInput(videoInput)
- } else {
- failed()
- return
- }
-
- let metadataOutput = AVCaptureMetadataOutput()
-
- if (captureSession.canAddOutput(metadataOutput)) {
- captureSession.addOutput(metadataOutput)
-
- metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
- metadataOutput.metadataObjectTypes = [.qr]
- } else {
- failed()
- return
- }
-
- previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
- previewLayer.frame = view.layer.bounds
- previewLayer.videoGravity = .resizeAspectFill
- view.layer.addSublayer(previewLayer)
-
- captureSession.startRunning()
- }
-
- func failed() {
- let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
- ac.addAction(UIAlertAction(title: "OK", style: .default))
- present(ac, animated: true)
- captureSession = nil
- }
-
- override func viewWillAppear(_ animated: Bool) {
- super.viewWillAppear(animated)
-
- if (captureSession?.isRunning == false) {
- captureSession.startRunning()
- }
- }
-
- override func viewWillDisappear(_ animated: Bool) {
- super.viewWillDisappear(animated)
-
- if (captureSession?.isRunning == true) {
- captureSession.stopRunning()
- }
- }
-
- func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
- captureSession.stopRunning()
-
- if let metadataObject = metadataObjects.first {
- guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
- guard let stringValue = readableObject.stringValue else { return }
- AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
- completion?(stringValue)
- }
-
- dismiss(animated: true)
- }
-
- override var prefersStatusBarHidden: Bool {
- return true
- }
-
- override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
- return .portrait
- }
-}