Commit 71cb9bb: chore: swiftformat
castdrian committed Jun 24, 2024 (1 parent: 596b891)
Showing 24 changed files with 713 additions and 711 deletions.
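The changes below are consistent with SwiftFormat's stock rules: sorted imports, removal of redundant "= nil" initializers, Swift 5.7 shorthand "if let" bindings, dropped redundant "self" and parentheses, commas in place of "&&" in if-conditions, trailing commas in collection literals, and "_" for unused parameters. As an illustration only (the project's actual configuration is not shown on this page), a .swiftformat file pinning that behavior might look like:

# Hypothetical .swiftformat; the repository's real settings are not part of this commit.
--swiftversion 5.7
--enable sortedImports,redundantNilInit,redundantSelf,redundantParens,andOperator,trailingCommas,unusedArguments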
61 changes: 31 additions & 30 deletions CoreDex/CoreML/ImagePredictor.swift
@@ -5,104 +5,105 @@
// Created by Adrian Castro on 24.02.24.
//

-import Vision
import UIKit
+import Vision

class ImagePredictor {
static func createImageClassifier() -> VNCoreMLModel {
let defaultConfig = MLModelConfiguration()
let imageClassifierWrapper = try? Dex(configuration: defaultConfig)

guard let imageClassifier = imageClassifierWrapper else {
fatalError("App failed to create an image classifier model instance.")
}

let imageClassifierModel = imageClassifier.model

guard let imageClassifierVisionModel = try? VNCoreMLModel(for: imageClassifierModel) else {
fatalError("App failed to create a `VNCoreMLModel` instance.")
}

return imageClassifierVisionModel
}

private static let imageClassifier = createImageClassifier()

struct Prediction {
let classification: Int
let confidence: Float
}

typealias ImagePredictionHandler = (_ prediction: Prediction?) -> Void

private var predictionHandlers = [VNRequest: ImagePredictionHandler]()

private func createImageClassificationRequest() -> VNImageBasedRequest {
let imageClassificationRequest = VNCoreMLRequest(model: ImagePredictor.imageClassifier,
completionHandler: visionRequestHandler)

imageClassificationRequest.imageCropAndScaleOption = .centerCrop
return imageClassificationRequest
}

private func makePredictions(for photo: UIImage, completionHandler: @escaping ImagePredictionHandler) throws {
guard let orientation = CGImagePropertyOrientation(rawValue: UInt32(photo.imageOrientation.rawValue)) else { return }

guard let photoImage = photo.cgImage else {
fatalError("Photo doesn't have underlying CGImage.")
}

let imageClassificationRequest = createImageClassificationRequest()
predictionHandlers[imageClassificationRequest] = completionHandler

let handler = VNImageRequestHandler(cgImage: photoImage, orientation: orientation)
let requests: [VNRequest] = [imageClassificationRequest]

try handler.perform(requests)
}

private func visionRequestHandler(_ request: VNRequest, error: Error?) {
guard let predictionHandler = predictionHandlers.removeValue(forKey: request) else {
fatalError("Every request must have a prediction handler.")
}
-var topPrediction: Prediction? = nil
+var topPrediction: Prediction?

defer {
predictionHandler(topPrediction)
}
-if let error = error {
+
+if let error {
print("Vision image classification error...\n\n\(error.localizedDescription)")
return
}

if request.results == nil {
print("Vision request had no results.")
return
}

guard let observations = request.results as? [VNClassificationObservation] else {
print("VNRequest produced the wrong result type: \(type(of: request.results)).")
return
}

if let highestConfidenceObservation = observations.max(by: { a, b in a.confidence < b.confidence }) {
topPrediction = Prediction(classification: Int(highestConfidenceObservation.identifier)!,
confidence: highestConfidenceObservation.confidence)
}
}

func classifyImage(_ image: UIImage, completion: @escaping (Result<Prediction, Error>) -> Void) {
do {
-try self.makePredictions(for: image) { prediction in
-if let prediction = prediction {
+try makePredictions(for: image) { prediction in
+if let prediction {
completion(.success(prediction))
} else {
-completion(.failure(NSError(domain: "", code: 0, userInfo: [NSLocalizedDescriptionKey : "No predictions available"])))
+completion(.failure(NSError(domain: "", code: 0, userInfo: [NSLocalizedDescriptionKey: "No predictions available"])))
}
}
} catch {
completion(.failure(error))
}
-}}
+}
+}
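For context, classifyImage wraps the Vision request behind a completion-based API. A minimal call-site sketch, assuming a UIImage from elsewhere in the app (this usage is illustrative, not part of the commit):

import UIKit

let predictor = ImagePredictor()

// photo: any UIImage, e.g. a captured camera frame.
func identify(_ photo: UIImage) {
    predictor.classifyImage(photo) { result in
        switch result {
        case let .success(prediction):
            // classification is the numeric label parsed from the
            // VNClassificationObservation identifier; confidence is 0...1.
            print("Class \(prediction.classification), confidence \(prediction.confidence)")
        case let .failure(error):
            print("Classification failed: \(error.localizedDescription)")
        }
    }
}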
4 changes: 2 additions & 2 deletions CoreDex/Util/AudioPlayerDelegate.swift
@@ -5,13 +5,13 @@
// Created by Adrian Castro on 25.02.24.
//

-import Foundation
import AVFoundation
+import Foundation

class AudioPlayerDelegate: NSObject, AVAudioPlayerDelegate {
var onAudioFinished: (() -> Void)?

-func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
+func audioPlayerDidFinishPlaying(_: AVAudioPlayer, successfully _: Bool) {
onAudioFinished?()
}
}
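A sketch of wiring this delegate to an AVAudioPlayer (the URL and call site are assumptions for illustration; AVAudioPlayer does not retain its delegate, so the caller must keep it alive):

import AVFoundation

let audioDelegate = AudioPlayerDelegate()
var player: AVAudioPlayer?

// Hypothetical playback helper.
func play(from url: URL) throws {
    player = try AVAudioPlayer(contentsOf: url)
    audioDelegate.onAudioFinished = {
        print("Playback finished")
    }
    player?.delegate = audioDelegate // not retained by the player
    player?.play()
}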
4 changes: 2 additions & 2 deletions CoreDex/Util/ColorUtils.swift
@@ -26,7 +26,7 @@ extension Color {
.sRGB,
red: Double(r) / 255,
green: Double(g) / 255,
-blue: Double(b) / 255,
+blue: Double(b) / 255,
opacity: Double(a) / 255
)
}
@@ -50,5 +50,5 @@ let pokemonTypeColors: [String: Color] = [
"dragon": Color(hex: "#6F35FC"),
"dark": Color(hex: "#705746"),
"steel": Color(hex: "#B7B7CE"),
"fairy": Color(hex: "#D685AD")
"fairy": Color(hex: "#D685AD"),
]
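The only change here is a trailing comma after the last entry. For reference, the table maps Pokémon type names to their conventional colors through the project's Color(hex:) initializer; a usage sketch (the badge view is hypothetical):

import SwiftUI

// Hypothetical view; only pokemonTypeColors and Color(hex:) come from the file above.
struct TypeBadge: View {
    let type: String

    var body: some View {
        Text(type.capitalized)
            .padding(.horizontal, 8)
            .background(pokemonTypeColors[type, default: .gray])
            .clipShape(Capsule())
    }
}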
45 changes: 23 additions & 22 deletions CoreDex/Util/DocumentInteractionController.swift
@@ -10,52 +10,53 @@ import UIKit

struct DocumentInteractionController: UIViewControllerRepresentable {
let url: URL

func makeUIViewController(context: Context) -> UIViewController {
let viewController = UIViewController()
DispatchQueue.main.async {
-let documentInteractionController = UIDocumentInteractionController(url: self.url)
+let documentInteractionController = UIDocumentInteractionController(url: url)
documentInteractionController.delegate = context.coordinator
documentInteractionController.presentPreview(animated: true)
}
return viewController
}

func updateUIViewController(_ uiViewController: UIViewController, context: Context) {
let documentInteractionController = UIDocumentInteractionController(url: url)
documentInteractionController.delegate = context.coordinator
documentInteractionController.presentPreview(animated: true)

if let rootVC = uiViewController.presentingViewController {
documentInteractionController.presentOptionsMenu(from: rootVC.view.bounds, in: rootVC.view, animated: true)
}
}

func makeCoordinator() -> Coordinator {
Coordinator(self)
}

class Coordinator: NSObject, UIDocumentInteractionControllerDelegate {
-var parent: DocumentInteractionController
+var parent: DocumentInteractionController

-init(_ parent: DocumentInteractionController) {
-self.parent = parent
-}
+init(_ parent: DocumentInteractionController) {
+self.parent = parent
+}

-func documentInteractionControllerViewControllerForPreview(_ controller: UIDocumentInteractionController) -> UIViewController {
-guard let scene = UIApplication.shared.connectedScenes.first as? UIWindowScene,
-let rootViewController = scene.windows.first?.rootViewController else {
-fatalError("Unable to find the root view controller")
-}
-return rootViewController
+func documentInteractionControllerViewControllerForPreview(_: UIDocumentInteractionController) -> UIViewController {
+guard let scene = UIApplication.shared.connectedScenes.first as? UIWindowScene,
+let rootViewController = scene.windows.first?.rootViewController
+else {
+fatalError("Unable to find the root view controller")
+}
+return rootViewController
}

-func documentInteractionControllerDidEndPreview(_ controller: UIDocumentInteractionController) {
-do {
-try FileManager.default.removeItem(at: parent.url)
-} catch {
-print("Error removing temporary file: \(error)")
-}
+func documentInteractionControllerDidEndPreview(_: UIDocumentInteractionController) {
+do {
+try FileManager.default.removeItem(at: parent.url)
+} catch {
+print("Error removing temporary file: \(error)")
+}
}
}
}
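As a usage sketch, this UIViewControllerRepresentable would typically be presented from SwiftUI once a file URL exists; the coordinator then deletes the temporary file when the preview ends. The sheet wiring below is an assumption, not code from the repository:

import SwiftUI

// Hypothetical host view.
struct ExportPreview: View {
    let fileURL: URL
    @State private var showPreview = false

    var body: some View {
        Button("Preview file") { showPreview = true }
            .sheet(isPresented: $showPreview) {
                DocumentInteractionController(url: fileURL)
            }
    }
}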
53 changes: 26 additions & 27 deletions CoreDex/Util/ScannerViewController.swift
@@ -5,11 +5,11 @@
// Created by Adrian Castro on 26.02.24.
//

-import Foundation
import AVFoundation
import CoreMotion
-import UIKit
+import Foundation
import PkmnApi
+import UIKit

class ScannerViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
var captureSession: AVCaptureSession!
@@ -19,37 +19,37 @@ class ScannerViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
var lastMotionData: CMDeviceMotion?
var currentSampleBuffer: CMSampleBuffer?
var onImageCaptured: ((UIImage) -> Void)?

override func viewDidLoad() {
super.viewDidLoad()
setupCaptureSession()
startMotionUpdates()
}

func startMotionUpdates() {
if motionManager.isDeviceMotionAvailable {
motionManager.deviceMotionUpdateInterval = 0.2
-motionManager.startDeviceMotionUpdates(to: .main) { [weak self] (motion, error) in
+motionManager.startDeviceMotionUpdates(to: .main) { [weak self] motion, error in
guard error == nil else {
print("Error in motion updates: \(error!)")
return
}
-if let motion = motion {
+
+if let motion {
self?.processDeviceMotion(motion)
}
}
}
}

func processDeviceMotion(_ motion: CMDeviceMotion) {
let rotationThreshold = 0.05
let accelerationThreshold = 0.05

let rotationRate = motion.rotationRate
let userAcceleration = motion.userAcceleration
-if abs(rotationRate.x) < rotationThreshold && abs(rotationRate.y) < rotationThreshold && abs(rotationRate.z) < rotationThreshold && abs(userAcceleration.x) < accelerationThreshold && abs(userAcceleration.y) < accelerationThreshold && abs(userAcceleration.z) < accelerationThreshold {
+
+if abs(rotationRate.x) < rotationThreshold, abs(rotationRate.y) < rotationThreshold, abs(rotationRate.z) < rotationThreshold, abs(userAcceleration.x) < accelerationThreshold, abs(userAcceleration.y) < accelerationThreshold, abs(userAcceleration.z) < accelerationThreshold {
if !isProcessing, let sampleBuffer = currentSampleBuffer {
isProcessing = true
captureFrame(sampleBuffer: sampleBuffer)
@@ -58,71 +58,70 @@ class ScannerViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
isProcessing = false
}
}

func setupCaptureSession() {
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput

do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
-if (captureSession.canAddInput(videoInput)) {
+
+if captureSession.canAddInput(videoInput) {
captureSession.addInput(videoInput)
} else {
failed()
return
}

let videoOutput = AVCaptureVideoDataOutput()
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
-if (captureSession.canAddOutput(videoOutput)) {
+if captureSession.canAddOutput(videoOutput) {
captureSession.addOutput(videoOutput)
} else {
failed()
return
}

previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
}
}

func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
-func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+
+func captureOutput(_: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from _: AVCaptureConnection) {
currentSampleBuffer = sampleBuffer
}

func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
let ciImage = CIImage(cvPixelBuffer: imageBuffer)
let context = CIContext()

guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
return UIImage(cgImage: cgImage)
}

func captureFrame(sampleBuffer: CMSampleBuffer) {
guard let image = imageFromSampleBuffer(sampleBuffer: sampleBuffer) else {
isProcessing = false
return
}

onImageCaptured?(image)
isProcessing = false
}
