Removed all warnings, no more MLModel casting
dokun1 committed Jul 19, 2021
1 parent dffee3b commit fad22d0
Showing 9 changed files with 37 additions and 76 deletions.
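
The change set does two things: protocols that need weak references now use the AnyObject constraint instead of the deprecated class keyword, and streamingModels holds [LuminaModel] directly, with Core ML models loaded through their throwing init(configuration:) rather than force-created and downcast from AnyObject. A minimal sketch of the resulting call site, assuming the Xcode-generated MobileNet wrapper that ships with the sample app (the helper function name is illustrative, not part of the diff):

import CoreML
import Lumina

// Sketch of the post-commit API; model loading can now fail explicitly.
func makeStreamingCamera() -> LuminaViewController {
    let camera = LuminaViewController()
    let config = MLModelConfiguration()
    config.computeUnits = .cpuAndGPU
    do {
        let model = try MobileNet(configuration: config).model
        camera.streamingModels = [LuminaModel(model: model, type: "MobileNet")]
    } catch {
        print("Could not load MobileNet: \(error.localizedDescription)")
    }
    return camera
}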
2 changes: 1 addition & 1 deletion Sample/LuminaSample/LoggingViewController.swift
@@ -10,7 +10,7 @@ import UIKit
import Lumina
import Logging

- protocol LoggingLevelDelegate: class {
+ protocol LoggingLevelDelegate: AnyObject {
func didSelect(loggingLevel: Logger.Level, controller: LoggingViewController)
}

2 changes: 1 addition & 1 deletion Sample/LuminaSample/ResolutionViewController.swift
@@ -9,7 +9,7 @@
import UIKit
import Lumina

- protocol ResolutionDelegate: class {
+ protocol ResolutionDelegate: AnyObject {
func didSelect(resolution: CameraResolution, controller: ResolutionViewController)
}

22 changes: 20 additions & 2 deletions Sample/LuminaSample/ViewController.swift
@@ -63,7 +63,16 @@ extension ViewController { // MARK: IBActions
camera.maxZoomScale = (self.maxZoomScaleLabel.text! as NSString).floatValue
camera.frameRate = Int(self.frameRateLabel.text!) ?? 30
if self.useCoreMLModelSwitch.isOn {
- camera.streamingModels = [LuminaModel(model: MobileNet().model, type: "MobileNet"), LuminaModel(model: SqueezeNet().model, type: "SqueezeNet")]
+ let config = MLModelConfiguration()
+ config.computeUnits = .cpuAndGPU
+ do {
+ let mobileNet = LuminaModel(model: try MobileNet(configuration: config).model, type: "MobileNet")
+ let squeezeNet = LuminaModel(model: try SqueezeNet(configuration: config).model, type: "SqueezeNet")
+ camera.streamingModels = [mobileNet, squeezeNet]
+ } catch let error {
+ self.showErrorAlert(with: error.localizedDescription)
+ return
+ }
}
camera.modalPresentationStyle = .fullScreen
present(camera, animated: true, completion: nil)
@@ -189,7 +198,8 @@ extension CVPixelBuffer {
}

private func getImageOrientation(with position: CameraPosition) -> UIImage.Orientation {
- switch UIApplication.shared.statusBarOrientation {
+ let orientation = UIApplication.shared.windows.first(where: { $0.isKeyWindow })?.windowScene?.interfaceOrientation ?? .portrait
+ switch orientation {
case .landscapeLeft:
return position == .back ? .down : .upMirrored
case .landscapeRight:
@@ -205,3 +215,11 @@ extension CVPixelBuffer {
}
}
}

+ extension ViewController {
+ func showErrorAlert(with message: String) {
+ let alert = UIAlertController(title: "Error", message: message, preferredStyle: .alert)
+ alert.addAction(.init(title: "OK", style: .default, handler: nil))
+ self.present(alert, animated: true, completion: nil)
+ }
+ }
@@ -24,7 +24,7 @@ extension AVCapturePhoto {
}

private func getImageOrientation(forCamera: CameraPosition) -> UIImage.Orientation {
- switch UIApplication.shared.statusBarOrientation {
+ switch LuminaViewController.orientation {
case .landscapeLeft:
return forCamera == .back ? .down : .upMirrored
case .landscapeRight:
16 changes: 1 addition & 15 deletions Sources/Lumina/Camera/Extensions/SampleBufferExtension.swift
@@ -10,20 +10,6 @@ import UIKit
import AVFoundation

extension CMSampleBuffer {
- func normalizedStillImage(forCameraPosition position: CameraPosition) -> UIImage? {
- LuminaLogger.notice(message: "normalizing still image from CMSampleBuffer")
- guard let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: self, previewPhotoSampleBuffer: nil) else {
- return nil
- }
- guard let dataProvider = CGDataProvider(data: imageData as CFData) else {
- return nil
- }
- guard let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent) else {
- return nil
- }
- return UIImage(cgImage: cgImageRef, scale: 1.0, orientation: getImageOrientation(forCamera: position))
- }
-
func normalizedVideoFrame() -> UIImage? {
LuminaLogger.notice(message: "normalizing video frame from CMSampleBbuffer")
guard let imageBuffer = CMSampleBufferGetImageBuffer(self) else {
@@ -38,7 +24,7 @@ extension CMSampleBuffer {
}

private func getImageOrientation(forCamera: CameraPosition) -> UIImage.Orientation {
- switch UIApplication.shared.statusBarOrientation {
+ switch LuminaViewController.orientation {
case .landscapeLeft:
return forCamera == .back ? .down : .upMirrored
case .landscapeRight:
35 changes: 2 additions & 33 deletions Sources/Lumina/Camera/LuminaCamera.swift
@@ -10,7 +10,7 @@ import UIKit
import AVFoundation
import CoreML

- protocol LuminaCameraDelegate: class {
+ protocol LuminaCameraDelegate: AnyObject {
func stillImageCaptured(camera: LuminaCamera, image: UIImage, livePhotoURL: URL?, depthData: Any?)
func videoFrameCaptured(camera: LuminaCamera, frame: UIImage)
func videoFrameCaptured(camera: LuminaCamera, frame: UIImage, predictedObjects: [LuminaRecognitionResult]?)
@@ -164,38 +164,7 @@ final class LuminaCamera: NSObject {

var recognizer: AnyObject?

- private var _streamingModels: [(AnyObject, String)]?
-
- var streamingModels: [LuminaModel]? {
- get {
- if let existingModels = _streamingModels {
- var models = [LuminaModel]()
- for potentialModel in existingModels {
- if let model = potentialModel.0 as? MLModel {
- models.append(LuminaModel(model: model, type: potentialModel.1))
- }
- }
- guard models.count > 0 else {
- return nil
- }
- return models
- } else {
- return nil
- }
- }
- set {
- if let tuples = newValue {
- var downcastCollection = [(AnyObject, String)]()
- for tuple in tuples {
- guard let model = tuple.model, let type = tuple.type else {
- continue
- }
- downcastCollection.append((model as AnyObject, type))
- }
- _streamingModels = downcastCollection
- }
- }
- }
+ var streamingModels: [LuminaModel]?

var session = AVCaptureSession()

@@ -10,7 +10,7 @@ import UIKit
import CoreML

/// Delegate for returning information to the application utilizing Lumina
- public protocol LuminaDelegate: class {
+ public protocol LuminaDelegate: AnyObject {

/// Triggered whenever a still image is captured by the user of Lumina
///
@@ -77,7 +77,7 @@ extension LuminaViewController {
case .videoSuccess:
if let camera = self.camera {
self.enableUI(valid: true)
- self.updateUI(orientation: UIApplication.shared.statusBarOrientation)
+ self.updateUI(orientation: LuminaViewController.orientation)
camera.start()
}
case .audioSuccess:
30 changes: 9 additions & 21 deletions Sources/Lumina/UI/LuminaViewController.swift
@@ -239,27 +239,11 @@ open class LuminaViewController: UIViewController {
}

/// A collection of model types that will be used when streaming images for object recognition
- ///
- /// - Note: Only works on iOS 11 and up
- ///
/// - Warning: If this is set, streamFrames is over-ridden to true
- open var streamingModels: [AnyObject]? {
+ open var streamingModels: [LuminaModel]? {
didSet {
- guard let streamingModels = self.streamingModels else {
- return
- }
- var properlyCastModels = [LuminaModel]()
- for possibleModel in streamingModels {
- guard let model = possibleModel as? LuminaModel else {
- continue
- }
- properlyCastModels.append(model)
- }
- if properlyCastModels.count > 0 {
- LuminaLogger.notice(message: "Valid models loaded - frame streaming mode defaulted to on")
- self.streamFrames = true
- self.camera?.streamingModels = properlyCastModels
- }
+ self.camera?.streamingModels = streamingModels
+ self.streamFrames = true
}
}

@@ -353,7 +337,7 @@
open override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
createUI()
- updateUI(orientation: UIApplication.shared.statusBarOrientation)
+ updateUI(orientation: LuminaViewController.orientation)
self.camera?.updateVideo { result in
self.handleCameraSetupResult(result)
}
@@ -363,6 +347,10 @@
}
}
}

+ static var orientation: UIInterfaceOrientation {
+ UIApplication.shared.windows.first(where: { $0.isKeyWindow })?.windowScene?.interfaceOrientation ?? .portrait
+ }

/// override with caution
open override func viewDidAppear(_ animated: Bool) {
@@ -389,7 +377,7 @@
if self.camera?.recordingVideo == true {
return
}
- updateUI(orientation: UIApplication.shared.statusBarOrientation)
+ updateUI(orientation: LuminaViewController.orientation)
updateButtonFrames()
}

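Throughout the diff, the deprecated UIApplication.shared.statusBarOrientation is replaced by a single key-window lookup, exposed as LuminaViewController.orientation and reused from the photo, sample-buffer, and view-controller extensions. The pattern in isolation, assuming an iOS 13+ scene-based app (the property name here is illustrative, not from the diff):

import UIKit

// Mirrors the lookup this commit centralizes in LuminaViewController.orientation;
// falls back to .portrait when no key window or scene is available yet.
var currentInterfaceOrientation: UIInterfaceOrientation {
    UIApplication.shared.windows
        .first(where: { $0.isKeyWindow })?
        .windowScene?
        .interfaceOrientation ?? .portrait
}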
