Custom Models with Create ML

Sep 18 2024 · Swift 5.10, iOS 17.5, Xcode 15.4

Lesson 03: Deploying Your Custom Model in an iOS App

Demo

In Lesson 2, you laid the groundwork for the MoodTracker app by implementing the basic UI for emotion detection. In this demo, you’ll go through the process of integrating the Core ML model into your MoodTracker app to perform emotion detection on images. This includes setting up a view model, configuring the classifier, and updating the user interface to display the results.
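
First, create a view model that owns the state for the emotion-detection screen. Add a new file (for example, EmotionDetectionViewModel.swift) and define an ObservableObject that stores the selected image and exposes a reset() method: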

import SwiftUI
import Combine

class EmotionDetectionViewModel: ObservableObject {
  @Published var image: UIImage?

  func reset() {
    DispatchQueue.main.async {
      self.image = nil
    }
  }
}
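
Next, switch the detection screen over to this view model. In the emotion-detection view you built in Lesson 2 (its exact name depends on your project), replace the stored image state with a single view-model instance:
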
@StateObject private var viewModel = EmotionDetectionViewModel()
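
Now build the classifier that wraps your trained Core ML model. Create another file (for example, EmotionClassifier.swift) and add a class that loads EmotionsImageClassifier and runs it through the Vision framework:
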
import SwiftUI
import Vision
import CoreML

class EmotionClassifier {
  private let model: VNCoreMLModel

  init() {
    // 1. Load the Core ML model and wrap it for Vision
    let configuration = MLModelConfiguration()
    guard let mlModel = try? EmotionsImageClassifier(configuration: configuration).model,
          let visionModel = try? VNCoreMLModel(for: mlModel) else {
      fatalError("Failed to load model")
    }
    self.model = visionModel
  }

  func classify(image: UIImage, completion: @escaping (String?, Float?) -> Void) {
    // 2. Convert UIImage to CIImage
    guard let ciImage = CIImage(image: image) else {
      completion(nil, nil)
      return
    }

    // 3. Create a VNCoreMLRequest with the model
    let request = VNCoreMLRequest(model: model) { request, error in
      if let error = error {
        print("Error during classification: \(error.localizedDescription)")
        completion(nil, nil)
        return
      }

      // 4. Handle the classification results
      guard let results = request.results as? [VNClassificationObservation] else {
        print("No results found")
        completion(nil, nil)
        return
      }

      // 5. Find the top result based on confidence
      let topResult = results.max(by: { a, b in a.confidence < b.confidence })
      guard let bestResult = topResult else {
        print("No top result found")
        completion(nil, nil)
        return
      }

      // 6. Pass the top result to the completion handler
      completion(bestResult.identifier, bestResult.confidence)
    }

    // 7. Create a VNImageRequestHandler
    let handler = VNImageRequestHandler(ciImage: ciImage)

    // 8. Perform the request on a background thread
    DispatchQueue.global(qos: .userInteractive).async {
      do {
        try handler.perform([request])
      } catch {
        print("Failed to perform classification: \(error.localizedDescription)")
        completion(nil, nil)
      }
    }
  }
}
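
Back in EmotionDetectionViewModel, publish the classification result and keep a single classifier instance. Add these properties to the class:
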
@Published var emotion: String?
@Published var accuracy: String?

private let classifier = EmotionClassifier()
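
Then add a classifyImage() method that resizes the selected image, runs the classifier on a background queue, and publishes the emotion and confidence on the main thread, along with the resizing helper it uses:
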
func classifyImage() {
  if let image = self.image {
    // Resize the image before classification
    let resizedImage = resizeImage(image)
    DispatchQueue.global(qos: .userInteractive).async {
      self.classifier.classify(image: resizedImage ?? image) { [weak self] emotion, confidence in
        // Update the published properties on the main thread
        DispatchQueue.main.async {
          self?.emotion = emotion ?? "Unknown"
          self?.accuracy = String(format: "%.2f%%", (confidence ?? 0) * 100.0)
        }
      }
    }
  }
}

private func resizeImage(_ image: UIImage) -> UIImage? {
  let targetSize = CGSize(width: 224, height: 224)
  let format = UIGraphicsImageRendererFormat()
  format.scale = 1 // Keep the output at 224 x 224 pixels, matching the model's input size
  let renderer = UIGraphicsImageRenderer(size: targetSize, format: format)
  return renderer.image { _ in
    image.draw(in: CGRect(origin: .zero, size: targetSize))
  }
}
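
Also clear any previous result when the user resets the screen. Add these two lines inside the DispatchQueue.main.async block in reset():
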
self.emotion = nil
self.accuracy = nil
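
With the view model complete, move on to the UI. Create a small view that displays the detected emotion and its confidence, for example in a new file named EmotionResultView.swift:
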
import SwiftUI

struct EmotionResultView: View {
  let emotion: String
  let accuracy: String

  var body: some View {
    VStack(spacing: 5) {
      Text("Detected Emotion: \(emotion)")
        .font(.title2)
        .padding(.bottom)
      Text("Accuracy: \(accuracy)")
        .font(.subheadline)
        .foregroundColor(.secondary)
    }
    .padding()
    .background(Color.blue.opacity(0.1))
    .cornerRadius(10)
    .shadow(radius: 10)
  }
}

#Preview {
  EmotionResultView(emotion: "Happy", accuracy: "100%")
}
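
Display this view in the emotion-detection screen whenever a result is available. Add the following to the view's body, below the selected image (the exact placement depends on your existing layout):
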
if let emotion = viewModel.emotion, let accuracy = viewModel.accuracy {
    EmotionResultView(emotion: emotion, accuracy: accuracy)
}
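
Next, give the user a way to trigger classification. Assuming ActionButtonsView exposes a classifyImage closure (add one if your version doesn't yet), add a button that calls it:
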
Button(action: classifyImage) {
  Text("Detect Emotion")
    .font(.headline)
    .padding()
    .frame(maxWidth: .infinity)
    .background(Color.blue)
    .foregroundColor(.white)
    .cornerRadius(10)
}
.padding(.horizontal)
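
Finally, wire everything together in the emotion-detection view by passing the view model's image binding, reset method, and classifyImage method to ActionButtonsView:
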
ActionButtonsView(image: $viewModel.image, reset: viewModel.reset, classifyImage: viewModel.classifyImage)
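
Build and run the app, select an image, and tap Detect Emotion to see the predicted emotion and its confidence appear in EmotionResultView.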