Commit

Push Vision Quickstart app changes to internal devrel.
PiperOrigin-RevId: 331658050
Change-Id: I17103e3460130a9e75e6bde4180da62b5ae1925d
Google ML Kit authored and Daniel Furlong committed Sep 15, 2020
1 parent ad93f80 commit 4c0af65
Showing 5 changed files with 45 additions and 47 deletions.
1 change: 1 addition & 0 deletions ios/quickstarts/vision/Podfile
@@ -8,6 +8,7 @@ pod 'GoogleMLKit/ImageLabelingCustom'
 pod 'GoogleMLKit/ObjectDetection'
 pod 'GoogleMLKit/ObjectDetectionCustom'
 pod 'GoogleMLKit/PoseDetection'
+pod 'GoogleMLKit/PoseDetectionAccurate'
 pod 'GoogleMLKit/TextRecognition'

 target 'VisionExample' do
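Note: the new PoseDetectionAccurate pod is what appears to bring in the accurate pose detector classes used by the source changes below. A minimal sketch, assuming the usual import MLKit umbrella module exposed by the GoogleMLKit pods (this snippet is illustrative and not part of the commit):

import MLKit

// Available with the GoogleMLKit/PoseDetection pod:
let baseOptions = PoseDetectorOptions()

// Presumably available once GoogleMLKit/PoseDetectionAccurate is also installed:
let accurateOptions = AccuratePoseDetectorOptions()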
18 changes: 9 additions & 9 deletions ios/quickstarts/vision/VisionExample/CameraViewController.swift
@@ -32,7 +32,7 @@ class CameraViewController: UIViewController {
 .onDeviceObjectCustomProminentWithClassifier,
 .onDeviceObjectCustomMultipleNoClassifier,
 .onDeviceObjectCustomMultipleWithClassifier,
-.poseFast,
+.pose,
 .poseAccurate,
 ]

@@ -75,9 +75,9 @@ class CameraViewController: UIViewController {
 var detector: PoseDetector? = nil
 poseDetectorQueue.sync {
 if _poseDetector == nil {
-let options = PoseDetectorOptions()
+let options = currentDetector == .pose ? PoseDetectorOptions()
+  : AccuratePoseDetectorOptions()
 options.detectorMode = .stream
-options.performanceMode = (currentDetector == .poseFast ? .fast : .accurate);
 _poseDetector = PoseDetector.poseDetector(options: options)
 }
 detector = _poseDetector
@@ -806,7 +806,7 @@ extension CameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
 height: imageHeight,
 options: options)

-case .poseFast, .poseAccurate:
+case .pose, .poseAccurate:
 detectPose(in: visionImage, width: imageWidth, height: imageHeight)
 }
 }
@@ -815,9 +815,9 @@ extension CameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
 // MARK: - Constants

 public enum Detector: String {
-case onDeviceBarcode = "On-Device Barcode Scanner"
-case onDeviceFace = "On-Device Face Detection"
-case onDeviceText = "On-Device Text Recognition"
+case onDeviceBarcode = "Barcode Scanning"
+case onDeviceFace = "Face Detection"
+case onDeviceText = "Text Recognition"
 case onDeviceObjectProminentNoClassifier = "ODT, single, no labeling"
 case onDeviceObjectProminentWithClassifier = "ODT, single, labeling"
 case onDeviceObjectMultipleNoClassifier = "ODT, multiple, no labeling"
@@ -826,8 +826,8 @@ public enum Detector: String {
 case onDeviceObjectCustomProminentWithClassifier = "ODT, custom, single, labeling"
 case onDeviceObjectCustomMultipleNoClassifier = "ODT, custom, multiple, no labeling"
 case onDeviceObjectCustomMultipleWithClassifier = "ODT, custom, multiple, labeling"
-case poseAccurate = "Pose, accurate"
-case poseFast = "Pose, fast"
+case pose = "Pose Detection"
+case poseAccurate = "Pose Detection, accurate"
 }

 private enum Constant {
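In the camera (stream) path, the detector variant is now chosen by options class rather than by flipping a performanceMode flag on a single options type. A condensed sketch of the new construction logic, with a plain Bool standing in for the quickstart's currentDetector state (the helper name and flag are illustrative, not part of the quickstart):

import MLKit

// Illustrative helper: builds a stream-mode pose detector, selecting the
// accurate variant by options class instead of a performanceMode setting.
func makeStreamPoseDetector(useAccurateModel: Bool) -> PoseDetector {
  let options = useAccurateModel
    ? AccuratePoseDetectorOptions()
    : PoseDetectorOptions()
  options.detectorMode = .stream
  return PoseDetector.poseDetector(options: options)
}

Splitting fast and accurate into separate options classes appears to line up with the Podfile change above, where the accurate detector ships in its own pod.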
23 changes: 11 additions & 12 deletions ios/quickstarts/vision/VisionExample/ViewController.swift
@@ -46,9 +46,8 @@ class ViewController: UIViewController, UINavigationControllerDelegate {
 private var poseDetector: PoseDetector? {
 get {
 if _poseDetector == nil {
-let options = PoseDetectorOptions()
+let options = AccuratePoseDetectorOptions()
 options.detectorMode = .singleImage
-options.performanceMode = .accurate
 _poseDetector = PoseDetector.poseDetector(options: options)
 }
 return _poseDetector
@@ -167,7 +166,7 @@ class ViewController: UIViewController, UINavigationControllerDelegate {
 options.shouldEnableMultipleObjects = shouldEnableMultipleObjects
 options.detectorMode = .singleImage
 detectObjectsOnDevice(in: imageView.image, options: options)
-case .detectPose:
+case .detectPoseAccurate:
 detectPose(image: imageView.image)
 }
 } else {
@@ -631,7 +630,7 @@ extension ViewController: UIPickerViewDataSource, UIPickerViewDelegate {
 clearResults()

 if let rowIndex = DetectorPickerRow(rawValue: row) {
-if rowIndex != .detectPose {
+if rowIndex != .detectPoseAccurate {
 // Reset the pose detector to `nil` when a new detector row is chosen. The detector will be
 // re-initialized via its getter when it is needed for detection again.
 poseDetector = nil
@@ -1013,23 +1012,23 @@ private enum DetectorPickerRow: Int {
 detectObjectsCustomProminentWithClassifier,
 detectObjectsCustomMultipleNoClassifier,
 detectObjectsCustomMultipleWithClassifier,
-detectPose
+detectPoseAccurate

 static let rowsCount = 14
 static let componentsCount = 1

 public var description: String {
 switch self {
 case .detectFaceOnDevice:
-return "Face On-Device"
+return "Face Detection"
 case .detectTextOnDevice:
-return "Text On-Device"
+return "Text Recognition"
 case .detectBarcodeOnDevice:
-return "Barcode On-Device"
+return "Barcode Scanning"
 case .detectImageLabelsOnDevice:
-return "Image Labeling On-Device"
+return "Image Labeling"
 case .detectImageLabelsCustomOnDevice:
-return "Image Labeling Custom On-Device"
+return "Image Labeling Custom"
 case .detectObjectsProminentNoClassifier:
 return "ODT, single, no labeling"
 case .detectObjectsProminentWithClassifier:
@@ -1046,8 +1045,8 @@ private enum DetectorPickerRow: Int {
 return "ODT, custom, multiple, no labeling"
 case .detectObjectsCustomMultipleWithClassifier:
 return "ODT, custom, multiple, labeling"
-case .detectPose:
-return "Pose"
+case .detectPoseAccurate:
+return "Pose Detection, accurate"
 }
 }
 }
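For the still-image path, the quickstart now always uses the accurate detector in single-image mode. A rough end-to-end sketch of that flow (the helper name, and the VisionImage / results(in:) / landmark(ofType:) calls, follow the general ML Kit pose API and are not shown in this diff):

import MLKit
import UIKit

// Illustrative helper: runs the accurate pose detector on a single UIImage.
func detectPosesAccurate(in image: UIImage) throws -> [Pose] {
  // The accurate options class replaces the old performanceMode = .accurate flag.
  let options = AccuratePoseDetectorOptions()
  options.detectorMode = .singleImage
  let poseDetector = PoseDetector.poseDetector(options: options)

  let visionImage = VisionImage(image: image)
  visionImage.orientation = image.imageOrientation

  // Synchronous results; an asynchronous process(_:completion:) variant also exists.
  return try poseDetector.results(in: visionImage)
}

// Example usage (illustrative):
// let poses = try detectPosesAccurate(in: someImage)
// let nose = poses.first?.landmark(ofType: .nose)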
25 changes: 12 additions & 13 deletions ios/quickstarts/vision/VisionExampleObjc/CameraViewController.m
@@ -50,8 +50,8 @@ typedef NS_ENUM(NSInteger, Detector) {
 DetectorOnDeviceObjectCustomProminentWithClassifier,
 DetectorOnDeviceObjectCustomMultipleNoClassifier,
 DetectorOnDeviceObjectCustomMultipleWithClassifier,
+DetectorPose,
 DetectorPoseAccurate,
-DetectorPoseFast,
 };

 @property(nonatomic) NSArray *detectors;
@@ -77,11 +77,11 @@ @implementation CameraViewController
 - (NSString *)stringForDetector:(Detector)detector {
 switch (detector) {
 case DetectorOnDeviceBarcode:
-return @"On-Device Barcode Scanner";
+return @"Barcode Scanning";
 case DetectorOnDeviceFace:
-return @"On-Device Face Detection";
+return @"Face Detection";
 case DetectorOnDeviceText:
-return @"On-Device Text Recognition";
+return @"Text Recognition";
 case DetectorOnDeviceObjectProminentNoClassifier:
 return @"ODT, single, no labeling";
 case DetectorOnDeviceObjectProminentWithClassifier:
@@ -98,10 +98,10 @@ - (NSString *)stringForDetector:(Detector)detector {
 return @"ODT, custom, multiple, no labeling";
 case DetectorOnDeviceObjectCustomMultipleWithClassifier:
 return @"ODT, custom, multiple, labeling";
-case DetectorPoseFast:
-return @"Pose, fast";
+case DetectorPose:
+return @"Pose Detection";
 case DetectorPoseAccurate:
-return @"Pose, accurate";
+return @"Pose Detection, accurate";
 }
 }

@@ -119,8 +119,8 @@ - (void)viewDidLoad {
 @(DetectorOnDeviceObjectCustomProminentWithClassifier),
 @(DetectorOnDeviceObjectCustomMultipleNoClassifier),
 @(DetectorOnDeviceObjectCustomMultipleWithClassifier),
+@(DetectorPose),
 @(DetectorPoseAccurate),
-@(DetectorPoseFast),
 ];
 _currentDetector = DetectorOnDeviceFace;
 _isUsingFrontCamera = YES;
@@ -778,8 +778,8 @@ - (void)captureOutput:(AVCaptureOutput *)output
 case DetectorOnDeviceText:
 [self recognizeTextOnDeviceInImage:visionImage width:imageWidth height:imageHeight];
 break;
+case DetectorPose:
 case DetectorPoseAccurate:
-case DetectorPoseFast:
 [self detectPoseInImage:visionImage width:imageWidth height:imageHeight];
 break;
 case DetectorOnDeviceObjectProminentNoClassifier:
@@ -832,11 +832,10 @@ - (nullable MLKPoseDetector *)poseDetector {
 // main thread and used for processing on the video output queue.
 @synchronized(self) {
 if (_poseDetector == nil) {
-MLKPoseDetectorOptions *options = [[MLKPoseDetectorOptions alloc] init];
+MLKCommonPoseDetectorOptions *options = self.currentDetector == DetectorPose
+  ? [[MLKPoseDetectorOptions alloc] init]
+  : [[MLKAccuratePoseDetectorOptions alloc] init];
 options.detectorMode = MLKPoseDetectorModeStream;
-options.performanceMode = self.currentDetector == DetectorPoseFast
-  ? MLKPoseDetectorPerformanceModeFast
-  : MLKPoseDetectorPerformanceModeAccurate;
 _poseDetector = [MLKPoseDetector poseDetectorWithOptions:options];
 }
 return _poseDetector;
25 changes: 12 additions & 13 deletions ios/quickstarts/vision/VisionExampleObjc/ViewController.m
@@ -71,8 +71,8 @@ typedef NS_ENUM(NSInteger, DetectorPickerRow) {
 DetectorPickerRowDetectObjectsCustomMultipleNoClassifier,
 /** On-Device vision object detector, custom model, multiple, with classification. */
 DetectorPickerRowDetectObjectsCustomMultipleWithClassifier,
-/** Vision pose detector. */
-DetectorPickerRowDetectPose,
+/** Vision pose accurate detector. */
+DetectorPickerRowDetectPoseAccurate,
 };

 @interface ViewController () <UINavigationControllerDelegate,
@@ -108,15 +108,15 @@ @implementation ViewController
 - (NSString *)stringForDetectorPickerRow:(DetectorPickerRow)detectorPickerRow {
 switch (detectorPickerRow) {
 case DetectorPickerRowDetectFaceOnDevice:
-return @"Face On-Device";
+return @"Face Detection";
 case DetectorPickerRowDetectTextOnDevice:
-return @"Text On-Device";
+return @"Text Recognition";
 case DetectorPickerRowDetectBarcodeOnDevice:
-return @"Barcode On-Device";
+return @"Barcode Scanning";
 case DetectorPickerRowDetectImageLabelsOnDevice:
-return @"Image Labeling On-Device";
+return @"Image Labeling";
 case DetectorPickerRowDetectImageLabelsCustomOnDevice:
-return @"Image Labeling Custom On-Device";
+return @"Image Labeling Custom";
 case DetectorPickerRowDetectObjectsProminentNoClassifier:
 return @"ODT, single, no labeling";
 case DetectorPickerRowDetectObjectsProminentWithClassifier:
@@ -133,8 +133,8 @@ - (NSString *)stringForDetectorPickerRow:(DetectorPickerRow)detectorPickerRow {
 return @"ODT, custom, multiple, no labeling";
 case DetectorPickerRowDetectObjectsCustomMultipleWithClassifier:
 return @"ODT, custom, multiple, labeling";
-case DetectorPickerRowDetectPose:
-return @"Pose";
+case DetectorPickerRowDetectPoseAccurate:
+return @"Pose Detection, accurate";
 }
 }

@@ -255,7 +255,7 @@ - (IBAction)detect:(id)sender {
 [self detectObjectsOnDeviceInImage:_imageView.image withOptions:options];
 break;
 }
-case DetectorPickerRowDetectPose:
+case DetectorPickerRowDetectPoseAccurate:
 [self detectPoseInImage:_imageView.image];
 break;
 }
@@ -692,7 +692,7 @@ - (void)pickerView:(UIPickerView *)pickerView
 // Reset the pose detector to `nil` when a new detector row is chosen. If it happens to be the
 // pose detector row, then it will be lazily-initialized with its getter override when accessed
 // for pose detection.
-if (row != DetectorPickerRowDetectPose) {
+if (row != DetectorPickerRowDetectPoseAccurate) {
 self.poseDetector = nil;
 }
 }
@@ -1080,9 +1080,8 @@ - (void)detectObjectsOnDeviceInImage:(UIImage *)image

 - (nullable MLKPoseDetector *)poseDetector {
 if (_poseDetector == nil) {
-MLKPoseDetectorOptions *options = [[MLKPoseDetectorOptions alloc] init];
+MLKAccuratePoseDetectorOptions *options = [[MLKAccuratePoseDetectorOptions alloc] init];
 options.detectorMode = MLKPoseDetectorModeSingleImage;
-options.performanceMode = MLKPoseDetectorPerformanceModeAccurate;
 _poseDetector = [MLKPoseDetector poseDetectorWithOptions:options];
 }
 return _poseDetector;
