Update vision quickstart app for iOS R7.
PiperOrigin-RevId: 351271794
Change-Id: If69271b1ac221ca72a9da7fea9b712997efe9482
Google ML Kit authored and zongmins committed Jan 14, 2021
1 parent 055a54e commit 09bd95d
Showing 7 changed files with 394 additions and 125 deletions.
36 changes: 10 additions & 26 deletions ios/quickstarts/vision/VisionExample/CameraViewController.swift
@@ -255,33 +255,17 @@ class CameraViewController: UIViewController {
       }
       // Pose detected. Currently, only single person detection is supported.
       poses.forEach { pose in
-        for (startLandmarkType, endLandmarkTypesArray) in UIUtilities.poseConnections() {
-          let startLandmark = pose.landmark(ofType: startLandmarkType)
-          for endLandmarkType in endLandmarkTypesArray {
-            let endLandmark = pose.landmark(ofType: endLandmarkType)
-            let startLandmarkPoint = normalizedPoint(
-              fromVisionPoint: startLandmark.position, width: width, height: height)
-            let endLandmarkPoint = normalizedPoint(
-              fromVisionPoint: endLandmark.position, width: width, height: height)
-            UIUtilities.addLineSegment(
-              fromPoint: startLandmarkPoint,
-              toPoint: endLandmarkPoint,
-              inView: strongSelf.annotationOverlayView,
-              color: UIColor.green,
-              width: Constant.lineWidth
-            )
-          }
-        }
-        for landmark in pose.landmarks {
-          let landmarkPoint = normalizedPoint(
-            fromVisionPoint: landmark.position, width: width, height: height)
-          UIUtilities.addCircle(
-            atPoint: landmarkPoint,
-            to: strongSelf.annotationOverlayView,
-            color: UIColor.blue,
-            radius: Constant.smallDotRadius
-          )
-        }
+        let poseOverlayView = UIUtilities.createPoseOverlayView(
+          forPose: pose,
+          inViewWithBounds: strongSelf.annotationOverlayView.bounds,
+          lineWidth: Constant.lineWidth,
+          dotRadius: Constant.smallDotRadius,
+          positionTransformationClosure: { (position) -> CGPoint in
+            return strongSelf.normalizedPoint(fromVisionPoint: position, width: width,
+              height: height)
+          }
+        )
+        strongSelf.annotationOverlayView.addSubview(poseOverlayView)
       }
     }
   }
174 changes: 171 additions & 3 deletions ios/quickstarts/vision/VisionExample/UIUtilities.swift
@@ -117,11 +117,181 @@ public class UIUtilities {
}
}

/// Creates a pose overlay view for visualizing a given `pose`.
///
/// - Parameters:
/// - pose: The pose which will be visualized.
/// - bounds: The bounds of the view to which this overlay will be added. The overlay view's
/// bounds will match this value.
/// - lineWidth: The width of the lines connecting the landmark dots.
/// - dotRadius: The radius of the landmark dots.
/// - positionTransformationClosure: Closure which transforms a landmark `position` to the
/// `UIView` `CGPoint` coordinate where it should be shown on-screen.
/// - Returns: The pose overlay view.
public static func createPoseOverlayView(
forPose pose: Pose, inViewWithBounds bounds: CGRect, lineWidth: CGFloat, dotRadius: CGFloat,
positionTransformationClosure: (VisionPoint) -> CGPoint
) -> UIView {
let overlayView = UIView(frame: bounds)

let lowerBodyHeight: CGFloat =
UIUtilities.distance(
fromPoint: pose.landmark(ofType: PoseLandmarkType.leftAnkle).position,
toPoint: pose.landmark(ofType: PoseLandmarkType.leftKnee).position)
+ UIUtilities.distance(
fromPoint: pose.landmark(ofType: PoseLandmarkType.leftKnee).position,
toPoint: pose.landmark(ofType: PoseLandmarkType.leftHip).position)

// Pick arbitrary z extents to form a range of z values mapped to our colors. Red = close, blue
// = far. Assume that the z values will roughly follow physical extents of the human body, but
// apply an adjustment ratio to increase this color-coded z-range because this is not always the
// case.
let adjustmentRatio: CGFloat = 1.2
let nearZExtent: CGFloat = -lowerBodyHeight * adjustmentRatio
let farZExtent: CGFloat = lowerBodyHeight * adjustmentRatio
let zColorRange: CGFloat = farZExtent - nearZExtent
let nearZColor = UIColor.red
let farZColor = UIColor.blue

for (startLandmarkType, endLandmarkTypesArray) in UIUtilities.poseConnections() {
let startLandmark = pose.landmark(ofType: startLandmarkType)
for endLandmarkType in endLandmarkTypesArray {
let endLandmark = pose.landmark(ofType: endLandmarkType)
let startLandmarkPoint = positionTransformationClosure(startLandmark.position)
let endLandmarkPoint = positionTransformationClosure(endLandmark.position)

let landmarkZRatio = (startLandmark.position.z - nearZExtent) / zColorRange
let connectedLandmarkZRatio = (endLandmark.position.z - nearZExtent) / zColorRange

let startColor = UIUtilities.interpolatedColor(
fromColor: nearZColor, toColor: farZColor, ratio: landmarkZRatio)
let endColor = UIUtilities.interpolatedColor(
fromColor: nearZColor, toColor: farZColor, ratio: connectedLandmarkZRatio)

UIUtilities.addLineSegment(
fromPoint: startLandmarkPoint,
toPoint: endLandmarkPoint,
inView: overlayView,
colors: [startColor, endColor],
width: lineWidth)
}
}
for landmark in pose.landmarks {
let landmarkPoint = positionTransformationClosure(landmark.position)
UIUtilities.addCircle(
atPoint: landmarkPoint,
to: overlayView,
color: UIColor.blue,
radius: dotRadius
)
}
return overlayView
}

/// Adds a gradient-colored line segment subview to a given view.
///
/// - Parameters:
/// - fromPoint: The starting point of the line, in the view's coordinate space.
/// - toPoint: The end point of the line, in the view's coordinate space.
/// - inView: The view to which the line should be added as a subview.
/// - colors: The colors that the gradient should traverse over. Must be non-empty.
/// - width: The width of the line segment.
private static func addLineSegment(
fromPoint: CGPoint, toPoint: CGPoint, inView: UIView, colors: [UIColor], width: CGFloat
) {
let viewWidth = inView.bounds.width
let viewHeight = inView.bounds.height
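// Bail out for zero-sized views: the gradient start and end points below are
// normalized by these dimensions, and dividing by zero would yield invalid coordinates.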
if viewWidth == 0.0 || viewHeight == 0.0 {
return
}
let path = UIBezierPath()
path.move(to: fromPoint)
path.addLine(to: toPoint)
let lineMaskLayer = CAShapeLayer()
lineMaskLayer.path = path.cgPath
lineMaskLayer.strokeColor = UIColor.black.cgColor
lineMaskLayer.fillColor = nil
lineMaskLayer.opacity = 1.0
lineMaskLayer.lineWidth = width

let gradientLayer = CAGradientLayer()
gradientLayer.startPoint = CGPoint(x: fromPoint.x / viewWidth, y: fromPoint.y / viewHeight)
gradientLayer.endPoint = CGPoint(x: toPoint.x / viewWidth, y: toPoint.y / viewHeight)
gradientLayer.frame = inView.bounds
var CGColors = [CGColor]()
for color in colors {
CGColors.append(color.cgColor)
}
if CGColors.count == 1 {
// Single-colored lines must still supply a start and end color for the gradient layer to
// render anything. Just add the single color to the colors list again to fulfill this
// requirement.
CGColors.append(colors[0].cgColor)
}
gradientLayer.colors = CGColors
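// The gradient fills the view's entire bounds; masking it with the stroke-only
// shape layer confines the visible gradient to the line segment itself.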
gradientLayer.mask = lineMaskLayer

let lineView = UIView(frame: inView.bounds)
lineView.layer.addSublayer(gradientLayer)
inView.addSubview(lineView)
}

/// Returns a color interpolated between two other colors.
///
/// - Parameters:
/// - fromColor: The start color of the interpolation.
/// - toColor: The end color of the interpolation.
/// - ratio: The ratio in range [0, 1] by which the colors should be interpolated. Passing 0
/// results in `fromColor` and passing 1 results in `toColor`, whereas passing 0.5 results
/// in a color that is halfway between `fromColor` and `toColor`. Values are clamped
/// between 0 and 1.
/// - Returns: The interpolated color.
private static func interpolatedColor(
fromColor: UIColor, toColor: UIColor, ratio: CGFloat
) -> UIColor {
var fromR: CGFloat = 0
var fromG: CGFloat = 0
var fromB: CGFloat = 0
var fromA: CGFloat = 0
fromColor.getRed(&fromR, green: &fromG, blue: &fromB, alpha: &fromA)

var toR: CGFloat = 0
var toG: CGFloat = 0
var toB: CGFloat = 0
var toA: CGFloat = 0
toColor.getRed(&toR, green: &toG, blue: &toB, alpha: &toA)

let clampedRatio = max(0.0, min(ratio, 1.0))

let interpolatedR = fromR + (toR - fromR) * clampedRatio
let interpolatedG = fromG + (toG - fromG) * clampedRatio
let interpolatedB = fromB + (toB - fromB) * clampedRatio
let interpolatedA = fromA + (toA - fromA) * clampedRatio
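
// For example, interpolating from opaque red (1, 0, 0, 1) to opaque blue (0, 0, 1, 1)
// at a ratio of 0.5 yields (0.5, 0, 0.5, 1), an opaque purple halfway between the two.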

return UIColor(
red: interpolatedR, green: interpolatedG, blue: interpolatedB, alpha: interpolatedA)
}

/// Returns the distance between two 3D points.
///
/// - Parameters:
/// - fromPoint: The starting point.
/// - toPoint: The end point.
/// - Returns: The distance.
private static func distance(fromPoint: Vision3DPoint, toPoint: Vision3DPoint) -> CGFloat {
let xDiff = fromPoint.x - toPoint.x
let yDiff = fromPoint.y - toPoint.y
let zDiff = fromPoint.z - toPoint.z
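// Plain Euclidean distance; per the ML Kit docs, a landmark's z value is roughly
// on the same scale as its x and y image coordinates.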
return CGFloat(sqrt(xDiff * xDiff + yDiff * yDiff + zDiff * zDiff))
}

// MARK: - Private

/// Returns the minimum subset of all connected pose landmarks. Each key represents a start
/// landmark, and each value in the key's value array represents an end landmark which is
/// connected to the start landmark. These connections may be used for visualizing the landmark
/// positions on a pose object.
-public static func poseConnections() -> [PoseLandmarkType: [PoseLandmarkType]] {
+private static func poseConnections() -> [PoseLandmarkType: [PoseLandmarkType]] {
struct PoseConnectionsHolder {
static var connections: [PoseLandmarkType: [PoseLandmarkType]] = [
PoseLandmarkType.leftEar: [PoseLandmarkType.leftEyeOuter],
@@ -168,8 +338,6 @@ public class UIUtilities {
return PoseConnectionsHolder.connections
}

-// MARK: - Private
private static func currentUIOrientation() -> UIDeviceOrientation {
let deviceOrientation = { () -> UIDeviceOrientation in
switch UIApplication.shared.statusBarOrientation {
34 changes: 9 additions & 25 deletions ios/quickstarts/vision/VisionExample/ViewController.swift
@@ -762,32 +762,16 @@ extension ViewController {

       // Pose detected. Currently, only single person detection is supported.
       poses.forEach { pose in
-        for (startLandmarkType, endLandmarkTypesArray) in UIUtilities.poseConnections() {
-          let startLandmark = pose.landmark(ofType: startLandmarkType)
-          for endLandmarkType in endLandmarkTypesArray {
-            let endLandmark = pose.landmark(ofType: endLandmarkType)
-            let transformedStartLandmarkPoint = self.pointFrom(startLandmark.position).applying(
-              transform)
-            let transformedEndLandmarkPoint = self.pointFrom(endLandmark.position).applying(
-              transform)
-            UIUtilities.addLineSegment(
-              fromPoint: transformedStartLandmarkPoint,
-              toPoint: transformedEndLandmarkPoint,
-              inView: self.annotationOverlayView,
-              color: UIColor.green,
-              width: Constants.lineWidth
-            )
-          }
-        }
-        for landmark in pose.landmarks {
-          let transformedPoint = self.pointFrom(landmark.position).applying(transform)
-          UIUtilities.addCircle(
-            atPoint: transformedPoint,
-            to: self.annotationOverlayView,
-            color: UIColor.blue,
-            radius: Constants.smallDotRadius
-          )
-        }
+        let poseOverlayView = UIUtilities.createPoseOverlayView(
+          forPose: pose,
+          inViewWithBounds: self.annotationOverlayView.bounds,
+          lineWidth: Constants.lineWidth,
+          dotRadius: Constants.smallDotRadius,
+          positionTransformationClosure: { (position) -> CGPoint in
+            return self.pointFrom(position).applying(transform)
+          }
+        )
+        self.annotationOverlayView.addSubview(poseOverlayView)
         self.resultsText = "Pose Detected"
         self.showResults()
       }
47 changes: 13 additions & 34 deletions ios/quickstarts/vision/VisionExampleObjc/CameraViewController.m
@@ -250,9 +250,7 @@ - (void)recognizeTextOnDeviceInImage:(MLKVisionImage *)image

       // Lines.
       for (MLKTextLine *line in block.lines) {
-        NSArray<NSValue *> *points = [strongSelf convertedPointsFromPoints:line.cornerPoints
-                                                                     width:width
-                                                                    height:height];
+        points = [strongSelf convertedPointsFromPoints:line.cornerPoints width:width height:height];
         [UIUtilities addShapeWithPoints:points
                                  toView:strongSelf.annotationOverlayView
                                   color:UIColor.purpleColor];
@@ -345,36 +343,17 @@ - (void)detectPoseInImage:(MLKVisionImage *)image width:(CGFloat)width height:(CGFloat)height
     // Pose detection currently only supports single pose.
     MLKPose *pose = poses.firstObject;

-    NSDictionary<MLKPoseLandmarkType, NSArray<MLKPoseLandmarkType> *> *connections =
-        [UIUtilities poseConnections];
-
-    for (MLKPoseLandmarkType landmarkType in connections) {
-      for (MLKPoseLandmarkType connectedLandmarkType in connections[landmarkType]) {
-        MLKPoseLandmark *landmark = [pose landmarkOfType:landmarkType];
-        MLKPoseLandmark *connectedLandmark = [pose landmarkOfType:connectedLandmarkType];
-        CGPoint landmarkPosition = [strongSelf normalizedPointFromVisionPoint:landmark.position
-                                                                        width:width
-                                                                       height:height];
-        CGPoint connectedLandmarkPosition =
-            [strongSelf normalizedPointFromVisionPoint:connectedLandmark.position
-                                                 width:width
-                                                height:height];
-        [UIUtilities addLineSegmentFromPoint:landmarkPosition
-                                     toPoint:connectedLandmarkPosition
-                                      inView:strongSelf.annotationOverlayView
-                                       color:UIColor.greenColor
-                                       width:3.0f];
-      }
-    }
-    for (MLKPoseLandmark *landmark in pose.landmarks) {
-      CGPoint position = [strongSelf normalizedPointFromVisionPoint:landmark.position
-                                                              width:width
-                                                             height:height];
-      [UIUtilities addCircleAtPoint:position
-                             toView:strongSelf.annotationOverlayView
-                              color:UIColor.blueColor
-                             radius:MLKSmallDotRadius];
-    }
+    UIView *poseOverlay = [UIUtilities poseOverlayViewForPose:pose
+                                             inViewWithBounds:self.annotationOverlayView.bounds
+                                                    lineWidth:3.0f
+                                                    dotRadius:MLKSmallDotRadius
+                                  positionTransformationBlock:^(MLKVisionPoint *position) {
+                                    return [strongSelf normalizedPointFromVisionPoint:position
+                                                                                width:width
+                                                                               height:height];
+                                  }];
+
+    [strongSelf.annotationOverlayView addSubview:poseOverlay];
   });
 }

@@ -597,7 +576,7 @@ - (void)presentDetectorsAlertController {
     NSInteger detector = detectorType.integerValue;
     UIAlertAction *action = [UIAlertAction actionWithTitle:[self stringForDetector:detector]
                                                      style:UIAlertActionStyleDefault
-                                                   handler:^(UIAlertAction *_Nonnull action) {
+                                                   handler:^(UIAlertAction *_Nonnull actionArg) {
                                                      self.currentDetector = detector;
                                                      [self removeDetectionAnnotations];
                                                    }];
19 changes: 14 additions & 5 deletions ios/quickstarts/vision/VisionExampleObjc/UIUtilities.h
@@ -43,12 +43,21 @@ NS_ASSUME_NONNULL_BEGIN
+ (UIDeviceOrientation)currentUIOrientation;

 /**
- * Returns the minimum subset of all connected pose landmarks. Each key represents a start landmark,
- * and each value in the key's value array represents an end landmark which is connected to the
- * start landmark. These connections may be used for visualizing the landmark positions on a pose
- * object.
+ * Returns an overlay view for visualizing a given `pose`.
+ *
+ * @param pose The pose which will be visualized.
+ * @param bounds The bounds of the view to which this overlay will be added. The overlay view's
+ *     bounds will match this value.
+ * @param lineWidth The width of the lines connecting the landmark dots.
+ * @param dotRadius The radius of the landmark dots.
+ * @param positionTransformationBlock Block which transforms a landmark `position` to the
+ *     `UIView` `CGPoint` coordinate where it should be shown on-screen.
  */
-+ (NSDictionary<MLKPoseLandmarkType, NSArray<MLKPoseLandmarkType> *> *)poseConnections;
++ (UIView *)poseOverlayViewForPose:(MLKPose *)pose
+                  inViewWithBounds:(CGRect)bounds
+                         lineWidth:(CGFloat)lineWidth
+                         dotRadius:(CGFloat)dotRadius
+       positionTransformationBlock:(CGPoint (^)(MLKVisionPoint *))positionTransformationBlock;

@end

