
Commit 105271b: fix comment
st-tuanmai committed Aug 10, 2023
1 parent f0459a5 commit 105271b
Showing 5 changed files with 137 additions and 131 deletions.
FaceLandmarker.xcodeproj/project.pbxproj
@@ -12,7 +12,6 @@
BFEE41EB2A45351E00BA1A35 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE41EA2A45351E00BA1A35 /* AppDelegate.swift */; };
BFEE41ED2A45351E00BA1A35 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE41EC2A45351E00BA1A35 /* SceneDelegate.swift */; };
BFEE41F72A45351F00BA1A35 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = BFEE41F52A45351F00BA1A35 /* LaunchScreen.storyboard */; };
BFEE42022A45351F00BA1A35 /* FaceLandmarkerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE42012A45351F00BA1A35 /* FaceLandmarkerTests.swift */; };
BFEE420C2A45351F00BA1A35 /* FaceLandmarkerUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE420B2A45351F00BA1A35 /* FaceLandmarkerUITests.swift */; };
BFEE420E2A45351F00BA1A35 /* FaceLandmarkerUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE420D2A45351F00BA1A35 /* FaceLandmarkerUITestsLaunchTests.swift */; };
BFEE42202A45394C00BA1A35 /* FaceLandmarkerHelper.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE421B2A45394C00BA1A35 /* FaceLandmarkerHelper.swift */; };
@@ -56,7 +55,6 @@
BFEE41F62A45351F00BA1A35 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
BFEE41F82A45351F00BA1A35 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
BFEE41FD2A45351F00BA1A35 /* FaceLandmarkerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceLandmarkerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
BFEE42012A45351F00BA1A35 /* FaceLandmarkerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceLandmarkerTests.swift; sourceTree = "<group>"; };
BFEE42072A45351F00BA1A35 /* FaceLandmarkerUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceLandmarkerUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
BFEE420B2A45351F00BA1A35 /* FaceLandmarkerUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceLandmarkerUITests.swift; sourceTree = "<group>"; };
BFEE420D2A45351F00BA1A35 /* FaceLandmarkerUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceLandmarkerUITestsLaunchTests.swift; sourceTree = "<group>"; };
@@ -159,7 +157,6 @@
BFEE42002A45351F00BA1A35 /* FaceLandmarkerTests */ = {
isa = PBXGroup;
children = (
BFEE42012A45351F00BA1A35 /* FaceLandmarkerTests.swift */,
);
path = FaceLandmarkerTests;
sourceTree = "<group>";
@@ -408,7 +405,6 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
BFEE42022A45351F00BA1A35 /* FaceLandmarkerTests.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -576,6 +572,7 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = "";
@@ -596,6 +593,7 @@
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.google.mediapipe.examples.facelandmarker;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
@@ -608,6 +606,7 @@
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = "";
@@ -628,6 +627,7 @@
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.google.mediapipe.examples.facelandmarker;
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
FaceLandmarkerHelper.swift
@@ -18,8 +18,8 @@ import AVFoundation

protocol FaceLandmarkerHelperDelegate: AnyObject {
func faceLandmarkerHelper(_ faceLandmarkerHelper: FaceLandmarkerHelper,
didFinishDetection result: ResultBundle?,
error: Error?)
didFinishDetection result: ResultBundle?,
error: Error?)
}

class FaceLandmarkerHelper: NSObject {
@@ -47,7 +47,7 @@ class FaceLandmarkerHelper: NSObject {
}

/**
This method return FaceLandmarkerResult and infrenceTime when receive an image
This method returns a FaceLandmarkerResult object and inferenceTime after receiving an image
**/
func detect(image: UIImage) -> ResultBundle? {
guard let mpImage = try? MPImage(uiImage: image) else { return nil }
@@ -76,44 +76,57 @@
}

/**
This method return FaceLandmarkerResults and infrenceTime when receive videoUrl and inferenceIntervalMs
This method returns a FaceLandmarkerResults object and inferenceTime when receiving videoUrl and inferenceIntervalMs
**/
func detectVideoFile(url: URL, inferenceIntervalMs: Double) async -> ResultBundle? {
guard let faceLandmarker = faceLandmarker else { return nil }
let startDate = Date()
var size: CGSize = .zero
let asset: AVAsset = AVAsset(url: url)
guard let videoDurationMs = try? await asset.load(.duration).seconds * 1000 else { return nil }

// Using AVAssetImageGenerator to produce images from video content
let generator = AVAssetImageGenerator(asset:asset)
generator.requestedTimeToleranceBefore = CMTimeMake(value: 1, timescale: 25)
generator.requestedTimeToleranceAfter = CMTimeMake(value: 1, timescale: 25)
generator.appliesPreferredTrackTransform = true
guard let videoDurationMs = try? await asset.load(.duration).seconds * 1000 else { return nil }
let frameCount = Int(videoDurationMs / inferenceIntervalMs)
var faceLandmarkerResults: [FaceLandmarkerResult?] = []
for i in 0..<frameCount {
var size: CGSize = .zero
let startDate = Date()
// Go through each frame and detect it
for i in 0 ..< frameCount {
let timestampMs = inferenceIntervalMs * Double(i) // ms
let image:CGImage?
do {
let time = CMTime(seconds: timestampMs / 1000, preferredTimescale: 600)
try image = generator.copyCGImage(at: time, actualTime:nil)
} catch {
print(error)
return nil
}
guard let image = image else { return nil }
let uiImage = UIImage(cgImage:image)
size = uiImage.size
do {
let result = try faceLandmarker.detect(videoFrame: MPImage(uiImage: uiImage), timestampInMilliseconds: Int(timestampMs))
faceLandmarkerResults.append(result)
} catch {
print(error)
let time = CMTime(seconds: timestampMs / 1000, preferredTimescale: 600)
if let image = getImageFromVideo(generator, atTime: time) {
size = image.size
do {
let result = try faceLandmarker.detect(videoFrame: MPImage(uiImage: image), timestampInMilliseconds: Int(timestampMs))
faceLandmarkerResults.append(result)
} catch {
print(error)
faceLandmarkerResults.append(nil)
}
} else {
faceLandmarkerResults.append(nil)
}
}
let inferenceTime = Date().timeIntervalSince(startDate) / Double(frameCount) * 1000
return ResultBundle(inferenceTime: inferenceTime, faceLandmarkerResults: faceLandmarkerResults, imageSize: size)
}

/**
This method returns an image object when receiving an assetImageGenerator and a time
**/
private func getImageFromVideo(_ generator: AVAssetImageGenerator, atTime time: CMTime) -> UIImage? {
let image:CGImage?
do {
try image = generator.copyCGImage(at: time, actualTime:nil)
} catch {
print(error)
return nil
}
guard let image = image else { return nil }
return UIImage(cgImage: image)
}
}

extension FaceLandmarkerHelper: FaceLandmarkerLiveStreamDelegate {
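A minimal usage sketch of the refactored video path (not part of this commit), assuming an already-configured FaceLandmarkerHelper instance and a local video URL; both helper and videoURL are placeholders, and the helper's initializer sits outside this diff:

import Foundation

// Sketch: call the async detectVideoFile(url:inferenceIntervalMs:) shown above,
// sampling one frame every 300 ms of video time.
func runVideoDetection(helper: FaceLandmarkerHelper, videoURL: URL) {
    Task {
        guard let bundle = await helper.detectVideoFile(url: videoURL,
                                                        inferenceIntervalMs: 300) else {
            print("Video detection failed")
            return
        }
        // ResultBundle carries the average per-frame inference time (ms),
        // one optional FaceLandmarkerResult per sampled frame, and the frame size.
        print("Average inference time: \(bundle.inferenceTime) ms")
        print("Frames sampled: \(bundle.faceLandmarkerResults.count)")
    }
}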
OverlayView.swift
@@ -21,12 +21,18 @@ struct Line {
let to: CGPoint
}

/// Line connection
struct LineConnection {
let color: UIColor
let lines: [Line]
}

/**
This structure holds the display parameters for the overlay to be drawn on a detected object.
*/
struct ObjectOverlay {
let dots: [CGPoint]
let lines: [Line]
let lineConnections: [LineConnection]
}

/// Custom view to visualize the face landmarks result on top of the input image.
@@ -35,13 +41,14 @@ class OverlayView: UIView {
var objectOverlays: [ObjectOverlay] = []
private let lineWidth: CGFloat = 3
private let pointRadius: CGFloat = 3
private let lineColor = UIColor(red: 0, green: 127/255.0, blue: 139/255.0, alpha: 1)
private let pointColor = UIColor.yellow

override func draw(_ rect: CGRect) {
for objectOverlay in objectOverlays {
drawDots(objectOverlay.dots)
drawLines(objectOverlay.lines)
for lineConnection in objectOverlay.lineConnections {
drawLines(lineConnection.lines, lineColor: lineConnection.color)
}
}
}

@@ -83,35 +90,62 @@
}

let dots: [CGPoint] = transformedLandmark.map({CGPoint(x: CGFloat($0.x) * viewWidth + originX, y: CGFloat($0.y) * viewHeight + originY)})
var lines: [Line] = FaceLandmarker.faceOvalConnections()
var lineConnections: [LineConnection] = []
lineConnections.append(LineConnection(
color: UIColor(red: 0, green: 127/255.0, blue: 139/255.0, alpha: 1),
lines: FaceLandmarker.faceOvalConnections()
.map({ connection in
let start = transformedLandmark[Int(connection.start)]
let end = transformedLandmark[Int(connection.end)]
return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY),
to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY))
})))
lineConnections.append(LineConnection(
color: UIColor(red: 18/255.0, green: 181/255.0, blue: 203/255.0, alpha: 1),
lines: FaceLandmarker.rightEyebrowConnections()
.map({ connection in
let start = transformedLandmark[Int(connection.start)]
let end = transformedLandmark[Int(connection.end)]
return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY),
to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY))
})))
lineConnections.append(LineConnection(
color: UIColor(red: 18/255.0, green: 181/255.0, blue: 203/255.0, alpha: 1),
lines: FaceLandmarker.leftEyebrowConnections()
.map({ connection in
let start = transformedLandmark[Int(connection.start)]
let end = transformedLandmark[Int(connection.end)]
return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY),
to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY))
})
lines.append(contentsOf: FaceLandmarker.rightEyeConnections()
})))
lineConnections.append(LineConnection(
color: UIColor(red: 255/255.0, green: 171/255.0, blue: 0, alpha: 1),
lines: FaceLandmarker.rightEyeConnections()
.map({ connection in
let start = transformedLandmark[Int(connection.start)]
let end = transformedLandmark[Int(connection.end)]
return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY),
to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY))
}))
lines.append(contentsOf: FaceLandmarker.leftEyeConnections()
})))
lineConnections.append(LineConnection(
color: UIColor(red: 255/255.0, green: 171/255.0, blue: 0, alpha: 1),
lines: FaceLandmarker.leftEyeConnections()
.map({ connection in
let start = transformedLandmark[Int(connection.start)]
let end = transformedLandmark[Int(connection.end)]
return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY),
to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY))
}))
lines.append(contentsOf: FaceLandmarker.lipsConnections()
})))
lineConnections.append(LineConnection(
color: UIColor(red: 176/255.0, green: 0, blue: 32/255.0, alpha: 1),
lines: FaceLandmarker.lipsConnections()
.map({ connection in
let start = transformedLandmark[Int(connection.start)]
let end = transformedLandmark[Int(connection.end)]
return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY),
to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY))
}))
objectOverlays.append(ObjectOverlay(dots: dots, lines: lines))
})))
objectOverlays.append(ObjectOverlay(dots: dots, lineConnections: lineConnections))
}

self.objectOverlays = objectOverlays
@@ -129,7 +163,7 @@
}
}

private func drawLines(_ lines: [Line]) {
private func drawLines(_ lines: [Line], lineColor: UIColor) {
let path = UIBezierPath()
for line in lines {
path.move(to: line.from)
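The five LineConnection blocks above repeat the same landmark-to-view-space mapping. A minimal sketch of a shared helper that could factor it out (hypothetical, not part of this commit), assuming the MediaPipe Tasks Connection and NormalizedLandmark types and the same transform values used in the surrounding code:

// Hypothetical helper that builds Line values for one connection set.
private func makeLines(connections: [Connection],
                       transformedLandmark: [NormalizedLandmark],
                       viewWidth: CGFloat, viewHeight: CGFloat,
                       originX: CGFloat, originY: CGFloat) -> [Line] {
    connections.map { connection in
        let start = transformedLandmark[Int(connection.start)]
        let end = transformedLandmark[Int(connection.end)]
        // Scale normalized landmark coordinates into view space,
        // matching the arithmetic used for each facial feature above.
        return Line(
            from: CGPoint(x: CGFloat(start.x) * viewWidth + originX,
                          y: CGFloat(start.y) * viewHeight + originY),
            to: CGPoint(x: CGFloat(end.x) * viewWidth + originX,
                        y: CGFloat(end.y) * viewHeight + originY))
    }
}

// Example use, mirroring the face-oval case above:
// lineConnections.append(LineConnection(
//     color: UIColor(red: 0, green: 127/255.0, blue: 139/255.0, alpha: 1),
//     lines: makeLines(connections: FaceLandmarker.faceOvalConnections(),
//                      transformedLandmark: transformedLandmark,
//                      viewWidth: viewWidth, viewHeight: viewHeight,
//                      originX: originX, originY: originY)))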