
Commit 34a498c

Open source
tattn committed Oct 9, 2023
1 parent d73e4c3 commit 34a498c
Showing 9 changed files with 373 additions and 19 deletions.
app/xcode/Package.swift (2 changes: 1 addition & 1 deletion)

@@ -27,7 +27,7 @@ let package = Package(
         .target(name: "VCamMedia", dependencies: ["VCamEntity", "VCamAppExtension", "VCamLogger"]),
         .target(name: "VCamBridge", dependencies: ["VCamUIFoundation"]),
         .target(name: "VCamTracking", dependencies: ["VCamCamera"]),
-        .target(name: "VCamCamera", dependencies: ["VCamData", "VCamLogger"]),
+        .target(name: "VCamCamera", dependencies: ["VCamMedia", "VCamData", "VCamLogger"]),
 
         .target(name: "VCamLogger", dependencies: []),
         .target(name: "VCamDefaults", dependencies: []),
app/xcode/Sources/VCamData/UserDefaultsKey.swift (5 changes: 5 additions & 0 deletions)

@@ -14,15 +14,20 @@ public extension UserDefaults.Key {
     static var skipThisVersion: Key<Version> { .init("vc_skip_version", default: "0.0.0") }
     static var previousVersion: Key<String> { .init("vc_prev_version", default: "") }
     static var useVowelEstimation: Key<Bool> { .init("vc_use_vowel_estimation", default: false) }
+    static var useEyeTracking: Key<Bool> { .init("vc_use_eye_tracking", default: true) }
     static var useEmotion: Key<Bool> { .init("vc_use_emotion", default: false) }
     static var cameraFps: Key<Int> { .init("vc_camera_fps", default: 24) }
     static var captureDeviceId: Key<String?> { .init("vc_capture_device_id", default: nil) }
     static var audioDeviceUid: Key<String?> { .init("vc_audio_device_uid", default: nil) }
     static var locale: Key<String> { .init("vc_locale", default: "") }
     static var pluginVersion: Key<String> { .init("vc_plugin_ver", default: "") }
     static var alwaysOnTopEnabled: Key<Bool> { .init("vc_alwaysontop_enabled", default: false) }
     static var trackingMethodFace: Key<TrackingMethod.Face> { .init("vc_tracking_method_face", default: .default) }
     static var trackingMethodHand: Key<TrackingMethod.Hand> { .init("vc_tracking_method_hand", default: .default) }
     static var trackingMethodFinger: Key<TrackingMethod.Finger> { .init("vc_tracking_method_finger", default: .default) }
+    static var eyeTrackingOffsetY: Key<Double> { .init("vc_eye_tracking_offset_y", default: -0.2) }
+    static var fingerTrackingOpenIntensity: Key<Double> { .init("vc_ftracking_open_intensity", default: 1) }
+    static var fingerTrackingCloseIntensity: Key<Double> { .init("vc_ftracking_close_intensity", default: 1) }
+    static var integrationVCamMocap: Key<Bool> { .init("vc_intg_vcammocap", default: false) }
     static var macOSMicModeEnabled: Key<Bool> { .init("vc_macos_micmode_enabled", default: false) }
 }
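
Note: these keys use VCam's typed UserDefaults wrapper from VCamDefaults, which is not part of this diff. As a rough sketch only, a generic Key type along these lines could support both the declarations above and the value(for:)/set(_:for:) calls made in Tracking.swift below (hypothetical reimplementation, not VCam's actual code):

import Foundation

extension UserDefaults {
    // Hypothetical Key type for illustration; the real one lives in VCamDefaults
    // and may differ (e.g., it must bridge non-plist types such as the
    // TrackingMethod enums, which this sketch does not handle).
    struct Key<Value> {
        let name: String
        let defaultValue: Value

        init(_ name: String, default defaultValue: Value) {
            self.name = name
            self.defaultValue = defaultValue
        }
    }

    // Reads fall back to the key's default when nothing is stored yet.
    func value<T>(for key: Key<T>) -> T {
        object(forKey: key.name) as? T ?? key.defaultValue
    }

    func set<T>(_ value: T, for key: Key<T>) {
        set(value, forKey: key.name)
    }
}

extension UserDefaults.Key {
    // Example key in the style of the diff above.
    static var cameraFps: UserDefaults.Key<Int> { .init("vc_camera_fps", default: 24) }
}

// Usage mirrors the calls in Tracking.swift:
let fps = UserDefaults.standard.value(for: .cameraFps)  // 24 until overridden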

AudioManager.swift

@@ -11,6 +11,8 @@ import VCamLogger
 public final class AudioManager {
     public init() {}
 
+    public static var isMicrophoneAuthorized: () -> Bool = { false }
+    public static var requestMicrophonePermission: (@escaping ((Bool) -> Void)) -> Void = { _ in }
     public var onUpdateAudioBuffer: ((AVAudioPCMBuffer, AVAudioTime, TimeInterval) -> Void) = { _, _, _ in }
 
     public var isRunning: Bool {
@@ -20,9 +22,9 @@ public final class AudioManager
     private var audioEngine = AVAudioEngine()
 
     public func startRecording(onStart: @escaping (AVAudioFormat) -> Void) throws {
-        guard DeviceAuthorization.authorizationStatus(for: .mic) else {
+        guard Self.isMicrophoneAuthorized() else {
             Logger.log("requestAuthorization")
-            DeviceAuthorization.requestAuthorization(type: .mic) { [self] authorized in
+            Self.requestMicrophonePermission { [self] authorized in
                 guard authorized else { return }
                 DispatchQueue.main.async { [self] in
                     try? startRecording(onStart: onStart)
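
This change removes AudioManager's direct dependency on DeviceAuthorization by injecting the permission checks as static closures, so the audio code no longer needs the module that owns permission handling. How VCam wires these closures is not shown in this commit; a minimal sketch of app-side wiring that goes straight to AVFoundation would be:

import AVFoundation

// Hypothetical wiring at app startup; VCam presumably routes this through
// its DeviceAuthorization helper rather than calling AVFoundation here.
AudioManager.isMicrophoneAuthorized = {
    AVCaptureDevice.authorizationStatus(for: .audio) == .authorized
}
AudioManager.requestMicrophonePermission = { completion in
    AVCaptureDevice.requestAccess(for: .audio, completionHandler: completion)
}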

app/xcode/Sources/VCamTracking/AvatarCameraManager.swift (2 changes: 1 addition & 1 deletion)

@@ -24,7 +24,7 @@ public final class AvatarCameraManager {
     }
 
     public var isBlinkerUsed: Bool {
-        switch Tracking.shared.faceTrackingMethod() {
+        switch Tracking.shared.faceTrackingMethod {
         case .disabled:
             return true
         case .default, .iFacialMocap, .vcamMocap:

app/xcode/Sources/VCamTracking/AvatarWebCamera.swift (6 changes: 3 additions & 3 deletions)

@@ -114,7 +114,7 @@ public final class AvatarWebCamera {
     }
 
     private func onLandmarkUpdate(observation: VNFaceObservation, landmarks: VNFaceLandmarks2D) {
-        guard Tracking.shared.faceTrackingMethod() == .default else { return }
+        guard Tracking.shared.faceTrackingMethod == .default else { return }
 
         let pointsInImage = landmarks.allPoints!.pointsInImage(imageSize: cameraManager.captureDeviceResolution)
         let (headPosition, headRotation) = poseEstimator.estimate(pointsInImage: pointsInImage, observation: observation)
@@ -170,15 +170,15 @@ public final class AvatarWebCamera {
         let littleMCPLeft = prevHands[4].appending(left.littleMCP)
         let littleMCPRight = prevHands[5].appending(right.littleMCP)
 
-        if Tracking.shared.handTrackingMethod() == .default {
+        if Tracking.shared.handTrackingMethod == .default {
             Tracking.shared.avatar.onHandDataReceived([
                 wristLeft.x, wristLeft.y, wristRight.x, wristRight.y,
                 thumbCMCLeft.x, thumbCMCLeft.y, thumbCMCRight.x, thumbCMCRight.y,
                 littleMCPLeft.x, littleMCPLeft.y, littleMCPRight.x, littleMCPRight.y
             ])
         }
 
-        if Tracking.shared.fingerTrackingMethod() == .default {
+        if Tracking.shared.fingerTrackingMethod == .default {
             Tracking.shared.avatar.onFingerDataReceived([
                 prevFingers[0].appending(left.thumbTip),
                 prevFingers[1].appending(left.indexTip),

app/xcode/Sources/VCamTracking/HandTracking.swift (2 changes: 1 addition & 1 deletion)

@@ -30,7 +30,7 @@ public final class HandTracking {
     private var cancellables: Set<AnyCancellable> = []
 
     public var configuration: FingerTrackingConfiguration {
-        (_configuration.open, _configuration.close, Tracking.shared.fingerTrackingMethod() != .disabled)
+        (_configuration.open, _configuration.close, Tracking.shared.fingerTrackingMethod != .disabled)
     }
 
     public init() {

app/xcode/Sources/VCamTracking/Tracking.swift (226 changes: 215 additions & 11 deletions)

@@ -5,33 +5,237 @@
 //  Created by Tatsuya Tanaka on 2023/01/01.
 //
 
 import Foundation
+import Accelerate
+import simd
+import Combine
 import VCamEntity
+import VCamData
+import VCamBridge
 
-public struct Tracking {
-    // Currently working on open sourcing
+@_cdecl("uniUseBlinker")
+public func uniUseBlinker() -> Bool {
+    Tracking.shared.avatarCameraManager.isBlinkerUsed
+}
 
-    public static var shared: Tracking!
+@_cdecl("uniSupportsPerfectSync")
+public func uniSupportsPerfectSync() -> Bool {
+    Tracking.shared.faceTrackingMethod.supportsPerfectSync
+}
 
-    public init(faceTrackingMethod: @escaping () -> TrackingMethod.Face, handTrackingMethod: @escaping () -> TrackingMethod.Hand, fingerTrackingMethod: @escaping () -> TrackingMethod.Finger) {
-        self.faceTrackingMethod = faceTrackingMethod
-        self.handTrackingMethod = handTrackingMethod
-        self.fingerTrackingMethod = fingerTrackingMethod
-    }
+public final class Tracking {
+    public static let shared = Tracking()
 
-    public private(set) var faceTrackingMethod: () -> TrackingMethod.Face
-    public private(set) var handTrackingMethod: () -> TrackingMethod.Hand
-    public private(set) var fingerTrackingMethod: () -> TrackingMethod.Finger
+    public private(set) var faceTrackingMethod = TrackingMethod.Face.default
+    public private(set) var handTrackingMethod = TrackingMethod.Hand.default
+    public private(set) var fingerTrackingMethod = TrackingMethod.Finger.default
+
+    public private(set) var useEyeTracking = false
+    public private(set) var useVowelEstimation = false
+
+    private var facialMocapLastValues: [Float] = Array(repeating: 0, count: 12)
+
+    public let avatarCameraManager = AvatarCameraManager()
+    public let iFacialMocapReceiver = FacialMocapReceiver()
+    public let vcamMotionReceiver = VCamMotionReceiver()
+    public let avatar = Avatar()
+
+    private let vcamMotionTracking = VCamMotionTracking()
+    private var cancellables: Set<AnyCancellable> = []
+
+    public init() {
+        UserDefaults.standard.publisher(for: \.vc_use_eye_tracking, options: [.initial, .new])
+            .sink { [unowned self] in useEyeTracking = $0 }
+            .store(in: &cancellables)
+        UserDefaults.standard.publisher(for: \.vc_use_vowel_estimation, options: [.initial, .new])
+            .sink { [unowned self] in useVowelEstimation = $0 }
+            .store(in: &cancellables)
+    }
+
+    public func configure() {
+        setFaceTrackingMethod(UserDefaults.standard.value(for: .trackingMethodFace))
+        setHandTrackingMethod(UserDefaults.standard.value(for: .trackingMethodHand))
+        setFingerTrackingMethod(UserDefaults.standard.value(for: .trackingMethodFinger))
+
+        Tracking.shared.avatar.onFacialDataReceived = UniBridge.shared.headTransform
+        Tracking.shared.avatar.onHandDataReceived = UniBridge.shared.hands
+        Tracking.shared.avatar.onFingerDataReceived = UniBridge.shared.fingers
+
+        Tracking.shared.avatar.oniFacialMocapReceived = { [self] data in
+            guard faceTrackingMethod == .iFacialMocap else { return }
+            if UniBridge.shared.hasPerfectSyncBlendShape {
+                UniBridge.shared.receivePerfectSync(data.perfectSync(useEyeTracking: useEyeTracking))
+            } else {
+                facialMocapLastValues = vDSP.linearInterpolate(facialMocapLastValues, data.vcamHeadTransform(useEyeTracking: useEyeTracking), using: 0.5)
+                UniBridge.shared.receiveVCamBlendShape(facialMocapLastValues)
+            }
+        }
+
+        Tracking.shared.avatar.onVCamMotionReceived = vcamMotionTracking.onVCamMotionReceived
+
+        if UserDefaults.standard.value(for: .integrationVCamMocap) {
+            Task {
+                try await Tracking.shared.vcamMotionReceiver.start(avatar: Tracking.shared.avatar)
+            }
+        }
+    }
+
+    public func stop() {
+        avatarCameraManager.stop()
+    }
+
+    public func resetCalibration() {
+        avatarCameraManager.resetCalibration()
+    }
+
+    public func setFaceTrackingMethod(_ method: TrackingMethod.Face) {
+        faceTrackingMethod = method
+        UserDefaults.standard.set(method, for: .trackingMethodFace)
+
+        var usage = Tracking.shared.avatarCameraManager.webCameraUsage
+
+        switch method {
+        case .disabled, .iFacialMocap, .vcamMocap:
+            usage.remove(.faceTracking)
+        case .default:
+            usage.insert(.faceTracking)
+
+            if UniBridge.shared.lipSyncWebCam.wrappedValue {
+                usage.insert(.lipTracking)
+            }
+        }
+        Tracking.shared.avatarCameraManager.setWebCamUsage(usage)
+
+        updateLipSyncIfNeeded()
+    }
+
+    public func setHandTrackingMethod(_ method: TrackingMethod.Hand) {
+        handTrackingMethod = method
+        UserDefaults.standard.set(method, for: .trackingMethodHand)
+
+        if handTrackingMethod == .default {
+            Tracking.shared.avatarCameraManager.setWebCamUsage(Tracking.shared.avatarCameraManager.webCameraUsage.union(.handTracking))
+        } else {
+            Tracking.shared.avatarCameraManager.setWebCamUsage(Tracking.shared.avatarCameraManager.webCameraUsage.subtracting(.handTracking))
+        }
+    }
+
+    public func setFingerTrackingMethod(_ method: TrackingMethod.Finger) {
+        fingerTrackingMethod = method
+        UserDefaults.standard.set(method, for: .trackingMethodFinger)
+
+        if fingerTrackingMethod == .default {
+            Tracking.shared.avatarCameraManager.setWebCamUsage(Tracking.shared.avatarCameraManager.webCameraUsage.union(.fingerTracking))
+        } else {
+            Tracking.shared.avatarCameraManager.setWebCamUsage(Tracking.shared.avatarCameraManager.webCameraUsage.subtracting(.fingerTracking))
+        }
+    }
+
+    public func setLipSyncType(_ type: LipSyncType) {
+        let useCamera = type == .camera
+        UniBridge.shared.lipSyncWebCam.wrappedValue = useCamera
+        if useCamera {
+            AvatarAudioManager.shared.stop(usage: .lipSync)
+            Tracking.shared.avatarCameraManager.setWebCamUsage(Tracking.shared.avatarCameraManager.webCameraUsage.union(.lipTracking))
+        } else {
+            AvatarAudioManager.shared.start(usage: .lipSync)
+            Tracking.shared.avatarCameraManager.setWebCamUsage(Tracking.shared.avatarCameraManager.webCameraUsage.subtracting(.lipTracking))
+        }
+    }
+
+    public var micLipSyncDisabled: Bool {
+        faceTrackingMethod.supportsPerfectSync && UniBridge.shared.hasPerfectSyncBlendShape
+    }
+
+    public func updateLipSyncIfNeeded() {
+        guard micLipSyncDisabled else {
+            return
+        }
+        setLipSyncType(.camera)
+    }
 }
 
+private extension FacialMocapData {
+    func vcamHeadTransform(useEyeTracking: Bool) -> [Float] {
+        let vowel = VowelEstimator.estimate(blendShape: blendShape)
+
+        return [
+            -head.translation.x, head.translation.y, head.translation.z,
+            head.rotation.x, -head.rotation.y, -head.rotation.z,
+            blendShape.eyeBlinkLeft,
+            blendShape.eyeBlinkRight,
+            blendShape.jawOpen,
+            useEyeTracking ? (blendShape.eyeLookInLeft - blendShape.eyeLookOutLeft) : 0,
+            useEyeTracking ? (blendShape.eyeLookUpLeft - blendShape.eyeLookDownLeft) : 0,
+            Float(vowel.rawValue)
+        ]
+    }
+
+    func perfectSync(useEyeTracking: Bool) -> [Float] {
+        let rawRotation = head.rotationRadian
+        let rotation = simd_quatf(.init(rawRotation.x, -rawRotation.y, -rawRotation.z)).vector
+
+        return [
+            -head.translation.x, head.translation.y, head.translation.z,
+            rotation.x, rotation.y, rotation.z, rotation.w,
+            blendShape.lookAtPoint.x, blendShape.lookAtPoint.y,
+            blendShape.browDownLeft,
+            blendShape.browDownRight,
+            blendShape.browInnerUp,
+            blendShape.browOuterUpLeft,
+            blendShape.browOuterUpRight,
+            blendShape.cheekPuff,
+            blendShape.cheekSquintLeft,
+            blendShape.cheekSquintRight,
+            blendShape.eyeBlinkLeft,
+            blendShape.eyeBlinkRight,
+            useEyeTracking ? blendShape.eyeLookDownLeft : 0,
+            useEyeTracking ? blendShape.eyeLookDownRight : 0,
+            useEyeTracking ? blendShape.eyeLookInLeft : 0,
+            useEyeTracking ? blendShape.eyeLookInRight : 0,
+            useEyeTracking ? blendShape.eyeLookOutLeft : 0,
+            useEyeTracking ? blendShape.eyeLookOutRight : 0,
+            useEyeTracking ? blendShape.eyeLookUpLeft : 0,
+            useEyeTracking ? blendShape.eyeLookUpRight : 0,
+            useEyeTracking ? blendShape.eyeSquintLeft : 0,
+            useEyeTracking ? blendShape.eyeSquintRight : 0,
+            useEyeTracking ? blendShape.eyeWideLeft : 0,
+            useEyeTracking ? blendShape.eyeWideRight : 0,
+            blendShape.jawForward,
+            blendShape.jawLeft,
+            blendShape.jawOpen,
+            blendShape.jawRight,
+            blendShape.mouthClose,
+            blendShape.mouthDimpleLeft,
+            blendShape.mouthDimpleRight,
+            blendShape.mouthFrownLeft,
+            blendShape.mouthFrownRight,
+            blendShape.mouthFunnel,
+            blendShape.mouthLeft,
+            blendShape.mouthLowerDownLeft,
+            blendShape.mouthLowerDownRight,
+            blendShape.mouthPressLeft,
+            blendShape.mouthPressRight,
+            blendShape.mouthPucker,
+            blendShape.mouthRight,
+            blendShape.mouthRollLower,
+            blendShape.mouthRollUpper,
+            blendShape.mouthShrugLower,
+            blendShape.mouthShrugUpper,
+            blendShape.mouthSmileLeft,
+            blendShape.mouthSmileRight,
+            blendShape.mouthStretchLeft,
+            blendShape.mouthStretchRight,
+            blendShape.mouthUpperUpLeft,
+            blendShape.mouthUpperUpRight,
+            blendShape.noseSneerLeft,
+            blendShape.noseSneerRight,
+            blendShape.tongueOut
+        ]
+    }
+}
+
+private extension UserDefaults {
+    @objc dynamic var vc_use_eye_tracking: Bool { value(for: .useEyeTracking) }
+    @objc dynamic var vc_use_vowel_estimation: Bool { value(for: .useVowelEstimation) }
+}
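
The private extension UserDefaults at the end is what makes the Combine observation in init() work: publisher(for:) is KVO-based, so it needs an @objc dynamic property whose name matches the defaults key string. A self-contained sketch of the same pattern, e.g. for a playground (standalone illustration, not code from this commit):

import Combine
import Foundation

extension UserDefaults {
    // The property name must equal the stored key for KVO to observe it.
    @objc dynamic var vc_use_eye_tracking: Bool { bool(forKey: "vc_use_eye_tracking") }
}

var cancellables: Set<AnyCancellable> = []

// .initial delivers the current value immediately; .new fires on each change.
UserDefaults.standard.publisher(for: \.vc_use_eye_tracking, options: [.initial, .new])
    .sink { enabled in print("eye tracking:", enabled) }
    .store(in: &cancellables)

// Any write through the same key fires the sink:
UserDefaults.standard.set(true, forKey: "vc_use_eye_tracking")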