Commit 50ce90a: 0.12.1
tattn committed Nov 4, 2023 (1 parent: 0929c80)
Showing 9 changed files with 145 additions and 170 deletions.
19 changes: 0 additions & 19 deletions app/xcode/Sources/VCamTracking/Avatar.swift

This file was deleted.

19 changes: 13 additions & 6 deletions app/xcode/Sources/VCamTracking/AvatarWebCamera.swift
@@ -143,7 +143,7 @@ public final class AvatarWebCamera {
facial.eyeball.y,
Float(facial.vowel.rawValue)
)
Tracking.shared.avatar.onFacialDataReceived(values)
UniBridge.shared.headTransform(values)
}

private func onHandsUpdate(_ hands: VCamHands) {
@@ -153,14 +153,21 @@ public final class AvatarWebCamera {
if hands.left == nil {
// When the track is lost or started, eliminate the effects of linearInterpolate and move directly to the initial position
prevHands[0].setValues(-.one)
prevHands[2].setValues(-.one)
prevHands[4].setValues(-.one)
} else if prevHands[0].latestValue.x == -1 {
// Minimize hand warping as much as possible (ideally, want to interpolate from a stationary pose)
prevHands[0].setValues(.init(left.wrist.x * 0.5, left.wrist.y))
prevHands[0].setValues(left.wrist)
prevHands[2].setValues(left.thumbCMC)
prevHands[4].setValues(left.littleMCP)
}
if hands.right == nil {
prevHands[1].setValues(-.one)
prevHands[3].setValues(-.one)
prevHands[5].setValues(-.one)
} else if prevHands[1].latestValue.x == -1 {
prevHands[1].setValues(.init(right.wrist.x * 0.5, right.wrist.y))
prevHands[1].setValues(right.wrist)
prevHands[3].setValues(right.thumbCMC)
prevHands[5].setValues(right.littleMCP)
}

let wristLeft = prevHands[0].appending(left.wrist)
@@ -171,15 +178,15 @@
let littleMCPRight = prevHands[5].appending(right.littleMCP)

if Tracking.shared.handTrackingMethod == .default {
Tracking.shared.avatar.onHandDataReceived([
UniBridge.shared.hands([
wristLeft.x, wristLeft.y, wristRight.x, wristRight.y,
thumbCMCLeft.x, thumbCMCLeft.y, thumbCMCRight.x, thumbCMCRight.y,
littleMCPLeft.x, littleMCPLeft.y, littleMCPRight.x, littleMCPRight.y
])
}

if Tracking.shared.fingerTrackingMethod == .default {
Tracking.shared.avatar.onFingerDataReceived([
UniBridge.shared.fingers([
prevFingers[0].appending(left.thumbTip),
prevFingers[1].appending(left.indexTip),
prevFingers[2].appending(left.middleTip),
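Note on the hand warm-start logic in the hunk above: when tracking is lost or resumes, the interpolators are seeded directly (sentinel -.one, then the first real sample) instead of blending from stale values, so the hand does not visibly warp across the screen. A minimal sketch of the behavior that prevHands[n].setValues / .appending appear to implement; SmoothedPoint, the -1 sentinel check, and the 0.5 blend factor are illustrative assumptions, not part of this commit:

import simd

// Illustrative only: a per-joint smoother seeded like prevHands above.
struct SmoothedPoint {
    private(set) var latestValue: SIMD2<Float> = -.one  // -.one marks "tracking lost"

    mutating func setValues(_ value: SIMD2<Float>) {
        latestValue = value
    }

    // Blend the stored value toward the new sample and keep the result.
    mutating func appending(_ value: SIMD2<Float>) -> SIMD2<Float> {
        if latestValue.x == -1 {
            latestValue = value  // first sample after a reset: take it verbatim
        } else {
            latestValue = simd_mix(latestValue, value, SIMD2(repeating: 0.5))
        }
        return latestValue
    }
}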
80 changes: 80 additions & 0 deletions app/xcode/Sources/VCamTracking/FacialMocapData.swift
@@ -119,3 +119,83 @@ public extension FacialMocapData {
)
}
}

extension FacialMocapData {
func vcamHeadTransform(useEyeTracking: Bool) -> [Float] {
let vowel = VowelEstimator.estimate(blendShape: blendShape)

return [
-head.translation.x, /*head.translation.y*/0, /*head.translation.z*/0,
head.rotation.x, -head.rotation.y, -head.rotation.z,
blendShape.eyeBlinkLeft,
blendShape.eyeBlinkRight,
blendShape.jawOpen,
useEyeTracking ? (blendShape.eyeLookInLeft - blendShape.eyeLookOutLeft) : 0,
useEyeTracking ? (blendShape.eyeLookUpLeft - blendShape.eyeLookDownLeft) : 0,
Float(vowel.rawValue)
]
}

func perfectSync(useEyeTracking: Bool) -> [Float] {
let rawRotation = head.rotationRadian
let rotation = simd_quatf(.init(rawRotation.x, -rawRotation.y, -rawRotation.z)).vector

return [
-head.translation.x, /*head.translation.y*/0, /*head.translation.z*/0,
rotation.x, rotation.y, rotation.z, rotation.w,
blendShape.lookAtPoint.x, blendShape.lookAtPoint.y,
blendShape.browDownLeft,
blendShape.browDownRight,
blendShape.browInnerUp,
blendShape.browOuterUpLeft,
blendShape.browOuterUpRight,
blendShape.cheekPuff,
blendShape.cheekSquintLeft,
blendShape.cheekSquintRight,
blendShape.eyeBlinkLeft,
blendShape.eyeBlinkRight,
useEyeTracking ? blendShape.eyeLookDownLeft : 0,
useEyeTracking ? blendShape.eyeLookDownRight : 0,
useEyeTracking ? blendShape.eyeLookInLeft : 0,
useEyeTracking ? blendShape.eyeLookInRight : 0,
useEyeTracking ? blendShape.eyeLookOutLeft : 0,
useEyeTracking ? blendShape.eyeLookOutRight : 0,
useEyeTracking ? blendShape.eyeLookUpLeft : 0,
useEyeTracking ? blendShape.eyeLookUpRight : 0,
useEyeTracking ? blendShape.eyeSquintLeft : 0,
useEyeTracking ? blendShape.eyeSquintRight : 0,
useEyeTracking ? blendShape.eyeWideLeft : 0,
useEyeTracking ? blendShape.eyeWideRight : 0,
blendShape.jawForward,
blendShape.jawLeft,
blendShape.jawOpen,
blendShape.jawRight,
blendShape.mouthClose,
blendShape.mouthDimpleLeft,
blendShape.mouthDimpleRight,
blendShape.mouthFrownLeft,
blendShape.mouthFrownRight,
blendShape.mouthFunnel,
blendShape.mouthLeft,
blendShape.mouthLowerDownLeft,
blendShape.mouthLowerDownRight,
blendShape.mouthPressLeft,
blendShape.mouthPressRight,
blendShape.mouthPucker,
blendShape.mouthRight,
blendShape.mouthRollLower,
blendShape.mouthRollUpper,
blendShape.mouthShrugLower,
blendShape.mouthShrugUpper,
blendShape.mouthSmileLeft,
blendShape.mouthSmileRight,
blendShape.mouthStretchLeft,
blendShape.mouthStretchRight,
blendShape.mouthUpperUpLeft,
blendShape.mouthUpperUpRight,
blendShape.noseSneerLeft,
blendShape.noseSneerRight,
blendShape.tongueOut
]
}
}
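For reference, perfectSync above packs the head rotation as a quaternion via simd_quatf(SIMD3<Float>), which looks like a project-level convenience initializer rather than a stock simd API. A sketch of one plausible equivalent; the Z·Y·X axis order and the helper name are assumptions for illustration, not taken from this commit:

import simd

// Hypothetical Euler-to-quaternion helper approximating the initializer used above.
func quaternion(fromEulerRadians euler: SIMD3<Float>) -> simd_quatf {
    let qx = simd_quatf(angle: euler.x, axis: SIMD3<Float>(1, 0, 0))
    let qy = simd_quatf(angle: euler.y, axis: SIMD3<Float>(0, 1, 0))
    let qz = simd_quatf(angle: euler.z, axis: SIMD3<Float>(0, 0, 1))
    return qz * qy * qx  // assumed rotation order
}

// Mirrors the perfectSync path: flip the Y/Z signs, then read x/y/z/w from .vector.
let raw = SIMD3<Float>(0.1, -0.2, 0.05)
let rotation = quaternion(fromEulerRadians: .init(raw.x, -raw.y, -raw.z)).vector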
32 changes: 23 additions & 9 deletions app/xcode/Sources/VCamTracking/FacialMocapReceiver.swift
@@ -7,14 +7,18 @@

import Network
import Combine
import VCamBridge
import Accelerate

public final class FacialMocapReceiver: ObservableObject {
private var listener: NWListener?
private var connection: NWConnection?
private var facialMocapLastValues: [Float] = Array(repeating: 0, count: 12)
private static let queue = DispatchQueue(label: "com.github.tattn.vcam.facialmocapreceiver")

@MainActor @Published public private(set) var connectionStatus = ConnectionStatus.disconnected


public enum ConnectionStatus {
case disconnected
case connecting
@@ -30,12 +34,12 @@ public final class FacialMocapReceiver {
public init() {}

@MainActor
public func connect(ip: String, avatar: Avatar) async throws {
public func connect(ip: String) async throws {
await stop()

let port = NWEndpoint.Port(integerLiteral: 49983)

try await startServer(port: port, avatar: avatar) { [weak self] result in
try await startServer(port: port) { [weak self] result in
switch result {
case .success: ()
case .cancel, .error:
@@ -70,13 +74,23 @@
await self.stop()
}
}

private func oniFacialMocapReceived(_ data: FacialMocapData) {
guard Tracking.shared.faceTrackingMethod == .iFacialMocap else { return }
if UniBridge.shared.hasPerfectSyncBlendShape {
UniBridge.shared.receivePerfectSync(data.perfectSync(useEyeTracking: Tracking.shared.useEyeTracking))
} else {
facialMocapLastValues = vDSP.linearInterpolate(facialMocapLastValues, data.vcamHeadTransform(useEyeTracking: Tracking.shared.useEyeTracking), using: 0.5)
UniBridge.shared.receiveVCamBlendShape(facialMocapLastValues)
}
}
}

private extension NWConnection {
func receiveData(with avatar: Avatar) {
func receiveData(with oniFacialMocapReceived: @escaping (FacialMocapData) -> Void) {
receive(minimumIncompleteLength: 1, maximumLength: 8192) { [weak self] content, contentContext, isComplete, error in
defer {
self?.receiveData(with: avatar)
self?.receiveData(with: oniFacialMocapReceived)
}

guard error == nil,
@@ -86,14 +100,14 @@
return
}

avatar.oniFacialMocapReceived(mocapData)
oniFacialMocapReceived(mocapData)
}
}
}

extension FacialMocapReceiver {
@MainActor
private func startServer(port: NWEndpoint.Port, avatar: Avatar, completion: @escaping (ReceiverResult) -> Void) async throws {
private func startServer(port: NWEndpoint.Port, completion: @escaping (ReceiverResult) -> Void) async throws {
connectionStatus = .connecting

let parameters = NWParameters.udp
@@ -121,15 +135,15 @@ extension FacialMocapReceiver {
case .waiting(let error):
if case .posix(let posixError) = error, posixError == .ECONNREFUSED {
try? await Task.sleep(nanoseconds: NSEC_PER_SEC * 2)
try? await self.startServer(port: port, avatar: avatar, completion: completion)
try? await self.startServer(port: port, completion: completion)
}
case .ready:
self.connectionStatus = .connected
connection.receiveData(with: avatar)
connection.receiveData(with: self.oniFacialMocapReceived)
case .cancelled:
self.stopAsync()
case .failed:
try? await self.startServer(port: port, avatar: avatar, completion: completion)
try? await self.startServer(port: port, completion: completion)
@unknown default: ()
}
}
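The new oniFacialMocapReceived handler above smooths the 12-value head transform with vDSP.linearInterpolate before sending it over UniBridge. A small standalone sketch of what that call computes; the sample arrays are illustrative:

import Accelerate

// Illustrative only: with a constant of 0.5, each incoming iFacialMocap frame is
// averaged with the previously sent values, low-pass filtering jitter at the cost
// of a little latency.
let previous: [Float] = Array(repeating: 0, count: 12)
let incoming: [Float] = (0..<12).map { Float($0) }
let smoothed = vDSP.linearInterpolate(previous, incoming, using: 0.5)
// Element-wise: smoothed[i] = previous[i] + 0.5 * (incoming[i] - previous[i])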
101 changes: 3 additions & 98 deletions app/xcode/Sources/VCamTracking/Tracking.swift
@@ -33,12 +33,9 @@ public final class Tracking: ObservableObject {
public private(set) var useEyeTracking = false
public private(set) var useVowelEstimation = false

private var facialMocapLastValues: [Float] = Array(repeating: 0, count: 12)

public let avatarCameraManager = AvatarCameraManager()
public let iFacialMocapReceiver = FacialMocapReceiver()
public let vcamMotionReceiver = VCamMotionReceiver()
public let avatar = Avatar()

private let vcamMotionTracking = VCamMotionTracking()
private var cancellables: Set<AnyCancellable> = []
@@ -57,25 +54,9 @@
setHandTrackingMethod(UserDefaults.standard.value(for: .trackingMethodHand))
setFingerTrackingMethod(UserDefaults.standard.value(for: .trackingMethodFinger))

Tracking.shared.avatar.onFacialDataReceived = UniBridge.shared.headTransform
Tracking.shared.avatar.onHandDataReceived = UniBridge.shared.hands
Tracking.shared.avatar.onFingerDataReceived = UniBridge.shared.fingers

Tracking.shared.avatar.oniFacialMocapReceived = { [self] data in
guard faceTrackingMethod == .iFacialMocap else { return }
if UniBridge.shared.hasPerfectSyncBlendShape {
UniBridge.shared.receivePerfectSync(data.perfectSync(useEyeTracking: useEyeTracking))
} else {
facialMocapLastValues = vDSP.linearInterpolate(facialMocapLastValues, data.vcamHeadTransform(useEyeTracking: useEyeTracking), using: 0.5)
UniBridge.shared.receiveVCamBlendShape(facialMocapLastValues)
}
}

Tracking.shared.avatar.onVCamMotionReceived = vcamMotionTracking.onVCamMotionReceived

if UserDefaults.standard.value(for: .integrationVCamMocap) {
Task {
try await Tracking.shared.vcamMotionReceiver.start(avatar: Tracking.shared.avatar)
try await startVCamMotionReceiver()
}
}
}
@@ -153,85 +134,9 @@ public final class Tracking: ObservableObject {
}
setLipSyncType(.camera)
}
}

private extension FacialMocapData {
func vcamHeadTransform(useEyeTracking: Bool) -> [Float] {
let vowel = VowelEstimator.estimate(blendShape: blendShape)

return [
-head.translation.x, head.translation.y, head.translation.z,
head.rotation.x, -head.rotation.y, -head.rotation.z,
blendShape.eyeBlinkLeft,
blendShape.eyeBlinkRight,
blendShape.jawOpen,
useEyeTracking ? (blendShape.eyeLookInLeft - blendShape.eyeLookOutLeft) : 0,
useEyeTracking ? (blendShape.eyeLookUpLeft - blendShape.eyeLookDownLeft) : 0,
Float(vowel.rawValue)
]
}

func perfectSync(useEyeTracking: Bool) -> [Float] {
let rawRotation = head.rotationRadian
let rotation = simd_quatf(.init(rawRotation.x, -rawRotation.y, -rawRotation.z)).vector

return [
-head.translation.x, head.translation.y, head.translation.z,
rotation.x, rotation.y, rotation.z, rotation.w,
blendShape.lookAtPoint.x, blendShape.lookAtPoint.y,
blendShape.browDownLeft,
blendShape.browDownRight,
blendShape.browInnerUp,
blendShape.browOuterUpLeft,
blendShape.browOuterUpRight,
blendShape.cheekPuff,
blendShape.cheekSquintLeft,
blendShape.cheekSquintRight,
blendShape.eyeBlinkLeft,
blendShape.eyeBlinkRight,
useEyeTracking ? blendShape.eyeLookDownLeft : 0,
useEyeTracking ? blendShape.eyeLookDownRight : 0,
useEyeTracking ? blendShape.eyeLookInLeft : 0,
useEyeTracking ? blendShape.eyeLookInRight : 0,
useEyeTracking ? blendShape.eyeLookOutLeft : 0,
useEyeTracking ? blendShape.eyeLookOutRight : 0,
useEyeTracking ? blendShape.eyeLookUpLeft : 0,
useEyeTracking ? blendShape.eyeLookUpRight : 0,
useEyeTracking ? blendShape.eyeSquintLeft : 0,
useEyeTracking ? blendShape.eyeSquintRight : 0,
useEyeTracking ? blendShape.eyeWideLeft : 0,
useEyeTracking ? blendShape.eyeWideRight : 0,
blendShape.jawForward,
blendShape.jawLeft,
blendShape.jawOpen,
blendShape.jawRight,
blendShape.mouthClose,
blendShape.mouthDimpleLeft,
blendShape.mouthDimpleRight,
blendShape.mouthFrownLeft,
blendShape.mouthFrownRight,
blendShape.mouthFunnel,
blendShape.mouthLeft,
blendShape.mouthLowerDownLeft,
blendShape.mouthLowerDownRight,
blendShape.mouthPressLeft,
blendShape.mouthPressRight,
blendShape.mouthPucker,
blendShape.mouthRight,
blendShape.mouthRollLower,
blendShape.mouthRollUpper,
blendShape.mouthShrugLower,
blendShape.mouthShrugUpper,
blendShape.mouthSmileLeft,
blendShape.mouthSmileRight,
blendShape.mouthStretchLeft,
blendShape.mouthStretchRight,
blendShape.mouthUpperUpLeft,
blendShape.mouthUpperUpRight,
blendShape.noseSneerLeft,
blendShape.noseSneerRight,
blendShape.tongueOut
]
public func startVCamMotionReceiver() async throws {
try await vcamMotionReceiver.start(with: vcamMotionTracking)
}
}

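The Tracking.swift change above removes the Avatar indirection, moves the iFacialMocap post-processing into FacialMocapReceiver, and exposes startVCamMotionReceiver() as a public entry point. A sketch of how a caller might now start the receiver; the Task wrapper and the error handling are assumptions for illustration:

// Hypothetical call site; the init path above wraps the same call in a Task.
Task {
    do {
        try await Tracking.shared.startVCamMotionReceiver()
    } catch {
        print("VCamMotion receiver failed to start:", error)
    }
}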
