我正在尝试让 kalidokit 的面部和身体追踪同时工作,但无论怎么做,身体追踪都不生效,而面部追踪一切正常。在类似的项目(例如 kalidokit-react)中,face + body tracking 都能正常运行;我猜问题出在我把它接入 A-Frame 的方式上,目前还没弄清楚渲染/更新循环哪里出了问题。这是我的代码:
/**
 * A-Frame component that drives a VRM avatar from Kalidokit mocap data.
 *
 * `src`    — URL of the .vrm model to load.
 * `motion` — either a string containing "fbx" (canned animation, currently
 *            ignored here) or a JSON-serialized MediaPipe Holistic results
 *            frame to be solved by Kalidokit and applied to the rig.
 *
 * External names (loadVRM, loadMixamoAnimation, idlePose, VRMHumanBoneName,
 * VRMExpressionPresetName, Face, Pose, Hand, lerp, clamp) come from the
 * surrounding module's imports.
 */
AFRAME.registerComponent("m-avatar", {
  schema: {
    src: { type: "string" },
    motion: { type: "string" },
  },

  /** One-time setup: cache scene handles and initialize rig state. */
  init() {
    this.camera = AFRAME.scenes[0].camera;
    this.scene = this.el.object3D;
    this.renderer = AFRAME.scenes[0].renderer;
    this.defaultPose = idlePose; // Mixamo FBX used as the idle animation
    this.currentClip = null;
    this.vrm = null;
    this.mixer = null;
    // Persistent eye-gaze smoothing state, kept on the component instead of
    // the module-level `oldLookTarget` global the old code mutated.
    this.lookTarget = new THREE.Euler();
  },

  /**
   * Per-frame hook driven by A-Frame's own render loop.
   *
   * BUG FIX: the old code started a new requestAnimationFrame loop and a new
   * THREE.Clock inside update() on every `src` AND every `motion` change, so
   * after a few schema updates the mixer/VRM were stepped many times per
   * frame and the leaked loops ran forever. A-Frame already ticks components
   * once per rendered frame, so we step the mixer/VRM here exactly once.
   *
   * @param {number} time      total elapsed ms (unused)
   * @param {number} timeDelta ms since the previous frame
   */
  tick(time, timeDelta) {
    const delta = timeDelta / 1000; // AnimationMixer/VRM.update expect seconds
    if (this.mixer) this.mixer.update(delta);
    if (this.vrm) this.vrm.update(delta);
  },

  /**
   * React to schema changes: reload the model on `src` change, apply one
   * mocap frame on `motion` change.
   */
  async update(oldData) {
    if (this.data.src && this.data.src !== oldData.src) {
      // BUG FIX: read this.data.src directly. The old code loaded from
      // this.modelSrc, which was cached once in init(), so changing `src`
      // later reloaded the ORIGINAL model instead of the new one.
      await this._loadModel(this.data.src);
    }

    if (oldData.motion && this.data.motion !== oldData.motion && this.vrm && this.mixer) {
      // "fbx" payloads are canned animation references; everything else is a
      // JSON-serialized Holistic results frame.
      if (!this.data.motion.includes("fbx")) {
        await this._animateVRM(JSON.parse(this.data.motion));
      }
    }
  },

  /** Load a VRM, attach it to the scene, and prepare its animation mixer. */
  async _loadModel(src) {
    const vrm = await loadVRM(src);
    // Remove a previously loaded avatar so reloads don't stack models.
    if (this.vrm) this.scene.remove(this.vrm.scene);
    this.vrm = vrm;
    this.scene.add(vrm.scene);

    // Frame the camera at head height.
    const head = vrm.humanoid.getBoneNode("head");
    this.camera.position.set(0.0, head.getWorldPosition(new THREE.Vector3()).y, 6.0);

    this.mixer = new THREE.AnimationMixer(vrm.scene);
    this.mixer.timeScale = 1;
    const clip = await loadMixamoAnimation(this.defaultPose, vrm);
    this.currentClip = this.mixer.clipAction(clip);
    // this.currentClip.play(); // idle pose intentionally not started while mocap drives the rig
  },

  /** Slerp a humanoid bone toward a Kalidokit rotation result. */
  _rigRotation(name, rotation = { x: 0, y: 0, z: 0 }, dampener = 1, lerpAmount = 0.3) {
    if (!this.vrm) return;
    const part = this.vrm.humanoid.getRawBoneNode(VRMHumanBoneName[name]);
    if (!part) return;
    const euler = new THREE.Euler(
      rotation.x * dampener,
      rotation.y * dampener,
      rotation.z * dampener,
      rotation.rotationOrder || "XYZ"
    );
    part.quaternion.slerp(new THREE.Quaternion().setFromEuler(euler), lerpAmount);
  },

  /** Lerp a humanoid bone's position toward a Kalidokit position result. */
  _rigPosition(name, position = { x: 0, y: 0, z: 0 }, dampener = 1, lerpAmount = 0.3) {
    if (!this.vrm) return;
    const part = this.vrm.humanoid.getRawBoneNode(VRMHumanBoneName[name]);
    if (!part) return;
    const target = new THREE.Vector3(
      position.x * dampener,
      position.y * dampener,
      position.z * dampener
    );
    part.position.lerp(target, lerpAmount);
  },

  /** Apply Kalidokit face solve results: blink, mouth visemes, and gaze. */
  _rigFace(riggedFace) {
    if (!this.vrm) return;
    const expressions = this.vrm.expressionManager;
    const Preset = VRMExpressionPresetName;

    // For VRM blendshapes, 1 is eye closed and 0 is open. Smooth against the
    // previous frame, then stabilize with the Kalidokit helper so head pitch
    // doesn't trigger false blinks.
    riggedFace.eye.l = lerp(clamp(1 - riggedFace.eye.l, 0, 1), expressions.getValue(Preset.Blink), 0.5);
    riggedFace.eye.r = lerp(clamp(1 - riggedFace.eye.r, 0, 1), expressions.getValue(Preset.Blink), 0.5);
    riggedFace.eye = Face.stabilizeBlink(riggedFace.eye, riggedFace.head.y);
    expressions.setValue(Preset.Blink, riggedFace.eye.l);

    // Mouth visemes, interpolated against the previous frame's value.
    expressions.setValue(Preset.Ih, lerp(riggedFace.mouth.shape.I, expressions.getValue(Preset.Ih), 0.5));
    expressions.setValue(Preset.Aa, lerp(riggedFace.mouth.shape.A, expressions.getValue(Preset.Aa), 0.5));
    expressions.setValue(Preset.Ee, lerp(riggedFace.mouth.shape.E, expressions.getValue(Preset.Ee), 0.5));
    expressions.setValue(Preset.Oh, lerp(riggedFace.mouth.shape.O, expressions.getValue(Preset.Oh), 0.5));
    expressions.setValue(Preset.Uu, lerp(riggedFace.mouth.shape.U, expressions.getValue(Preset.Uu), 0.5));

    // Pupils: smooth the gaze target and keep a copy for the next frame.
    const look = new THREE.Euler(
      lerp(this.lookTarget.x, riggedFace.pupil.y, 0.4),
      lerp(this.lookTarget.y, riggedFace.pupil.x, 0.4),
      0,
      "XYZ"
    );
    this.lookTarget.copy(look);
    this.vrm.lookAt.applier.applyYawPitch(
      THREE.MathUtils.RAD2DEG * look.y,
      THREE.MathUtils.RAD2DEG * look.x
    );
  },

  /** Apply one solved Kalidokit hand result to a wrist + 15 finger joints. */
  _rigHand(side, riggedHand, wristZ) {
    this._rigRotation(`${side}Hand`, {
      // Combine the pose solver's wrist Z with the hand solver's X/Y.
      // BUG FIX: the old code read riggedPose.LeftHand.z unguarded and threw
      // whenever hand landmarks arrived without pose landmarks.
      z: wristZ ?? 0,
      y: riggedHand[`${side}Wrist`].y,
      x: riggedHand[`${side}Wrist`].x,
    });
    for (const finger of ["Thumb", "Index", "Middle", "Ring", "Little"]) {
      for (const joint of ["Proximal", "Intermediate", "Distal"]) {
        const bone = `${side}${finger}${joint}`;
        this._rigRotation(bone, riggedHand[bone]);
      }
    }
  },

  /**
   * Take one MediaPipe Holistic results frame and pose the avatar's face,
   * body, and hands with the Kalidokit solvers.
   */
  async _animateVRM(results) {
    const faceLandmarks = results.faceLandmarks;
    // World-space (meters) pose landmarks. `za` is a MINIFIED-INTERNAL field
    // of @mediapipe/holistic 0.5.x; other builds expose it as `ea`, and
    // current releases as `poseWorldLandmarks`. If body tracking silently
    // does nothing, this field being undefined for your holistic version is
    // the first thing to check — TODO confirm.
    const pose3DLandmarks = results.za ?? results.ea ?? results.poseWorldLandmarks;
    // Screen-space pose landmarks (normalized to video width/height).
    const pose2DLandmarks = results.poseLandmarks;
    // Holistic labels hands from the camera's point of view, so left/right
    // are mirrored relative to the avatar.
    const leftHandLandmarks = results.rightHandLandmarks;
    const rightHandLandmarks = results.leftHandLandmarks;
    const videoElement = document.getElementById("video-in");

    let riggedPose;

    if (faceLandmarks) {
      const riggedFace = await Face.solve(faceLandmarks, {
        runtime: "tfjs", // "mediapipe" runtime made the eyes close — keep tfjs
        video: videoElement,
      });
      this._rigFace(riggedFace);
    }

    if (pose2DLandmarks && pose3DLandmarks) {
      riggedPose = await Pose.solve(pose3DLandmarks, pose2DLandmarks, {
        runtime: "mediapipe",
        video: videoElement,
      });
      this._rigRotation("Hips", riggedPose.Hips.rotation, 0.7);
      this._rigPosition(
        "Hips",
        {
          x: riggedPose.Hips.position.x,
          y: riggedPose.Hips.position.y + 1, // lift to standing height
          z: -riggedPose.Hips.position.z, // reverse direction (camera-facing)
        },
        1,
        0.07
      );
      this._rigRotation("Chest", riggedPose.Spine, 0.25, 0.3);
      this._rigRotation("Spine", riggedPose.Spine, 0.45, 0.3);
      this._rigRotation("RightUpperArm", riggedPose.RightUpperArm, 1, 0.3);
      this._rigRotation("RightLowerArm", riggedPose.RightLowerArm, 1, 0.3);
      this._rigRotation("LeftUpperArm", riggedPose.LeftUpperArm, 1, 0.3);
      this._rigRotation("LeftLowerArm", riggedPose.LeftLowerArm, 1, 0.3);
      this._rigRotation("LeftUpperLeg", riggedPose.LeftUpperLeg, 1, 0.3);
      this._rigRotation("LeftLowerLeg", riggedPose.LeftLowerLeg, 1, 0.3);
      this._rigRotation("RightUpperLeg", riggedPose.RightUpperLeg, 1, 0.3);
      this._rigRotation("RightLowerLeg", riggedPose.RightLowerLeg, 1, 0.3);
    }

    if (leftHandLandmarks) {
      const riggedLeftHand = await Hand.solve(leftHandLandmarks, "Left");
      this._rigHand("Left", riggedLeftHand, riggedPose?.LeftHand?.z);
    }
    if (rightHandLandmarks) {
      const riggedRightHand = await Hand.solve(rightHandLandmarks, "Right");
      this._rigHand("Right", riggedRightHand, riggedPose?.RightHand?.z);
    }
  },
});
这里是我使用的相关库
"@mediapipe/camera_utils": "^0.3.1632432234",
"@mediapipe/drawing_utils": "^0.3.1620248257",
"@mediapipe/face_mesh": "^0.4.1633559619",
"@mediapipe/holistic": "^0.5.1635989137",
"@pixiv/three-vrm": "1.0.0-beta.11",
"@pixiv/types-vrm-0.0": "1.0.0-beta.11",
"@pixiv/types-vrmc-vrm-1.0": "1.0.0-beta.11",
"aframe",
"three"