Skip to content

Commit 50b5d6e

Browse files
jcggl and claude
committed
revert: replace emotion lerp with instant setEmotion + 10-frame queue trim
Lerp approach was too slow due to ONNX pipeline latency. Reverted to instant setEmotion() with 10-frame (~333ms) queue buffer for transition. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent b62d8c2 commit 50b5d6e

File tree

2 files changed

+13
-53
lines changed

2 files changed

+13
-53
lines changed

examples/guide/index.html

Lines changed: 7 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -1207,29 +1207,11 @@ <h2 class="step-title">Add Real-time Microphone</h2>
12071207
const emotionSliders = EMOTION_KEYS.map(k => $(`emo-${k}`));
12081208
const emotionVals = EMOTION_KEYS.map(k => $(`emo-${k}-val`));
12091209

1210-
// Emotion lerp: smoothly transition from current to target over ~500ms
1211-
const emotionCurrent = new Float32Array(5);
1212-
const emotionTarget = new Float32Array(5);
1213-
const EMOTION_LERP_SPEED = 12.0; // per second (reaches target in ~80-150ms)
1214-
1215-
function updateEmotionTarget() {
1216-
for (let i = 0; i < 5; i++) emotionTarget[i] = parseInt(emotionSliders[i].value) / 100;
1217-
}
1218-
1219-
function lerpEmotion(dt) {
1210+
function updateEmotionVector() {
1211+
const vec = emotionSliders.map(s => parseInt(s.value) / 100);
12201212
if (!lipsync?.setEmotion) return;
1221-
let changed = false;
1222-
for (let i = 0; i < 5; i++) {
1223-
if (Math.abs(emotionCurrent[i] - emotionTarget[i]) > 0.001) {
1224-
emotionCurrent[i] += (emotionTarget[i] - emotionCurrent[i]) * Math.min(EMOTION_LERP_SPEED * dt, 1);
1225-
changed = true;
1226-
} else {
1227-
emotionCurrent[i] = emotionTarget[i];
1228-
}
1229-
}
1230-
if (changed) {
1231-
try { lipsync.setEmotion(Array.from(emotionCurrent)); } catch {}
1232-
}
1213+
try { lipsync.setEmotion(vec); } catch (e) { console.warn('setEmotion:', e.message); }
1214+
if (micActive && frameQueue.length > 10) frameQueue.length = 10;
12331215
}
12341216

12351217
function showEmotionPanel(show) {
@@ -1241,16 +1223,14 @@ <h2 class="step-title">Add Real-time Microphone</h2>
12411223
emotionSliders.forEach(s => { s.value = 0; });
12421224
emotionVals.forEach(v => { v.textContent = '0%'; });
12431225
document.querySelectorAll('.emotion-presets button').forEach(b => b.classList.remove('active'));
1244-
emotionCurrent.fill(0);
1245-
emotionTarget.fill(0);
12461226
}
12471227

12481228
// Slider input events
12491229
emotionSliders.forEach((slider, i) => {
12501230
slider.addEventListener('input', () => {
12511231
emotionVals[i].textContent = slider.value + '%';
12521232
document.querySelectorAll('.emotion-presets button').forEach(b => b.classList.remove('active'));
1253-
updateEmotionTarget();
1233+
updateEmotionVector();
12541234
});
12551235
});
12561236

@@ -1264,7 +1244,7 @@ <h2 class="step-title">Add Real-time Microphone</h2>
12641244
s.value = EMOTION_KEYS[i] === preset ? 100 : 0;
12651245
emotionVals[i].textContent = s.value + '%';
12661246
});
1267-
updateEmotionTarget();
1247+
updateEmotionVector();
12681248
});
12691249
});
12701250

@@ -1525,10 +1505,7 @@ <h2 class="step-title">Add Real-time Microphone</h2>
15251505
}
15261506
}
15271507

1528-
// 3) Emotion lerp (smooth transition between emotion states)
1529-
lerpEmotion(dt);
1530-
1531-
// 4) Bone weights, VRM update, render
1508+
// 3) Bone weights, VRM update, render
15321509
updateBoneWeights(dt);
15331510
if (vrm) vrm.update(dt);
15341511
if (mixer) mixer.update(dt);

examples/vanilla-comparison/index.html

Lines changed: 6 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -467,41 +467,25 @@ <h1>Anima<span>Sync</span></h1>
467467
// ================================================================
468468
const emoSliders = document.querySelectorAll('.emo-v2');
469469
const emoPresetBtns = document.querySelectorAll('.emo-presets button');
470-
const emoCurrent = new Float32Array(5);
471-
const emoTarget = new Float32Array(5);
472-
const EMO_LERP_SPEED = 12.0;
473470

474-
function updateEmoTarget() {
475-
for (let i = 0; i < 5; i++) emoTarget[i] = parseInt(emoSliders[i].value) / 100;
476-
}
477-
478-
function lerpV2Emotion(dt) {
471+
function applyV2Emotion() {
479472
if (!lsV2?.setEmotion) return;
480-
let changed = false;
481-
for (let i = 0; i < 5; i++) {
482-
if (Math.abs(emoCurrent[i] - emoTarget[i]) > 0.001) {
483-
emoCurrent[i] += (emoTarget[i] - emoCurrent[i]) * Math.min(EMO_LERP_SPEED * dt, 1);
484-
changed = true;
485-
} else {
486-
emoCurrent[i] = emoTarget[i];
487-
}
488-
}
489-
if (changed) {
490-
try { lsV2.setEmotion(Array.from(emoCurrent)); } catch {}
491-
}
473+
const vec = Array.from(emoSliders).map(s => parseInt(s.value) / 100);
474+
try { lsV2.setEmotion(vec); } catch (e) { console.warn('setEmotion:', e); }
475+
if (micActive && queueV2.length > 10) queueV2.length = 10;
492476
}
493477

494478
emoSliders.forEach(s => s.addEventListener('input', () => {
495479
emoPresetBtns.forEach(b => b.classList.remove('active'));
496-
updateEmoTarget();
480+
applyV2Emotion();
497481
}));
498482

499483
emoPresetBtns.forEach(btn => btn.addEventListener('click', () => {
500484
const idx = parseInt(btn.dataset.emo);
501485
emoPresetBtns.forEach(b => b.classList.remove('active'));
502486
btn.classList.add('active');
503487
emoSliders.forEach(s => { s.value = parseInt(s.dataset.idx) === idx ? 100 : 0; });
504-
updateEmoTarget();
488+
applyV2Emotion();
505489
}));
506490

507491
// VRM drop handler (drops onto either pane, loads into both)
@@ -737,7 +721,6 @@ <h1>Anima<span>Sync</span></h1>
737721
ft = 0;
738722
}
739723

740-
lerpV2Emotion(dt);
741724
updateBoneWeights(dt);
742725
if (vrmV1) vrmV1.update(dt);
743726
if (vrmV2) vrmV2.update(dt);

0 commit comments

Comments
 (0)