import React, { Suspense, useEffect, useRef, useState, useMemo } from 'react';
import { Canvas, useFrame } from '@react-three/fiber';
import { useGLTF, useTexture, Loader, Environment, useFBX, useAnimations, OrthographicCamera } from '@react-three/drei';
import { MeshStandardMaterial } from 'three/src/materials/MeshStandardMaterial';
import { LinearEncoding, sRGBEncoding } from 'three/src/constants';
import { LineBasicMaterial, MeshPhysicalMaterial, Vector2 } from 'three';
import ReactAudioPlayer from 'react-audio-player';
import createAnimation from './converter';
import blinkData from './blendDataBlink.json';
import * as THREE from 'three';
import axios from 'axios';
const _ = require('lodash');

const host = 'https://detomo-ai-avatar-backend.hf.space';
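
// Avatar renders the GLTF head/body model, applies its textures and materials,
// and drives the lip-sync, blink and idle animations. When `speak` turns true
// it asks the backend for audio plus per-frame blend-shape data and plays the
// resulting clips through an AnimationMixer.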
function Avatar({ avatar_url, speak, chat, setSpeak, text, language, setAudioSource, playing }) {

  let gltf = useGLTF(avatar_url);
  let morphTargetDictionaryBody = null;
  let morphTargetDictionaryLowerTeeth = null;

  const [
    bodyTexture,
    eyesTexture,
    teethTexture,
    bodySpecularTexture,
    bodyRoughnessTexture,
    bodyNormalTexture,
    teethNormalTexture,
    // teethSpecularTexture,
    hairTexture,
    tshirtDiffuseTexture,
    tshirtNormalTexture,
    tshirtRoughnessTexture,
    hairAlphaTexture,
    hairNormalTexture,
    hairRoughnessTexture,
  ] = useTexture([
    "/images/body.webp",
    "/images/eyes.webp",
    "/images/teeth_diffuse.webp",
    "/images/body_specular.webp",
    "/images/body_roughness.webp",
    "/images/body_normal.webp",
    "/images/teeth_normal.webp",
    // "/images/teeth_specular.webp",
    "/images/h_color.webp",
    "/images/tshirt_diffuse.webp",
    "/images/tshirt_normal.webp",
    "/images/tshirt_roughness.webp",
    "/images/h_alpha.webp",
    "/images/h_normal.webp",
    "/images/h_roughness.webp",
  ]);
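
  // Color maps are treated as sRGB, and flipY is disabled to match glTF UV
  // conventions; the normal maps are switched back to linear encoding below.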
  _.each([
    bodyTexture,
    eyesTexture,
    teethTexture,
    teethNormalTexture,
    bodySpecularTexture,
    bodyRoughnessTexture,
    bodyNormalTexture,
    tshirtDiffuseTexture,
    tshirtNormalTexture,
    tshirtRoughnessTexture,
    hairAlphaTexture,
    hairNormalTexture,
    hairRoughnessTexture
  ], t => {
    t.encoding = sRGBEncoding;
    t.flipY = false;
  });

  bodyNormalTexture.encoding = LinearEncoding;
  tshirtNormalTexture.encoding = LinearEncoding;
  teethNormalTexture.encoding = LinearEncoding;
  hairNormalTexture.encoding = LinearEncoding;
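
  // Walk the loaded scene and assign a material to each named mesh. The morph
  // target dictionaries of the body and lower teeth are captured here so the
  // backend's blend-shape data can be converted into animation tracks.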
  gltf.scene.traverse(node => {
    if (node.type === 'Mesh' || node.type === 'LineSegments' || node.type === 'SkinnedMesh') {
      node.castShadow = true;
      node.receiveShadow = true;
      node.frustumCulled = false;

      if (node.name.includes("Body")) {
        node.castShadow = true;
        node.receiveShadow = true;
        node.material = new MeshPhysicalMaterial();
        node.material.map = bodyTexture;
        // node.material.shininess = 60;
        node.material.roughness = 1.7;
        // node.material.specularMap = bodySpecularTexture;
        node.material.roughnessMap = bodyRoughnessTexture;
        node.material.normalMap = bodyNormalTexture;
        node.material.normalScale = new Vector2(0.6, 0.6);
        morphTargetDictionaryBody = node.morphTargetDictionary;
        node.material.envMapIntensity = 0.8;
        // node.material.visible = false;
      }

      if (node.name.includes("Eyes")) {
        node.material = new MeshStandardMaterial();
        node.material.map = eyesTexture;
        // node.material.shininess = 100;
        node.material.roughness = 0.1;
        node.material.envMapIntensity = 0.5;
      }

      if (node.name.includes("Brows")) {
        node.material = new LineBasicMaterial({ color: 0x000000 });
        node.material.linewidth = 1;
        node.material.opacity = 0.5;
        node.material.transparent = true;
        node.visible = false;
      }

      if (node.name.includes("Teeth")) {
        node.receiveShadow = true;
        node.castShadow = true;
        node.material = new MeshStandardMaterial();
        node.material.roughness = 0.1;
        node.material.map = teethTexture;
        node.material.normalMap = teethNormalTexture;
        node.material.envMapIntensity = 0.7;
      }

      if (node.name.includes("Hair")) {
        node.material = new MeshStandardMaterial();
        node.material.map = hairTexture;
        node.material.alphaMap = hairAlphaTexture;
        node.material.normalMap = hairNormalTexture;
        node.material.roughnessMap = hairRoughnessTexture;
        node.material.transparent = true;
        node.material.depthWrite = false;
        node.material.side = 2;
        node.material.color.setHex(0x000000);
        node.material.envMapIntensity = 0.3;
      }

      if (node.name.includes("TSHIRT")) {
        node.material = new MeshStandardMaterial();
        node.material.map = tshirtDiffuseTexture;
        node.material.roughnessMap = tshirtRoughnessTexture;
        node.material.normalMap = tshirtNormalTexture;
        node.material.color.setHex(0xffffff);
        node.material.envMapIntensity = 0.5;
      }

      if (node.name.includes("TeethLower")) {
        morphTargetDictionaryLowerTeeth = node.morphTargetDictionary;
      }
    }
  });

  const [clips, setClips] = useState([]);
  const mixer = useMemo(() => new THREE.AnimationMixer(gltf.scene), []);
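
  // When `speak` becomes true: in chat mode, first ask the backend for a chat
  // reply and synthesize that; otherwise synthesize the typed text directly.
  // The backend returns blend-shape data plus an audio filename, which become
  // the animation clips and the audio source URL.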
  useEffect(() => {
    if (speak === false)
      return;

    if (chat === true) {
      makeChat(text)
        .then(response => {
          // Process the data returned from the OpenAI API here, for example:
          let returnedText = response.data.response;
          // Send this data to the backend for speech synthesis:
          return makeSpeech(returnedText, language)
            .then(backendResponse => {
              let { blendData, filename } = backendResponse.data;
              let newClips = [
                createAnimation(blendData, morphTargetDictionaryBody, 'HG_Body'),
                createAnimation(blendData, morphTargetDictionaryLowerTeeth, 'HG_TeethLower')
              ];
              filename = host + filename;
              setClips(newClips);
              setAudioSource(filename);
            });
        })
        .catch(err => {
          console.error(err);
          setSpeak(false);
        });
    } else {
      makeSpeech(text, language)
        .then(response => {
          let { blendData, filename } = response.data;
          let newClips = [
            createAnimation(blendData, morphTargetDictionaryBody, 'HG_Body'),
            createAnimation(blendData, morphTargetDictionaryLowerTeeth, 'HG_TeethLower')
          ];
          filename = host + filename;
          setClips(newClips);
          setAudioSource(filename);
        })
        .catch(err => {
          console.error(err);
          setSpeak(false);
        });
    }
  }, [speak]);
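
  // Idle body animation: keep only the head/neck/spine rotation tracks from
  // the FBX clip and rename them to the corresponding bones of the GLTF
  // skeleton so the mixer can apply them.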
  let idleFbx = useFBX('/idle.fbx');
  let { clips: idleClips } = useAnimations(idleFbx.animations);

  idleClips[0].tracks = _.filter(idleClips[0].tracks, track => {
    return track.name.includes("Head") || track.name.includes("Neck") || track.name.includes("Spine2");
  });

  idleClips[0].tracks = _.map(idleClips[0].tracks, track => {
    if (track.name.includes("Head")) {
      track.name = "head.quaternion";
    }
    if (track.name.includes("Neck")) {
      track.name = "neck.quaternion";
    }
    if (track.name.includes("Spine")) {
      track.name = "spine2.quaternion";
    }
    return track;
  });
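
  // Start the idle and blink loops once on mount.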
  useEffect(() => {
    let idleClipAction = mixer.clipAction(idleClips[0]);
    idleClipAction.play();

    let blinkClip = createAnimation(blinkData, morphTargetDictionaryBody, 'HG_Body');
    let blinkAction = mixer.clipAction(blinkClip);
    blinkAction.play();
  }, []);

  // Play the generated lip-sync clips once audio playback starts
  useEffect(() => {
    if (playing === false)
      return;

    _.each(clips, clip => {
      let clipAction = mixer.clipAction(clip);
      clipAction.setLoop(THREE.LoopOnce);
      clipAction.play();
    });
  }, [playing]);

  useFrame((state, delta) => {
    mixer.update(delta);
  });

  return (
    <group name="avatar">
      <primitive object={gltf.scene} dispose={null} />
    </group>
  );
}
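
// Backend helpers: POST /talk returns { blendData, filename } for the given
// text and language; POST /chat returns { response } with the reply text.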
function makeSpeech(text, language) {
  return axios.post(host + '/talk', { text, language });
}

function makeChat(text) {
  return axios.post(host + '/chat', { text });
}
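
// Minimal toggle switch: a pill-shaped track whose knob slides to the right
// when `checked` is true; clicking anywhere on it fires `onChange`.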
function ToggleSwitch({ checked, onChange }) {
  return (
    <div
      onClick={onChange}
      style={{
        ...STYLES.speak,
        width: '60px',
        height: '30px',
        borderRadius: '15px',
        padding: 0,
        position: 'relative',
        backgroundColor: checked ? '#555' : '#222',
        transition: 'background-color 0.3s'
      }}
    >
      <div
        style={{
          width: '28px',
          height: '28px',
          borderRadius: '50%',
          backgroundColor: '#FFF',
          position: 'absolute',
          top: '1px',
          left: checked ? '32px' : '1px',
          transition: 'left 0.3s'
        }}
      />
    </div>
  );
}

const STYLES = {
  area: { position: 'absolute', bottom: '10px', left: '30%', zIndex: 500, display: 'flex', flexDirection: 'column', alignItems: 'center' },
  text: { margin: '0px', width: '700px', padding: '5px', background: 'none', color: '#000000', fontSize: '1.6em', border: 'none' },
  speak: { padding: '10px', display: 'inline-block', color: '#FFFFFF', background: '#222222', border: 'none', marginLeft: '10px' },
  area2: { position: 'absolute', top: '5px', right: '15px', zIndex: 500 },
  label: { color: '#777777', fontSize: '1.2em' },
  languageSelect: { border: 'none', padding: '10px', backgroundColor: '#222222', color: '#fff', display: 'inline-block' },
  buttonContainer: { display: 'flex', justifyContent: 'center', gap: '10px' }
};
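
// App wires everything together: the text box and controls, the audio player
// that plays the synthesized speech, and the three.js canvas with the camera,
// environment lighting, background plane and the Avatar itself.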
function App() {

  const audioPlayer = useRef();

  const [speak, setSpeak] = useState(false);
  const [text, setText] = useState("My name is Arwen. I'm a virtual human who can speak whatever you type here along with realistic facial movements.");
  const [audioSource, setAudioSource] = useState(null);
  const [playing, setPlaying] = useState(false);
  const [language, setLanguage] = useState('en-US');
  const [loop, setLoop] = useState(false);
  const [chat, setChat] = useState(false);

  // Playback finished
  function playerEnded(e) {
    setAudioSource(null);
    setSpeak(false);
    setPlaying(false);
    if (loop) {
      setTimeout(() => {
        setSpeak(true); // Re-trigger the backend request after 5 seconds
      }, 5000);
    }
  }

  // Audio is buffered and ready to play
  function playerReady(e) {
    audioPlayer.current.audioEl.current.play();
    setPlaying(true);
  }

  return (
    <div className="full">
      <div style={STYLES.area}>
        <textarea rows={4} style={STYLES.text} value={text} onChange={(e) => setText(e.target.value.substring(0, 500))} />
        <div style={STYLES.buttonContainer}>
          <button onClick={() => setSpeak(true)} style={STYLES.speak}>{speak ? 'Running...' : 'Generate'}</button>
          <label>
            <select style={STYLES.languageSelect} value={language} onChange={e => setLanguage(e.target.value)}>
              <option value="en-US">English (US)</option>
              <option value="ja-JP">Japanese</option>
            </select>
          </label>
          <ToggleSwitch checked={loop} onChange={() => setLoop(!loop)} />
          <label style={{ ...STYLES.label, marginLeft: '10px' }}>
            Loop
          </label>
          <ToggleSwitch checked={chat} onChange={() => setChat(!chat)} />
          <label style={{ ...STYLES.label, marginLeft: '10px' }}>
            Chat
          </label>
        </div>
      </div>

      <ReactAudioPlayer
        src={audioSource}
        ref={audioPlayer}
        onEnded={playerEnded}
        onCanPlayThrough={playerReady}
      />

      {/* <Stats /> */}
      <Canvas dpr={2} onCreated={(ctx) => {
        ctx.gl.physicallyCorrectLights = true;
      }}>

        <OrthographicCamera
          makeDefault
          zoom={2000}
          position={[0, 1.65, 1]}
        />

        {/* <OrbitControls
          target={[0, 1.65, 0]}
        /> */}

        <Suspense fallback={null}>
          <Environment background={false} files="/images/photo_studio_loft_hall_1k.hdr" />
        </Suspense>

        <Suspense fallback={null}>
          <Bg />
        </Suspense>

        <Suspense fallback={null}>
          <Avatar
            avatar_url="/model.glb"
            speak={speak}
            chat={chat}
            setSpeak={setSpeak}
            text={text}
            language={language}
            setAudioSource={setAudioSource}
            playing={playing}
          />
        </Suspense>

      </Canvas>
      <Loader dataInterpolation={(p) => `Loading... please wait`} />
    </div>
  );
}
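
// Flat background plane placed behind the avatar and textured with bg.webp.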
function Bg() {

  const texture = useTexture('/images/bg.webp');

  return (
    <mesh position={[0, 1.5, -2]} scale={[0.8, 0.8, 0.8]}>
      <planeBufferGeometry />
      <meshBasicMaterial map={texture} />
    </mesh>
  );
}

export default App;