Files
gradio-webrtc/frontend/shared/VideoChat/webrtc_utils.ts
neil.xh f476f9cf29 gs chat integration
This change adds and fleshes out the gs video chat feature, including front-end and back-end interface definitions, state management, and UI component implementation, and introduces new dependency libraries to support additional interactive features.
Link: https://code.alibaba-inc.com/xr-paas/gradio_webrtc/codereview/21273476
* Update the Python part

* Merge the videochat front-end part

* Merge branch 'feature/update-fastrtc-0.0.19' of http://gitlab.alibaba-inc.com/xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Replace audiowave

* Update import paths

* Merge websocket mode logic

* feat: gaussian avatar chat

* Add input parameters for other renderers

* feat: ws connection and usage

* Merge branch 'feature/update-fastrtc-0.0.19' of http://gitlab.alibaba-inc.com/xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* If the right-edge offset exceeds the container width, shift left

* Pass configuration through

* Merge branch 'feature/update-fastrtc-0.0.19' of gitlab.alibaba-inc.com:xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Gaussian package exception

* Sync webrtc_utils

* Update webrtc_utils

* Support on_chat_datachannel

* Fix the device name list not displaying correctly

* copy: pass webrtc_id through

* Merge branch 'feature/update-fastrtc-0.0.19' of gitlab.alibaba-inc.com:xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Ensure the websocket connection is made only after webrtc setup completes

* feat: audio expression data integration

* Upload dist

* Hide canvas

* feat: expose Gaussian file download progress

* Merge branch 'feature/update-fastrtc-0.0.19' of http://gitlab.alibaba-inc.com/xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Fix the issue where permissions could not be obtained

* Merge branch 'feature/update-fastrtc-0.0.19' of gitlab.alibaba-inc.com:xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Request permissions before enumerating devices

* fix: do not process ws data until gs resources finish downloading

* fix: merge

* Adjust prompt wording

* Merge branch 'feature/update-fastrtc-0.0.19' of gitlab.alibaba-inc.com:xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Fix the issue where restarting a conversation after switching devices fell back to the default device

* Merge branch 'feature/update-fastrtc-0.0.19' of http://gitlab.alibaba-inc.com/xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Update localvideo size

* Merge branch 'feature/update-fastrtc-0.0.19' of gitlab.alibaba-inc.com:xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Must not default to "default"

* Fix audio permission issue

* Update the build output

* fix: tie the chat button state to gs resources; remove unused code

* fix: merge

* feat: import the gs rendering module from an npm package

* fix

* Add chat history

* Merge branch 'feature/update-fastrtc-0.0.19' of http://gitlab.alibaba-inc.com/xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Style changes

* Update packages

* fix: gs digital human initial position and muting

* Scroll the chat history to the bottom

* At least 100% height

* Merge branch 'feature/update-fastrtc-0.0.19' of gitlab.alibaba-inc.com:xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Move the text box up slightly

* Clear the chat history when a connection starts

* fix: update gs render npm

* Merge branch 'feature/update-fastrtc-0.0.19' of http://gitlab.alibaba-inc.com/xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Logic safeguard

* Merge branch 'feature/update-fastrtc-0.0.19' of gitlab.alibaba-inc.com:xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* feat: configure whether audio starts muted

* Adjust the actionsbar position when subtitles are shown

* Merge branch 'feature/update-fastrtc-0.0.19' of http://gitlab.alibaba-inc.com/xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Style improvements

* feat: add readme

* fix: resource images

* fix: docs

* fix: update gs render sdk

* fix: frame position calculation in gs mode

* fix: update readme

* Device detection; handle overly narrow layouts

* Merge branch 'feature/update-fastrtc-0.0.19' of gitlab.alibaba-inc.com:xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* Separate "has permission" from "has devices"

* feat: split the gs download and load hook functions

* Merge branch 'feature/update-fastrtc-0.0.19' of http://gitlab.alibaba-inc.com/xr-paas/gradio_webrtc into feature/update-fastrtc-0.0.19

* fix: update gs render sdk

* Replace

* dist

* Upload files

* del
2025-04-16 19:09:04 +08:00


export function createPeerConnection(pc, node) {
  // Register some listeners to help debugging.
  pc.addEventListener(
    "icegatheringstatechange",
    () => {
      console.debug(pc.iceGatheringState);
    },
    false,
  );
  pc.addEventListener(
    "iceconnectionstatechange",
    () => {
      console.debug(pc.iceConnectionState);
    },
    false,
  );
  pc.addEventListener(
    "signalingstatechange",
    () => {
      console.debug(pc.signalingState);
    },
    false,
  );
  // Connect audio / video from the server to the local element.
  pc.addEventListener("track", (evt) => {
    console.debug("track event listener");
    if (node && node.srcObject !== evt.streams[0]) {
      console.debug("streams", evt.streams);
      node.srcObject = evt.streams[0];
      console.debug("node.srcObject", node.srcObject);
      if (evt.track.kind === "audio") {
        node.volume = 1.0; // Ensure volume is up
        node.muted = false;
        node.autoplay = true;
        // Attempt to play (needed for some browsers).
        node.play().catch((e) => console.debug("Autoplay failed:", e));
      }
    }
  });
  return pc;
}
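/**
 * Sets up the peer connection, opens a "text" data channel, and negotiates
 * with the server via server_fn.
 *
 * - When the data channel opens it sends a "handshake" string followed by a
 *   JSON {type: "init"} message.
 * - Incoming control messages ("change" / "tick" / "stopword" strings, or
 *   JSON payloads typed warning / error / send_input / fetch_output /
 *   stopword / end_stream) are passed to on_change_cb; every message is also
 *   forwarded to additional_message_cb.
 * - If a local stream is provided, its tracks are added with rtp_params
 *   applied to each sender; otherwise a recv-only transceiver is created for
 *   the given modality.
 *
 * Resolves to the tuple [pc, data_channel].
 */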
export async function start(
  stream,
  pc: RTCPeerConnection,
  node,
  server_fn,
  webrtc_id,
  modality: "video" | "audio" = "video",
  on_change_cb: (msg: "change" | "tick") => void = () => {},
  rtp_params = {},
  additional_message_cb: (msg: object) => void = () => {},
  reject_cb: (msg: object) => void = () => {},
) {
  pc = createPeerConnection(pc, node);
  const data_channel = pc.createDataChannel("text");
  data_channel.onopen = () => {
    console.debug("Data channel is open");
    data_channel.send("handshake");
    data_channel.send(JSON.stringify({ type: "init" }));
  };
  data_channel.onmessage = (event) => {
    console.debug("Received message:", event.data);
    let event_json;
    try {
      event_json = JSON.parse(event.data);
    } catch (e) {
      console.debug("Error parsing JSON");
    }
    if (
      event.data === "change" ||
      event.data === "tick" ||
      event.data === "stopword" ||
      event_json?.type === "warning" ||
      event_json?.type === "error" ||
      event_json?.type === "send_input" ||
      event_json?.type === "fetch_output" ||
      event_json?.type === "stopword" ||
      event_json?.type === "end_stream"
    ) {
      on_change_cb(event_json ?? event.data);
    }
    additional_message_cb(event_json ?? event.data);
  };
  if (stream) {
    stream.getTracks().forEach(async (track) => {
      console.debug("Track stream callback", track);
      const sender = pc.addTrack(track, stream);
      const params = sender.getParameters();
      const updated_params = { ...params, ...rtp_params };
      await sender.setParameters(updated_params);
      console.debug("sender params", sender.getParameters());
    });
  } else {
    console.debug("Creating transceiver!");
    pc.addTransceiver(modality, { direction: "recvonly" });
  }
  await negotiate(pc, server_fn, webrtc_id, reject_cb);
  return [pc, data_channel] as const;
}
function make_offer(
  server_fn: any,
  body,
  reject_cb: (msg: object) => void = () => {},
): Promise<object> {
  return new Promise((resolve, reject) => {
    server_fn(body).then((data) => {
      console.debug("data", data);
      if (data?.status === "failed") {
        reject_cb(data);
        console.debug("rejecting");
        reject("error");
        return;
      }
      resolve(data);
    });
  });
}
async function negotiate(
  pc: RTCPeerConnection,
  server_fn: any,
  webrtc_id: string,
  reject_cb: (msg: object) => void = () => {},
): Promise<void> {
  // Trickle ICE: forward each local candidate to the server as it is gathered.
  pc.onicecandidate = ({ candidate }) => {
    if (candidate) {
      console.debug("Sending ICE candidate", candidate);
      server_fn({
        candidate: candidate.toJSON(),
        webrtc_id: webrtc_id,
        type: "ice-candidate",
      }).catch((err) => console.error("Error sending ICE candidate:", err));
    }
  };
  return pc
    .createOffer()
    .then((offer) => {
      return pc.setLocalDescription(offer);
    })
    .then(() => {
      const offer = pc.localDescription;
      return make_offer(
        server_fn,
        {
          sdp: offer.sdp,
          type: offer.type,
          webrtc_id: webrtc_id,
        },
        reject_cb,
      );
    })
    .then((answer) => {
      return pc.setRemoteDescription(answer);
    });
}
export function stop(pc: RTCPeerConnection) {
  console.debug("Stopping peer connection");
  // Close transceivers.
  if (pc.getTransceivers) {
    pc.getTransceivers().forEach((transceiver) => {
      if (transceiver.stop) {
        transceiver.stop();
      }
    });
  }
  // Stop local audio / video tracks.
  if (pc.getSenders()) {
    pc.getSenders().forEach((sender) => {
      console.log("sender", sender);
      if (sender.track && sender.track.stop) sender.track.stop();
    });
  }
  // Close the peer connection after a short delay.
  setTimeout(() => {
    pc.close();
  }, 500);
}
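For reference, a caller might wire these helpers together roughly as in the sketch below. The "/webrtc/offer" endpoint, the element lookup, and the logging callback are illustrative assumptions, not part of this module.

// Minimal usage sketch (assumed endpoint and audio element; adapt to the host app).
async function startAudioSession() {
  const pc = new RTCPeerConnection();
  const node = document.querySelector("audio") as HTMLAudioElement;
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const webrtc_id = Math.random().toString(36).slice(2);

  // server_fn forwards offers and ICE candidates to the backend (assumed URL).
  const server_fn = (body: object) =>
    fetch("/webrtc/offer", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(body),
    }).then((res) => res.json());

  const [connection, data_channel] = await start(
    stream,
    pc,
    node,
    server_fn,
    webrtc_id,
    "audio",
    (msg) => console.debug("control message:", msg),
  );

  // ... later, tear everything down.
  stop(connection);
  data_channel.close();
}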