mirror of
https://github.com/HumanAIGC-Engineering/gradio-webrtc.git
synced 2026-02-05 09:59:22 +08:00
# Please enter a commit message for your changes. Lines starting with '#' will be ignored, and an empty commit message aborts the commit.
164
frontend/shared/AudioWave.svelte
Normal file
@@ -0,0 +1,164 @@
<script lang="ts">
  import { onDestroy } from 'svelte';
  import type { ComponentType } from 'svelte';

  import PulsingIcon from './PulsingIcon.svelte';

  export let numBars = 16;
  export let stream_state: "open" | "closed" | "waiting" = "closed";
  export let audio_source_callback: () => MediaStream;
  export let icon: string | undefined | ComponentType = undefined;
  export let icon_button_color: string = "var(--color-accent)";
  export let pulse_color: string = "var(--color-accent)";

  let audioContext: AudioContext;
  let analyser: AnalyserNode;
  let dataArray: Uint8Array;
  let animationId: number;
  let pulseScale = 1;

  $: containerWidth = icon
    ? "128px"
    : `calc((var(--boxSize) + var(--gutter)) * ${numBars})`;

  $: if (stream_state === "open") setupAudioContext();

  onDestroy(() => {
    if (animationId) {
      cancelAnimationFrame(animationId);
    }
    if (audioContext) {
      audioContext.close();
    }
  });

  function setupAudioContext() {
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    analyser = audioContext.createAnalyser();
    const source = audioContext.createMediaStreamSource(audio_source_callback());

    source.connect(analyser);

    analyser.fftSize = 64;
    analyser.smoothingTimeConstant = 0.8;
    dataArray = new Uint8Array(analyser.frequencyBinCount);

    updateVisualization();
  }

  function updateVisualization() {
    analyser.getByteFrequencyData(dataArray);

    // Update bars
    const bars = document.querySelectorAll('.gradio-webrtc-waveContainer .gradio-webrtc-box');
    for (let i = 0; i < bars.length; i++) {
      const barHeight = (dataArray[i] / 255) * 2;
      bars[i].style.transform = `scaleY(${Math.max(0.1, barHeight)})`;
    }

    animationId = requestAnimationFrame(updateVisualization);
  }
</script>

<div class="gradio-webrtc-waveContainer">
  {#if icon}
    <div class="gradio-webrtc-icon-container">
      <div
        class="gradio-webrtc-icon"
        style:transform={`scale(${pulseScale})`}
        style:background={icon_button_color}
      >
        <PulsingIcon
          {stream_state}
          {pulse_color}
          {icon}
          {icon_button_color}
          {audio_source_callback}/>
      </div>
    </div>
  {:else}
    <div class="gradio-webrtc-boxContainer" style:width={containerWidth}>
      {#each Array(numBars) as _}
        <div class="gradio-webrtc-box"></div>
      {/each}
    </div>
  {/if}
</div>

<style>
  .gradio-webrtc-waveContainer {
    position: relative;
    display: flex;
    min-height: 100px;
    max-height: 128px;
    justify-content: center;
    align-items: center;
  }

  .gradio-webrtc-boxContainer {
    display: flex;
    justify-content: space-between;
    height: 64px;
    --boxSize: 8px;
    --gutter: 4px;
  }

  .gradio-webrtc-box {
    height: 100%;
    width: var(--boxSize);
    background: var(--color-accent);
    border-radius: 8px;
    transition: transform 0.05s ease;
  }

  .gradio-webrtc-icon-container {
    position: relative;
    width: 128px;
    height: 128px;
    display: flex;
    justify-content: center;
    align-items: center;
  }

  .gradio-webrtc-icon {
    position: relative;
    width: 48px;
    height: 48px;
    border-radius: 50%;
    transition: transform 0.1s ease;
    display: flex;
    justify-content: center;
    align-items: center;
    z-index: 2;
  }

  .icon-image {
    width: 32px;
    height: 32px;
    object-fit: contain;
    filter: brightness(0) invert(1);
  }

  .pulse-ring {
    position: absolute;
    top: 50%;
    left: 50%;
    transform: translate(-50%, -50%);
    width: 48px;
    height: 48px;
    border-radius: 50%;
    animation: pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
    opacity: 0.5;
  }

  @keyframes pulse {
    0% {
      transform: translate(-50%, -50%) scale(1);
      opacity: 0.5;
    }
    100% {
      transform: translate(-50%, -50%) scale(var(--max-scale, 3));
      opacity: 0;
    }
  }
</style>
454
frontend/shared/InteractiveAudio.svelte
Normal file
@@ -0,0 +1,454 @@
<script lang="ts">
  import {
    BlockLabel,
  } from "@gradio/atoms";
  import type { I18nFormatter } from "@gradio/utils";
  import { createEventDispatcher } from "svelte";
  import { onMount } from "svelte";
  import { fade } from "svelte/transition";
  import { StreamingBar } from "@gradio/statustracker";
  import {
    Circle,
    Square,
    Spinner,
    Music,
    DropdownArrow,
    Microphone
  } from "@gradio/icons";

  import { start, stop } from "./webrtc_utils";
  import { get_devices, set_available_devices } from "./stream_utils";
  import AudioWave from "./AudioWave.svelte";
  import WebcamPermissions from "./WebcamPermissions.svelte";

  export let mode: "send-receive" | "send";
  export let value: string | null = null;
  export let label: string | undefined = undefined;
  export let show_label = true;
  export let rtc_configuration: Object | null = null;
  export let i18n: I18nFormatter;
  export let time_limit: number | null = null;
  export let track_constraints: MediaTrackConstraints = {};
  export let rtp_params: RTCRtpParameters = {} as RTCRtpParameters;
  export let on_change_cb: (mg: "tick" | "change") => void;
  export let icon: string | undefined = undefined;
  export let icon_button_color: string = "var(--color-accent)";
  export let pulse_color: string = "var(--color-accent)";
  export let button_labels: {start: string, stop: string, waiting: string};

  let stopword_recognized = false;

  let notification_sound;

  onMount(() => {
    if (value === "__webrtc_value__") {
      notification_sound = new Audio("https://huggingface.co/datasets/freddyaboulton/bucket/resolve/main/pop-sounds.mp3");
    }
  });

  let _on_change_cb = (msg: "change" | "tick" | "stopword") => {
    console.log("msg", msg);
    if (msg === "stopword") {
      console.log("stopword recognized");
      stopword_recognized = true;
      setTimeout(() => {
        stopword_recognized = false;
      }, 3000);
    } else {
      console.log("calling on_change_cb with msg", msg);
      on_change_cb(msg);
    }
  };

  let options_open = false;

  let _time_limit: number | null = null;

  export let server: {
    offer: (body: any) => Promise<any>;
  };

  let stream_state: "open" | "closed" | "waiting" = "closed";
  let audio_player: HTMLAudioElement;
  let pc: RTCPeerConnection;
  let _webrtc_id = null;
  let stream: MediaStream;
  let available_audio_devices: MediaDeviceInfo[];
  let selected_device: MediaDeviceInfo | null = null;
  let mic_accessed = false;

  const audio_source_callback = () => {
    console.log("stream in callback", stream);
    if (mode === "send") return stream;
    else return audio_player.srcObject as MediaStream
  }

  const dispatch = createEventDispatcher<{
    tick: undefined;
    state_change: undefined;
    error: string
    play: undefined;
    stop: undefined;
  }>();

  async function access_mic(): Promise<void> {
    try {
      const constraints = selected_device ? { deviceId: { exact: selected_device.deviceId }, ...track_constraints } : track_constraints;
      const stream_ = await navigator.mediaDevices.getUserMedia({ audio: constraints });
      stream = stream_;
    } catch (err) {
      if (!navigator.mediaDevices) {
        dispatch("error", i18n("audio.no_device_support"));
        return;
      }
      if (err instanceof DOMException && err.name == "NotAllowedError") {
        dispatch("error", i18n("audio.allow_recording_access"));
        return;
      }
      throw err;
    }
    available_audio_devices = set_available_devices(await get_devices(), "audioinput");
    mic_accessed = true;
    const used_devices = stream
      .getTracks()
      .map((track) => track.getSettings()?.deviceId)[0];

    selected_device = used_devices
      ? available_audio_devices.find((device) => device.deviceId === used_devices) ||
        available_audio_devices[0]
      : available_audio_devices[0];
  }

  async function start_stream(): Promise<void> {
    if (stream_state === "open") {
      stop(pc);
      stream_state = "closed";
      _time_limit = null;
      await access_mic();
      return;
    }
    _webrtc_id = Math.random().toString(36).substring(2);
    value = _webrtc_id;
    pc = new RTCPeerConnection(rtc_configuration);
    pc.addEventListener("connectionstatechange",
      async (event) => {
        switch (pc.connectionState) {
          case "connected":
            console.info("connected");
            stream_state = "open";
            _time_limit = time_limit;
            break;
          case "disconnected":
            console.info("closed");
            stream_state = "closed";
            _time_limit = null;
            stop(pc);
            break;
          default:
            break;
        }
      }
    )
    stream_state = "waiting"
    stream = null

    try {
      await access_mic();
    } catch (err) {
      if (!navigator.mediaDevices) {
        dispatch("error", i18n("audio.no_device_support"));
        return;
      }
      if (err instanceof DOMException && err.name == "NotAllowedError") {
        dispatch("error", i18n("audio.allow_recording_access"));
        return;
      }
      throw err;
    }
    if (stream == null) return;

    start(stream, pc, mode === "send" ? null: audio_player, server.offer, _webrtc_id, "audio", _on_change_cb, rtp_params).then((connection) => {
      pc = connection;
    }).catch(() => {
      console.info("catching")
      dispatch("error", "Too many concurrent users. Come back later!");
    });
  }

  function handle_click_outside(event: MouseEvent): void {
    event.preventDefault();
    event.stopPropagation();
    options_open = false;
  }

  function click_outside(node: Node, cb: any): any {
    const handle_click = (event: MouseEvent): void => {
      if (
        node &&
        !node.contains(event.target as Node) &&
        !event.defaultPrevented
      ) {
        cb(event);
      }
    };

    document.addEventListener("click", handle_click, true);

    return {
      destroy() {
        document.removeEventListener("click", handle_click, true);
      }
    };
  }

  const handle_device_change = async (event: InputEvent): Promise<void> => {
    const target = event.target as HTMLInputElement;
    const device_id = target.value;

    stream = await navigator.mediaDevices.getUserMedia({ audio: {deviceId: { exact: device_id }, ...track_constraints }});
    selected_device =
      available_audio_devices.find(
        (device) => device.deviceId === device_id
      ) || null;
    options_open = false;
  };

  $: if (stopword_recognized) {
    notification_sound.play();
  }
</script>

<BlockLabel
  {show_label}
  Icon={Music}
  float={false}
  label={label || i18n("audio.audio")}
/>
<div class="audio-container">
  <audio
    class="standard-player"
    class:hidden={value === "__webrtc_value__"}
    on:load
    bind:this={audio_player}
    on:ended={() => dispatch("stop")}
    on:play={() => dispatch("play")}
  />
  {#if !mic_accessed}
    <div
      in:fade={{ delay: 100, duration: 200 }}
      title="grant webcam access"
      style="height: 100%"
    >
      <WebcamPermissions icon={Microphone} on:click={async () => access_mic()} />
    </div>
  {:else}
    <AudioWave {audio_source_callback} {stream_state} {icon} {icon_button_color} {pulse_color}/>
    <StreamingBar time_limit={_time_limit} />
    <div class="button-wrap" class:pulse={stopword_recognized}>
      <button
        on:click={start_stream}
        aria-label={"start stream"}
      >
        {#if stream_state === "waiting"}
          <div class="icon-with-text">
            <div class="icon color-primary" title="spinner">
              <Spinner />
            </div>
            {button_labels.waiting || i18n("audio.waiting")}
          </div>
        {:else if stream_state === "open"}
          <div class="icon-with-text">
            <div class="icon color-primary" title="stop recording">
              <Square />
            </div>
            {button_labels.stop || i18n("audio.stop")}
          </div>
        {:else}
          <div class="icon-with-text">
            <div class="icon color-primary" title="start recording">
              <Circle />
            </div>
            {button_labels.start || i18n("audio.record")}
          </div>
        {/if}
      </button>
      {#if stream_state === "closed"}
        <button
          class="icon"
          on:click={() => (options_open = true)}
          aria-label="select input source"
        >
          <DropdownArrow />
        </button>
      {/if}
      {#if options_open && selected_device}
        <select
          class="select-wrap"
          aria-label="select source"
          use:click_outside={handle_click_outside}
          on:change={handle_device_change}
        >
          <button
            class="inset-icon"
            on:click|stopPropagation={() => (options_open = false)}
          >
            <DropdownArrow />
          </button>
          {#if available_audio_devices.length === 0}
            <option value="">{i18n("common.no_devices")}</option>
          {:else}
            {#each available_audio_devices as device}
              <option
                value={device.deviceId}
                selected={selected_device.deviceId === device.deviceId}
              >
                {device.label}
              </option>
            {/each}
          {/if}
        </select>
      {/if}
    </div>
  {/if}
</div>

<style>
  .audio-container {
    display: flex;
    height: 100%;
    flex-direction: column;
    justify-content: center;
    align-items: center;
  }

  :global(::part(wrapper)) {
    margin-bottom: var(--size-2);
  }

  .standard-player {
    width: 100%;
    padding: var(--size-2);
  }

  .hidden {
    display: none;
  }

  .button-wrap {
    margin-top: var(--size-2);
    margin-bottom: var(--size-2);
    background-color: var(--block-background-fill);
    border: 1px solid var(--border-color-primary);
    border-radius: var(--radius-xl);
    padding: var(--size-1-5);
    display: flex;
    bottom: var(--size-2);
    box-shadow: var(--shadow-drop-lg);
    border-radius: var(--radius-xl);
    line-height: var(--size-3);
    color: var(--button-secondary-text-color);
  }

  @keyframes pulse {
    0% {
      transform: scale(1);
      box-shadow: 0 0 0 0 rgba(var(--primary-500-rgb), 0.7);
    }

    70% {
      transform: scale(1.25);
      box-shadow: 0 0 0 10px rgba(var(--primary-500-rgb), 0);
    }

    100% {
      transform: scale(1);
      box-shadow: 0 0 0 0 rgba(var(--primary-500-rgb), 0);
    }
  }

  .pulse {
    animation: pulse 1s infinite;
  }

  .icon-with-text {
    min-width: var(--size-16);
    align-items: center;
    margin: 0 var(--spacing-xl);
    display: flex;
    justify-content: space-evenly;
    gap: var(--size-2);
  }

  @media (--screen-md) {
    button {
      bottom: var(--size-4);
    }
  }

  @media (--screen-xl) {
    button {
      bottom: var(--size-8);
    }
  }

  .icon {
    width: 18px;
    height: 18px;
    display: flex;
    justify-content: space-between;
    align-items: center;
  }

  .color-primary {
    fill: var(--primary-600);
    stroke: var(--primary-600);
    color: var(--primary-600);
  }

  .select-wrap {
    -webkit-appearance: none;
    -moz-appearance: none;
    appearance: none;
    color: var(--button-secondary-text-color);
    background-color: transparent;
    width: 95%;
    font-size: var(--text-md);
    position: absolute;
    bottom: var(--size-2);
    background-color: var(--block-background-fill);
    box-shadow: var(--shadow-drop-lg);
    border-radius: var(--radius-xl);
    z-index: var(--layer-top);
    border: 1px solid var(--border-color-primary);
    text-align: left;
    line-height: var(--size-4);
    white-space: nowrap;
    text-overflow: ellipsis;
    left: 50%;
    transform: translate(-50%, 0);
    max-width: var(--size-52);
  }

  .select-wrap > option {
    padding: 0.25rem 0.5rem;
    border-bottom: 1px solid var(--border-color-accent);
    padding-right: var(--size-8);
    text-overflow: ellipsis;
    overflow: hidden;
  }

  .select-wrap > option:hover {
    background-color: var(--color-accent);
  }

  .select-wrap > option:last-child {
    border: none;
  }
</style>
89
frontend/shared/InteractiveVideo.svelte
Normal file
@@ -0,0 +1,89 @@
<script lang="ts">
  import { createEventDispatcher } from "svelte";
  import type { ComponentType } from "svelte";
  import type { FileData, Client } from "@gradio/client";
  import { BlockLabel } from "@gradio/atoms";
  import Webcam from "./Webcam.svelte";
  import { Video } from "@gradio/icons";

  import type { I18nFormatter } from "@gradio/utils";

  export let value: string = null;
  export let label: string | undefined = undefined;
  export let show_label = true;
  export let include_audio: boolean;
  export let i18n: I18nFormatter;
  export let active_source: "webcam" | "upload" = "webcam";
  export let handle_reset_value: () => void = () => {};
  export let stream_handler: Client["stream"];
  export let time_limit: number | null = null;
  export let button_labels: {start: string, stop: string, waiting: string};
  export let server: {
    offer: (body: any) => Promise<any>;
  };
  export let rtc_configuration: Object;
  export let track_constraints: MediaTrackConstraints = {};
  export let mode: "send" | "send-receive";
  export let on_change_cb: (msg: "change" | "tick") => void;
  export let rtp_params: RTCRtpParameters = {} as RTCRtpParameters;
  export let icon: string | undefined | ComponentType = undefined;
  export let icon_button_color: string = "var(--color-accent)";
  export let pulse_color: string = "var(--color-accent)";

  const dispatch = createEventDispatcher<{
    change: FileData | null;
    clear?: never;
    play?: never;
    pause?: never;
    end?: never;
    drag: boolean;
    error: string;
    upload: FileData;
    start_recording?: never;
    stop_recording?: never;
    tick: never;
  }>();

  let dragging = false;
  $: dispatch("drag", dragging);

  $: console.log("value", value)
</script>

<BlockLabel {show_label} Icon={Video} label={label || "Video"} />
<div data-testid="video" class="video-container">
  <Webcam
    {rtc_configuration}
    {include_audio}
    {time_limit}
    {track_constraints}
    {mode}
    {rtp_params}
    {on_change_cb}
    {icon}
    {icon_button_color}
    {pulse_color}
    {button_labels}
    on:error
    on:start_recording
    on:stop_recording
    on:tick
    {i18n}
    stream_every={0.5}
    {server}
    bind:webrtc_id={value}
  />

  <!-- <SelectSource {sources} bind:active_source /> -->
</div>

<style>
  .video-container {
    display: flex;
    height: 100%;
    flex-direction: column;
    justify-content: center;
    align-items: center;
  }
</style>
151
frontend/shared/PulsingIcon.svelte
Normal file
@@ -0,0 +1,151 @@
<script lang="ts">
  import { onDestroy } from 'svelte';
  import type { ComponentType } from 'svelte';

  export let stream_state: "open" | "closed" | "waiting" = "closed";
  export let audio_source_callback: () => MediaStream;
  export let icon: string | ComponentType = undefined;
  export let icon_button_color: string = "var(--color-accent)";
  export let pulse_color: string = "var(--color-accent)";

  let audioContext: AudioContext;
  let analyser: AnalyserNode;
  let dataArray: Uint8Array;
  let animationId: number;
  let pulseScale = 1;
  let pulseIntensity = 0;

  $: if (stream_state === "open") setupAudioContext();

  onDestroy(() => {
    if (animationId) {
      cancelAnimationFrame(animationId);
    }
    if (audioContext) {
      audioContext.close();
    }
  });

  function setupAudioContext() {
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    analyser = audioContext.createAnalyser();
    const source = audioContext.createMediaStreamSource(audio_source_callback());

    source.connect(analyser);

    analyser.fftSize = 64;
    analyser.smoothingTimeConstant = 0.8;
    dataArray = new Uint8Array(analyser.frequencyBinCount);

    updateVisualization();
  }

  function updateVisualization() {
    analyser.getByteFrequencyData(dataArray);

    // Calculate average amplitude for pulse effect
    const average = Array.from(dataArray).reduce((a, b) => a + b, 0) / dataArray.length;
    const normalizedAverage = average / 255;
    pulseScale = 1 + (normalizedAverage * 0.15);
    pulseIntensity = normalizedAverage;
    animationId = requestAnimationFrame(updateVisualization);
  }

  $: maxPulseScale = 1 + (pulseIntensity * 10); // Scale from 1x to 3x based on intensity
</script>

<div class="gradio-webrtc-icon-wrapper">
  <div class="gradio-webrtc-pulsing-icon-container">
    {#if pulseIntensity > 0}
      {#each Array(3) as _, i}
        <div
          class="pulse-ring"
          style:background={pulse_color}
          style:animation-delay={`${i * 0.4}s`}
          style:--max-scale={maxPulseScale}
          style:opacity={0.5 * pulseIntensity}
        />
      {/each}
    {/if}

    <div
      class="gradio-webrtc-pulsing-icon"
      style:transform={`scale(${pulseScale})`}
      style:background={icon_button_color}
    >
      {#if typeof icon === "string"}
        <img
          src={icon}
          alt="Audio visualization icon"
          class="icon-image"
        />
      {:else}
        <svelte:component this={icon} />
      {/if}
    </div>
  </div>
</div>

<style>
  .gradio-webrtc-icon-wrapper {
    position: relative;
    display: flex;
    max-height: 128px;
    justify-content: center;
    align-items: center;
  }

  .gradio-webrtc-pulsing-icon-container {
    position: relative;
    width: 100%;
    height: 100%;
    display: flex;
    justify-content: center;
    align-items: center;
  }

  .gradio-webrtc-pulsing-icon {
    position: relative;
    width: 100%;
    height: 100%;
    border-radius: 50%;
    transition: transform 0.1s ease;
    display: flex;
    justify-content: center;
    align-items: center;
    z-index: 2;
  }

  .icon-image {
    width: 100%;
    height: 100%;
    object-fit: contain;
    filter: brightness(0) invert(1);
  }

  .pulse-ring {
    position: absolute;
    top: 50%;
    left: 50%;
    transform: translate(-50%, -50%);
    width: 100%;
    height: 100%;
    border-radius: 50%;
    animation: pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
    opacity: 0.5;
  }

  @keyframes pulse {
    0% {
      transform: translate(-50%, -50%) scale(1);
      opacity: 0.5;
    }
    100% {
      transform: translate(-50%, -50%) scale(var(--max-scale, 3));
      opacity: 0;
    }
  }
</style>
135
frontend/shared/StaticAudio.svelte
Normal file
@@ -0,0 +1,135 @@
<script lang="ts">
  import { Empty } from "@gradio/atoms";
  import {
    BlockLabel,
  } from "@gradio/atoms";
  import { Music } from "@gradio/icons";
  import type { I18nFormatter } from "@gradio/utils";
  import { createEventDispatcher } from "svelte";
  import { onMount } from "svelte";

  import { start, stop } from "./webrtc_utils";
  import AudioWave from "./AudioWave.svelte";

  export let value: string | null = null;
  export let label: string | undefined = undefined;
  export let show_label = true;
  export let rtc_configuration: Object | null = null;
  export let i18n: I18nFormatter;
  export let on_change_cb: (msg: "change" | "tick") => void;
  export let icon: string | undefined = undefined;
  export let icon_button_color: string = "var(--color-accent)";
  export let pulse_color: string = "var(--color-accent)";

  export let server: {
    offer: (body: any) => Promise<any>;
  };

  let stream_state: "open" | "closed" | "waiting" = "closed";
  let audio_player: HTMLAudioElement;
  let pc: RTCPeerConnection;
  let _webrtc_id = Math.random().toString(36).substring(2);

  const dispatch = createEventDispatcher<{
    tick: undefined;
    error: string
    play: undefined;
    stop: undefined;
  }>();

  onMount(() => {
    window.setInterval(() => {
      if (stream_state == "open") {
        dispatch("tick");
      }
    }, 1000);
  })

  async function start_stream(value: string): Promise<string> {
    if (value === "start_webrtc_stream") {
      stream_state = "waiting";
      _webrtc_id = Math.random().toString(36).substring(2)
      value = _webrtc_id;
      console.log("set value to ", value);
      pc = new RTCPeerConnection(rtc_configuration);
      pc.addEventListener("connectionstatechange",
        async (event) => {
          switch (pc.connectionState) {
            case "connected":
              console.info("connected");
              stream_state = "open";
              break;
            case "disconnected":
              console.info("closed");
              stop(pc);
              break;
            default:
              break;
          }
        }
      )
      let stream = null;
      start(stream, pc, audio_player, server.offer, _webrtc_id, "audio", on_change_cb).then((connection) => {
        pc = connection;
      }).catch(() => {
        console.info("catching")
        dispatch("error", "Too many concurrent users. Come back later!");
      });
    }
    return value;
  }

  $: start_stream(value).then((val) => {
    value = val;
  });
</script>

<BlockLabel
  {show_label}
  Icon={Music}
  float={false}
  label={label || i18n("audio.audio")}
/>
<audio
  class="standard-player"
  class:hidden={true}
  on:load
  bind:this={audio_player}
  on:ended={() => dispatch("stop")}
  on:play={() => dispatch("play")}
/>
{#if value !== "__webrtc_value__"}
  <div class="audio-container">
    <AudioWave audio_source_callback={() => audio_player.srcObject} {stream_state} {icon} {icon_button_color} {pulse_color}/>
  </div>
{/if}
{#if value === "__webrtc_value__"}
  <Empty size="small">
    <Music />
  </Empty>
{/if}

<style>
  .audio-container {
    display: flex;
    height: 100%;
    flex-direction: column;
    justify-content: center;
    align-items: center;
  }

  .standard-player {
    width: 100%;
  }

  .hidden {
    display: none;
  }
</style>
119
frontend/shared/StaticVideo.svelte
Normal file
@@ -0,0 +1,119 @@
<script lang="ts">
  import { createEventDispatcher, onMount} from "svelte";
  import {
    BlockLabel,
    Empty
  } from "@gradio/atoms";
  import { Video } from "@gradio/icons";

  import { start, stop } from "./webrtc_utils";

  export let value: string | null = null;
  export let label: string | undefined = undefined;
  export let show_label = true;
  export let rtc_configuration: Object | null = null;
  export let on_change_cb: (msg: "change" | "tick") => void;
  export let server: {
    offer: (body: any) => Promise<any>;
  };

  let video_element: HTMLVideoElement;

  let _webrtc_id = Math.random().toString(36).substring(2);

  let pc: RTCPeerConnection;

  const dispatch = createEventDispatcher<{
    error: string;
    tick: undefined;
  }>();

  let stream_state = "closed";

  onMount(() => {
    window.setInterval(() => {
      if (stream_state == "open") {
        dispatch("tick");
      }
    }, 1000);
  })

  $: if (value === "start_webrtc_stream") {
    _webrtc_id = Math.random().toString(36).substring(2);
    value = _webrtc_id;
    pc = new RTCPeerConnection(rtc_configuration);
    pc.addEventListener("connectionstatechange",
      async (event) => {
        switch (pc.connectionState) {
          case "connected":
            console.log("connected");
            stream_state = "open";
            break;
          case "disconnected":
            console.log("closed");
            stop(pc);
            break;
          default:
            break;
        }
      }
    )
    start(null, pc, video_element, server.offer, _webrtc_id, "video", on_change_cb).then((connection) => {
      pc = connection;
    }).catch(() => {
      console.log("catching")
      dispatch("error", "Too many concurrent users. Come back later!");
    });
  }
</script>

<BlockLabel {show_label} Icon={Video} label={label || "Video"} />

{#if value === "__webrtc_value__"}
  <Empty unpadded_box={true} size="large"><Video /></Empty>
{/if}
<div class="wrap">
  <video
    class:hidden={value === "__webrtc_value__"}
    bind:this={video_element}
    autoplay={true}
    on:loadeddata={dispatch.bind(null, "loadeddata")}
    on:click={dispatch.bind(null, "click")}
    on:play={dispatch.bind(null, "play")}
    on:pause={dispatch.bind(null, "pause")}
    on:ended={dispatch.bind(null, "ended")}
    on:mouseover={dispatch.bind(null, "mouseover")}
    on:mouseout={dispatch.bind(null, "mouseout")}
    on:focus={dispatch.bind(null, "focus")}
    on:blur={dispatch.bind(null, "blur")}
    on:load
    data-testid={$$props["data-testid"]}
    crossorigin="anonymous"
  >
    <track kind="captions" />
  </video>
</div>

<style>
  .hidden {
    display: none;
  }

  .wrap {
    position: relative;
    background-color: var(--background-fill-secondary);
    height: var(--size-full);
    width: var(--size-full);
    border-radius: var(--radius-xl);
  }
  .wrap :global(video) {
    height: var(--size-full);
    width: var(--size-full);
  }
</style>
434
frontend/shared/Webcam.svelte
Normal file
@@ -0,0 +1,434 @@
<script lang="ts">
  import { createEventDispatcher, onMount } from "svelte";
  import type { ComponentType } from "svelte";
  import {
    Circle,
    Square,
    DropdownArrow,
    Spinner,
    Microphone as Mic
  } from "@gradio/icons";
  import type { I18nFormatter } from "@gradio/utils";
  import { StreamingBar } from "@gradio/statustracker";
  import WebcamPermissions from "./WebcamPermissions.svelte";
  import { fade } from "svelte/transition";
  import {
    get_devices,
    get_video_stream,
    set_available_devices
  } from "./stream_utils";
  import { start, stop } from "./webrtc_utils";
  import PulsingIcon from "./PulsingIcon.svelte";

  let video_source: HTMLVideoElement;
  let available_video_devices: MediaDeviceInfo[] = [];
  let selected_device: MediaDeviceInfo | null = null;
  let _time_limit: number | null = null;
  export let time_limit: number | null = null;
  let stream_state: "open" | "waiting" | "closed" = "closed";
  export let on_change_cb: (msg: "tick" | "change") => void;
  export let mode: "send-receive" | "send";
  const _webrtc_id = Math.random().toString(36).substring(2);
  export let rtp_params: RTCRtpParameters = {} as RTCRtpParameters;
  export let icon: string | undefined | ComponentType = undefined;
  export let icon_button_color: string = "var(--color-accent)";
  export let pulse_color: string = "var(--color-accent)";
  export let button_labels: {start: string, stop: string, waiting: string};

  export const modify_stream: (state: "open" | "closed" | "waiting") => void = (
    state: "open" | "closed" | "waiting"
  ) => {
    if (state === "closed") {
      _time_limit = null;
      stream_state = "closed";
    } else if (state === "waiting") {
      stream_state = "waiting";
    } else {
      stream_state = "open";
    }
  };

  let canvas: HTMLCanvasElement;
  export let track_constraints: MediaTrackConstraints | null = null;
  export let rtc_configuration: Object;
  export let stream_every = 1;
  export let server: {
    offer: (body: any) => Promise<any>;
  };

  export let include_audio: boolean;
  export let i18n: I18nFormatter;

  const dispatch = createEventDispatcher<{
    tick: undefined;
    error: string;
    start_recording: undefined;
    stop_recording: undefined;
    close_stream: undefined;
  }>();

  onMount(() => (canvas = document.createElement("canvas")));

  const handle_device_change = async (event: InputEvent): Promise<void> => {
    const target = event.target as HTMLInputElement;
    const device_id = target.value;

    await get_video_stream(include_audio, video_source, device_id, track_constraints).then(
      async (local_stream) => {
        stream = local_stream;
        selected_device =
          available_video_devices.find(
            (device) => device.deviceId === device_id
          ) || null;
        options_open = false;
      }
    );
  };

  async function access_webcam(): Promise<void> {
    try {
      get_video_stream(include_audio, video_source, null, track_constraints)
        .then(async (local_stream) => {
          webcam_accessed = true;
          available_video_devices = await get_devices();
          stream = local_stream;
        })
        .then(() => set_available_devices(available_video_devices))
        .then((devices) => {
          available_video_devices = devices;

          const used_devices = stream
            .getTracks()
            .map((track) => track.getSettings()?.deviceId)[0];

          selected_device = used_devices
            ? devices.find((device) => device.deviceId === used_devices) ||
              available_video_devices[0]
            : available_video_devices[0];
        });

      if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
        dispatch("error", i18n("image.no_webcam_support"));
      }
    } catch (err) {
      if (err instanceof DOMException && err.name == "NotAllowedError") {
        dispatch("error", i18n("image.allow_webcam_access"));
      } else {
        throw err;
      }
    }
  }

  let recording = false;
  let stream: MediaStream;

  let webcam_accessed = false;
  let pc: RTCPeerConnection;
  export let webrtc_id;

  async function start_webrtc(): Promise<void> {
    if (stream_state === 'closed') {
      pc = new RTCPeerConnection(rtc_configuration);
      pc.addEventListener("connectionstatechange",
        async (event) => {
          switch (pc.connectionState) {
            case "connected":
              stream_state = "open";
              _time_limit = time_limit;
              break;
            case "disconnected":
              stream_state = "closed";
              _time_limit = null;
              stop(pc);
              await access_webcam();
              break;
            default:
              break;
          }
        }
      )
      stream_state = "waiting"
      webrtc_id = Math.random().toString(36).substring(2);
      start(stream, pc, mode === "send" ? null: video_source, server.offer, webrtc_id, "video", on_change_cb, rtp_params).then((connection) => {
        pc = connection;
      }).catch(() => {
        console.info("catching")
        stream_state = "closed";
        dispatch("error", "Too many concurrent users. Come back later!");
      });
    } else {
      stop(pc);
      stream_state = "closed";
      _time_limit = null;
      await access_webcam();
    }
  }

  let options_open = false;

  export function click_outside(node: Node, cb: any): any {
    const handle_click = (event: MouseEvent): void => {
      if (
        node &&
        !node.contains(event.target as Node) &&
        !event.defaultPrevented
      ) {
        cb(event);
      }
    };

    document.addEventListener("click", handle_click, true);

    return {
      destroy() {
        document.removeEventListener("click", handle_click, true);
      }
    };
  }

  function handle_click_outside(event: MouseEvent): void {
    event.preventDefault();
    event.stopPropagation();
    options_open = false;
  }

  const audio_source_callback = () => video_source.srcObject as MediaStream;
</script>

<div class="wrap">
  <StreamingBar time_limit={_time_limit} />
  {#if stream_state === "open" && include_audio}
    <div class="audio-indicator">
      <PulsingIcon
        stream_state={stream_state}
        audio_source_callback={audio_source_callback}
        icon={icon || Mic}
        icon_button_color={icon_button_color}
        pulse_color={pulse_color}
      />
    </div>
  {/if}
  <!-- svelte-ignore a11y-media-has-caption -->
  <!-- need to suppress for video streaming https://github.com/sveltejs/svelte/issues/5967 -->
  <video
    bind:this={video_source}
    class:hide={!webcam_accessed}
    class:flip={(stream_state != "open") || (stream_state === "open" && include_audio)}
    autoplay={true}
    playsinline={true}
  />
  <!-- svelte-ignore a11y-missing-attribute -->
  {#if !webcam_accessed}
    <div
      in:fade={{ delay: 100, duration: 200 }}
      title="grant webcam access"
      style="height: 100%"
    >
      <WebcamPermissions on:click={async () => access_webcam()} />
    </div>
  {:else}
    <div class="button-wrap">
      <button
        on:click={start_webrtc}
        aria-label={"start stream"}
      >
        {#if stream_state === "waiting"}
          <div class="icon-with-text">
            <div class="icon color-primary" title="spinner">
              <Spinner />
            </div>
            {button_labels.waiting || i18n("audio.waiting")}
          </div>
        {:else if stream_state === "open"}
          <div class="icon-with-text">
            <div class="icon color-primary" title="stop recording">
              <Square />
            </div>
            {button_labels.stop || i18n("audio.stop")}
          </div>
        {:else}
          <div class="icon-with-text">
            <div class="icon color-primary" title="start recording">
              <Circle />
            </div>
            {button_labels.start || i18n("audio.record")}
          </div>
        {/if}
      </button>
      {#if !recording}
        <button
          class="icon"
          on:click={() => (options_open = true)}
          aria-label="select input source"
        >
          <DropdownArrow />
        </button>
      {/if}
    </div>
    {#if options_open && selected_device}
      <select
        class="select-wrap"
        aria-label="select source"
        use:click_outside={handle_click_outside}
        on:change={handle_device_change}
      >
        <button
          class="inset-icon"
          on:click|stopPropagation={() => (options_open = false)}
        >
          <DropdownArrow />
        </button>
        {#if available_video_devices.length === 0}
          <option value="">{i18n("common.no_devices")}</option>
        {:else}
          {#each available_video_devices as device}
            <option
              value={device.deviceId}
              selected={selected_device.deviceId === device.deviceId}
            >
              {device.label}
            </option>
          {/each}
        {/if}
      </select>
    {/if}
  {/if}
</div>

<style>
  .wrap {
    position: relative;
    width: var(--size-full);
    height: var(--size-full);
  }

  .hide {
    display: none;
  }

  video {
    width: var(--size-full);
    height: var(--size-full);
    object-fit: cover;
  }

  .button-wrap {
    position: absolute;
    background-color: var(--block-background-fill);
    border: 1px solid var(--border-color-primary);
    border-radius: var(--radius-xl);
    padding: var(--size-1-5);
    display: flex;
    bottom: var(--size-2);
    left: 50%;
    transform: translate(-50%, 0);
    box-shadow: var(--shadow-drop-lg);
    border-radius: var(--radius-xl);
    line-height: var(--size-3);
    color: var(--button-secondary-text-color);
  }

  .icon-with-text {
    min-width: var(--size-16);
    align-items: center;
    margin: 0 var(--spacing-xl);
    display: flex;
    justify-content: space-evenly;
    /* Add gap between icon and text */
    gap: var(--size-2);
  }

  .audio-indicator {
    position: absolute;
    top: var(--size-2);
    right: var(--size-2);
    z-index: var(--layer-2);
    height: var(--size-5);
    width: var(--size-5);
  }

  @media (--screen-md) {
    button {
      bottom: var(--size-4);
    }
  }

  @media (--screen-xl) {
    button {
      bottom: var(--size-8);
    }
  }

  .icon {
    width: 18px;
    height: 18px;
    display: flex;
    justify-content: space-between;
    align-items: center;
  }

  .color-primary {
    fill: var(--primary-600);
    stroke: var(--primary-600);
    color: var(--primary-600);
  }

  .flip {
    transform: scaleX(-1);
  }

  .select-wrap {
    -webkit-appearance: none;
    -moz-appearance: none;
    appearance: none;
    color: var(--button-secondary-text-color);
    background-color: transparent;
    width: 95%;
    font-size: var(--text-md);
    position: absolute;
    bottom: var(--size-2);
    background-color: var(--block-background-fill);
    box-shadow: var(--shadow-drop-lg);
    border-radius: var(--radius-xl);
    z-index: var(--layer-top);
    border: 1px solid var(--border-color-primary);
    text-align: left;
    line-height: var(--size-4);
    white-space: nowrap;
    text-overflow: ellipsis;
    left: 50%;
    transform: translate(-50%, 0);
    max-width: var(--size-52);
  }

  .select-wrap > option {
    padding: 0.25rem 0.5rem;
    border-bottom: 1px solid var(--border-color-accent);
    padding-right: var(--size-8);
    text-overflow: ellipsis;
    overflow: hidden;
  }

  .select-wrap > option:hover {
    background-color: var(--color-accent);
  }

  .select-wrap > option:last-child {
    border: none;
  }

  .inset-icon {
    position: absolute;
    top: 5px;
    right: -6.5px;
    width: var(--size-10);
    height: var(--size-5);
    opacity: 0.8;
  }

  @media (--screen-md) {
    .wrap {
      font-size: var(--text-lg);
    }
  }
</style>
49
frontend/shared/WebcamPermissions.svelte
Normal file
@@ -0,0 +1,49 @@
<script lang="ts">
  import { Webcam } from "@gradio/icons";
  import { createEventDispatcher } from "svelte";

  export let icon = Webcam;
  $: text = icon === Webcam ? "Click to Access Webcam" : "Click to Access Microphone";

  const dispatch = createEventDispatcher<{
    click: undefined;
  }>();
</script>

<button style:height="100%" on:click={() => dispatch("click")}>
  <div class="wrap">
    <span class="icon-wrap">
      <svelte:component this={icon} />
    </span>
    {text}
  </div>
</button>

<style>
  button {
    cursor: pointer;
    width: var(--size-full);
  }

  .wrap {
    display: flex;
    flex-direction: column;
    justify-content: center;
    align-items: center;
    min-height: var(--size-60);
    color: var(--block-label-text-color);
    height: 100%;
    padding-top: var(--size-3);
  }

  .icon-wrap {
    width: 30px;
    margin-bottom: var(--spacing-lg);
  }

  @media (--screen-md) {
    .wrap {
      font-size: var(--text-lg);
    }
  }
</style>
1
frontend/shared/index.ts
Normal file
@@ -0,0 +1 @@
export { default as Video } from "./Video.svelte";
53
frontend/shared/stream_utils.ts
Normal file
@@ -0,0 +1,53 @@
export function get_devices(): Promise<MediaDeviceInfo[]> {
  return navigator.mediaDevices.enumerateDevices();
}

export function handle_error(error: string): void {
  throw new Error(error);
}

export function set_local_stream(
  local_stream: MediaStream | null,
  video_source: HTMLVideoElement,
): void {
  video_source.srcObject = local_stream;
  video_source.muted = true;
  video_source.play();
}

export async function get_video_stream(
  include_audio: boolean,
  video_source: HTMLVideoElement,
  device_id?: string,
  track_constraints?: MediaTrackConstraints,
): Promise<MediaStream> {
  const fallback_constraints = track_constraints || {
    width: { ideal: 500 },
    height: { ideal: 500 },
  };

  const constraints = {
    video: device_id
      ? { deviceId: { exact: device_id }, ...fallback_constraints }
      : fallback_constraints,
    audio: include_audio,
  };

  return navigator.mediaDevices
    .getUserMedia(constraints)
    .then((local_stream: MediaStream) => {
      set_local_stream(local_stream, video_source);
      return local_stream;
    });
}

export function set_available_devices(
  devices: MediaDeviceInfo[],
  kind: "videoinput" | "audioinput" = "videoinput",
): MediaDeviceInfo[] {
  const cameras = devices.filter(
    (device: MediaDeviceInfo) => device.kind === kind,
  );

  return cameras;
}
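For context, a minimal sketch (not part of this commit) of how get_devices and set_available_devices might be combined to list microphones; the helper name is hypothetical, and it assumes getUserMedia has already been granted so device labels are populated:

import { get_devices, set_available_devices } from "./stream_utils";

// Hypothetical helper: enumerate devices and keep only audio inputs.
// set_available_devices defaults to "videoinput" when the kind is omitted.
async function list_microphones(): Promise<MediaDeviceInfo[]> {
  const devices = await get_devices();
  return set_available_devices(devices, "audioinput");
}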
146
frontend/shared/utils.ts
Normal file
@@ -0,0 +1,146 @@
import { toBlobURL } from "@ffmpeg/util";
import { FFmpeg } from "@ffmpeg/ffmpeg";
import { lookup } from "mrmime";

export const prettyBytes = (bytes: number): string => {
  let units = ["B", "KB", "MB", "GB", "PB"];
  let i = 0;
  while (bytes > 1024) {
    bytes /= 1024;
    i++;
  }
  let unit = units[i];
  return bytes.toFixed(1) + " " + unit;
};

export const playable = (): boolean => {
  // TODO: Fix this
  // let video_element = document.createElement("video");
  // let mime_type = mime.lookup(filename);
  // return video_element.canPlayType(mime_type) != "";
  return true; // FIX BEFORE COMMIT - mime import causing issues
};

export function loaded(
  node: HTMLVideoElement,
  { autoplay }: { autoplay: boolean },
): any {
  async function handle_playback(): Promise<void> {
    if (!autoplay) return;
    await node.play();
  }

  node.addEventListener("loadeddata", handle_playback);

  return {
    destroy(): void {
      node.removeEventListener("loadeddata", handle_playback);
    },
  };
}

export default async function loadFfmpeg(): Promise<FFmpeg> {
  const ffmpeg = new FFmpeg();
  const baseURL = "https://unpkg.com/@ffmpeg/core@0.12.4/dist/esm";

  await ffmpeg.load({
    coreURL: await toBlobURL(`${baseURL}/ffmpeg-core.js`, "text/javascript"),
    wasmURL: await toBlobURL(`${baseURL}/ffmpeg-core.wasm`, "application/wasm"),
  });

  return ffmpeg;
}

export function blob_to_data_url(blob: Blob): Promise<string> {
  return new Promise((fulfill, reject) => {
    let reader = new FileReader();
    reader.onerror = reject;
    reader.onload = () => fulfill(reader.result as string);
    reader.readAsDataURL(blob);
  });
}

export async function trimVideo(
  ffmpeg: FFmpeg,
  startTime: number,
  endTime: number,
  videoElement: HTMLVideoElement,
): Promise<any> {
  const videoUrl = videoElement.src;
  const mimeType = lookup(videoElement.src) || "video/mp4";
  const blobUrl = await toBlobURL(videoUrl, mimeType);
  const response = await fetch(blobUrl);
  const vidBlob = await response.blob();
  const type = getVideoExtensionFromMimeType(mimeType) || "mp4";
  const inputName = `input.${type}`;
  const outputName = `output.${type}`;

  try {
    if (startTime === 0 && endTime === 0) {
      return vidBlob;
    }

    await ffmpeg.writeFile(
      inputName,
      new Uint8Array(await vidBlob.arrayBuffer()),
    );

    let command = [
      "-i",
      inputName,
      ...(startTime !== 0 ? ["-ss", startTime.toString()] : []),
      ...(endTime !== 0 ? ["-to", endTime.toString()] : []),
      "-c:a",
      "copy",
      outputName,
    ];

    await ffmpeg.exec(command);
    const outputData = await ffmpeg.readFile(outputName);
    const outputBlob = new Blob([outputData], {
      type: `video/${type}`,
    });

    return outputBlob;
  } catch (error) {
    console.error("Error initializing FFmpeg:", error);
    return vidBlob;
  }
}

const getVideoExtensionFromMimeType = (mimeType: string): string | null => {
  const videoMimeToExtensionMap: { [key: string]: string } = {
    "video/mp4": "mp4",
    "video/webm": "webm",
    "video/ogg": "ogv",
    "video/quicktime": "mov",
    "video/x-msvideo": "avi",
    "video/x-matroska": "mkv",
    "video/mpeg": "mpeg",
    "video/3gpp": "3gp",
    "video/3gpp2": "3g2",
    "video/h261": "h261",
    "video/h263": "h263",
    "video/h264": "h264",
    "video/jpeg": "jpgv",
    "video/jpm": "jpm",
    "video/mj2": "mj2",
    "video/mpv": "mpv",
    "video/vnd.ms-playready.media.pyv": "pyv",
    "video/vnd.uvvu.mp4": "uvu",
    "video/vnd.vivo": "viv",
    "video/x-f4v": "f4v",
    "video/x-fli": "fli",
    "video/x-flv": "flv",
    "video/x-m4v": "m4v",
    "video/x-ms-asf": "asf",
    "video/x-ms-wm": "wm",
    "video/x-ms-wmv": "wmv",
    "video/x-ms-wmx": "wmx",
    "video/x-ms-wvx": "wvx",
    "video/x-sgi-movie": "movie",
    "video/x-smv": "smv",
  };

  return videoMimeToExtensionMap[mimeType] || null;
};
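As a point of reference, a small usage sketch (illustrative only, not part of the commit) showing how loadFfmpeg, trimVideo, prettyBytes, and blob_to_data_url fit together; the trim bounds and function name are assumptions:

import loadFfmpeg, { trimVideo, prettyBytes, blob_to_data_url } from "./utils";

// Hypothetical usage: cut the first 5 seconds off a <video> element's source
// and report the trimmed size as a human-readable string.
async function trim_and_report(video: HTMLVideoElement): Promise<string> {
  const ffmpeg = await loadFfmpeg();                 // fetches the ffmpeg core from unpkg
  const clip = await trimVideo(ffmpeg, 5, 0, video); // start at 5s, keep through the end
  console.log("trimmed size:", prettyBytes(clip.size));
  return blob_to_data_url(clip);                     // data: URL for previewing the result
}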
184
frontend/shared/webrtc_utils.ts
Normal file
@@ -0,0 +1,184 @@
export function createPeerConnection(pc, node) {
  // register some listeners to help debugging
  pc.addEventListener(
    "icegatheringstatechange",
    () => {
      console.debug(pc.iceGatheringState);
    },
    false,
  );

  pc.addEventListener(
    "iceconnectionstatechange",
    () => {
      console.debug(pc.iceConnectionState);
    },
    false,
  );

  pc.addEventListener(
    "signalingstatechange",
    () => {
      console.debug(pc.signalingState);
    },
    false,
  );

  // connect audio / video from server to local
  pc.addEventListener("track", (evt) => {
    console.debug("track event listener");
    if (node && node.srcObject !== evt.streams[0]) {
      console.debug("streams", evt.streams);
      node.srcObject = evt.streams[0];
      console.debug("node.srcObject", node.srcObject);
      if (evt.track.kind === "audio") {
        node.volume = 1.0; // Ensure volume is up
        node.muted = false;
        node.autoplay = true;
        // Attempt to play (needed for some browsers)
        node.play().catch((e) => console.debug("Autoplay failed:", e));
      }
    }
  });

  return pc;
}

export async function start(
  stream,
  pc: RTCPeerConnection,
  node,
  server_fn,
  webrtc_id,
  modality: "video" | "audio" = "video",
  on_change_cb: (msg: "change" | "tick") => void = () => {},
  rtp_params = {},
) {
  pc = createPeerConnection(pc, node);
  const data_channel = pc.createDataChannel("text");

  data_channel.onopen = () => {
    console.debug("Data channel is open");
    data_channel.send("handshake");
  };

  data_channel.onmessage = (event) => {
    console.debug("Received message:", event.data);
    let event_json;
    try {
      event_json = JSON.parse(event.data);
    } catch (e) {
      console.debug("Error parsing JSON");
    }
    console.log("event_json", event_json);
    if (
      event.data === "change" ||
      event.data === "tick" ||
      event.data === "stopword" ||
      event_json?.type === "warning" ||
      event_json?.type === "error"
    ) {
      console.debug(`${event.data} event received`);
      on_change_cb(event_json ?? event.data);
    }
  };

  if (stream) {
    stream.getTracks().forEach(async (track) => {
      console.debug("Track stream callback", track);
      const sender = pc.addTrack(track, stream);
      const params = sender.getParameters();
      const updated_params = { ...params, ...rtp_params };
      await sender.setParameters(updated_params);
      console.debug("sender params", sender.getParameters());
    });
  } else {
    console.debug("Creating transceiver!");
    pc.addTransceiver(modality, { direction: "recvonly" });
  }

  await negotiate(pc, server_fn, webrtc_id);
  return pc;
}

function make_offer(server_fn: any, body): Promise<object> {
  return new Promise((resolve, reject) => {
    server_fn(body).then((data) => {
      console.debug("data", data);
      if (data?.status === "failed") {
        console.debug("rejecting");
        reject("error");
      }
      resolve(data);
    });
  });
}

async function negotiate(
  pc: RTCPeerConnection,
  server_fn: any,
  webrtc_id: string,
): Promise<void> {
  return pc
    .createOffer()
    .then((offer) => {
      return pc.setLocalDescription(offer);
    })
    .then(() => {
      // wait for ICE gathering to complete
      return new Promise<void>((resolve) => {
        console.debug("ice gathering state", pc.iceGatheringState);
        if (pc.iceGatheringState === "complete") {
          resolve();
        } else {
          const checkState = () => {
            if (pc.iceGatheringState === "complete") {
              console.debug("ice complete");
              pc.removeEventListener("icegatheringstatechange", checkState);
              resolve();
            }
          };
          pc.addEventListener("icegatheringstatechange", checkState);
        }
      });
    })
    .then(() => {
      var offer = pc.localDescription;
      return make_offer(server_fn, {
        sdp: offer.sdp,
        type: offer.type,
        webrtc_id: webrtc_id,
      });
    })
    .then((response) => {
      return response;
    })
    .then((answer) => {
      return pc.setRemoteDescription(answer);
    });
}

export function stop(pc: RTCPeerConnection) {
  console.debug("Stopping peer connection");
  // close transceivers
  if (pc.getTransceivers) {
    pc.getTransceivers().forEach((transceiver) => {
      if (transceiver.stop) {
        transceiver.stop();
      }
    });
  }

  // close local audio / video
  if (pc.getSenders()) {
    pc.getSenders().forEach((sender) => {
      console.log("sender", sender);
      if (sender.track && sender.track.stop) sender.track.stop();
    });
  }

  // close peer connection
  setTimeout(() => {
    pc.close();
  }, 500);
}
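To round out the picture, a hedged sketch (not part of this commit) of the calling pattern the components above follow when driving start and stop; the offer endpoint, element, and helper name are placeholders:

import { start, stop } from "./webrtc_utils";

// Hypothetical wiring: send a microphone track to the server and play the reply
// in an <audio> element. `offer` stands in for a component's server.offer binding.
async function run_audio_session(
  offer: (body: any) => Promise<any>,
  audio_el: HTMLAudioElement
): Promise<RTCPeerConnection> {
  const local = await navigator.mediaDevices.getUserMedia({ audio: true });
  let pc = new RTCPeerConnection();
  const webrtc_id = Math.random().toString(36).substring(2);
  // start() attaches the local tracks, opens the "text" data channel, and negotiates.
  pc = await start(local, pc, audio_el, offer, webrtc_id, "audio");
  return pc;
}

// Later, stop(pc) halts senders/transceivers and closes the connection after 500 ms.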