Audio in only (#15)

* Audio + Video / test Audio

* Add code

* Fix demo

* Support additional inputs

* Add code

* Add code
Freddy Boulton
2024-10-30 13:08:09 -04:00
committed by GitHub
parent 2068b91854
commit 3bf4a437fb
29 changed files with 1613 additions and 416 deletions
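The heart of this change is a new send-only mode plus a richer change callback. A minimal TypeScript sketch of the updated prop contract, with a stand-in dispatcher in place of the component's `gradio.dispatch` (types are copied from the Index.svelte diff below):

// "send" is new: the client streams audio/video out and receives nothing back.
type Mode = "send-receive" | "receive" | "send";
type Modality = "video" | "audio";

// on_change_cb now reports which data-channel message fired.
type OnChangeCb = (msg: "change" | "tick") => void;

// Stand-in for gradio.dispatch, for illustration only.
const dispatch = (event: string): void => console.debug("dispatch:", event);

const on_change_cb: OnChangeCb = (msg) =>
  dispatch(msg === "change" ? "state_change" : "tick");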

View File

@@ -31,11 +31,11 @@
export let rtc_configuration: Object;
export let time_limit: number | null = null;
export let modality: "video" | "audio" = "video";
export let mode: "send-receive" | "receive" = "send-receive";
export let mode: "send-receive" | "receive" | "send" = "send-receive";
export let track_constraints: MediaTrackConstraints = {};
const on_change_cb = () => {
gradio.dispatch("state_change");
const on_change_cb = (msg: "change" | "tick") => {
gradio.dispatch(msg === "change" ? "state_change" : "tick");
}
let dragging = false;
@@ -87,7 +87,7 @@
on:tick={() => gradio.dispatch("tick")}
on:error={({ detail }) => gradio.dispatch("error", detail)}
/>
{:else if mode === "send-receive" && modality === "video"}
{:else if (mode === "send-receive" || mode == "send") && modality === "video"}
<Video
bind:value={value}
{label}
@@ -97,6 +97,7 @@
{server}
{rtc_configuration}
{time_limit}
{mode}
{on_change_cb}
on:clear={() => gradio.dispatch("clear")}
on:play={() => gradio.dispatch("play")}
@@ -113,7 +114,7 @@
>
<UploadText i18n={gradio.i18n} type="video" />
</Video>
{:else if mode === "send-receive" && modality === "audio"}
{:else if (mode === "send-receive" || mode === "send") && modality === "audio"}
<InteractiveAudio
bind:value={value}
{on_change_cb}
@@ -123,6 +124,7 @@
{rtc_configuration}
{time_limit}
{track_constraints}
{mode}
i18n={gradio.i18n}
on:tick={() => gradio.dispatch("tick")}
on:error={({ detail }) => gradio.dispatch("error", detail)}

View File

@@ -6,4 +6,4 @@ export default {
build: {
target: "modules",
},
};
};

View File

@@ -24,7 +24,8 @@
"mrmime": "^2.0.0"
},
"devDependencies": {
"@gradio/preview": "0.12.0"
"@gradio/preview": "0.12.0",
"prettier": "3.3.3"
},
"peerDependencies": {
"svelte": "^4.0.0"
@@ -4112,6 +4113,21 @@
"node": "^10 || ^12 || >=14"
}
},
"node_modules/prettier": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz",
"integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==",
"dev": true,
"bin": {
"prettier": "bin/prettier.cjs"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
},
"node_modules/prismjs": {
"version": "1.29.0",
"resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz",

View File

@@ -22,7 +22,8 @@
"mrmime": "^2.0.0"
},
"devDependencies": {
"@gradio/preview": "0.12.0"
"@gradio/preview": "0.12.0",
"prettier": "3.3.3"
},
"exports": {
"./package.json": "./package.json",

View File

@@ -3,13 +3,12 @@
export let numBars = 16;
export let stream_state: "open" | "closed" | "waiting" = "closed";
export let audio_source: HTMLAudioElement;
export let audio_source_callback: () => MediaStream;
let audioContext: AudioContext;
let analyser: AnalyserNode;
let dataArray: Uint8Array;
let animationId: number;
let is_muted = false;
$: containerWidth = `calc((var(--boxSize) + var(--gutter)) * ${numBars})`;
@@ -27,7 +26,7 @@
function setupAudioContext() {
audioContext = new (window.AudioContext || window.webkitAudioContext)();
analyser = audioContext.createAnalyser();
const source = audioContext.createMediaStreamSource(audio_source.srcObject);
const source = audioContext.createMediaStreamSource(audio_source_callback());
// Only connect to analyser, not to destination
source.connect(analyser);
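Swapping the `audio_source` element for an `audio_source_callback` lets the wave visualizer run in send mode, where no playback element exists: the `MediaStream` is resolved only once the analyser is actually set up. A self-contained sketch of the pattern (names are illustrative, not the component's API):

// Wire an AnalyserNode to whatever MediaStream the callback yields at setup time.
function setupAnalyser(audio_source_callback: () => MediaStream): AnalyserNode {
  const ctx = new AudioContext();
  const analyser = ctx.createAnalyser();
  // Lazy resolution: the mic stream in "send" mode, or the remote
  // <audio> element's srcObject in "send-receive" mode.
  const source = ctx.createMediaStreamSource(audio_source_callback());
  source.connect(analyser); // analyser only; connecting to ctx.destination would echo
  return analyser;
}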

View File

@@ -5,19 +5,25 @@
import type { I18nFormatter } from "@gradio/utils";
import { createEventDispatcher } from "svelte";
import { onMount } from "svelte";
import { fade } from "svelte/transition";
import { StreamingBar } from "@gradio/statustracker";
import {
Circle,
Square,
Spinner,
Music
Music,
DropdownArrow,
Microphone
} from "@gradio/icons";
import { start, stop } from "./webrtc_utils";
import { get_devices, set_available_devices } from "./stream_utils";
import AudioWave from "./AudioWave.svelte";
import WebcamPermissions from "./WebcamPermissions.svelte";
export let mode: "send-receive" | "send";
export let value: string | null = null;
export let label: string | undefined = undefined;
export let show_label = true;
@@ -25,7 +31,9 @@
export let i18n: I18nFormatter;
export let time_limit: number | null = null;
export let track_constraints: MediaTrackConstraints = {};
export let on_change_cb: () => void;
export let on_change_cb: (msg: "tick" | "change") => void;
let options_open = false;
let _time_limit: number | null = null;
@@ -37,6 +45,16 @@
let audio_player: HTMLAudioElement;
let pc: RTCPeerConnection;
let _webrtc_id = null;
let stream: MediaStream;
let available_audio_devices: MediaDeviceInfo[];
let selected_device: MediaDeviceInfo | null = null;
let mic_accessed = false;
const audio_source_callback = (): MediaStream => {
console.debug("stream in callback", stream);
if (mode === "send") return stream;
return audio_player.srcObject as MediaStream;
};
const dispatch = createEventDispatcher<{
@@ -48,22 +66,41 @@
}>();
onMount(() => {
window.setInterval(() => {
if (stream_state == "open") {
dispatch("tick");
async function access_mic(): Promise<void> {
try {
const constraints = selected_device ? { deviceId: { exact: selected_device.deviceId }, ...track_constraints } : track_constraints;
const stream_ = await navigator.mediaDevices.getUserMedia({ audio: constraints });
stream = stream_;
} catch (err) {
if (!navigator.mediaDevices) {
dispatch("error", i18n("audio.no_device_support"));
return;
}
}, 1000);
if (err instanceof DOMException && err.name === "NotAllowedError") {
dispatch("error", i18n("audio.allow_recording_access"));
return;
}
throw err;
}
)
available_audio_devices = set_available_devices(await get_devices(), "audioinput");
mic_accessed = true;
const used_devices = stream
.getTracks()
.map((track) => track.getSettings()?.deviceId)[0];
selected_device = used_devices
? available_audio_devices.find((device) => device.deviceId === used_devices) ||
available_audio_devices[0]
: available_audio_devices[0];
}
async function start_stream(): Promise<void> {
if( stream_state === "open"){
stop(pc);
stream_state = "closed";
_time_limit = null;
await access_mic();
return;
}
_webrtc_id = Math.random().toString(36).substring(2);
@@ -89,10 +126,10 @@
}
)
stream_state = "waiting"
let stream = null
stream = null
try {
stream = await navigator.mediaDevices.getUserMedia({ audio: track_constraints });
await access_mic();
} catch (err) {
if (!navigator.mediaDevices) {
dispatch("error", i18n("audio.no_device_support"));
@@ -106,13 +143,51 @@
}
if (stream == null) return;
start(stream, pc, audio_player, server.offer, _webrtc_id, "audio", on_change_cb).then((connection) => {
start(stream, pc, mode === "send" ? null: audio_player, server.offer, _webrtc_id, "audio", on_change_cb).then((connection) => {
pc = connection;
}).catch(() => {
console.info("catching")
dispatch("error", "Too many concurrent users. Come back later!");
});
}
function handle_click_outside(event: MouseEvent): void {
event.preventDefault();
event.stopPropagation();
options_open = false;
}
function click_outside(node: Node, cb: any): any {
const handle_click = (event: MouseEvent): void => {
if (
node &&
!node.contains(event.target as Node) &&
!event.defaultPrevented
) {
cb(event);
}
};
document.addEventListener("click", handle_click, true);
return {
destroy() {
document.removeEventListener("click", handle_click, true);
}
};
}
const handle_device_change = async (event: InputEvent): Promise<void> => {
const target = event.target as HTMLInputElement;
const device_id = target.value;
stream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId: { exact: device_id }, ...track_constraints } });
selected_device =
available_audio_devices.find(
(device) => device.deviceId === device_id
) || null;
options_open = false;
};
@@ -133,37 +208,83 @@
on:ended={() => dispatch("stop")}
on:play={() => dispatch("play")}
/>
<AudioWave audio_source={audio_player} {stream_state}/>
<StreamingBar time_limit={_time_limit} />
<div class="button-wrap">
<button
on:click={start_stream}
aria-label={"start stream"}
{#if !mic_accessed}
<div
in:fade={{ delay: 100, duration: 200 }}
title="grant webcam access"
style="height: 100%"
>
{#if stream_state === "waiting"}
<div class="icon-with-text" style="width:var(--size-24);">
<div class="icon color-primary" title="spinner">
<Spinner />
<WebcamPermissions icon={Microphone} on:click={async () => access_mic()} />
</div>
{:else}
<AudioWave {audio_source_callback} {stream_state}/>
<StreamingBar time_limit={_time_limit} />
<div class="button-wrap">
<button
on:click={start_stream}
aria-label={"start stream"}
>
{#if stream_state === "waiting"}
<div class="icon-with-text" style="width:var(--size-24);">
<div class="icon color-primary" title="spinner">
<Spinner />
</div>
{i18n("audio.waiting")}
</div>
{i18n("audio.waiting")}
</div>
{:else if stream_state === "open"}
<div class="icon-with-text">
<div class="icon color-primary" title="stop recording">
<Square />
{:else if stream_state === "open"}
<div class="icon-with-text">
<div class="icon color-primary" title="stop recording">
<Square />
</div>
{i18n("audio.stop")}
</div>
{i18n("audio.stop")}
</div>
{:else}
<div class="icon-with-text">
<div class="icon color-primary" title="start recording">
<Circle />
{:else}
<div class="icon-with-text">
<div class="icon color-primary" title="start recording">
<Circle />
</div>
{i18n("audio.record")}
</div>
{i18n("audio.record")}
</div>
{/if}
</button>
{#if stream_state === "closed"}
<button
class="icon"
on:click={() => (options_open = true)}
aria-label="select input source"
>
<DropdownArrow />
</button>
{/if}
</button>
</div>
{#if options_open && selected_device}
<select
class="select-wrap"
aria-label="select source"
use:click_outside={handle_click_outside}
on:change={handle_device_change}
>
<button
class="inset-icon"
on:click|stopPropagation={() => (options_open = false)}
>
<DropdownArrow />
</button>
{#if available_audio_devices.length === 0}
<option value="">{i18n("common.no_devices")}</option>
{:else}
{#each available_audio_devices as device}
<option
value={device.deviceId}
selected={selected_device.deviceId === device.deviceId}
>
{device.label}
</option>
{/each}
{/if}
</select>
{/if}
</div>
{/if}
</div>
<style>
@@ -239,4 +360,44 @@
stroke: var(--primary-600);
color: var(--primary-600);
}
.select-wrap {
-webkit-appearance: none;
-moz-appearance: none;
appearance: none;
color: var(--button-secondary-text-color);
background-color: transparent;
width: 95%;
font-size: var(--text-md);
position: absolute;
bottom: var(--size-2);
background-color: var(--block-background-fill);
box-shadow: var(--shadow-drop-lg);
border-radius: var(--radius-xl);
z-index: var(--layer-top);
border: 1px solid var(--border-color-primary);
text-align: left;
line-height: var(--size-4);
white-space: nowrap;
text-overflow: ellipsis;
left: 50%;
transform: translate(-50%, 0);
max-width: var(--size-52);
}
.select-wrap > option {
padding: 0.25rem 0.5rem;
border-bottom: 1px solid var(--border-color-accent);
padding-right: var(--size-8);
text-overflow: ellipsis;
overflow: hidden;
}
.select-wrap > option:hover {
background-color: var(--color-accent);
}
.select-wrap > option:last-child {
border: none;
}
</style>
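The new device picker reduces to re-running `getUserMedia` with an exact `deviceId` constraint and re-enumerating inputs. A standalone sketch of that flow (helper names are assumed; the component's real handler also merges `track_constraints`):

// Request one specific microphone; `exact` rejects if the device disappeared.
async function pickMicrophone(
  device_id: string,
  extra: MediaTrackConstraints = {},
): Promise<MediaStream> {
  return navigator.mediaDevices.getUserMedia({
    audio: { deviceId: { exact: device_id }, ...extra },
  });
}

// Device labels are only populated after the user grants mic access once.
async function listMicrophones(): Promise<MediaDeviceInfo[]> {
  const devices = await navigator.mediaDevices.enumerateDevices();
  return devices.filter((d) => d.kind === "audioinput");
}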

View File

@@ -21,7 +21,8 @@
};
export let rtc_configuration: Object;
export let track_constraints: MediaTrackConstraints = {};
export let on_change_cb: () => void;
export let mode: "send" | "send-receive";
export let on_change_cb: (msg: "change" | "tick") => void;
const dispatch = createEventDispatcher<{
change: FileData | null;
@@ -51,6 +52,7 @@
{include_audio}
{time_limit}
{track_constraints}
{mode}
{on_change_cb}
on:error
on:start_recording

View File

@@ -17,7 +17,7 @@
export let show_label = true;
export let rtc_configuration: Object | null = null;
export let i18n: I18nFormatter;
export let on_change_cb: () => void;
export let on_change_cb: (msg: "change" | "tick") => void;
export let server: {
offer: (body: any) => Promise<any>;
@@ -103,7 +103,7 @@
/>
{#if value !== "__webrtc_value__"}
<div class="audio-container">
<AudioWave audio_source={audio_player} {stream_state}/>
<AudioWave audio_source_callback={() => audio_player.srcObject} {stream_state}/>
</div>
{/if}
{#if value === "__webrtc_value__"}

View File

@@ -13,7 +13,7 @@
export let label: string | undefined = undefined;
export let show_label = true;
export let rtc_configuration: Object | null = null;
export let on_change_cb: () => void;
export let on_change_cb: (msg: "change" | "tick") => void;
export let server: {
offer: (body: any) => Promise<any>;
};

View File

@@ -24,9 +24,12 @@
let _time_limit: number | null = null;
export let time_limit: number | null = null;
let stream_state: "open" | "waiting" | "closed" = "closed";
export let on_change_cb: () => void;
export let on_change_cb: (msg: "tick" | "change") => void;
export let mode: "send-receive" | "send";
const _webrtc_id = Math.random().toString(36).substring(2);
console.log("mode", mode);
export const modify_stream: (state: "open" | "closed" | "waiting") => void = (
state: "open" | "closed" | "waiting"
) => {
@@ -131,6 +134,7 @@
case "disconnected":
stream_state = "closed";
_time_limit = null;
stop(pc);
await access_webcam();
break;
default:
@@ -140,7 +144,7 @@
)
stream_state = "waiting"
webrtc_id = Math.random().toString(36).substring(2);
start(stream, pc, video_source, server.offer, webrtc_id, "video", on_change_cb).then((connection) => {
start(stream, pc, mode === "send" ? null: video_source, server.offer, webrtc_id, "video", on_change_cb).then((connection) => {
pc = connection;
}).catch(() => {
console.info("catching")

View File

@@ -2,6 +2,9 @@
import { Webcam } from "@gradio/icons";
import { createEventDispatcher } from "svelte";
export let icon = Webcam;
$: text = icon === Webcam ? "Click to Access Webcam" : "Click to Access Microphone";
const dispatch = createEventDispatcher<{
click: undefined;
}>();
@@ -10,9 +13,9 @@
<button style:height="100%" on:click={() => dispatch("click")}>
<div class="wrap">
<span class="icon-wrap">
<Webcam />
<svelte:component this={icon} />
</span>
{"Click to Access Webcam"}
{text}
</div>
</button>
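With `icon` now a prop, the same permissions prompt serves the microphone path: pass `Microphone` and the label switches automatically. A hedged usage sketch via the Svelte 4 client API (the component's import path is assumed):

import { Microphone } from "@gradio/icons";
import WebcamPermissions from "./WebcamPermissions.svelte";

// Mount the prompt; it renders "Click to Access Microphone".
const prompt = new WebcamPermissions({
  target: document.body,
  props: { icon: Microphone },
});
prompt.$on("click", () => console.debug("permission button clicked"));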

View File

@@ -1,50 +1,53 @@
export function get_devices(): Promise<MediaDeviceInfo[]> {
return navigator.mediaDevices.enumerateDevices();
return navigator.mediaDevices.enumerateDevices();
}
export function handle_error(error: string): void {
throw new Error(error);
throw new Error(error);
}
export function set_local_stream(
local_stream: MediaStream | null,
video_source: HTMLVideoElement
local_stream: MediaStream | null,
video_source: HTMLVideoElement,
): void {
video_source.srcObject = local_stream;
video_source.muted = true;
video_source.play();
video_source.srcObject = local_stream;
video_source.muted = true;
video_source.play();
}
export async function get_video_stream(
include_audio: boolean,
video_source: HTMLVideoElement,
device_id?: string,
track_constraints?: MediaTrackConstraints,
include_audio: boolean,
video_source: HTMLVideoElement,
device_id?: string,
track_constraints?: MediaTrackConstraints,
): Promise<MediaStream> {
const fallback_constraints = track_constraints || {
width: { ideal: 500 },
height: { ideal: 500 }
};
const fallback_constraints = track_constraints || {
width: { ideal: 500 },
height: { ideal: 500 },
};
const constraints = {
video: device_id ? { deviceId: { exact: device_id }, ...fallback_constraints } : fallback_constraints,
audio: include_audio
};
const constraints = {
video: device_id
? { deviceId: { exact: device_id }, ...fallback_constraints }
: fallback_constraints,
audio: include_audio,
};
return navigator.mediaDevices
.getUserMedia(constraints)
.then((local_stream: MediaStream) => {
set_local_stream(local_stream, video_source);
return local_stream;
});
return navigator.mediaDevices
.getUserMedia(constraints)
.then((local_stream: MediaStream) => {
set_local_stream(local_stream, video_source);
return local_stream;
});
}
export function set_available_devices(
devices: MediaDeviceInfo[]
devices: MediaDeviceInfo[],
kind: "videoinput" | "audioinput" = "videoinput",
): MediaDeviceInfo[] {
const cameras = devices.filter(
(device: MediaDeviceInfo) => device.kind === "videoinput"
);
const cameras = devices.filter(
(device: MediaDeviceInfo) => device.kind === kind,
);
return cameras;
}
return cameras;
}
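`set_available_devices` now takes a `kind`, so one helper backs both the webcam picker ("videoinput", the default) and the new mic picker. A brief usage sketch against the signatures above:

import { get_devices, set_available_devices } from "./stream_utils";

async function listInputs(): Promise<void> {
  const devices = await get_devices();
  const cams = set_available_devices(devices);               // default kind: "videoinput"
  const mics = set_available_devices(devices, "audioinput"); // the new audio path
  console.debug({ cams, mics });
}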

View File

@@ -3,144 +3,144 @@ import { FFmpeg } from "@ffmpeg/ffmpeg";
import { lookup } from "mrmime";
export const prettyBytes = (bytes: number): string => {
let units = ["B", "KB", "MB", "GB", "PB"];
let i = 0;
while (bytes > 1024) {
bytes /= 1024;
i++;
}
let unit = units[i];
return bytes.toFixed(1) + " " + unit;
let units = ["B", "KB", "MB", "GB", "PB"];
let i = 0;
while (bytes > 1024) {
bytes /= 1024;
i++;
}
let unit = units[i];
return bytes.toFixed(1) + " " + unit;
};
export const playable = (): boolean => {
// TODO: Fix this
// let video_element = document.createElement("video");
// let mime_type = mime.lookup(filename);
// return video_element.canPlayType(mime_type) != "";
return true; // FIX BEFORE COMMIT - mime import causing issues
// TODO: Fix this
// let video_element = document.createElement("video");
// let mime_type = mime.lookup(filename);
// return video_element.canPlayType(mime_type) != "";
return true; // FIX BEFORE COMMIT - mime import causing issues
};
export function loaded(
node: HTMLVideoElement,
{ autoplay }: { autoplay: boolean }
node: HTMLVideoElement,
{ autoplay }: { autoplay: boolean },
): any {
async function handle_playback(): Promise<void> {
if (!autoplay) return;
await node.play();
}
async function handle_playback(): Promise<void> {
if (!autoplay) return;
await node.play();
}
node.addEventListener("loadeddata", handle_playback);
node.addEventListener("loadeddata", handle_playback);
return {
destroy(): void {
node.removeEventListener("loadeddata", handle_playback);
}
};
return {
destroy(): void {
node.removeEventListener("loadeddata", handle_playback);
},
};
}
export default async function loadFfmpeg(): Promise<FFmpeg> {
const ffmpeg = new FFmpeg();
const baseURL = "https://unpkg.com/@ffmpeg/core@0.12.4/dist/esm";
const ffmpeg = new FFmpeg();
const baseURL = "https://unpkg.com/@ffmpeg/core@0.12.4/dist/esm";
await ffmpeg.load({
coreURL: await toBlobURL(`${baseURL}/ffmpeg-core.js`, "text/javascript"),
wasmURL: await toBlobURL(`${baseURL}/ffmpeg-core.wasm`, "application/wasm")
});
await ffmpeg.load({
coreURL: await toBlobURL(`${baseURL}/ffmpeg-core.js`, "text/javascript"),
wasmURL: await toBlobURL(`${baseURL}/ffmpeg-core.wasm`, "application/wasm"),
});
return ffmpeg;
return ffmpeg;
}
export function blob_to_data_url(blob: Blob): Promise<string> {
return new Promise((fulfill, reject) => {
let reader = new FileReader();
reader.onerror = reject;
reader.onload = () => fulfill(reader.result as string);
reader.readAsDataURL(blob);
});
return new Promise((fulfill, reject) => {
let reader = new FileReader();
reader.onerror = reject;
reader.onload = () => fulfill(reader.result as string);
reader.readAsDataURL(blob);
});
}
export async function trimVideo(
ffmpeg: FFmpeg,
startTime: number,
endTime: number,
videoElement: HTMLVideoElement
ffmpeg: FFmpeg,
startTime: number,
endTime: number,
videoElement: HTMLVideoElement,
): Promise<any> {
const videoUrl = videoElement.src;
const mimeType = lookup(videoElement.src) || "video/mp4";
const blobUrl = await toBlobURL(videoUrl, mimeType);
const response = await fetch(blobUrl);
const vidBlob = await response.blob();
const type = getVideoExtensionFromMimeType(mimeType) || "mp4";
const inputName = `input.${type}`;
const outputName = `output.${type}`;
const videoUrl = videoElement.src;
const mimeType = lookup(videoElement.src) || "video/mp4";
const blobUrl = await toBlobURL(videoUrl, mimeType);
const response = await fetch(blobUrl);
const vidBlob = await response.blob();
const type = getVideoExtensionFromMimeType(mimeType) || "mp4";
const inputName = `input.${type}`;
const outputName = `output.${type}`;
try {
if (startTime === 0 && endTime === 0) {
return vidBlob;
}
try {
if (startTime === 0 && endTime === 0) {
return vidBlob;
}
await ffmpeg.writeFile(
inputName,
new Uint8Array(await vidBlob.arrayBuffer())
);
await ffmpeg.writeFile(
inputName,
new Uint8Array(await vidBlob.arrayBuffer()),
);
let command = [
"-i",
inputName,
...(startTime !== 0 ? ["-ss", startTime.toString()] : []),
...(endTime !== 0 ? ["-to", endTime.toString()] : []),
"-c:a",
"copy",
outputName
];
let command = [
"-i",
inputName,
...(startTime !== 0 ? ["-ss", startTime.toString()] : []),
...(endTime !== 0 ? ["-to", endTime.toString()] : []),
"-c:a",
"copy",
outputName,
];
await ffmpeg.exec(command);
const outputData = await ffmpeg.readFile(outputName);
const outputBlob = new Blob([outputData], {
type: `video/${type}`
});
await ffmpeg.exec(command);
const outputData = await ffmpeg.readFile(outputName);
const outputBlob = new Blob([outputData], {
type: `video/${type}`,
});
return outputBlob;
} catch (error) {
console.error("Error initializing FFmpeg:", error);
return vidBlob;
}
return outputBlob;
} catch (error) {
console.error("Error initializing FFmpeg:", error);
return vidBlob;
}
}
const getVideoExtensionFromMimeType = (mimeType: string): string | null => {
const videoMimeToExtensionMap: { [key: string]: string } = {
"video/mp4": "mp4",
"video/webm": "webm",
"video/ogg": "ogv",
"video/quicktime": "mov",
"video/x-msvideo": "avi",
"video/x-matroska": "mkv",
"video/mpeg": "mpeg",
"video/3gpp": "3gp",
"video/3gpp2": "3g2",
"video/h261": "h261",
"video/h263": "h263",
"video/h264": "h264",
"video/jpeg": "jpgv",
"video/jpm": "jpm",
"video/mj2": "mj2",
"video/mpv": "mpv",
"video/vnd.ms-playready.media.pyv": "pyv",
"video/vnd.uvvu.mp4": "uvu",
"video/vnd.vivo": "viv",
"video/x-f4v": "f4v",
"video/x-fli": "fli",
"video/x-flv": "flv",
"video/x-m4v": "m4v",
"video/x-ms-asf": "asf",
"video/x-ms-wm": "wm",
"video/x-ms-wmv": "wmv",
"video/x-ms-wmx": "wmx",
"video/x-ms-wvx": "wvx",
"video/x-sgi-movie": "movie",
"video/x-smv": "smv"
};
const videoMimeToExtensionMap: { [key: string]: string } = {
"video/mp4": "mp4",
"video/webm": "webm",
"video/ogg": "ogv",
"video/quicktime": "mov",
"video/x-msvideo": "avi",
"video/x-matroska": "mkv",
"video/mpeg": "mpeg",
"video/3gpp": "3gp",
"video/3gpp2": "3g2",
"video/h261": "h261",
"video/h263": "h263",
"video/h264": "h264",
"video/jpeg": "jpgv",
"video/jpm": "jpm",
"video/mj2": "mj2",
"video/mpv": "mpv",
"video/vnd.ms-playready.media.pyv": "pyv",
"video/vnd.uvvu.mp4": "uvu",
"video/vnd.vivo": "viv",
"video/x-f4v": "f4v",
"video/x-fli": "fli",
"video/x-flv": "flv",
"video/x-m4v": "m4v",
"video/x-ms-asf": "asf",
"video/x-ms-wm": "wm",
"video/x-ms-wmv": "wmv",
"video/x-ms-wmx": "wmx",
"video/x-ms-wvx": "wvx",
"video/x-sgi-movie": "movie",
"video/x-smv": "smv",
};
return videoMimeToExtensionMap[mimeType] || null;
return videoMimeToExtensionMap[mimeType] || null;
};

View File

@@ -1,162 +1,166 @@
export function createPeerConnection(pc, node) {
// register some listeners to help debugging
pc.addEventListener(
"icegatheringstatechange",
() => {
console.debug(pc.iceGatheringState);
},
false
);
// register some listeners to help debugging
pc.addEventListener(
"icegatheringstatechange",
() => {
console.debug(pc.iceGatheringState);
},
false,
);
pc.addEventListener(
"iceconnectionstatechange",
() => {
console.debug(pc.iceConnectionState);
},
false
);
pc.addEventListener(
"iceconnectionstatechange",
() => {
console.debug(pc.iceConnectionState);
},
false,
);
pc.addEventListener(
"signalingstatechange",
() => {
console.debug(pc.signalingState);
},
false
);
pc.addEventListener(
"signalingstatechange",
() => {
console.debug(pc.signalingState);
},
false,
);
// connect audio / video from server to local
pc.addEventListener("track", (evt) => {
console.debug("track event listener");
if (node.srcObject !== evt.streams[0]) {
console.debug("streams", evt.streams);
node.srcObject = evt.streams[0];
console.debug("node.srcOject", node.srcObject);
if (evt.track.kind === 'audio') {
node.volume = 1.0; // Ensure volume is up
node.muted = false;
node.autoplay = true;
// Attempt to play (needed for some browsers)
node.play().catch(e => console.debug("Autoplay failed:", e));
}
}
});
// connect audio / video from server to local
pc.addEventListener("track", (evt) => {
console.debug("track event listener");
if (node && node.srcObject !== evt.streams[0]) {
console.debug("streams", evt.streams);
node.srcObject = evt.streams[0];
console.debug("node.srcOject", node.srcObject);
if (evt.track.kind === "audio") {
node.volume = 1.0; // Ensure volume is up
node.muted = false;
node.autoplay = true;
// Attempt to play (needed for some browsers)
node.play().catch((e) => console.debug("Autoplay failed:", e));
}
}
});
return pc;
return pc;
}
export async function start(stream, pc: RTCPeerConnection, node, server_fn, webrtc_id,
modality: "video" | "audio" = "video", on_change_cb: () => void = () => {}) {
pc = createPeerConnection(pc, node);
const data_channel = pc.createDataChannel("text");
export async function start(
stream,
pc: RTCPeerConnection,
node,
server_fn,
webrtc_id,
modality: "video" | "audio" = "video",
on_change_cb: (msg: "change" | "tick") => void = () => {},
) {
pc = createPeerConnection(pc, node);
const data_channel = pc.createDataChannel("text");
data_channel.onopen = () => {
console.debug("Data channel is open");
data_channel.send("handshake");
};
data_channel.onopen = () => {
console.debug("Data channel is open");
data_channel.send("handshake");
};
data_channel.onmessage = (event) => {
console.debug("Received message:", event.data);
if (event.data === "change") {
console.debug("Change event received");
on_change_cb();
}
};
data_channel.onmessage = (event) => {
console.debug("Received message:", event.data);
if (event.data === "change" || event.data === "tick") {
console.debug(`${event.data} event received`);
on_change_cb(event.data);
}
};
if (stream) {
stream.getTracks().forEach((track) => {
console.debug("Track stream callback", track);
pc.addTrack(track, stream);
});
} else {
console.debug("Creating transceiver!");
pc.addTransceiver(modality, { direction: "recvonly" });
}
if (stream) {
stream.getTracks().forEach((track) => {
console.debug("Track stream callback", track);
pc.addTrack(track, stream);
});
} else {
console.debug("Creating transceiver!");
pc.addTransceiver(modality, { direction: "recvonly" });
}
await negotiate(pc, server_fn, webrtc_id);
return pc;
await negotiate(pc, server_fn, webrtc_id);
return pc;
}
function make_offer(server_fn: any, body): Promise<object> {
return new Promise((resolve, reject) => {
server_fn(body).then((data) => {
console.debug("data", data)
if(data?.status === "failed") {
console.debug("rejecting")
reject("error")
}
resolve(data);
})
})
return new Promise((resolve, reject) => {
server_fn(body).then((data) => {
console.debug("data", data);
if (data?.status === "failed") {
console.debug("rejecting");
reject("error");
}
resolve(data);
});
});
}
async function negotiate(
pc: RTCPeerConnection,
server_fn: any,
webrtc_id: string,
pc: RTCPeerConnection,
server_fn: any,
webrtc_id: string,
): Promise<void> {
return pc
.createOffer()
.then((offer) => {
return pc.setLocalDescription(offer);
})
.then(() => {
// wait for ICE gathering to complete
return new Promise<void>((resolve) => {
console.debug("ice gathering state", pc.iceGatheringState);
if (pc.iceGatheringState === "complete") {
resolve();
} else {
const checkState = () => {
if (pc.iceGatheringState === "complete") {
console.debug("ice complete");
pc.removeEventListener("icegatheringstatechange", checkState);
resolve();
}
};
pc.addEventListener("icegatheringstatechange", checkState);
}
});
})
.then(() => {
var offer = pc.localDescription;
return make_offer(
server_fn,
{
sdp: offer.sdp,
type: offer.type,
webrtc_id: webrtc_id
},
);
})
.then((response) => {
return response;
})
.then((answer) => {
return pc.setRemoteDescription(answer);
})
return pc
.createOffer()
.then((offer) => {
return pc.setLocalDescription(offer);
})
.then(() => {
// wait for ICE gathering to complete
return new Promise<void>((resolve) => {
console.debug("ice gathering state", pc.iceGatheringState);
if (pc.iceGatheringState === "complete") {
resolve();
} else {
const checkState = () => {
if (pc.iceGatheringState === "complete") {
console.debug("ice complete");
pc.removeEventListener("icegatheringstatechange", checkState);
resolve();
}
};
pc.addEventListener("icegatheringstatechange", checkState);
}
});
})
.then(() => {
var offer = pc.localDescription;
return make_offer(server_fn, {
sdp: offer.sdp,
type: offer.type,
webrtc_id: webrtc_id,
});
})
.then((response) => {
return response;
})
.then((answer) => {
return pc.setRemoteDescription(answer);
});
}
export function stop(pc: RTCPeerConnection) {
console.debug("Stopping peer connection");
// close transceivers
if (pc.getTransceivers) {
pc.getTransceivers().forEach((transceiver) => {
if (transceiver.stop) {
transceiver.stop();
}
});
}
console.debug("Stopping peer connection");
// close transceivers
if (pc.getTransceivers) {
pc.getTransceivers().forEach((transceiver) => {
if (transceiver.stop) {
transceiver.stop();
}
});
}
// close local audio / video
if (pc.getSenders()) {
pc.getSenders().forEach((sender) => {
console.log("sender", sender);
if (sender.track && sender.track.stop) sender.track.stop();
});
}
// close local audio / video
if (pc.getSenders()) {
pc.getSenders().forEach((sender) => {
console.log("sender", sender);
if (sender.track && sender.track.stop) sender.track.stop();
});
}
// close peer connection
setTimeout(() => {
pc.close();
}, 500);
// close peer connection
setTimeout(() => {
pc.close();
}, 500);
}
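`start` now tolerates a null `node`: with local tracks supplied and no sink element, the session is effectively send-only, which is exactly what `mode === "send"` passes in. A hedged sketch of a microphone-only session built on the utilities above (`offer` is assumed to be the same server wrapper the components pass as `server.offer`):

import { start, stop } from "./webrtc_utils";

async function startSendOnlyAudio(
  offer: (body: any) => Promise<any>,
): Promise<RTCPeerConnection> {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  let pc = new RTCPeerConnection();
  const webrtc_id = Math.random().toString(36).substring(2);
  // node = null: no remote track is ever attached, matching mode === "send".
  pc = await start(stream, pc, null, offer, webrtc_id, "audio", (msg) =>
    console.debug("server message:", msg),
  );
  return pc; // call stop(pc) to end the session
}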