make code

This commit is contained in:
freddyaboulton
2024-10-22 16:24:21 -07:00
parent cff6073df0
commit e7f3e63c79
20 changed files with 427 additions and 156 deletions

View File

@@ -32,6 +32,7 @@
export let time_limit: number | null = null;
export let modality: "video" | "audio" = "video";
export let mode: "send-receive" | "receive" = "send-receive";
export let track_constraints: MediaTrackConstraints = {};
let dragging = false;
@@ -113,6 +114,7 @@
{server}
{rtc_configuration}
{time_limit}
{track_constraints}
i18n={gradio.i18n}
on:tick={() => gradio.dispatch("tick")}
on:error={({ detail }) => gradio.dispatch("error", detail)}

View File

@@ -25,7 +25,6 @@
});
function setupAudioContext() {
console.log("set up")
audioContext = new (window.AudioContext || window.webkitAudioContext)();
analyser = audioContext.createAnalyser();
console.log("audio_source", audio_source.srcObject);
@@ -50,16 +49,6 @@
animationId = requestAnimationFrame(updateBars);
}
/**
 * Flip the enabled state of every audio track on the element's current
 * stream and mirror the result in the component-level `is_muted` flag.
 * No-op when there is no source stream attached yet.
 */
function toggleMute() {
	if (audio_source && audio_source.srcObject) {
		const audioTracks = (audio_source.srcObject as MediaStream).getAudioTracks();
		audioTracks.forEach(track => {
			track.enabled = !track.enabled;
		});
		// Guard against a stream with zero audio tracks: indexing [0] on an
		// empty array would read `.enabled` off `undefined` and throw.
		if (audioTracks.length > 0) {
			is_muted = !audioTracks[0].enabled;
		}
	}
}
</script>
@@ -75,6 +64,8 @@
<style>
.waveContainer {
position: relative;
display: flex;
min-height: 100px;
max-height: 128px;
}

View File

@@ -24,6 +24,7 @@
export let rtc_configuration: Object | null = null;
export let i18n: I18nFormatter;
export let time_limit: number | null = null;
export let track_constraints: MediaTrackConstraints = {};
let _time_limit: number | null = null;
$: console.log("time_limit", time_limit);
@@ -87,14 +88,7 @@
let stream = null
try {
stream = await navigator.mediaDevices.getUserMedia({ audio: {
echoCancellation: true,
noiseSuppression: {exact: true},
autoGainControl: {exact: true},
sampleRate: {ideal: 48000},
sampleSize: {ideal: 16},
channelCount: 2,
} });
stream = await navigator.mediaDevices.getUserMedia({ audio: track_constraints });
} catch (err) {
if (!navigator.mediaDevices) {
dispatch("error", i18n("audio.no_device_support"));

View File

@@ -20,6 +20,7 @@
offer: (body: any) => Promise<any>;
};
export let rtc_configuration: Object;
export let track_constraints: MediaTrackConstraints = {};
const dispatch = createEventDispatcher<{
change: FileData | null;
@@ -48,6 +49,7 @@
{rtc_configuration}
{include_audio}
{time_limit}
{track_constraints}
on:error
on:start_recording
on:stop_recording

View File

@@ -22,7 +22,7 @@
offer: (body: any) => Promise<any>;
};
let stream_state: "open" | "closed" | "connecting" = "closed";
let stream_state: "open" | "closed" | "waiting" = "closed";
let audio_player: HTMLAudioElement;
let pc: RTCPeerConnection;
let _webrtc_id = Math.random().toString(36).substring(2);
@@ -35,7 +35,6 @@
stop: undefined;
}>();
onMount(() => {
window.setInterval(() => {
if (stream_state == "open") {
@@ -45,10 +44,11 @@
}
)
async function start_stream(value: string): Promise<void> {
async function start_stream(value: string): Promise<string> {
if( value === "start_webrtc_stream") {
stream_state = "connecting";
stream_state = "waiting";
value = _webrtc_id;
console.log("set value to ", value);
pc = new RTCPeerConnection(rtc_configuration);
pc.addEventListener("connectionstatechange",
async (event) => {
@@ -74,9 +74,12 @@
dispatch("error", "Too many concurrent users. Come back later!");
});
}
return value;
}
$: start_stream(value);
$: start_stream(value).then((val) => {
value = val;
});
@@ -97,23 +100,28 @@
on:play={() => dispatch("play")}
/>
{#if value !== "__webrtc_value__"}
<div class="audio-container">
<AudioWave audio_source={audio_player} {stream_state}/>
</div>
{/if}
{#if value === "__webrtc_value__"}
<Empty size="small">
<Music />
</Empty>
<Empty size="small">
<Music />
</Empty>
{/if}
<style>
:global(::part(wrapper)) {
margin-bottom: var(--size-2);
}
.audio-container {
display: flex;
height: 100%;
flex-direction: column;
justify-content: center;
align-items: center;
}
.standard-player {
width: 100%;
padding: var(--size-2);
}
.hidden {

View File

@@ -40,6 +40,7 @@
};
let canvas: HTMLCanvasElement;
export let track_constraints: MediaTrackConstraints | null = null;
export let rtc_configuration: Object;
export let stream_every = 1;
export let server: {
@@ -63,7 +64,7 @@
const target = event.target as HTMLInputElement;
const device_id = target.value;
await get_video_stream(include_audio, video_source, device_id).then(
await get_video_stream(include_audio, video_source, device_id, track_constraints).then(
async (local_stream) => {
stream = local_stream;
selected_device =

View File

@@ -18,15 +18,16 @@ export function set_local_stream(
export async function get_video_stream(
include_audio: boolean,
video_source: HTMLVideoElement,
device_id?: string
device_id?: string,
track_constraints?: MediaTrackConstraints,
): Promise<MediaStream> {
const size = {
const fallback_constraints = track_constraints || {
width: { ideal: 500 },
height: { ideal: 500 }
};
const constraints = {
video: device_id ? { deviceId: { exact: device_id }, ...size } : size,
video: device_id ? { deviceId: { exact: device_id }, ...fallback_constraints } : fallback_constraints,
audio: include_audio
};

View File

@@ -48,8 +48,6 @@ export async function start(stream, pc: RTCPeerConnection, node, server_fn, webr
pc = createPeerConnection(pc, node);
if (stream) {
stream.getTracks().forEach((track) => {
if(modality == "video") track.applyConstraints({ frameRate: { max: 30 } });
else if(modality == "audio") track.applyConstraints({ sampleRate: 48000, channelCount: 1 });
console.debug("Track stream callback", track);
pc.addTrack(track, stream);
});