diff --git a/demo/app.py b/demo/app.py
index 5689a63..068a56f 100644
--- a/demo/app.py
+++ b/demo/app.py
@@ -38,7 +38,7 @@ class GeminiHandler(AsyncAudioVideoStreamHandler):
expected_layout,
output_sample_rate,
output_frame_size,
- input_sample_rate=16000,
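+ # sample rate (Hz) at which incoming mic audio frames are delivered to receive()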
+ input_sample_rate=24000,
)
self.audio_queue = asyncio.Queue()
self.video_queue = asyncio.Queue()
@@ -69,13 +69,6 @@ class GeminiHandler(AsyncAudioVideoStreamHandler):
async def video_emit(self) -> VideoEmitType:
return await self.video_queue.get()
- async def generator(self):
- while not self.quit.is_set():
- turn = self.session.receive()
- async for response in turn:
- if data := response.data:
- yield data
-
async def receive(self, frame: tuple[int, np.ndarray]) -> None:
frame_size, array = frame
self.audio_queue.put_nowait(array)
@@ -95,19 +88,19 @@ class GeminiHandler(AsyncAudioVideoStreamHandler):
css = """
-#video-source {max-width: 600px !important; max-height: 600 !important;}
+#video-source {max-width: 1500px !important; max-height: 600px !important;}
"""
with gr.Blocks(css=css) as demo:
with gr.Column():
webrtc = WebRTC(
- label="Video Chat",
+ width=1500,
+ height=500,
+ label="Local",
modality="audio-video",
mode="send-receive",
elem_id="video-source",
- pulse_color="rgb(35, 157, 225)",
- icon_button_color="rgb(35, 157, 225)",
)
webrtc.stream(
GeminiHandler(),
diff --git a/frontend/shared/Webcam.svelte b/frontend/shared/Webcam.svelte
index c074fc0..4eb4074 100644
--- a/frontend/shared/Webcam.svelte
+++ b/frontend/shared/Webcam.svelte
@@ -6,7 +6,9 @@
Square,
DropdownArrow,
Spinner,
- Microphone as Mic
+ Microphone as Mic,
+ VolumeMuted,
+ VolumeHigh
} from "@gradio/icons";
import type { I18nFormatter } from "@gradio/utils";
import { StreamingBar } from "@gradio/statustracker";
@@ -63,6 +65,18 @@
export let i18n: I18nFormatter;
let isKeepLocal = mode === "send-receive" && include_audio
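+ // UI state for the mute buttons: volumeMuted silences <video> playback, micMuted disables the outgoing mic track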
+ let volumeMuted = false
+ let micMuted = false
+ const handle_volume_mute = () => {
+ volumeMuted = !volumeMuted
+ }
+ const handle_mic_mute = () => {
+ micMuted = !micMuted
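+ // toggle track.enabled instead of stopping the track so the mic can be unmuted without renegotiating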
+ stream?.getTracks().forEach(track => {
+ if (track.kind === "audio")
+ track.enabled = !micMuted
+ })
+ }
const dispatch = createEventDispatcher<{
tick: undefined;
@@ -258,6 +272,7 @@
class:flip={stream_state != "open" ||
(stream_state === "open" && include_audio)}
autoplay={true}
+ muted={volumeMuted}
playsinline={true}
/>
@@ -267,6 +282,7 @@
class:hide={!webcam_accessed}
class:flip={(stream_state != "open") || (stream_state === "open" && include_audio)}
autoplay={true}
+ muted={volumeMuted}
playsinline={true}
/>
{/if}
@@ -319,7 +335,35 @@