Everything working on Hugging Face Spaces

This commit is contained in:
freddyaboulton
2024-09-25 12:18:32 -04:00
parent 83be4aa3ea
commit 11c828edb5
17 changed files with 91 additions and 1157 deletions

View File

@@ -28,6 +28,19 @@ import cv2
import numpy as np
from gradio_webrtc import WebRTC
from pathlib import Path
from twilio.rest import Client
import os
account_sid = os.environ.get("TWILIO_ACCOUNT_SID")
auth_token = os.environ.get("TWILIO_AUTH_TOKEN")
client = Client(account_sid, auth_token)
token = client.tokens.create()
rtc_configuration = {
"iceServers": token.ice_servers,
"iceTransportPolicy": "relay",
}
CLASSES = [
"background",
@@ -113,7 +126,7 @@ with gr.Blocks(css=css) as demo:
""")
with gr.Column(elem_classes=["my-column"]):
with gr.Group(elem_classes=["my-group"]):
image = WebRTC(label="Strean")
image = WebRTC(label="Strean", rtc_configuration=rtc_configuration)
conf_threshold = gr.Slider(
label="Confidence Threshold",
minimum=0.0,
@@ -426,16 +439,16 @@ int | None
</tr>
<tr>
<td align="left"><code>streaming</code></td>
<td align="left"><code>rtc_configuration</code></td>
<td align="left" style="width: 25%;">
```python
bool
dict[str, Any] | None
```
</td>
<td align="left"><code>False</code></td>
<td align="left">when used set as an output, takes video chunks yielded from the backend and combines them into one streaming video output. Each chunk should be a video file with a .ts extension using an h.264 encoding. Mp4 files are also accepted but they will be converted to h.264 encoding.</td>
<td align="left"><code>None</code></td>
<td align="left">Configuration dictionary (e.g. ICE servers) passed to the client-side RTCPeerConnection constructor; if None, a public STUN server is used as a fallback.</td>
</tr>
</tbody></table>

View File

@@ -141,7 +141,7 @@ class WebRTC(Component):
show_download_button: bool | None = None,
min_length: int | None = None,
max_length: int | None = None,
streaming: bool = False,
rtc_configuration: dict[str, Any] | None = None,
):
"""
Parameters:
@@ -185,7 +185,7 @@ class WebRTC(Component):
self.show_download_button = show_download_button
self.min_length = min_length
self.max_length = max_length
self.streaming = streaming
self.rtc_configuration = rtc_configuration
self.event_handler: Callable | None = None
super().__init__(
label=label,
@@ -269,6 +269,7 @@ class WebRTC(Component):
print(pc.iceConnectionState)
if pc.iceConnectionState == "failed":
await pc.close()
self.connections.pop(body['webrtc_id'], None)
self.pcs.discard(pc)
@pc.on("connectionstatechange")

View File

@@ -3,6 +3,19 @@ import cv2
import numpy as np
from gradio_webrtc import WebRTC
from pathlib import Path
from twilio.rest import Client
import os
account_sid = os.environ.get("TWILIO_ACCOUNT_SID")
auth_token = os.environ.get("TWILIO_AUTH_TOKEN")
client = Client(account_sid, auth_token)
token = client.tokens.create()
rtc_configuration = {
"iceServers": token.ice_servers,
"iceTransportPolicy": "relay",
}
CLASSES = [
"background",
@@ -88,7 +101,7 @@ with gr.Blocks(css=css) as demo:
""")
with gr.Column(elem_classes=["my-column"]):
with gr.Group(elem_classes=["my-group"]):
image = WebRTC(label="Strean")
image = WebRTC(label="Strean", rtc_configuration=rtc_configuration)
conf_threshold = gr.Slider(
label="Confidence Threshold",
minimum=0.0,

View File

@@ -1,3 +1,5 @@
safetensors==0.4.3
opencv-python
https://gradio-builds.s3.amazonaws.com/5.0-dev/e85cc9248cc33e187528f24f3b4415ca7b9e7134/take2/gradio-5.0.0b1-py3-none-any.whl
twilio
https://huggingface.co/datasets/freddyaboulton/bucket/resolve/main/gradio-5.0.0b3-py3-none-any.whl
https://huggingface.co/datasets/freddyaboulton/bucket/resolve/main/gradio_webrtc-0.0.1-py3-none-any.whl

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +1,4 @@
<script lang="ts">
import Video from "./shared/Video.svelte";
import { playable } from "./shared/utils";
import { type FileData } from "@gradio/client";
@@ -28,16 +27,12 @@
class:gallery={type === "gallery"}
class:selected
>
<Video
muted
playsinline
bind:node={video}
<video
bind:this={video}
on:loadeddata={init}
on:mouseover={video.play.bind(video)}
on:mouseout={video.pause.bind(video)}
src={value?.video.url}
is_stream={false}
{loop}
/>
</div>
{:else}

View File

@@ -1,9 +1,7 @@
<svelte:options accessors={true} />
<script lang="ts">
import type { Gradio, ShareData } from "@gradio/utils";
import type { FileData } from "@gradio/client";
import { Block, UploadText } from "@gradio/atoms";
import Video from "./shared/InteractiveVideo.svelte";
import { StatusTracker } from "@gradio/statustracker";
@@ -27,26 +25,24 @@
export let container = false;
export let scale: number | null = null;
export let min_width: number | undefined = undefined;
export let gradio: Gradio<{
change: never;
clear: never;
play: never;
pause: never;
upload: never;
stop: never;
end: never;
start_recording: never;
stop_recording: never;
share: ShareData;
error: string;
warning: string;
clear_status: LoadingStatus;
tick: never;
}>;
export let mirror_webcam: boolean;
export let include_audio: boolean;
let active_source: "webcam" | "upload";
export let gradio;
export let rtc_configuration: Object;
// export let gradio: Gradio<{
// change: never;
// clear: never;
// play: never;
// pause: never;
// upload: never;
// stop: never;
// end: never;
// start_recording: never;
// stop_recording: never;
// share: ShareData;
// error: string;
// warning: string;
// clear_status: LoadingStatus;
// tick: never;
// }>;
let dragging = false;
@@ -54,51 +50,9 @@
</script>
<!-- {#if !interactive}
<Block
{visible}
variant={value === null && active_source === "upload" ? "dashed" : "solid"}
border_mode={dragging ? "focus" : "base"}
padding={false}
{elem_id}
{elem_classes}
{height}
{width}
{container}
{scale}
{min_width}
allow_overflow={false}
>
<StatusTracker
autoscroll={gradio.autoscroll}
i18n={gradio.i18n}
{...loading_status}
on:clear_status={() => gradio.dispatch("clear_status", loading_status)}
/>
<StaticVideo
value={_video}
subtitle={_subtitle}
{label}
{show_label}
{autoplay}
{loop}
{show_share_button}
{show_download_button}
on:play={() => gradio.dispatch("play")}
on:pause={() => gradio.dispatch("pause")}
on:stop={() => gradio.dispatch("stop")}
on:end={() => gradio.dispatch("end")}
on:share={({ detail }) => gradio.dispatch("share", detail)}
on:error={({ detail }) => gradio.dispatch("error", detail)}
i18n={gradio.i18n}
upload={(...args) => gradio.client.upload(...args)}
/>
</Block>
{:else} -->
<Block
{visible}
variant={value === null && active_source === "upload" ? "dashed" : "solid"}
variant={"solid"}
border_mode={dragging ? "focus" : "base"}
padding={false}
{elem_id}
@@ -121,10 +75,11 @@
bind:value={value}
{label}
{show_label}
{active_source}
{include_audio}
active_source={"webcam"}
include_audio={false}
{root}
{server}
{rtc_configuration}
on:clear={() => gradio.dispatch("clear")}
on:play={() => gradio.dispatch("play")}
on:pause={() => gradio.dispatch("pause")}

View File

@@ -1,6 +1,4 @@
export { default as BaseInteractiveVideo } from "./shared/InteractiveVideo.svelte";
export { default as BaseStaticVideo } from "./shared/VideoPreview.svelte";
export { default as BasePlayer } from "./shared/Player.svelte";
export { prettyBytes, playable, loaded } from "./shared/utils";
export { default as BaseExample } from "./Example.svelte";
import { default as Index } from "./Index.svelte";

View File

@@ -34,16 +34,6 @@
"gradio": "./Example.svelte",
"svelte": "./dist/Example.svelte",
"types": "./dist/Example.svelte.d.ts"
},
"./shared": {
"gradio": "./shared/index.ts",
"svelte": "./dist/shared/index.js",
"types": "./dist/shared/index.d.ts"
},
"./base": {
"gradio": "./shared/VideoPreview.svelte",
"svelte": "./dist/shared/VideoPreview.svelte",
"types": "./dist/shared/VideoPreview.svelte.d.ts"
}
},
"peerDependencies": {

View File

@@ -19,8 +19,7 @@
export let server: {
offer: (body: any) => Promise<any>;
};
let has_change_history = false;
export let rtc_configuration: Object;
const dispatch = createEventDispatcher<{
change: FileData | null;
@@ -46,6 +45,7 @@
<div data-testid="video" class="video-container">
<Webcam
{root}
{rtc_configuration}
{include_audio}
on:error
on:start_recording

View File

@@ -1,270 +0,0 @@
<script lang="ts">
import { createEventDispatcher } from "svelte";
import { Play, Pause, Maximise, Undo } from "@gradio/icons";
import Video from "./Video.svelte";
import VideoControls from "./VideoControls.svelte";
import type { FileData, Client } from "@gradio/client";
import { prepare_files } from "@gradio/client";
import { format_time } from "@gradio/utils";
import type { I18nFormatter } from "@gradio/utils";
export let root = "";
export let src: string;
export let subtitle: string | null = null;
export let mirror: boolean;
export let autoplay: boolean;
export let loop: boolean;
export let label = "test";
export let interactive = false;
export let handle_change: (video: FileData) => void = () => {};
export let handle_reset_value: () => void = () => {};
export let upload: Client["upload"];
export let is_stream: boolean | undefined;
export let i18n: I18nFormatter;
export let show_download_button = false;
export let value: FileData | null = null;
export let handle_clear: () => void = () => {};
export let has_change_history = false;
const dispatch = createEventDispatcher<{
play: undefined;
pause: undefined;
stop: undefined;
end: undefined;
clear: undefined;
}>();
let time = 0;
let duration: number;
let paused = true;
let video: HTMLVideoElement;
let processingVideo = false;
/** Scrub the playhead while the pointer moves across the progress bar. */
function handleMove(e: TouchEvent | MouseEvent): void {
	if (!duration) return;
	if (e.type === "click") {
		handle_click(e as MouseEvent);
		return;
	}
	const is_touch = e.type === "touchmove";
	// For mouse moves, only scrub while the primary button is held down.
	if (!is_touch && !((e as MouseEvent).buttons & 1)) return;
	const pointer_x = is_touch
		? (e as TouchEvent).touches[0].clientX
		: (e as MouseEvent).clientX;
	const bar = (e.currentTarget as HTMLProgressElement).getBoundingClientRect();
	// Map the pointer's horizontal position to a timestamp within the video.
	time = (duration * (pointer_x - bar.left)) / (bar.right - bar.left);
}
/** Toggle playback, unless the video itself is the fullscreen element. */
async function play_pause(): Promise<void> {
	if (document.fullscreenElement == video) return;
	// "Playing" means: started, not paused/ended, and enough data buffered.
	const currently_playing =
		video.currentTime > 0 &&
		!video.paused &&
		!video.ended &&
		video.readyState > video.HAVE_CURRENT_DATA;
	if (currently_playing) {
		video.pause();
	} else {
		await video.play();
	}
}
/** Seek to the position of a click on the progress bar. */
function handle_click(e: MouseEvent): void {
	const rect = (e.currentTarget as HTMLProgressElement).getBoundingClientRect();
	const fraction = (e.clientX - rect.left) / (rect.right - rect.left);
	time = duration * fraction;
}
/** Forward the native "ended" event as both "stop" and "end". */
function handle_end(): void {
	dispatch("stop");
	dispatch("end");
}
// Upload a trimmed clip and notify the parent of the new value.
// Wraps the raw Blob in a named File, uploads it to `root`, and passes the
// resulting FileData to `handle_change`.
const handle_trim_video = async (videoBlob: Blob): Promise<void> => {
	let _video_blob = new File([videoBlob], "video.mp4");
	const val = await prepare_files([_video_blob]);
	// upload() results may contain falsy entries; drop them and take the first.
	let value = ((await upload(val, root))?.filter(Boolean) as FileData[])[0];
	handle_change(value);
};
/** Put the underlying <video> element into browser fullscreen. */
function open_full_screen(): void {
	// Returned Promise intentionally ignored, matching caller expectations.
	void video.requestFullscreen();
}
</script>
<div class="wrap">
<div class="mirror-wrap" class:mirror>
<Video
{src}
preload="auto"
{autoplay}
{loop}
{is_stream}
on:click={play_pause}
on:play
on:pause
on:ended={handle_end}
bind:currentTime={time}
bind:duration
bind:paused
bind:node={video}
data-testid={`${label}-player`}
{processingVideo}
on:load
>
<track kind="captions" src={subtitle} default />
</Video>
</div>
<div class="controls">
<div class="inner">
<span
role="button"
tabindex="0"
class="icon"
aria-label="play-pause-replay-button"
on:click={play_pause}
on:keydown={play_pause}
>
{#if time === duration}
<Undo />
{:else if paused}
<Play />
{:else}
<Pause />
{/if}
</span>
<span class="time">{format_time(time)} / {format_time(duration)}</span>
<!-- TODO: implement accessible video timeline for 4.0 -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-noninteractive-element-interactions -->
<progress
value={time / duration || 0}
on:mousemove={handleMove}
on:touchmove|preventDefault={handleMove}
on:click|stopPropagation|preventDefault={handle_click}
/>
<div
role="button"
tabindex="0"
class="icon"
aria-label="full-screen"
on:click={open_full_screen}
on:keypress={open_full_screen}
>
<Maximise />
</div>
</div>
</div>
</div>
{#if interactive}
<VideoControls
videoElement={video}
showRedo
{handle_trim_video}
{handle_reset_value}
bind:processingVideo
{value}
{i18n}
{show_download_button}
{handle_clear}
{has_change_history}
/>
{/if}
<style lang="postcss">
span {
text-shadow: 0 0 8px rgba(0, 0, 0, 0.5);
}
progress {
margin-right: var(--size-3);
border-radius: var(--radius-sm);
width: var(--size-full);
height: var(--size-2);
}
progress::-webkit-progress-bar {
border-radius: 2px;
background-color: rgba(255, 255, 255, 0.2);
overflow: hidden;
}
progress::-webkit-progress-value {
background-color: rgba(255, 255, 255, 0.9);
}
.mirror {
transform: scaleX(-1);
}
.mirror-wrap {
position: relative;
height: 100%;
width: 100%;
}
.controls {
position: absolute;
bottom: 0;
opacity: 0;
transition: 500ms;
margin: var(--size-2);
border-radius: var(--radius-md);
background: var(--color-grey-800);
padding: var(--size-2) var(--size-1);
width: calc(100% - 0.375rem * 2);
width: calc(100% - var(--size-2) * 2);
}
.wrap:hover .controls {
opacity: 1;
}
.inner {
display: flex;
justify-content: space-between;
align-items: center;
padding-right: var(--size-2);
padding-left: var(--size-2);
width: var(--size-full);
height: var(--size-full);
}
.icon {
display: flex;
justify-content: center;
cursor: pointer;
width: var(--size-6);
color: white;
}
.time {
flex-shrink: 0;
margin-right: var(--size-3);
margin-left: var(--size-3);
color: white;
font-size: var(--text-sm);
font-family: var(--font-mono);
}
.wrap {
position: relative;
background-color: var(--background-fill-secondary);
height: var(--size-full);
width: var(--size-full);
border-radius: var(--radius-xl);
}
.wrap :global(video) {
height: var(--size-full);
width: var(--size-full);
}
</style>

View File

@@ -1,197 +0,0 @@
<script lang="ts">
import type { HTMLVideoAttributes } from "svelte/elements";
import { createEventDispatcher } from "svelte";
import { loaded } from "./utils";
import { resolve_wasm_src } from "@gradio/wasm/svelte";
import Hls from "hls.js";
export let src: HTMLVideoAttributes["src"] = undefined;
export let muted: HTMLVideoAttributes["muted"] = undefined;
export let playsinline: HTMLVideoAttributes["playsinline"] = undefined;
export let preload: HTMLVideoAttributes["preload"] = undefined;
export let autoplay: HTMLVideoAttributes["autoplay"] = undefined;
export let controls: HTMLVideoAttributes["controls"] = undefined;
export let currentTime: number | undefined = undefined;
export let duration: number | undefined = undefined;
export let paused: boolean | undefined = undefined;
export let node: HTMLVideoElement | undefined = undefined;
export let loop: boolean;
export let is_stream;
export let processingVideo = false;
let resolved_src: typeof src;
let stream_active = false;
// The `src` prop can be updated before the Promise from `resolve_wasm_src` is resolved.
// In such a case, the resolved value for the old `src` has to be discarded,
// This variable `latest_src` is used to pick up only the value resolved for the latest `src` prop.
let latest_src: typeof src;
$: {
// In normal (non-Wasm) Gradio, the `<img>` element should be rendered with the passed `src` props immediately
// without waiting for `resolve_wasm_src()` to resolve.
// If it waits, a blank element is displayed until the async task finishes
// and it leads to undesirable flickering.
// So set `src` to `resolved_src` here.
resolved_src = src;
latest_src = src;
const resolving_src = src;
resolve_wasm_src(resolving_src).then((s) => {
if (latest_src === resolving_src) {
resolved_src = s;
}
});
}
const dispatch = createEventDispatcher();
// Attach an HLS (m3u8) stream to the <video> node via hls.js.
// No-op unless `src` is set, the video is flagged as a stream, the node
// exists, and a stream is not already attached. Only the MSE/hls.js path
// is handled; browsers where Hls.isSupported() is false fall through.
// NOTE(review): native-HLS browsers (e.g. Safari) are not handled here —
// confirm whether a canPlayType("application/vnd.apple.mpegurl") fallback
// is needed.
function load_stream(
	src: string | null | undefined,
	is_stream: boolean,
	node: HTMLVideoElement | undefined
): void {
	if (!src || !is_stream) return;
	if (!node) return;
	if (Hls.isSupported() && !stream_active) {
		const hls = new Hls({
			maxBufferLength: 1, // 0.5 seconds (500 ms)
			maxMaxBufferLength: 1, // Maximum max buffer length in seconds
			lowLatencyMode: true // Enable low latency mode
		});
		hls.loadSource(src);
		hls.attachMedia(node);
		// Start playback as soon as the playlist has been parsed.
		hls.on(Hls.Events.MANIFEST_PARSED, function () {
			(node as HTMLVideoElement).play();
		});
		hls.on(Hls.Events.ERROR, function (event, data) {
			console.error("HLS error:", event, data);
			if (data.fatal) {
				switch (data.type) {
					case Hls.ErrorTypes.NETWORK_ERROR:
						// Transient network errors are often recoverable: retry loading.
						console.error(
							"Fatal network error encountered, trying to recover"
						);
						hls.startLoad();
						break;
					case Hls.ErrorTypes.MEDIA_ERROR:
						console.error("Fatal media error encountered, trying to recover");
						hls.recoverMediaError();
						break;
					default:
						// Unrecoverable: tear down the hls.js instance.
						console.error("Fatal error, cannot recover");
						hls.destroy();
						break;
				}
			}
		});
		// Guard against re-attaching hls.js for the same src/node pair.
		stream_active = true;
	}
}
$: src, (stream_active = false);
$: load_stream(src, is_stream, node);
</script>
<!--
The spread operator with `$$props` or `$$restProps` can't be used here
to pass props from the parent component to the <video> element
because of its unexpected behavior: https://github.com/sveltejs/svelte/issues/7404
For example, if we add {...$$props} or {...$$restProps}, the boolean props aside it like `controls` will be compiled as string "true" or "false" on the actual DOM.
Then, even when `controls` is false, the compiled DOM would be `<video controls="false">` which is equivalent to `<video controls>` since the string "false" is even truthy.
-->
<div class:hidden={!processingVideo} class="overlay">
<span class="load-wrap">
<span class="loader" />
</span>
</div>
<video
src={resolved_src}
{muted}
{playsinline}
{preload}
{autoplay}
{controls}
{loop}
on:loadeddata={dispatch.bind(null, "loadeddata")}
on:click={dispatch.bind(null, "click")}
on:play={dispatch.bind(null, "play")}
on:pause={dispatch.bind(null, "pause")}
on:ended={dispatch.bind(null, "ended")}
on:mouseover={dispatch.bind(null, "mouseover")}
on:mouseout={dispatch.bind(null, "mouseout")}
on:focus={dispatch.bind(null, "focus")}
on:blur={dispatch.bind(null, "blur")}
on:load
bind:currentTime
bind:duration
bind:paused
bind:this={node}
use:loaded={{ autoplay: autoplay ?? false }}
data-testid={$$props["data-testid"]}
crossorigin="anonymous"
>
<slot />
</video>
<style>
.overlay {
position: absolute;
background-color: rgba(0, 0, 0, 0.4);
width: 100%;
height: 100%;
}
.hidden {
display: none;
}
.load-wrap {
display: flex;
justify-content: center;
align-items: center;
height: 100%;
}
.loader {
display: flex;
position: relative;
background-color: var(--border-color-accent-subdued);
animation: shadowPulse 2s linear infinite;
box-shadow:
-24px 0 var(--border-color-accent-subdued),
24px 0 var(--border-color-accent-subdued);
margin: var(--spacing-md);
border-radius: 50%;
width: 10px;
height: 10px;
scale: 0.5;
}
@keyframes shadowPulse {
33% {
box-shadow:
-24px 0 var(--border-color-accent-subdued),
24px 0 #fff;
background: #fff;
}
66% {
box-shadow:
-24px 0 #fff,
24px 0 #fff;
background: var(--border-color-accent-subdued);
}
100% {
box-shadow:
-24px 0 #fff,
24px 0 var(--border-color-accent-subdued);
background: #fff;
}
}
</style>

View File

@@ -1,202 +0,0 @@
<script lang="ts">
import { Undo, Trim, Clear } from "@gradio/icons";
import VideoTimeline from "./VideoTimeline.svelte";
import { trimVideo } from "./utils";
import { FFmpeg } from "@ffmpeg/ffmpeg";
import loadFfmpeg from "./utils";
import { onMount } from "svelte";
import { format_time } from "@gradio/utils";
import { IconButton } from "@gradio/atoms";
import { ModifyUpload } from "@gradio/upload";
import type { FileData } from "@gradio/client";
export let videoElement: HTMLVideoElement;
export let showRedo = false;
export let interactive = true;
export let mode = "";
export let handle_reset_value: () => void;
export let handle_trim_video: (videoBlob: Blob) => void;
export let processingVideo = false;
export let i18n: (key: string) => string;
export let value: FileData | null = null;
export let show_download_button = false;
export let handle_clear: () => void = () => {};
export let has_change_history = false;
let ffmpeg: FFmpeg;
onMount(async () => {
ffmpeg = await loadFfmpeg();
});
$: if (mode === "edit" && trimmedDuration === null && videoElement)
trimmedDuration = videoElement.duration;
let trimmedDuration: number | null = null;
let dragStart = 0;
let dragEnd = 0;
let loadingTimeline = false;
/** Enter/exit trim mode; leaving edit mode resets the displayed duration. */
const toggleTrimmingMode = (): void => {
	const leaving_edit = mode === "edit";
	mode = leaving_edit ? "" : "edit";
	if (leaving_edit) {
		trimmedDuration = videoElement.duration;
	}
};
</script>
<div class="container" class:hidden={mode !== "edit"}>
{#if mode === "edit"}
<div class="timeline-wrapper">
<VideoTimeline
{videoElement}
bind:dragStart
bind:dragEnd
bind:trimmedDuration
bind:loadingTimeline
/>
</div>
{/if}
<div class="controls" data-testid="waveform-controls">
{#if mode === "edit" && trimmedDuration !== null}
<time
aria-label="duration of selected region in seconds"
class:hidden={loadingTimeline}>{format_time(trimmedDuration)}</time
>
<div class="edit-buttons">
<button
class:hidden={loadingTimeline}
class="text-button"
on:click={() => {
mode = "";
processingVideo = true;
trimVideo(ffmpeg, dragStart, dragEnd, videoElement)
.then((videoBlob) => {
handle_trim_video(videoBlob);
})
.then(() => {
processingVideo = false;
});
}}>Trim</button
>
<button
class="text-button"
class:hidden={loadingTimeline}
on:click={toggleTrimmingMode}>Cancel</button
>
</div>
{:else}
<div />
{/if}
</div>
</div>
<ModifyUpload
{i18n}
on:clear={() => handle_clear()}
download={show_download_button ? value?.url : null}
>
{#if showRedo && mode === ""}
<IconButton
Icon={Undo}
label="Reset video to initial value"
disabled={processingVideo || !has_change_history}
on:click={() => {
handle_reset_value();
mode = "";
}}
/>
{/if}
{#if interactive && mode === ""}
<IconButton
Icon={Trim}
label="Trim video to selection"
disabled={processingVideo}
on:click={toggleTrimmingMode}
/>
{/if}
</ModifyUpload>
<style>
.container {
width: 100%;
}
time {
color: var(--color-accent);
font-weight: bold;
padding-left: var(--spacing-xs);
}
.timeline-wrapper {
display: flex;
align-items: center;
justify-content: center;
width: 100%;
}
.text-button {
border: 1px solid var(--neutral-400);
border-radius: var(--radius-sm);
font-weight: 300;
font-size: var(--size-3);
text-align: center;
color: var(--neutral-400);
height: var(--size-5);
font-weight: bold;
padding: 0 5px;
margin-left: 5px;
}
.text-button:hover,
.text-button:focus {
color: var(--color-accent);
border-color: var(--color-accent);
}
.controls {
display: flex;
justify-content: space-between;
align-items: center;
margin: var(--spacing-lg);
overflow: hidden;
}
.edit-buttons {
display: flex;
gap: var(--spacing-sm);
}
@media (max-width: 320px) {
.controls {
flex-direction: column;
align-items: flex-start;
}
.edit-buttons {
margin-top: var(--spacing-sm);
}
.controls * {
margin: var(--spacing-sm);
}
.controls .text-button {
margin-left: 0;
}
}
.container {
display: flex;
flex-direction: column;
}
.hidden {
display: none;
}
</style>

View File

@@ -1,108 +0,0 @@
<script lang="ts">
import { createEventDispatcher, afterUpdate, tick } from "svelte";
import {
BlockLabel,
Empty,
IconButton,
ShareButton,
IconButtonWrapper
} from "@gradio/atoms";
import type { FileData, Client } from "@gradio/client";
import { Video, Download } from "@gradio/icons";
import { DownloadLink } from "@gradio/wasm/svelte";
import Player from "./Player.svelte";
import type { I18nFormatter } from "js/core/src/gradio_helper";
export let value: FileData | null = null;
export let subtitle: FileData | null = null;
export let label: string | undefined = undefined;
export let show_label = true;
export let autoplay: boolean;
export let show_share_button = true;
export let show_download_button = true;
export let loop: boolean;
export let i18n: I18nFormatter;
export let upload: Client["upload"];
let old_value: FileData | null = null;
let old_subtitle: FileData | null = null;
const dispatch = createEventDispatcher<{
change: FileData;
play: undefined;
pause: undefined;
end: undefined;
stop: undefined;
}>();
$: value && dispatch("change", value);
afterUpdate(async () => {
// needed to bust subtitle caching issues on Chrome
if (
value !== old_value &&
subtitle !== old_subtitle &&
old_subtitle !== null
) {
old_value = value;
value = null;
await tick();
value = old_value;
}
old_value = value;
old_subtitle = subtitle;
});
</script>
<BlockLabel {show_label} Icon={Video} label={label || "Video"} />
{#if !value || value.url === undefined}
<Empty unpadded_box={true} size="large"><Video /></Empty>
{:else}
{#key value.url}
<Player
src={value.url}
subtitle={subtitle?.url}
is_stream={value.is_stream}
{autoplay}
on:play
on:pause
on:stop
on:end
on:load
mirror={false}
{label}
{loop}
interactive={false}
{upload}
{i18n}
/>
{/key}
<div data-testid="download-div">
<IconButtonWrapper>
{#if show_download_button}
<DownloadLink
href={value.is_stream
? value.url?.replace("playlist.m3u8", "playlist-file")
: value.url}
download={value.orig_name || value.path}
>
<IconButton Icon={Download} label="Download" />
</DownloadLink>
{/if}
{#if show_share_button}
<ShareButton
{i18n}
on:error
on:share
{value}
formatter={async (value) => {
if (!value) return "";
let url = await uploadToHuggingFace(value.data, "url");
return url;
}}
/>
{/if}
</IconButtonWrapper>
</div>
{/if}

View File

@@ -1,279 +0,0 @@
<script lang="ts">
import { onMount, onDestroy } from "svelte";
export let videoElement: HTMLVideoElement;
export let trimmedDuration: number | null;
export let dragStart: number;
export let dragEnd: number;
export let loadingTimeline: boolean;
let thumbnails: string[] = [];
let numberOfThumbnails = 10;
let intervalId: ReturnType<typeof setInterval> | undefined;
let videoDuration: number;
let leftHandlePosition = 0;
let rightHandlePosition = 100;
let dragging: string | null = null;
// Track which trim handle ("left" or "right") is being dragged; null = none.
const startDragging = (side: string | null): void => {
	dragging = side;
};
// The timeline shows a loader until every thumbnail has been captured.
$: loadingTimeline = thumbnails.length !== numberOfThumbnails;
const stopDragging = (): void => {
	dragging = null;
};
// Move the active trim handle, from either a mouse drag (event.clientX) or a
// keyboard nudge (`distance`, as a percentage of the timeline width).
// Updates the handle position, seeks the video to the new boundary so the
// user gets a preview frame, and recomputes the trimmed duration.
// Fix: the original computed `newPercentage` from event.clientX at
// declaration and then unconditionally overwrote it in both branches — dead
// work that was also meaningless on the keyboard path (synthetic clientX 0).
const drag = (event: { clientX: number }, distance?: number): void => {
	if (!dragging) return;
	const timeline = document.getElementById("timeline");
	if (!timeline) return;
	let newPercentage: number;
	if (distance) {
		// Keyboard: nudge the active handle by a fixed percentage.
		newPercentage =
			dragging === "left"
				? leftHandlePosition + distance
				: rightHandlePosition + distance;
	} else {
		// Mouse: place the handle under the pointer.
		const rect = timeline.getBoundingClientRect();
		newPercentage = ((event.clientX - rect.left) / rect.width) * 100;
	}
	newPercentage = Math.max(0, Math.min(newPercentage, 100)); // Keep within 0 and 100
	if (dragging === "left") {
		// The left handle may not cross the right handle.
		leftHandlePosition = Math.min(newPercentage, rightHandlePosition);
		const newTimeLeft = (leftHandlePosition / 100) * videoDuration;
		videoElement.currentTime = newTimeLeft;
		dragStart = newTimeLeft;
	} else if (dragging === "right") {
		// The right handle may not cross the left handle.
		rightHandlePosition = Math.max(newPercentage, leftHandlePosition);
		const newTimeRight = (rightHandlePosition / 100) * videoDuration;
		videoElement.currentTime = newTimeRight;
		dragEnd = newTimeRight;
	}
	const startTime = (leftHandlePosition / 100) * videoDuration;
	const endTime = (rightHandlePosition / 100) * videoDuration;
	trimmedDuration = endTime - startTime;
	// Self-assignments force Svelte to re-run dependents of both handles even
	// on the branch where one of them was not reassigned above.
	leftHandlePosition = leftHandlePosition;
	rightHandlePosition = rightHandlePosition;
};
/** Nudge the active trim handle with the arrow keys. */
const moveHandle = (e: KeyboardEvent): void => {
	if (!dragging) return;
	// One time unit of the video, expressed as a percentage of its duration.
	const step = (1 / videoDuration) * 100;
	if (e.key === "ArrowLeft") {
		drag({ clientX: 0 }, -step);
	} else if (e.key === "ArrowRight") {
		drag({ clientX: 0 }, step);
	}
};
// Capture the video's current frame as a JPEG data URL and append it to
// `thumbnails` (array reassignment keeps Svelte reactivity working).
const generateThumbnail = (): void => {
	const canvas = document.createElement("canvas");
	const ctx = canvas.getContext("2d");
	if (!ctx) return;
	canvas.width = videoElement.videoWidth;
	canvas.height = videoElement.videoHeight;
	ctx.drawImage(videoElement, 0, 0, canvas.width, canvas.height);
	// 0.7 JPEG quality keeps the thumbnail strip lightweight.
	const thumbnail: string = canvas.toDataURL("image/jpeg", 0.7);
	thumbnails = [...thumbnails, thumbnail];
};
// Once video metadata is available, seek through the video at evenly spaced
// points, capturing one thumbnail per "seeked" event until
// `numberOfThumbnails` frames have been collected.
onMount(() => {
	const loadMetadata = (): void => {
		videoDuration = videoElement.duration;
		const interval = videoDuration / numberOfThumbnails;
		let captures = 0;
		const onSeeked = (): void => {
			generateThumbnail();
			captures++;
			if (captures < numberOfThumbnails) {
				// Advance to the next sample point; this fires another "seeked".
				videoElement.currentTime += interval;
			} else {
				videoElement.removeEventListener("seeked", onSeeked);
			}
		};
		videoElement.addEventListener("seeked", onSeeked);
		// Kick off the capture chain from the first frame.
		videoElement.currentTime = 0;
	};
	// readyState >= 1 (HAVE_METADATA) means duration is already known.
	if (videoElement.readyState >= 1) {
		loadMetadata();
	} else {
		videoElement.addEventListener("loadedmetadata", loadMetadata);
	}
});
onDestroy(() => {
window.removeEventListener("mousemove", drag);
window.removeEventListener("mouseup", stopDragging);
window.removeEventListener("keydown", moveHandle);
if (intervalId !== undefined) {
clearInterval(intervalId);
}
});
onMount(() => {
window.addEventListener("mousemove", drag);
window.addEventListener("mouseup", stopDragging);
window.addEventListener("keydown", moveHandle);
});
</script>
<div class="container">
{#if loadingTimeline}
<div class="load-wrap">
<span aria-label="loading timeline" class="loader" />
</div>
{:else}
<div id="timeline" class="thumbnail-wrapper">
<button
aria-label="start drag handle for trimming video"
class="handle left"
on:mousedown={() => startDragging("left")}
on:blur={stopDragging}
on:keydown={(e) => {
if (e.key === "ArrowLeft" || e.key == "ArrowRight") {
startDragging("left");
}
}}
style="left: {leftHandlePosition}%;"
/>
<div
class="opaque-layer"
style="left: {leftHandlePosition}%; right: {100 - rightHandlePosition}%"
/>
{#each thumbnails as thumbnail, i (i)}
<img src={thumbnail} alt={`frame-${i}`} draggable="false" />
{/each}
<button
aria-label="end drag handle for trimming video"
class="handle right"
on:mousedown={() => startDragging("right")}
on:blur={stopDragging}
on:keydown={(e) => {
if (e.key === "ArrowLeft" || e.key == "ArrowRight") {
startDragging("right");
}
}}
style="left: {rightHandlePosition}%;"
/>
</div>
{/if}
</div>
<style>
.load-wrap {
display: flex;
justify-content: center;
align-items: center;
height: 100%;
}
.loader {
display: flex;
position: relative;
background-color: var(--border-color-accent-subdued);
animation: shadowPulse 2s linear infinite;
box-shadow:
-24px 0 var(--border-color-accent-subdued),
24px 0 var(--border-color-accent-subdued);
margin: var(--spacing-md);
border-radius: 50%;
width: 10px;
height: 10px;
scale: 0.5;
}
@keyframes shadowPulse {
33% {
box-shadow:
-24px 0 var(--border-color-accent-subdued),
24px 0 #fff;
background: #fff;
}
66% {
box-shadow:
-24px 0 #fff,
24px 0 #fff;
background: var(--border-color-accent-subdued);
}
100% {
box-shadow:
-24px 0 #fff,
24px 0 var(--border-color-accent-subdued);
background: #fff;
}
}
.container {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
margin: var(--spacing-lg) var(--spacing-lg) 0 var(--spacing-lg);
}
#timeline {
display: flex;
height: var(--size-10);
flex: 1;
position: relative;
}
img {
flex: 1 1 auto;
min-width: 0;
object-fit: cover;
height: var(--size-12);
border: 1px solid var(--block-border-color);
user-select: none;
z-index: 1;
}
.handle {
width: 3px;
background-color: var(--color-accent);
cursor: ew-resize;
height: var(--size-12);
z-index: 3;
position: absolute;
}
.opaque-layer {
background-color: rgba(230, 103, 40, 0.25);
border: 1px solid var(--color-accent);
height: var(--size-12);
position: absolute;
z-index: 2;
}
</style>

View File

@@ -43,6 +43,7 @@
};
let canvas: HTMLCanvasElement;
export let rtc_configuration: Object;
export let pending = false;
export let root = "";
export let stream_every = 1;
@@ -121,7 +122,16 @@
async function start_webrtc(): Promise<void> {
if (stream_state === 'closed') {
pc = new RTCPeerConnection();
const fallback_config = {
iceServers: [
{
urls: 'stun:stun.l.google.com:19302'
}
]
};
const configuration = rtc_configuration || fallback_config;
console.log("config", configuration);
pc = new RTCPeerConnection(configuration);
pc.addEventListener("connectionstatechange",
(event) => {
switch(pc.connectionState) {

View File

@@ -16,7 +16,7 @@ requires-python = ">=3.10"
authors = [{ name = "YOUR NAME", email = "YOUREMAIL@domain.com" }]
keywords = ["gradio-custom-component", "gradio-template-Video", "streaming", "webrtc", "realtime"]
# Add dependencies here
dependencies = ["gradio>=4.0,<5.0", "aiortc"]
dependencies = ["gradio>=4.0,<6.0", "aiortc"]
classifiers = [
'Development Status :: 3 - Alpha',
'Operating System :: OS Independent',