from fastapi import FastAPI
from fastapi.responses import RedirectResponse
from fastrtc import Stream
from gradio.utils import get_space

# Import the voice handler and Gradio UI; fall back to local imports so the
# file also runs when executed directly from its own directory.
try:
    from demo.llama_code_editor.handler import (
        CodeHandler,
    )
    from demo.llama_code_editor.ui import demo as ui
except (ImportError, ModuleNotFoundError):
    from handler import CodeHandler
    from ui import demo as ui


# Bidirectional audio stream; concurrency and time limits apply only on Spaces.
stream = Stream(
    handler=CodeHandler,
    modality="audio",
    mode="send-receive",
    concurrency_limit=10 if get_space() else None,
    time_limit=90 if get_space() else None,
)

# Use the demo's custom Gradio UI for this stream.
stream.ui = ui

app = FastAPI()


@app.get("/")
async def _():
    # Redirect the root path to the UI (the hosted Space URL when deployed).
    url = "/ui" if not get_space() else "https://fastrtc-llama-code-editor.hf.space/ui/"
    return RedirectResponse(url)


if __name__ == "__main__":
    import os

    # MODE selects the entry point: the Gradio UI, the phone interface,
    # or the FastAPI app served by uvicorn.
    if (mode := os.getenv("MODE")) == "UI":
        stream.ui.launch(server_port=7860, server_name="0.0.0.0")
    elif mode == "PHONE":
        stream.fastphone(host="0.0.0.0", port=7860)
    else:
        import uvicorn

        uvicorn.run(app, host="0.0.0.0", port=7860)
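
Usage sketch (hedged: the filename app.py is an assumption, since the file view does not show it). The MODE environment variable picks how the demo is served, matching the branches in the __main__ block above:

    MODE=UI python app.py      # Gradio UI only, on 0.0.0.0:7860
    MODE=PHONE python app.py   # serve the stream via fastphone on port 7860
    python app.py              # default: run the FastAPI app with uvicorn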