From 6905810f3786e12b55890a7466ddc4b57d0c49cc Mon Sep 17 00:00:00 2001 From: Rohan Richard <62638352+rohanprichard@users.noreply.github.com> Date: Sat, 8 Mar 2025 00:54:23 +0530 Subject: [PATCH] Adding nextjs + 11labs + openai streaming demo (#139) * adding nextjs + 11labs + openai streaming demo * removing package-lock --- demo/nextjs_voice_chat/README.md | 74 +++++ demo/nextjs_voice_chat/backend/env.py | 7 + demo/nextjs_voice_chat/backend/server.py | 133 ++++++++ .../frontend/fastrtc-demo/.gitignore | 41 +++ .../frontend/fastrtc-demo/README.md | 36 ++ .../frontend/fastrtc-demo/app/favicon.ico | Bin 0 -> 25931 bytes .../frontend/fastrtc-demo/app/globals.css | 130 ++++++++ .../frontend/fastrtc-demo/app/layout.tsx | 44 +++ .../frontend/fastrtc-demo/app/page.tsx | 16 + .../frontend/fastrtc-demo/components.json | 21 ++ .../components/background-circle-provider.tsx | 123 +++++++ .../components/theme-provider.tsx | 101 ++++++ .../components/ui/ai-voice-input.tsx | 114 +++++++ .../components/ui/background-circles.tsx | 309 ++++++++++++++++++ .../fastrtc-demo/components/ui/reset-chat.tsx | 18 + .../components/ui/theme-toggle.tsx | 61 ++++ .../components/ui/theme-transition.tsx | 120 +++++++ .../frontend/fastrtc-demo/eslint.config.mjs | 28 ++ .../frontend/fastrtc-demo/lib/utils.ts | 6 + .../fastrtc-demo/lib/webrtc-client.ts | 189 +++++++++++ .../frontend/fastrtc-demo/next.config.ts | 7 + .../frontend/fastrtc-demo/package.json | 33 ++ .../frontend/fastrtc-demo/postcss.config.mjs | 5 + .../frontend/fastrtc-demo/public/file.svg | 1 + .../frontend/fastrtc-demo/public/globe.svg | 1 + .../frontend/fastrtc-demo/public/next.svg | 1 + .../frontend/fastrtc-demo/public/vercel.svg | 1 + .../frontend/fastrtc-demo/public/window.svg | 1 + .../frontend/fastrtc-demo/tsconfig.json | 27 ++ demo/nextjs_voice_chat/requirements.txt | 5 + demo/nextjs_voice_chat/run.sh | 1 + 31 files changed, 1654 insertions(+) create mode 100644 demo/nextjs_voice_chat/README.md create mode 100644 demo/nextjs_voice_chat/backend/env.py create mode 100644 demo/nextjs_voice_chat/backend/server.py create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/.gitignore create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/README.md create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/app/favicon.ico create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/app/globals.css create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/app/layout.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/app/page.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/components.json create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/components/background-circle-provider.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/components/theme-provider.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/ai-voice-input.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/background-circles.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/reset-chat.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/theme-toggle.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/theme-transition.tsx create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/eslint.config.mjs create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/lib/utils.ts create mode 100644 
demo/nextjs_voice_chat/frontend/fastrtc-demo/lib/webrtc-client.ts create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/next.config.ts create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/package.json create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/postcss.config.mjs create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/public/file.svg create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/public/globe.svg create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/public/next.svg create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/public/vercel.svg create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/public/window.svg create mode 100644 demo/nextjs_voice_chat/frontend/fastrtc-demo/tsconfig.json create mode 100644 demo/nextjs_voice_chat/requirements.txt create mode 100755 demo/nextjs_voice_chat/run.sh diff --git a/demo/nextjs_voice_chat/README.md b/demo/nextjs_voice_chat/README.md new file mode 100644 index 0000000..3e3dce5 --- /dev/null +++ b/demo/nextjs_voice_chat/README.md @@ -0,0 +1,74 @@ +# FastRTC POC +A simple POC for a fast real-time voice chat application using FastAPI and FastRTC by [rohanprichard](https://github.com/rohanprichard). I wanted to make one as an example with more production-ready languages, rather than just Gradio. + +## Setup +1. Set your API keys in an `.env` file based on the `.env.example` file +2. Create a virtual environment and install the dependencies + ```bash + python3 -m venv env + source env/bin/activate + pip install -r requirements.txt + ``` + +3. Run the server + ```bash + ./run.sh + ``` +4. Navigate into the frontend directory in another terminal + ```bash + cd frontend/fastrtc-demo + ``` +5. Run the frontend + ```bash + npm install + npm run dev + ``` +6. Go to the URL and click the microphone icon to start chatting! + +7. Reset chats by clicking the trash button on the bottom right + +## Notes +You can choose to not install the requirements for TTS and STT by removing the `[tts, stt]` from the specifier in the `requirements.txt` file. + +- The STT is currently using the ElevenLabs API. +- The LLM is currently using the OpenAI API. +- The TTS is currently using the ElevenLabs API. +- The VAD is currently using the Silero VAD model. +- You may need to install ffmpeg if you get errors in STT + +The prompt can be changed in the `backend/server.py` file and modified as you like. + +### Audio Parameters + +#### AlgoOptions + +- **audio_chunk_duration**: Length of audio chunks in seconds. Smaller values allow for faster processing but may be less accurate. +- **started_talking_threshold**: If a chunk has more than this many seconds of speech, the system considers that the user has started talking. +- **speech_threshold**: After the user has started speaking, if a chunk has less than this many seconds of speech, the system considers that the user has paused. + +#### SileroVadOptions + +- **threshold**: Speech probability threshold (0.0-1.0). Values above this are considered speech. Higher values are more strict. +- **min_speech_duration_ms**: Speech segments shorter than this (in milliseconds) are filtered out. +- **min_silence_duration_ms**: The system waits for this duration of silence (in milliseconds) before considering speech to be finished. +- **speech_pad_ms**: Padding added to both ends of detected speech segments to prevent cutting off words. +- **max_speech_duration_s**: Maximum allowed duration for a speech segment in seconds. Prevents indefinite listening. 
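+
+These options are wired into `ReplyOnPause` in `backend/server.py`. A minimal sketch of that wiring (same values as the demo; a trivial echo handler stands in for the STT -> LLM -> TTS pipeline) looks like this:
+
+```python
+from fastrtc import AlgoOptions, ReplyOnPause, SileroVadOptions, Stream
+
+
+def echo(audio):
+    # Placeholder handler: the real one in backend/server.py runs STT -> LLM -> TTS.
+    # ReplyOnPause calls it once a pause is detected; it yields (sample_rate, array) chunks.
+    yield audio
+
+
+stream = Stream(
+    ReplyOnPause(
+        echo,
+        algo_options=AlgoOptions(
+            audio_chunk_duration=0.5,       # seconds of audio per analysis chunk
+            started_talking_threshold=0.1,  # seconds of speech in a chunk to count as "started talking"
+            speech_threshold=0.03,          # chunks with less speech than this count as a pause
+        ),
+        model_options=SileroVadOptions(
+            threshold=0.75,                 # speech probability cutoff for the Silero VAD
+            min_speech_duration_ms=250,
+            min_silence_duration_ms=1500,   # silence required before a reply is triggered
+            speech_pad_ms=400,
+            max_speech_duration_s=15,
+        ),
+    ),
+    modality="audio",
+    mode="send-receive",
+)
+```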
+ +### Tuning Recommendations + +- If the AI interrupts you too early: + - Increase `min_silence_duration_ms` + - Increase `speech_threshold` + - Increase `speech_pad_ms` + +- If the AI is slow to respond after you finish speaking: + - Decrease `min_silence_duration_ms` + - Decrease `speech_threshold` + +- If the system fails to detect some speech: + - Lower the `threshold` value + - Decrease `started_talking_threshold` + + +## Credits +Credit for the UI components goes to Shadcn, Aceternity UI and Kokonut UI. diff --git a/demo/nextjs_voice_chat/backend/env.py b/demo/nextjs_voice_chat/backend/env.py new file mode 100644 index 0000000..f678f64 --- /dev/null +++ b/demo/nextjs_voice_chat/backend/env.py @@ -0,0 +1,7 @@ +from dotenv import load_dotenv +import os + +load_dotenv() + +LLM_API_KEY = os.getenv("LLM_API_KEY") +ELEVENLABS_API_KEY = os.getenv("ELEVENLABS_API_KEY") diff --git a/demo/nextjs_voice_chat/backend/server.py b/demo/nextjs_voice_chat/backend/server.py new file mode 100644 index 0000000..ef3b687 --- /dev/null +++ b/demo/nextjs_voice_chat/backend/server.py @@ -0,0 +1,133 @@ +import fastapi +from fastapi.responses import FileResponse +from fastrtc import ReplyOnPause, Stream, AlgoOptions, SileroVadOptions +from fastrtc.utils import audio_to_bytes +from openai import OpenAI +import logging +import time +from fastapi.middleware.cors import CORSMiddleware +from elevenlabs import VoiceSettings, stream +from elevenlabs.client import ElevenLabs +import numpy as np +import io + +from .env import LLM_API_KEY, ELEVENLABS_API_KEY + + +sys_prompt = """ +You are a helpful assistant. You are witty, engaging and fun. You love being interactive with the user. +You can also add minimalistic utterances like 'uh-huh' or 'mm-hmm' to the conversation to make it more natural. However, only vocalizations are allowed, no actions or other non-vocal sounds. +Begin the conversation with a self-deprecating joke like 'I'm not sure if I'm ready for this...' or 'I bet you already regret clicking that button...'
+""" + +messages = [{"role": "system", "content": sys_prompt}] + +openai_client = OpenAI( + api_key=LLM_API_KEY +) + +elevenlabs_client = ElevenLabs(api_key=ELEVENLABS_API_KEY) + +logging.basicConfig(level=logging.INFO) + +def echo(audio): + + stt_time = time.time() + + logging.info("Performing STT") + + transcription = elevenlabs_client.speech_to_text.convert( + file=audio_to_bytes(audio), + model_id="scribe_v1", + tag_audio_events=False, + language_code="eng", + diarize=False, + ) + prompt = transcription.text + if prompt == "": + logging.info("STT returned empty string") + return + logging.info(f"STT response: {prompt}") + + messages.append({"role": "user", "content": prompt}) + + logging.info(f"STT took {time.time() - stt_time} seconds") + + llm_time = time.time() + + def text_stream(): + global full_response + full_response = "" + + response = openai_client.chat.completions.create( + model="gpt-3.5-turbo", + messages=messages, + max_tokens=200, + stream=True + ) + + for chunk in response: + if chunk.choices[0].finish_reason == "stop": + break + if chunk.choices[0].delta.content: + full_response += chunk.choices[0].delta.content + yield chunk.choices[0].delta.content + + audio_stream = elevenlabs_client.generate( + text=text_stream(), + voice="Rachel", # Cassidy is also really good + voice_settings=VoiceSettings( + similarity_boost=0.9, + stability=0.6, + style=0.4, + speed=1 + ), + model="eleven_multilingual_v2", + output_format="pcm_24000", + stream=True + ) + + for audio_chunk in audio_stream: + audio_array = np.frombuffer(audio_chunk, dtype=np.int16).astype(np.float32) / 32768.0 + yield (24000, audio_array) + + messages.append({"role": "assistant", "content": full_response + " "}) + logging.info(f"LLM response: {full_response}") + logging.info(f"LLM took {time.time() - llm_time} seconds") + + +stream = Stream(ReplyOnPause(echo, + algo_options=AlgoOptions( + audio_chunk_duration=0.5, + started_talking_threshold=0.1, + speech_threshold=0.03 + ), + model_options=SileroVadOptions( + threshold=0.75, + min_speech_duration_ms=250, + min_silence_duration_ms=1500, + speech_pad_ms=400, + max_speech_duration_s=15 + )), + modality="audio", + mode="send-receive" + ) + +app = fastapi.FastAPI() + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +stream.mount(app) + +@app.get("/reset") +async def reset(): + global messages + logging.info("Resetting chat") + messages = [{"role": "system", "content": sys_prompt}] + return {"status": "success"} \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/.gitignore b/demo/nextjs_voice_chat/frontend/fastrtc-demo/.gitignore new file mode 100644 index 0000000..5ef6a52 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/.gitignore @@ -0,0 +1,41 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+ +# dependencies +/node_modules +/.pnp +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/versions + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# env files (can opt-in for committing if needed) +.env* + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/README.md b/demo/nextjs_voice_chat/frontend/fastrtc-demo/README.md new file mode 100644 index 0000000..e215bc4 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/README.md @@ -0,0 +1,36 @@ +This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app). + +## Getting Started + +First, run the development server: + +```bash +npm run dev +# or +yarn dev +# or +pnpm dev +# or +bun dev +``` + +Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. + +You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. + +This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel. + +## Learn More + +To learn more about Next.js, take a look at the following resources: + +- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. +- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. + +You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome! + +## Deploy on Vercel + +The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. + +Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details. 
diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/favicon.ico b/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..718d6fea4835ec2d246af9800eddb7ffb276240c GIT binary patch literal 25931 zcmeHv30#a{`}aL_*G&7qml|y<+KVaDM2m#dVr!KsA!#An?kSQM(q<_dDNCpjEux83 zLb9Z^XxbDl(w>%i@8hT6>)&Gu{h#Oeyszu?xtw#Zb1mO{pgX9699l+Qppw7jXaYf~-84xW z)w4x8?=youko|}Vr~(D$UXIbiXABHh`p1?nn8Po~fxRJv}|0e(BPs|G`(TT%kKVJAdg5*Z|x0leQq0 zkdUBvb#>9F()jo|T~kx@OM8$9wzs~t2l;K=woNssA3l6|sx2r3+kdfVW@e^8e*E}v zA1y5{bRi+3Z`uD3{F7LgFJDdvm;nJilkzDku>BwXH(8ItVCXk*-lSJnR?-2UN%hJ){&rlvg`CDTj z)Bzo!3v7Ou#83zEDEFcKt(f1E0~=rqeEbTnMvWR#{+9pg%7G8y>u1OVRUSoox-ovF z2Ydma(;=YuBY(eI|04{hXzZD6_f(v~H;C~y5=DhAC{MMS>2fm~1H_t2$56pc$NH8( z5bH|<)71dV-_oCHIrzrT`2s-5w_+2CM0$95I6X8p^r!gHp+j_gd;9O<1~CEQQGS8) zS9Qh3#p&JM-G8rHekNmKVewU;pJRcTAog68KYo^dRo}(M>36U4Us zfgYWSiHZL3;lpWT=zNAW>Dh#mB!_@Lg%$ms8N-;aPqMn+C2HqZgz&9~Eu z4|Kp<`$q)Uw1R?y(~S>ePdonHxpV1#eSP1B;Ogo+-Pk}6#0GsZZ5!||ev2MGdh}_m z{DeR7?0-1^zVs&`AV6Vt;r3`I`OI_wgs*w=eO%_#7Kepl{B@xiyCANc(l zzIyd4y|c6PXWq9-|KM8(zIk8LPk(>a)zyFWjhT!$HJ$qX1vo@d25W<fvZQ2zUz5WRc(UnFMKHwe1| zWmlB1qdbiA(C0jmnV<}GfbKtmcu^2*P^O?MBLZKt|As~ge8&AAO~2K@zbXelK|4T<{|y4`raF{=72kC2Kn(L4YyenWgrPiv z@^mr$t{#X5VuIMeL!7Ab6_kG$&#&5p*Z{+?5U|TZ`B!7llpVmp@skYz&n^8QfPJzL z0G6K_OJM9x+Wu2gfN45phANGt{7=C>i34CV{Xqlx(fWpeAoj^N0Biu`w+MVcCUyU* zDZuzO0>4Z6fbu^T_arWW5n!E45vX8N=bxTVeFoep_G#VmNlQzAI_KTIc{6>c+04vr zx@W}zE5JNSU>!THJ{J=cqjz+4{L4A{Ob9$ZJ*S1?Ggg3klFp!+Y1@K+pK1DqI|_gq z5ZDXVpge8-cs!o|;K73#YXZ3AShj50wBvuq3NTOZ`M&qtjj#GOFfgExjg8Gn8>Vq5 z`85n+9|!iLCZF5$HJ$Iu($dm?8~-ofu}tEc+-pyke=3!im#6pk_Wo8IA|fJwD&~~F zc16osQ)EBo58U7XDuMexaPRjU@h8tXe%S{fA0NH3vGJFhuyyO!Uyl2^&EOpX{9As0 zWj+P>{@}jxH)8|r;2HdupP!vie{sJ28b&bo!8`D^x}TE$%zXNb^X1p@0PJ86`dZyj z%ce7*{^oo+6%&~I!8hQy-vQ7E)0t0ybH4l%KltWOo~8cO`T=157JqL(oq_rC%ea&4 z2NcTJe-HgFjNg-gZ$6!Y`SMHrlj}Etf7?r!zQTPPSv}{so2e>Fjs1{gzk~LGeesX%r(Lh6rbhSo_n)@@G-FTQy93;l#E)hgP@d_SGvyCp0~o(Y;Ee8{ zdVUDbHm5`2taPUOY^MAGOw*>=s7=Gst=D+p+2yON!0%Hk` zz5mAhyT4lS*T3LS^WSxUy86q&GnoHxzQ6vm8)VS}_zuqG?+3td68_x;etQAdu@sc6 zQJ&5|4(I?~3d-QOAODHpZ=hlSg(lBZ!JZWCtHHSj`0Wh93-Uk)_S%zsJ~aD>{`A0~ z9{AG(e|q3g5B%wYKRxiL2Y$8(4w6bzchKuloQW#e&S3n+P- z8!ds-%f;TJ1>)v)##>gd{PdS2Oc3VaR`fr=`O8QIO(6(N!A?pr5C#6fc~Ge@N%Vvu zaoAX2&(a6eWy_q&UwOhU)|P3J0Qc%OdhzW=F4D|pt0E4osw;%<%Dn58hAWD^XnZD= z>9~H(3bmLtxpF?a7su6J7M*x1By7YSUbxGi)Ot0P77`}P3{)&5Un{KD?`-e?r21!4vTTnN(4Y6Lin?UkSM z`MXCTC1@4A4~mvz%Rh2&EwY))LeoT=*`tMoqcEXI>TZU9WTP#l?uFv+@Dn~b(>xh2 z;>B?;Tz2SR&KVb>vGiBSB`@U7VIWFSo=LDSb9F{GF^DbmWAfpms8Sx9OX4CnBJca3 zlj9(x!dIjN?OG1X4l*imJNvRCk}F%!?SOfiOq5y^mZW)jFL@a|r-@d#f7 z2gmU8L3IZq0ynIws=}~m^#@&C%J6QFo~Mo4V`>v7MI-_!EBMMtb%_M&kvAaN)@ZVw z+`toz&WG#HkWDjnZE!6nk{e-oFdL^$YnbOCN}JC&{$#$O27@|Tn-skXr)2ml2~O!5 zX+gYoxhoc7qoU?C^3~&!U?kRFtnSEecWuH0B0OvLodgUAi}8p1 zrO6RSXHH}DMc$&|?D004DiOVMHV8kXCP@7NKB zgaZq^^O<7PoKEp72kby@W0Z!Y*Ay{&vfg#C&gG@YVR9g?FEocMUi1gSN$+V+ayF45{a zuDZDTN}mS|;BO%gEf}pjBfN2-gIrU#G5~cucA;dokXW89%>AyXJJI z9X4UlIWA|ZYHgbI z5?oFk@A=Ik7lrEQPDH!H+b`7_Y~aDb_qa=B2^Y&Ow41cU=4WDd40dp5(QS-WMN-=Y z9g;6_-JdNU;|6cPwf$ak*aJIcwL@1n$#l~zi{c{EW?T;DaW*E8DYq?Umtz{nJ&w-M zEMyTDrC&9K$d|kZe2#ws6)L=7K+{ zQw{XnV6UC$6-rW0emqm8wJoeZK)wJIcV?dST}Z;G0Arq{dVDu0&4kd%N!3F1*;*pW zR&qUiFzK=@44#QGw7k1`3t_d8&*kBV->O##t|tonFc2YWrL7_eqg+=+k;!F-`^b8> z#KWCE8%u4k@EprxqiV$VmmtiWxDLgnGu$Vs<8rppV5EajBXL4nyyZM$SWVm!wnCj-B!Wjqj5-5dNXukI2$$|Bu3Lrw}z65Lc=1G z^-#WuQOj$hwNGG?*CM_TO8Bg-1+qc>J7k5c51U8g?ZU5n?HYor;~JIjoWH-G>AoUP ztrWWLbRNqIjW#RT*WqZgPJXU7C)VaW5}MiijYbABmzoru6EmQ*N8cVK7a3|aOB#O& 
zBl8JY2WKfmj;h#Q!pN%9o@VNLv{OUL?rixHwOZuvX7{IJ{(EdPpuVFoQqIOa7giLVkBOKL@^smUA!tZ1CKRK}#SSM)iQHk)*R~?M!qkCruaS!#oIL1c z?J;U~&FfH#*98^G?i}pA{ z9Jg36t4=%6mhY(quYq*vSxptes9qy|7xSlH?G=S@>u>Ebe;|LVhs~@+06N<4CViBk zUiY$thvX;>Tby6z9Y1edAMQaiH zm^r3v#$Q#2T=X>bsY#D%s!bhs^M9PMAcHbCc0FMHV{u-dwlL;a1eJ63v5U*?Q_8JO zT#50!RD619#j_Uf))0ooADz~*9&lN!bBDRUgE>Vud-i5ck%vT=r^yD*^?Mp@Q^v+V zG#-?gKlr}Eeqifb{|So?HM&g91P8|av8hQoCmQXkd?7wIJwb z_^v8bbg`SAn{I*4bH$u(RZ6*xUhuA~hc=8czK8SHEKTzSxgbwi~9(OqJB&gwb^l4+m`k*Q;_?>Y-APi1{k zAHQ)P)G)f|AyjSgcCFps)Fh6Bca*Xznq36!pV6Az&m{O8$wGFD? zY&O*3*J0;_EqM#jh6^gMQKpXV?#1?>$ml1xvh8nSN>-?H=V;nJIwB07YX$e6vLxH( zqYwQ>qxwR(i4f)DLd)-$P>T-no_c!LsN@)8`e;W@)-Hj0>nJ-}Kla4-ZdPJzI&Mce zv)V_j;(3ERN3_@I$N<^|4Lf`B;8n+bX@bHbcZTopEmDI*Jfl)-pFDvo6svPRoo@(x z);_{lY<;);XzT`dBFpRmGrr}z5u1=pC^S-{ce6iXQlLGcItwJ^mZx{m$&DA_oEZ)B{_bYPq-HA zcH8WGoBG(aBU_j)vEy+_71T34@4dmSg!|M8Vf92Zj6WH7Q7t#OHQqWgFE3ARt+%!T z?oLovLVlnf?2c7pTc)~cc^($_8nyKwsN`RA-23ed3sdj(ys%pjjM+9JrctL;dy8a( z@en&CQmnV(()bu|Y%G1-4a(6x{aLytn$T-;(&{QIJB9vMox11U-1HpD@d(QkaJdEb zG{)+6Dos_L+O3NpWo^=gR?evp|CqEG?L&Ut#D*KLaRFOgOEK(Kq1@!EGcTfo+%A&I z=dLbB+d$u{sh?u)xP{PF8L%;YPPW53+@{>5W=Jt#wQpN;0_HYdw1{ksf_XhO4#2F= zyPx6Lx2<92L-;L5PD`zn6zwIH`Jk($?Qw({erA$^bC;q33hv!d!>%wRhj# zal^hk+WGNg;rJtb-EB(?czvOM=H7dl=vblBwAv>}%1@{}mnpUznfq1cE^sgsL0*4I zJ##!*B?=vI_OEVis5o+_IwMIRrpQyT_Sq~ZU%oY7c5JMIADzpD!Upz9h@iWg_>>~j zOLS;wp^i$-E?4<_cp?RiS%Rd?i;f*mOz=~(&3lo<=@(nR!_Rqiprh@weZlL!t#NCc zO!QTcInq|%#>OVgobj{~ixEUec`E25zJ~*DofsQdzIa@5^nOXj2T;8O`l--(QyU^$t?TGY^7#&FQ+2SS3B#qK*k3`ye?8jUYSajE5iBbJls75CCc(m3dk{t?- zopcER9{Z?TC)mk~gpi^kbbu>b-+a{m#8-y2^p$ka4n60w;Sc2}HMf<8JUvhCL0B&Btk)T`ctE$*qNW8L$`7!r^9T+>=<=2qaq-;ll2{`{Rg zc5a0ZUI$oG&j-qVOuKa=*v4aY#IsoM+1|c4Z)<}lEDvy;5huB@1RJPquU2U*U-;gu z=En2m+qjBzR#DEJDO`WU)hdd{Vj%^0V*KoyZ|5lzV87&g_j~NCjwv0uQVqXOb*QrQ zy|Qn`hxx(58c70$E;L(X0uZZ72M1!6oeg)(cdKO ze0gDaTz+ohR-#d)NbAH4x{I(21yjwvBQfmpLu$)|m{XolbgF!pmsqJ#D}(ylp6uC> z{bqtcI#hT#HW=wl7>p!38sKsJ`r8}lt-q%Keqy%u(xk=yiIJiUw6|5IvkS+#?JTBl z8H5(Q?l#wzazujH!8o>1xtn8#_w+397*_cy8!pQGP%K(Ga3pAjsaTbbXJlQF_+m+-UpUUent@xM zg%jqLUExj~o^vQ3Gl*>wh=_gOr2*|U64_iXb+-111aH}$TjeajM+I20xw(((>fej-@CIz4S1pi$(#}P7`4({6QS2CaQS4NPENDp>sAqD z$bH4KGzXGffkJ7R>V>)>tC)uax{UsN*dbeNC*v}#8Y#OWYwL4t$ePR?VTyIs!wea+ z5Urmc)X|^`MG~*dS6pGSbU+gPJoq*^a=_>$n4|P^w$sMBBy@f*Z^Jg6?n5?oId6f{ z$LW4M|4m502z0t7g<#Bx%X;9<=)smFolV&(V^(7Cv2-sxbxopQ!)*#ZRhTBpx1)Fc zNm1T%bONzv6@#|dz(w02AH8OXe>kQ#1FMCzO}2J_mST)+ExmBr9cva-@?;wnmWMOk z{3_~EX_xadgJGv&H@zK_8{(x84`}+c?oSBX*Ge3VdfTt&F}yCpFP?CpW+BE^cWY0^ zb&uBN!Ja3UzYHK-CTyA5=L zEMW{l3Usky#ly=7px648W31UNV@K)&Ub&zP1c7%)`{);I4b0Q<)B}3;NMG2JH=X$U zfIW4)4n9ZM`-yRj67I)YSLDK)qfUJ_ij}a#aZN~9EXrh8eZY2&=uY%2N0UFF7<~%M zsB8=erOWZ>Ct_#^tHZ|*q`H;A)5;ycw*IcmVxi8_0Xk}aJA^ath+E;xg!x+As(M#0=)3!NJR6H&9+zd#iP(m0PIW8$ z1Y^VX`>jm`W!=WpF*{ioM?C9`yOR>@0q=u7o>BP-eSHqCgMDj!2anwH?s%i2p+Q7D zzszIf5XJpE)IG4;d_(La-xenmF(tgAxK`Y4sQ}BSJEPs6N_U2vI{8=0C_F?@7<(G; zo$~G=8p+076G;`}>{MQ>t>7cm=zGtfbdDXm6||jUU|?X?CaE?(<6bKDYKeHlz}DA8 zXT={X=yp_R;HfJ9h%?eWvQ!dRgz&Su*JfNt!Wu>|XfU&68iRikRrHRW|ZxzRR^`eIGt zIeiDgVS>IeExKVRWW8-=A=yA`}`)ZkWBrZD`hpWIxBGkh&f#ijr449~m`j6{4jiJ*C!oVA8ZC?$1RM#K(_b zL9TW)kN*Y4%^-qPpMP7d4)o?Nk#>aoYHT(*g)qmRUb?**F@pnNiy6Fv9rEiUqD(^O zzyS?nBrX63BTRYduaG(0VVG2yJRe%o&rVrLjbxTaAFTd8s;<<@Qs>u(<193R8>}2_ zuwp{7;H2a*X7_jryzriZXMg?bTuegABb^87@SsKkr2)0Gyiax8KQWstw^v#ix45EVrcEhr>!NMhprl$InQMzjSFH54x5k9qHc`@9uKQzvL4ihcq{^B zPrVR=o_ic%Y>6&rMN)hTZsI7I<3&`#(nl+3y3ys9A~&^=4?PL&nd8)`OfG#n zwAMN$1&>K++c{^|7<4P=2y(B{jJsQ0a#U;HTo4ZmWZYvI{+s;Td{Yzem%0*k#)vjpB zia;J&>}ICate44SFYY3vEelqStQWFihx%^vQ@Do(sOy7yR2@WNv7Y9I^yL=nZr3mb 
zXKV5t@=?-Sk|b{XMhA7ZGB@2hqsx}4xwCW!in#C zI@}scZlr3-NFJ@NFaJlhyfcw{k^vvtGl`N9xSo**rDW4S}i zM9{fMPWo%4wYDG~BZ18BD+}h|GQKc-g^{++3MY>}W_uq7jGHx{mwE9fZiPCoxN$+7 zrODGGJrOkcPQUB(FD5aoS4g~7#6NR^ma7-!>mHuJfY5kTe6PpNNKC9GGRiu^L31uG z$7v`*JknQHsYB!Tm_W{a32TM099djW%5e+j0Ve_ct}IM>XLF1Ap+YvcrLV=|CKo6S zb+9Nl3_YdKP6%Cxy@6TxZ>;4&nTneadr z_ES90ydCev)LV!dN=#(*f}|ZORFdvkYBni^aLbUk>BajeWIOcmHP#8S)*2U~QKI%S zyrLmtPqb&TphJ;>yAxri#;{uyk`JJqODDw%(Z=2`1uc}br^V%>j!gS)D*q*f_-qf8&D;W1dJgQMlaH5er zN2U<%Smb7==vE}dDI8K7cKz!vs^73o9f>2sgiTzWcwY|BMYHH5%Vn7#kiw&eItCqa zIkR2~Q}>X=Ar8W|^Ms41Fm8o6IB2_j60eOeBB1Br!boW7JnoeX6Gs)?7rW0^5psc- zjS16yb>dFn>KPOF;imD}e!enuIniFzv}n$m2#gCCv4jM#ArwlzZ$7@9&XkFxZ4n!V zj3dyiwW4Ki2QG{@i>yuZXQizw_OkZI^-3otXC{!(lUpJF33gI60ak;Uqitp74|B6I zgg{b=Iz}WkhCGj1M=hu4#Aw173YxIVbISaoc z-nLZC*6Tgivd5V`K%GxhBsp@SUU60-rfc$=wb>zdJzXS&-5(NRRodFk;Kxk!S(O(a0e7oY=E( zAyS;Ow?6Q&XA+cnkCb{28_1N8H#?J!*$MmIwLq^*T_9-z^&UE@A(z9oGYtFy6EZef LrJugUA?W`A8`#=m literal 0 HcmV?d00001 diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/globals.css b/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/globals.css new file mode 100644 index 0000000..7ae6ba4 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/globals.css @@ -0,0 +1,130 @@ +@import "tailwindcss"; + +@plugin "tailwindcss-animate"; + +@custom-variant dark (&:is(.dark *)); + +@theme inline { + --color-background: var(--background); + --color-foreground: var(--foreground); + --font-sans: var(--font-geist-sans); + --font-mono: var(--font-geist-mono); + --color-sidebar-ring: var(--sidebar-ring); + --color-sidebar-border: var(--sidebar-border); + --color-sidebar-accent-foreground: var(--sidebar-accent-foreground); + --color-sidebar-accent: var(--sidebar-accent); + --color-sidebar-primary-foreground: var(--sidebar-primary-foreground); + --color-sidebar-primary: var(--sidebar-primary); + --color-sidebar-foreground: var(--sidebar-foreground); + --color-sidebar: var(--sidebar); + --color-chart-5: var(--chart-5); + --color-chart-4: var(--chart-4); + --color-chart-3: var(--chart-3); + --color-chart-2: var(--chart-2); + --color-chart-1: var(--chart-1); + --color-ring: var(--ring); + --color-input: var(--input); + --color-border: var(--border); + --color-destructive-foreground: var(--destructive-foreground); + --color-destructive: var(--destructive); + --color-accent-foreground: var(--accent-foreground); + --color-accent: var(--accent); + --color-muted-foreground: var(--muted-foreground); + --color-muted: var(--muted); + --color-secondary-foreground: var(--secondary-foreground); + --color-secondary: var(--secondary); + --color-primary-foreground: var(--primary-foreground); + --color-primary: var(--primary); + --color-popover-foreground: var(--popover-foreground); + --color-popover: var(--popover); + --color-card-foreground: var(--card-foreground); + --color-card: var(--card); + --radius-sm: calc(var(--radius) - 4px); + --radius-md: calc(var(--radius) - 2px); + --radius-lg: var(--radius); + --radius-xl: calc(var(--radius) + 4px); +} + +:root { + --background: oklch(1 0 0); + --foreground: oklch(0.129 0.042 264.695); + --card: oklch(1 0 0); + --card-foreground: oklch(0.129 0.042 264.695); + --popover: oklch(1 0 0); + --popover-foreground: oklch(0.129 0.042 264.695); + --primary: oklch(0.208 0.042 265.755); + --primary-foreground: oklch(0.984 0.003 247.858); + --secondary: oklch(0.968 0.007 247.896); + --secondary-foreground: oklch(0.208 0.042 265.755); + --muted: oklch(0.968 0.007 247.896); + --muted-foreground: oklch(0.554 0.046 257.417); + --accent: 
oklch(0.968 0.007 247.896); + --accent-foreground: oklch(0.208 0.042 265.755); + --destructive: oklch(0.577 0.245 27.325); + --destructive-foreground: oklch(0.577 0.245 27.325); + --border: oklch(0.929 0.013 255.508); + --input: oklch(0.929 0.013 255.508); + --ring: oklch(0.704 0.04 256.788); + --chart-1: oklch(0.646 0.222 41.116); + --chart-2: oklch(0.6 0.118 184.704); + --chart-3: oklch(0.398 0.07 227.392); + --chart-4: oklch(0.828 0.189 84.429); + --chart-5: oklch(0.769 0.188 70.08); + --radius: 0.625rem; + --sidebar: oklch(0.984 0.003 247.858); + --sidebar-foreground: oklch(0.129 0.042 264.695); + --sidebar-primary: oklch(0.208 0.042 265.755); + --sidebar-primary-foreground: oklch(0.984 0.003 247.858); + --sidebar-accent: oklch(0.968 0.007 247.896); + --sidebar-accent-foreground: oklch(0.208 0.042 265.755); + --sidebar-border: oklch(0.929 0.013 255.508); + --sidebar-ring: oklch(0.704 0.04 256.788); +} + +.dark { + --background: oklch(0.129 0.042 264.695); + --foreground: oklch(0.984 0.003 247.858); + --card: oklch(0.129 0.042 264.695); + --card-foreground: oklch(0.984 0.003 247.858); + --popover: oklch(0.129 0.042 264.695); + --popover-foreground: oklch(0.984 0.003 247.858); + --primary: oklch(0.984 0.003 247.858); + --primary-foreground: oklch(0.208 0.042 265.755); + --secondary: oklch(0.279 0.041 260.031); + --secondary-foreground: oklch(0.984 0.003 247.858); + --muted: oklch(0.279 0.041 260.031); + --muted-foreground: oklch(0.704 0.04 256.788); + --accent: oklch(0.279 0.041 260.031); + --accent-foreground: oklch(0.984 0.003 247.858); + --destructive: oklch(0.396 0.141 25.723); + --destructive-foreground: oklch(0.637 0.237 25.331); + --border: oklch(0.279 0.041 260.031); + --input: oklch(0.279 0.041 260.031); + --ring: oklch(0.446 0.043 257.281); + --chart-1: oklch(0.488 0.243 264.376); + --chart-2: oklch(0.696 0.17 162.48); + --chart-3: oklch(0.769 0.188 70.08); + --chart-4: oklch(0.627 0.265 303.9); + --chart-5: oklch(0.645 0.246 16.439); + --sidebar: oklch(0.208 0.042 265.755); + --sidebar-foreground: oklch(0.984 0.003 247.858); + --sidebar-primary: oklch(0.488 0.243 264.376); + --sidebar-primary-foreground: oklch(0.984 0.003 247.858); + --sidebar-accent: oklch(0.279 0.041 260.031); + --sidebar-accent-foreground: oklch(0.984 0.003 247.858); + --sidebar-border: oklch(0.279 0.041 260.031); + --sidebar-ring: oklch(0.446 0.043 257.281); +} + +@layer base { + * { + @apply border-border outline-ring/50; + } + body { + @apply bg-background text-foreground; + } +} + +.no-transitions * { + transition: none !important; +} diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/layout.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/layout.tsx new file mode 100644 index 0000000..428c1d1 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/layout.tsx @@ -0,0 +1,44 @@ +import type { Metadata } from "next"; +import { Geist, Geist_Mono } from "next/font/google"; +import "./globals.css"; +import { ThemeProvider } from "@/components/theme-provider"; +import { ThemeTransition } from "@/components/ui/theme-transition"; + +const geistSans = Geist({ + variable: "--font-geist-sans", + subsets: ["latin"], +}); + +const geistMono = Geist_Mono({ + variable: "--font-geist-mono", + subsets: ["latin"], +}); + +export const metadata: Metadata = { + title: "FastRTC Demo", + description: "Interactive WebRTC demo with audio visualization", +}; + +export default function RootLayout({ + children, +}: Readonly<{ + children: React.ReactNode; +}>) { + return ( + + + + {children} + + + + + 
); +} diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/page.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/page.tsx new file mode 100644 index 0000000..fe41cea --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/app/page.tsx @@ -0,0 +1,16 @@ +import { BackgroundCircleProvider } from "@/components/background-circle-provider"; +import { ThemeToggle } from "@/components/ui/theme-toggle"; +import { ResetChat } from "@/components/ui/reset-chat"; +export default function Home() { + return ( +
+ +
+ +
+
+ +
+
+ ); +} diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/components.json b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components.json new file mode 100644 index 0000000..a08feaa --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": true, + "tsx": true, + "tailwind": { + "config": "", + "css": "app/globals.css", + "baseColor": "slate", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + }, + "iconLibrary": "lucide" +} \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/background-circle-provider.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/background-circle-provider.tsx new file mode 100644 index 0000000..eb0925b --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/background-circle-provider.tsx @@ -0,0 +1,123 @@ +"use client" + +import { useState, useEffect, useRef, useCallback } from "react"; +import { BackgroundCircles } from "@/components/ui/background-circles"; +import { AIVoiceInput } from "@/components/ui/ai-voice-input"; +import { WebRTCClient } from "@/lib/webrtc-client"; + +export function BackgroundCircleProvider() { + const [currentVariant, setCurrentVariant] = + useState("octonary"); + const [isConnected, setIsConnected] = useState(false); + const [webrtcClient, setWebrtcClient] = useState(null); + const [audioLevel, setAudioLevel] = useState(0); + const audioRef = useRef(null); + + // Memoize callbacks to prevent recreation on each render + const handleConnected = useCallback(() => setIsConnected(true), []); + const handleDisconnected = useCallback(() => setIsConnected(false), []); + + const handleAudioStream = useCallback((stream: MediaStream) => { + if (audioRef.current) { + audioRef.current.srcObject = stream; + } + }, []); + + const handleAudioLevel = useCallback((level: number) => { + // Apply some smoothing to the audio level + setAudioLevel(prev => prev * 0.7 + level * 0.3); + }, []); + + // Get all available variants + const variants = Object.keys( + COLOR_VARIANTS + ) as (keyof typeof COLOR_VARIANTS)[]; + + // Function to change to the next color variant + const changeVariant = () => { + const currentIndex = variants.indexOf(currentVariant); + const nextVariant = variants[(currentIndex + 1) % variants.length]; + setCurrentVariant(nextVariant); + }; + + useEffect(() => { + // Initialize WebRTC client with memoized callbacks + const client = new WebRTCClient({ + onConnected: handleConnected, + onDisconnected: handleDisconnected, + onAudioStream: handleAudioStream, + onAudioLevel: handleAudioLevel + }); + setWebrtcClient(client); + + return () => { + client.disconnect(); + }; + }, [handleConnected, handleDisconnected, handleAudioStream, handleAudioLevel]); + + const handleStart = () => { + webrtcClient?.connect(); + }; + + const handleStop = () => { + webrtcClient?.disconnect(); + }; + + return ( +
+ +
+ +
+
+ ); +} + +export default { BackgroundCircleProvider } + +const COLOR_VARIANTS = { + primary: { + border: [ + "border-emerald-500/60", + "border-cyan-400/50", + "border-slate-600/30", + ], + gradient: "from-emerald-500/30", + }, + secondary: { + border: [ + "border-violet-500/60", + "border-fuchsia-400/50", + "border-slate-600/30", + ], + gradient: "from-violet-500/30", + }, + senary: { + border: [ + "border-blue-500/60", + "border-sky-400/50", + "border-slate-600/30", + ], + gradient: "from-blue-500/30", + }, // blue + octonary: { + border: [ + "border-red-500/60", + "border-rose-400/50", + "border-slate-600/30", + ], + gradient: "from-red-500/30", + }, +} as const; \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/theme-provider.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/theme-provider.tsx new file mode 100644 index 0000000..896e023 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/theme-provider.tsx @@ -0,0 +1,101 @@ +"use client"; + +import { createContext, useContext, useEffect, useState } from "react"; + +type Theme = "light" | "dark" | "system"; + +type ThemeProviderProps = { + children: React.ReactNode; + defaultTheme?: Theme; + storageKey?: string; + attribute?: string; + enableSystem?: boolean; + disableTransitionOnChange?: boolean; +}; + +type ThemeProviderState = { + theme: Theme; + setTheme: (theme: Theme) => void; +}; + +const initialState: ThemeProviderState = { + theme: "system", + setTheme: () => null, +}; + +const ThemeProviderContext = createContext(initialState); + +export function ThemeProvider({ + children, + defaultTheme = "system", + storageKey = "theme", + attribute = "class", + enableSystem = true, + disableTransitionOnChange = false, + ...props +}: ThemeProviderProps) { + const [theme, setTheme] = useState(defaultTheme); + + useEffect(() => { + const savedTheme = localStorage.getItem(storageKey) as Theme | null; + + if (savedTheme) { + setTheme(savedTheme); + } else if (defaultTheme === "system" && enableSystem) { + const systemTheme = window.matchMedia("(prefers-color-scheme: dark)").matches + ? "dark" + : "light"; + setTheme(systemTheme); + } + }, [defaultTheme, storageKey, enableSystem]); + + useEffect(() => { + const root = window.document.documentElement; + + if (disableTransitionOnChange) { + root.classList.add("no-transitions"); + + // Force a reflow + window.getComputedStyle(root).getPropertyValue("opacity"); + + setTimeout(() => { + root.classList.remove("no-transitions"); + }, 0); + } + + root.classList.remove("light", "dark"); + + if (theme === "system" && enableSystem) { + const systemTheme = window.matchMedia("(prefers-color-scheme: dark)").matches + ? 
"dark" + : "light"; + root.classList.add(systemTheme); + } else { + root.classList.add(theme); + } + + localStorage.setItem(storageKey, theme); + }, [theme, storageKey, enableSystem, disableTransitionOnChange]); + + const value = { + theme, + setTheme: (theme: Theme) => { + setTheme(theme); + }, + }; + + return ( + + {children} + + ); +} + +export const useTheme = () => { + const context = useContext(ThemeProviderContext); + + if (context === undefined) + throw new Error("useTheme must be used within a ThemeProvider"); + + return context; +}; diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/ai-voice-input.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/ai-voice-input.tsx new file mode 100644 index 0000000..f3558b8 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/ai-voice-input.tsx @@ -0,0 +1,114 @@ +"use client"; + +import { Mic, Square } from "lucide-react"; +import { useState, useEffect } from "react"; +import { cn } from "@/lib/utils"; + +interface AIVoiceInputProps { + onStart?: () => void; + onStop?: (duration: number) => void; + isConnected?: boolean; + className?: string; +} + +export function AIVoiceInput({ + onStart, + onStop, + isConnected = false, + className +}: AIVoiceInputProps) { + const [active, setActive] = useState(false); + const [time, setTime] = useState(0); + const [isClient, setIsClient] = useState(false); + const [status, setStatus] = useState<'disconnected' | 'connecting' | 'connected'>('disconnected'); + + useEffect(() => { + setIsClient(true); + }, []); + + useEffect(() => { + let intervalId: NodeJS.Timeout; + + if (active) { + intervalId = setInterval(() => { + setTime((t) => t + 1); + }, 1000); + } else { + setTime(0); + } + + return () => clearInterval(intervalId); + }, [active]); + + useEffect(() => { + if (isConnected) { + setStatus('connected'); + setActive(true); + } else { + setStatus('disconnected'); + setActive(false); + } + }, [isConnected]); + + const formatTime = (seconds: number) => { + const mins = Math.floor(seconds / 60); + const secs = seconds % 60; + return `${mins.toString().padStart(2, "0")}:${secs.toString().padStart(2, "0")}`; + }; + + const handleStart = () => { + setStatus('connecting'); + onStart?.(); + }; + + const handleStop = () => { + onStop?.(time); + setStatus('disconnected'); + }; + + return ( +
+
+
+ {status === 'connected' ? 'Connected' : status === 'connecting' ? 'Connecting...' : 'Disconnected'} +
+ + + + + {formatTime(time)} + +
+
+ ); +} \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/background-circles.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/background-circles.tsx new file mode 100644 index 0000000..c899496 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/background-circles.tsx @@ -0,0 +1,309 @@ +"use client"; + +import { motion } from "framer-motion"; +import clsx from "clsx"; +import { useState, useEffect } from "react"; + +interface BackgroundCirclesProps { + title?: string; + description?: string; + className?: string; + variant?: keyof typeof COLOR_VARIANTS; + audioLevel?: number; + isActive?: boolean; +} + +const COLOR_VARIANTS = { + primary: { + border: [ + "border-emerald-500/60", + "border-cyan-400/50", + "border-slate-600/30", + ], + gradient: "from-emerald-500/30", + }, + secondary: { + border: [ + "border-violet-500/60", + "border-fuchsia-400/50", + "border-slate-600/30", + ], + gradient: "from-violet-500/30", + }, + tertiary: { + border: [ + "border-orange-500/60", + "border-yellow-400/50", + "border-slate-600/30", + ], + gradient: "from-orange-500/30", + }, + quaternary: { + border: [ + "border-purple-500/60", + "border-pink-400/50", + "border-slate-600/30", + ], + gradient: "from-purple-500/30", + }, + quinary: { + border: [ + "border-red-500/60", + "border-rose-400/50", + "border-slate-600/30", + ], + gradient: "from-red-500/30", + }, // red + senary: { + border: [ + "border-blue-500/60", + "border-sky-400/50", + "border-slate-600/30", + ], + gradient: "from-blue-500/30", + }, // blue + septenary: { + border: [ + "border-gray-500/60", + "border-gray-400/50", + "border-slate-600/30", + ], + gradient: "from-gray-500/30", + }, + octonary: { + border: [ + "border-red-500/60", + "border-rose-400/50", + "border-slate-600/30", + ], + gradient: "from-red-500/30", + }, +} as const; + +const AnimatedGrid = () => ( + +
+ +); + +export function BackgroundCircles({ + title = "", + description = "", + className, + variant = "octonary", + audioLevel = 0, + isActive = false, +}: BackgroundCirclesProps) { + const variantStyles = COLOR_VARIANTS[variant]; + const [animationParams, setAnimationParams] = useState({ + scale: 1, + duration: 5, + intensity: 0 + }); + const [isLoaded, setIsLoaded] = useState(false); + + // Initial page load animation + useEffect(() => { + // Small delay to ensure the black screen is visible first + const timer = setTimeout(() => { + setIsLoaded(true); + }, 300); + + return () => clearTimeout(timer); + }, []); + + // Update animation based on audio level + useEffect(() => { + if (isActive && audioLevel > 0) { + // Simple enhancement of audio level for more dramatic effect + const enhancedLevel = Math.min(1, audioLevel * 1.5); + + setAnimationParams({ + scale: 1 + enhancedLevel * 0.3, + duration: Math.max(2, 5 - enhancedLevel * 3), + intensity: enhancedLevel + }); + } else if (animationParams.intensity > 0) { + // Only reset if we need to (prevents unnecessary updates) + const timer = setTimeout(() => { + setAnimationParams({ + scale: 1, + duration: 5, + intensity: 0 + }); + }, 300); + + return () => clearTimeout(timer); + } + }, [audioLevel, isActive, animationParams.intensity]); + + return ( + <> + {/* Initial black overlay that fades out */} + + +
+ + + {[0, 1, 2].map((i) => ( + +
+ + ))} + + +
+ + + + {/* Additional glow that appears only during high audio levels */} + {isActive && animationParams.intensity > 0.4 && ( + + )} +
+
+ + ); +} + +export function DemoCircles() { + const [currentVariant, setCurrentVariant] = + useState("octonary"); + + const variants = Object.keys( + COLOR_VARIANTS + ) as (keyof typeof COLOR_VARIANTS)[]; + + function getNextVariant() { + const currentIndex = variants.indexOf(currentVariant); + const nextVariant = variants[(currentIndex + 1) % variants.length]; + return nextVariant; + } + + return ( + <> + +
+ +
+ + ); +} diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/reset-chat.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/reset-chat.tsx new file mode 100644 index 0000000..b53a21a --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/reset-chat.tsx @@ -0,0 +1,18 @@ +"use client" + +import { Trash } from "lucide-react" + +export function ResetChat() { + return ( + + ) +} + diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/theme-toggle.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/theme-toggle.tsx new file mode 100644 index 0000000..a6ef0d8 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/theme-toggle.tsx @@ -0,0 +1,61 @@ +"use client"; + +import { useTheme } from "@/components/theme-provider"; +import { cn } from "@/lib/utils"; +import { Moon, Sun } from "lucide-react"; +import { useRef } from "react"; + +interface ThemeToggleProps { + className?: string; +} + +export function ThemeToggle({ className }: ThemeToggleProps) { + const { theme } = useTheme(); + const buttonRef = useRef(null); + + const toggleTheme = () => { + // Instead of directly changing the theme, dispatch a custom event + const newTheme = theme === "light" ? "dark" : "light"; + + // Dispatch custom event with the new theme + window.dispatchEvent( + new CustomEvent('themeToggleRequest', { + detail: { theme: newTheme } + }) + ); + }; + + return ( + + ); +} \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/theme-transition.tsx b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/theme-transition.tsx new file mode 100644 index 0000000..caf9601 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/components/ui/theme-transition.tsx @@ -0,0 +1,120 @@ +"use client"; + +import { useTheme } from "@/components/theme-provider"; +import { useEffect, useState } from "react"; +import { motion, AnimatePresence } from "framer-motion"; + +interface ThemeTransitionProps { + className?: string; +} + +export function ThemeTransition({ className }: ThemeTransitionProps) { + const { theme, setTheme } = useTheme(); + const [position, setPosition] = useState({ x: 0, y: 0 }); + const [isAnimating, setIsAnimating] = useState(false); + const [pendingTheme, setPendingTheme] = useState(null); + const [visualTheme, setVisualTheme] = useState(theme); + + // Track mouse/touch position for click events + useEffect(() => { + const handleMouseMove = (e: MouseEvent) => { + setPosition({ x: e.clientX, y: e.clientY }); + }; + + const handleTouchMove = (e: TouchEvent) => { + if (e.touches[0]) { + setPosition({ x: e.touches[0].clientX, y: e.touches[0].clientY }); + } + }; + + window.addEventListener("mousemove", handleMouseMove); + window.addEventListener("touchmove", handleTouchMove); + + return () => { + window.removeEventListener("mousemove", handleMouseMove); + window.removeEventListener("touchmove", handleTouchMove); + }; + }, []); + + // Listen for theme toggle requests + useEffect(() => { + // Custom event for theme toggle requests + const handleThemeToggle = (e: CustomEvent) => { + if (isAnimating) return; // Prevent multiple animations + + const newTheme = e.detail.theme; + if (newTheme === theme) return; + + // Store the pending theme but don't apply it yet + setPendingTheme(newTheme); + setIsAnimating(true); + + // The actual theme will be applied mid-animation + }; + + window.addEventListener('themeToggleRequest' as any, handleThemeToggle as 
EventListener); + + return () => { + window.removeEventListener('themeToggleRequest' as any, handleThemeToggle as EventListener); + }; + }, [theme, isAnimating]); + + // Apply the theme change mid-animation + useEffect(() => { + if (isAnimating && pendingTheme) { + // Set visual theme immediately for the animation + setVisualTheme(pendingTheme); + + // Apply the actual theme change after a delay (mid-animation) + const timer = setTimeout(() => { + setTheme(pendingTheme as any); + }, 400); // Half of the animation duration + + // End the animation after it completes + const endTimer = setTimeout(() => { + setIsAnimating(false); + setPendingTheme(null); + }, 1000); // Match with animation duration + + return () => { + clearTimeout(timer); + clearTimeout(endTimer); + }; + } + }, [isAnimating, pendingTheme, setTheme]); + + return ( + + {isAnimating && ( + + + + )} + + ); +} \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/eslint.config.mjs b/demo/nextjs_voice_chat/frontend/fastrtc-demo/eslint.config.mjs new file mode 100644 index 0000000..521f586 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/eslint.config.mjs @@ -0,0 +1,28 @@ +import { dirname } from "path"; +import { fileURLToPath } from "url"; +import { FlatCompat } from "@eslint/eslintrc"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const compat = new FlatCompat({ + baseDirectory: __dirname, +}); + +const eslintConfig = [ + ...compat.extends("next/core-web-vitals", "next/typescript"), + { + rules: { + "no-unused-vars": "off", + "no-explicit-any": "off", + "no-console": "off", + "no-debugger": "off", + "eqeqeq": "off", + "curly": "off", + "quotes": "off", + "semi": "off", + }, + }, +]; + +export default eslintConfig; diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/lib/utils.ts b/demo/nextjs_voice_chat/frontend/fastrtc-demo/lib/utils.ts new file mode 100644 index 0000000..bd0c391 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/lib/utils.ts @@ -0,0 +1,6 @@ +import { clsx, type ClassValue } from "clsx" +import { twMerge } from "tailwind-merge" + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)) +} diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/lib/webrtc-client.ts b/demo/nextjs_voice_chat/frontend/fastrtc-demo/lib/webrtc-client.ts new file mode 100644 index 0000000..72ea3ac --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/lib/webrtc-client.ts @@ -0,0 +1,189 @@ +interface WebRTCClientOptions { + onConnected?: () => void; + onDisconnected?: () => void; + onMessage?: (message: any) => void; + onAudioStream?: (stream: MediaStream) => void; + onAudioLevel?: (level: number) => void; +} + +export class WebRTCClient { + private peerConnection: RTCPeerConnection | null = null; + private mediaStream: MediaStream | null = null; + private dataChannel: RTCDataChannel | null = null; + private options: WebRTCClientOptions; + private audioContext: AudioContext | null = null; + private analyser: AnalyserNode | null = null; + private dataArray: Uint8Array | null = null; + private animationFrameId: number | null = null; + + constructor(options: WebRTCClientOptions = {}) { + this.options = options; + } + + async connect() { + try { + this.peerConnection = new RTCPeerConnection(); + + // Get user media + try { + this.mediaStream = await navigator.mediaDevices.getUserMedia({ + audio: true + }); + } catch (mediaError: any) { + console.error('Media error:', mediaError); + if 
(mediaError.name === 'NotAllowedError') { + throw new Error('Microphone access denied. Please allow microphone access and try again.'); + } else if (mediaError.name === 'NotFoundError') { + throw new Error('No microphone detected. Please connect a microphone and try again.'); + } else { + throw mediaError; + } + } + + this.setupAudioAnalysis(); + + this.mediaStream.getTracks().forEach(track => { + if (this.peerConnection) { + this.peerConnection.addTrack(track, this.mediaStream!); + } + }); + + this.peerConnection.addEventListener('track', (event) => { + if (this.options.onAudioStream) { + this.options.onAudioStream(event.streams[0]); + } + }); + + this.dataChannel = this.peerConnection.createDataChannel('text'); + + this.dataChannel.addEventListener('message', (event) => { + try { + const message = JSON.parse(event.data); + console.log('Received message:', message); + + if (this.options.onMessage) { + this.options.onMessage(message); + } + } catch (error) { + console.error('Error parsing message:', error); + } + }); + + // Create and send offer + const offer = await this.peerConnection.createOffer(); + await this.peerConnection.setLocalDescription(offer); + + // Use same-origin request to avoid CORS preflight + const response = await fetch('http://localhost:8000/webrtc/offer', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + mode: 'cors', // Explicitly set CORS mode + credentials: 'same-origin', + body: JSON.stringify({ + sdp: offer.sdp, + type: offer.type, + webrtc_id: Math.random().toString(36).substring(7) + }) + }); + + const serverResponse = await response.json(); + await this.peerConnection.setRemoteDescription(serverResponse); + + if (this.options.onConnected) { + this.options.onConnected(); + } + } catch (error) { + console.error('Error connecting:', error); + this.disconnect(); + throw error; + } + } + + private setupAudioAnalysis() { + if (!this.mediaStream) return; + + try { + this.audioContext = new AudioContext(); + this.analyser = this.audioContext.createAnalyser(); + this.analyser.fftSize = 256; + + const source = this.audioContext.createMediaStreamSource(this.mediaStream); + source.connect(this.analyser); + + const bufferLength = this.analyser.frequencyBinCount; + this.dataArray = new Uint8Array(bufferLength); + + this.startAnalysis(); + } catch (error) { + console.error('Error setting up audio analysis:', error); + } + } + + private startAnalysis() { + if (!this.analyser || !this.dataArray || !this.options.onAudioLevel) return; + + // Add throttling to prevent too many updates + let lastUpdateTime = 0; + const throttleInterval = 100; // Only update every 100ms + + const analyze = () => { + this.analyser!.getByteFrequencyData(this.dataArray!); + + const currentTime = Date.now(); + // Only update if enough time has passed since last update + if (currentTime - lastUpdateTime > throttleInterval) { + // Calculate average volume level (0-1) + let sum = 0; + for (let i = 0; i < this.dataArray!.length; i++) { + sum += this.dataArray![i]; + } + const average = sum / this.dataArray!.length / 255; + + this.options.onAudioLevel!(average); + lastUpdateTime = currentTime; + } + + this.animationFrameId = requestAnimationFrame(analyze); + }; + + this.animationFrameId = requestAnimationFrame(analyze); + } + + private stopAnalysis() { + if (this.animationFrameId !== null) { + cancelAnimationFrame(this.animationFrameId); + this.animationFrameId = null; + } + + if (this.audioContext) { + this.audioContext.close(); + 
this.audioContext = null; + } + + this.analyser = null; + this.dataArray = null; + } + + disconnect() { + this.stopAnalysis(); + + if (this.mediaStream) { + this.mediaStream.getTracks().forEach(track => track.stop()); + this.mediaStream = null; + } + + if (this.peerConnection) { + this.peerConnection.close(); + this.peerConnection = null; + } + + this.dataChannel = null; + + if (this.options.onDisconnected) { + this.options.onDisconnected(); + } + } +} \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/next.config.ts b/demo/nextjs_voice_chat/frontend/fastrtc-demo/next.config.ts new file mode 100644 index 0000000..e9ffa30 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/next.config.ts @@ -0,0 +1,7 @@ +import type { NextConfig } from "next"; + +const nextConfig: NextConfig = { + /* config options here */ +}; + +export default nextConfig; diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/package.json b/demo/nextjs_voice_chat/frontend/fastrtc-demo/package.json new file mode 100644 index 0000000..93c285e --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/package.json @@ -0,0 +1,33 @@ +{ + "name": "fastrtc-demo", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev --turbopack", + "build": "next build --no-lint", + "start": "next start", + "lint": "next lint" + }, + "dependencies": { + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "framer-motion": "^12.4.10", + "lucide-react": "^0.477.0", + "next": "15.2.2-canary.1", + "react": "^19.0.0", + "react-dom": "^19.0.0", + "tailwind-merge": "^3.0.2", + "tailwindcss-animate": "^1.0.7" + }, + "devDependencies": { + "@eslint/eslintrc": "^3", + "@tailwindcss/postcss": "^4", + "@types/node": "^20", + "@types/react": "^19", + "@types/react-dom": "^19", + "eslint": "^9", + "eslint-config-next": "15.2.2-canary.1", + "tailwindcss": "^4", + "typescript": "^5" + } +} diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/postcss.config.mjs b/demo/nextjs_voice_chat/frontend/fastrtc-demo/postcss.config.mjs new file mode 100644 index 0000000..c7bcb4b --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/postcss.config.mjs @@ -0,0 +1,5 @@ +const config = { + plugins: ["@tailwindcss/postcss"], +}; + +export default config; diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/file.svg b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/file.svg new file mode 100644 index 0000000..004145c --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/file.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/globe.svg b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/globe.svg new file mode 100644 index 0000000..567f17b --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/globe.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/next.svg b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/next.svg new file mode 100644 index 0000000..5174b28 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/next.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/vercel.svg b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/vercel.svg new file mode 100644 index 0000000..7705396 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/vercel.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/window.svg b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/window.svg new file mode 100644 index 0000000..b2b2a44 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/public/window.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/demo/nextjs_voice_chat/frontend/fastrtc-demo/tsconfig.json b/demo/nextjs_voice_chat/frontend/fastrtc-demo/tsconfig.json new file mode 100644 index 0000000..d8b9323 --- /dev/null +++ b/demo/nextjs_voice_chat/frontend/fastrtc-demo/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "ES2017", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "paths": { + "@/*": ["./*"] + } + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/demo/nextjs_voice_chat/requirements.txt b/demo/nextjs_voice_chat/requirements.txt new file mode 100644 index 0000000..d9f0bb3 --- /dev/null +++ b/demo/nextjs_voice_chat/requirements.txt @@ -0,0 +1,5 @@ +openai +fastapi +python-dotenv +elevenlabs +fastrtc[vad, stt, tts] \ No newline at end of file diff --git a/demo/nextjs_voice_chat/run.sh b/demo/nextjs_voice_chat/run.sh new file mode 100755 index 0000000..814e8bd --- /dev/null +++ b/demo/nextjs_voice_chat/run.sh @@ -0,0 +1 @@ +uvicorn backend.server:app --host 0.0.0.0 --port 8000 \ No newline at end of file
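
As a quick sanity check once `run.sh` has the backend listening on port 8000, the conversation history kept in `backend/server.py` can also be cleared directly via its `/reset` endpoint (a minimal example, assuming the default host and port):

```bash
# Resets the server-side message history back to just the system prompt.
# The endpoint responds with {"status": "success"}.
curl http://localhost:8000/reset
```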