mirror of https://github.com/OpenBMB/MiniCPM-V.git
Update web_demo_streamlit-minicpmv2_6.py
1. Avoid using the 'None' string when `user_text` is empty.
2. Add `st.spinner` to display a loading message during AI content generation.
@@ -173,9 +173,13 @@ def encode_video(video_path):
     return frames


 # User input box
 user_text = st.chat_input("Enter your question")
-if user_text:
+if user_text is not None:
+    if user_text.strip() is "":
+        st.warning('Input message could not be empty!', icon="⚠️")
+    else:
     # Display user input and save it to session history
     with st.chat_message(U_NAME, avatar="user"):
         st.session_state.chat_history.append({
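The new guard rejects empty messages before any model call is made. One thing worth noting about the committed check: `user_text.strip() is ""` compares object identity rather than value; it happens to pass in CPython only because the empty string is interned, and Python 3.8+ emits a SyntaxWarning for `is` against a literal. Below is a minimal sketch of the idiomatic value check; it is illustrative only and omits the demo's `U_NAME` constant and session-state bookkeeping.

```python
import streamlit as st

user_text = st.chat_input("Enter your question")
if user_text is not None:
    # Value check instead of identity: `not user_text.strip()` is True for ""
    # and for whitespace-only input, with no reliance on string interning.
    if not user_text.strip():
        st.warning("Input message could not be empty!", icon="⚠️")
    else:
        with st.chat_message("user"):
            st.markdown(user_text)
```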
@@ -251,6 +255,7 @@ if user_text:
     print("params:", params) # debug

     # Generate and display the model's responses
+    with st.spinner('AI is thinking...'):
         response = model.chat(image=None, msgs=msgs, context=None, tokenizer=tokenizer, **params)
         st.session_state.response = st.write_stream(response)
         st.session_state.chat_history.append({
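The second hunk wraps generation in `st.spinner` so the page shows a progress message while the model works. A self-contained sketch of the same spinner-plus-streaming pattern follows; `fake_stream` is a hypothetical stand-in for the streaming response that `model.chat(...)` returns in the demo.

```python
import time
import streamlit as st

def fake_stream():
    # Hypothetical placeholder for the token generator produced by model.chat(...)
    for token in ["Hello", ", ", "world", "!"]:
        time.sleep(0.2)
        yield token

if st.chat_input("Enter your question"):
    with st.chat_message("assistant"):
        with st.spinner("AI is thinking..."):
            # st.write_stream renders chunks as they arrive and returns the full text
            full_text = st.write_stream(fake_stream())
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []
    st.session_state.chat_history.append({"role": "assistant", "content": full_text})
```

Since `st.write_stream` runs inside the spinner block here, the spinner stays visible until streaming completes, which appears to match how the commit nests the calls.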
@@ -261,3 +266,4 @@ if user_text:
         })

     st.divider() # Add separators to the interface
+