# Attention!
Since version 1.24.0 streamlit provides official elements to [build conversational apps](https://docs.streamlit.io/knowledge-base/tutorials/build-conversational-apps).
The new elements are more flexible, extensible, and better supported, so I would suggest using them.
However, streamlit>=1.23 requires protobuf>=4, while some packages require protobuf<=3. In that case you can use this package(<1.0.0) with streamlit<=1.22 as an alternative. Both are simple ways to render text messages.
This package(>=1.0.0) focuses on wrapping the official chat elements to make chatting with LLMs more convenient.
# Chatbox component for streamlit
A Streamlit component to show chat messages.
It's basically a wrapper of streamlit official elements including the chat elements.
- demo

- demo agent

## Features
- support streaming output.
- support markdown/image/video/audio messages, and all streamlit elements could be supported by customized `OutputElement`.
- output multiple messages at once, and make them collapsible.
- maintain session state context bound to chat conversation
- export & import chat histories
This makes it easy to chat with langchain LLMs in streamlit.
Goto [webui](https://github.com/chatchat-space/Langchain-Chatchat/blob/master/webui_pages/dialogue/dialogue.py) of [langchain-chatchat](https://github.com/chatchat-space/Langchain-Chatchat) to see the actual application.
## Install
just `pip install -U streamlit-chatbox`
## Usage examples
```python
import streamlit as st
from streamlit_chatbox import *
import time
import simplejson as json
llm = FakeLLM()
chat_box = ChatBox(
use_rich_markdown=True, # use streamlit-markdown
user_theme="green", # see streamlit_markdown.st_markdown for all available themes
assistant_theme="blue",
)
chat_box.use_chat_name("chat1") # add a chat conversation
def on_chat_change():
chat_box.use_chat_name(st.session_state["chat_name"])
chat_box.context_to_session() # restore widget values to st.session_state when chat name changed
with st.sidebar:
st.subheader('start to chat using streamlit')
chat_name = st.selectbox("Chat Session:", ["default", "chat1"], key="chat_name", on_change=on_chat_change)
chat_box.use_chat_name(chat_name)
streaming = st.checkbox('streaming', key="streaming")
in_expander = st.checkbox('show messages in expander', key="in_expander")
show_history = st.checkbox('show session state', key="show_history")
chat_box.context_from_session(exclude=["chat_name"]) # save widget values to chat context
st.divider()
btns = st.container()
file = st.file_uploader(
"chat history json",
type=["json"]
)
if st.button("Load Json") and file:
data = json.load(file)
chat_box.from_dict(data)
chat_box.init_session()
chat_box.output_messages()
def on_feedback(
feedback,
chat_history_id: str = "",
history_index: int = -1,
):
reason = feedback["text"]
score_int = chat_box.set_feedback(feedback=feedback, history_index=history_index) # convert emoji to integer
# do something
st.session_state["need_rerun"] = True
feedback_kwargs = {
"feedback_type": "thumbs",
"optional_text_label": "wellcome to feedback",
}
if query := st.chat_input('input your question here'):
chat_box.user_say(query)
if streaming:
generator = llm.chat_stream(query)
elements = chat_box.ai_say(
[
# you can use string for Markdown output if no other parameters provided
Markdown("thinking", in_expander=in_expander,
expanded=True, title="answer"),
Markdown("", in_expander=in_expander, title="references"),
]
)
time.sleep(1)
text = ""
for x, docs in generator:
text += x
chat_box.update_msg(text, element_index=0, streaming=True)
# update the element without focus
chat_box.update_msg(text, element_index=0, streaming=False, state="complete")
chat_box.update_msg("\n\n".join(docs), element_index=1, streaming=False, state="complete")
chat_history_id = "some id"
chat_box.show_feedback(**feedback_kwargs,
key=chat_history_id,
on_submit=on_feedback,
kwargs={"chat_history_id": chat_history_id, "history_index": len(chat_box.history) - 1})
else:
text, docs = llm.chat(query)
chat_box.ai_say(
[
Markdown(text, in_expander=in_expander,
expanded=True, title="answer"),
Markdown("\n\n".join(docs), in_expander=in_expander,
title="references"),
]
)
cols = st.columns(2)
if cols[0].button('show me the multimedia'):
chat_box.ai_say(Image(
'https://tse4-mm.cn.bing.net/th/id/OIP-C.cy76ifbr2oQPMEs2H82D-QHaEv?w=284&h=181&c=7&r=0&o=5&dpr=1.5&pid=1.7'))
time.sleep(0.5)
chat_box.ai_say(
Video('https://sample-videos.com/video123/mp4/720/big_buck_bunny_720p_1mb.mp4'))
time.sleep(0.5)
chat_box.ai_say(
Audio('https://sample-videos.com/video123/mp4/720/big_buck_bunny_720p_1mb.mp4'))
if cols[1].button('run agent'):
chat_box.user_say('run agent')
agent = FakeAgent()
text = ""
# streaming:
chat_box.ai_say() # generate a blank placeholder to render messages
for d in agent.run_stream():
if d["type"] == "complete":
chat_box.update_msg(expanded=False, state="complete")
chat_box.insert_msg(d["llm_output"])
break
if d["status"] == 1:
chat_box.update_msg(expanded=False, state="complete")
text = ""
chat_box.insert_msg(Markdown(text, title=d["text"], in_expander=True, expanded=True))
elif d["status"] == 2:
text += d["llm_output"]
chat_box.update_msg(text, streaming=True)
else:
chat_box.update_msg(text, streaming=False)
btns.download_button(
"Export Markdown",
"".join(chat_box.export2md()),
file_name=f"chat_history.md",
mime="text/markdown",
)
btns.download_button(
"Export Json",
chat_box.to_json(),
file_name="chat_history.json",
mime="text/json",
)
if btns.button("clear history"):
chat_box.init_session(clear=True)
st.experimental_rerun()
if show_history:
st.write(st.session_state)
```
## Todos
- [x] wrapper of official chat elements
- [ ] input messages: (this depends on the official st.chat_input improvement by #7069)
- [x] TEXT
- [ ] IMAGE
- [ ] file upload
- [ ] paste from clipboard(streamlit_bokeh_events)
- [ ] VIDEO
- [ ] file upload
- [ ] AUDIO
- [ ] file upload
- [ ] audio-recorder-streamlit
- [x] output message types:
- [x] Text/Markdown/Image/Audio/Video
- [x] any other output types supported by streamlit
- [ ] improve output performance
- [x] streaming output message
- [x] show message in expander
- [x] rich output message using streamlit-markdown
- [x] feedback by user
- [x] export & import chat history
- [x] export to markdown
- [x] export to json
- [x] import json
- [x] support output of langchain's Agent.
- [x] context bound to chat
# changelog
## v1.1.13
- add Json output element
- can choose to use streamlit-markdown instead of st.markdown. currently needs streamlit==1.37.1 when streaming
- user can register a custom output method with `ChatBox.register_output_method`. This is useful for using third-party components:
```python3
from streamlit_chatbox import *
from streamlit_markdown import st_hack_markdown
ChatBox.register_output_method("st_markdown", st_hack_markdown)
cb = ChatBox()
cb.user_say(OutputElement("user defined output method", output_method="st_markdown", theme_color="blue", mermaid_theme_CSS=""))
```
Raw data
{
"_id": null,
"home_page": "https://github.com/liunux4odoo/streamlit-chatbox",
"name": "streamlit-chatbox",
"maintainer": null,
"docs_url": null,
"requires_python": ">=3.8",
"maintainer_email": null,
"keywords": null,
"author": "liunux",
"author_email": "liunux@qq.com",
"download_url": null,
"platform": null,
"description": "# Attention!\r\n\r\nSince version 1.24.0 streamlit provides official elements to [build conversational apps](https://docs.streamlit.io/knowledge-base/tutorials/build-conversational-apps).\r\n\r\nThe new elements are more flexible, extensible and better supported, I would suggest to use them. \r\n\r\nHowever, streamlit>=1.23 requires protobuf>=4 when some package requires protobuf<=3. In this condition you can use this package(<1.0.0) with streamlit<=1.22 as alternative. They are all simple to render text messages.\r\n\r\nThis package(>=1.0.0) will focus on wrapper of official chat elements to make chat with LLMs more convenient.\r\n\r\n# Chatbox component for streamlit\r\n\r\nA Streamlit component to show chat messages.\r\nIt's basiclly a wrapper of streamlit officeial elements including the chat elemnts.\r\n\r\n- demo\r\n\r\n\r\n- demo agent\r\n\r\n\r\n## Features\r\n\r\n- support streaming output.\r\n- support markdown/image/video/audio messages, and all streamlit elements could be supported by customized `OutputElement`.\r\n- output multiple messages at once, and make them collapsable.\r\n- maintain session state context bound to chat conversation\r\n- export & import chat histories\r\n\r\nThis make it easy to chat with langchain LLMs in streamlit.\r\n\r\nGoto [webui](https://github.com/chatchat-space/Langchain-Chatchat/blob/master/webui_pages/dialogue/dialogue.py) of [langchain-chatchat](https://github.com/chatchat-space/Langchain-Chatchat) to see the actual application.\r\n\r\n\r\n## Install\r\n\r\njust `pip install -U streamlit-chatbox`\r\n\r\n## Usage examples\r\n\r\n```python\r\nimport streamlit as st\r\nfrom streamlit_chatbox import *\r\nimport time\r\nimport simplejson as json\r\n\r\n\r\nllm = FakeLLM()\r\nchat_box = ChatBox(\r\n use_rich_markdown=True, # use streamlit-markdown\r\n user_theme=\"green\", # see streamlit_markdown.st_markdown for all available themes\r\n assistant_theme=\"blue\",\r\n)\r\nchat_box.use_chat_name(\"chat1\") # 
add a chat conversatoin\r\n\r\ndef on_chat_change():\r\n chat_box.use_chat_name(st.session_state[\"chat_name\"])\r\n chat_box.context_to_session() # restore widget values to st.session_state when chat name changed\r\n\r\n\r\nwith st.sidebar:\r\n st.subheader('start to chat using streamlit')\r\n chat_name = st.selectbox(\"Chat Session:\", [\"default\", \"chat1\"], key=\"chat_name\", on_change=on_chat_change)\r\n chat_box.use_chat_name(chat_name)\r\n streaming = st.checkbox('streaming', key=\"streaming\")\r\n in_expander = st.checkbox('show messages in expander', key=\"in_expander\")\r\n show_history = st.checkbox('show session state', key=\"show_history\")\r\n chat_box.context_from_session(exclude=[\"chat_name\"]) # save widget values to chat context\r\n\r\n st.divider()\r\n\r\n btns = st.container()\r\n\r\n file = st.file_uploader(\r\n \"chat history json\",\r\n type=[\"json\"]\r\n )\r\n\r\n if st.button(\"Load Json\") and file:\r\n data = json.load(file)\r\n chat_box.from_dict(data)\r\n\r\n\r\nchat_box.init_session()\r\nchat_box.output_messages()\r\n\r\ndef on_feedback(\r\n feedback,\r\n chat_history_id: str = \"\",\r\n history_index: int = -1,\r\n):\r\n reason = feedback[\"text\"]\r\n score_int = chat_box.set_feedback(feedback=feedback, history_index=history_index) # convert emoji to integer\r\n # do something\r\n st.session_state[\"need_rerun\"] = True\r\n\r\n\r\nfeedback_kwargs = {\r\n \"feedback_type\": \"thumbs\",\r\n \"optional_text_label\": \"wellcome to feedback\",\r\n}\r\n\r\nif query := st.chat_input('input your question here'):\r\n chat_box.user_say(query)\r\n if streaming:\r\n generator = llm.chat_stream(query)\r\n elements = chat_box.ai_say(\r\n [\r\n # you can use string for Markdown output if no other parameters provided\r\n Markdown(\"thinking\", in_expander=in_expander,\r\n expanded=True, title=\"answer\"),\r\n Markdown(\"\", in_expander=in_expander, title=\"references\"),\r\n ]\r\n )\r\n time.sleep(1)\r\n text = \"\"\r\n for x, docs in 
generator:\r\n text += x\r\n chat_box.update_msg(text, element_index=0, streaming=True)\r\n # update the element without focus\r\n chat_box.update_msg(text, element_index=0, streaming=False, state=\"complete\")\r\n chat_box.update_msg(\"\\n\\n\".join(docs), element_index=1, streaming=False, state=\"complete\")\r\n chat_history_id = \"some id\"\r\n chat_box.show_feedback(**feedback_kwargs,\r\n key=chat_history_id,\r\n on_submit=on_feedback,\r\n kwargs={\"chat_history_id\": chat_history_id, \"history_index\": len(chat_box.history) - 1})\r\n else:\r\n text, docs = llm.chat(query)\r\n chat_box.ai_say(\r\n [\r\n Markdown(text, in_expander=in_expander,\r\n expanded=True, title=\"answer\"),\r\n Markdown(\"\\n\\n\".join(docs), in_expander=in_expander,\r\n title=\"references\"),\r\n ]\r\n )\r\n\r\ncols = st.columns(2)\r\nif cols[0].button('show me the multimedia'):\r\n chat_box.ai_say(Image(\r\n 'https://tse4-mm.cn.bing.net/th/id/OIP-C.cy76ifbr2oQPMEs2H82D-QHaEv?w=284&h=181&c=7&r=0&o=5&dpr=1.5&pid=1.7'))\r\n time.sleep(0.5)\r\n chat_box.ai_say(\r\n Video('https://sample-videos.com/video123/mp4/720/big_buck_bunny_720p_1mb.mp4'))\r\n time.sleep(0.5)\r\n chat_box.ai_say(\r\n Audio('https://sample-videos.com/video123/mp4/720/big_buck_bunny_720p_1mb.mp4'))\r\n\r\nif cols[1].button('run agent'):\r\n chat_box.user_say('run agent')\r\n agent = FakeAgent()\r\n text = \"\"\r\n\r\n # streaming:\r\n chat_box.ai_say() # generate a blank placeholder to render messages\r\n for d in agent.run_stream():\r\n if d[\"type\"] == \"complete\":\r\n chat_box.update_msg(expanded=False, state=\"complete\")\r\n chat_box.insert_msg(d[\"llm_output\"])\r\n break\r\n\r\n if d[\"status\"] == 1:\r\n chat_box.update_msg(expanded=False, state=\"complete\")\r\n text = \"\"\r\n chat_box.insert_msg(Markdown(text, title=d[\"text\"], in_expander=True, expanded=True))\r\n elif d[\"status\"] == 2:\r\n text += d[\"llm_output\"]\r\n chat_box.update_msg(text, streaming=True)\r\n else:\r\n chat_box.update_msg(text, 
streaming=False)\r\n\r\nbtns.download_button(\r\n \"Export Markdown\",\r\n \"\".join(chat_box.export2md()),\r\n file_name=f\"chat_history.md\",\r\n mime=\"text/markdown\",\r\n)\r\n\r\nbtns.download_button(\r\n \"Export Json\",\r\n chat_box.to_json(),\r\n file_name=\"chat_history.json\",\r\n mime=\"text/json\",\r\n)\r\n\r\nif btns.button(\"clear history\"):\r\n chat_box.init_session(clear=True)\r\n st.experimental_rerun()\r\n\r\n\r\nif show_history:\r\n st.write(st.session_state)\r\n\r\n```\r\n\r\n## Todos\r\n\r\n- [x] wrapper of official chat elements\r\n- [ ] input messages: (this depends on the official st.chat_input improvement by #7069)\r\n\t- [x] TEXT\r\n\t- [ ] IMAGE\r\n\t\t- [ ] file upload\r\n\t\t- [ ] paste from clipboard(streamlit_bokeh_events)\r\n\t- [ ] VIDEO\r\n\t\t- [ ] file upload\r\n\t- [ ] AUDIO\r\n\t\t- [ ] file upload\r\n\t\t- [ ] audio-recorder-streamlit\r\n\r\n- [x] output message types:\r\n\t- [x] Text/Markdown/Image/Audio/Video\r\n\t- [x] any other output types supported by streamlit\r\n\r\n- [ ] improve output performance\r\n\t- [x] streaming output message\r\n\t- [x] show message in expander\r\n\t- [x] rich output message using streamlit-markdown\r\n - [x] feedback by user\r\n\r\n- [x] export & import chat history\r\n\t- [x] export to markdown\r\n\t- [x] export to json\r\n - [x] import json\r\n\r\n- [x] support output of langchain' Agent.\r\n- [x] conext bound to chat\r\n\r\n# changelog\r\n\r\n## v1.1.13\r\n- add Json output element\r\n- can choose to use streamlit-markdown instead of st.markdown. currently need streamlit==1.37.1 when streaming\r\n- user can register custom output method with `ChatBox.register_output_method`. 
This is useful to use thirdparty components:\r\n ```python3\r\n from streamlit_chatbox import *\r\n from streamlit_markdown import st_hack_markdown\r\n\r\n ChatBox.register_output_method(\"st_markdown\", st_hack_markdown)\r\n cb = ChatBox()\r\n cb.user_say(OutputElement(\"user defined output method\", output_method=\"st_markdown\", theme_color=\"blue\", mermaid_theme_CSS=\"\"))\r\n ```\r\n",
"bugtrack_url": null,
"license": null,
"summary": "A chat box and some helpful tools used to build chatbot app with streamlit",
"version": "1.1.13.post1",
"project_urls": {
"Homepage": "https://github.com/liunux4odoo/streamlit-chatbox"
},
"split_keywords": [],
"urls": [
{
"comment_text": "",
"digests": {
"blake2b_256": "0b49c64000202715ffd570819e8db068c9e7a793d663deba867cf16706b4d823",
"md5": "74fcecd2bb4ed6408580672e3e0044b8",
"sha256": "8fc7b64af998b7ad3437f6eb49c41ae34b699dac4cc57c5d9a9e76142b948299"
},
"downloads": -1,
"filename": "streamlit_chatbox-1.1.13.post1-py3-none-any.whl",
"has_sig": false,
"md5_digest": "74fcecd2bb4ed6408580672e3e0044b8",
"packagetype": "bdist_wheel",
"python_version": "py3",
"requires_python": ">=3.8",
"size": 15248,
"upload_time": "2024-10-15T05:47:17",
"upload_time_iso_8601": "2024-10-15T05:47:17.046018Z",
"url": "https://files.pythonhosted.org/packages/0b/49/c64000202715ffd570819e8db068c9e7a793d663deba867cf16706b4d823/streamlit_chatbox-1.1.13.post1-py3-none-any.whl",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2024-10-15 05:47:17",
"github": true,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"github_user": "liunux4odoo",
"github_project": "streamlit-chatbox",
"travis_ci": false,
"coveralls": false,
"github_actions": false,
"lcname": "streamlit-chatbox"
}