add chatbot with local llama3.2 1B model on Ollama

chatbot_config.json (new file, 6 lines)
@@ -0,0 +1,6 @@
+{
+    "api_url": "https://api.deepseek.com",
+    "api_key": "sk-12165b127043441697a8940918e207ac",
+    "ollama_api_url": "http://localhost:11434/v1",
+    "ollama_api_key": "ollama"
+}
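
The two ollama_* fields are what point the new chatbot at the local model: Ollama serves an OpenAI-compatible API under /v1 on port 11434 and does not validate the API key, so a placeholder string such as "ollama" is enough. A minimal sketch of how these fields become a client (assumes the openai >= 1.0 Python package and an Ollama server already running locally):

    import json
    from openai import OpenAI

    # Read the same file the chatbot pages load.
    with open("chatbot_config.json") as config_file:
        config = json.load(config_file)

    # base_url targets the local Ollama server's OpenAI-compatible endpoint;
    # api_key is a required argument, but Ollama ignores its value.
    client = OpenAI(
        api_key=config["ollama_api_key"],
        base_url=config["ollama_api_url"],
    )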

config.json (deleted file, 4 lines)
@@ -1,4 +0,0 @@
-{
-    "api_url": "https://api.deepseek.com",
-    "api_key": "sk-12165b127043441697a8940918e207ac"
-}

@@ -12,7 +12,7 @@ with st.expander("See Source Code"):
         st.code(f.read(), language="python")

 # Load API credentials from config.json
-with open('config.json') as config_file:
+with open('chatbot_config.json') as config_file:
     config = json.load(config_file)
 openai_api_base_url = config["api_url"]
 openai_api_key = config["api_key"]
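
With this rename both pages share chatbot_config.json: the existing DeepSeek chatbot keeps reading api_url/api_key, while llama_chatbot.py below reads the ollama_* pair. If the client setup ever needs to live in one place, a small helper along these lines would do it (hypothetical make_client helper, not part of this commit; assumes the openai >= 1.0 package):

    import json
    from openai import OpenAI

    def make_client(backend, config_path="chatbot_config.json"):
        """Build a client for either the hosted DeepSeek API ("deepseek")
        or the local Ollama server ("ollama") from the shared config file."""
        with open(config_path) as config_file:
            config = json.load(config_file)
        if backend == "ollama":
            return OpenAI(api_key=config["ollama_api_key"],
                          base_url=config["ollama_api_url"])
        return OpenAI(api_key=config["api_key"], base_url=config["api_url"])

    # e.g. client = make_client("ollama")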

llama_chatbot.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+import streamlit as st
+from openai import OpenAI  # OpenAI compatibility
+import json
+# reference:
+# - Use OpenAI to connect Ollama: https://ollama.com/blog/openai-compatibility
+# - Build Chatbot with streamlit: https://streamlit.io/generative-ai
+# - finetune: https://docs.loopin.network/tutorials/LLM/llama3-finetune
+# - Ollama docker: https://hub.docker.com/r/ollama/ollama
+
+
+
+
+# Set up the Streamlit app
+st.markdown("<h1 style='text-align: center; color: #451002;'>BuffBot🦬</h1>", unsafe_allow_html=True)
+# st.subheader()
+st.info("Powered by llama3.2:1b model via [Ollama](https://ollama.com/library/llama3.2:1b)!")
+with st.expander("See Source Code"):
+    with open(__file__, "r") as f:
+        st.code(f.read(), language="python")
+
+# Load API credentials from chatbot_config.json
+with open('chatbot_config.json') as config_file:
+    config = json.load(config_file)
+api_base_url = config["ollama_api_url"]
+api_key = config["ollama_api_key"]
+
+client = OpenAI(api_key=api_key, base_url=api_base_url)
+
+# Initialize session state to store chat history
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
+# Print the welcome message
+with st.chat_message("assistant", avatar="🦬"):
+    st.markdown("Welcome to BuffBot! How can I help you today?")
+
+# Display chat history
+for message in st.session_state.messages:
+    if message["role"] == "user":
+        avatar = "🤠"
+    else:
+        avatar = "🦬"
+    with st.chat_message(message["role"], avatar=avatar):
+        st.markdown(message["content"])
+
+# Chat input
+if prompt := st.chat_input("Type your message..."):
+    # Add user message to chat history
+    st.session_state.messages.append({"role": "user", "content": prompt})
+    with st.chat_message("user", avatar="🤠"):
+        st.markdown(prompt)
+
+    # Call the local llama3.2:1b model via Ollama for a response
+    with st.chat_message("assistant", avatar="🦬"):
+        with st.spinner('Thinking...'):
+            stream = client.chat.completions.create(
+                model="llama3.2:1b",
+                messages=[
+                    {"role": m["role"], "content": m["content"]}
+                    for m in st.session_state.messages
+                ],
+                stream=True,
+            )
+        response = st.write_stream(stream)
+    st.session_state.messages.append({"role": "assistant", "content": response})
+
+
+if st.button("Clear Chat"):
+    st.session_state.messages = []
+    st.toast("Chat Cleaned", icon="🧹")
+
+
+
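
Because stream=True makes client.chat.completions.create() return an iterator of chunks, st.write_stream() can render the reply token by token as it arrives. The same call can be exercised outside Streamlit as a quick smoke test; a minimal command-line sketch (assumes the openai >= 1.0 package, the chatbot_config.json above, and that llama3.2:1b has already been pulled into Ollama):

    import json
    from openai import OpenAI

    with open("chatbot_config.json") as config_file:
        config = json.load(config_file)

    client = OpenAI(api_key=config["ollama_api_key"],
                    base_url=config["ollama_api_url"])

    stream = client.chat.completions.create(
        model="llama3.2:1b",
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
        stream=True,
    )

    # Each chunk carries a small delta of the reply; print the pieces as they arrive.
    for chunk in stream:
        if chunk.choices and chunk.choices[0].delta.content:
            print(chunk.choices[0].delta.content, end="", flush=True)
    print()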