diff --git a/chatbot_config.json b/chatbot_config.json
new file mode 100644
index 000000000..ddaef19c7
--- /dev/null
+++ b/chatbot_config.json
@@ -0,0 +1,6 @@
+{
+ "api_url": "https://api.deepseek.com",
+ "api_key": "sk-12165b127043441697a8940918e207ac",
+ "ollama_api_url": "http://localhost:11434/v1",
+ "ollama_api_key": "ollama"
+}
\ No newline at end of file
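chatbot_config.json now holds credentials for both backends: the DeepSeek entries used by deepseek_chatbot.py and the Ollama entries used by llama_chatbot.py. A minimal sketch (not part of this diff) of loading the file and checking that all four keys are present:

    import json

    # Load the shared config and verify the keys that both chatbots expect.
    with open("chatbot_config.json") as config_file:
        config = json.load(config_file)

    for key in ("api_url", "api_key", "ollama_api_url", "ollama_api_key"):
        if key not in config:
            raise KeyError(f"chatbot_config.json is missing '{key}'")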
diff --git a/config.json b/config.json
deleted file mode 100644
index de439fc2b..000000000
--- a/config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "api_url": "https://api.deepseek.com",
- "api_key": "sk-12165b127043441697a8940918e207ac"
-}
\ No newline at end of file
diff --git a/chatbot.py b/deepseek_chatbot.py
similarity index 97%
rename from chatbot.py
rename to deepseek_chatbot.py
index 06395ba27..7cdf87e45 100644
--- a/chatbot.py
+++ b/deepseek_chatbot.py
@@ -12,7 +12,7 @@ with st.expander("See Source Code"):
st.code(f.read(), language="python")
# Load API credentials from config.json
-with open('config.json') as config_file:
+with open('chatbot_config.json') as config_file:
config = json.load(config_file)
openai_api_base_url = config["api_url"]
openai_api_key = config["api_key"]
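The rename only touches the config path; the rest of deepseek_chatbot.py is unchanged (97% similarity). A minimal sketch of how the loaded values are typically turned into an OpenAI-compatible client for DeepSeek; the model name "deepseek-chat" is an assumption for illustration, the actual value lives in deepseek_chatbot.py:

    import json
    from openai import OpenAI

    # Same config load as the hunk above, against the new file name.
    with open("chatbot_config.json") as config_file:
        config = json.load(config_file)

    # DeepSeek exposes an OpenAI-compatible endpoint, so the standard client works.
    client = OpenAI(api_key=config["api_key"], base_url=config["api_url"])
    reply = client.chat.completions.create(
        model="deepseek-chat",  # assumed model name, for illustration only
        messages=[{"role": "user", "content": "Hello!"}],
    )
    print(reply.choices[0].message.content)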
diff --git a/llama_chatbot.py b/llama_chatbot.py
new file mode 100644
index 000000000..b51b43523
--- /dev/null
+++ b/llama_chatbot.py
@@ -0,0 +1,73 @@
+import streamlit as st
+from openai import OpenAI # OpenAI compatibility
+import json
+# reference:
+# - Use the OpenAI client to connect to Ollama: https://ollama.com/blog/openai-compatibility
+# - Build a chatbot with Streamlit: https://streamlit.io/generative-ai
+# - Fine-tune llama3: https://docs.loopin.network/tutorials/LLM/llama3-finetune
+# - Ollama Docker image: https://hub.docker.com/r/ollama/ollama
+
+
+
+
+# Set up the Streamlit app
+st.markdown("
BuffBot🦬
", unsafe_allow_html=True)
+# st.subheader()
+st.info("Powered by llama3.2:1b model via [Ollama](https://ollama.com/library/llama3.2:1b)!")
+with st.expander("See Source Code"):
+ with open(__file__, "r") as f:
+ st.code(f.read(), language="python")
+
+# Load API credentials from chatbot_config.json
+with open('chatbot_config.json') as config_file:
+ config = json.load(config_file)
+ api_base_url = config["ollama_api_url"]
+ api_key = config["ollama_api_key"]
+
+client = OpenAI(api_key=api_key, base_url=api_base_url)
+
+# Initialize session state to store chat history and message count
+if "messages" not in st.session_state:
+ st.session_state.messages = []
+
+# print welcome message
+with st.chat_message("assistant", avatar="🦬"):
+    st.markdown("Welcome to BuffBot! How can I help you today?")
+
+# Display chat history
+for message in st.session_state.messages:
+    if message["role"] == "user":
+        avatar = "🤠"
+    else:
+        avatar = "🦬"
+ with st.chat_message(message["role"], avatar=avatar):
+ st.markdown(message["content"])
+
+# Chat input
+if prompt := st.chat_input("Type your message..."):
+ # Add user message to chat history
+ st.session_state.messages.append({"role": "user", "content": prompt})
+ with st.chat_message("user", avatar="🤠"):
+ st.markdown(prompt)
+
+    # Ask the local llama3.2:1b model (via Ollama's OpenAI-compatible API) for a response
+ with st.chat_message("assistant", avatar="🦬"):
+ with st.spinner('Thinking...'):
+ stream = client.chat.completions.create(
+ model="llama3.2:1b",
+ messages=[
+ {"role": m["role"], "content": m["content"]}
+ for m in st.session_state.messages
+ ],
+ stream=True,
+ )
+ response = st.write_stream(stream)
+ st.session_state.messages.append({"role": "assistant", "content": response})
+
+
+if st.button("Clear Chat"):
+ st.session_state.messages = []
+    st.toast("Chat cleared", icon="🧹")
+
+
+
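Running llama_chatbot.py assumes an Ollama server is listening on localhost:11434 with the llama3.2:1b model already pulled (see the Ollama links in the file header); the app itself is started with "streamlit run llama_chatbot.py". A minimal sketch, outside Streamlit, of the same streaming call the app makes through Ollama's OpenAI-compatible API:

    import json
    from openai import OpenAI

    # Point the OpenAI client at the local Ollama server, using the same config
    # entries as llama_chatbot.py (the api key is a placeholder that Ollama ignores).
    with open("chatbot_config.json") as config_file:
        config = json.load(config_file)

    client = OpenAI(api_key=config["ollama_api_key"], base_url=config["ollama_api_url"])

    # Stream a single reply from llama3.2:1b and print chunks as they arrive.
    stream = client.chat.completions.create(
        model="llama3.2:1b",
        messages=[{"role": "user", "content": "Say hello from BuffBot."}],
        stream=True,
    )
    for chunk in stream:
        print(chunk.choices[0].delta.content or "", end="", flush=True)
    print()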