ogegadavis254 committed on
Commit
a9c7401
1 Parent(s): fe5f88e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -38
app.py CHANGED
@@ -1,3 +1,9 @@
 
 
 
 
 
 
1
  import streamlit as st
2
  from openai import OpenAI
3
  import os
@@ -6,68 +12,64 @@ from dotenv import load_dotenv
6
  load_dotenv()
7
 
8
  # Initialize the client
9
- client = OpenAI(api_key=os.getenv('OPENAI_API_KEY'))
 
 
 
10
 
11
- # Model link for Mistral
12
  model_link = "mistralai/Mistral-7B-Instruct-v0.2"
13
 
14
  def reset_conversation():
15
- '''
16
- Resets Conversation
17
- '''
18
  st.session_state.messages = []
 
19
 
20
  # Set the temperature value directly in the code
21
  temperature = 0.5
22
 
23
- # Add reset button to clear conversation
24
- st.button('Reset Chat', on_click=reset_conversation)
 
25
 
26
  # Initialize chat history
27
  if "messages" not in st.session_state:
28
  st.session_state.messages = []
29
 
 
 
 
30
  # Display chat messages from history on app rerun
31
  for message in st.session_state.messages:
32
  with st.chat_message(message["role"]):
33
  st.markdown(message["content"])
34
 
35
  # Accept user input
36
- if prompt := st.chat_input("Type your message here..."):
37
 
 
38
  # Display user message in chat message container
39
  with st.chat_message("user"):
40
  st.markdown(prompt)
41
-
42
  # Add user message to chat history
43
  st.session_state.messages.append({"role": "user", "content": prompt})
44
 
45
- # Interact with the model
46
- try:
47
- # Construct the conversation context
48
- conversation_context = [{"role": "system", "content": "You are a helpful assistant."}]
49
- for msg in st.session_state.messages:
50
- conversation_context.append(msg)
51
-
52
- # Send the conversation context to the API
53
- response = client.chat.completions.create(
54
- model=model_link,
55
- messages=conversation_context,
56
- temperature=temperature,
57
- max_tokens=150 # Adjust the max tokens according to your needs
58
- )
59
-
60
- assistant_response = response.choices[0].message["content"]
61
-
62
- # Display assistant response in chat message container
63
- with st.chat_message("assistant"):
64
- st.markdown(assistant_response)
65
-
66
- # Append the assistant's response to the chat history
67
- st.session_state.messages.append({"role": "assistant", "content": assistant_response})
68
-
69
- except Exception as e:
70
- # Display error message to user
71
- with st.chat_message("assistant"):
72
- st.markdown("Sorry, I couldn't process your request. Please try again later.")
73
- st.error(f"An error occurred: {e}")
 
1
+ """
2
+ Simple Chatbot
3
+ @author: Nigel Gebodh
4
+ @email: [email protected]
5
+ """
6
+
7
  import streamlit as st
8
  from openai import OpenAI
9
  import os
 
12
  load_dotenv()
13
 
14
  # Initialize the client
15
+ client = OpenAI(
16
+ base_url="https://api-inference.huggingface.co/v1",
17
+ api_key=os.environ.get('HUGGINGFACEHUB_API_TOKEN') # Replace with your token
18
+ )
19
 
 
20
  model_link = "mistralai/Mistral-7B-Instruct-v0.2"
21
 
22
  def reset_conversation():
23
+ """Resets Conversation"""
24
+ st.session_state.conversation = []
 
25
  st.session_state.messages = []
26
+ return None
27
 
28
  # Set the temperature value directly in the code
29
  temperature = 0.5
30
 
31
+ # Add a button to clear conversation
32
+ if st.button('Reset Chat'):
33
+ reset_conversation()
34
 
35
  # Initialize chat history
36
  if "messages" not in st.session_state:
37
  st.session_state.messages = []
38
 
39
+ st.title("Mistral-7B Chatbot")
40
+ st.subheader("Ask me anything!")
41
+
42
  # Display chat messages from history on app rerun
43
  for message in st.session_state.messages:
44
  with st.chat_message(message["role"]):
45
  st.markdown(message["content"])
46
 
47
  # Accept user input
48
+ prompt = st.chat_input("Type your message here...")
49
 
50
+ if prompt:
51
  # Display user message in chat message container
52
  with st.chat_message("user"):
53
  st.markdown(prompt)
 
54
  # Add user message to chat history
55
  st.session_state.messages.append({"role": "user", "content": prompt})
56
 
57
+ # Display assistant response in chat message container
58
+ with st.chat_message("assistant"):
59
+ try:
60
+ response = client.chat.completions.create(
61
+ model=model_link,
62
+ messages=[
63
+ {"role": m["role"], "content": m["content"]}
64
+ for m in st.session_state.messages
65
+ ],
66
+ temperature=temperature,
67
+ max_tokens=3000
68
+ )['choices'][0]['message']['content']
69
+
70
+ st.markdown(response)
71
+ st.session_state.messages.append({"role": "assistant", "content": response})
72
+
73
+ except Exception as e:
74
+ st.markdown("An error occurred. Please try again later.")
75
+ st.markdown(f"Error details: {e}")