import gradio as gr
from openai import OpenAI

# Point to the local LM Studio server (OpenAI-compatible endpoint).
client = OpenAI(base_url="http://localhost:1234/v1", api_key="lm-studio")


def chat(message, history):
    """Send the user's message (plus prior turns) to the local model.

    Args:
        message: The latest user message from the textbox.
        history: List of (user, assistant) tuples backing the Chatbot.

    Returns:
        The updated history list, with the model's reply (or a fallback
        notice) filled into the final turn. Exactly one value is returned
        so it matches the single output component wired up below.
    """
    # Show the user's message immediately; the assistant slot is pending.
    history.append((message, None))

    # Convert (user, assistant) tuples into the OpenAI messages format.
    messages = [{"role": "system", "content": "Always answer in rhymes."}]
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        # Skip the still-pending assistant slot of the final turn.
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    completion = client.chat.completions.create(
        model="lmstudio-community/Meta-Llama-3-8B-Instruct-GGUF",
        messages=messages,
        temperature=0.7,
    )

    # Guard against an empty choices list from the server.
    if completion.choices:
        response = completion.choices[0].message.content
    else:
        response = "No response received from the model."
    # Fill the pending turn in both cases so the fallback message is
    # actually displayed (the original only updated on success).
    history[-1] = (message, response)

    # BUG FIX: the click handler declares a single output (chat_history),
    # so return only the history — returning (history, response) raises a
    # return-value/output-count mismatch in Gradio.
    return history


# Create the Gradio interface using gr.Blocks.
with gr.Blocks() as iface:
    chat_history = gr.Chatbot(label="Conversation History")
    message_input = gr.Textbox(label="Your message")
    send_button = gr.Button(value="Send")

    # Wire the button to chat(); message_input was removed from outputs.
    send_button.click(
        chat,
        inputs=[message_input, chat_history],
        outputs=[chat_history],
    )

# Launch the Gradio interface (no public share link).
iface.launch(share=False)