# Gradio chatbot app: OpenAI chat completions with Chroma-based retrieval
# of previous conversations as extra prompt context.
import os
import pprint
import uuid

import chromadb
import gradio as gr
from chromadb.utils.embedding_functions import OpenAIEmbeddingFunction
from openai import OpenAI
# Client configured from the environment: OPENAI_KEY must be set;
# MODEL_NAME / EMBEDDING_MODEL are read lazily where they are used.
client = OpenAI(api_key=os.getenv("OPENAI_KEY"))

# Pretty-printer used to log outgoing request payloads.
pp = pprint.PrettyPrinter(indent=4)
def generate_response(messages):
    """Send *messages* to the configured chat model and return the reply.

    Parameters
    ----------
    messages : list[dict]
        Chat history in OpenAI format ({"role": ..., "content": ...}).

    Returns
    -------
    The first choice's message object from the completion response.
    """
    model_name = os.getenv("MODEL_NAME")
    response = client.chat.completions.create(
        model=model_name,
        messages=messages,
        temperature=0.5,
        max_tokens=250,
    )
    # Log the request payload and token accounting for debugging/cost tracking.
    print("Request:")
    pp.pprint(messages)
    usage = response.usage
    print(
        f"Completion tokens: {usage.completion_tokens}, "
        f"Prompt tokens: {usage.prompt_tokens}, "
        f"Total tokens: {usage.total_tokens}"
    )
    return response.choices[0].message
def chat_interface(user_input):
    """Gradio handler: answer *user_input* using similar past chats as context.

    Retrieves up to two semantically similar previous exchanges from a
    Chroma collection, prepends them to the prompt, asks the model,
    stores the new exchange, and returns the assistant's reply text.
    """
    # Build the vector store once and reuse it across calls. The original
    # code created a fresh in-memory chromadb.Client() on every call, so
    # the collection was always empty (retrieval could never find anything)
    # and create_collection would raise on a reused collection name.
    if not hasattr(chat_interface, "_collection"):
        chroma_client = chromadb.Client()
        embedding_function = OpenAIEmbeddingFunction(
            api_key=os.getenv("OPENAI_KEY"),
            model_name=os.getenv("EMBEDDING_MODEL"),
        )
        chat_interface._collection = chroma_client.get_or_create_collection(
            name="conversations", embedding_function=embedding_function
        )
    collection = chat_interface._collection

    messages = [{"role": "system", "content": "You are a kind and friendly chatbot"}]
    # Pull the two most similar stored exchanges as additional context.
    results = collection.query(query_texts=[user_input], n_results=2)
    for res in results["documents"][0]:
        messages.append({"role": "user", "content": f"previous chat: {res}"})
    messages.append({"role": "user", "content": user_input})

    response = generate_response(messages)
    # Gradio's "text" output expects a plain string, not the message object.
    reply = response.content
    # Persist this exchange so future queries can retrieve it.
    collection.add(
        documents=[f"user: {user_input}\nassistant: {reply}"],
        ids=[str(uuid.uuid4())],
    )
    return reply
def main():
    """Launch the Gradio text-in/text-out chat UI (blocks until stopped)."""
    interface = gr.Interface(
        fn=chat_interface,
        inputs="text",
        outputs="text",
        title="Chatbot Interface",
    )
    interface.launch()


if __name__ == "__main__":
    main()