import os
import gradio as gr
from openai import OpenAI
import pprint
import chromadb
from chromadb.utils.embedding_functions import OpenAIEmbeddingFunction
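# Expected environment variables (names taken from the calls below; OPENAI_KEY is this
# app's own variable name, not the SDK default OPENAI_API_KEY):
#   OPENAI_KEY      - OpenAI API key used for both chat completions and embeddings
#   MODEL_NAME      - chat model passed to client.chat.completions.create
#   EMBEDDING_MODEL - embedding model used by Chroma's OpenAIEmbeddingFunction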
# Configure the OpenAI client from the OPENAI_KEY environment variable
client = OpenAI(api_key=os.getenv("OPENAI_KEY"))
pp = pprint.PrettyPrinter(indent=4)

# In-memory state for the running conversation; every turn is also written to Chroma
current_id = 0
chat_history = []
chat_metadata = []
history_ids = []

# Chroma collection that embeds each stored message with OpenAI embeddings
chroma_client = chromadb.Client()
embedding_function = OpenAIEmbeddingFunction(api_key=os.getenv("OPENAI_KEY"), model_name=os.getenv("EMBEDDING_MODEL"))
collection = chroma_client.create_collection(name="conversations", embedding_function=embedding_function)

# Base prompt; retrieved context and user turns are appended to this list as the chat progresses
messages = [{"role": "system", "content": "You are a kind and friendly chatbot"}]
def generate_response(messages):
    """Call the chat completions API, log the request and token usage, and return the reply message."""
    model_name = os.getenv("MODEL_NAME")
    response = client.chat.completions.create(model=model_name, messages=messages, temperature=0.5, max_tokens=250)
    print("Request:")
    pp.pprint(messages)
    print(f"Completion tokens: {response.usage.completion_tokens}, Prompt tokens: {response.usage.prompt_tokens}, Total tokens: {response.usage.total_tokens}")
    return response.choices[0].message
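# chat_interface implements the memory loop: each new user message is used to query the
# "conversations" collection, the two closest past messages are injected as extra context,
# the model replies, and both the user message and the reply are embedded and stored so
# later turns can retrieve them.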
def chat_interface(user_input):
    global current_id
    # Retrieve the two most similar past messages from Chroma and add them as context
    results = collection.query(query_texts=[user_input], n_results=2)
    for res in results['documents'][0]:
        messages.append({"role": "user", "content": f"previous chat: {res}"})
    messages.append({"role": "user", "content": user_input})
    response = generate_response(messages)

    # Record both sides of this turn, each with its own ID
    chat_metadata.append({"role": "user"})
    chat_history.append(user_input)
    chat_metadata.append({"role": "assistant"})
    chat_history.append(response.content)
    current_id += 1
    history_ids.append(f"id_{current_id}")
    current_id += 1
    history_ids.append(f"id_{current_id}")

    # Store only the two new messages; re-adding the full history would reuse existing IDs
    collection.add(
        documents=chat_history[-2:],
        metadatas=chat_metadata[-2:],
        ids=history_ids[-2:]
    )
    return response.content
def main():
    interface = gr.Interface(fn=chat_interface, inputs="text", outputs="text", title="Chatbot Interface")
    interface.launch()

if __name__ == "__main__":
    main()
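# A minimal local sanity check (a sketch; it bypasses the Gradio UI and assumes the
# environment variables listed at the top are set):
#
#   print(chat_interface("Hi, my name is Sam."))
#   print(chat_interface("What is my name?"))  # the first turn should be retrieved from Chroma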