import os

import openai
import gradio as gr
from langchain.chains import RetrievalQA
from langchain.llms import OpenAI
from langchain.document_loaders import TextLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma

# Read the OpenAI API key from the environment rather than hard-coding it in source
openai.api_key = os.environ["OPENAI_API_KEY"]

# Load text from file
loader = TextLoader("Dropsheets.txt")
documents = loader.load()

# Split the documents into chunks
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1024, chunk_overlap=0)
texts = text_splitter.split_documents(documents)

# Select embeddings
embeddings = OpenAIEmbeddings()

# Create the vectorstore to use as the index
db = Chroma.from_documents(texts, embeddings)

# Expose this index in a retriever interface
retriever = db.as_retriever(search_type="similarity", search_kwargs={"k": 2})

# Create a chain to answer questions over the retrieved documents
qa = RetrievalQA.from_chain_type(
    llm=OpenAI(),
    chain_type="stuff",
    retriever=retriever,
)

# (Earlier direct-completion approach, superseded by the RetrievalQA chain above)
# def generate_text(prompt):
#     response = openai.Completion.create(
#         engine="text-davinci-002",
#         temperature=0,
#         max_tokens=7000,
#         prompt=prompt,
#     )
#     return response.choices[0].text.strip()


def answer_question(query):
    """Answer a question using the RetrievalQA chain over the knowledge base."""
    # retriever.get_relevant_documents(query) can be called here to inspect the retrieved chunks
    result = qa({"query": query})
    return result["result"]


# Create Gradio interface
input_text = gr.Textbox(label="Enter prompt", type="text")
output_text = gr.Textbox(label="AI response", type="text")

demo = gr.Interface(
    fn=answer_question,
    inputs=input_text,
    outputs=output_text,
    title="AI Chatbot for PlanetTogether Knowledge Base",
    description="Ask a question about the PlanetTogether APS:",
    examples=[["How do you create an Alternate Path?"]],
    theme="default",
)

# Launch demo
demo.launch()