import os

import pinecone
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langchain.chains import RetrievalQA
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.vectorstores import Pinecone

# Configuration is read from environment variables.
PINECONE_API_KEY = os.getenv('PINECONE_API_KEY')
PINECONE_ENV = os.getenv('PINECONE_ENV')
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
PINECONE_INDEX_NAME = os.getenv('PINECONE_INDEX_NAME')


def parse_response(response):
    """Combine the chain's answer with the source file name and page number of each retrieved document."""
    result = response['result']
    result += '\n\nSources: \n'
    for doc in response["source_documents"]:
        result += ''.join((doc.metadata['source'], " page #:", str(doc.metadata['page']), ' \n'))
    return result


app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=['*']
)


@app.get("/")
def read_root():
    return {"message": "Hello World"}


@app.get("/api/python")
def hello_python():
    return {"message": "Hello Python"}


@app.get("/prompt")
def run_prompt(p: str = 'According to HQ H303140, what does "Country of origin" mean?'):
    # Connect to Pinecone and confirm the index is reachable.
    pinecone.init(
        api_key=PINECONE_API_KEY,
        environment=PINECONE_ENV
    )
    index = pinecone.Index(PINECONE_INDEX_NAME)
    index.describe_index_stats()

    # Build a retriever over the existing Pinecone index using OpenAI embeddings.
    embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
    docsearch = Pinecone.from_existing_index(PINECONE_INDEX_NAME, embeddings)
    retriever = docsearch.as_retriever(
        include_metadata=True,
        metadata_key='source'
    )

    # Answer the prompt with a "stuff" RetrievalQA chain and return the sources alongside the answer.
    llm = OpenAI(temperature=0, openai_api_key=OPENAI_API_KEY)
    qa_chain = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=retriever,
        return_source_documents=True
    )
    response = qa_chain(p)
    return {
        "prompt": p,
        "response": parse_response(response)
    }
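
# Example usage (a sketch, not part of the original code): assuming this module is
# saved as main.py and uvicorn is installed (both assumptions), the service can be
# started and the /prompt endpoint exercised like this:
#
#   uvicorn main:app --reload
#   curl "http://localhost:8000/prompt?p=What+does+country+of+origin+mean"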