# Author: Du Mingzhe (dumingzhex@gmail.com)
# Date: 2024/03/09


class LLMClient:
    """Minimal wrapper around the OpenAI chat-completions endpoint.

    Builds a chat ``messages`` list from an optional conversation history
    plus the current user query, then requests a completion from the
    configured model and returns the reply text.
    """

    def __init__(self, api_key, model_name="gpt-3.5-turbo") -> None:
        """Create a client bound to one API key and model.

        Args:
            api_key: OpenAI API key passed straight to the SDK client.
            model_name: Chat model identifier (default "gpt-3.5-turbo").
        """
        # Imported lazily so this module can be loaded (and the prompt
        # helper used) even when the `openai` package is not installed.
        from openai import OpenAI
        self.model_name = model_name
        self.llm_client = OpenAI(api_key=api_key)

    def prompt_list_generate(self, query, history, web_results, personal_results):
        """Assemble the ``messages`` payload for a chat-completion call.

        Args:
            query: Current user question; appended as the final "user" turn.
            history: Optional iterable of {"role", "content"} dicts for prior
                turns; ``None`` is treated as an empty history. Entries are
                shallow-copied into fresh dicts.
            web_results: Currently unused — reserved for retrieval context.
                TODO(review): confirm intended formatting before wiring in.
            personal_results: Currently unused — reserved for personalization
                context. TODO(review): confirm intended formatting.

        Returns:
            list[dict]: messages in the shape ``chat.completions.create``
            expects (flat list of role/content dicts).
        """
        # BUG FIX: the original used `prompt_list += [ ... ],` — the trailing
        # comma made the RHS a 1-tuple holding the whole message list, so the
        # API received a nested list. It also never appended `query`, and it
        # crashed when `history` was None (the default from `generate`).
        prompt_list = [
            {"role": m["role"], "content": m["content"]} for m in (history or [])
        ]
        prompt_list.append({"role": "user", "content": query})
        return prompt_list

    def generate(self, query, history=None, web_results=None, personal_results=None):
        """Return the model's reply text for `query` given optional context.

        Args:
            query: User question to answer.
            history: Optional prior turns (see `prompt_list_generate`).
            web_results: Reserved; currently unused.
            personal_results: Reserved; currently unused.

        Returns:
            str: content of the first completion choice.
        """
        prompt_list = self.prompt_list_generate(
            query, history, web_results, personal_results
        )
        response = self.llm_client.chat.completions.create(
            model=self.model_name,
            messages=prompt_list,
        )
        return response.choices[0].message.content