lindsay-qu committed on
Commit
923679f
·
verified ·
1 Parent(s): 6ec8a6d

Update models/gpt4_model.py

Browse files
Files changed (1) hide show
  1. models/gpt4_model.py +38 -16
models/gpt4_model.py CHANGED
@@ -22,16 +22,27 @@ class GPT4Model(BaseModel):
22
  base_url=os.environ["OPENAI_API_BASE"]
23
  )
24
  print("start api call")
25
- output = await client.chat.completions.create(
26
- messages=messages,
27
- model=self.generation_model,
28
- temperature=self.temperature,
29
- max_tokens=1000,
30
- )
31
- print("end api call")
32
- response = output.choices[0].message.content
33
- # content = response.choices[0]['message']['content']
34
-
 
 
 
 
 
 
 
 
 
 
 
35
  return response
36
 
37
  def respond(self, messages: list[dict]) -> str:
@@ -41,12 +52,23 @@ class GPT4Model(BaseModel):
41
  )
42
  # OpenAI.api_key=os.environ["OPENAI_API_KEY"]
43
  # OpenAI.api_base=os.environ["OPENAI_API_BASE"]
44
- response = client.chat.completions.create(
45
- messages=messages,
46
- model=self.generation_model,
47
- temperature=self.temperature,
48
- max_tokens=1000,
49
- ).choices[0].message.content
 
 
 
 
 
 
 
 
 
 
 
50
  return response
51
 
52
  def embedding(self, texts: list[str]) -> list[float]:
 
22
  base_url=os.environ["OPENAI_API_BASE"]
23
  )
24
  print("start api call")
25
+ try:
26
+ output = await client.chat.completions.create(
27
+ messages=messages,
28
+ model=self.generation_model,
29
+ temperature=self.temperature,
30
+ max_tokens=1000,
31
+ )
32
+ print("end api call")
33
+ response = output.choices[0].message.content
34
+ except:
35
+ try:
36
+ output = await client.chat.completions.create(
37
+ messages=messages,
38
+ model=self.generation_model,
39
+ temperature=self.temperature,
40
+ max_tokens=1000,
41
+ )
42
+ print("end api call")
43
+ response = output.choices[0].message.content
44
+ except:
45
+ response = "No answer provided."
46
  return response
47
 
48
  def respond(self, messages: list[dict]) -> str:
 
52
  )
53
  # OpenAI.api_key=os.environ["OPENAI_API_KEY"]
54
  # OpenAI.api_base=os.environ["OPENAI_API_BASE"]
55
+ try:
56
+ response = client.chat.completions.create(
57
+ messages=messages,
58
+ model=self.generation_model,
59
+ temperature=self.temperature,
60
+ max_tokens=1000,
61
+ ).choices[0].message.content
62
+ except:
63
+ try:
64
+ response = client.chat.completions.create(
65
+ messages=messages,
66
+ model=self.generation_model,
67
+ temperature=self.temperature,
68
+ max_tokens=1000,
69
+ ).choices[0].message.content
70
+ except:
71
+ response = "No answer provided."
72
  return response
73
 
74
  def embedding(self, texts: list[str]) -> list[float]: