kenken999 committed
Commit 5870eb4 · 1 Parent(s): 9a030f1
chat_history.db CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:67b367330e511a92f3c65494805f38d2cde0ba6173aadec781969f99fbb2553c
-size 1847296
+oid sha256:0ed4ab0802de1b9750bc674622f95c04a92db3812e42ee9a897ceac41ed3b93d
+size 1855488
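
The chat_history.db entry above is only a Git LFS pointer update (new oid and size); the SQLite file itself lives in LFS storage. As a hedged illustration of the three-field pointer format shown in the diff, a sketch like this could read it back (the path and helper name are hypothetical, not part of the repo):

```python
# Minimal sketch: parse the Git LFS pointer format shown above
# ("version <url>", "oid sha256:<hex>", "size <bytes>").
# The path and this helper are hypothetical, not part of the repo.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            key, _, value = line.strip().partition(" ")
            if key and value:
                fields[key] = value
    return fields

pointer = parse_lfs_pointer("chat_history.db")  # the pointer file, not the real database
print(pointer.get("oid"), pointer.get("size"))
```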
controllers/gra_02_openInterpreter/OpenInterpreter.py CHANGED
@@ -10,7 +10,7 @@ def format_response(chunk, full_response):
     if chunk["type"] == "message":
         full_response += chunk.get("content", "")
         if chunk.get("end", False):
-            full_response += "\n\n"
+            full_response += "\n"
 
     # Code
     if chunk["type"] == "code":
@@ -49,7 +49,7 @@ def format_response(chunk, full_response):
 
         # Start a code block without a language tag
         if chunk.get("start", False):
-            full_response += "```code\n"
+            full_response += "```result\n"
 
         if chunk.get("format", "") == "active_line":
             if not console_content.strip():
@@ -108,7 +108,7 @@ def add_message_to_db(role, message_type, content):
     conn.commit()
     conn.close()
 
-def get_recent_messages(limit=1):
+def get_recent_messages(limit=4):
     conn = sqlite3.connect(db_name)
     cursor = conn.cursor()
     cursor.execute("SELECT role, type, content FROM history ORDER BY timestamp DESC LIMIT ?", (limit,))
@@ -120,20 +120,13 @@ def format_responses(chunk, full_response):
     # This function will format the response from the interpreter
     return full_response + chunk.get("content", "")
 
-def ensure_triple_quotes(s):
-    if not (s.rstrip().endswith("```") or s.rstrip().endswith('"""')):
-        s += "```"
-    return s
-
-
-
 def chat_with_interpreter(message, history=None, a=None, b=None, c=None, d=None):
     if message == "reset":
         interpreter.reset()
         return "Interpreter reset", history
 
     full_response = ""
-    recent_messages = get_recent_messages(3)
+    recent_messages = get_recent_messages(limit=4)
 
     for role, message_type, content in recent_messages:
         entry = {"role": role, "type": message_type, "content": content}
@@ -146,11 +139,8 @@ def chat_with_interpreter(message, history=None, a=None, b=None, c=None, d=None)
     for chunk in interpreter.chat(message, display=False, stream=False):
         if isinstance(chunk, dict):
             full_response = format_response(chunk, full_response)
-            full_response = ensure_triple_quotes(full_response)
-
         else:
             raise TypeError("Expected chunk to be a dictionary")
-
         print(full_response)
         yield full_response
 
@@ -159,7 +149,7 @@ def chat_with_interpreter(message, history=None, a=None, b=None, c=None, d=None)
     add_message_to_db("assistant", "message", full_response)
 
     yield full_response
-    return full_response, history
+    #return full_response, history
 
 
 def chat_with_interpreter_no_stream(message, history=None, a=None, b=None, c=None, d=None):
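
For context on the behavioural change, here is a hedged reconstruction of the history helper as it reads after this commit: it pulls the last `limit` rows of (role, type, content) from the `history` table so chat_with_interpreter can replay them as conversation context. Only the signature and the SELECT statement come from the hunks above; the value of db_name, the fetch logic, and the ordering of the returned rows are assumptions.

```python
# Hedged sketch of get_recent_messages after this commit; db_name and everything
# past the SELECT shown in the diff are assumptions, not verbatim repo code.
import sqlite3

db_name = "chat_history.db"  # assumed; the diff only shows sqlite3.connect(db_name)

def get_recent_messages(limit=4):
    # Default raised from 1 to 4; chat_with_interpreter now calls it with limit=4.
    conn = sqlite3.connect(db_name)
    cursor = conn.cursor()
    cursor.execute(
        "SELECT role, type, content FROM history ORDER BY timestamp DESC LIMIT ?",
        (limit,),
    )
    rows = cursor.fetchall()
    conn.close()
    # Oldest-first so the rows can be replayed in chronological order (assumed).
    return list(reversed(rows))

# Usage mirroring the loop in chat_with_interpreter: each row becomes a message dict.
for role, message_type, content in get_recent_messages(limit=4):
    entry = {"role": role, "type": message_type, "content": content}
    print(entry)
```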