nachoremer committed
Commit
3929c07
1 Parent(s): acfa594

still need to save properly

Files changed (1)
  1. app.py +179 -92
app.py CHANGED
@@ -14,6 +14,11 @@ import gspread
 from groq import Client
 import random, string
 
+from pydrive.auth import GoogleAuth
+from pydrive.drive import GoogleDrive
+import json
+
+
 # Initialize Google Sheets client
 client = init_google_sheets_client()
 sheet = client.open(google_sheets_name)
@@ -68,48 +73,103 @@ chat_history = []
 model_history = []
 
 
-# Function to save comment and score
-def save_comment_score(chat_responses, score, comment, story_name, user_name, system_prompt, models):
+
+
+
+
+from pydrive.auth import GoogleAuth
+from pydrive.drive import GoogleDrive
+import json
+
+# Authentication and creation of the Google Drive client
+gauth = GoogleAuth()
+gauth.LocalWebserverAuth() # This will open a browser window to authenticate
+drive = GoogleDrive(gauth)
+
+def save_comment_score(score, comment, story_name, user_name, system_prompt, models):
     print("Saving comment and score...")
-    print(chat_responses)
+    print(chat_history)
     print(model_history)
     full_chat_history = ""
 
-    # Create formatted chat history with roles
-    #and model in model_history
-    for message in chat_responses:
-        print(message['role'])
-        if message['role'] == 'user': # User message
-            full_chat_history += f"User: {message['content']}\n"
-        if message['role'] == 'assistant': # Assistant message
-            full_chat_history += f"Model:{model_history.pop(0)} Assistant: {message['content']}\n"
-
-    timestamp = datetime.now(timezone.utc) - timedelta(hours=3) # Adjust to GMT-3
-    timestamp_str = timestamp.strftime("%Y-%m-%d %H:%M:%S")
-    model_name = (' ').join(models)
-    # Append data to local data storage
-    print(full_chat_history)
-    data.append([
-        timestamp_str,
-        user_name,
-        model_name,
-        system_prompt,
-        story_name,
-        full_chat_history,
-        score,
-        comment
-    ])
-
-    # Append data to Google Sheets
-    try:
-        user_sheet = client.open(google_sheets_name).worksheet(user_name)
-    except gspread.exceptions.WorksheetNotFound:
-        user_sheet = client.open(google_sheets_name).add_worksheet(title=user_name, rows="100", cols="20")
+    # Save all_answers to Google Drive
+    file_name = 'all_answers.json'
+
+    # Check if the file already exists in Google Drive
+    file_list = drive.ListFile({'q': f"title='{file_name}' and trashed=false"}).GetList()
+    if file_list:
+        # File exists, download it
+        gfile = file_list[0]
+        gfile.GetContentFile(file_name)
+        with open(file_name, 'r') as json_file:
+            existing_data = json.load(json_file)
+        existing_data.append(all_answers)
+    else:
+        # File does not exist, create new data
+        existing_data = [all_answers]
+
+    # Save updated data to the file
+    with open(file_name, 'w') as json_file:
+        json.dump(existing_data, json_file)
+
+    # Upload the file to Google Drive
+    gfile = drive.CreateFile({'title': file_name})
+    gfile.SetContentFile(file_name)
+    gfile.Upload()
+    print(f"File {file_name} uploaded to Google Drive.")
 
-    user_sheet.append_row([timestamp_str, user_name, model_name, system_prompt, story_name, full_chat_history, score, comment])
+    return None
+
+
+
+
 
-    df = pd.DataFrame(data, columns=["Timestamp", "User Name", "Model Name", "System Prompt", "Story Name", "Chat History", "Score", "Comment"])
-    return df[["Chat History", "Score", "Comment"]], gr.update(value="") # Show only the required columns and clear the comment input box
+
+
+
+
+# Function to save comment and score
+#def save_comment_score(score, comment, story_name, user_name, system_prompt, models):
+#    print("Saving comment and score...")
+#    print(chat_history)
+#    print(model_history)
+#    full_chat_history = ""
+#
+#    # Create formatted chat history with roles
+#    #and model in model_history
+#    for message in chat_history:
+#        print(message['role'])
+#        if message['role'] == 'user': # User message
+#            full_chat_history += f"User: {message['content']}\n"
+#        if message['role'] == 'assistant': # Assistant message
+#            full_chat_history += f"Model:{model_history.pop(0)} Assistant: {message['content']}\n"
+#
+#    timestamp = datetime.now(timezone.utc) - timedelta(hours=3) # Adjust to GMT-3
+#    timestamp_str = timestamp.strftime("%Y-%m-%d %H:%M:%S")
+#    model_name = (' ').join(models)
+#    # Append data to local data storage
+#    print(full_chat_history)
+#    data.append([
+#        timestamp_str,
+#        user_name,
+#        model_name,
+#        system_prompt,
+#        story_name,
+#        full_chat_history,
+#        score,
+#        comment
+#    ])
+#
+#    # Append data to Google Sheets
+#    try:
+#        user_sheet = client.open(google_sheets_name).worksheet(user_name)
+#    except gspread.exceptions.WorksheetNotFound:
+#        user_sheet = client.open(google_sheets_name).add_worksheet(title=user_name, rows="100", cols="20")
+#
+#    user_sheet.append_row([timestamp_str, user_name, model_name, system_prompt, story_name, full_chat_history, score, comment])
+#
+#    df = pd.DataFrame(data, columns=["Timestamp", "User Name", "Model Name", "System Prompt", "Story Name", "Chat History", "Score", "Comment"])
+#    return df[["Chat History", "Score", "Comment"]], gr.update(value="") # Show only the required columns and clear the comment input box
 
 
 # Function to handle interaction with model
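
For context on the new Drive-backed persistence, here is a minimal sketch (not part of the diff) of the download, append, and re-upload pattern that save_comment_score now performs. It assumes an authenticated PyDrive drive client like the one created above; append_answers_to_drive and its parameters are illustrative names, and unlike the committed version it reuses the existing Drive file instead of creating a fresh one on every save.

import json

def append_answers_to_drive(drive, records, file_name='all_answers.json'):
    # Look for a non-trashed Drive file with this title
    matches = drive.ListFile({'q': f"title='{file_name}' and trashed=false"}).GetList()
    if matches:
        gfile = matches[0]                  # reuse the existing file
        gfile.GetContentFile(file_name)     # download its current contents
        with open(file_name, 'r') as fh:
            existing = json.load(fh)
    else:
        gfile = drive.CreateFile({'title': file_name})  # first upload
        existing = []

    existing.extend(records)                # add the new dialog records
    with open(file_name, 'w') as fh:
        json.dump(existing, fh)

    gfile.SetContentFile(file_name)         # upload the updated contents
    gfile.Upload()
    return gfile['id']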
@@ -157,7 +217,6 @@ Here is the story:
         print("Story title does not match.")
 
 
-
 #i=[story_dropdown, model_dropdown, system_prompt_dropdown],
 #o=[chatbot_output, chat_history_json, data_table, selected_story_textbox])
 #I receive several responses and just display them; I add to the context only the selected one
@@ -165,32 +224,37 @@ Here is the story:
 
 def send_multiple_selected_story(title, models, system_prompt):
     global model_history
-    global chatbot_aswser_list
+    global chatbot_answser_list
+    global all_answers
     resp_list = []
     print(models)
     #iterate over words
-    shuffle_models = randomize_key_order(all_models)
-    for index, model in enumerate(shuffle_models):
-        if model in models:
-            #query response,
-            resp, context, _ = send_selected_story(title, model, system_prompt)
-            chatbot_aswser_list[alphabet[index]] = {'response': resp, 'model': model}
-            try:
-                print(resp)
-                resp_list.append(gr.Chatbot(value=[resp], visible=True, type='messages'))
-            except gr.exceptions.Error:
-                print(f"error for en modelo {model}")
-        else:
-            try:
-                resp_list.append(gr.Chatbot(type='messages', visible=False))
-            except gr.exceptions.Error:
-                print(f"error, else en modelo {model}")
-
+    #shuffle_models = randomize_key_order(all_models)
+    random.shuffle(models)
+    print(f"models shuffled: {models}")
+    for index, model in enumerate(models):
+        resp, context, _ = send_selected_story(title, model, system_prompt)
+        chatbot_answser_list[alphabet[index]] = {'response': resp, 'model': model}
+        try:
+            print(resp)
+            resp_list.append(gr.Chatbot(value=[resp], visible=True, type='messages'))
+        except gr.exceptions.Error:
+            print(f"error for en modelo {model}")
+
+
+    rest = [model for model in model_list if model not in models]
+    for model in rest:
+        try:
+            resp_list.append(gr.Chatbot(type='messages', visible=False))
+        except gr.exceptions.Error:
+            print(f"error, else en modelo {model}")
+
     try:
         resp_list.insert(0, gr.Chatbot(value=context, type='messages'))
+        #chat_history is already handled in send_selected_story
     except gr.exceptions.Error:
         print(f"error en main output\n {context}")
-    #return main_output, resp_list[0], resp_list[1], resp_list[2], resp_list[3], models, story,
+
     return resp_list
 
 #inputs=[user_input, chatbot_main_output, model_checkbox, chat_radio, assistant_user_input, chatbot_resp[0], chatbot_resp[1], chatbot_resp[2], chatbot_resp[3]],# interaction_count],
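
A minimal sketch (not part of the diff) of the blind-labelling scheme used by send_multiple_selected_story: the selected models are shuffled and each response is stored under a letter key, so the rater only sees "Model A", "Model B", and so on. label_responses is an illustrative helper; alphabet mirrors the app's own global.

import random
import string

alphabet = list(string.ascii_uppercase)

def label_responses(responses_by_model):
    """Map {model: response} to {letter: {'response', 'model'}} in random order."""
    models = list(responses_by_model)
    random.shuffle(models)          # hide which model produced which answer
    return {
        alphabet[index]: {'response': responses_by_model[model], 'model': model}
        for index, model in enumerate(models)
    }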
@@ -207,46 +271,71 @@ def remove_metadata(json_array):
 
 # dont know the correct model because it shuffles each time
 #selected model is only the index in the radio input
-def multiple_interact(query, history, models, selected_model, assistant_user_input): #, interaction_count)
+def multiple_interact(query, models, selected_model, assistant_user_input): #, interaction_count)
     print(f'chat_checkbox: {selected_model}')
     resp_list = []
     print(model_history)
     #removed history, it is now a global variable
+
 
     if selected_model == "user_input":
-        history.append({"role": "assistant", "content": assistant_user_input})
-        history.append({"role": "user", "content": query})
+        chat_history.append({"role": "assistant", "content": assistant_user_input})
+        chat_history.append({"role": "user", "content": query})
+
+        dialog = {
+            "context": remove_metadata(chat_history),
+            "assistant": assistant_user_input + chatbot_answser_list.values(),
+            "selected": "user_input",
+        }
 
     else:
-        #chats = [chat1, chat2, chat3, chat4]
-        #chatbot_aswser_list
+        dialog = {
+            "context": remove_metadata(chat_history),
+            "assistant": chatbot_answser_list.values(),
+            "selected": None,
+        }
+
+        #chatbot_answser_list
         #get the previous answer of the selected model
         for index, model in enumerate(models):
             if alphabet[index] == selected_model:
-                selected_model_history = chatbot_aswser_list[selected_model]['response']
+                selected_model_history = chatbot_answser_list[selected_model]['response']
                 print(f"selected_model_history: {selected_model_history}")
-                history.append(selected_model_history)
-                history.append({"role": "user","content": query.strip()})
-
+                chat_history.append(selected_model_history)
+                chat_history.append({"role": "user","content": query.strip()})
+                #if it is the correct one, save it
+                dialog["selected"] = chatbot_answser_list[selected_model]['model']
+                break
+    #APPE
+    all_answers.append(dialog)
     #save to csv
     selected_model_history = {} #reset history
 
-
-    aux_history = remove_metadata(history)
+    #I think this is not needed
+    aux_history = remove_metadata(chat_history)
    #print(aux_history)
 
 
-    #shuffle all models then iterate over them
-    shuffle_models = randomize_key_order(all_models)
-    for index, model in enumerate(shuffle_models):
-        if model in models:
-            response = interact_groq(aux_history, model).strip()
-            resp_list.append(gr.Chatbot(value=[{"role": "assistant", "content": response}], type='messages'))
-            chatbot_aswser_list[alphabet[index]] = {'response': response, 'model': model}
-        else:
-            resp_list.append(gr.Chatbot(value=None, type='messages', visible=False))
-
-    main_output = gr.Chatbot(value=history, type='messages')
+    #it is not models, it is....
+    random.shuffle(active_models)
+    for index, model in enumerate(active_models):
+        resp = interact_groq(aux_history, model)
+        resp = {"role": "assistant", "content": resp.strip()}
+        chatbot_answser_list[alphabet[index]] = {'response': resp, 'model': model}
+        try:
+            print(resp)
+            resp_list.append(gr.Chatbot(value=[resp], visible=True, type='messages'))
+        except gr.exceptions.Error:
+            print(f"error for en modelo {model}")
+
+    rest = [model for model in model_list if model not in active_models]
+    for model in rest:
+        try:
+            resp_list.append(gr.Chatbot(type='messages', visible=False))
+        except gr.exceptions.Error:
+            print(f"error, else en modelo {model}")
+
+    resp_list.insert(0, gr.Chatbot(value=aux_history, type='messages'))
     model_history.append(selected_model)
     print(model_history)
 
@@ -294,8 +383,10 @@ chatbot_list = []
 model_list = list(all_models.keys())
 active_models = []
 #chatbot_answer_list['model'] = "response here"
-chatbot_aswser_list = {}
+chatbot_answser_list = {}
+all_answers = {} #save all answers of all chatbots
 # Create the chat interface using Gradio Blocks
+active_models = []
 with gr.Blocks() as demo:
     with gr.Tabs():
         with gr.TabItem("Chat"):
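
For reference, the record that multiple_interact accumulates in the all_answers store introduced above has roughly this shape. This is an illustrative sketch, not part of the diff: field names follow the committed code, while the model names and message texts are invented, and it assumes all_answers is list-like.

dialog = {
    "context": [
        {"role": "user", "content": "Continue the story from here."},
    ],
    "assistant": [
        {"response": {"role": "assistant", "content": "candidate answer A"}, "model": "model-a"},
        {"response": {"role": "assistant", "content": "candidate answer B"}, "model": "model-b"},
    ],
    # either the name of the chosen model or "user_input" when the rater typed their own turn
    "selected": "model-a",
}
all_answers.append(dialog)   # assumes all_answers is list-like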
@@ -348,10 +439,6 @@ with gr.Blocks() as demo:
                label = f"Model {alphabet[i % len(alphabet)]}"
                aux = gr.Chatbot(label=label, visible=False, type='messages')
                chatbot_list.append(aux)
-
-            #for model in model_list:
-            #    aux = gr.Chatbot(label=f"Model {model}", visible=False, type='messages')
-            #    chatbot_list.append(aux)
 
             user_input = gr.Textbox(placeholder="Type your message here...", label="User Input")
             #chat_radio = gr.Radio(choices=list(model_list)+["user_input"], label="Sent something to continue...", value=[model_list[0]])
@@ -395,16 +482,16 @@ with gr.Blocks() as demo:
     #then I return:
     #in
 
-    #send_multiple_message_button.click(
-    #    fn=multiple_interact,
-    #    inputs=[user_input, chatbot_main_output, model_checkbox, chat_radio, assistant_user_input],# interaction_count],
-    #    outputs=[chatbot_list],
-    #    )
+    send_multiple_message_button.click(
+        fn=multiple_interact,
+        inputs=[user_input, model_checkbox, chat_radio, assistant_user_input],# interaction_count],
+        outputs=chatbot_list,
+        )
 
     #maybe I need to save a variable with the checkbox values
-    #save_button_multievaluation.click(
-    #    fn=save_comment_score,
-    #    inputs=[chatbot_main_output, score_input, comment_input, story_dropdown, user_dropdown, system_prompt_dropdown, model_checkbox],
-    #    outputs=[data_table, comment_input])
+    save_button_multievaluation.click(
+        fn=save_comment_score,
+        inputs=[score_input, comment_input, story_dropdown, user_dropdown, system_prompt_dropdown, model_checkbox],
+        outputs=[data_table, comment_input])
 
 demo.launch()
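
A minimal, self-contained sketch (not part of the diff) of how the button wiring above behaves: a gr.Button.click handler receives one argument per component in inputs and must return one value per component in outputs, which is why multiple_interact returns a full list of gr.Chatbot updates. The component names below are illustrative, not the app's.

import gradio as gr

with gr.Blocks() as sketch:
    user_box = gr.Textbox(label="User Input")
    chat_slots = [gr.Chatbot(type='messages', visible=False) for _ in range(3)]
    send = gr.Button("Send")

    def on_send(message):
        # Return exactly one update per output component.
        return [
            gr.Chatbot(value=[{"role": "user", "content": message}],
                       visible=True, type='messages')
            for _ in chat_slots
        ]

    send.click(fn=on_send, inputs=[user_box], outputs=chat_slots)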
 