asthaaa300 committed on
Commit
f12d140
·
verified ·
1 Parent(s): 4cdf80c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +55 -44
app.py CHANGED
@@ -7,25 +7,31 @@ MODEL = "nomiChroma3.1"
7
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
8
 
9
  def respond(
10
- message,
11
- history: list[tuple[str, str]],
12
- max_tokens,
13
- temperature,
14
- top_p,
15
- ):
16
- # Define system message internally instead of as a user input
 
 
 
17
  system_message = "You are a maritime legal assistant with expertise strictly in Indian maritime law. Provide detailed legal advice and information based on Indian maritime legal principles and regulations."
18
 
19
  messages = [{"role": "system", "content": system_message}]
20
- for val in history:
21
- if val[0]:
22
- messages.append({"role": "user", "content": val[0]})
23
- if val[1]:
24
- messages.append({"role": "assistant", "content": val[1]})
25
  messages.append({"role": "user", "content": message})
 
 
26
  response = ""
 
27
  try:
28
- for message in client.chat_completion(
29
  messages,
30
  max_tokens=max_tokens,
31
  stream=True,
@@ -33,34 +39,41 @@ def respond(
33
  top_p=top_p,
34
  ):
35
  try:
36
- if isinstance(message, ChatCompletionStreamOutput):
37
- content = message.choices[0].delta.content
38
- if content is not None:
39
  response += content
40
- yield response
41
- if message.choices[0].finish_reason == 'stop':
 
42
  break
43
- elif isinstance(message, dict):
44
- content = message.get('choices', [{}])[0].get('delta', {}).get('content')
45
  if content:
46
  response += content
47
- yield response
48
- if message.get('choices', [{}])[0].get('finish_reason') == 'stop':
 
49
  break
50
- elif isinstance(message, str):
51
- if message.strip():
52
- response += message
53
- yield response
 
54
  except Exception as e:
55
- print(f"Error processing message: {e}")
56
  continue
57
 
58
- if response:
59
- yield response
 
 
60
 
61
  except Exception as e:
62
- print(f"An error occurred: {e}")
63
- yield f"An error occurred: {e}"
 
 
64
 
65
  custom_css = """
66
  /* Global styles */
@@ -183,13 +196,11 @@ with gr.Blocks(css=custom_css, theme=gr.themes.Base()) as demo:
183
  </div>
184
  """)
185
 
186
- # Main layout with sidebar
187
  with gr.Row():
188
  # Sidebar
189
  with gr.Column(scale=1, elem_classes="sidebar"):
190
  gr.Markdown("### Example Queries", elem_classes="sidebar-title")
191
 
192
- # Example queries in sidebar
193
  example_queries = [
194
  "What are the key regulations governing ports in India?",
195
  "Explain the concept of cabotage in Indian maritime law.",
@@ -199,7 +210,6 @@ with gr.Blocks(css=custom_css, theme=gr.themes.Base()) as demo:
199
  "What are the rules for coastal cargo transportation in India?"
200
  ]
201
 
202
- # Create buttons for example queries
203
  with gr.Column(elem_classes="example-queries"):
204
  example_buttons = [gr.Button(query, elem_classes="example-query-button") for query in example_queries]
205
 
@@ -239,29 +249,30 @@ with gr.Blocks(css=custom_css, theme=gr.themes.Base()) as demo:
239
  clear = gr.Button("Clear")
240
 
241
  # Event handlers
242
- submit_click = submit.click(
243
  respond,
244
  inputs=[msg, chatbot, max_tokens, temperature, top_p],
245
- outputs=chatbot
246
  )
247
- msg_submit = msg.submit(
 
248
  respond,
249
  inputs=[msg, chatbot, max_tokens, temperature, top_p],
250
- outputs=chatbot
251
  )
252
- clear.click(lambda: None, None, chatbot, queue=False)
253
 
254
- # Add click handlers for example queries
 
 
255
  for button in example_buttons:
256
  button.click(
257
- lambda query: query,
258
  inputs=[button],
259
- outputs=[msg]
260
  ).then(
261
  respond,
262
  inputs=[msg, chatbot, max_tokens, temperature, top_p],
263
- outputs=chatbot
264
- )
265
 
266
  if __name__ == "__main__":
267
  demo.launch()
 
7
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
8
 
9
  def respond(
10
+ message: str,
11
+ chat_history: list[tuple[str, str]],
12
+ max_tokens: int,
13
+ temperature: float,
14
+ top_p: float,
15
+ ) -> tuple[list[tuple[str, str]], str]:
16
+ """
17
+ Generate a response and update chat history.
18
+ Returns tuple of (new_history, None) to clear input box.
19
+ """
20
  system_message = "You are a maritime legal assistant with expertise strictly in Indian maritime law. Provide detailed legal advice and information based on Indian maritime legal principles and regulations."
21
 
22
  messages = [{"role": "system", "content": system_message}]
23
+ for user_msg, assistant_msg in chat_history:
24
+ messages.extend([
25
+ {"role": "user", "content": user_msg},
26
+ {"role": "assistant", "content": assistant_msg}
27
+ ])
28
  messages.append({"role": "user", "content": message})
29
+
30
+ chat_history = chat_history + [(message, None)]
31
  response = ""
32
+
33
  try:
34
+ for chunk in client.chat_completion(
35
  messages,
36
  max_tokens=max_tokens,
37
  stream=True,
 
39
  top_p=top_p,
40
  ):
41
  try:
42
+ if isinstance(chunk, ChatCompletionStreamOutput):
43
+ content = chunk.choices[0].delta.content
44
+ if content:
45
  response += content
46
+ chat_history[-1] = (message, response)
47
+ yield chat_history, ""
48
+ if chunk.choices[0].finish_reason == 'stop':
49
  break
50
+ elif isinstance(chunk, dict):
51
+ content = chunk.get('choices', [{}])[0].get('delta', {}).get('content')
52
  if content:
53
  response += content
54
+ chat_history[-1] = (message, response)
55
+ yield chat_history, ""
56
+ if chunk.get('choices', [{}])[0].get('finish_reason') == 'stop':
57
  break
58
+ elif isinstance(chunk, str) and chunk.strip():
59
+ response += chunk
60
+ chat_history[-1] = (message, response)
61
+ yield chat_history, ""
62
+
63
  except Exception as e:
64
+ print(f"Error processing chunk: {e}")
65
  continue
66
 
67
+ if not response:
68
+ chat_history[-1] = (message, "I apologize, but I couldn't generate a response. Please try again.")
69
+
70
+ yield chat_history, ""
71
 
72
  except Exception as e:
73
+ error_msg = f"An error occurred: {str(e)}"
74
+ chat_history[-1] = (message, error_msg)
75
+ yield chat_history, ""
76
+
77
 
78
  custom_css = """
79
  /* Global styles */
 
196
  </div>
197
  """)
198
 
 
199
  with gr.Row():
200
  # Sidebar
201
  with gr.Column(scale=1, elem_classes="sidebar"):
202
  gr.Markdown("### Example Queries", elem_classes="sidebar-title")
203
 
 
204
  example_queries = [
205
  "What are the key regulations governing ports in India?",
206
  "Explain the concept of cabotage in Indian maritime law.",
 
210
  "What are the rules for coastal cargo transportation in India?"
211
  ]
212
 
 
213
  with gr.Column(elem_classes="example-queries"):
214
  example_buttons = [gr.Button(query, elem_classes="example-query-button") for query in example_queries]
215
 
 
249
  clear = gr.Button("Clear")
250
 
251
  # Event handlers
252
+ msg.submit(
253
  respond,
254
  inputs=[msg, chatbot, max_tokens, temperature, top_p],
255
+ outputs=[chatbot, msg]
256
  )
257
+
258
+ submit.click(
259
  respond,
260
  inputs=[msg, chatbot, max_tokens, temperature, top_p],
261
+ outputs=[chatbot, msg]
262
  )
 
263
 
264
+ clear.click(lambda: ([], ""), None, [chatbot, msg], queue=False)
265
+
266
+ # Example query handlers
267
  for button in example_buttons:
268
  button.click(
269
+ lambda query: (query, []), # Reset chat history when using example
270
  inputs=[button],
271
+ outputs=[msg, chatbot],
272
  ).then(
273
  respond,
274
  inputs=[msg, chatbot, max_tokens, temperature, top_p],
275
+ outputs=[chatbot, msg]
 
276
 
277
  if __name__ == "__main__":
278
  demo.launch()