|
|
|
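"""Maritime Legal Compliance: a Gradio chat app that streams answers to Indian
maritime law queries from a Hugging Face hosted chat model."""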
from collections.abc import Iterator

import gradio as gr
from huggingface_hub import ChatCompletionStreamOutput, InferenceClient
|
|
|
# NOTE: MODEL is defined but not currently used; the InferenceClient below streams
# from the public zephyr-7b-beta chat endpoint. If the endpoint requires
# authentication, the client falls back to the locally saved Hugging Face token.
MODEL = "nomiChroma3.1"
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
|
|
def respond(
    message: str,
    chat_history: list[dict],
) -> Iterator[tuple[list[dict], str]]:
    """
    Stream a response from the model and update the chat history.

    Uses the messages format (dicts with "role" and "content" keys) expected by
    gr.Chatbot(type="messages"). Yields (updated_history, "") after each chunk so
    the chat updates incrementally and the input box is cleared.
    """
|
    system_message = (
        "You are a maritime legal assistant with expertise strictly in Indian maritime law. "
        "Provide detailed legal advice and information based on Indian maritime legal "
        "principles and regulations."
    )
|
|
|
    # Build the request payload: system prompt, prior conversation turns, then the
    # new user message.
    messages = [{"role": "system", "content": system_message}]
    for msg in chat_history:
        messages.append({"role": msg["role"], "content": msg["content"]})
    messages.append({"role": "user", "content": message})

    # Work on a copy of the history (rather than mutating the input) and append the
    # user's message so it becomes part of the state yielded back to the chatbot.
    chat_history = chat_history + [{"role": "user", "content": message}]
|
    response = ""

    def _emit(new_text: str) -> None:
        """Append streamed text to the response and mirror it into the history."""
        nonlocal response
        response += new_text
        # Update the assistant message in place, creating it on the first chunk.
        if chat_history and chat_history[-1]["role"] == "assistant":
            chat_history[-1]["content"] = response
        else:
            chat_history.append({"role": "assistant", "content": response})

    try:
        for chunk in client.chat_completion(
            messages,
            max_tokens=512,
            stream=True,
            temperature=0.7,
            top_p=0.95,
        ):
            try:
                content = None
                finish_reason = None

                # The streaming endpoint normally yields ChatCompletionStreamOutput
                # objects, but tolerate raw dicts and plain strings as well.
                if isinstance(chunk, ChatCompletionStreamOutput):
                    content = chunk.choices[0].delta.content
                    finish_reason = chunk.choices[0].finish_reason
                elif isinstance(chunk, dict):
                    choice = chunk.get("choices", [{}])[0]
                    content = choice.get("delta", {}).get("content")
                    finish_reason = choice.get("finish_reason")
                elif isinstance(chunk, str) and chunk.strip():
                    content = chunk

                if content:
                    _emit(content)
                    yield chat_history, ""
                if finish_reason == "stop":
                    break

            except Exception as e:
                print(f"Error processing chunk: {e}")
                continue

        if not response:
            chat_history.append({
                "role": "assistant",
                "content": "I apologize, but I couldn't generate a response. Please try again.",
            })

        yield chat_history, ""

    except Exception as e:
        error_msg = f"An error occurred: {str(e)}"
        chat_history.append({"role": "assistant", "content": error_msg})
        yield chat_history, ""
|
|
|
def handle_example_click(example_query: str) -> tuple[str, list]:
    """Put the example query into the input box and clear the chat history."""
    return example_query, []
|
|
|
|
|
|
|
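# Decorative animated wave background (SVG). Note: this constant is defined but not
# currently rendered anywhere in the layout below.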
WAVE_SVG = """ |
|
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1440 320" preserveAspectRatio="none"> |
|
<defs> |
|
<linearGradient id="waveGradient" x1="0%" y1="0%" x2="100%" y2="0%"> |
|
<stop offset="0%" style="stop-color:#1a365d;stop-opacity:0.2"/> |
|
<stop offset="50%" style="stop-color:#2a4a7d;stop-opacity:0.3"/> |
|
<stop offset="100%" style="stop-color:#1a365d;stop-opacity:0.2"/> |
|
</linearGradient> |
|
</defs> |
|
<path fill="url(#waveGradient)" d="M0,192L48,197.3C96,203,192,213,288,229.3C384,245,480,267,576,250.7C672,235,768,181,864,181.3C960,181,1056,235,1152,234.7C1248,235,1344,181,1392,154.7L1440,128L1440,320L1392,320C1344,320,1248,320,1152,320C1056,320,960,320,864,320C768,320,672,320,576,320C480,320,384,320,288,320C192,320,96,320,48,320L0,320Z"> |
|
<animate attributeName="d" |
|
dur="10s" |
|
repeatCount="indefinite" |
|
values="M0,192L48,197.3C96,203,192,213,288,229.3C384,245,480,267,576,250.7C672,235,768,181,864,181.3C960,181,1056,235,1152,234.7C1248,235,1344,181,1392,154.7L1440,128L1440,320L1392,320C1344,320,1248,320,1152,320C1056,320,960,320,864,320C768,320,672,320,576,320C480,320,384,320,288,320C192,320,96,320,48,320L0,320Z; |
|
M0,160L48,181.3C96,203,192,245,288,261.3C384,277,480,267,576,234.7C672,203,768,149,864,138.7C960,128,1056,160,1152,186.7C1248,213,1344,235,1392,245.3L1440,256L1440,320L1392,320C1344,320,1248,320,1152,320C1056,320,960,320,864,320C768,320,672,320,576,320C480,320,384,320,288,320C192,320,96,320,48,320L0,320Z; |
|
M0,192L48,197.3C96,203,192,213,288,229.3C384,245,480,267,576,250.7C672,235,768,181,864,181.3C960,181,1056,235,1152,234.7C1248,235,1344,181,1392,154.7L1440,128L1440,320L1392,320C1344,320,1248,320,1152,320C1056,320,960,320,864,320C768,320,672,320,576,320C480,320,384,320,288,320C192,320,96,320,48,320L0,320Z"/> |
|
</path> |
|
</svg> |
|
""" |
|
|
|
|
|
|
|
custom_css = """ |
|
@import url('https://fonts.googleapis.com/css2?family=Ubuntu:wght@300;400;500;700&display=swap'); |
|
/* Global styles */ |
|
.gradio-container { |
|
background-color: #1a365d !important; |
|
font-family: 'Ubuntu', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Cantarell, "Helvetica Neue", sans-serif !important; |
|
position: relative; |
|
overflow: hidden; |
|
} |
|
/* Wave Background */ |
|
.wave-background { |
|
position: fixed; |
|
bottom: 0; |
|
left: 0; |
|
width: 100%; |
|
height: 100%; |
|
z-index: 0; |
|
pointer-events: none; |
|
} |
|
/* Header styling */ |
|
.header-container { |
|
text-align: center; |
|
padding: 2rem 0; |
|
margin-bottom: 1rem; |
|
border-bottom: 2px solid rgba(255, 255, 255, 0.1); |
|
position: relative; |
|
z-index: 1; |
|
} |
|
.header-title { |
|
color: #ffffff; |
|
font-size: 2.5rem; |
|
margin-bottom: 0.5rem; |
|
font-family: 'Ubuntu', sans-serif !important; |
|
text-shadow: 0 2px 4px rgba(0,0,0,0.2); |
|
} |
|
.header-subtitle { |
|
color: #e6f3ff; |
|
font-size: 1.1rem; |
|
margin-bottom: 0.3rem; |
|
font-family: 'Ubuntu', sans-serif !important; |
|
opacity: 0.9; |
|
} |
|
/* Maritime icons styling */ |
|
.maritime-icons { |
|
display: flex; |
|
justify-content: center; |
|
margin: 1rem 0; |
|
gap: 2rem; |
|
} |
|
.maritime-icon { |
|
width: 40px; |
|
height: 40px; |
|
transition: transform 0.3s ease; |
|
} |
|
.maritime-icon:hover { |
|
transform: scale(1.1); |
|
} |
|
/* Sidebar styling */ |
|
.sidebar { |
|
background: #ccd9e6 !important; |
|
border-radius: 8px !important; |
|
padding: 15px !important; |
|
border: 1px solid rgba(176, 226, 255, 0.2) !important; |
|
height: fit-content !important; |
|
position: relative; |
|
overflow: hidden; |
|
} |
|
.sidebar-bg { |
|
position: absolute; |
|
top: 0; |
|
right: 0; |
|
bottom: 0; |
|
left: 0; |
|
opacity: 0.05; |
|
pointer-events: none; |
|
z-index: 0; |
|
} |
|
.sidebar-content { |
|
position: relative; |
|
z-index: 1; |
|
} |
|
.sidebar-icon { |
|
width: 24px; |
|
height: 24px; |
|
margin-right: 8px; |
|
vertical-align: middle; |
|
} |
|
.sidebar-title { |
|
color: #1a365d !important; |
|
font-size: 1.1rem !important; |
|
margin-bottom: 0.8rem !important; |
|
padding-bottom: 0.4rem !important; |
|
border-bottom: 2px solid rgba(26, 54, 93, 0.2) !important; |
|
font-family: 'Ubuntu', sans-serif !important; |
|
display: flex; |
|
align-items: center; |
|
} |
|
.example-query-button { |
|
background-color: #b3c6d9 !important; |
|
color: #1a365d !important; |
|
border: none !important; |
|
margin: 3px 0 !important; |
|
padding: 6px 10px !important; |
|
border-radius: 4px !important; |
|
text-align: left !important; |
|
width: 100% !important; |
|
cursor: pointer !important; |
|
transition: background-color 0.3s ease !important; |
|
font-size: 0.9rem !important; |
|
font-family: 'Ubuntu', sans-serif !important; |
|
position: relative; |
|
padding-left: 30px !important; |
|
} |
|
.example-query-button::before { |
|
content: "⚓"; |
|
position: absolute; |
|
left: 8px; |
|
top: 50%; |
|
transform: translateY(-50%); |
|
font-size: 12px; |
|
opacity: 0.7; |
|
} |
|
.example-query-button:hover { |
|
background-color: #99b3cc !important; |
|
} |
|
/* Chat container */ |
|
.chat-container { |
|
background: #ccd9e6 !important; |
|
border-radius: 8px !important; |
|
padding: 15px !important; |
|
height: 300px !important; |
|
overflow-y: auto !important; |
|
border: 1px solid rgba(176, 226, 255, 0.2) !important; |
|
backdrop-filter: blur(10px) !important; |
|
font-family: 'Ubuntu', sans-serif !important; |
|
} |
|
/* Message styling */ |
|
.message.user, .message.bot { |
|
padding: 8px 12px !important; |
|
margin: 6px 0 !important; |
|
border-radius: 6px !important; |
|
color: #1a365d !important; |
|
font-size: 0.9rem !important; |
|
font-family: 'Ubuntu', sans-serif !important; |
|
line-height: 1.5 !important; |
|
} |
|
.message.user { |
|
background-color: #b3c6d9 !important; |
|
} |
|
.message.bot { |
|
background-color: #e6f3ff !important; |
|
} |
|
/* Input and button styling */ |
|
textarea { |
|
background-color: #e6f3ff !important; |
|
border: 1px solid rgba(176, 226, 255, 0.3) !important; |
|
border-radius: 6px !important; |
|
padding: 8px !important; |
|
color: #1a365d !important; |
|
font-size: 0.9rem !important; |
|
font-family: 'Ubuntu', sans-serif !important; |
|
} |
|
.gr-button { |
|
font-family: 'Ubuntu', sans-serif !important; |
|
} |
|
.maritime-images { |
|
display: grid; |
|
grid-template-columns: repeat(3, 1fr); |
|
gap: 1rem; |
|
margin: 1.5rem auto; |
|
max-width: 1200px; |
|
padding: 0 1rem; |
|
} |
|
.maritime-image { |
|
width: 100%; |
|
height: 200px; |
|
object-fit: cover; |
|
border-radius: 8px; |
|
border: 2px solid #e6f3ff; |
|
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); |
|
transition: transform 0.3s ease; |
|
filter: contrast(110%) saturate(110%); |
|
} |
|
.maritime-image:hover { |
|
transform: scale(1.02); |
|
} |
|
""" |
|
|
|
|
|
|
|
|
|
with gr.Blocks(css=custom_css, theme=gr.themes.Base()) as demo: |
|
|
|
gr.HTML(""" |
|
<div class="header-container"> |
|
<h1 class="header-title">Maritime Legal Compliance</h1> |
|
<p class="header-subtitle">AI-powered assistance for Indian maritime law queries</p> |
|
        <p class="header-subtitle">This chatbot uses a fine-tuned LLaMA-3.1 model personalised to assist with Indian maritime legal queries.</p>
|
</div> |
|
""") |
|
|
|
|
|
|
|
with gr.Row(): |
|
|
|
with gr.Column(scale=1, elem_classes="sidebar"): |
|
gr.HTML(""" |
|
<div class="sidebar-bg"> |
|
<svg viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg" width="100%" height="100%"> |
|
<pattern id="grid" width="10" height="10" patternUnits="userSpaceOnUse"> |
|
<path d="M 10 0 L 0 0 0 10" fill="none" stroke="#1a365d" stroke-width="0.5"/> |
|
</pattern> |
|
<rect width="100%" height="100%" fill="url(#grid)"/> |
|
<circle cx="80" cy="20" r="15" fill="#1a365d"/> |
|
<path d="M70,80 L90,80 L80,95 Z" fill="#1a365d"/> |
|
</svg> |
|
</div> |
|
<div class="sidebar-content"> |
|
""") |
|
|
|
gr.HTML(""" |
|
<div class="sidebar-title"> |
|
<svg class="sidebar-icon" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"> |
|
<circle cx="12" cy="12" r="10" fill="none" stroke="#1a365d" stroke-width="2"/> |
|
<path d="M12 6 L12 18 M6 12 L18 12" stroke="#1a365d" stroke-width="2"/> |
|
</svg> |
|
Example Queries |
|
</div> |
|
""") |
|
|
|
example_queries = [ |
|
"What are the key regulations governing ports in India?", |
|
"What are the legal requirements for registering a vessel in India?", |
|
"What are the recent environmental concerns for shipping in Indian waters?", |
|
"What are the rules for coastal cargo transportation in India?" |
|
] |
|
|
|
with gr.Column(elem_classes="example-queries"): |
|
example_buttons = [gr.Button(query, elem_classes="example-query-button") for query in example_queries] |
|
|
|
gr.HTML("</div>") |
|
|
|
|
|
|
|
with gr.Column(scale=3): |
|
chatbot = gr.Chatbot( |
|
height=300, |
|
elem_classes="chat-container", |
|
type="messages" |
|
) |
|
msg = gr.Textbox( |
|
show_label=False, |
|
placeholder="Type your maritime law query here...", |
|
container=False |
|
) |
|
with gr.Row(): |
|
submit = gr.Button("Send", variant="primary") |
|
clear = gr.Button("Clear") |
|
|
|
|
|
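    # Event wiring: submitting the textbox or clicking Send streams the reply into the
    # chatbot; Clear resets both the conversation and the input box.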
msg.submit(fn=respond, inputs=[msg, chatbot], outputs=[chatbot, msg]) |
|
submit.click(fn=respond, inputs=[msg, chatbot], outputs=[chatbot, msg]) |
|
clear.click(fn=lambda: ([], ""), inputs=None, outputs=[chatbot, msg], queue=False) |
|
|
|
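    # Each example button first puts its own label into the textbox and clears the
    # history (a gr.Button used as an event input passes its label string), then
    # .then() streams the answer for that query.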
for button in example_buttons: |
|
button.click( |
|
fn=handle_example_click, |
|
inputs=[button], |
|
outputs=[msg, chatbot], |
|
queue=False |
|
).then( |
|
fn=respond, |
|
inputs=[msg, chatbot], |
|
outputs=[chatbot, msg] |
|
) |
|
|
|
if __name__ == "__main__": |
|
demo.launch() |