import os
from collections.abc import Iterator

import gradio as gr
from huggingface_hub import InferenceClient
from huggingface_hub.inference._generated.types.chat_completion import (
    ChatCompletionStreamOutput,
)

MODEL = "nomiChroma3.1"  # display name only; unused by the InferenceClient below
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")


def respond(
    message: str,
    chat_history: list[dict],
) -> Iterator[tuple[list[dict], str]]:
    """
    Stream a response and update the chat history.

    Uses the messages format with "role" and "content" keys and yields
    (new_history, "") tuples so the input box is cleared while the
    assistant's reply streams in.
    """
    system_message = (
        "You are a maritime legal assistant with expertise strictly in Indian "
        "maritime law. Provide detailed legal advice and information based on "
        "Indian maritime legal principles and regulations."
    )

    messages = [{"role": "system", "content": system_message}]
    for msg in chat_history:
        messages.append({"role": msg["role"], "content": msg["content"]})
    messages.append({"role": "user", "content": message})

    chat_history = chat_history + [{"role": "user", "content": message}]
    response = ""

    def update_history(text: str) -> None:
        """Write the partial response into the trailing assistant turn."""
        if chat_history and chat_history[-1]["role"] == "assistant":
            chat_history[-1]["content"] = text
        else:
            chat_history.append({"role": "assistant", "content": text})

    try:
        for chunk in client.chat_completion(
            messages,
            max_tokens=512,
            stream=True,
            temperature=0.7,
            top_p=0.95,
        ):
            try:
                # The stream may yield typed outputs, plain dicts, or raw
                # strings depending on the client version; handle all three.
                if isinstance(chunk, ChatCompletionStreamOutput):
                    content = chunk.choices[0].delta.content
                    if content:
                        response += content
                        update_history(response)
                        yield chat_history, ""
                    if chunk.choices[0].finish_reason == "stop":
                        break
                elif isinstance(chunk, dict):
                    choice = chunk.get("choices", [{}])[0]
                    content = choice.get("delta", {}).get("content")
                    if content:
                        response += content
                        update_history(response)
                        yield chat_history, ""
                    if choice.get("finish_reason") == "stop":
                        break
                elif isinstance(chunk, str) and chunk.strip():
                    response += chunk
                    update_history(response)
                    yield chat_history, ""
            except Exception as e:
                print(f"Error processing chunk: {e}")
                continue

        if not response:
            chat_history.append({
                "role": "assistant",
                "content": "I apologize, but I couldn't generate a response. Please try again.",
            })
            yield chat_history, ""
    except Exception as e:
        chat_history.append({"role": "assistant", "content": f"An error occurred: {e}"})
        yield chat_history, ""


def handle_example_click(example_query: str):
    """Handle an example-query click: load the query and clear the chat history."""
    return example_query, []


# SVG definitions (the wave markup itself is not included in this file)
WAVE_SVG = """ """

custom_css = """
@import url('https://fonts.googleapis.com/css2?family=Ubuntu:wght@300;400;500;700&display=swap');

/* Global styles */
.gradio-container {
    background-color: #1a365d !important;
    font-family: 'Ubuntu', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Cantarell, "Helvetica Neue", sans-serif !important;
    position: relative;
    overflow: hidden;
}

/* Wave background */
.wave-background {
    position: fixed;
    bottom: 0;
    left: 0;
    width: 100%;
    height: 100%;
    z-index: 0;
    pointer-events: none;
}

/* Header styling */
.header-container {
    text-align: center;
    padding: 2rem 0;
    margin-bottom: 1rem;
    border-bottom: 2px solid rgba(255, 255, 255, 0.1);
    position: relative;
    z-index: 1;
}

.header-title {
    color: #ffffff;
    font-size: 2.5rem;
    margin-bottom: 0.5rem;
    font-family: 'Ubuntu', sans-serif !important;
    text-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
}

.header-subtitle {
    color: #e6f3ff;
    font-size: 1.1rem;
    margin-bottom: 0.3rem;
    font-family: 'Ubuntu', sans-serif !important;
    opacity: 0.9;
}

/* Maritime icons styling */
.maritime-icons {
    display: flex;
    justify-content: center;
    margin: 1rem 0;
    gap: 2rem;
}

.maritime-icon {
    width: 40px;
    height: 40px;
    transition: transform 0.3s ease;
}

.maritime-icon:hover {
    transform: scale(1.1);
}

/* Sidebar styling */
.sidebar {
    background: #ccd9e6 !important;
    border-radius: 8px !important;
    padding: 15px !important;
    border: 1px solid rgba(176, 226, 255, 0.2) !important;
    height: fit-content !important;
    position: relative;
    overflow: hidden;
}

.sidebar-bg {
    position: absolute;
    top: 0;
    right: 0;
    bottom: 0;
    left: 0;
    opacity: 0.05;
    pointer-events: none;
    z-index: 0;
}

.sidebar-content {
    position: relative;
    z-index: 1;
}

.sidebar-icon {
    width: 24px;
    height: 24px;
    margin-right: 8px;
    vertical-align: middle;
}

.sidebar-title {
    color: #1a365d !important;
    font-size: 1.1rem !important;
    margin-bottom: 0.8rem !important;
    padding-bottom: 0.4rem !important;
    border-bottom: 2px solid rgba(26, 54, 93, 0.2) !important;
    font-family: 'Ubuntu', sans-serif !important;
    display: flex;
    align-items: center;
}

.example-query-button {
    background-color: #b3c6d9 !important;
    color: #1a365d !important;
    border: none !important;
    margin: 3px 0 !important;
    padding: 6px 10px !important;
    border-radius: 4px !important;
    text-align: left !important;
    width: 100% !important;
    cursor: pointer !important;
    transition: background-color 0.3s ease !important;
    font-size: 0.9rem !important;
    font-family: 'Ubuntu', sans-serif !important;
    position: relative;
    padding-left: 30px !important;
}

.example-query-button::before {
    content: "⚓";
    position: absolute;
    left: 8px;
    top: 50%;
    transform: translateY(-50%);
    font-size: 12px;
    opacity: 0.7;
}

.example-query-button:hover {
    background-color: #99b3cc !important;
}

/* Chat container */
.chat-container {
    background: #ccd9e6 !important;
    border-radius: 8px !important;
    padding: 15px !important;
    height: 300px !important;
    overflow-y: auto !important;
    border: 1px solid rgba(176, 226, 255, 0.2) !important;
    backdrop-filter: blur(10px) !important;
    font-family: 'Ubuntu', sans-serif !important;
}

/* Message styling */
.message.user,
.message.bot {
    padding: 8px 12px !important;
    margin: 6px 0 !important;
    border-radius: 6px !important;
    color: #1a365d !important;
    font-size: 0.9rem !important;
    font-family: 'Ubuntu', sans-serif !important;
    line-height: 1.5 !important;
}

.message.user {
    background-color: #b3c6d9 !important;
}

.message.bot {
    background-color: #e6f3ff !important;
}

/* Input and button styling */
textarea {
    background-color: #e6f3ff !important;
    border: 1px solid rgba(176, 226, 255, 0.3) !important;
    border-radius: 6px !important;
    padding: 8px !important;
    color: #1a365d !important;
    font-size: 0.9rem !important;
    font-family: 'Ubuntu', sans-serif !important;
}

.gr-button {
    font-family: 'Ubuntu', sans-serif !important;
}

.maritime-images {
    display: grid;
    grid-template-columns: repeat(3, 1fr);
    gap: 1rem;
    margin: 1.5rem auto;
    max-width: 1200px;
    padding: 0 1rem;
}

.maritime-image {
    width: 100%;
    height: 200px;
    object-fit: cover;
    border-radius: 8px;
    border: 2px solid #e6f3ff;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
    transition: transform 0.3s ease;
    filter: contrast(110%) saturate(110%);
}

.maritime-image:hover {
    transform: scale(1.02);
}
"""

# Main application
with gr.Blocks(css=custom_css, theme=gr.themes.Base()) as demo:
    # Header with wave background and maritime icons
    gr.HTML("""
        <div class="header-container">

            <h1 class="header-title">Maritime Legal Compliance</h1>

            <p class="header-subtitle">AI-powered assistance for Indian maritime law queries</p>

            <p class="header-subtitle">This chatbot uses a fine-tuned LLaMA-3.1 model personalised specifically to assist with Indian maritime legal queries.</p>
        </div>
    """)

    with gr.Row():
        # Enhanced sidebar with example queries
        with gr.Column(scale=1, elem_classes="sidebar"):
            gr.HTML('<div class="sidebar-content"><div class="sidebar-title">Example Queries</div></div>')

            # The original example queries were not preserved in this file;
            # the strings below are illustrative placeholders.
            example_queries = [
                "What are the key provisions of the Merchant Shipping Act, 1958?",
                "How is a ship registered under Indian maritime law?",
                "What are a shipowner's liabilities for oil pollution in Indian waters?",
            ]
            example_buttons = [
                gr.Button(query, elem_classes="example-query-button")
                for query in example_queries
            ]

        # Main chat area
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(
                height=300,
                elem_classes="chat-container",
                type="messages",
            )
            msg = gr.Textbox(
                show_label=False,
                placeholder="Type your maritime law query here...",
                container=False,
            )
            with gr.Row():
                submit = gr.Button("Send", variant="primary")
                clear = gr.Button("Clear")

    # Event handlers
    msg.submit(fn=respond, inputs=[msg, chatbot], outputs=[chatbot, msg])
    submit.click(fn=respond, inputs=[msg, chatbot], outputs=[chatbot, msg])
    clear.click(fn=lambda: ([], ""), inputs=None, outputs=[chatbot, msg], queue=False)

    # Clicking an example loads it into the input box, clears the chat,
    # and then streams a response for that query.
    for button in example_buttons:
        button.click(
            fn=handle_example_click,
            inputs=[button],
            outputs=[msg, chatbot],
            queue=False,
        ).then(
            fn=respond,
            inputs=[msg, chatbot],
            outputs=[chatbot, msg],
        )

if __name__ == "__main__":
    demo.launch()