Update app.py
app.py CHANGED
@@ -39,6 +39,19 @@ def clear_conversation_history():
     except Exception as e:
         return f"Error clearing history: {e}", ""
 
+# Function to format bot response
+def format_bot_response(response):
+    """
+    Converts markdown-like symbols to HTML and structures response.
+    """
+    response = response.replace("**", "<b>").replace("**", "</b>")  # Bold formatting
+    response = response.replace("1.", "<br> 1.")
+    response = response.replace("2.", "<br> 2.")
+    response = response.replace("3.", "<br> 3.")
+    response = response.replace("4.", "<br> 4.")
+    response = response.replace("5.", "<br> 5.")
+    return f"<div>{response}</div>"
+
 # Function to get response from the LLM
 def get_groq_response(message, history=[]):
     try:
@@ -47,7 +60,7 @@ def get_groq_response(message, history=[]):
             model="llama-3.1-70b-versatile",
             messages=messages
         )
-        return response.choices[0].message["content"]
+        return format_bot_response(response.choices[0].message["content"])
     except Exception as e:
         return f"Error: {str(e)}"
 
@@ -70,16 +83,40 @@ def chatbot(user_input, history):
     save_history(conversation_history)
 
     # Format for HTML display
-    display_html = "
-        f"<div><b>User:</b> {user}</div
+    display_html = "".join(
+        f"<div class='user-message'><b>User:</b> {user}</div>"
+        f"<div class='bot-message'><b>Bot:</b> {bot}</div>"
         for user, bot in conversation_history
     )
 
     return conversation_history, display_html, ""  # Clear the user input field
 
 # Gradio Interface
-with gr.Blocks(
-
+with gr.Blocks(css="""
+    .user-message {
+        background-color: #9ACBD0;
+        padding: 10px;
+        margin: 10px;
+        border-radius: 8px;
+        max-width: 60%;
+        float: right;
+        clear: both;
+    }
+    .bot-message {
+        background-color: #F2EFE7;
+        padding: 10px;
+        margin: 10px;
+        border-radius: 8px;
+        max-width: 60%;
+        float: left;
+        clear: both;
+    }
+    .user-message:hover, .bot-message:hover {
+        transform: scale(1.02);
+        box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.1);
+    }
+""") as demo:
+    gr.Markdown("# Chatbot with Enhanced UI and User-Friendly Formatting")
 
     chat_display = gr.HTML(label="Conversation")
     user_input = gr.Textbox(label="Type your message here:")
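
A note on the format_bot_response hunk above: the chained str.replace calls handle one marker at a time, and the first replace("**", "<b>") consumes every "**" before the closing-tag replace runs, so bold spans never receive a matching </b>. A minimal sketch of the same markdown-to-HTML idea with paired markers, using only Python's standard re module (illustrative only, not part of this commit):

import re

def format_bot_response_sketch(response):
    # Pair **...** into <b>...</b> in a single pass instead of two unpaired replaces.
    response = re.sub(r"\*\*(.+?)\*\*", r"<b>\1</b>", response)
    # Break before "1."-"5." list markers, mirroring the per-number replaces in the
    # committed code (like the original, this also fires on numbers such as "3.14").
    response = re.sub(r"\b([1-5]\.)", r"<br> \1", response)
    return f"<div>{response}</div>"

print(format_bot_response_sketch("**Steps:** 1. install 2. run"))
# <div><b>Steps:</b> <br> 1. install <br> 2. run</div>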
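
In the chatbot hunk, the class names written into display_html are the same selectors styled in gr.Blocks(css=...), which is what floats user bubbles right and bot bubbles left. A standalone sketch of that join with an invented one-turn history (sample data only; in the app the list comes from the saved conversation history):

# Hypothetical sample; in app.py the bot string is the HTML returned by format_bot_response.
conversation_history = [("Hi", "Hello!")]

display_html = "".join(
    f"<div class='user-message'><b>User:</b> {user}</div>"
    f"<div class='bot-message'><b>Bot:</b> {bot}</div>"
    for user, bot in conversation_history
)

print(display_html)
# <div class='user-message'><b>User:</b> Hi</div><div class='bot-message'><b>Bot:</b> Hello!</div>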