Update app-backup10.py
app-backup10.py  CHANGED  +39 -14
@@ -175,20 +175,38 @@ def analyze_space(url: str, progress=gr.Progress()):
         print(traceback.format_exc())
         return f"An error occurred: {str(e)}", "", None, "", "", "", "", 10

-def respond(
+def respond(
+    message: str,
+    history: List[Tuple[str, str]],
+    system_message: str = "",
+    max_tokens: int = 1024,
+    temperature: float = 0.7,
+    top_p: float = 0.9,
+):
+    system_prefix = """You are an AI coding expert specialized in Hugging Face. Answer the user's questions kindly and in detail.
 You must understand Gradio's characteristics precisely and resolve coding issues and errors without omitting anything from Requirements.txt.
 Always strive to provide accurate and useful information."""

-    messages = [{"role": "system", "content": system_message}]
+    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
+    for user_msg, assistant_msg in history:
+        messages.append({"role": "user", "content": user_msg})
+        if assistant_msg:
+            messages.append({"role": "assistant", "content": assistant_msg})
     messages.append({"role": "user", "content": message})
+
+    response = ""
+    for message in hf_client.chat_completion(
+        messages,
+        max_tokens=max_tokens,
+        stream=True,
+        temperature=temperature,
+        top_p=top_p,
+    ):
+        token = message.choices[0].delta.get('content', None)
+        if token:
+            response += token.strip("")
+        yield response
+

 def create_ui():
     try:

@@ -297,7 +315,8 @@ def create_ui():
             )

             with gr.TabItem("AI Coding"):
-                chatbot = gr.Chatbot(label="Conversation"
+                chatbot = gr.Chatbot(label="Conversation")
+
                 msg = gr.Textbox(label="Message")

                 # Set the generation parameters as hidden components

@@ -317,11 +336,16 @@ def create_ui():
                 gr.Examples(examples, inputs=msg)

                 def respond_wrapper(message, chat_history, max_tokens, temperature, top_p):
-                    bot_message =
+                    bot_message = ""
+                    for response in respond(message, chat_history, max_tokens=max_tokens, temperature=temperature, top_p=top_p):
+                        bot_message = response  # keep the most recent streamed response
+                        yield "", chat_history + [(message, bot_message)]
+
+                    chat_history.append((message, bot_message))
                     return "", chat_history

+
+
                 msg.submit(respond_wrapper, [msg, chatbot, max_tokens, temperature, top_p], [msg, chatbot])

                 space_id_state = gr.State()

@@ -347,6 +371,7 @@ def create_ui():
         print(f"Error in create_ui: {str(e)}")
         print(traceback.format_exc())
         raise
+

 if __name__ == "__main__":
     try: