Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -175,20 +175,48 @@ def analyze_space(url: str, progress=gr.Progress()):
|
|
175 |
print(traceback.format_exc())
|
176 |
return f"์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}", "", None, "", "", "", "", 10
|
177 |
|
178 |
-
|
179 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
180 |
Gradio ํน์ฑ์ ์ ํํ ์ธ์ํ๊ณ Requirements.txt ๋๋ฝ์์ด ์ฝ๋ฉ๊ณผ ์ค๋ฅ๋ฅผ ํด๊ฒฐํด์ผ ํฉ๋๋ค.
|
181 |
ํญ์ ์ ํํ๊ณ ์ ์ฉํ ์ ๋ณด๋ฅผ ์ ๊ณตํ๋๋ก ๋
ธ๋ ฅํ์ธ์."""
|
182 |
|
183 |
-
messages = [{"role": "system", "content": system_message}]
|
184 |
-
|
|
|
|
|
|
|
|
|
185 |
messages.append({"role": "user", "content": message})
|
186 |
-
|
187 |
-
|
188 |
-
|
189 |
-
|
190 |
-
|
191 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
192 |
|
193 |
def create_ui():
|
194 |
try:
|
|
|
175 |
print(traceback.format_exc())
|
176 |
return f"์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}", "", None, "", "", "", "", 10
|
177 |
|
178 |
+
|
179 |
+
|
180 |
+
|
181 |
+
def respond(
    message,
    history: List[Tuple[str, str]],
    system_message="",   # optional extra system text appended after the fixed prefix
    max_tokens=1024,     # generation cap forwarded to the endpoint
    temperature=0.7,     # sampling temperature forwarded to the endpoint
    top_p=0.9,           # nucleus-sampling cutoff forwarded to the endpoint
):
    """Stream a chat completion for *message* given the running chat *history*.

    Builds an OpenAI-style message list (fixed system prefix + optional
    caller-supplied system text + prior user/assistant turns + the new user
    message), then streams tokens from ``hf_client.chat_completion`` and
    yields the accumulated response text after each chunk — the generator
    shape Gradio expects for streaming chat output.

    Yields:
        str: the response text accumulated so far (grows with each chunk).
    """
    # NOTE(review): declared but never used inside this function — presumably
    # read/written by sibling handlers in this file; kept for compatibility.
    global fashion_code, uhdimage_code, MixGEN_code

    # Normalize None to "" so the f-string below never renders "None".
    system_message = system_message or ""
    system_prefix = """๋น์ ์ ํ๊นํ์ด์ค์ ํนํ๋ AI ์ฝ๋ฉ ์ ๋ฌธ๊ฐ์๋๋ค. ์ฌ์ฉ์์ ์ง๋ฌธ์ ์น์ ํ๊ณ ์์ธํ๊ฒ ๋ต๋ณํด์ฃผ์ธ์.
Gradio ํน์ฑ์ ์ ํํ ์ธ์ํ๊ณ Requirements.txt ๋๋ฝ์์ด ์ฝ๋ฉ๊ณผ ์ค๋ฅ๋ฅผ ํด๊ฒฐํด์ผ ํฉ๋๋ค.
ํญ์ ์ ํํ๊ณ ์ ์ฉํ ์ ๋ณด๋ฅผ ์ ๊ณตํ๋๋ก ๋ธ๋ ฅํ์ธ์."""

    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    # Replay prior turns; skip empty slots (e.g. a user turn still awaiting a reply).
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

    response = ""
    # FIX: the original wrote ``for message in hf_client.chat_completion(...)``,
    # shadowing the ``message`` parameter with each stream chunk. Harmless today
    # (the parameter is not read afterwards) but a latent bug — renamed.
    for chunk in hf_client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # assumes the streamed delta is dict-like (has .get) — TODO confirm
        # against the installed huggingface_hub version.
        token = chunk.choices[0].delta.get('content', None)
        if token:
            # FIX: original appended ``token.strip("")`` — a no-op, since an
            # empty strip-set strips nothing. Dropped the call; a real .strip()
            # would corrupt whitespace in the streamed text.
            response += token
        yield response
|
215 |
+
|
216 |
+
|
217 |
+
|
218 |
+
|
219 |
+
|
220 |
|
221 |
def create_ui():
|
222 |
try:
|