Create app.py
app.py
ADDED
@@ -0,0 +1,335 @@
import gradio as gr
from huggingface_hub import InferenceClient, HfApi
import os
import requests
from typing import List, Dict, Union, Tuple
import traceback
from PIL import Image
from io import BytesIO
import asyncio
from gradio_client import Client
import time
import threading
import json

HF_TOKEN = os.getenv("HF_TOKEN")
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
hf_api = HfApi(token=HF_TOKEN)

def get_headers():
    if not HF_TOKEN:
        raise ValueError("Hugging Face token not found in environment variables")
    return {"Authorization": f"Bearer {HF_TOKEN}"}

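# Note: HF_TOKEN is read from the environment; on Hugging Face Spaces it is
# typically supplied as a repository secret. hf_client sends all chat requests
# to the CohereForAI/c4ai-command-r-plus-08-2024 model via the Inference API.
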
def get_file_content(space_id: str, file_path: str) -> str:
    file_url = f"https://huggingface.co/spaces/{space_id}/raw/main/{file_path}"
    try:
        response = requests.get(file_url, headers=get_headers())
        if response.status_code == 200:
            return response.text
        else:
            return f"File not found or inaccessible: {file_path}"
    except requests.RequestException:
        return f"Error fetching content for file: {file_path}"

def get_space_structure(space_id: str) -> Dict:
    try:
        files = hf_api.list_repo_files(repo_id=space_id, repo_type="space")

        tree = {"type": "directory", "path": "", "name": space_id, "children": []}
        for file in files:
            path_parts = file.split('/')
            current = tree
            for i, part in enumerate(path_parts):
                if i == len(path_parts) - 1:  # file
                    current["children"].append({"type": "file", "path": file, "name": part})
                else:  # directory
                    found = False
                    for child in current["children"]:
                        if child["type"] == "directory" and child["name"] == part:
                            current = child
                            found = True
                            break
                    if not found:
                        new_dir = {"type": "directory", "path": '/'.join(path_parts[:i+1]), "name": part, "children": []}
                        current["children"].append(new_dir)
                        current = new_dir

        return tree
    except Exception as e:
        print(f"Error in get_space_structure: {str(e)}")
        return {"error": f"API request error: {str(e)}"}

def format_tree_structure(tree_data: Dict, indent: str = "") -> str:
    formatted = f"{indent}{'📁' if tree_data['type'] == 'directory' else '📄'} {tree_data['name']}\n"
    if tree_data["type"] == "directory":
        for child in sorted(tree_data.get("children", []), key=lambda x: (x["type"] != "directory", x["name"])):
            formatted += format_tree_structure(child, indent + "  ")
    return formatted

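# Illustrative only: for a Space containing "app.py" and "assets/logo.png",
# get_space_structure() returns a nested dict roughly like
# {"type": "directory", "path": "", "name": "<space_id>", "children": [
#     {"type": "file", "path": "app.py", "name": "app.py"},
#     {"type": "directory", "path": "assets", "name": "assets", "children": [
#         {"type": "file", "path": "assets/logo.png", "name": "logo.png"}]}]}
# and format_tree_structure() renders it as an indented text tree.
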
def summarize_code(app_content: str) -> str:
    system_message = "You are an AI assistant that analyzes and summarizes Python code. Summarize the given code concisely, in three lines or fewer."
    user_message = f"Summarize the following Python code in three lines or fewer:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        response = hf_client.chat_completion(messages, max_tokens=200, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"Error while generating summary: {str(e)}"

def analyze_code(app_content: str) -> str:
    system_message = """You are an AI assistant that analyzes Python code. Analyze the given code and explain the following items:
A. Background and necessity
B. Usefulness and value of the features
C. Distinctive strengths
D. Target audience
E. Expected benefits
Compare it with existing and similar projects in your analysis. Output in Markdown format."""
    user_message = f"Analyze the following Python code:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        response = hf_client.chat_completion(messages, max_tokens=1000, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"Error while generating analysis: {str(e)}"

def explain_usage(app_content: str) -> str:
    system_message = "You are an AI assistant that analyzes Python code and explains how to use it. Based on the given code, describe the usage in detail, as if walking through the screens. Output in Markdown format."
    user_message = f"Explain how to use the following Python code:\n\n{app_content}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        response = hf_client.chat_completion(messages, max_tokens=800, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"Error while generating usage explanation: {str(e)}"

def analyze_space(url: str, progress=gr.Progress()):
    try:
        space_id = url.split('spaces/')[-1]

        progress(0.1, desc="Analyzing file structure...")
        tree_structure = get_space_structure(space_id)
        tree_view = format_tree_structure(tree_structure)

        progress(0.3, desc="Fetching app.py...")
        app_content = get_file_content(space_id, "app.py")

        progress(0.4, desc="Summarizing code...")
        summary = summarize_code(app_content)

        progress(0.6, desc="Analyzing code...")
        analysis = analyze_code(app_content)

        progress(0.8, desc="Generating usage explanation...")
        usage = explain_usage(app_content)

        progress(1.0, desc="Done")
        return summary, analysis, usage, app_content, tree_view, tree_structure, space_id
    except Exception as e:
        print(f"Error in analyze_space: {str(e)}")
        print(traceback.format_exc())
        return f"An error occurred: {str(e)}", "", "", "", "", None, ""

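# The seven values returned above are wired, in this order, to the outputs of
# the "Analyze" button in create_ui(): summary, analysis, usage, app.py content,
# tree view text, tree structure state, and space id state.
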
def respond(
    message: str,
    history: List[Tuple[str, str]],
    system_message: str = "",
    max_tokens: int = 4000,
    temperature: float = 0.7,
    top_p: float = 0.9,
):
    system_prefix = """You must answer in Korean. Based on the given source code, your role is to provide "usage guidance for the service and Q&A". Be very friendly and detailed, writing 4,000 tokens or more. Explain usage and answer questions based on the code, helping the user with anything they may be curious about. Be conservative about the overall contents of the code, and do not reveal key values, endpoints, or the specific models used."""

    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in hf_client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:
            response += token
            yield response

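# respond() is a generator: with stream=True, hf_client.chat_completion yields
# chunks whose choices[0].delta.content carries the newly generated text, and
# the function re-yields the accumulated response after each chunk so callers
# can render the reply incrementally.
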
def create_ui():
    try:
        css = """
        footer {visibility: hidden;}
        .output-group {
            border: 1px solid #ddd;
            border-radius: 5px;
            padding: 10px;
            margin-bottom: 20px;
        }
        .scroll-lock {
            overflow-y: auto !important;
            max-height: calc((100vh - 200px) / 5) !important;
        }
        .full-height {
            height: calc(100vh - 200px) !important;
            overflow-y: auto !important;
        }
        """

        js = """
        function openFile(path, spaceId) {
            const filePathInput = document.querySelector('input[data-testid="file_path_input"]');
            const spaceIdInput = document.querySelector('input[data-testid="space_id_input"]');
            if (filePathInput && spaceIdInput) {
                filePathInput.value = path;
                spaceIdInput.value = spaceId;
                filePathInput.dispatchEvent(new Event('change'));
            }
        }
        """

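        # Note (assumption): openFile() expects the hidden file_path/space_id
        # Textboxes to be rendered with data-testid attributes matching the
        # selectors above; depending on the Gradio version, the selectors may
        # need adjusting (e.g. targeting an elem_id instead).
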
        with gr.Blocks(css=css, theme="Nymbo/Nymbo_Theme") as demo:
            gr.Markdown("# HuggingFace Space Analyzer")

            with gr.Tabs() as tabs:
                with gr.TabItem("Analysis"):
                    with gr.Row():
                        with gr.Column(scale=6):  # left column, 60%
                            url_input = gr.Textbox(label="HuggingFace Space URL")
                            analyze_button = gr.Button("Analyze")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                summary_output = gr.Markdown(label="Summary (3 lines or fewer)")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                analysis_output = gr.Markdown(label="Analysis")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                usage_output = gr.Markdown(label="Usage")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                tree_view_output = gr.Textbox(label="File Structure (Tree View)", lines=20)

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                file_buttons = gr.HTML(label="File List")

                        with gr.Column(scale=4):  # right column, 40%
                            with gr.Group(elem_classes="output-group full-height"):
                                code_tabs = gr.Tabs()
                                with code_tabs:
                                    app_py_tab = gr.TabItem("app.py")
                                    with app_py_tab:
                                        app_py_content = gr.Code(language="python", label="app.py", lines=30)

                with gr.TabItem("AI Coding"):
                    chatbot = gr.Chatbot()
                    msg = gr.Textbox(label="Message")
                    with gr.Row():
                        system_message = gr.Textbox(label="System Message", value="")
                        max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens")
                        temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
                        top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")

                    examples = [
                        ["Explain the usage in detail, in 4,000 tokens or more, as if walking through the screens"],
                        ["Write 20 FAQ entries in detail, using 4,000 tokens or more."],
                        ["Write a YouTube video script of 4,000 tokens or more, focusing on usage, differentiators, features, and strengths"],
                        ["Write an SEO-optimized blog post about this service, 4,000 tokens or more (including background and necessity, strengths compared to existing similar services, use cases, value, expected benefits, and a conclusion)"],
                        ["Write 4,000 tokens or more structured like a patent application, focusing on innovative and creative invention content, including the technologies used and the business-model aspects"],
                        ["Continue the previous answer"],
                    ]

                    gr.Examples(examples, inputs=msg)

            def respond_wrapper(message, chat_history, system_message, max_tokens, temperature, top_p):
                # respond() streams partial responses; keep the last (complete)
                # one for the chat history.
                bot_message = ""
                for partial in respond(message, chat_history, system_message, max_tokens, temperature, top_p):
                    bot_message = partial
                chat_history.append((message, bot_message))
                return "", chat_history

            msg.submit(respond_wrapper, [msg, chatbot, system_message, max_tokens, temperature, top_p], [msg, chatbot])

            space_id_state = gr.State()
            tree_structure_state = gr.State()

            def update_file_buttons(tree_structure, space_id):
                if tree_structure is None:
                    return ""

                def get_files(node):
                    files = []
                    if node["type"] == "file":
                        files.append(node)
                    elif node["type"] == "directory":
                        for child in node.get("children", []):
                            files.extend(get_files(child))
                    return files

                files = get_files(tree_structure)
                buttons_html = "<div style='display: flex; flex-direction: column;'>"
                for file in files:
                    buttons_html += f"<button onclick=\"openFile('{file['path']}', '{space_id}')\" style='margin: 5px 0; padding: 5px;'>{file['path']}</button>"
                buttons_html += "</div>"
                return buttons_html

            def open_file(file_path: str, space_id: str):
                content = get_file_content(space_id, file_path)
                return gr.Tabs.update(selected=file_path), gr.Code(value=content, language="python", label=file_path, lines=30)

            analyze_button.click(
                analyze_space,
                inputs=[url_input],
                outputs=[summary_output, analysis_output, usage_output, app_py_content, tree_view_output, tree_structure_state, space_id_state]
            ).then(
                update_file_buttons,
                inputs=[tree_structure_state, space_id_state],
                outputs=[file_buttons]
            )

            file_path_input = gr.Textbox(visible=False)
            space_id_input = gr.Textbox(visible=False)

            def handle_file_open(file_path, space_id):
                return file_path, space_id

            # open_file returns a tab selection and new code content: route them
            # to the Tabs component and the Code viewer respectively.
            file_path_input.change(
                open_file,
                inputs=[file_path_input, space_id_input],
                outputs=[code_tabs, app_py_content]
            )

            # Inject the JavaScript directly into the page HTML
            gr.HTML(f"<script>{js}</script>")

        return demo

    except Exception as e:
        print(f"Error in create_ui: {str(e)}")
        print(traceback.format_exc())
        raise

if __name__ == "__main__":
    try:
        demo = create_ui()
        demo.queue()
        demo.launch()
    except Exception as e:
        print(f"Error in main: {str(e)}")
        print(traceback.format_exc())