import os
import gradio as gr
from transformers import AutoModel, AutoTokenizer


def process_models(model_name, save_dir, additional_models):
    """Download the requested models and save them (with their tokenizers) under save_dir."""
    log_lines = []

    # Process the primary model.
    log_lines.append(f"📥 Loading model: {model_name}")
    try:
        model = AutoModel.from_pretrained(model_name)
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        model_save_path = os.path.join(save_dir, model_name.replace("/", "_"))
        os.makedirs(model_save_path, exist_ok=True)
        # Save both the model weights and the tokenizer files.
        model.save_pretrained(model_save_path)
        tokenizer.save_pretrained(model_save_path)
        log_lines.append(f"✅ Saved {model_name} to {model_save_path}")
    except Exception as e:
        log_lines.append(f"❌ Error with {model_name}: {e}")

    # Process any additional models selected in the dropdown.
    if additional_models:
        for m in additional_models:
            log_lines.append(f"📥 Loading model: {m}")
            try:
                model = AutoModel.from_pretrained(m)
                tokenizer = AutoTokenizer.from_pretrained(m)
                model_save_path = os.path.join(save_dir, m.replace("/", "_"))
                os.makedirs(model_save_path, exist_ok=True)
                model.save_pretrained(model_save_path)
                tokenizer.save_pretrained(model_save_path)
                log_lines.append(f"✅ Saved {m} to {model_save_path}")
            except Exception as e:
                log_lines.append(f"❌ Error with {m}: {e}")

    return "\n".join(log_lines)
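# A minimal sketch of calling the function directly, outside the Gradio UI
# (the model name and directory below are illustrative values, not defaults):
#   print(process_models("bert-base-uncased", "./hugging", []))
# This would download bert-base-uncased and write it under ./hugging/bert-base-uncased.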
with gr.Blocks() as demo:
    gr.Markdown("# HuggingFace Model Loader & Saver")
    gr.Markdown("Load and save HuggingFace models locally using Transformers.")

    with gr.Row():
        model_name_input = gr.Textbox(label="📦 Model", value="openai-gpt", placeholder="Enter model name")
        save_dir_input = gr.Textbox(label="💾 Save Dir", value="./hugging", placeholder="Enter save directory")
        additional_models_input = gr.Dropdown(
            label="🧩 Additional Models",
            choices=["bert-base-uncased", "gpt2", "roberta-base"],
            value=[],
            multiselect=True,
            info="Select additional models"
        )

    run_button = gr.Button("Load & Save Model")
    output_log = gr.Textbox(label="Output Log", lines=10)

    run_button.click(
        fn=process_models,
        inputs=[model_name_input, save_dir_input, additional_models_input],
        outputs=output_log
    )
if __name__ == "__main__":
    # Launch the Gradio app. Hugging Face Spaces will execute this file with python.
    demo.launch(server_name="0.0.0.0", server_port=7860)
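# To try this outside of Spaces, assuming the file is saved as app.py (the Spaces
# convention) and that gradio, transformers, and a backend such as torch are installed:
#   pip install gradio transformers torch
#   python app.py
# then open http://localhost:7860 in a browser.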