import os
import gdown
import requests
import gradio as gr
import torch
from diffusers import FlaxStableDiffusionPipeline
from safetensors.torch import save_file, load_file
from huggingface_hub import model_info, create_repo, create_branch, upload_folder
from huggingface_hub.utils import RepositoryNotFoundError, RevisionNotFoundError
def download_file(file_url_1, file_name_1):
    # Create the file directory if it doesn't exist
    os.makedirs("file", exist_ok=True)
    if "drive.google.com" in file_url_1:
        gdown.download(url=file_url_1, output=f"file/{file_name_1}", quiet=False, fuzzy=True)
        return "download file done!"
    elif "civitai.com" in file_url_1 or "huggingface.co" in file_url_1:
        os.system(f"aria2c --out=file/{file_name_1} --summary-interval=10 -c -x 10 -k 1M -s 10 {file_url_1}")
        return "download file done!"
    else:
        try:
            response = requests.get(file_url_1)
            if response.status_code == 200:
                with open(f"file/{file_name_1}", "wb") as f:
                    f.write(response.content)
                return "download file done!"
            else:
                return "error download file!"
        except Exception:
            return "error download file!"
def push_file_1(model_to, token, branch):
    try:
        repo_exists = True
        r_info = model_info(model_to, token=token)
    except RepositoryNotFoundError:
        repo_exists = False
    finally:
        if repo_exists:
            print(r_info)
        else:
            create_repo(model_to, private=True, token=token)
    try:
        branch_exists = True
        b_info = model_info(model_to, revision=branch, token=token)
    except RevisionNotFoundError:
        branch_exists = False
    finally:
        if branch_exists:
            print(b_info)
        else:
            create_branch(model_to, branch=branch, token=token)
    # Create the file directory if it doesn't exist
    os.makedirs("file", exist_ok=True)
    upload_folder(folder_path="file", path_in_repo="", revision=branch, repo_id=model_to, commit_message="file", token=token)
    return "push files done!"
def delete_file():
    os.system("rm -rf file")
    return "delete file done!"
def download_ckpt(ckpt_url):
    # to_pt() converts "model.ckpt", and the UI only passes the URL, so download to that path
    if "drive.google.com" in ckpt_url:
        gdown.download(url=ckpt_url, output="model.ckpt", quiet=False, fuzzy=True)
        return "download ckpt done!"
    elif "civitai.com" in ckpt_url or "huggingface.co" in ckpt_url:
        os.system(f"aria2c --out=model.ckpt --summary-interval=10 -c -x 10 -k 1M -s 10 {ckpt_url}")
        return "download ckpt done!"
    else:
        try:
            response = requests.get(ckpt_url)
            if response.status_code == 200:
                with open("model.ckpt", "wb") as f:
                    f.write(response.content)
                return "download ckpt done!"
            else:
                return "error download ckpt!"
        except Exception:
            return "error download ckpt!"
def to_pt():
    os.system("wget -q https://raw.githubusercontent.com/huggingface/diffusers/main/scripts/convert_original_stable_diffusion_to_diffusers.py")
    os.system("python3 convert_original_stable_diffusion_to_diffusers.py --checkpoint_path model.ckpt --dump_path pt")
    return "convert to pt done!"
def push_pt(model_to, token, branch):
    try:
        repo_exists = True
        r_info = model_info(model_to, token=token)
    except RepositoryNotFoundError:
        repo_exists = False
    finally:
        if repo_exists:
            print(r_info)
        else:
            create_repo(model_to, private=True, token=token)
    try:
        branch_exists = True
        b_info = model_info(model_to, revision=branch, token=token)
    except RevisionNotFoundError:
        branch_exists = False
    finally:
        if branch_exists:
            print(b_info)
        else:
            create_branch(model_to, branch=branch, token=token)
    upload_folder(folder_path="pt", path_in_repo="", revision=branch, repo_id=model_to, commit_message="pt", token=token)
    return "push pt done!"

def delete_pt():
    os.system("rm -rf pt")
    return "delete pt done!"
def clone_pt(model_url):
    os.system("git lfs install")
    os.system(f"git clone https://huggingface.co/{model_url} pt")
    return "clone pt done!"
def to_flax():
    pipe, params = FlaxStableDiffusionPipeline.from_pretrained("pt", from_pt=True)
    pipe.save_pretrained("flax", params=params)
    return "convert to flax done!"
def push_flax(model_to, token, branch):
    try:
        repo_exists = True
        r_info = model_info(model_to, token=token)
    except RepositoryNotFoundError:
        repo_exists = False
    finally:
        if repo_exists:
            print(r_info)
        else:
            create_repo(model_to, private=True, token=token)
    try:
        branch_exists = True
        b_info = model_info(model_to, revision=branch, token=token)
    except RevisionNotFoundError:
        branch_exists = False
    finally:
        if branch_exists:
            print(b_info)
        else:
            create_branch(model_to, branch=branch, token=token)
    upload_folder(folder_path="flax", path_in_repo="", revision=branch, repo_id=model_to, commit_message="flax", token=token)
    return "push flax done!"

def delete_flax():
    os.system("rm -rf flax")
    return "delete flax done!"
def to_ckpt(ckpt_name):
    os.system("wget -q https://raw.githubusercontent.com/huggingface/diffusers/main/scripts/convert_diffusers_to_original_stable_diffusion.py")
    os.makedirs("ckpt", exist_ok=True)
    os.system(f"python3 convert_diffusers_to_original_stable_diffusion.py --model_path pt --checkpoint_path ckpt/{ckpt_name}.ckpt")
    return "convert to ckpt done!"
def push_ckpt(model_to, token, branch):
    try:
        repo_exists = True
        r_info = model_info(model_to, token=token)
    except RepositoryNotFoundError:
        repo_exists = False
    finally:
        if repo_exists:
            print(r_info)
        else:
            create_repo(model_to, private=True, token=token)
    try:
        branch_exists = True
        b_info = model_info(model_to, revision=branch, token=token)
    except RevisionNotFoundError:
        branch_exists = False
    finally:
        if branch_exists:
            print(b_info)
        else:
            create_branch(model_to, branch=branch, token=token)
    upload_folder(folder_path="ckpt", path_in_repo="", revision=branch, repo_id=model_to, commit_message="ckpt", token=token)
    return "push ckpt done!"

def delete_ckpt():
    os.system("rm -rf ckpt")
    return "delete ckpt done!"
def download_ckpt_1(ckpt_url_2, ckpt_name_2):
    # Create the ckpt directory if it doesn't exist
    os.makedirs("ckpt", exist_ok=True)
    if "drive.google.com" in ckpt_url_2:
        gdown.download(url=ckpt_url_2, output=f"ckpt/{ckpt_name_2}.ckpt", quiet=False, fuzzy=True)
        return "download ckpt done!"
    elif "civitai.com" in ckpt_url_2 or "huggingface.co" in ckpt_url_2:
        os.system(f"aria2c --out=ckpt/{ckpt_name_2}.ckpt --summary-interval=10 -c -x 10 -k 1M -s 10 {ckpt_url_2}")
        return "download ckpt done!"
    else:
        try:
            response = requests.get(ckpt_url_2)
            if response.status_code == 200:
                with open(f"ckpt/{ckpt_name_2}.ckpt", "wb") as f:
                    f.write(response.content)
                return "download ckpt done!"
            else:
                return "error download ckpt!"
        except Exception:
            return "error download ckpt!"
def to_safetensors(ckpt_name_2, safetensors_name_2):
    os.makedirs("safetensors", exist_ok=True)
    # Load on CPU so checkpoints saved from GPU still load in this Space
    weights = torch.load(f"ckpt/{ckpt_name_2}.ckpt", map_location="cpu")
    if "state_dict" in weights:
        weights = weights["state_dict"]
    save_file(weights, f"safetensors/{safetensors_name_2}.safetensors")
    return "convert to safetensors done!"
def push_safetensors(model_to, token, branch):
    try:
        repo_exists = True
        r_info = model_info(model_to, token=token)
    except RepositoryNotFoundError:
        repo_exists = False
    finally:
        if repo_exists:
            print(r_info)
        else:
            create_repo(model_to, private=True, token=token)
    try:
        branch_exists = True
        b_info = model_info(model_to, revision=branch, token=token)
    except RevisionNotFoundError:
        branch_exists = False
    finally:
        if branch_exists:
            print(b_info)
        else:
            create_branch(model_to, branch=branch, token=token)
    upload_folder(folder_path="safetensors", path_in_repo="", revision=branch, repo_id=model_to, commit_message="safetensors", token=token)
    return "push safetensors done!"

def delete_safetensors():
    os.system("rm -rf safetensors")
    return "delete safetensors done!"
def download_safetensors(safetensors_url, safetensors_name):
    # Create the safetensors directory if it doesn't exist
    os.makedirs("safetensors", exist_ok=True)
    if "drive.google.com" in safetensors_url:
        gdown.download(url=safetensors_url, output=f"safetensors/{safetensors_name}.safetensors", quiet=False, fuzzy=True)
        return "download safetensors done!"
    elif "civitai.com" in safetensors_url or "huggingface.co" in safetensors_url:
        os.system(f"aria2c --out=safetensors/{safetensors_name}.safetensors --summary-interval=10 -c -x 10 -k 1M -s 10 {safetensors_url}")
        return "download safetensors done!"
    else:
        try:
            response = requests.get(safetensors_url)
            if response.status_code == 200:
                with open(f"safetensors/{safetensors_name}.safetensors", "wb") as f:
                    f.write(response.content)
                return "download safetensors done!"
            else:
                return "error download safetensors!"
        except Exception:
            return "error download safetensors!"
def from_safetensors_to_ckpt(safetensors_name, ckpt_name):
    weights = load_file(f"safetensors/{safetensors_name}.safetensors", device="cpu")
    os.makedirs("ckpt", exist_ok=True)
    torch.save(weights, f"ckpt/{ckpt_name}.ckpt")
    return "convert to ckpt done!"
def delete_torrent():
    os.system("rm -rf torrent")
    return "delete torrent done!"
def delete_all():
    delete_ckpt()
    delete_torrent()
    delete_pt()
    delete_flax()
    delete_safetensors()
    return "delete all done!"
block = gr.Blocks()

with block:
    gr.Markdown(
        """
        ## Now using aria2c for better downloading<br />
        ## 🚨 Please click all delete buttons first 🚨 🎉<br />
        ## Links from almost all download sites work<br />
        ### Special thanks to [@camenduru](https://huggingface.co/camenduru) for creating the initial script <br />
        ### Modified by [@umair007](https://huggingface.co/umair007)<br />
        """)
    # Add delete buttons to the interface
    with gr.Group():
        with gr.Box():
            with gr.Row().style(equal_height=True):
                out_pt = gr.Textbox(show_label=False)
                out_ckpt = gr.Textbox(show_label=False)
                out_flax = gr.Textbox(show_label=False)
                out_torrent = gr.Textbox(show_label=False)
                out_safetensors = gr.Textbox(show_label=False)
                out_delete_all = gr.Textbox(show_label=False)
            with gr.Row().style(equal_height=True):
                # Delete buttons
                btn_delete_pt = gr.Button("delete pt")
                btn_delete_torrent = gr.Button("delete torrent")
                btn_delete_ckpt = gr.Button("delete ckpt")
                btn_delete_flax = gr.Button("delete flax")
                btn_delete_safetensors = gr.Button("delete safetensors")
                # Delete all button
                btn_delete_all = gr.Button("delete all")
            # Delete click handlers
            btn_delete_pt.click(delete_pt, outputs=out_pt)
            btn_delete_ckpt.click(delete_ckpt, outputs=out_ckpt)
            btn_delete_flax.click(delete_flax, outputs=out_flax)
            btn_delete_safetensors.click(delete_safetensors, outputs=out_safetensors)
            btn_delete_torrent.click(delete_torrent, outputs=out_torrent)
            btn_delete_all.click(delete_all, outputs=out_delete_all)
    gr.Markdown(
        """
        ### download and push a file (ckpt, yaml, safetensors, etc.) <br />
        file_url = https://civitai.com/api/download/models/4224 or https://huggingface.co/prompthero/openjourney/resolve/main/mdjrny-v4.ckpt or https://drive.google.com/file/d/file-id/view?usp=share_link<br />
        file_name = openjourney.ckpt or openjourney.safetensors, whatever your file's extension is (ckpt, yaml, safetensors, etc.) <br />
        file_model_to = camenduru/openjourney <br />
        branch = file <br />
        token = get from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens) new token role=write <br />
        """)
    with gr.Group():
        with gr.Box():
            with gr.Row().style(equal_height=True):
                text_file_url_1 = gr.Textbox(show_label=False, max_lines=1, placeholder="file_url")
                text_file_name_1 = gr.Textbox(show_label=False, max_lines=1, placeholder="file_name ex file.ckpt")
                text_file_model_to = gr.Textbox(show_label=False, max_lines=1, placeholder="file_model_to")
                text_file_branch = gr.Textbox(show_label=False, value="file", max_lines=1, placeholder="file_branch")
                text_file_token = gr.Textbox(show_label=False, max_lines=1, placeholder="🤗 token")
                out_file = gr.Textbox(show_label=False)
            with gr.Row().style(equal_height=True):
                btn_download_file = gr.Button("Download file")
                btn_push_file_1 = gr.Button("Push file to 🤗")
                btn_delete_file = gr.Button("Delete file")
            btn_download_file.click(download_file, inputs=[text_file_url_1, text_file_name_1], outputs=out_file)
            btn_push_file_1.click(push_file_1, inputs=[text_file_model_to, text_file_token, text_file_branch], outputs=out_file)
            btn_delete_file.click(delete_file, outputs=out_file)
    gr.Markdown(
        """
        ### ckpt to pytorch
        ckpt_url = https://civitai.com/api/download/models/4224 or https://huggingface.co/prompthero/openjourney/resolve/main/mdjrny-v4.ckpt or https://drive.google.com/file/d/file-id/view?usp=share_link<br />
        pt_model_to = camenduru/openjourney <br />
        branch = main <br />
        token = get from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens) new token role=write
        """)
    with gr.Group():
        with gr.Box():
            with gr.Row().style(equal_height=True):
                text_ckpt_url = gr.Textbox(show_label=False, max_lines=1, placeholder="ckpt_url")
                text_pt_model_to = gr.Textbox(show_label=False, max_lines=1, placeholder="pt_model_to")
                text_pt_branch = gr.Textbox(show_label=False, value="main", max_lines=1, placeholder="branch")
                text_pt_token = gr.Textbox(show_label=False, max_lines=1, placeholder="🤗 token")
                out_pt = gr.Textbox(show_label=False)
            with gr.Row().style(equal_height=True):
                btn_download_ckpt = gr.Button("Download CKPT")
                btn_to_pt = gr.Button("Convert to PT")
                btn_push_pt = gr.Button("Push PT to 🤗")
                btn_delete_pt = gr.Button("Delete PT")
            btn_download_ckpt.click(download_ckpt, inputs=[text_ckpt_url], outputs=out_pt)
            btn_to_pt.click(to_pt, outputs=out_pt)
            btn_push_pt.click(push_pt, inputs=[text_pt_model_to, text_pt_token, text_pt_branch], outputs=out_pt)
            btn_delete_pt.click(delete_pt, outputs=out_pt)
    gr.Markdown(
        """
        ### pytorch to flax <br />
        pt_model_from = prompthero/openjourney <br />
        flax_model_to = camenduru/openjourney <br />
        branch = flax <br />
        token = get from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens) new token role=write <br />
        """)
    with gr.Group():
        with gr.Box():
            with gr.Row().style(equal_height=True):
                text_pt_model_from = gr.Textbox(show_label=False, max_lines=1, placeholder="pt_model_from")
                text_flax_model_to = gr.Textbox(show_label=False, max_lines=1, placeholder="flax_model_to")
                text_flax_branch = gr.Textbox(show_label=False, value="flax", max_lines=1, placeholder="flax_branch")
                text_flax_token = gr.Textbox(show_label=False, max_lines=1, placeholder="🤗 token")
                out_flax = gr.Textbox(show_label=False)
            with gr.Row().style(equal_height=True):
                btn_clone_pt = gr.Button("Clone PT from 🤗")
                btn_to_flax = gr.Button("Convert to Flax")
                btn_push_flax = gr.Button("Push Flax to 🤗")
                btn_delete_flax = gr.Button("Delete Flax")
            btn_clone_pt.click(clone_pt, inputs=[text_pt_model_from], outputs=out_flax)
            btn_to_flax.click(to_flax, outputs=out_flax)
            btn_push_flax.click(push_flax, inputs=[text_flax_model_to, text_flax_token, text_flax_branch], outputs=out_flax)
            btn_delete_flax.click(delete_flax, outputs=out_flax)
    gr.Markdown(
        """
        ### pytorch to ckpt
        pt_model_from = prompthero/openjourney <br />
        ckpt_name = openjourney <br />
        ckpt_model_to = camenduru/openjourney <br />
        branch = ckpt <br />
        token = get from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens) new token role=write
        """)
    with gr.Group():
        with gr.Box():
            with gr.Row().style(equal_height=True):
                text_pt_model_from = gr.Textbox(show_label=False, max_lines=1, placeholder="pt_model_from")
                text_ckpt_name = gr.Textbox(show_label=False, max_lines=1, placeholder="ckpt_name")
                text_ckpt_model_to = gr.Textbox(show_label=False, max_lines=1, placeholder="ckpt_model_to")
                text_ckpt_branch = gr.Textbox(show_label=False, value="ckpt", max_lines=1, placeholder="ckpt_branch")
                text_ckpt_token = gr.Textbox(show_label=False, max_lines=1, placeholder="🤗 token")
                out_ckpt = gr.Textbox(show_label=False)
            with gr.Row().style(equal_height=True):
                btn_clone_pt = gr.Button("Clone PT from 🤗")
                btn_to_ckpt = gr.Button("Convert to CKPT")
                btn_push_ckpt = gr.Button("Push CKPT to 🤗")
                btn_delete_ckpt = gr.Button("Delete CKPT")
            btn_clone_pt.click(clone_pt, inputs=[text_pt_model_from], outputs=out_ckpt)
            btn_to_ckpt.click(to_ckpt, inputs=[text_ckpt_name], outputs=out_ckpt)
            btn_push_ckpt.click(push_ckpt, inputs=[text_ckpt_model_to, text_ckpt_token, text_ckpt_branch], outputs=out_ckpt)
            btn_delete_ckpt.click(delete_ckpt, outputs=out_ckpt)
    gr.Markdown(
        """
        ### ckpt to safetensors <br />
        ckpt_url = https://civitai.com/api/download/models/4224 or https://huggingface.co/prompthero/openjourney/resolve/main/mdjrny-v4.ckpt or https://drive.google.com/file/d/file-id/view?usp=share_link<br />
        safetensors_name = openjourney <br />
        safetensors_model_to = camenduru/openjourney <br />
        branch = safetensors <br />
        token = get from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens) new token role=write <br />
        """)
    with gr.Group():
        with gr.Box():
            with gr.Row().style(equal_height=True):
                text_ckpt_url_2 = gr.Textbox(show_label=False, max_lines=1, placeholder="ckpt_url_2")
                text_ckpt_name_2 = gr.Textbox(show_label=False, max_lines=1, placeholder="ckpt_name_2")
                text_safetensors_name_2 = gr.Textbox(show_label=False, max_lines=1, placeholder="safetensors_name_2")
                text_safetensors_model_to = gr.Textbox(show_label=False, max_lines=1, placeholder="safetensors_model_to")
                text_safetensors_branch = gr.Textbox(show_label=False, value="safetensors", max_lines=1, placeholder="safetensors_branch")
                text_safetensors_token = gr.Textbox(show_label=False, max_lines=1, placeholder="🤗 token")
                out_safetensors = gr.Textbox(show_label=False)
            with gr.Row().style(equal_height=True):
                btn_download_ckpt_1 = gr.Button("Download CKPT")
                btn_to_safetensors = gr.Button("Convert to Safetensors")
                btn_push_safetensors = gr.Button("Push Safetensors to 🤗")
                btn_delete_safetensors = gr.Button("Delete Safetensors")
            btn_download_ckpt_1.click(download_ckpt_1, inputs=[text_ckpt_url_2, text_ckpt_name_2], outputs=out_safetensors)
            btn_to_safetensors.click(to_safetensors, inputs=[text_ckpt_name_2, text_safetensors_name_2], outputs=out_safetensors)
            btn_push_safetensors.click(push_safetensors, inputs=[text_safetensors_model_to, text_safetensors_token, text_safetensors_branch], outputs=out_safetensors)
            btn_delete_safetensors.click(delete_safetensors, outputs=out_safetensors)
    gr.Markdown(
        """
        ### safetensors to ckpt <br />
        safetensors_url = https://civitai.com/api/download/models/4224 or https://huggingface.co/prompthero/openjourney/resolve/main/mdjrny-v4.ckpt or https://drive.google.com/file/d/file-id/view?usp=share_link<br />
        ckpt_name = openjourney <br />
        ckpt_model_to = camenduru/openjourney <br />
        branch = ckpt <br />
        token = get from [https://huggingface.co/settings/tokens](https://huggingface.co/settings/tokens) new token role=write <br />
        """)
    with gr.Group():
        with gr.Box():
            with gr.Row().style(equal_height=True):
                text_safetensors_url = gr.Textbox(show_label=False, max_lines=1, placeholder="safetensors_url")
                text_safetensors_name = gr.Textbox(show_label=False, max_lines=1, placeholder="safetensors_name")
                text_safetensors_to_ckpt_name = gr.Textbox(show_label=False, max_lines=1, placeholder="ckpt_name")
                text_safetensors_to_ckpt_model_to = gr.Textbox(show_label=False, max_lines=1, placeholder="ckpt_model_to")
                text_safetensors_to_ckpt_branch = gr.Textbox(show_label=False, value="ckpt", max_lines=1, placeholder="ckpt_branch")
                text_safetensors_to_ckpt_token = gr.Textbox(show_label=False, max_lines=1, placeholder="🤗 token")
                out_safetensors_to_ckpt = gr.Textbox(show_label=False)
            with gr.Row().style(equal_height=True):
                btn_download_safetensors = gr.Button("Download Safetensors")
                btn_safetensors_to_ckpt = gr.Button("Convert to CKPT")
                btn_push_safetensors_to_ckpt = gr.Button("Push CKPT to 🤗")
                btn_delete_safetensors_ckpt = gr.Button("Delete CKPT")
            btn_download_safetensors.click(download_safetensors, inputs=[text_safetensors_url, text_safetensors_name], outputs=out_safetensors_to_ckpt)
            btn_safetensors_to_ckpt.click(from_safetensors_to_ckpt, inputs=[text_safetensors_name, text_safetensors_to_ckpt_name], outputs=out_safetensors_to_ckpt)
            btn_push_safetensors_to_ckpt.click(push_ckpt, inputs=[text_safetensors_to_ckpt_model_to, text_safetensors_to_ckpt_token, text_safetensors_to_ckpt_branch], outputs=out_safetensors_to_ckpt)
            btn_delete_safetensors_ckpt.click(delete_ckpt, outputs=out_safetensors_to_ckpt)

block.launch()