#!/usr/bin/env python3
import json

import huggingface_hub
from bs4 import BeautifulSoup
from huggingface_hub import HfApi, get_repo_discussions, hf_hub_download
repo_id = "stabilityai/stable-diffusion"
repo_id = "huggingface-projects/diffuse-the-rest"
discussions_list = list(get_repo_discussions(repo_id=repo_id, repo_type="space"))
all_data = []
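# Collect the prompt (discussion title) and generated-image URLs from the first 10 discussions.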
for i, disc in enumerate(discussions_list[:10]):
    # Re-fetch the discussion to get its full event history (the listing omits it).
    disc = huggingface_hub.get_discussion_details(repo_id=repo_id, repo_type="space", discussion_num=disc.num)
    # Parse the opening comment's raw body for embedded <img> tags.
    page = BeautifulSoup(disc.events[0]._event["data"]["latest"]["raw"], "html.parser")
    image_urls = [link.get("src") for link in page.find_all("img")]
    data = {
        "discussion_number": i,
        "data": {
            "prompt": disc.title,
            "images": image_urls,
        },
    }
    # Keep only discussions whose opening comment contains at least one image.
    if image_urls:
        all_data.append(data)
dataset_repo_id = "triple-t/dummy"
repo_id = "huggingface-projects/diffuse-the-rest"
file_name = "_".join(repo_id.split("/")) + ".json"
api = HfApi()
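# Fetch the current version of the JSON file from the dataset repo.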
path = hf_hub_download(repo_id=dataset_repo_id, filename=file_name, cache_dir="/home/patrick_huggingface_co/image_cache", repo_type="dataset")
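# Write the fresh scrape into the downloaded file before re-uploading
# (assumption: the dataset file should simply be replaced, not merged).
with open(path, "w") as f:
    json.dump(all_data, f)
# Push the updated file back to the dataset repo.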
api.upload_file(
path_or_fileobj=path,
path_in_repo=file_name,
repo_id=dataset_repo_id,
repo_type="dataset",
)