Commit 8bedfed • 1 Parent(s): e5f64a0
docs: Update local deployment info and getting the right HF token
src/distilabel_dataset_generator/apps/faq.py
CHANGED
@@ -29,7 +29,7 @@ with gr.Blocks() as app:
 
             <h4 style="text-align: center;">Can I run this locally?</h4>
 
-            <p>Yes, you can run this locally by <a href="https://huggingface.co/spaces/argilla/synthetic-data-generator?clone=true" target="_blank">cloning the Space</a> and installing the requirements with `pip install -r requirements.txt` and running `python app.py`. Alternatively, you can install the <a href="https://github.com/argilla-io/distilabel" target="_blank">distilabel library</a> with `pip install distilabel[hf-inference-endpoints]` and use the pipeline code at the bottom of each application tab. Distilabel also supports running the pipeline with <a href="https://distilabel.argilla.io/latest/components-gallery/llms/" target="_blank">other LLMs</a
+            <p>Yes, you can run this locally by <a href="https://huggingface.co/spaces/argilla/synthetic-data-generator?clone=true" target="_blank">cloning the Space</a> and installing the requirements with `pip install -r requirements.txt` and running `python app.py`. Alternatively, you can install the <a href="https://github.com/argilla-io/distilabel" target="_blank">distilabel library</a> with `pip install distilabel[hf-inference-endpoints]` and use the pipeline code at the bottom of each application tab. Distilabel also supports running the pipeline with <a href="https://distilabel.argilla.io/latest/components-gallery/llms/" target="_blank">other LLMs</a>. Do make sure to get a valid <a href="https://huggingface.co/settings/tokens/new?ownUserPermissions=repo.content.read&ownUserPermissions=repo.write&globalPermissions=inference.serverless.write&canReadGatedRepos=true&tokenType=fineGrained" target="_blank">Hugging Face Token</a> that allows for calling serverless inference endpoints and create datasets on the Hugging Face Hub.</p>
 
             <h4 style="text-align: center;">What is distilabel?</h4>
 
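Not part of the commit itself, but to illustrate the token requirement described in the FAQ paragraph above, here is a minimal sketch (assuming the huggingface_hub package is installed; the check_token helper name is hypothetical) that verifies HF_TOKEN is set and valid before running the app or a pipeline locally:

# Hypothetical helper, not part of the Space: sanity-check the fine-grained
# Hugging Face token before running the app or a distilabel pipeline locally.
import os

from huggingface_hub import HfApi


def check_token() -> None:
    token = os.environ.get("HF_TOKEN")
    if not token:
        raise RuntimeError(
            "Set HF_TOKEN to a fine-grained token (see the token URL in the FAQ above)."
        )
    # whoami() raises an HTTP error if the token is invalid or revoked.
    user = HfApi(token=token).whoami()
    print(f"Authenticated on the Hugging Face Hub as: {user['name']}")


if __name__ == "__main__":
    check_token()

A failed whoami() call surfaces authentication problems up front, before a long pipeline run or dataset push fails halfway through.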
src/distilabel_dataset_generator/apps/sft.py
CHANGED
@@ -386,6 +386,9 @@ with gr.Blocks(
     return gr.Markdown(visible=False)
 
 gr.Markdown("## Or run this pipeline locally with distilabel")
+gr.Markdown(
+    "You can run this pipeline locally with distilabel. For more information, please refer to the [distilabel documentation](https://distilabel.argilla.io/) or go to the FAQ tab at the top of the page for more information."
+)
 
 with gr.Accordion(
     "Run this pipeline using distilabel",
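As context for this hunk, a self-contained sketch of the Gradio pattern being extended: a Markdown note pointing to the docs and FAQ, followed by an accordion holding the runnable pipeline code. The accordion body below is a placeholder, not the app's real pipeline snippet.

import gradio as gr

# Minimal sketch of the pattern in apps/sft.py: a Markdown pointer to the docs,
# then an accordion with the code users can copy to run the pipeline locally.
with gr.Blocks() as demo:
    gr.Markdown("## Or run this pipeline locally with distilabel")
    gr.Markdown(
        "You can run this pipeline locally with distilabel. For more information, "
        "please refer to the [distilabel documentation](https://distilabel.argilla.io/) "
        "or go to the FAQ tab at the top of the page for more information."
    )
    with gr.Accordion("Run this pipeline using distilabel", open=False):
        gr.Code(value='print("pipeline code goes here")', language="python")

if __name__ == "__main__":
    demo.launch()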
src/distilabel_dataset_generator/pipelines/sft.py
CHANGED
@@ -180,7 +180,7 @@ from distilabel.llms import InferenceEndpointsLLM
 
 MODEL = "{MODEL}"
 SYSTEM_PROMPT = "{system_prompt}"
-os.environ["HF_TOKEN"] = "hf_xxx"  # https://huggingface.co/settings/tokens/new?globalPermissions=inference.serverless.write&tokenType=fineGrained
+os.environ["HF_TOKEN"] = "hf_xxx"  # https://huggingface.co/settings/tokens/new?ownUserPermissions=repo.content.read&ownUserPermissions=repo.write&globalPermissions=inference.serverless.write&canReadGatedRepos=true&tokenType=fineGrained
 
 with Pipeline(name="sft") as pipeline:
     magpie = MagpieGenerator(
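The hunk only shows the first lines of the generated snippet. As a rough sketch of how the full local run fits together, assuming the distilabel API at the time of this commit; the model id, generation kwargs, row count, and repo id below are illustrative placeholders, not values from the commit:

import os

from distilabel.llms import InferenceEndpointsLLM
from distilabel.pipeline import Pipeline
from distilabel.steps.tasks import MagpieGenerator

MODEL = "meta-llama/Meta-Llama-3.1-8B-Instruct"  # stands in for the "{MODEL}" placeholder
SYSTEM_PROMPT = "You are a helpful assistant."   # stands in for "{system_prompt}"
# Fine-grained token with serverless inference and repo write permissions,
# created via the URL in the comment above.
os.environ["HF_TOKEN"] = "hf_xxx"

with Pipeline(name="sft") as pipeline:
    magpie = MagpieGenerator(
        llm=InferenceEndpointsLLM(
            model_id=MODEL,
            tokenizer_id=MODEL,
            magpie_pre_query_template="llama3",  # assumed template for Llama-3 style models
            generation_kwargs={"temperature": 0.8, "max_new_tokens": 1024},
        ),
        system_prompt=SYSTEM_PROMPT,
        n_turns=1,    # single-turn instruction/response pairs
        num_rows=10,  # small row count for a local smoke test
    )

if __name__ == "__main__":
    distiset = pipeline.run()
    # Pushing the resulting dataset to the Hub is why the token also needs repo write access.
    distiset.push_to_hub(repo_id="my-username/sft-smoke-test")  # hypothetical repo id

The serverless inference call and the final push_to_hub are exactly the two capabilities the fine-grained token URL in this commit grants, which is why the old inference-only token link was replaced.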