Commit f1ce634 (parent: 00daf64)
Update src/distilabel_dataset_generator/pipelines/sft.py
--- a/src/distilabel_dataset_generator/pipelines/sft.py
+++ b/src/distilabel_dataset_generator/pipelines/sft.py
@@ -114,7 +114,7 @@ The prompt you write should follow the same style and structure as the following
 User dataset description:
 """
 
-MODEL = "meta-llama/Meta-Llama-3.1-
+MODEL = "meta-llama/Meta-Llama-3.1-70B-Instruct"
 DEFAULT_DATASET_DESCRIPTIONS = (
     "A chemistry dataset for an assistant that explains chemical reactions and formulas.",
     "A dataset for an assistant that work in the customer support domain.",
@@ -270,7 +270,6 @@ def get_prompt_generation_step():
     global TOKEN_INDEX
     api_key = HF_TOKENS[TOKEN_INDEX % len(HF_TOKENS)]
     TOKEN_INDEX += 1
-    MODEL = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     generate_description = TextGeneration(
         llm=InferenceEndpointsLLM(
             api_key=api_key,
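Net effect of the commit: MODEL = "meta-llama/Meta-Llama-3.1-70B-Instruct" now lives at module level, next to the prompt template and DEFAULT_DATASET_DESCRIPTIONS, instead of being re-assigned inside get_prompt_generation_step(). Below is a minimal sketch of how the step plausibly reads after the change; only the api_key line and the TextGeneration/InferenceEndpointsLLM nesting are visible in these hunks, so the model_id/tokenizer_id keywords and the placeholder HF_TOKENS values are assumptions, not the file's actual contents.

from distilabel.llms import InferenceEndpointsLLM
from distilabel.steps.tasks import TextGeneration

# Constant hoisted to module level by this commit (see the + side of the first hunk).
MODEL = "meta-llama/Meta-Llama-3.1-70B-Instruct"

# Placeholder values; the real HF_TOKENS list and TOKEN_INDEX counter are defined
# elsewhere in the module.
HF_TOKENS = ["hf_xxx", "hf_yyy"]
TOKEN_INDEX = 0


def get_prompt_generation_step():
    global TOKEN_INDEX
    # Rotate round-robin over the available Hugging Face tokens, as in the diff.
    api_key = HF_TOKENS[TOKEN_INDEX % len(HF_TOKENS)]
    TOKEN_INDEX += 1
    generate_description = TextGeneration(
        llm=InferenceEndpointsLLM(
            api_key=api_key,
            model_id=MODEL,      # assumption: the LLM now reads the module-level constant
            tokenizer_id=MODEL,  # assumption: keyword not visible in these hunks
        ),
    )
    return generate_description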