OmPrakashSingh1704
committed on
Commit • 3369adf
1 Parent(s): dfcffbf
Update app.py
app.py CHANGED
@@ -119,7 +119,7 @@ def generate_recipe(user_inputs):
     text = f"{prompt}\n\nTools:\n{tool_section}"
 
     # Tokenize and move to the correct device
-    model_inputs = tokenizer(
+    model_inputs = tokenizer(prompt, return_tensors="pt")
     # text = tokenizer.apply_chat_template(
     #     messages,
     #     tokenize=False,
@@ -132,15 +132,15 @@ def generate_recipe(user_inputs):
     torch.cuda.empty_cache()
     with torch.no_grad():
         generated_ids = model.generate(
-            model_inputs
+            **model_inputs,
             max_new_tokens=512,
         )
 
-    generated_ids = [
-        output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
-    ]
+    # generated_ids = [
+    #     output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
+    # ]
 
-    st.session_state.recipe = tokenizer.
+    st.session_state.recipe = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
     st.session_state.recipe_saved = False
 
 def clear_inputs():
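For context, a minimal self-contained sketch of the generation flow the updated app.py lands on, using the Hugging Face transformers API. The model name, prompt, and the explicit .to(model.device) move are illustrative assumptions: the diff's comment says "Tokenize and move to the correct device", but the device move itself is not visible in the changed lines.

# Hypothetical stand-alone sketch of the flow in the updated app.py.
# Model name and prompt are placeholders, not values from the Space.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "gpt2"  # assumption: the Space's actual model is not shown in this diff
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

prompt = "Write a recipe using eggs, spinach, and feta."  # placeholder prompt

# As in the commit: tokenize the prompt into a dict of tensors,
# then (per the diff's comment) move them to the model's device.
model_inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

torch.cuda.empty_cache()  # no-op when CUDA is not initialized
with torch.no_grad():
    # **model_inputs unpacks input_ids and attention_mask as keyword
    # arguments; passing the dict positionally, as the old code did, fails.
    generated_ids = model.generate(
        **model_inputs,
        max_new_tokens=512,
    )

# Decode the first sequence. Note that generated_ids still contains the prompt
# tokens, because the commit leaves the prompt-stripping slice commented out.
recipe = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
print(recipe)

Re-enabling the commented-out slice (output_ids[len(input_ids):] for each pair in zip(model_inputs.input_ids, generated_ids)) would drop the echoed prompt so that only the newly generated tokens are decoded.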