OmPrakashSingh1704 committed on
Commit dfcffbf
1 Parent(s): dd6553b

Update app.py

Files changed (1): app.py (+12 -7)
app.py CHANGED
@@ -115,15 +115,20 @@ def generate_recipe(user_inputs):
    }
    prompt = create_detailed_prompt(user_inputs['user_direction'], user_inputs['exclusions'], user_inputs['serving_size'], user_inputs['difficulty'])
    messages = [{"role": "user", "content": prompt}]
-   text = tokenizer.apply_chat_template(
-       messages,
-       tokenize=False,
-       add_generation_prompt=True,
-       tools=[provide_recipe_schema]
-   )
-
+   tool_section = "\n".join([f"{tool['function']['name']}({json.dumps(tool['function']['parameters'])})" for tool in [provide_recipe_schema]])
+   text = f"{prompt}\n\nTools:\n{tool_section}"
+
    # Tokenize and move to the correct device
    model_inputs = tokenizer([text], return_tensors="pt")
+   # text = tokenizer.apply_chat_template(
+   #     messages,
+   #     tokenize=False,
+   #     add_generation_prompt=True,
+   #     tools=[provide_recipe_schema]
+   # )
+
+   # Tokenize and move to the correct device
+   # model_inputs = tokenizer([text], return_tensors="pt")
    torch.cuda.empty_cache()
    with torch.no_grad():
        generated_ids = model.generate(
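
For context, this commit swaps the tokenizer's built-in chat-template tool handling for a hand-rolled prompt; a likely motivation is that apply_chat_template(..., tools=...) raises on chat templates that don't define a tools block. Below is a minimal, self-contained sketch of the new code path; the tool schema, prompt text, model checkpoint, and generation settings are illustrative assumptions, not taken from the repo.

import json
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical OpenAI-style tool definition, shaped the way the diff indexes it
# (tool['function']['name'] and tool['function']['parameters']).
provide_recipe_schema = {
    "function": {
        "name": "provide_recipe",
        "parameters": {
            "type": "object",
            "properties": {
                "title": {"type": "string"},
                "ingredients": {"type": "array", "items": {"type": "string"}},
                "steps": {"type": "array", "items": {"type": "string"}},
            },
        },
    }
}

checkpoint = "Qwen/Qwen2-1.5B-Instruct"  # placeholder, not the app's actual model
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)

# Stands in for create_detailed_prompt(...) from the repo.
prompt = "Write an easy vegetarian pasta recipe for 2 servings."

# The commit's fallback: render each tool as name(parameters-as-JSON) and
# append the list to the prompt instead of passing tools= to the chat template.
tool_section = "\n".join(
    f"{tool['function']['name']}({json.dumps(tool['function']['parameters'])})"
    for tool in [provide_recipe_schema]
)
text = f"{prompt}\n\nTools:\n{tool_section}"

# Tokenize, move to the model's device, and generate.
model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
torch.cuda.empty_cache()
with torch.no_grad():
    generated_ids = model.generate(**model_inputs, max_new_tokens=512)
print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))

Note the trade-off: the manual Tools section keeps generation working on any template, but it gives up the model-specific tool formatting that apply_chat_template would produce, so the model may be less reliable at emitting structured tool calls.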