stmnk committed on
Commit 0aa1779 · 1 Parent(s): ca48b86

convert temp to float

Files changed (1)
  1. app.py +2 -1
app.py CHANGED
@@ -161,6 +161,7 @@ def pygen_func(nl_code_intent):
 # CT5_URL = "https://api-inference.huggingface.co/models/nielsr/codet5-small-code-summarization-ruby"
 
 def docgen_func(function_code, temp):
+    t = float(temp)
     req_data = {
         "inputs": function_code,
         "parameters": {
@@ -169,7 +170,7 @@ def docgen_func(function_code, temp):
             "top_k": 3, # (Default: None). Integer to define the top tokens considered within the sample operation to create new text.
             "top_p": 0.8, # (Default: None). Float to define the tokens that are within the sample` operation of text generation.
             # Add tokens in the sample for more probable to least probable until the sum of the probabilities is greater than top_p.
-            "temperature": temp, # (Default: 1.0). Float (0.0-100.0). The temperature of the sampling operation.
+            "temperature": t, # (Default: 1.0). Float (0.0-100.0). The temperature of the sampling operation.
             # 1 means regular sampling, 0 means top_k=1, 100.0 is getting closer to uniform probability.
             "repetition_penalty": 50.0, # (Default: None). Float (0.0-100.0). The more a token is used within generation
             # the more it is penalized to not be picked in successive generation passes.
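For context, a minimal sketch of how the patched docgen_func might build and send this payload to the Hugging Face Inference API. The endpoint is taken from the commented-out CT5_URL constant visible in the diff; the auth header, the requests.post call, and the response handling are assumptions, since the surrounding code is not part of this commit.

import requests

# Endpoint comes from the commented-out constant in the diff above;
# the Authorization header and token placeholder are assumptions.
CT5_URL = "https://api-inference.huggingface.co/models/nielsr/codet5-small-code-summarization-ruby"
HEADERS = {"Authorization": "Bearer <HF_API_TOKEN>"}

def docgen_func(function_code, temp):
    t = float(temp)  # UI widgets may pass the slider value as a string; the API expects a float
    req_data = {
        "inputs": function_code,
        "parameters": {
            "top_k": 3,
            "top_p": 0.8,
            "temperature": t,          # the value this commit converts to float
            "repetition_penalty": 50.0,
        },
    }
    # Assumed request/response handling; the real app.py may differ.
    response = requests.post(CT5_URL, headers=HEADERS, json=req_data)
    return response.json()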