ThomasBlumet committed on
Commit
1e59ffd
·
1 Parent(s): f694567

change model

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -1,7 +1,7 @@
1
  from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
2
  from transformers.utils import logging
3
  import gradio as gr
4
- import spaces
5
 
6
  # Define the logger instance for the transformers library
7
  logger = logging.get_logger("transformers")
@@ -23,7 +23,7 @@ model = AutoModelForCausalLM.from_pretrained(model_name,device_map="auto",trust_
23
  # repetition_penalty=1.1)
24
 
25
  # Generate text using the model and tokenizer
26
- @spaces.GPU(duration=60)
27
  def generate_text(input_text):
28
  input_ids = tokenizer.encode(input_text, return_tensors="pt")#.to("cuda")
29
  #attention_mask = input_ids.ne(tokenizer.pad_token_id).long()
 
1
  from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
2
  from transformers.utils import logging
3
  import gradio as gr
4
+ #import spaces
5
 
6
  # Define the logger instance for the transformers library
7
  logger = logging.get_logger("transformers")
 
23
  # repetition_penalty=1.1)
24
 
25
  # Generate text using the model and tokenizer
26
+ #@spaces.GPU(duration=60)
27
  def generate_text(input_text):
28
  input_ids = tokenizer.encode(input_text, return_tensors="pt")#.to("cuda")
29
  #attention_mask = input_ids.ne(tokenizer.pad_token_id).long()