Hjgugugjhuhjggg committed
Commit ca61f16 · verified · 1 Parent(s): e72c053

Update app.py

Files changed (1):
app.py (+17 -2)
app.py CHANGED
@@ -10,7 +10,8 @@ from transformers import (
     AutoConfig,
     AutoModelForCausalLM,
     AutoTokenizer,
-    GenerationConfig
+    GenerationConfig,
+    pipeline
 )
 import boto3
 import torch
@@ -57,6 +58,13 @@ class S3ModelLoader:
             config = AutoConfig.from_pretrained(f"s3://{self.bucket_name}/{model_name}")
             model = AutoModelForCausalLM.from_pretrained(f"s3://{self.bucket_name}/{model_name}", config=config)
             tokenizer = AutoTokenizer.from_pretrained(f"s3://{self.bucket_name}/{model_name}")
+
+            # Make sure `eos_token_id` and `pad_token_id` are defined
+            if model.config.eos_token_id is None:
+                model.config.eos_token_id = tokenizer.eos_token_id
+            if model.config.pad_token_id is None:
+                model.config.pad_token_id = tokenizer.pad_token_id
+
             return model, tokenizer
         except Exception:
             return None, None
@@ -72,9 +80,16 @@ class S3ModelLoader:
 
     async def download_and_save_model_from_huggingface(self, model_name):
         try:
-            # Removed tqdm so the download shows no progress bar
+            # Download the model without tqdm
             model = AutoModelForCausalLM.from_pretrained(model_name, use_auth_token=HUGGINGFACE_HUB_TOKEN)
             tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=HUGGINGFACE_HUB_TOKEN)
+
+            # Make sure `eos_token_id` and `pad_token_id` are defined
+            if model.config.eos_token_id is None:
+                model.config.eos_token_id = tokenizer.eos_token_id
+            if model.config.pad_token_id is None:
+                model.config.pad_token_id = tokenizer.pad_token_id
+
             self.upload_model_to_s3(model_name, model, tokenizer)
             return model, tokenizer
         except Exception as e:
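
Context note (not part of the commit): the hunks above add `GenerationConfig` and `pipeline` to the imports and set `eos_token_id`/`pad_token_id` on the model config, but the code that consumes them is not visible in this diff. The following is a minimal, hypothetical sketch of how the loaded model and tokenizer could be wired into a text-generation pipeline under those assumptions; the model name and generation settings are illustrative, not taken from app.py.

# Hypothetical usage sketch; names and settings are assumptions, not app.py code.
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig, pipeline

model_name = "gpt2"  # placeholder model; app.py loads models from S3 or the Hub
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Same fallback as the commit: generate() warns or fails when these ids are unset.
if model.config.eos_token_id is None:
    model.config.eos_token_id = tokenizer.eos_token_id
if model.config.pad_token_id is None:
    model.config.pad_token_id = tokenizer.pad_token_id or tokenizer.eos_token_id

# Attach an explicit GenerationConfig and run a text-generation pipeline.
model.generation_config = GenerationConfig(
    max_new_tokens=50,
    do_sample=True,
    temperature=0.7,
    eos_token_id=model.config.eos_token_id,
    pad_token_id=model.config.pad_token_id,
)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
print(generator("Hello, world")[0]["generated_text"])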