Hjgugugjhuhjggg committed on
Update app.py
app.py CHANGED
```diff
@@ -3,25 +3,26 @@ import logging
 import time
 import threading
 import asyncio
-from io import BytesIO
-import requests
-import boto3
 import torch
+import boto3
 import safetensors
-import 
-import 
-from fastapi import FastAPI, HTTPException, Request, UploadFile, File
-from fastapi.responses import StreamingResponse
+from fastapi import FastAPI, HTTPException, Request
+from fastapi.responses import StreamingResponse, Response
 from pydantic import BaseModel, field_validator
 from transformers import (
     AutoConfig,
     AutoModelForCausalLM,
     AutoTokenizer,
     GenerationConfig,
-    StoppingCriteriaList
+    StoppingCriteriaList,
+    pipeline
 )
 from huggingface_hub import hf_hub_download
 import uvicorn
+import requests
+from io import BytesIO
+import numpy as np
+import soundfile as sf
 
 logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s")
 
```
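This hunk reorganizes the imports: requests, BytesIO, and boto3 move below the FastAPI imports, the fastapi import drops UploadFile and File while fastapi.responses gains Response, StoppingCriteriaList gets a trailing comma so that pipeline can be added to the transformers import, and numpy plus soundfile are new. (Two of the removed import lines are truncated in the rendered diff, so their module names are not recoverable here.) The numpy/soundfile/BytesIO/StreamingResponse combination suggests the app is adding audio output; as a hedged illustration only, with an endpoint that is not part of this commit, returning an in-memory WAV from FastAPI could look like this:

```python
# Hypothetical sketch, not part of this commit: how the newly imported
# numpy/soundfile/BytesIO/StreamingResponse could combine to return audio.
from io import BytesIO

import numpy as np
import soundfile as sf
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()

@app.get("/tone")
def tone(duration_s: float = 1.0, freq_hz: float = 440.0):
    sample_rate = 16000
    # Placeholder signal standing in for model-generated audio.
    t = np.linspace(0, duration_s, int(sample_rate * duration_s), endpoint=False)
    audio = (0.2 * np.sin(2 * np.pi * freq_hz * t)).astype(np.float32)

    # Encode the array as WAV in memory and stream it back.
    buffer = BytesIO()
    sf.write(buffer, audio, sample_rate, format="WAV")
    buffer.seek(0)
    return StreamingResponse(buffer, media_type="audio/wav")
```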
```diff
@@ -70,7 +71,6 @@ class S3ModelLoader:
         return f"s3://{self.bucket_name}/lilmeaty_garca/{model_name.replace('/', '-')}"
 
     def _download_from_s3(self, model_name):
-        s3_uri = self._get_s3_uri(model_name)
         try:
             logging.info(f"Attempting to load model {model_name} from S3...")
             model_files = self.s3_client.list_objects_v2(Bucket=self.bucket_name, Prefix=f"lilmeaty_garca/{model_name}")
```
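The only change to `_download_from_s3` is dropping the unused `s3_uri = self._get_s3_uri(model_name)` assignment; the method then lists objects under the `lilmeaty_garca/` prefix as before. The rest of the method sits outside this hunk, so the following is just a minimal sketch, with the local cache directory and function name assumed, of how such a loader might fetch the listed files with boto3:

```python
import logging
import os

def download_from_s3(s3_client, bucket_name, model_name, local_dir="/tmp/models"):
    """Sketch of an S3 model fetch: list objects under the prefix, then download each file."""
    prefix = f"lilmeaty_garca/{model_name}"
    response = s3_client.list_objects_v2(Bucket=bucket_name, Prefix=prefix)
    if "Contents" not in response:
        logging.info(f"No objects found under s3://{bucket_name}/{prefix}")
        return None

    target_dir = os.path.join(local_dir, model_name.replace("/", "-"))
    os.makedirs(target_dir, exist_ok=True)
    for obj in response["Contents"]:
        key = obj["Key"]
        local_path = os.path.join(target_dir, os.path.basename(key))
        s3_client.download_file(bucket_name, key, local_path)
    return target_dir
```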
```diff
@@ -109,7 +109,7 @@ class S3ModelLoader:
     def download_model_from_huggingface(self, model_name):
         try:
             logging.info(f"Downloading model {model_name} from Hugging Face...")
-            model_dir = hf_hub_download(model_name, token=HUGGINGFACE_HUB_TOKEN
+            model_dir = hf_hub_download(model_name, token=HUGGINGFACE_HUB_TOKEN)
             self.s3_client.upload_file(model_dir, self.bucket_name, f"lilmeaty_garca/{model_name}")
             logging.info(f"Model {model_name} saved to S3 successfully.")
         except Exception as e:
```
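This hunk only restores the closing parenthesis that was missing from the `hf_hub_download(...)` call. A hedged aside: `hf_hub_download` downloads a single file and requires a `filename` argument, so calling it with only the repo id would still fail at runtime, and `upload_file` expects a file path rather than a directory. Mirroring a whole repository to S3 is more commonly done with `snapshot_download` plus a walk over the snapshot; a sketch of that variant (bucket layout taken from the diff, everything else assumed):

```python
import logging
import os

from huggingface_hub import snapshot_download

def mirror_model_to_s3(s3_client, bucket_name, model_name, token=None):
    """Sketch: download a full HF repo, then upload each file under the lilmeaty_garca/ prefix."""
    logging.info(f"Downloading model {model_name} from Hugging Face...")
    local_dir = snapshot_download(model_name, token=token)

    # upload_file handles single files, so walk the snapshot directory.
    for root, _dirs, files in os.walk(local_dir):
        for name in files:
            local_path = os.path.join(root, name)
            rel_path = os.path.relpath(local_path, local_dir)
            key = f"lilmeaty_garca/{model_name}/{rel_path}"
            s3_client.upload_file(local_path, bucket_name, key)
    logging.info(f"Model {model_name} saved to S3 successfully.")
```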
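app.py also imports uvicorn, so the Space presumably serves the FastAPI app directly. A minimal, self-contained launch sketch (the route, host, and port below are assumptions; 7860 is the port Hugging Face Spaces conventionally exposes):

```python
import uvicorn
from fastapi import FastAPI

app = FastAPI()  # stands in for the app instance defined in app.py

@app.get("/health")
def health():
    # Trivial liveness route for the sketch; the real endpoints are defined in app.py.
    return {"status": "ok"}

if __name__ == "__main__":
    # Host and port are assumptions; app.py's actual entry point is not shown in this diff.
    uvicorn.run(app, host="0.0.0.0", port=7860)
```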