app.py
CHANGED
@@ -1,19 +1,10 @@
 import gradio as gr
-from transformers import AutoTokenizer
-from transformers.utils import logging
+from transformers import AutoTokenizer, AutoModel
 
-#
-logging.set_verbosity_info()
-
-# Import custom configuration and model classes
-from transformers_modules.deepseek_ai.DeepSeek_R1.configuration_deepseek import DeepseekV3Config
-from transformers_modules.deepseek_ai.DeepSeek_R1.modeling_deepseek import DeepseekV3Model
-
-# Load model and tokenizer
+# Load the model and tokenizer with `trust_remote_code=True`
 model_name = "deepseek-ai/DeepSeek-R1"
 tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
-
-model = DeepseekV3Model.from_pretrained(model_name, config=config, trust_remote_code=True)
+model = AutoModel.from_pretrained(model_name, trust_remote_code=True)
 
 def classify_text(input_text):
     # Tokenize input
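
Note on the new loading path: below is a minimal sketch of how the revised imports could be exercised end to end. It assumes a PyTorch backend and enough memory to actually hold the DeepSeek-R1 checkpoint; the sample input string and the final forward pass are illustrative only, since the real body of classify_text is not shown in this hunk.

import torch
from transformers import AutoModel, AutoTokenizer

model_name = "deepseek-ai/DeepSeek-R1"

# trust_remote_code=True lets transformers run the custom DeepseekV3 classes
# shipped with the model repository, so the explicit transformers_modules
# imports from the old version are no longer needed.
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model = AutoModel.from_pretrained(model_name, trust_remote_code=True)

# Illustrative forward pass (not part of the diff): encode a string and
# inspect the last hidden state produced by the base model.
inputs = tokenizer("Hello from the updated app", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
print(outputs.last_hidden_state.shape)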