Update model.safetensors
model.safetensors CHANGED (+27 -17)
@@ -1,23 +1,33 @@
-import
+import torch
+from transformers import PreTrainedModel, PreTrainedTokenizerFast, PretrainedConfig
+from transformers.modeling_outputs import ModelOutput
 
-
-
-    "Banana!",
-    "Did someone say potato?",
-    "Beep boop... processing... error 404: brain not found",
-    "Is that a squirrel?"
-]
+class HelloWorldConfig(PretrainedConfig):
+    model_type = "hello-world"
 
-
-
-    "Tell me more about that.",
-    "That's fascinating!",
-    "What do you think about AI?",
-    "Have you ever seen a platypus?"
-])
+class HelloWorldModel(PreTrainedModel):
+    config_class = HelloWorldConfig
 
-
+    def __init__(self, config):
+        super().__init__(config)
 
-
+    def forward(self, *args, **kwargs):
+        return ModelOutput(logits=torch.tensor([[0]]), decoder_hidden_states=["Hello, world!"])
 
+tokenizer = PreTrainedTokenizerFast(tokenizer_file="tokenizer.json")
 
+# Dummy tokenizer configuration to work with the model
+tokenizer_config = {
+    "do_lower_case": False,
+    "model_max_length": 512,
+    "padding_side": "right",
+    "special_tokens_map_file": None,
+    "tokenizer_file": "tokenizer.json",
+    "unk_token": "<unk>",
+    "bos_token": "<s>",
+    "eos_token": "</s>"
+}
+
+with open("tokenizer.json", "w") as f:
+    import json
+    json.dump(tokenizer_config, f)
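For reference, a minimal usage sketch of the two classes added in this commit. It assumes the HelloWorldConfig and HelloWorldModel definitions above are already in scope, and that the installed transformers version lets the generic ModelOutput be instantiated with keyword arguments, as the forward() above relies on; the call pattern below is illustrative and not part of the commit.

    # Assumes HelloWorldConfig and HelloWorldModel from the diff above are in scope.
    config = HelloWorldConfig()
    model = HelloWorldModel(config)

    # forward() ignores its inputs and always returns the same ModelOutput,
    # so any call yields logits tensor([[0]]) and the string "Hello, world!".
    outputs = model(input_ids=None)
    print(outputs)

Because forward() never inspects its arguments, the tokenizer set up at the end of the script is not needed to obtain this output.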