from transformers import AutoModelForCausalLM, AutoTokenizer

# Model checkpoints to load
model_checkpoints = [
    "HuggingFaceTB/SmolLM-135M-Instruct",
    "HuggingFaceTB/SmolLM-360M",
    "HuggingFaceTB/SmolLM-360M-Instruct",
    "HuggingFaceTB/SmolLM-1.7B",
    "HuggingFaceTB/SmolLM-1.7B-Instruct",
    "Qwen/Qwen2-1.5B-Instruct",
    "Qwen/Qwen2-0.5B"
]

# Dictionary mapping each checkpoint name to its (model, tokenizer) pair
models_and_tokenizers = {}

# Load each model and its tokenizer; note that keeping all of these
# checkpoints in memory at once requires several GB of RAM
for checkpoint in model_checkpoints:
    model = AutoModelForCausalLM.from_pretrained(checkpoint)
    tokenizer = AutoTokenizer.from_pretrained(checkpoint)
    models_and_tokenizers[checkpoint] = (model, tokenizer)
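
# --- Usage sketch (illustrative, not part of the original file) ---
# A minimal example of querying one of the loaded instruct checkpoints.
# The checkpoint choice, prompt, and generation settings below are
# assumptions for demonstration, not values from the original.
checkpoint = "HuggingFaceTB/SmolLM-135M-Instruct"
model, tokenizer = models_and_tokenizers[checkpoint]

# Format the prompt with the model's chat template before generating
messages = [{"role": "user", "content": "What is gravity?"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)
output_ids = model.generate(input_ids, max_new_tokens=50)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))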