Update README.md
README.md CHANGED

@@ -121,10 +121,10 @@ GSM8K, MATH, HumanEval, BBH, and other datasets; the evaluated capabilities include instruction following
 >>> import os
 >>> import torch
 >>> from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
->>> tokenizer = AutoTokenizer.from_pretrained('
->>> model = AutoModelForCausalLM.from_pretrained('
+>>> tokenizer = AutoTokenizer.from_pretrained('chuhac/TeleChat2-115B', trust_remote_code=True)
+>>> model = AutoModelForCausalLM.from_pretrained('chuhac/TeleChat2-115B', trust_remote_code=True, device_map="auto",
                                                  torch_dtype=torch.float16)
->>> generate_config = GenerationConfig.from_pretrained('
+>>> generate_config = GenerationConfig.from_pretrained('chuhac/TeleChat2-115B')
 >>> question = "生抽与老抽的区别?"
 >>> answer, history = model.chat(tokenizer=tokenizer, question=question, history=[], generation_config=generate_config,
                                  stream=False)
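For reference, the updated README example reads as a single script roughly as follows. This is a minimal consolidated sketch of the added lines, assuming `chuhac/TeleChat2-115B` is the Hugging Face Hub repository id used above and that the `chat` method comes from the model's own remote code loaded via `trust_remote_code=True` (it is not part of the core transformers API); the trailing `print` is added here only for illustration.

```python
# Consolidated form of the updated snippet (a sketch; the `chat` method and
# its signature are provided by the model's remote code, not by transformers).
import os
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

model_id = 'chuhac/TeleChat2-115B'

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_id,
                                             trust_remote_code=True,
                                             device_map="auto",
                                             torch_dtype=torch.float16)
generate_config = GenerationConfig.from_pretrained(model_id)

# Example prompt from the README: "What is the difference between light soy
# sauce (生抽) and dark soy sauce (老抽)?"
question = "生抽与老抽的区别?"
answer, history = model.chat(tokenizer=tokenizer,
                             question=question,
                             history=[],
                             generation_config=generate_config,
                             stream=False)
print(answer)  # added for illustration; not part of the README diff
```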