chuhac commited on
Commit
9ae35fd
·
verified ·
1 Parent(s): 6d8f609

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +5 -6
README.md CHANGED
@@ -17,7 +17,7 @@ license_link: >-
17
 
18
 
19
  <p align="center">
20
- 🤗 <a href="https://huggingface.co/Tele-AI" target="_blank">Hugging Face</a> • 🤖 <a href="https://modelscope.cn/organization/TeleAI" target="_blank">ModelScope</a> • 🏔 <a href="https://gitee.com/mindspore/mindformers/tree/dev/research/telechat" target="_blank">MindSpore</a> • 🐾 <a href="https://gitee.com/Tele-AI/tele-chat" target="_blank">gitee</a> • 💬 <a href="https://github.com/Tele-AI/Telechat/blob/master/images/wechat.jpg" target="_blank">WeChat</a>
21
  </p>
22
 
23
  # 目录
@@ -119,13 +119,12 @@ GSM8K、MATH、HumanEval、BBH等数据集,评测能力包括了指令遵循
119
  ```python
120
  import os
121
  import torch
122
- from modelscope import snapshot_download
123
  from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
124
- model_dir =snapshot_download('TeleAI/TeleChat2-115B')
125
- tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)
126
- model = AutoModelForCausalLM.from_pretrained(model_dir, trust_remote_code=True, device_map="auto",
127
  torch_dtype=torch.float16)
128
- generate_config = GenerationConfig.from_pretrained(model_dir)
129
  question = "生抽与老抽的区别?"
130
  answer, history = model.chat(tokenizer=tokenizer, question=question, history=[], generation_config=generate_config,
131
  stream=False)
 
17
 
18
 
19
  <p align="center">
20
+ 🤗 <a href="https://huggingface.co/collections/chuhac/telechat2-67170814ae78266f697ea5e9" target="_blank">Hugging Face</a> • 🤖 <a href="https://modelscope.cn/organization/TeleAI" target="_blank">ModelScope</a> • 🏔 <a href="https://gitee.com/mindspore/mindformers/tree/dev/research/telechat" target="_blank">MindSpore</a> • 🐾 <a href="https://gitee.com/Tele-AI/tele-chat" target="_blank">gitee</a> • 💬 <a href="https://github.com/Tele-AI/Telechat/blob/master/images/wechat.jpg" target="_blank">WeChat</a>
21
  </p>
22
 
23
  # 目录
 
119
  ```python
120
  import os
121
  import torch
 
122
  from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
123
+
124
+ tokenizer = AutoTokenizer.from_pretrained("chuhac/TeleChat2-35B", trust_remote_code=True)
125
+ model = AutoModelForCausalLM.from_pretrained("chuhac/TeleChat2-35B", trust_remote_code=True, device_map="auto",
126
  torch_dtype=torch.float16)
127
+ generate_config = GenerationConfig.from_pretrained("chuhac/TeleChat2-35B")
128
  question = "生抽与老抽的区别?"
129
  answer, history = model.chat(tokenizer=tokenizer, question=question, history=[], generation_config=generate_config,
130
  stream=False)