Update README.md
README.md CHANGED
@@ -33,10 +33,10 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 import os
 
-#
+# Tunable parameters; for text generation, relatively high values are recommended (though the temperature should not be too high)
 TOP_P = 0.9  # Top-p (nucleus sampling), range 0 to 1
 TOP_K = 80  # K value for top-k sampling
-TEMPERATURE = 0.
+TEMPERATURE = 0.3  # Temperature; controls the randomness of the generated text
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
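For context, a minimal sketch of how these parameters could feed into generation with the imports shown in the hunk header. The model name, prompt, and `max_new_tokens` below are placeholders for illustration, not values taken from this README.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Tunable sampling parameters from the README snippet above
TOP_P = 0.9        # Top-p (nucleus sampling), range 0 to 1
TOP_K = 80         # K value for top-k sampling
TEMPERATURE = 0.3  # Controls the randomness of the generated text

device = "cuda" if torch.cuda.is_available() else "cpu"

model_name = "your-model-name"  # placeholder; use the model this repository documents
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)

inputs = tokenizer("Hello, world", return_tensors="pt").to(device)
outputs = model.generate(
    **inputs,
    do_sample=True,          # sampling must be enabled for top_p / top_k / temperature to apply
    top_p=TOP_P,
    top_k=TOP_K,
    temperature=TEMPERATURE,
    max_new_tokens=128,      # placeholder length limit
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```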