Update README.md
README.md CHANGED
@@ -15,9 +15,9 @@ widget:
 
 inference:
   parameters:
-    max_length:
+    max_length: 100
     do_sample: True
-    temperature:
+    temperature: 0.15
 ---
 
 # GPT-3 small
@@ -36,7 +36,7 @@ text = "Ever noticed how plane seats appear to be getting smaller and smaller? "
 input_ids = tokenizer.encode(text, return_tensors='pt')
 max_length = 150
 
-sample_outputs = model.generate(input_ids, do_sample=True, max_length=max_length)
+sample_outputs = model.generate(input_ids, do_sample=True, max_length=max_length, temperature=0.15)
 
 for i, sample_output in enumerate(sample_outputs):
     print(">> Generated text {}\n\n{}".format(i+1, tokenizer.decode(sample_output.tolist())))
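For context, below is a minimal sketch of how the generation snippet reads after this change. The loading lines, the `AutoTokenizer`/`AutoModelForCausalLM` classes, and the placeholder checkpoint id are assumptions (the real model id and classes come from the rest of the README, which is not shown in this diff); the prompt, `max_length`, and the newly added `temperature=0.15` are taken from the diff above.

```python
# Minimal sketch, assuming a placeholder checkpoint id; substitute this repo's
# actual model id and the classes used elsewhere in the README.
from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = "your-org/gpt3-small"  # placeholder, not the real repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

text = "Ever noticed how plane seats appear to be getting smaller and smaller? "
input_ids = tokenizer.encode(text, return_tensors='pt')
max_length = 150

# do_sample=True enables sampling; temperature=0.15 (added in this commit)
# sharpens the distribution, so outputs are much less random than the default 1.0.
sample_outputs = model.generate(input_ids, do_sample=True, max_length=max_length, temperature=0.15)

for i, sample_output in enumerate(sample_outputs):
    print(">> Generated text {}\n\n{}".format(i + 1, tokenizer.decode(sample_output.tolist())))
```

The same low temperature is mirrored in the front-matter `inference.parameters`, so the hosted inference widget samples with the settings shown in the code example.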