Update README.md
README.md (changed)
@@ -92,9 +92,12 @@ checkpoint = "lambdasec/santafixer"
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

 tokenizer = AutoTokenizer.from_pretrained(checkpoint)
-model = AutoModelForCausalLM.from_pretrained(checkpoint,
+model = AutoModelForCausalLM.from_pretrained(checkpoint,
+                                             trust_remote_code=True).to(device)

-input_text = "<fim-prefix>def print_hello_world():\n
+input_text = "<fim-prefix>def print_hello_world():\n
+    <fim-suffix>\n    print('Hello world!')
+<fim-middle>"
 inputs = tokenizer.encode(input_text, return_tensors="pt").to(device)
 outputs = model.generate(inputs)
 print(tokenizer.decode(outputs[0]))
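Read together with the context named in the hunk header, the updated example amounts to roughly the snippet below. This is a minimal sketch, not the exact README text: the imports and the `checkpoint = "lambdasec/santafixer"` assignment are assumed from the part of the README above the hunk, and the fill-in-the-middle prompt is written as one implicitly concatenated string so the example stays valid Python.

```python
# Sketch of the updated usage example; imports and checkpoint are assumed
# from the README context above the hunk.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

checkpoint = "lambdasec/santafixer"
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint,
                                             trust_remote_code=True).to(device)

# Fill-in-the-middle prompt: the model is asked to generate the code that
# belongs between the <fim-prefix> and <fim-suffix> segments.
input_text = ("<fim-prefix>def print_hello_world():\n    "
              "<fim-suffix>\n    print('Hello world!')"
              "<fim-middle>")
inputs = tokenizer.encode(input_text, return_tensors="pt").to(device)
outputs = model.generate(inputs)
print(tokenizer.decode(outputs[0]))
```

The `<fim-prefix>`/`<fim-suffix>`/`<fim-middle>` tags follow the SantaCoder-style fill-in-the-middle format, so the decoded output is expected to contain the missing middle of the function rather than a left-to-right continuation.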