Does not run

#1 opened by goodasdgood

from llama_cpp import Llama

# Load the quantized GGUF model from the Colab filesystem.
llm = Llama(
    model_path="/content/dracarys2-72b-instruct.Q2_K.gguf",
    chat_format="llama-2",
)

# Ask the model a question through the chat-completion API.
llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Who is Napoleon Bonaparte?"},
    ]
)

The code above does not run.
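A minimal sketch of a variant that may work, under two assumptions: that Dracarys2 is Qwen2.5-based, so the hard-coded "llama-2" chat format may not match the model, and that the GGUF file embeds its own chat template, which llama-cpp-python will use automatically when chat_format is omitted. The n_ctx value is illustrative, and the response is printed explicitly so the output is visible outside a notebook cell.

from llama_cpp import Llama

# Sketch, not a confirmed fix: omit chat_format so llama-cpp-python
# falls back to the chat template stored inside the GGUF metadata.
llm = Llama(
    model_path="/content/dracarys2-72b-instruct.Q2_K.gguf",
    n_ctx=2048,  # context window; adjust to available memory
)

response = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Who is Napoleon Bonaparte?"},
    ]
)

# The reply text is nested inside the OpenAI-style response dict.
print(response["choices"][0]["message"]["content"])

If loading itself fails, note that the Q2_K quantization of a 72B model still needs on the order of tens of gigabytes of RAM, which may exceed what the Colab instance provides.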
