Commit: changing model and adding temperature parameters
app.py CHANGED
@@ -34,11 +34,11 @@ print("loading the LLM......................................")
 # )
 
 llm = AutoModelForCausalLM.from_pretrained("TheBloke/Llama-2-7B-Chat-GGUF",
-                                           model_file="llama-2-7b-chat.
+                                           model_file="llama-2-7b-chat.Q3_K_L.gguf",
                                            model_type="llama",
                                            # config = ctransformers.hub.AutoConfig,
                                            # hf = True
-
+                                           temperature = 0.2,
                                            # max_new_tokens = 1024,
                                            # stop = ['\n']
                                            )
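
For context, below is a minimal sketch of how the model-loading section of app.py reads after this commit. It assumes that AutoModelForCausalLM is imported from the ctransformers package (the commented-out ctransformers.hub.AutoConfig line suggests this); the example prompt at the end is illustrative and not part of the original file.

# Sketch of the loading code after this commit, assuming the ctransformers API.
from ctransformers import AutoModelForCausalLM

print("loading the LLM......................................")

# Load the GGUF-quantized Llama-2 7B chat model. The commit pins the
# Q3_K_L quantization file and lowers the sampling temperature to 0.2
# for more deterministic completions.
llm = AutoModelForCausalLM.from_pretrained(
    "TheBloke/Llama-2-7B-Chat-GGUF",
    model_file="llama-2-7b-chat.Q3_K_L.gguf",
    model_type="llama",
    temperature=0.2,
    # max_new_tokens=1024,
    # stop=['\n'],
)

# Illustrative usage: a ctransformers model is callable on a prompt string.
print(llm("What does the Q3_K_L quantization trade off?"))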