Illia56 committed on
Commit
960e470
1 Parent(s): 109a58b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -2
app.py CHANGED
@@ -1,3 +1,6 @@
 
 
 
1
  import os
2
  import platform
3
  import random
@@ -13,8 +16,32 @@ from ctransformers import AutoModelForCausalLM
13
  from dl_hf_model import dl_hf_model
14
  from loguru import logger
15
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
 
17
- url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/blob/main/llama-2-7b-chat.ggmlv3.q2_K.bin" # 7.37G
 
 
 
 
 
 
18
 
19
  _ = (
20
  "golay" in platform.node()
@@ -27,7 +54,7 @@ _ = (
27
  if _:
28
  # url = "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/blob/main/llama-2-13b-chat.ggmlv3.q2_K.bin"
29
  url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/blob/main/llama-2-7b-chat.ggmlv3.q2_K.bin" # 2.87G
30
-
31
 
32
  prompt_template = """Below is an instruction that describes a task. Write a response that appropriately completes the request.
33
 
 
1
+ """Run codes."""
2
+ # pylint: disable=line-too-long, broad-exception-caught, invalid-name, missing-function-docstring, too-many-instance-attributes, missing-class-docstring
3
+ # ruff: noqa: E501
4
  import os
5
  import platform
6
  import random
 
16
  from dl_hf_model import dl_hf_model
17
  from loguru import logger
18
 
19
+ filename_list = [
20
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q2_K.bin",
21
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q3_K_L.bin",
22
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q3_K_M.bin",
23
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q3_K_S.bin",
24
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q4_0.bin",
25
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q4_1.bin",
26
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q4_K_M.bin",
27
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q4_K_S.bin",
28
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q5_0.bin",
29
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q5_1.bin",
30
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q5_K_M.bin",
31
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q5_K_S.bin",
32
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q6_K.bin",
33
+ "Wizard-Vicuna-7B-Uncensored.ggmlv3.q8_0.bin",
34
+ ]
35
+
36
+ URL = "https://huggingface.co/TheBloke/Wizard-Vicuna-7B-Uncensored-GGML/raw/main/Wizard-Vicuna-7B-Uncensored.ggmlv3.q4_K_M.bin" # 4.05G
37
 
38
+ url = "https://huggingface.co/savvamadar/ggml-gpt4all-j-v1.3-groovy/blob/main/ggml-gpt4all-j-v1.3-groovy.bin"
39
+ url = "https://huggingface.co/TheBloke/Llama-2-13B-GGML/blob/main/llama-2-13b.ggmlv3.q4_K_S.bin" # 7.37G
40
+ # url = "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/blob/main/llama-2-13b-chat.ggmlv3.q3_K_L.bin"
41
+ url = "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/blob/main/llama-2-13b-chat.ggmlv3.q3_K_L.bin" # 6.93G
42
+ # url = "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/blob/main/llama-2-13b-chat.ggmlv3.q3_K_L.binhttps://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/blob/main/llama-2-13b-chat.ggmlv3.q4_K_M.bin" # 7.87G
43
+
44
+ url = "https://huggingface.co/localmodels/Llama-2-13B-Chat-ggml/blob/main/llama-2-13b-chat.ggmlv3.q4_K_S.bin" # 7.37G
45
 
46
  _ = (
47
  "golay" in platform.node()
 
54
  if _:
55
  # url = "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/blob/main/llama-2-13b-chat.ggmlv3.q2_K.bin"
56
  url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/blob/main/llama-2-7b-chat.ggmlv3.q2_K.bin" # 2.87G
57
+ url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/blob/main/llama-2-7b-chat.ggmlv3.q4_K_M.bin" # 2.87G
58
 
59
  prompt_template = """Below is an instruction that describes a task. Write a response that appropriately completes the request.
60