BMike10 committed
Commit c674665
1 Parent(s): 415081a
Files changed (3)
  1. .gitignore +1 -0
  2. app.py +6 -8
  3. requirements.txt +1 -1
.gitignore ADDED
@@ -0,0 +1 @@
+ itaca_mistral7b_qlora_4bit-unsloth.Q4_K_M.gguf
app.py CHANGED
@@ -1,19 +1,17 @@
  import random
  import gradio as gr
- from transformers import pipeline


- def random_response(message, history):
-     return random.choice(["Yes", "No"])

-
- # model="michelebasilico/itaca-mistral-7b-v2-4bit")
- model = pipeline("text-generation",
-                  model="michelebasilico/itaca-mistral-7b-v2-16bit")


  def predict(message, history):
-     outputs = model(message)[0]["generated_text"]

      return outputs
 
  import random
  import gradio as gr
+ from ctransformers import AutoModelForCausalLM

+ local_path = r"itaca_mistral7b_qlora_4bit-unsloth.Q4_K_M.gguf"

+ model = AutoModelForCausalLM.from_pretrained(local_path, model_file="itaca_mistral7b_qlora_4bit-unsloth.Q4_K_M.gguf", model_type="mistral", local_files_only=True
+                                              # , gpu_layers=50
+                                              )


  def predict(message, history):
+     outputs = model(message)

      return outputs
 
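This change replaces the hosted transformers text-generation pipeline with a local 4-bit GGUF checkpoint loaded through ctransformers. The diff does not show how predict is attached to the Gradio UI, so the sketch below fills that in with an assumed gr.ChatInterface wiring; the generation parameters (max_new_tokens, temperature) are also illustrative and not part of this commit.

# Minimal sketch of the updated app.py; the ChatInterface/launch code and the
# generation kwargs are assumptions, not part of this commit.
import gradio as gr
from ctransformers import AutoModelForCausalLM

# Local GGUF checkpoint; the same file is added to .gitignore in this commit.
local_path = "itaca_mistral7b_qlora_4bit-unsloth.Q4_K_M.gguf"

# Load the quantized Mistral model for CPU inference.
model = AutoModelForCausalLM.from_pretrained(
    local_path,
    model_type="mistral",
    local_files_only=True,
    # gpu_layers=50,  # uncomment to offload layers onto a GPU if one is available
)


def predict(message, history):
    # A ctransformers model is callable and returns the generated text directly,
    # unlike the transformers pipeline, which returns a list of dicts.
    return model(message, max_new_tokens=256, temperature=0.7)


if __name__ == "__main__":
    gr.ChatInterface(predict).launch()

Leaving gpu_layers commented out keeps every layer on the CPU, which is the safe default when the Space has no GPU hardware attached.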
requirements.txt CHANGED
@@ -1,2 +1,2 @@
- transformers
  torch

+ ctransformers
  torch
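
requirements.txt swaps transformers for ctransformers to match the new loader in app.py. A quick local sanity check that the dependency and the git-ignored GGUF file work together might look like the snippet below; the prompt and token budget are arbitrary, and the file is assumed to sit in the working directory.

# Hypothetical local smoke test; not part of this commit.
from ctransformers import AutoModelForCausalLM

llm = AutoModelForCausalLM.from_pretrained(
    "itaca_mistral7b_qlora_4bit-unsloth.Q4_K_M.gguf",  # path to the local GGUF file
    model_type="mistral",
)
print(llm("Ciao!", max_new_tokens=32))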