File size: 1,623 Bytes
13e77d8
096a21c
 
 
 
 
 
 
13e77d8
 
 
096a21c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13e77d8
096a21c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
import gradio as gr
from llama_cpp import Llama
from typing import Optional

# Download (cached by huggingface_hub after the first run) and load the
# 4-bit quantized Mistral-7B GGUF chat model; used by Chat.prompt below.
llm = Llama.from_pretrained(
    repo_id="Haary/USK_Mistral_7B_Unsloth_GGUF",
    filename="unsloth.Q4_K_M.gguf"
)


class Chat:
    """Stateful conversation wrapper around the module-level ``llm``.

    Accumulates the full message history in ``self.messages`` and forwards
    it to the model on every turn so replies have conversational context.
    """

    def __init__(self, system: Optional[str] = None):
        """Start a new conversation.

        Args:
            system: Optional system instruction; when given it is stored as
                the first message of the history.
        """
        self.system = system
        # Chronological list of {"role": ..., "content": ...} dicts.
        self.messages: list[dict] = []

        if system is not None:
            self.messages.append({
                "role": "system",
                "content": system
            })

    def prompt(self, content: str) -> str:
        """Send the user's ``content`` to the model and return its reply.

        The user message and the assistant's reply are both appended to
        ``self.messages`` so subsequent turns see the whole conversation.

        Args:
            content: The user's message for this turn.

        Returns:
            The assistant's reply text.
        """
        self.messages.append({
            "role": "user",
            "content": content
        })
        # Bug fix: forward the accumulated history (including the message
        # just appended) instead of a hard-coded sample question, so the
        # model actually answers what the user asked.
        response = llm.create_chat_completion(messages=self.messages)
        response_content = response["choices"][0]["message"]["content"]
        self.messages.append({
            "role": "assistant",
            "content": response_content
        })
        return response_content

chat = Chat(system="You are a helpful assistant.")


def respond(message, chat_history):
    """Gradio submit callback: ask the bot and extend the visible history.

    Args:
        message: The text the user typed into the textbox.
        chat_history: The Chatbot component's list of (user, bot) pairs;
            extended in place with this turn.

    Returns:
        An empty string (clears the textbox) and the updated history.
    """
    reply = chat.prompt(content=message)
    turn = (message, reply)
    chat_history.append(turn)
    return "", chat_history


# Assemble the Gradio UI: a chat display, a text input, and a clear button.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    # Pressing Enter in the textbox sends the message and refreshes history.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    # Clears only the Chatbot display; NOTE(review): the module-level Chat
    # object's message history is NOT reset here.
    clear.click(lambda: None, None, chatbot, queue=False)


# Launch the Gradio app when executed as a script.
if __name__ == "__main__":
    demo.launch(debug=True)