Aksh1t committed on
Commit 173bcd0
1 Parent(s): f846aa8

Update app.py

Files changed (1)
  1. app.py +23 -14
app.py CHANGED
@@ -3,7 +3,7 @@ from huggingface_hub import InferenceClient
 
 client = InferenceClient("Aksh1t/mistral-7b-oig-unsloth-merged")
 
-# Define the custom chat template
+# Custom chat template
 custom_template = {
     "chat": {
         "prompt": "The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly.\n\nHuman: {input}\nAI:",
@@ -11,6 +11,25 @@ custom_template = {
     }
 }
 
+def format_messages(message, history):
+    formatted_messages = []
+
+    # Add system message if present
+    if system_message:
+        formatted_messages.append({"role": "system", "content": system_message})
+
+    # Add history messages
+    for val in history:
+        if val[0]:
+            formatted_messages.append({"role": "user", "content": val[0]})
+        if val[1]:
+            formatted_messages.append({"role": "assistant", "content": val[1]})
+
+    # Add current user message
+    formatted_messages.append({"role": "user", "content": message})
+
+    return formatted_messages
+
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -19,26 +38,16 @@ def respond(
     temperature,
     top_p,
 ):
-    messages = [{"role": "system", "content": system_message}]
-
-    for val in history:
-        if val[0]:
-            messages.append({"role": "user", "content": val[0]})
-        if val[1]:
-            messages.append({"role": "assistant", "content": val[1]})
-
-    messages.append({"role": "user", "content": message})
-
+    formatted_messages = format_messages(message, history)
     response = ""
 
-    # Call chat_completion with custom template and other parameters
+    # Call chat_completion with formatted messages
     for message in client.chat_completion(
-        messages,
+        formatted_messages,
         max_tokens=max_tokens,
         stream=True,
         temperature=temperature,
         top_p=top_p,
-        template=custom_template  # Pass custom template here
     ):
         token = message.choices[0].delta.content
 
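
Note on the committed version: format_messages(message, history) reads system_message inside its body, but system_message is neither a parameter of the helper nor, as far as this diff shows, a module-level name, so the call would raise a NameError unless it is defined elsewhere in the file; custom_template is also no longer passed to chat_completion after the template= argument was dropped. A minimal sketch of the same refactor with system_message passed in explicitly and the streamed tokens accumulated; the exact parameter order of respond and the variable names beyond those in the diff are assumptions, not part of the commit:

from huggingface_hub import InferenceClient

client = InferenceClient("Aksh1t/mistral-7b-oig-unsloth-merged")

def format_messages(message, history, system_message):
    # Build the chat_completion message list from the Gradio-style history.
    formatted_messages = []
    if system_message:
        formatted_messages.append({"role": "system", "content": system_message})
    for user_turn, assistant_turn in history:
        if user_turn:
            formatted_messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            formatted_messages.append({"role": "assistant", "content": assistant_turn})
    formatted_messages.append({"role": "user", "content": message})
    return formatted_messages

def respond(message, history, system_message, max_tokens, temperature, top_p):
    # system_message is now in scope because it is passed in explicitly.
    formatted_messages = format_messages(message, history, system_message)
    response = ""
    for chunk in client.chat_completion(
        formatted_messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # some stream chunks carry no content
            response += token
            yield response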