phoen1x committed on
Commit
85952f0
1 Parent(s): 66d6b18

Update app.py: add PDF upload support

Files changed (1):
  app.py +126 -9
app.py CHANGED
@@ -1,22 +1,139 @@
+# from huggingface_hub import InferenceClient
+# import gradio as gr
+
+# client = InferenceClient(
+#     "mistralai/Mixtral-8x7B-Instruct-v0.1"
+# )
+
+
+# def format_prompt(message, history):
+#     prompt = "<s>"
+#     for user_prompt, bot_response in history:
+#         prompt += f"[INST] {user_prompt} [/INST]"
+#         prompt += f" {bot_response}</s> "
+#     prompt += f"[INST] {message} [/INST]"
+#     return prompt
+
+# def generate(
+#     prompt, history, system_prompt, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
+# ):
+#     temperature = float(temperature)
+#     if temperature < 1e-2:
+#         temperature = 1e-2
+#     top_p = float(top_p)
+
+#     generate_kwargs = dict(
+#         temperature=temperature,
+#         max_new_tokens=max_new_tokens,
+#         top_p=top_p,
+#         repetition_penalty=repetition_penalty,
+#         do_sample=True,
+#         seed=42,
+#     )
+
+#     formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
+#     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
+#     output = ""
+
+#     for response in stream:
+#         output += response.token.text
+#         yield output
+#     return output
+
+
+# additional_inputs=[
+#     gr.Textbox(
+#         label="System Prompt",
+#         max_lines=1,
+#         interactive=True,
+#     ),
+#     gr.Slider(
+#         label="Temperature",
+#         value=0.9,
+#         minimum=0.0,
+#         maximum=1.0,
+#         step=0.05,
+#         interactive=True,
+#         info="Higher values produce more diverse outputs",
+#     ),
+#     gr.Slider(
+#         label="Max new tokens",
+#         value=256,
+#         minimum=0,
+#         maximum=1048,
+#         step=64,
+#         interactive=True,
+#         info="The maximum number of new tokens",
+#     ),
+#     gr.Slider(
+#         label="Top-p (nucleus sampling)",
+#         value=0.90,
+#         minimum=0.0,
+#         maximum=1,
+#         step=0.05,
+#         interactive=True,
+#         info="Higher values sample more low-probability tokens",
+#     ),
+#     gr.Slider(
+#         label="Repetition penalty",
+#         value=1.2,
+#         minimum=1.0,
+#         maximum=2.0,
+#         step=0.05,
+#         interactive=True,
+#         info="Penalize repeated tokens",
+#     )
+# ]
+
+# examples=[["I'm planning a vacation to Japan. Can you suggest a one-week itinerary including must-visit places and local cuisines to try?", None, None, None, None, None, ],
+#     ["Can you write a short story about a time-traveling detective who solves historical mysteries?", None, None, None, None, None,],
+#     ["I'm trying to learn French. Can you provide some common phrases that would be useful for a beginner, along with their pronunciations?", None, None, None, None, None,],
+#     ["I have chicken, rice, and bell peppers in my kitchen. Can you suggest an easy recipe I can make with these ingredients?", None, None, None, None, None,],
+#     ["Can you explain how the QuickSort algorithm works and provide a Python implementation?", None, None, None, None, None,],
+#     ["What are some unique features of Rust that make it stand out compared to other systems programming languages like C++?", None, None, None, None, None,],
+# ]
+
+# gr.ChatInterface(
+#     fn=generate,
+#     chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
+#     additional_inputs=additional_inputs,
+#     title="Mixtral 46.7B",
+#     examples=examples,
+#     concurrency_limit=20,
+# ).launch(show_api=True)
+
+
 from huggingface_hub import InferenceClient
 import gradio as gr
+import PyPDF2
 
 client = InferenceClient(
     "mistralai/Mixtral-8x7B-Instruct-v0.1"
 )
 
+def extract_text_from_pdf(file):
+    text = ""
+    with open(getattr(file, "name", file), "rb") as f:
+        reader = PyPDF2.PdfReader(f)
+        for page in reader.pages:
+            text += page.extract_text() or ""
+    return text
 
 def format_prompt(message, history):
-    prompt = "<s>"
-    for user_prompt, bot_response in history:
-        prompt += f"[INST] {user_prompt} [/INST]"
-        prompt += f" {bot_response}</s> "
-    prompt += f"[INST] {message} [/INST]"
-    return prompt
+    prompt = "<s>"
+    for user_prompt, bot_response in history:
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
 
 def generate(
-    prompt, history, system_prompt, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
+    prompt, history, system_prompt, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0, pdf_file=None,
 ):
+    if pdf_file is not None:
+        pdf_text = extract_text_from_pdf(pdf_file)
+        prompt += " " + pdf_text
+
     temperature = float(temperature)
     if temperature < 1e-2:
         temperature = 1e-2
@@ -40,7 +157,6 @@ def generate(
         yield output
     return output
 
-
 additional_inputs=[
     gr.Textbox(
         label="System Prompt",
@@ -82,7 +198,8 @@ additional_inputs=[
         step=0.05,
         interactive=True,
         info="Penalize repeated tokens",
-    )
+    ),
+    gr.File(label="Upload PDF Document", file_types=[".pdf"]),
 ]
 
  examples=[["I'm planning a vacation to Japan. Can you suggest a one-week itinerary including must-visit places and local cuisines to try?", None, None, None, None, None, ],