teragron committed
Commit 6e1ffb9 · 1 Parent(s): e634ea9

Update app.py

Files changed (1): app.py (+6 -4)
app.py CHANGED
@@ -1,13 +1,16 @@
-import json
 import gradio as gr
 from transformers import LlamaTokenizer
+import io
+import json
+
 
 # Load the tokenizer from the specific folder
 tokenizer = LlamaTokenizer.from_pretrained("llama_tokenizer")
 
 def tokenize(input_text, file=None):
     if file:
-        full_text = json.loads(file.decode('utf-8'))
+        with open(file, encoding="utf-8") as f:
+            full_text = "".join(f.readlines())
     else:
         full_text = input_text
 
@@ -22,11 +25,10 @@ with gr.Blocks() as demo:
     """)
     with gr.Row():
         text_input = gr.Textbox(placeholder="Enter prompt")
-        file_input = gr.File(label="Upload File", type="bytes")  # Change type to "byte"
+        file_input = gr.File(label="Upload File", type="filepath")
     with gr.Column():
         out = gr.Textbox(label="Number of tokens")
     run_btn = gr.Button("Run")
     run_btn.click(fn=tokenize, inputs=[text_input, file_input], outputs=out)
 
 demo.launch()
-
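
For readers following the change, here is a minimal, runnable sketch of how the updated handler fits together, assuming the portion of app.py elided between the two hunks simply counts tokens with the loaded LlamaTokenizer. The file-reading branch, the gr.File(type="filepath") input, and the widget layout come from the diff above; the return len(tokenizer.encode(full_text)) line and the gr.Markdown header are illustrative assumptions, not the file's actual contents:

# Illustrative sketch only; the lines elided between the two hunks are not in
# this diff, so the token-counting return and the Markdown header are assumed.
import gradio as gr
from transformers import LlamaTokenizer

# Load the tokenizer from the specific folder
tokenizer = LlamaTokenizer.from_pretrained("llama_tokenizer")

def tokenize(input_text, file=None):
    if file:
        # With gr.File(type="filepath") the handler receives a path string to a
        # temporary copy of the upload, so it can be opened like any local file.
        with open(file, encoding="utf-8") as f:
            full_text = "".join(f.readlines())
    else:
        full_text = input_text
    # Assumed counting step: encode the text and report how many token ids it yields.
    return len(tokenizer.encode(full_text))

with gr.Blocks() as demo:
    gr.Markdown("Count LLaMA tokens for a typed prompt or an uploaded text file.")  # assumed header text
    with gr.Row():
        text_input = gr.Textbox(placeholder="Enter prompt")
        file_input = gr.File(label="Upload File", type="filepath")
    with gr.Column():
        out = gr.Textbox(label="Number of tokens")
    run_btn = gr.Button("Run")
    run_btn.click(fn=tokenize, inputs=[text_input, file_input], outputs=out)

demo.launch()

Passing a file path instead of raw bytes is the point of the change: the handler no longer has to decode the upload and run it through json.loads, which is what the removed line attempted; it can read any plain-text upload directly.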