isonuma committed
Commit fef3565 · verified · Parent: 465c1d8

Update app.py

Files changed (1)
  1. app.py +38 -5
app.py CHANGED
@@ -1,7 +1,40 @@
  import streamlit as st
- import numpy as np
- import transformers
-
- x = st.slider('Select a value')
- st.write(x, 'squared is', x * x)
- st.write(transformers.__version__)
+ from utils import is_hiragana_or_katakana
+ from transformers import AutoModelForCausalLM, AutoTokenizer, LlamaTokenizer
+ from st_keyup import st_keyup
+ import torch  # required for torch.float16 below
+
+ # model_name_or_path = "tokyotech-llm/Llama-3-Swallow-8B-v0.1"
+ model_name_or_path = "tokyotech-llm/Swallow-7b-hf"
+ # model_name_or_path = "llm-jp/llm-jp-1.3b-v1.0"
+ tokenizer = LlamaTokenizer.from_pretrained(model_name_or_path)
+ tokenizer.pad_token = tokenizer.eos_token
+
+ model = AutoModelForCausalLM.from_pretrained(model_name_or_path, torch_dtype=torch.float16)
+ # model = AutoModelForCausalLM.from_pretrained(model_name_or_path)
+
+
+ # Show title and description.
+ st.title("丸点棒AI")  # "Marutenbo AI"
+ st.write(
+     ""
+ )
+
+ query_candidates = {"": ([""], 0)}
+
+ # Text input that re-runs on every keystroke ("お題" = "prompt").
+ query = st_keyup(
+     "お題",
+     placeholder="ひらがな/カタカナのみを入力",  # "Enter hiragana/katakana only"
+ )
+
+ if query != "" and is_hiragana_or_katakana(query):
+     st.info("OK")
+     # if query in query_candidates:
+     #     top_candidates, top_losses = query_candidates[query]
+     # else:
+     #     top_candidates, top_losses = search_candidates(query, top_k=100)
+     # answers = ["{}: {:.2f}".format(top_candidates[index], top_losses[index]) for index in range(min(len(top_candidates), 10))]
+     # value = "<br>".join(answers)
+     # value += f"<br>({len(top_candidates)}候補)"  # "... candidates"
+     # html.value = value
+ else:
+     st.info("ひらがな/カタカナのみを入力してください")  # "Please enter hiragana/katakana only"