update app.py
app.py CHANGED
@@ -71,11 +71,26 @@ async def generate_from_api(user_input, generation_config):
 
 # Sample texts
 sample_texts = {
+    "Tell me a story in pidgin": "Tell me a story in pidgin",
+    "Oma Ede, Mi ji ogede...": "Oma Ede, Mi ji ogede mi a foroma orhorho edha meji ri eka. ",
+    "who are you?": "who are you?",
+    "Translate 'how are you?' to Yoruba": "Translate 'how are you?' to Yoruba",
+    "Classify the sentiment": "Anyi na-echefu oke ike.",
+    "what is the topic of this text": "Africa Free Trade Zone: Kò sí ìdènà láti kó ọjà láti orílẹ̀èdè kan sí òmíràn",
+    "diacritize this text: ": "",
+    "clean this text": "",
+    "headline of this text": '** Sylvain Itté French ambassador don comot Niger Republic **. Sylvain Itté, di French ambassador for Niger don comot Niamey and currently e dey for flight from Ndjamena to Paris. Sylvain Itté, di French ambassador for Niger don comot Niamey very early dis morning and currently e dey for flight from Ndjamena to Paris.\n\nDi military detain Bazoum and im family for di presidential palace. Niger na former French colony, and France still get 1,500 sojas for di African country.\n\n"France don decide to withdraw dia ambassador. In di next hours our ambassador and several diplomats go return to France," Oga Macron tok.\n\nE add say di military co-operation dey "over" and French troops go leave in "di months to come".\n\n"Dis Sunday we celebrate one new step towards di sovereignty of Niger," di junta tok, for one statement wey AFP news agency quote.\n\nDi decision by Paris dey come afta months of hostility and protest against di presence of French for di kontri, wit regular demonstrations for di capital Niamey.\n\nDi move don scata France operations against Islamist militants for di wider Sahel region and Paris influence for there. But oga Macron tok say "putschists no go hold France hostage,"',
+}
+
+instruction_wrap = {
     "Tell me a story in pidgin": "<prompt> Tell me a story in pidgin <response>:",
-    "who are you?": "<prompt> who are you? <response>:",
     "Translate 'how are you?' to Yoruba": "<prompt> Translate 'how are you?' to Yoruba <response>:",
-    "
-    "
+    "who are you?": "<prompt> who are you? <response>:",
+    "Classify the sentiment": "<classify> Anyi na-echefu oke ike. <sentiment>",
+    "clean this text": "<clean> <pcm>",
+    "diacritize this text: ": "<diacritize> <yor>",
+    "what is the topic of this text": "<classify> Africa Free Trade Zone: Kò sí ìdènà láti kó ọjà láti orílẹ̀èdè kan sí òmíràn <topic>",
+    'headline of this text': '<title> ** Sylvain Itté French ambassador don comot Niger Republic **. Sylvain Itté, di French ambassador for Niger don comot Niamey and currently e dey for flight from Ndjamena to Paris. Sylvain Itté, di French ambassador for Niger don comot Niamey very early dis morning and currently e dey for flight from Ndjamena to Paris.\n\nDi military detain Bazoum and im family for di presidential palace. Niger na former French colony, and France still get 1,500 sojas for di African country.\n\n"France don decide to withdraw dia ambassador. In di next hours our ambassador and several diplomats go return to France," Oga Macron tok.\n\nE add say di military co-operation dey "over" and French troops go leave in "di months to come".\n\n"Dis Sunday we celebrate one new step towards di sovereignty of Niger," di junta tok, for one statement wey AFP news agency quote.\n\nDi decision by Paris dey come afta months of hostility and protest against di presence of French for di kontri, wit regular demonstrations for di capital Niamey.\n\nDi move don scata France operations against Islamist militants for di wider Sahel region and Paris influence for there. But oga Macron tok say "putschists no go hold France hostage," <headline>',
 }
 
 # Task options
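
This hunk separates the plain sample strings (sample_texts) from their model-ready forms (instruction_wrap), which wrap each sample in task tags such as <prompt> ... <response>:, <classify> ... <sentiment>, or <title> ... <headline>. As a rough sketch only (not part of the commit), the hypothetical helper below shows how arbitrary input could be wrapped with the same tag pairs; the tags are copied from the diff, while the helper name, the TASK_TAGS table, and the task keys are assumptions.

# Sketch only: tag pairs copied from the diff; helper name and TASK_TAGS are hypothetical.
TASK_TAGS = {
    "generate": ("<prompt>", "<response>:"),
    "sentiment": ("<classify>", "<sentiment>"),
    "topic": ("<classify>", "<topic>"),
    "headline": ("<title>", "<headline>"),
}

def wrap_instruction(task, text):
    """Wrap raw user text in the opening/closing tags used for a task."""
    open_tag, close_tag = TASK_TAGS[task]
    return f"{open_tag} {text} {close_tag}"

# wrap_instruction("generate", "Tell me a story in pidgin")
# -> "<prompt> Tell me a story in pidgin <response>:"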
@@ -113,8 +128,8 @@ else:
     task_value = task_options[task]
 
     # Text input
-    user_input = st.text_area("Enter text below **(please, first read the instructions on how to use in the side bar for better experience)**: ", sample_texts[sample_text])
-
+    user_input = st.text_area("Enter text below **(please, first read the instructions on how to use in the side bar for better experience)**: ", instruction_wrap.get(sample_texts[sample_text], sample_texts[sample_text]))
+
     if st.button("Generate"):
         if user_input:
             try:
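
The text box is now pre-filled through a dict.get fallback instead of the raw sample. A minimal stand-alone sketch of that lookup (illustration only, using two entries copied from the diff):

# Illustration only: how instruction_wrap.get(key, default) picks the pre-fill value.
sample_texts = {
    "who are you?": "who are you?",
    "Classify the sentiment": "Anyi na-echefu oke ike.",
}
instruction_wrap = {
    "who are you?": "<prompt> who are you? <response>:",
    "Classify the sentiment": "<classify> Anyi na-echefu oke ike. <sentiment>",
}

for label in sample_texts:
    # The lookup key is the sample *text*; when that text repeats the label
    # (e.g. "who are you?") the wrapped form is found, otherwise .get falls
    # back to the raw sample text itself.
    value = instruction_wrap.get(sample_texts[label], sample_texts[label])
    print(label, "->", value)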
@@ -123,14 +138,14 @@ if st.button("Generate"):
                 print("wrapped_input: ", wrapped_input)
                 generation_config["max_new_tokens"]= min(max_new_tokens, 1024 - len(tokenizer.tokenize(wrapped_input)))
                 generated_text = asyncio.run(generate_from_api(wrapped_input, generation_config))
-                # print(generated_text)
                 full_output = st.empty()
 
                 start_time = time.time()
                 output = ""
                 for next_token in tokenizer.tokenize(generated_text):
                     output += tokenizer.convert_tokens_to_string([next_token])
-                    full_output.text(output)
+                    full_output.markdown(f"<div style='word-wrap: break-word;'>{output}</div>", unsafe_allow_html=True)
+                    # full_output.text(output)
                     time.sleep(0.1)
                 end_time = time.time()
                 time_diff = end_time - start_time
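
Two details of this hunk are easy to miss: max_new_tokens is clamped so that prompt plus completion stays inside a 1024-token window, and the already generated text is replayed token by token into a single st.empty() placeholder, now rendered with markdown so long outputs word-wrap. Below is a stand-alone sketch of that display loop, under the assumption that a Hugging Face tokenizer and Streamlit are set up as elsewhere in app.py; the function name and the delay parameter are illustrative.

import time
import streamlit as st

def stream_text(generated_text, tokenizer, delay=0.1):
    """Replay already-generated text token by token in one placeholder."""
    placeholder = st.empty()  # single slot that each iteration overwrites
    output = ""
    for token in tokenizer.tokenize(generated_text):
        output += tokenizer.convert_tokens_to_string([token])
        # Same rendering call as the diff: an HTML div so long lines wrap.
        placeholder.markdown(
            f"<div style='word-wrap: break-word;'>{output}</div>",
            unsafe_allow_html=True,
        )
        time.sleep(delay)  # simulated streaming pace, 0.1 s in the diff
    return output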