import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM
import time
import asyncio
import aiohttp
import json
import torch
import re
import nest_asyncio
nest_asyncio.apply()
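# nest_asyncio lets asyncio.run() be called even when an event loop is already running
# (as can happen inside Streamlit), which the async helpers below rely on.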
repo_name = "Aletheia-ng/SabiYarn-125M"
device = "cuda" if torch.cuda.is_available() else "cpu"
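# Cache the tokenizer/model so they are loaded once and reused across Streamlit reruns.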
@st.cache_resource(show_spinner=False)
def load_model():
    tokenizer = AutoTokenizer.from_pretrained(repo_name, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(repo_name, trust_remote_code=True).to(device)
    return tokenizer, model
tokenizer, model = load_model()
# Add sidebar with instructions
st.sidebar.title("Instructions: How to use")
st.sidebar.write("""
1. **Write Text or Select Sample:**
    - Enter text in the text area or use the dropdown to choose a sample.
2. **Select a Task:**
    - Choose a task from the **task dropdown** if using your own text.
    - **Important:** This ensures the model responds correctly.
3. **Avoid Conflicts:**
    - Don't select a sample text if using your own text.
4. **Select Nigerian Language:**
    - If prompted, choose the Nigerian language (this is the input/base language for diacritization and cleaning, and the target language for translation).
5. **Generate Output:**
    - Click the Generate button.
6. **Translation Tips:**
    - English as the target language gives the best results.
    - You can also translate between Nigerian languages, e.g. Yoruba to Igbo.
    - Use sentences instead of single words for better results.
7. **Performance Note:**
    - The model's performance varies due to its size and training data. It performs best on text generation and translation.
    - For other tasks, try multiple times if the model's output is not optimal (this is due to the generator's sampling parameter settings).
    - **Read/understand/translate the model's output completely first. The model can sometimes fail to stop generating after providing a correct answer.**
8. **Other Tips:**
    - Use simple instructions for instruction following.
    - For question answering and question generation, follow the structure of the corresponding sample text.
9. **Adjust Parameters:**
    - Experiment with the generation parameters below to improve performance. However, the default values are sufficient.
""")
max_length = 100
max_new_tokens = 80
num_beams = 5
temperature = 0.99
top_k = 50
top_p = 0.95
repetition_penalty = 4.0
length_penalty = 3.0
# Create sliders in the sidebar
max_length = st.sidebar.slider("Max. output length", min_value=10, max_value=500, value=max_length)
max_new_tokens = st.sidebar.slider("Max. generated tokens", min_value=30, max_value=768, value=max_new_tokens)
num_beams = st.sidebar.slider("Number of Beams: Improves coherence of the model output.", min_value=1, max_value=10, value=num_beams)
temperature = st.sidebar.slider("Temperature: Controls the creativity of the model", min_value=0.1, max_value=2.0, value=temperature)
top_k = st.sidebar.slider("Top-K: Controls model's sampling space.", min_value=1, max_value=100, value=top_k)
top_p = st.sidebar.slider("Top-P", min_value=0.1, max_value=1.0, value=top_p)
repetition_penalty = st.sidebar.slider("Repetition Penalty: Discourages token repetition during generation.", min_value=1.0, max_value=10.0, value=repetition_penalty)
length_penalty = st.sidebar.slider("Length Penalty: Discourages poor output as token length grows.", min_value=0.1, max_value=10.0, value=length_penalty)
do_sample = st.sidebar.checkbox("Do_sample: Enable sampling for richer generation.")
generation_config = {
"max_length": max_length,
"num_beams": num_beams,
"do_sample": bool(do_sample),
"temperature": temperature,
"top_k": top_k,
"top_p": top_p,
"repetition_penalty": repetition_penalty,
"length_penalty": length_penalty,
"early_stopping": True
}
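# Shared generation settings: sent as the "config" payload to the remote API and
# reused as keyword arguments for local model.generate() if the API is unavailable.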
# Streamlit app
st.title("SabiYarn-125M : Generates text in multiple Nigerian languages.")
st.write("**Supported Languages: English, Yoruba, Igbo, Hausa, Pidgin, Efik, Urhobo, Fulfulde, Fulah. \nResults may not be coherent for less represented languages (i.e Efik, \
Urhobo, Fulfulde, Fulah).**")
st.write("**It takes a while (~25s) to return an output on the first 'generate' click. Avg response time: 1-2s on GPU, 40s on CPU**")
st.write("**Model outputs 80 tokens as default. Adjust in the side bar (longer inputs/tokens will increase response time). MAX TOKENS=1024**")
st.write("**For convenience, you can use chatgpt to provide input text and translate/evaluate model output.**")
st.write("-" * 50)
popular_topics = [
"Sports", "Entertainment", "Politics", "Travel", "Technology",
"Health", "Business", "Science", "Education", "Lifestyle",
"Culture", "Environment", "Finance", "Food", "Gaming",
"History", "Law", "Literature", "Music", "News","Africa",
"Philosophy", "Religion", "Society", "World"
]
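# Heuristic: cut the generated answer at the first lowercase-to-uppercase transition,
# which usually marks the start of run-on text after the actual answer.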
async def extract_answer(text):
    pattern = r'[a-z][A-Z]'
    result = re.split(pattern, text)[0]
    result = text[:len(result) + 1]
    return result
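# Map generated text to one of the known topics above if it mentions one;
# otherwise return the generated text unchanged.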
async def assign_topic(generated_text, topic_list=popular_topics):
    lower_generated_text = generated_text.lower()
    for topic in topic_list:
        if topic.lower() in lower_generated_text:
            return topic.title()
    return generated_text
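# Return the number of sentences in a text along with the sentences themselves.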
async def count_sentences(text):
    # Split the text based on sentence-ending punctuation followed by a space or end of string
    sentences = re.split(r'[.!?]+\s*', text.strip())
    # Filter out any empty strings from the resulting list
    sentences = [sentence for sentence in sentences if sentence]
    return len(sentences), sentences
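# Wrap the user input with the selected task template unless it already contains a task tag.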
async def wrap_text(text, task_value):
    tasks = ["<classify>", "<prompt>", "<clean>", "<title>", "<diacritize>", "<translate>"]
    if any(task in text for task in tasks):
        return text
    return task_value.format(text)
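# Try each hosted inference endpoint in turn and return the first successful response;
# return "FAILED" so the caller can fall back to local (CPU) generation.
# Illustrative call (from an async context):
#   text = await generate_from_api("<translate> how are you? <yor>", generation_config)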
async def generate_from_api(user_input, generation_config):
    urls = [
        "https://pauljeffrey--sabiyarn-fastapi-app.modal.run/predict",
        "https://daveokpare--sabiyarn-fastapi-app.modal.run/predict",
        "https://damilojohn--sabiyarn-fastapi-app.modal.run/predict"
    ]
    payload = {
        "prompt": user_input,
        "config": generation_config
    }
    headers = {
        'Content-Type': 'application/json'
    }
    async with aiohttp.ClientSession() as session:
        for url in urls:
            try:
                async with session.post(url, headers=headers, json=payload) as response:
                    if response.status == 200:
                        return await response.text()
                    else:
                        print(f"Failed to fetch from {url} with status code {response.status}")
            except Exception as e:
                print(f"Error fetching from {url}: {e}")
    return "FAILED"
# Sample texts
sample_texts = {
"select":"",
"Me ya nuna?": "Me ya nuna?",
"Wetin dem dey call you?": "Wetin dem dey call you?",
"M nwere ike ịma onye ị bụ? Gịnị bụ njirimara gị?": "M nwere ike ịma onye ị bụ? Gịnị bụ njirimara gị?",
"Bawo ni, kini...": "Bawo ni, kini nkan ti o nilo lati maa mo bayi?",
"What are you called?": "What are you called?",
"Hausa: Afirka tana da al'adu...": "Afirka tana da al'adu da harsuna masu yawa. Tana da albarkatu da wuraren yawon shakatawa masu ban mamaki.",
"Yoruba: Ìmọ̀ sáyẹ́nsì àti...": "Ìmọ̀ sáyẹ́nsì àti tẹ̀knọ́lójì ń ṣe émi lóore tó níye lori ní Áfíríkà. Ó ń fún àwọn ènìyàn ní ànfààní láti dá irọyin àti kí wọ́n lè ṣe àwọn nǹkan tuntun.",
"Efik: Oma Ede, Mi ji ogede...": "Oma Ede, Mi ji ogede mi a foroma orhorho edha meji ri eka. ",
"Igbo: N'ala Igbo ...": "N'ala Igbo, ọtụtụ ndị mmadụ kwenyere na e nwere mmiri ara na elu-ilu",
"urhobo: Eshare nana ri...":"Eshare nana ri vwo ẹguọnọ rẹ iyono rẹ Aristotle vẹ Plato na",
"Efik: Ke eyo ...":"Ke eyo Jesus ye mme mbet esie, etop emi ama ada ifụre ọsọk mme Jew oro esịt okobụn̄ọde ke ntak idiọkido ke Israel, oro ẹkenyụn̄ ẹdude ke mfụhọ ke itie-ufụn mme nsunsu ido edinam Ido Ukpono Mme Jew eke akpa isua ikie.",
"Question Generation: Afghanistan ...": "Afghanistan has around 150 radio stations and over 50 television stations, which includes the state-owned RTA TV and various private channels such as TOLO and Shamshad TV. The first Afghan newspaper was published in 1906 and there are hundreds of print outlets today. By the 1920s, Radio Kabul was broadcasting local radio services. Television programs began airing in the early 1970s. Voice of America, BBC, and Radio Free Europe/Radio Liberty (RFE/RL) broadcast in both of Afghanistan's official languages.\n Considering this context, what question would you ask?",
"Instruct: Please narrate a story..": "Please narrate a short story in yoruba",
"Question-Answering: Kai found one ...": "Kai found one for sale online but it was too much money for her. Keeping the provided context in mind, please answer the subsequent question: What does Kai need to do before this? A. cheaper B. Open up her laptop C. save money",
"Translate 'how are you?' to Yoruba": "how are you?",
"Translate to pidgin": "Spain won the 2024 europa football cup. it was a tough one because they had to play very strong opponents in the quarter-finals, semi-finals and finals.",
"Translate 'Often, all Yoruba children...' to Yoruba": "Often, all Yoruba children take pride in speaking the Yoruba language.",
"Classify the sentiment": "Anyi na-echefu oke ike.",
"what is the topic of this text": "Africa Free Trade Zone: Kò sí ìdènà láti kó ọjà láti orílẹ̀èdè kan sí òmíràn",
"diacritize this text: ": "E sun, Alaga, fun ise amalayi ti e n se ni Naijiria. E maa ba a lo, egbon!",
"clean this text": "Abin mamaki ne aikin da shugabaZn HNajeriya ybake yi. kCiF 39gaba Tda haRkGa sir!",
"headline of this text": '** Sylvain Itté French ambassador don comot Niger Republic **. Sylvain Itté, di French ambassador for Niger don comot Niamey and currently e dey for flight from Ndjamena to Paris. Sylvain Itté, di French ambassador for Niger don comot Niamey very early dis morning and currently e dey for flight from Ndjamena to Paris.\n\nDi military detain Bazoum and im family for di presidential palace. Niger na former French colony, and France still get 1,500 sojas for di African country.\n\n"France don decide to withdraw dia ambassador. In di next hours our ambassador and several diplomats go return to France," Oga Macron tok.\n\nE add say di military co-operation dey "over" and French troops go leave in "di months to come".\n\n"Dis Sunday we celebrate one new step towards di sovereignty of Niger," di junta tok, for one statement wey AFP news agency quote.\n\nDi decision by Paris dey come afta months of hostility and protest against di presence of French for di kontri, wit regular demonstrations for di capital Niamey.\n\nDi move don scata France operations against Islamist militants for di wider Sahel region and Paris influence for there. But oga Macron tok say "putschists no go hold France hostage,"'
}
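# Pre-wrapped prompts for the sample texts above, used when no task is selected in the dropdown.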
instruction_wrap = {
# "Translate 'Often, all Yoruba children...' to Yoruba":"<translate> Often, all Yoruba children take pride in speaking the Yoruba language. <yor>",
"Me ya nuna?":"<prompt> Me ya nuna? <response>:",
"Wetin dem dey call you?":"<prompt> Wetin dem dey call you? <response>:",
"M nwere ike ịma onye ị bụ? Gịnị bụ njirimara gị?":"<prompt> M nwere ike ịma onye ị bụ? Gịnị bụ njirimara gị? <response>:",
"What are you called?":"<prompt> What are you called? <response>:",
"Bawo ni, kini nkan ti o nilo lati maa mo bayi?":"<prompt> Bawo ni, kini nkan ti o nilo lati maa mo bayi? <response>:",
"Tell me a story in pidgin": "<prompt> Tell me a story in pidgin <response>:",
"Spain won the 2024 europa football cup. it was a tough one because they had to play very strong opponents in the quarter-finals, semi-finals and finals.": "<translate> Spain won the 2024 europa football cup. it was a tough one because they had to play very strong opponents in the quarter-finals, semi-finals and finals. <pcm>",
"how are you?": "<translate> how are you? <yor>:",
"Often, all Yoruba children take pride in speaking the Yoruba language.": "<translate> Often, all Yoruba children take pride in speaking the Yoruba language. <yor>",
"who are you?": "<prompt> who are you? <response>:",
"Kai found one for sale online but it was too much money for her. Keeping the provided context in mind, please answer the subsequent question: What does Kai need to do before this? A. cheaper B. Open up her laptop C. save money":"<prompt> Kai found one for sale online but it was too much money for her. Keeping the provided context in mind, please answer the subsequent question: What does Kai need to do before this? A. cheaper B. Open up her laptop C. save money <response>:",
"Speak Yoruba": "<prompt> Speak Yoruba <response>:",
"Please narrate a short story in yoruba":"<prompt> Please narrate a short story in yoruba <response>:",
"Afghanistan has around 150 radio stations and over 50 television stations, which includes the state-owned RTA TV and various private channels such as TOLO and Shamshad TV. The first Afghan newspaper was published in 1906 and there are hundreds of print outlets today. By the 1920s, Radio Kabul was broadcasting local radio services. Television programs began airing in the early 1970s. Voice of America, BBC, and Radio Free Europe/Radio Liberty (RFE/RL) broadcast in both of Afghanistan's official languages.\n Considering this context, what question would you ask?":"<prompt> Afghanistan has around 150 radio stations and over 50 television stations, which includes the state-owned RTA TV and various private channels such as TOLO and Shamshad TV. The first Afghan newspaper was published in 1906 and there are hundreds of print outlets today. By the 1920s, Radio Kabul was broadcasting local radio services. Television programs began airing in the early 1970s. Voice of America, BBC, and Radio Free Europe/Radio Liberty (RFE/RL) broadcast in both of Afghanistan's official languages.\n Considering this context, what question would you ask? <response>:",
"Anyi na-echefu oke ike." : "<classify> Anyi na-echefu oke ike. <sentiment>",
"Abin mamaki ne aikin da shugabaZn HNajeriya ybake yi. kCiF 39gaba Tda haRkGa sir!": "<clean> Abin mamaki ne aikin da shugabaZn HNajeriya ybake yi. kCiF 39gaba Tda haRkGa sir! <pcm>",
"E sun, Alaga, fun ise amalayi ti e n se ni Naijiria. E maa ba a lo, egbon!": "<diacritize> E sun, Alaga, fun ise amalayi ti e n se ni Naijiria. E maa ba a lo, egbon! <yor>",
"Africa Free Trade Zone: Kò sí ìdènà láti kó ọjà láti orílẹ̀èdè kan sí òmíràn": "<classify> Africa Free Trade Zone: Kò sí ìdènà láti kó ọjà láti orílẹ̀èdè kan sí òmíràn <topic>",
'** Sylvain Itté French ambassador don comot Niger Republic **. Sylvain Itté, di French ambassador for Niger don comot Niamey and currently e dey for flight from Ndjamena to Paris. Sylvain Itté, di French ambassador for Niger don comot Niamey very early dis morning and currently e dey for flight from Ndjamena to Paris.\n\nDi military detain Bazoum and im family for di presidential palace. Niger na former French colony, and France still get 1,500 sojas for di African country.\n\n"France don decide to withdraw dia ambassador. In di next hours our ambassador and several diplomats go return to France," Oga Macron tok.\n\nE add say di military co-operation dey "over" and French troops go leave in "di months to come".\n\n"Dis Sunday we celebrate one new step towards di sovereignty of Niger," di junta tok, for one statement wey AFP news agency quote.\n\nDi decision by Paris dey come afta months of hostility and protest against di presence of French for di kontri, wit regular demonstrations for di capital Niamey.\n\nDi move don scata France operations against Islamist militants for di wider Sahel region and Paris influence for there. But oga Macron tok say "putschists no go hold France hostage,"': '<title> ** Sylvain Itté French ambassador don comot Niger Republic **. Sylvain Itté, di French ambassador for Niger don comot Niamey and currently e dey for flight from Ndjamena to Paris. Sylvain Itté, di French ambassador for Niger don comot Niamey very early dis morning and currently e dey for flight from Ndjamena to Paris.\n\nDi military detain Bazoum and im family for di presidential palace. Niger na former French colony, and France still get 1,500 sojas for di African country.\n\n"France don decide to withdraw dia ambassador. In di next hours our ambassador and several diplomats go return to France," Oga Macron tok.\n\nE add say di military co-operation dey "over" and French troops go leave in "di months to come".\n\n"Dis Sunday we celebrate one new step towards di sovereignty of Niger," di junta tok, for one statement wey AFP news agency quote.\n\nDi decision by Paris dey come afta months of hostility and protest against di presence of French for di kontri, wit regular demonstrations for di capital Niamey.\n\nDi move don scata France operations against Islamist militants for di wider Sahel region and Paris influence for there. But oga Macron tok say "putschists no go hold France hostage," <headline>',
}
# Task options
task_options = {
"select": "{}",
"Text Generation": "{}",
"Translation": "<translate> {} ",
"Sentiment Classification": "<classify> {} <sentiment>:",
"Topic Classification": "<classify> {} <topic>",
"Simple Instruction Following" : "<prompt> {} <response>:",
"Headline Generation": "<title> {} <headline>",
"Text Diacritization": "<diacritize> {} ",
"Question Generation": "<prompt> {} <response>:",
"Question-Answering" : "<prompt> {} <response>:",
"Text Summarization" : "<summarize> {} <summary>:",
"Text Cleaning": "<clean> {} "
}
# Language options for diacritize, translation and clean tasks
language_options = {
"select": "",
"Yoruba": "<yor>",
"Hausa": "<hau>",
"Ibo": "<ibo>",
"Pidgin": "<pcm>",
"English": "<eng>",
"Efik": "<efi>",
"Urhobo": "<urh>",
"Fulah": "<ful>"
}
# Dropdown for sample text
sample_text = st.selectbox("Select a sample text to test the model:", list(sample_texts.keys()))
# Dropdown for tasks
task = st.selectbox("Select a task for the model:", list(task_options.keys()))
# Conditionally show language options dropdown for diacritize and clean tasks
if task in ["Text Diacritization", "Text Cleaning", "Translation"]:
    language = st.selectbox("Select a Nigerian language:", list(language_options.keys()))
    task_value = f"{task_options[task]} {language_options[language]}"
else:
    task_value = task_options[task]
# Text input
initial_input = st.text_area("Enter text below **(PLEASE, FIRST READ ALL INSTRUCTIONS IN THE SIDEBAR CAREFULLY FOR THE BEST EXPERIENCE)**: ", sample_texts.get(sample_text, sample_text))
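# With no task selected ("select" is the placeholder), look up a pre-wrapped instruction
# for the chosen sample text; otherwise keep the input as entered.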
if task == "select":
user_input = instruction_wrap.get(initial_input, initial_input)
else:
user_input = initial_input
# print("Final user input: ", user_input)
if st.button("Generate"):
if user_input:
with st.spinner("Please wait..."):
wrapped_input = asyncio.run(wrap_text(user_input, task_value))
# print("wrapped_input: ", wrapped_input)
generation_config["max_new_tokens"]= min(max_new_tokens, 1024 - len(tokenizer.tokenize(wrapped_input)))
start_time = time.time()
generation_config["max_new_tokens"] = min(max_new_tokens, 1024 - len(tokenizer.tokenize(wrapped_input)))
generated_text = asyncio.run(generate_from_api(wrapped_input, generation_config))
if generated_text == "FAILED":
st.write("GPU not available. Running Model on CPU. This might take a while...")
input_ids = tokenizer(wrapped_input, return_tensors="pt")["input_ids"].to(device)
output = model.generate(input_ids, **generation_config)
generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
# generated_text = re.split(r"\|(end_f_text|end_of_text|end_ofext|end_of_text_||end_of_te|end_oftext)|:|`", generated_text)[0]
generated_text = re.sub(r"\|(end_f_text|end_of_text|end_ofext|end_of_text_|end_of_te|end_o|end_of_tet|end_oftext)|:|`", "", generated_text)
generated_text = generated_text.strip("\n")
# print("Generated text: ", generated_text)
if task == "Sentiment Classification" or "<sentiment>" in wrapped_input :
if "negative" in generated_text.lower():
generated_text = "Negative"
elif "positive" in generated_text.lower():
generated_text = "Positive"
elif "neutral" in generated_text.lower():
generated_text = "Neutral"
elif task == "Topic Classification" or "<topic>" in wrapped_input:
generated_text = generated_text[:15]
# print("split", generated_text.split(" ")[0], re.split(r"\.|\n|\*\*|\*", generated_text)[0], generated_text.split(" "))
generated_text = re.split(r"\.|\n|\*\*|\*", generated_text)[0] + "."
generated_text = asyncio.run(assign_topic(generated_text))
elif task == "Translation" or "<translate>" in wrapped_input:
# print("split for translation: ", n_sentences, re.split(r"\.|\n", generated_text)[:n_sentences])
n_sentences, split_= asyncio.run(count_sentences(initial_input))
print(n_sentences, split_)
_, generated_text = asyncio.run(count_sentences(generated_text))
generated_text = ". ".join(generated_text[:n_sentences]) + "."
elif task == "Question Generation" or "Question Generation:" in sample_text:
if "?" in generated_text:
generated_text = "? ".join(re.split(r"\?", generated_text)[:-1]) + "?"
elif task == "Question-Answering" or "Question-Answering:" in sample_text:
generated_text = asyncio.run(extract_answer(generated_text))
full_output = st.empty()
output = ""
for next_token in tokenizer.tokenize(generated_text):
output += tokenizer.convert_tokens_to_string([next_token])
full_output.markdown(f"<div style='word-wrap: break-word;'>{output}</div>", unsafe_allow_html=True)
# full_output.text(output)
time.sleep(0.1)
end_time = time.time()
time_diff = end_time - start_time
st.write("Time taken: ", time_diff , "seconds.")
else:
st.write("Please enter some text to generate.") |