import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM
import time
import asyncio
import aiohttp
import json
import torch
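# Load the tokenizer and the SabiYarn-125M model locally; this serves as a fallback when the hosted API is unreachable.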
repo_name = "BeardedMonster/SabiYarn-125M"
device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained(repo_name, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_name, trust_remote_code=True).to(device)
# Add sidebar with instructions
st.sidebar.title("Instructions: How to use")
st.sidebar.write("""
1. Write something in the text area (a prompt or random text) or use the dropdown menu to select a predefined text.
2. Select a task from the **task dropdown menu** below. **This is very important as it ensures the model responds accordingly.**
3. If a dropdown menu pops up for a Nigerian language, **select the target Nigerian language.**
4. Click Generate to get a response below the text area.\n
**Note: The model's performance on a given prompt varies due to its size and the distribution of its training data.**\n
5. Lastly, you can tweak some of the generation parameters below to improve performance.
""")
# Define generation configuration
max_length = st.sidebar.slider("Max Length", min_value=10, max_value=500, value=100)
max_new_tokens = st.sidebar.slider("Max New Tokens", min_value=30, max_value=768, value=50)
num_beams = st.sidebar.slider("Number of Beams", min_value=1, max_value=10, value=5)
temperature = st.sidebar.slider("Temperature", min_value=0.1, max_value=2.0, value=0.9)
top_k = st.sidebar.slider("Top-K", min_value=1, max_value=100, value=50)
top_p = st.sidebar.slider("Top-P", min_value=0.1, max_value=1.0, value=0.95)
repetition_penalty = st.sidebar.slider("Repetition Penalty", min_value=1.0, max_value=10.0, value=2.0)
length_penalty = st.sidebar.slider("Length Penalty", min_value=0.1, max_value=10.0, value=1.7)
# early_stopping = st.sidebar.selectbox("Early Stopping", [True, False], index=0)
generation_config = {
    "max_length": max_length,
    "num_beams": num_beams,
    "do_sample": True,
    "temperature": temperature,
    "top_k": top_k,
    "top_p": top_p,
    "repetition_penalty": repetition_penalty,
    "length_penalty": length_penalty,
    "early_stopping": True
}
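# max_new_tokens is added to this config at generation time, capped so prompt + output stay within the 1024-token context.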
# Streamlit app
st.title("SabiYarn-125M: Generates text in multiple Nigerian languages.")
st.write("**Supported Languages: English, Yoruba, Igbo, Hausa, Pidgin, Efik, Urhobo, Fulfulde, Fulah. \nResults might not be coherent for less represented languages (i.e. Efik, Urhobo, Fulfulde, Fulah).**")
st.write("**It might take a while to return an output on the first 'Generate' click.**")
st.write("**For convenience, you can use ChatGPT to source sample text and to evaluate the model's output.**")
st.write("-" * 50)
async def generate_from_api(user_input, generation_config):
    url = "https://pauljeffrey--sabiyarn-fastapi-app.modal.run/predict"
    payload = {
        "prompt": user_input,
        "config": generation_config
    }
    headers = {
        'Content-Type': 'application/json'
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(url, headers=headers, json=payload) as response:
            response.raise_for_status()  # surface HTTP errors so the caller falls back to the local model
            return await response.text()
# Sample texts (display label -> raw text) shown in the dropdown
sample_texts = {
    "Tell me a story in pidgin": "Tell me a story in pidgin",
    "Oma Ede, Mi ji ogede...": "Oma Ede, Mi ji ogede mi a foroma orhorho edha meji ri eka. ",
    "who are you?": "who are you?",
    "Translate 'how are you?' to Yoruba": "Translate 'how are you?' to Yoruba",
    "Classify the sentiment": "Anyi na-echefu oke ike.",
    "what is the topic of this text": "Africa Free Trade Zone: Kò sí ìdènà láti kó ọjà láti orílẹ̀èdè kan sí òmíràn",
    "diacritize this text: ": "E sun, Alaga, fun ise amalayi ti e n se ni Naijiria. E maa ba a lo, egbon!",
    "clean this text": "Di service say sheVbin doE tlykto find out how Zelensky wak^ g3 be before im go visit MykolailXwey flood fKg Junew Zele?sky confam s,y rem don inform a2 abouo di frrest, im sHy dR_head of SBU bin don update aI abou.Wdi 'fiMht againsX traitorsfLZelevsky bin visi# Mykolaiv for June tk see di dam&ge wey di collapse of KakhoYka dam 'ause Rgd den im v:sit again for July afta Russia1heavy bo+bing. Di 8ecurity sBrviceTs1y dem bin know/Mbout di plan bev9re di ;isit, snd dat onG make put vdditional security measurZs{",
    "headline of this text": '** Sylvain Itté French ambassador don comot Niger Republic **. Sylvain Itté, di French ambassador for Niger don comot Niamey and currently e dey for flight from Ndjamena to Paris. Sylvain Itté, di French ambassador for Niger don comot Niamey very early dis morning and currently e dey for flight from Ndjamena to Paris.\n\nDi military detain Bazoum and im family for di presidential palace. Niger na former French colony, and France still get 1,500 sojas for di African country.\n\n"France don decide to withdraw dia ambassador. In di next hours our ambassador and several diplomats go return to France," Oga Macron tok.\n\nE add say di military co-operation dey "over" and French troops go leave in "di months to come".\n\n"Dis Sunday we celebrate one new step towards di sovereignty of Niger," di junta tok, for one statement wey AFP news agency quote.\n\nDi decision by Paris dey come afta months of hostility and protest against di presence of French for di kontri, wit regular demonstrations for di capital Niamey.\n\nDi move don scata France operations against Islamist militants for di wider Sahel region and Paris influence for there. But oga Macron tok say "putschists no go hold France hostage,"'
}
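# Pre-wrapped versions of the sample prompts with the task tags the model expects, keyed by the same labels as sample_texts.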
instruction_wrap = {
    "Tell me a story in pidgin": "<prompt> Tell me a story in pidgin <response>:",
    "Translate 'how are you?' to Yoruba": "<prompt> Translate 'how are you?' to Yoruba <response>:",
    "who are you?": "<prompt> who are you? <response>:",
    "Classify the sentiment": "<classify> Anyi na-echefu oke ike. <sentiment>",
    "clean this text": "<clean> Di service say sheVbin doE tlykto find out how Zelensky wak^ g3 be before im go visit MykolailXwey flood fKg Junew Zele?sky confam s,y rem don inform a2 abouo di frrest, im sHy dR_head of SBU bin don update aI abou.Wdi 'fiMht againsX traitorsfLZelevsky bin visi# Mykolaiv for June tk see di dam&ge wey di collapse of KakhoYka dam 'ause Rgd den im v:sit again for July afta Russia1heavy bo+bing. Di 8ecurity sBrviceTs1y dem bin know/Mbout di plan bev9re di ;isit, snd dat onG make put vdditional security measurZs{ <pcm>",
    "diacritize this text: ": "<diacritize> E sun, Alaga, fun ise amalayi ti e n se ni Naijiria. E maa ba a lo, egbon! <yor>",
    "what is the topic of this text": "<classify> Africa Free Trade Zone: Kò sí ìdènà láti kó ọjà láti orílẹ̀èdè kan sí òmíràn <topic>",
    'headline of this text': '<title> ** Sylvain Itté French ambassador don comot Niger Republic **. Sylvain Itté, di French ambassador for Niger don comot Niamey and currently e dey for flight from Ndjamena to Paris. Sylvain Itté, di French ambassador for Niger don comot Niamey very early dis morning and currently e dey for flight from Ndjamena to Paris.\n\nDi military detain Bazoum and im family for di presidential palace. Niger na former French colony, and France still get 1,500 sojas for di African country.\n\n"France don decide to withdraw dia ambassador. In di next hours our ambassador and several diplomats go return to France," Oga Macron tok.\n\nE add say di military co-operation dey "over" and French troops go leave in "di months to come".\n\n"Dis Sunday we celebrate one new step towards di sovereignty of Niger," di junta tok, for one statement wey AFP news agency quote.\n\nDi decision by Paris dey come afta months of hostility and protest against di presence of French for di kontri, wit regular demonstrations for di capital Niamey.\n\nDi move don scata France operations against Islamist militants for di wider Sahel region and Paris influence for there. But oga Macron tok say "putschists no go hold France hostage," <headline>',
}
# Task options
task_options = {
    "Instruction following": "<prompt> {} <response>:",
    "Sentiment classification": "<classify> {} <sentiment>",
    "Topic classification": "<classify> {} <topic>",
    "Title Generation": "<title> {} <headline>",
    "Diacritize text": "<diacritize> {}",
    "Clean text": "<clean> {}"
}
# Language options for diacritize and clean tasks
language_options = {
    "Yoruba": "<yor>",
    "Hausa": "<hau>",
    "Igbo": "<ibo>",
    "Pidgin": "<pcm>",
    "Efik": "<efi>",
    "Urhobo": "<urh>",
    "Fulah": "<ful>"
}
# Dropdown for sample text
sample_text = st.selectbox("Select a sample text to test the model:", list(sample_texts.keys()))
# Dropdown for tasks
task = st.selectbox("Select a task for the model:", list(task_options.keys()))
# Conditionally show language options dropdown for diacritize and clean tasks
if task in ["Diacritize text", "Clean text"]:
    language = st.selectbox("Select a language:", list(language_options.keys()))
    task_value = f"{task_options[task]} {language_options[language]}"
else:
    task_value = task_options[task]
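# Wrap the raw text in the selected task template unless it already contains one of the task tags.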
def wrap_text(text, task_value):
    tasks = ["<classify>", "<prompt>", "<clean>", "<title>", "<diacritize>"]
    if any(tag in text for tag in tasks):
        return text
    return task_value.format(text)
# Text input
user_input = st.text_area("Enter text below **(please, first read the instructions on how to use in the sidebar for a better experience)**: ", sample_texts[sample_text])
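# Map the entered text back to its pre-wrapped instruction when it matches a predefined sample; otherwise leave it unchanged.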
user_input = instruction_wrap.get(sample_texts.get(user_input, user_input), user_input)
if st.button("Generate"): | |
if user_input: | |
try: | |
st.write("**Generated Text Below:**") | |
wrapped_input = wrap_text(user_input, task_value) | |
print("wrapped_input: ", wrapped_input) | |
generation_config["max_new_tokens"]= min(max_new_tokens, 1024 - len(tokenizer.tokenize(wrapped_input))) | |
start_time = time.time() | |
try: | |
# Attempt the asynchronous API call | |
generation_config["max_new_tokens"] = min(max_new_tokens, 1024 - len(tokenizer.tokenize(wrapped_input))) | |
generated_text = asyncio.run(generate_from_api(wrapped_input, generation_config)) | |
except Exception as e: | |
print(f"API call failed: {e}. Using local model for text generation.") | |
# Use the locally loaded model for text generation | |
input_ids = tokenizer(wrapped_input, return_tensors="pt")["input_ids"].to(device) | |
output = model.generate(input_ids, **generation_config) | |
generated_text = tokenizer.decode(output[0], skip_special_tokens=True) | |
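            # Stream the decoded text token by token to mimic live generation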
            full_output = st.empty()
            output = ""
            for next_token in tokenizer.tokenize(generated_text):
                output += tokenizer.convert_tokens_to_string([next_token])
                full_output.markdown(f"<div style='word-wrap: break-word;'>{output}</div>", unsafe_allow_html=True)
                # full_output.text(output)
                time.sleep(0.1)
            end_time = time.time()
            time_diff = end_time - start_time
            st.write("Time taken: ", time_diff, "seconds.")
        except Exception as e:
            st.error(f"Error during text generation: {e}")
    else:
        st.write("Please enter some text to generate.")