import gradio as gr
from transformers import pipeline

# Initialize the sentiment analysis pipeline
sentiment = pipeline("sentiment-analysis")

# Return the sentiment analysis result for the user input
def get_sentiment(text):
    # Run sentiment analysis
    return sentiment(text)

gr.Interface(fn=get_sentiment, inputs="text", outputs="json", title="Sentiment Analysis", description="").launch()
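
# Note: pipeline("sentiment-analysis") with no model argument loads the default English
# sentiment model, and get_sentiment returns a list of dicts, e.g. [{'label': 'POSITIVE', 'score': 0.99}],
# which the "json" output component displays as-is.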

# Earlier version of this Space: a KoAlpaca-355M question-answering demo, kept commented out.
# import gradio as gr
# from transformers import AutoTokenizer, AutoModelForCausalLM
#
# # Load the KoAlpaca-355M model and tokenizer
# def get_pipe():
#     model_name = "heegyu/koalpaca-355m"
#     tokenizer = AutoTokenizer.from_pretrained(model_name)
#     tokenizer.truncation_side = "right"
#     model = AutoModelForCausalLM.from_pretrained(model_name)
#     return model, tokenizer
#
# # Build the prompt, generate a completion, and return the text after the prompt
# def get_response(tokenizer, model, user_input):
#     context = f"<usr>{user_input}\n<sys>"
#     inputs = tokenizer(
#         context,
#         truncation=True,
#         max_length=512,
#         return_tensors="pt")
#     generation_args = dict(
#         max_length=256,
#         min_length=64,
#         eos_token_id=2,
#         do_sample=True,
#         top_p=1.0,
#         early_stopping=True
#     )
#     outputs = model.generate(**inputs, **generation_args)
#     response = tokenizer.decode(outputs[0])
#     print(context)
#     print(response)
#     response = response[len(context):].replace("</s>", "")
#     return response
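#
# # Example: for the user input "안녕하세요", get_response builds the prompt "<usr>안녕하세요\n<sys>"
# # and returns the model's continuation after that prefix, with the prompt and "</s>" stripped.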
#
# model, tokenizer = get_pipe()
#
# def ask_question(input_):
#     response = get_response(tokenizer, model, input_)
#     return response
#
# gr.Interface(fn=ask_question, inputs="text", outputs="text", title="KoAlpaca-355M", description="Ask a question in Korean.").launch()