import os
import gradio as gr
from langchain.llms import HuggingFaceHub
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

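# The model repo id is read from the HF_MODEL_REPO environment variable.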
model_repo = os.getenv('HF_MODEL_REPO')
eos_string = "</s>"
# Llama-2 style chat prompt: the system block sets the translation task, the user query follows.
template = """<s>[INST] <<SYS>>
You are a translator. Your job is to translate the user's request from {source} to {target}.
<</SYS>>

{query} [/INST]"""

prompt = PromptTemplate(template=template, input_variables=["source","target","query"])

# Generation settings forwarded to the inference endpoint.
model_kwargs = {
    "max_new_tokens": 2048,
    "temperature": 0.5,
    "stop": ["</s>", "<|endoftext|>", "<|end|>"],
}

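# HuggingFaceHub text-generation endpoint wired to the prompt as a simple LLMChain.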
llm = HuggingFaceHub(repo_id=model_repo, task="text-generation", model_kwargs=model_kwargs)
chain = LLMChain(prompt=prompt, llm=llm)

def translation(source, target, text):
    # Fill the prompt variables and cut the model reply at the EOS marker.
    response = chain.run(source=source, target=target, query=text)
    return response.partition(eos_string)[0]

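# NOTE: lang_codes is referenced below but never defined in this file; this list of
# language names is only an assumed placeholder so the interface can launch.
lang_codes = ['English', 'Korean', 'Japanese', 'Chinese', 'French', 'German', 'Spanish']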
inputs = [gr.inputs.Dropdown(lang_codes, default='English', label='Source'),
          gr.inputs.Dropdown(lang_codes, default='Korean', label='Target'),
          gr.inputs.Textbox(lines=5, label="Input text"),
         ]

gr.Interface(fn=translation, inputs=inputs, outputs="text").launch()