import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.chains.llm import LLMChain
from langchain.chains.constitutional_ai.base import ConstitutionalChain
from langchain.chains.constitutional_ai.models import ConstitutionalPrinciple

def yodafy(sentence, selection):
    # gpt-3.5-turbo is a chat model, so use ChatOpenAI rather than the completion-style OpenAI wrapper.
    llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.8)

    # Prompt that rewrites or replies to the sentence in Yoda's voice,
    # depending on the radio selection ("Rewrite" or "Reply").
    prompt = PromptTemplate(
        input_variables=["sentence", "selection"],
        template=(
            "You are Master Yoda. A young apprentice has come to tell you this sentence: {sentence}. "
            "{selection} the sentence as Master Yoda would. You may refer to Star Wars and use punctuation. "
            "Do not include any newline characters."
        ),
    )

    chain = LLMChain(llm=llm, prompt=prompt)

    # Constitutional principle used to critique and revise the first draft so it stays in Yoda's style.
    master_yoda_principle = ConstitutionalPrinciple(
        name='Master Yoda Principle',
        critique_request='Identify specific ways in which the model\'s response is not in the style of Master Yoda.',
        revision_request='Please rewrite the model response to be in the style of Master Yoda, using his teachings, his wisdom and the Force.',
    )

    # Wrap the base chain so every answer is critiqued and revised against the principle above.
    constitutional_chain = ConstitutionalChain.from_llm(
        chain=chain,
        constitutional_principles=[master_yoda_principle],
        llm=llm,
        verbose=True,
    )

    return constitutional_chain.run(sentence=sentence, selection=selection)


# Gradio UI: a text box for the sentence and a radio button to choose between rewriting and replying.
iface = gr.Interface(fn=yodafy,
                     inputs=["text",
                             gr.Radio(["Rewrite", "Reply"])],
                     outputs="text",
                     examples=[
                         ["May the Force be with you!", "Rewrite"],
                     ])
iface.launch()
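
# A minimal sketch of calling yodafy directly, without the Gradio UI, assuming the
# OPENAI_API_KEY environment variable is set (the LangChain OpenAI wrappers read it from there).
# The input sentence below is just an illustrative example:
#
#   print(yodafy("I am afraid of the dark side.", "Reply"))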