import torch
import gradio as gr
from huggingface_hub import hf_hub_download
import importlib.util
from functools import lru_cache
from time import time

# Import TinyStyler
tinystyler_module = importlib.util.module_from_spec(
    importlib.util.spec_from_file_location(
        "tinystyler",
        hf_hub_download(repo_id="tinystyler/tinystyler", filename="tinystyler.py"),
    )
)
tinystyler_module.__spec__.loader.exec_module(tinystyler_module)
(
    get_tinystyler_model,
    get_style_embedding_model,
    get_luar_model,
    get_simcse_model,
    compute_simcse,
    run_tinystyler_batch,
) = (
    tinystyler_module.get_tinystyler_model,
    tinystyler_module.get_style_embedding_model,
    tinystyler_module.get_luar_model,
    tinystyler_module.get_simcse_model,
    tinystyler_module.compute_simcse,
    tinystyler_module.run_tinystyler_batch,
)


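# Memoize generations on the exact argument tuple so identical requests are
# served from the cache instead of re-running generation.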
@lru_cache(maxsize=256)
def run_tinystyler(source_text, target_texts, reranking, temperature, top_p):
    try:
        device = "cuda" if torch.cuda.is_available() else "cpu"
        target_texts = [target_text.strip() for target_text in target_texts.split("\n")]
        assert (
            len(source_text) <= 200
        ), "Please enter a shorter source text (max 200 chars) for the purposes of this demo."
        assert (
            len(target_texts) <= 16
        ), "Please enter 16 or fewer example texts for the purposes of this demo."
        for target_text in target_texts:
            assert (
                len(target_text) <= 200
            ), "Please enter shorter target texts (max 200 chars per line) for the purposes of this demo."
        return run_tinystyler_batch(
            [source_text],
            [target_texts],
            reranking,
            temperature,
            top_p,
            200,
            device=device,
            seed=42,
            verbose=True,
            sim_sample=3,
            model_name='tinystyler_sim',
        )[0]
    except Exception as e:
        return f"Error: {e}"

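# Illustrative usage sketch for calling the wrapper above outside the UI. It is
# kept commented out so importing this file does not trigger generation; the
# argument order matches the Gradio handler, and the target texts are passed as
# a single newline-separated string, mirroring the textbox format. The sample
# values are taken from the preset examples below.
#
# print(run_tinystyler(
#     "i heard that new pizza joint is dope",
#     "Good afternoon, everybody.\nSo I'm just going to say a couple of quick things before I start taking questions.",
#     5,    # reranking
#     1.0,  # temperature
#     0.8,  # top_p
# ))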

#########################################################################
# Define Gradio Demo Interface
#########################################################################

# Preset examples with cached generations
preset_examples = {
    "Robert De Niro in Taxi Driver's Style": {
        "source_text": "I know that you and Frank were planning to disconnect me. And I'm afraid that's something I cannot allow to happen.",
        "target_texts": "You talkin' to me? You talkin' to me? You talkin' to me?\nThen who the hell else are you talking... you talking to me? Well I'm the only one here.\nWho the fuck do you think you're talking to? Oh yeah? OK.",
        "reranking": 5,
        "temperature": 1.0,
        "top_p": 0.8,
        "output": "You and Frank planned to disconnect me? 'Cause that's something I can't let happen.",
    },
    "Informal Style": {
        "source_text": "Innovation is where bold ideas meet the relentless pursuit of progress.",
        "target_texts": "the real world, the newly weds and laguna beach\nContact Warner Bros.or just go to ebay.I dont think youll find any\nthat I'm a woman's man with no time to talk!\nWhen you have an eye problem so you see 3,not 2  ( :\ncant wait for a new album from him.\nI'll pick one of my favorite country ones...\nto me, jamie foxx aint all that sexy.\nidk.....but i have faith in you lol\nWang Chung - Everybody Have Fun Tonight\ni am gonna have to defend the werewolf here.\nYEAH, AND I WASN'T VERY COMFORTABLE WITH IT EITHER...\nIF YOU TEXT YOUR ANSWER IN IT MIGHT IF YOU DON'T HAVE TEXT MESSAGES IN YOUR PLAN\nhe is about 83 yrs old\nHE IS TO ME FOR NOW, OUR BLACK GEORGE CLOONEY.\nTill they run out of ideas\neminem because his some of his music is just so funny and relevent to todays pop music enviorment.",
        "reranking": 5,
        "temperature": 1.0,
        "top_p": 0.8,
        "output": "innovation is where bold ideas meet relentless pursuit of progress tho...",
    },
    "Barack Obama's Style": {
        "source_text": "i heard that new pizza joint is dope",
        "target_texts": "Good afternoon, everybody.\nLet me start out by saying that I was sorely tempted to wear a tan suit today -- (laughter) -- for my last press conference.\nBut Michelle, whose fashion sense is a little better than mine, tells me that's not appropriate in January.\nI covered a lot of the ground that I would want to cover in my farewell address last week.\nSo I'm just going to say a couple of quick things before I start taking questions.\nFirst, we have been in touch with the Bush family today, after hearing about President George H.W. Bush and Barbara Bush being admitted to the hospital this morning.\nThey have not only dedicated their lives to this country, they have been a constant source of friendship and support and good counsel for Michelle and me over the years.\nThey are as fine a couple as we know.  And so we want to send our prayers and our love to them.  Really good people.\nSecond thing I want to do is to thank all of you.\nSome of you have been covering me for a long time -- folks like Christi and Win.\nSome of you I've just gotten to know.  We have traveled the world together. \nWe’ve hit a few singles, a few doubles together.\nI’ve offered advice that I thought was pretty sound, like “don’t do stupid…stuff.”  (Laughter.)\nAnd even when you complained about my long answers, I just want you to know that the only reason they were long was because you asked six-part questions.  (Laughter.)   \nBut I have enjoyed working with all of you.\nThat does not, of course, mean that I’ve enjoyed every story that you have filed.",
        "reranking": 5,
        "temperature": 1.0,
        "top_p": 0.8,
        "output": "Just heard that the new pizza joint is doing something pretty great.",
    },
    "Donald Trump's Style": {
        "source_text": "I am asking you to support me.",
        "target_texts": "great American Patriots who voted for me, AMERICA FIRST, and MAKE AMERICA GREAT AGAIN, will have a GIANT VOICE long into the future.\nThey will not be disrespected or treated unfairly in any way, shape or form!!!\nTHE REPUBLICAN PARTY AND, MORE IMPORTANTLY, OUR COUNTRY, NEEDS THE PRESIDENCY MORE THAN EVER BEFORE - THE POWER OF THE VETO.\nSTAY STRONG!\nGet smart Republicans.\nFIGHT!\nGeorgia, we have a job to do TODAY.\nWe have to STOP socialism.\nWe have to PROTECT the American Dream.\nHow do you certify numbers that have now proven to be wrong and, in many cases, fraudulent!\nSad to watch!\nSleepy Eyes Chuck Todd is so happy with the fake voter tabulation process that he can’t even get the words out straight.\nThey found out they voted on a FRAUD.\nThe 75,000,000 great American Patriots who voted for me, AMERICA FIRST, and MAKE AMERICA GREAT AGAIN, will have a GIANT VOICE long into the future.\nThey will not be disrespected or treated unfairly in any way, shape or form!!!\nUSA demands the truth!",
        "reranking": 5,
        "temperature": 1.0,
        "top_p": 0.8,
        "output": "Support me, please!",
    },
}


with gr.Blocks(theme="ParityError/[email protected]") as demo:
    device = "cuda" if torch.cuda.is_available() else "cpu"

    # Immediately load models
    print("Loading models...", time())
    get_tinystyler_model(device, model_name='tinystyler_sim')
    get_style_embedding_model(device)
    get_luar_model(device)
    get_simcse_model(device)
    print("Done loading models.", time())

    gr.Markdown("""
        <table style="border: none;">
          <tr style="border: none;">
            <td style="border: none;"><img style="margin: 0" src="https://cdn-avatars.huggingface.co/v1/production/uploads/61c40eeb727d1257bf3cf5ba/k-npb3L2TEnhBECzjXPGK.png" width=165></td>
            <td style="border: none; vertical-align: middle;"><h1>TinyStyler Demo</h1>Style transfer the source text into the target style, given some example texts of the target style. You can adjust re-ranking and top_p to your desire to control the quality of style transfer. A higher re-ranking value will generally result in better generations, at slower speed.<br/><br/>You can find the model at <a href="https://huggingface.co/tinystyler/tinystyler">https://huggingface.co/tinystyler/tinystyler</a><br/><br/><em>Please note: this demo runs on a CPU-only machine, generation is much faster when run locally with a GPU.</em></td>
          </tr>
        </table>
    """)

    with gr.Row():
        example_dropdown = gr.Dropdown(
            label="Examples", choices=list(preset_examples.keys())
        )

    source_text = gr.Textbox(
        lines=3,
        placeholder="Enter the source text to transform into the target style...",
        label="Source Text",
    )
    target_texts = gr.Textbox(
        lines=5,
        placeholder="Enter example texts of the target style (one per line)...",
        label="Example Texts of the Target Style",
    )
    reranking = gr.Slider(1, 10, value=5, step=1, label="Re-ranking")
    temperature = gr.Slider(0.1, 2.0, value=1.0, step=0.1, label="Temperature")
    top_p = gr.Slider(0.0, 1.0, value=1.0, step=0.1, label="Top-P")

    output = gr.Textbox(
        lines=5,
        placeholder="Click 'Generate' to transform the source text into the target style.",
        label="Output",
        interactive=False,
    )

    def set_example(example_name):
        example = preset_examples[example_name]
        return (
            example["source_text"],
            example["target_texts"],
            example["reranking"],
            example["temperature"],
            example["top_p"],
            example["output"],
        )

    example_dropdown.change(
        set_example,
        inputs=[example_dropdown],
        outputs=[source_text, target_texts, reranking, temperature, top_p, output],
    )

    btn = gr.Button("Generate")
    btn.click(
        run_tinystyler,
        [source_text, target_texts, reranking, temperature, top_p],
        output,
    )

    # Pre-warm: run one generation against each preset's target texts so the
    # model and embedding code paths are exercised before the first user request.
    print("Pre-warming...", time())
    for preset_example in preset_examples.values():
        run_tinystyler("A test.", preset_example["target_texts"], 1, 1.0, 1.0)
    print("Done pre-warming.", time())

    # Initialize the fields with the first example
    first_example_name = list(preset_examples.keys())[0]
    example_dropdown.value = first_example_name
    (
        source_text.value,
        target_texts.value,
        reranking.value,
        temperature.value,
        top_p.value,
        output.value,
    ) = set_example(first_example_name)

demo.launch()