import importlib.util  # explicit: `import importlib` alone does not guarantee `importlib.util`
from functools import lru_cache
from time import time

import torch
import gradio as gr
from huggingface_hub import hf_hub_download

# Dynamically import the TinyStyler helper module from the Hugging Face Hub so
# the demo always runs the model code published alongside the weights.
_spec = importlib.util.spec_from_file_location(
    "tinystyler",
    hf_hub_download(repo_id="tinystyler/tinystyler", filename="tinystyler.py"),
)
tinystyler_module = importlib.util.module_from_spec(_spec)
_spec.loader.exec_module(tinystyler_module)

get_tinystyler_model = tinystyler_module.get_tinystyler_model
get_style_embedding_model = tinystyler_module.get_style_embedding_model
get_luar_model = tinystyler_module.get_luar_model
get_simcse_model = tinystyler_module.get_simcse_model
compute_simcse = tinystyler_module.compute_simcse
run_tinystyler_batch = tinystyler_module.run_tinystyler_batch


@lru_cache(maxsize=256)
def run_tinystyler(source_text, target_texts, reranking, temperature, top_p):
    """Rewrite ``source_text`` in the style of ``target_texts``.

    Args:
        source_text: Text to transform (max 200 chars for this demo).
        target_texts: Newline-separated example texts of the target style
            (max 16 lines, max 200 chars per line).
        reranking: Number of candidates generated and re-ranked.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.

    Returns:
        The style-transferred text, or an ``"Error: ..."`` string when the
        input is invalid or generation fails. Results are memoized with
        ``lru_cache`` so repeated identical requests are served instantly.
    """
    try:
        device = "cuda" if torch.cuda.is_available() else "cpu"
        targets = [target_text.strip() for target_text in target_texts.split("\n")]
        # Validate with explicit raises (not ``assert``) so the checks
        # survive ``python -O``; the message is surfaced to the user below.
        if len(source_text) > 200:
            raise ValueError(
                "Please enter a shorter source text (max 200 chars) for the purposes of this demo."
            )
        if len(targets) > 16:
            raise ValueError(
                "Please enter 16 or fewer examples for the purposes of this demo."
            )
        for target_text in targets:
            # Original code only checked non-emptiness here; the intended
            # constraint (per the message) is a 200-char per-line limit.
            if len(target_text) > 200:
                raise ValueError(
                    "Please enter shorter target texts (max 200 chars per line) for the purposes of this demo."
                )
        return run_tinystyler_batch(
            [source_text],
            [targets],
            reranking,
            temperature,
            top_p,
            200,
            device=device,
            seed=42,
            verbose=True,
            sim_sample=3,
            model_name="tinystyler_sim",
        )[0]
    except Exception as e:
        # Best-effort demo: report the failure in the output box rather
        # than crashing the UI. Note the error string is cached too.
        return f"Error: {e}"


#########################################################################
# Define Gradio Demo Interface
#########################################################################

# Preset examples with cached generations
preset_examples = {
    "Robert De Niro in Taxi Driver's Style": {
        "source_text": "I know that you and Frank were planning to disconnect me. And I'm afraid that's something I cannot allow to happen.",
        "target_texts": "You talkin' to me? You talkin' to me? You talkin' to me?\nThen who the hell else are you talking... you talking to me? Well I'm the only one here.\nWho the fuck do you think you're talking to? Oh yeah? OK.",
        "reranking": 5,
        "temperature": 1.0,
        "top_p": 0.8,
        "output": "You and Frank planned to disconnect me? 'Cause that's something I can't let happen.",
    },
    "Informal Style": {
        "source_text": "Innovation is where bold ideas meet the relentless pursuit of progress.",
        "target_texts": "the real world, the newly weds and laguna beach\nContact Warner Bros.or just go to ebay.I dont think youll find any\nthat I'm a woman's man with no time to talk!\nWhen you have an eye problem so you see 3,not 2 ( :\ncant wait for a new album from him.\nI'll pick one of my favorite country ones...\nto me, jamie foxx aint all that sexy.\nidk.....but i have faith in you lol\nWang Chung - Everybody Have Fun Tonight\ni am gonna have to defend the werewolf here.\nYEAH, AND I WASN'T VERY COMFORTABLE WITH IT EITHER...\nIF YOU TEXT YOUR ANSWER IN IT MIGHT IF YOU DON'T HAVE TEXT MESSAGES IN YOUR PLAN\nhe is about 83 yrs old\nHE IS TO ME FOR NOW, OUR BLACK GEORGE CLOONEY.\nTill they run out of ideas\neminem because his some of his music is just so funny and relevent to todays pop music enviorment.",
        "reranking": 5,
        "temperature": 1.0,
        "top_p": 0.8,
        "output": "innovation is where bold ideas meet relentless pursuit of progress tho...",
    },
    "Barack Obama's Style": {
        "source_text": "i heard that new pizza joint is dope",
        "target_texts": "Good afternoon, everybody.\nLet me start out by saying that I was sorely tempted to wear a tan suit today -- (laughter) -- for my last press conference.\nBut Michelle, whose fashion sense is a little better than mine, tells me that's not appropriate in January.\nI covered a lot of the ground that I would want to cover in my farewell address last week.\nSo I'm just going to say a couple of quick things before I start taking questions.\nFirst, we have been in touch with the Bush family today, after hearing about President George H.W. Bush and Barbara Bush being admitted to the hospital this morning.\nThey have not only dedicated their lives to this country, they have been a constant source of friendship and support and good counsel for Michelle and me over the years.\nThey are as fine a couple as we know. And so we want to send our prayers and our love to them. Really good people.\nSecond thing I want to do is to thank all of you.\nSome of you have been covering me for a long time -- folks like Christi and Win.\nSome of you I've just gotten to know. We have traveled the world together. \nWe’ve hit a few singles, a few doubles together.\nI’ve offered advice that I thought was pretty sound, like “don’t do stupid…stuff.” (Laughter.)\nAnd even when you complained about my long answers, I just want you to know that the only reason they were long was because you asked six-part questions. (Laughter.) \nBut I have enjoyed working with all of you.\nThat does not, of course, mean that I’ve enjoyed every story that you have filed.",
        "reranking": 5,
        "temperature": 1.0,
        "top_p": 0.8,
        "output": "Just heard that the new pizza joint is doing something pretty great.",
    },
    "Donald Trump's Style": {
        "source_text": "I am asking you to support me.",
        "target_texts": "great American Patriots who voted for me, AMERICA FIRST, and MAKE AMERICA GREAT AGAIN, will have a GIANT VOICE long into the future.\nThey will not be disrespected or treated unfairly in any way, shape or form!!!\nTHE REPUBLICAN PARTY AND, MORE IMPORTANTLY, OUR COUNTRY, NEEDS THE PRESIDENCY MORE THAN EVER BEFORE - THE POWER OF THE VETO.\nSTAY STRONG!\nGet smart Republicans.\nFIGHT!\nGeorgia, we have a job to do TODAY.\nWe have to STOP socialism.\nWe have to PROTECT the American Dream.\nHow do you certify numbers that have now proven to be wrong and, in many cases, fraudulent!\nSad to watch!\nSleepy Eyes Chuck Todd is so happy with the fake voter tabulation process that he can’t even get the words out straight.\nThey found out they voted on a FRAUD.\nThe 75,000,000 great American Patriots who voted for me, AMERICA FIRST, and MAKE AMERICA GREAT AGAIN, will have a GIANT VOICE long into the future.\nThey will not be disrespected or treated unfairly in any way, shape or form!!!\nUSA demands the truth!",
        "reranking": 5,
        "temperature": 1.0,
        "top_p": 0.8,
        "output": "Support me, please!",
    },
}

with gr.Blocks(theme="ParityError/Interstellar@0.0.1") as demo:
    device = "cuda" if torch.cuda.is_available() else "cpu"

    # Eagerly load all models at startup so the first request is not slow.
    print("Loading models...", time())
    get_tinystyler_model(device, model_name="tinystyler_sim")
    get_style_embedding_model(device)
    get_luar_model(device)
    get_simcse_model(device)
    print("Done loading models.", time())

    gr.Markdown("""

TinyStyler Demo

Style transfer the source text into the target style, given some example texts of the target style. You can adjust re-ranking and top_p to your desire to control the quality of style transfer. A higher re-ranking value will generally result in better generations, at slower speed.

You can find the model at https://huggingface.co/tinystyler/tinystyler

Please note: this demo runs on a CPU-only machine, generation is much faster when run locally with a GPU.
    """)

    with gr.Row():
        example_dropdown = gr.Dropdown(
            label="Examples", choices=list(preset_examples.keys())
        )

    source_text = gr.Textbox(
        lines=3,
        placeholder="Enter the source text to transform into the target style...",
        label="Source Text",
    )
    target_texts = gr.Textbox(
        lines=5,
        placeholder="Enter example texts of the target style (one per line)...",
        label="Example Texts of the Target Style",
    )
    reranking = gr.Slider(1, 10, value=5, step=1, label="Re-ranking")
    temperature = gr.Slider(0.1, 2.0, value=1.0, step=0.1, label="Temperature")
    top_p = gr.Slider(0.0, 1.0, value=1.0, step=0.1, label="Top-P")
    output = gr.Textbox(
        lines=5,
        placeholder="Click 'Generate' to transform the source text into the target style.",
        label="Output",
        interactive=False,
    )

    def set_example(example_name):
        """Return the widget values (including the cached output) for a preset."""
        example = preset_examples[example_name]
        return (
            example["source_text"],
            example["target_texts"],
            example["reranking"],
            example["temperature"],
            example["top_p"],
            example["output"],
        )

    example_dropdown.change(
        set_example,
        inputs=[example_dropdown],
        outputs=[source_text, target_texts, reranking, temperature, top_p, output],
    )

    btn = gr.Button("Generate")
    btn.click(
        run_tinystyler,
        [source_text, target_texts, reranking, temperature, top_p],
        output,
    )

    # Pre-warm the generation pipeline against every preset's target texts so
    # the first real click is fast (results land in the lru_cache).
    print("Pre-warming...", time())
    for preset_example in preset_examples.values():
        run_tinystyler("A test.", preset_example["target_texts"], 1, 1.0, 1.0)
    print("Done pre-warming.", time())

    # Initialize the fields with the first example.
    first_example = next(iter(preset_examples))
    example_dropdown.value = first_example
    (
        source_text.value,
        target_texts.value,
        reranking.value,
        temperature.value,
        top_p.value,
        output.value,
    ) = set_example(first_example)

demo.launch()