{"cells":[{"cell_type":"code","execution_count":1,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":38258,"status":"ok","timestamp":1720292542191,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"OCNt3VoHt5Nq","outputId":"39403a61-6ca2-4654-a084-0a09fc2ba3fa"},"outputs":[{"output_type":"stream","name":"stdout","text":["Mounted at /content/drive\n"]}],"source":["from google.colab import drive\n","drive.mount('/content/drive')"]},{"cell_type":"code","execution_count":2,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":442,"status":"ok","timestamp":1720292714705,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"HSjbX-JgxmOL","outputId":"17a3fd12-c90f-4865-d5a1-b80ca2d119e1"},"outputs":[{"output_type":"stream","name":"stdout","text":["/content/drive/MyDrive/S21\n"]}],"source":["import os\n","script_dir = os.path.dirname(\"/content/drive/MyDrive/S21/classnotes.ipynb\")\n","# Change the cwd to the script's directory\n","os.chdir(script_dir)\n","\n","# Now the cwd is set to the directory containing the script (and potentially the file)\n","print(os.getcwd()) # This will print the current working directory"]},{"cell_type":"code","execution_count":3,"metadata":{"executionInfo":{"elapsed":28808,"status":"ok","timestamp":1720292745598,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"7JbXHrA1tWfr","colab":{"base_uri":"https://localhost:8080/"},"outputId":"89581984-6dca-4d27-9c1a-035e675d4fa8"},"outputs":[{"output_type":"stream","name":"stdout","text":["\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m12.3/12.3 MB\u001b[0m \u001b[31m47.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m92.0/92.0 kB\u001b[0m \u001b[31m9.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25h Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.1/1.1 MB\u001b[0m \u001b[31m11.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25h"]}],"source":["!pip install -q gradio\n","!pip install -q tiktoken"]},{"cell_type":"code","execution_count":6,"metadata":{"executionInfo":{"elapsed":442,"status":"ok","timestamp":1720292788608,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"_sOT51G4tOrc"},"outputs":[],"source":["from transformers import GPT2LMHeadModel\n","import tiktoken\n","import torch\n","import gradio as gr\n","import model\n","from model import run_train, gen_text"]},{"cell_type":"code","execution_count":7,"metadata":{"executionInfo":{"elapsed":2,"status":"ok","timestamp":1720292789731,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"LGko_SepX3Pv"},"outputs":[],"source":["# Specify a path\n","PATH = \"/content/drive/MyDrive/S21/gpt_124M_30thJune2024.pth\"\n"]},{"cell_type":"code","execution_count":8,"metadata":{"executionInfo":{"elapsed":775,"status":"ok","timestamp":1720292791316,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"FICKsglhZHr3"},"outputs":[],"source":["class GPTConfig:\n"," block_size: int = 1024 # max sequence length\n"," vocab_size: int = 50304 # number of tokens: 50,000 BPE merges + 256 bytes tokens + 1 <|endoftext|> token\n"," n_layer: int = 12 # number of layers\n"," n_head: int = 12 # number of heads\n"," n_embd: int = 768 # embedding dimension"]},{"cell_type":"code","execution_count":9,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"txAZmr2qYdLH","executionInfo":{"status":"ok","timestamp":1720292814216,"user_tz":-330,"elapsed":22902,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"}},"outputId":"3f0f8fed-5cd5-4591-fadb-bb0d542f41d2"},"outputs":[{"output_type":"execute_result","data":{"text/plain":[""]},"metadata":{},"execution_count":9}],"source":["device = 'cuda' if torch.cuda.is_available() else 'cpu'\n","model2 = model.GPT(GPTConfig())\n","model2 = model2.to(device)\n","# model2.load_state_dict(torch.load(PATH),map_location=torch.device('cpu'))\n","model2.load_state_dict(torch.load('/content/drive/MyDrive/S21/gpt_124M_1.pth', map_location=torch.device(device)))\n"]},{"cell_type":"code","execution_count":17,"metadata":{"executionInfo":{"elapsed":4,"status":"ok","timestamp":1720205456686,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"IdQe0IQdY9km"},"outputs":[],"source":[]},{"cell_type":"code","execution_count":8,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":28447,"status":"ok","timestamp":1720275103624,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"qJjvfdWfiORi","outputId":"4c1baccf-638f-427e-c272-1001231c25d3"},"outputs":[{"output_type":"stream","name":"stdout","text":["> The adventurer delved into the dark..., speak there's faithful madward.\n","\n","Now, and most Lady hereafter sent her well; call the foot of such a good,\n","For you,\n","If all from the city butchers\n","By\n","> The adventurer delved into the dark...,\n","For that have you keep it, webusiness and no truth?\n","Sir more sort of the mind\n","As presently you, you have had a merry years\n","That venture-CAMILLO:\n","> The adventurer delved into the dark..., hang it have possible\n","Bestlingied below the conspe;\n","For that come on the fain 
of no\n","And all, give and match from me\n","Should now upon my worthy weak-women\n","> The adventurer delved into the dark..., take them go approaches\n","thanable; then no worst, hence comes here's sorrow your humblebts:\n","Had been acquainted with such badxth-ower.\n","\n","Sir, from him can\n","> The adventurer delved into the dark..., tell\n","For since that such virtue,\n","For great purpose to the king's another ill-t passion express the best and we shall\n","Which never for me.\n","\n","GLOUCESTER:\n"]}],"source":["start_tokens = \"The adventurer delved into the dark...\" # You can provide a list of token IDs as a starting prompt here\n","max_length = 50 # Maximum length of the generated text\n","num_return_sequences = 5 # Number of text sequences to generate\n","\n","decoded = gen_text(model2, start_tokens, max_length, num_return_sequences)"]},{"cell_type":"code","execution_count":16,"metadata":{"id":"hfbynK20i6NN","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1720206852956,"user_tz":-330,"elapsed":214728,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"}},"outputId":"e21afa9f-35fc-4700-98cb-4573eed170cd"},"outputs":[{"output_type":"stream","name":"stdout","text":["> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me; and deign of a wont\n","For now remain\n","For from men and that pluck supper buried-- no comfort,\n","If the king but thence and me me.\n","How she would come, come home;\n","ESC? which you have no intent\n","These two men the self,\n","aeth o'er was a fellow of enough,\n","Master\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me; and drops\n","Were, men and noble Edward's children\n","Were half i' the blood it,\n","By knave just and all report about\n","She have no;\n","Would our battle through the pity set,\n","the entertainment of the harm\n","He will, for your vowsken feel,\n","Were you; and wowet doth against this which he\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me; and Capitol\n","And as become blepper\n","By master, from a more dead in,\n","thee\n","shOSPERO, you have no Rome.\n","Go give'd the sad-p'der-'sThen never stood by and in\n","In this business\n","The power of a slave,\n","In the keep your best stock proclaim'd from it out\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me; and god grant it must be it's\n","Upon men, here\n","Was the shame on eane it made\n","PERDITA enough to me me, take:\n","For it beggar\n","Were you do it out my knaves\n","One two as an necessaryves to leave,\n","Should warm and fear of the bisson diseases\n","the Angelo:\n","The\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me.\n","If, come before there's envy\n","Like bloubt about you have before indeed\n","She'er\n","A shrewd's crown-s made together, and bad daughter,\n","I have made a mortalvish harm hereafter a goods\n","Thou know, a welcome home\n","She was, it;\n","Who from the king; go walk upon me,\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... 
me; and pant\n","Will; goest indeed we hidness, from himself hope to such were thus Lord till\n","Were not grant of this shows,\n","Those may bades and sl reward\n","This bed, bid them the intent\n","Of Clifford hence\n"," noble is content\n","When our country's daughter,\n","Or both ere lady's an enm; go alone\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me; norCor virtuous will perform.'\n","For there they have no master before great, and rascalPeace of it\n","Foraleth it deserved thence\n","She have no, as this young marketew, to me,\n","For it straight here in a sea: what,\n"," quainMost lose\n"," answer you; and wretch out no,\n","Were, and\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me; and drops\n","For fit it gave-attRather ways; then be this most brve bent\n","But for him up the hand-w.\n","Meth you have his voice of of women gives\n","The thousands that thy not\n","CLEOMAS: when, for as the sound,\n","That have no hast no breast.\n","\n","motionken composition arms,\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me; and Vol Angelo\n","MIR's dead.\n","\n"," power,\n","the hour possessed within, my master within which dined and the faults\n","Yes, be still,\n","My sister's before me me, and most ha me,\n","Cry\n"," use the;\n","When it dream, again's answer 'em dragon deliver ta'er\n","Either neither say you\n","> The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity... me;\n","Indeed,' to a particular's perish\n","Not ang i' the worst of some prepare under a woman in,\n","Which you have set down i'en the field\n","To take the horses'd me, and by the better-night\n","And bring Baptista enough to give unto like to be to take at it be\n","When it you'll give me\n","\n"]}],"source":["start_tokens = \"The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity...\" # You can provide a list of token IDs as a starting prompt here\n","max_length = 100 # Maximum length of the generated text\n","num_return_sequences = 10 # Number of text sequences to generate\n","\n","gen_text(model2, start_tokens, max_length, num_return_sequences)\n"]},{"cell_type":"code","execution_count":13,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":646},"executionInfo":{"elapsed":1189,"status":"ok","timestamp":1720276368008,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"},"user_tz":-330},"id":"1u3NJcMouwsx","outputId":"4197a764-cbe0-4115-a3e9-c2dda8a4d515"},"outputs":[{"output_type":"stream","name":"stdout","text":["Setting queue=True in a Colab notebook requires sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n","\n","Colab notebook detected. To show errors in colab notebook, set debug=True in launch()\n","Running on public URL: https://e6c7439a4e347d91e9.gradio.live\n","\n","This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)\n"]},{"output_type":"display_data","data":{"text/plain":[""],"text/html":["
"]},"metadata":{}},{"output_type":"execute_result","data":{"text/plain":[]},"metadata":{},"execution_count":13}],"source":["import gradio as gr\n","from transformers import GPT2Tokenizer, AutoModelForCausalLM\n","\n","\n","start_tokens = \"The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity...\" # You can provide a list of token IDs as a starting prompt here\n","max_length = 100 # Maximum length of the generated text\n","num_return_sequences = 10 # Number of text sequences to generate\n","\n","# gen_text(model2, start_tokens, max_length, num_return_sequences)\n","\n","# Define generation function\n","def generate_text(prompt):\n"," start_tokens = prompt\n"," output = gen_text(model2, start_tokens, max_length, num_return_sequences)\n"," #return tokenizer.decode(output[0], skip_special_tokens=True)\n"," return output\n","\n","# Humorous prompt options\n","prompts = [\n"," \"Karen, armed with a coupon...\",\n"," \"Gary the goldfish, tired...\",\n"," \"The local news team received...\",\n"," \"In a shocking turn of events...\",\n"," \"The kingdom's annual jousting...\",\n"," \"The sentient toaster, tired...\",\n"," \"Feeling underappreciated...\",\n"," \"The fortune cookie factory...\"\n","]\n","\n","# Gradio interface with dropdown for prompt selection\n","interface = gr.Interface(\n"," fn=generate_text,\n"," inputs=gr.Dropdown(choices=prompts, label=\"Humorous Prompt\"),\n"," outputs=\"text\",\n"," title=\"Humorous Text Generator with GPT-2\",\n"," description=\"Get a chuckle with AI-generated humorous stories based on your chosen prompt.\"\n",")\n","\n","# Launch the Gradio app\n","interface.launch()"]},{"cell_type":"code","source":["# Define generation function\n","def generate_text(prompt, max_length=50, num_return_sequences=10):\n"," \"\"\"Generates humorous text using the GPT-2 model based on the provided prompt and user-specified parameters.\n","\n"," Args:\n"," prompt (str): The starting text for the generation.\n"," max_length (int, optional): The maximum length of the generated text. Defaults to 100.\n"," num_return_sequences (int, optional): The number of different text sequences to generate. 
,{"cell_type":"code","source":["# Define generation function\n","def generate_text(prompt, max_length=50, num_return_sequences=10):\n","    \"\"\"Generates humorous text using the GPT-2 model based on the provided prompt and user-specified parameters.\n","\n","    Args:\n","        prompt (str): The starting text for the generation.\n","        max_length (int, optional): The maximum length of the generated text. Defaults to 50.\n","        num_return_sequences (int, optional): The number of different text sequences to generate. Defaults to 10.\n","\n","    Returns:\n","        list: A list of generated humorous text sequences.\n","    \"\"\"\n","    start_tokens = prompt\n","    generated_texts = gen_text(model2, start_tokens, max_length, num_return_sequences)\n","    return generated_texts\n","\n","# Humorous prompt options\n","prompts = [\n","    \"The automatic doors at the grocery store, tired of people holding them open for conversations, developed a mischievous sense of humor.\",\n","    \"The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside.\",\n","    \"The fridge goes on a hunger strike\",\n","    \"A colony of ants, inspired by a motivational poster, embarked on a quest to climb the tallest tree in the garden.\",\n","    \"A particularly chatty parrot accidentally spilled the villain's evil plan during a casual conversation with the local mailman.\",\n","    \"A squirrel declares war on the birdfeeder...\",\n","    \"The refrigerator, overflowing with forgotten groceries, staged a silent protest, refusing to cool anything until some order was restored.\",\n","    \"A fortune cookie predicts world domination\"\n","]\n","\n","# Gradio interface with user inputs and dropdown for prompt selection\n","interface = gr.Interface(\n","    fn=generate_text,\n","    inputs=[\n","        gr.Dropdown(choices=prompts, label=\"Pre-defined Prompt\"),\n","        gr.Slider(minimum=10, maximum=200, label=\"Max Text Length\", value=100, step=1),\n","        gr.Slider(minimum=1, maximum=20, label=\"Number of Outputs\", value=10, step=1)\n","    ],\n","    outputs=\"text\",\n","    title=\"Humorous Text Generator with GPT-2\",\n","    description=\"Get a chuckle with AI-generated funny stories! Provide a prompt (or choose one), adjust the desired text length and number of outputs, and let the AI do the rest!\",\n",")\n","\n","# Launch the Gradio app\n","interface.launch()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":646},"id":"Uk5URgnBVEnk","executionInfo":{"status":"ok","timestamp":1720278608955,"user_tz":-330,"elapsed":2848,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"}},"outputId":"bf410827-ecef-4ccf-80b5-72e8866566dd"},"execution_count":28,"outputs":[{"output_type":"stream","name":"stdout","text":["Setting queue=True in a Colab notebook requires sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n","\n","Colab notebook detected. To show errors in colab notebook, set debug=True in launch()\n","Running on public URL: https://a1805b7b0e14114fa3.gradio.live\n","\n","This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)\n"]},{"output_type":"display_data","data":{"text/plain":[""],"text/html":["
"]},"metadata":{}},{"output_type":"execute_result","data":{"text/plain":[]},"metadata":{},"execution_count":28}]},{"cell_type":"code","source":["generate_text(\"The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside.\", max_length=50, num_return_sequences=10)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":750},"id":"nq6Oe1QNb4GW","executionInfo":{"status":"ok","timestamp":1720278975871,"user_tz":-330,"elapsed":48361,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"}},"outputId":"63744072-62ad-4402-d471-8b394b95f024"},"execution_count":31,"outputs":[{"output_type":"stream","name":"stdout","text":["> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. enemies\n","Should straight you sister so, then 'towle in a hand\n","From the knash slily hereafter\n","Were\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. Cai\n","From the sort will of nature, we lose\n","For this her who,\n","Could sake of the absence of her\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. ladies\n","For which you other else as tender's eyes\n","Or shall this right noble\n","Mark us, come again.\n","\n","\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. bearWe have touch the news.We have who we else use nor\n","She pass'd\n","Nay you, come there have\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. chance which you: why\n","Dear brother but that; go disdain:-- me again\n","Upon it, 'tis it out another\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. marry there have no us\n","which? hang it shall bear alone: our late himself\n","From yesternight in the hearts of\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside.you, then that deserves\n","Good sister to have one\n","From earth\n","Uncle-morrow-morrow from the purpose\n"," you\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. senate, make our sour treacherums-in of an ass\n","They that way\n","Did the memory of mytis thus going in\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside.Go, make us forth majesty\n","Then shall ' against the soldier: look, my son,\n","Be plain presently; sit you\n","> The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. enemies\n","And not the Lord Hastings, then's stinking-time\n","But Romeo\n","Should yet our brought them themselves are upon\n"]},{"output_type":"execute_result","data":{"text/plain":["\"The self-driving car, fed up with rush hour traffic, decided to take a scenic detour through the countryside. 
enemies\\nAnd not the Lord Hastings, then's stinking-time\\nBut Romeo\\nShould yet our brought them themselves are upon\""],"application/vnd.google.colaboratory.intrinsic+json":{"type":"string"}},"metadata":{},"execution_count":31}]},{"cell_type":"code","source":["start_tokens  # display the prompt string currently stored in start_tokens"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":53},"id":"QiGYSwcXfgeZ","executionInfo":{"status":"ok","timestamp":1720277047339,"user_tz":-330,"elapsed":441,"user":{"displayName":"Seema Goel","userId":"06434468664332377904"}},"outputId":"10dbc134-7b28-4002-ccc9-63a8eefe09e9"},"execution_count":18,"outputs":[{"output_type":"execute_result","data":{"text/plain":["'The prophecy was clear: only the chosen one, with the perfect social media profile, could defeat the forces of online negativity...'"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"string"}},"metadata":{},"execution_count":18}]},{"cell_type":"code","source":[],"metadata":{"id":"onodAEVxgH2d"},"execution_count":null,"outputs":[]}],"metadata":{"accelerator":"GPU","colab":{"gpuType":"T4","provenance":[]},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.9.7"}},"nbformat":4,"nbformat_minor":0}