import gradio as gr

from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain
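
# NOTE: these are the legacy LangChain import paths. On newer LangChain
# releases they are deprecated (ChatOpenAI moved to the separate
# `langchain_openai` package, and LLMChain has been superseded by
# `prompt | llm` composition), so the imports may need adjusting depending
# on the installed version.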

current_story = ""
story_branches = {}
story_history = []
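
# NOTE: this module-level state is shared by every browser session connected
# to the app, which is fine for a single-user demo; per-session state would
# use gr.State components instead.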


def generate_story_idea(llm, genre, theme, length):
    """Generate a story idea based on user inputs"""
    idea_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
Generate a compelling story idea for a {length} story in the {genre} genre
that explores the theme of {theme}.
Include a brief plot outline, main character, and setting.
Respond with just the idea, no additional commentary.
"""
    )

    idea_chain = LLMChain(llm=llm, prompt=idea_prompt, output_key="story_idea")
    return idea_chain.run(genre=genre, theme=theme, length=length)
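
# For comparison, a rough sketch of the same step using the newer LangChain
# Expression Language (assumes `langchain-openai` is installed; the model name
# is illustrative, and this is not a drop-in replacement for the chain above):
#
#     from langchain_core.output_parsers import StrOutputParser
#     from langchain_openai import ChatOpenAI
#
#     idea_chain = idea_prompt | ChatOpenAI(model="gpt-4o-mini") | StrOutputParser()
#     idea = idea_chain.invoke({"genre": genre, "theme": theme, "length": length})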


def create_story_beginning(llm, story_idea):
    """Create the beginning of a story based on the idea"""
    beginning_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
Based on this story idea: {story_idea}

Write an engaging opening for this story that introduces the main character and setting.
Make it compelling and hook the reader immediately.
Write approximately 250-350 words.
"""
    )

    beginning_chain = LLMChain(llm=llm, prompt=beginning_prompt, output_key="story_beginning")
    return beginning_chain.run(story_idea=story_idea)


def continue_linear_story(llm, story_so_far):
    """Continue a linear story based on the story so far"""
    continue_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
Continue this story:

{story_so_far}

Write the next part of the story (approximately 250-350 words), advancing the plot in an interesting way.
End at a point that feels satisfying but leaves room for more story.
"""
    )

    continue_chain = LLMChain(llm=llm, prompt=continue_prompt, output_key="story_continuation")
    return continue_chain.run(story_so_far=story_so_far)


def generate_story_branches(llm, story_so_far):
    """Generate three possible story continuations as branches"""
    branch_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
Based on this story so far:

{story_so_far}

Generate THREE possible directions the story could take next. For each:
1. Provide a brief title (10 words or less)
2. Write a short description (1-2 sentences)

Format as:
Option 1: [Title]
[Description]

Option 2: [Title]
[Description]

Option 3: [Title]
[Description]
"""
    )

    branch_chain = LLMChain(llm=llm, prompt=branch_prompt, output_key="story_branches")
    branches_text = branch_chain.run(story_so_far=story_so_far)

    # Parse the "Option N: Title" headings and their descriptions out of the raw response
    branches = []
    lines = branches_text.strip().split('\n')
    current_option = None
    current_description = ""

    for line in lines:
        if line.startswith("Option "):
            # A new option begins; store the one collected so far
            if current_option:
                branches.append((current_option, current_description.strip()))
            current_option = line.split(": ", 1)[1] if ": " in line else line
            current_description = ""
        elif current_option is not None:
            current_description += line + " "

    if current_option:
        branches.append((current_option, current_description.strip()))

    return branches
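
# NOTE: the parser above assumes the model follows the requested
# "Option N: [Title]" layout; a response that deviates from it simply yields
# fewer (or zero) branches rather than raising an error.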


def continue_branch(llm, story_so_far, branch_title, branch_description):
    """Continue the story based on the selected branch"""
    branch_continue_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
Continue this story:

{story_so_far}

The story should now follow this direction:
{branch_title} - {branch_description}

Write the next part of the story (approximately 250-350 words) following this direction.
Make it engaging and consistent with what came before.
"""
    )

    branch_chain = LLMChain(llm=llm, prompt=branch_continue_prompt, output_key="branch_continuation")
    return branch_chain.run(
        story_so_far=story_so_far,
        branch_title=branch_title,
        branch_description=branch_description
    )
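
# The Gradio callbacks below rebuild a ChatOpenAI client on every call from the
# key typed into the UI, so nothing is persisted server-side between requests.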


def initialize_story(api_key, genre, theme, length):
    """Initialize a new story with the given parameters"""
    global current_story, story_branches, story_history

    try:
        llm = ChatOpenAI(
            openai_api_key=api_key,
            model="gpt-4.5-preview",
            temperature=0.7
        )

        current_story = ""
        story_branches = {}
        story_history = []

        story_idea = generate_story_idea(llm, genre, theme, length)
        story_beginning = create_story_beginning(llm, story_idea)

        current_story = story_beginning
        story_history.append(current_story)

        return (
            f"Story Idea:\n{story_idea}\n\n" +
            f"Story Beginning:\n{story_beginning}",
            gr.update(visible=True),
            gr.update(visible=True),
            gr.update(visible=True)
        )
    except Exception as e:
        return (
            f"Error initializing story: {str(e)}",
            gr.update(visible=False),
            gr.update(visible=False),
            gr.update(visible=False)
        )


def continue_story(api_key):
    """Continue the linear story"""
    global current_story, story_history

    try:
        llm = ChatOpenAI(
            openai_api_key=api_key,
            model="gpt-4.5-preview",
            temperature=0.7
        )

        continuation = continue_linear_story(llm, current_story)
        current_story += "\n\n" + continuation
        story_history.append(continuation)

        return current_story, gr.update(value="Story continued successfully!")
    except Exception as e:
        return current_story, gr.update(value=f"Error continuing story: {str(e)}")


def generate_branches(api_key):
    """Generate branching options for the story"""
    global current_story, story_branches

    try:
        llm = ChatOpenAI(
            openai_api_key=api_key,
            model="gpt-4.5-preview",
            temperature=0.8
        )

        branches = generate_story_branches(llm, current_story)
        story_branches = {
            f"Option {i + 1}: {title}": (title, desc)
            for i, (title, desc) in enumerate(branches)
        }

        branches_output = "\n\n".join(
            f"{option}\n{desc}" for option, (_, desc) in story_branches.items()
        )

        radio_options = list(story_branches.keys())

        return (
            branches_output,
            gr.update(choices=radio_options, value=radio_options[0] if radio_options else None, visible=True),
            gr.update(visible=True)
        )
    except Exception as e:
        return f"Error generating branches: {str(e)}", gr.update(visible=False), gr.update(visible=False)


def select_branch(api_key, selected_branch):
    """Continue the story based on the selected branch"""
    global current_story, story_branches, story_history

    try:
        if not selected_branch or selected_branch not in story_branches:
            return current_story, gr.update(value="Please select a valid branch.")

        llm = ChatOpenAI(
            openai_api_key=api_key,
            model="gpt-4-turbo",
            temperature=0.7
        )

        branch_title, branch_description = story_branches[selected_branch]

        continuation = continue_branch(llm, current_story, branch_title, branch_description)

        current_story += f"\n\n[{selected_branch}]\n\n" + continuation
        story_history.append(f"[Branch: {branch_title}] {continuation}")

        return current_story, gr.update(value="Branch selected and story continued!")
    except Exception as e:
        return current_story, gr.update(value=f"Error selecting branch: {str(e)}")
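
# create_app wires the callbacks above to the interface; each `outputs` list
# must match, in order, the tuple returned by the corresponding callback.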


def create_app():
    """Create the Gradio interface"""
    with gr.Blocks(title="AI Story Generator", theme=gr.themes.Soft()) as app:
        gr.Markdown("# AI-Powered Story Generator")
        gr.Markdown("Create interactive stories with branching narratives using OpenAI and LangChain")

        with gr.Row():
            with gr.Column(scale=1):
                api_key = gr.Textbox(
                    label="OpenAI API Key",
                    placeholder="Enter your OpenAI API key here",
                    type="password"
                )

                with gr.Group():
                    genre = gr.Dropdown(
                        label="Story Parameters → Genre",
                        choices=[
                            "Fantasy", "Science Fiction", "Mystery", "Romance",
                            "Horror", "Adventure", "Historical Fiction", "Comedy"
                        ],
                        value="Fantasy"
                    )
                    theme = gr.Textbox(
                        label="Story Parameters → Theme",
                        placeholder="e.g., Redemption, Loss, Discovery"
                    )
                    length = gr.Radio(
                        label="Story Parameters → Story Length",
                        choices=["Short Story", "Novella", "Novel Chapter"],
                        value="Short Story"
                    )

                with gr.Row():
                    start_btn = gr.Button("Start New Story", variant="primary")

                with gr.Row():
                    continue_btn = gr.Button("Continue Story", visible=False)
                    branch_btn = gr.Button("Generate Branch Options", visible=False)

                status = gr.Textbox(label="Status", value="", visible=True)

                with gr.Group(visible=False) as branch_group:
                    branch_output = gr.Textbox(label="Story Branches")
                    branch_selection = gr.Radio(label="Select a Branch", choices=[])
                    select_branch_btn = gr.Button("Continue with Selected Branch")

            with gr.Column(scale=2):
                story_output = gr.Textbox(
                    label="Generated Story",
                    placeholder="Your story will appear here...",
                    lines=20
                )

        start_btn.click(
            initialize_story,
            inputs=[api_key, genre, theme, length],
            outputs=[story_output, continue_btn, branch_btn, branch_group]
        )

        continue_btn.click(
            continue_story,
            inputs=[api_key],
            outputs=[story_output, status]
        )

        branch_btn.click(
            generate_branches,
            inputs=[api_key],
            outputs=[branch_output, branch_selection, select_branch_btn]
        )

        select_branch_btn.click(
            select_branch,
            inputs=[api_key, branch_selection],
            outputs=[story_output, status]
        )

        gr.Examples(
            examples=[
                ["Fantasy", "Coming of age", "Short Story"],
                ["Science Fiction", "Artificial intelligence", "Novella"],
                ["Mystery", "Betrayal", "Novel Chapter"]
            ],
            inputs=[genre, theme, length]
        )

        with gr.Accordion("Understanding Story Options", open=True):
            with gr.Accordion("Genre Options", open=False):
                gr.Markdown("""
**Genre** determines the type and style of your story:

- **Fantasy**: Stories with magic, mythical creatures, or supernatural elements (like Harry Potter or Lord of the Rings)
- **Science Fiction**: Stories involving futuristic technology, space travel, or scientific concepts
- **Mystery**: Stories centered around solving a crime or puzzle
- **Romance**: Stories focusing on relationships and love
- **Horror**: Stories designed to frighten or disturb the reader
- **Adventure**: Stories with exciting journeys, quests, or challenges
- **Historical Fiction**: Stories set in the past with historically accurate settings
- **Comedy**: Humorous stories meant to entertain and make readers laugh
""")

            with gr.Accordion("Theme Suggestions", open=False):
                gr.Markdown("""
**Theme** is the central idea or message that runs throughout your story. Some examples:

- **Redemption**: A character seeking forgiveness or making amends
- **Loss**: Dealing with grief or the absence of something important
- **Discovery**: Finding something new about oneself or the world
- **Adventure**: Exploring new places or situations
- **Coming of age**: A character maturing or growing up
- **Betrayal**: Dealing with trust being broken
- **Power**: The effects of gaining or losing control
- **Justice**: Exploring fairness and moral rights
- **Love**: Different types of relationships and connections
- **Identity**: Understanding who one truly is
""")

            with gr.Accordion("Story Length Options", open=False):
                gr.Markdown("""
**Story Length** determines the scope of your generated story:

- **Short Story**: A brief, self-contained narrative (typically 1,000-7,500 words) that can be read in one sitting
- **Novella**: A medium-length story (typically 17,500-40,000 words), longer than a short story but shorter than a novel
- **Novel Chapter**: A section of what could be a longer work, focusing on just one part of a potentially larger narrative
""")

            with gr.Accordion("Interface Elements", open=False):
                gr.Markdown("""
**Status**: Shows feedback about your story generation process (success messages, errors, confirmations)

**Story Branches**: When you click "Generate Branch Options," this area shows three different possible directions for your story. Each includes a title and brief description.

**Select a Branch**: Radio buttons that let you choose which story direction to follow. After selecting one, click "Continue with Selected Branch" to develop that storyline.

**Generated Story**: The main output area where your complete story appears, including all continuations and branches you've selected.
""")

        gr.Markdown("""
## How to Use
1. Enter your OpenAI API key
2. Select a genre, theme, and length
3. Click "Start New Story" to begin
4. Continue the linear story or generate branching options
5. If you chose branching, select a branch to follow

This app uses LangChain to orchestrate the story generation process and OpenAI's models to create the content.
""")

    return app


if __name__ == "__main__":
    app = create_app()
    app.launch()
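    # launch() accepts the usual Gradio options if the demo needs to be reached
    # from other machines, e.g. app.launch(server_name="0.0.0.0", share=True).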