Omid-sar commited on
Commit
d29a64f
·
1 Parent(s): 6ab3a74
Files changed (1) hide show
  1. app.py +51 -0
app.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# app.py — Streamlit app that turns a user-supplied topic into a YouTube
# video title and a matching script via two chained OpenAI LLM calls.
import os

import streamlit as st
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain, SequentialChain
from langchain.memory import ConversationBufferMemory


# ------------------------ streamlit app ------------------------
# APP Framework
st.title("🦜🔗 YouTube GPT Creator")
prompt = st.text_input("Plug in your prompt here")

# Fail fast with a readable message instead of an opaque OpenAI auth error
# deep inside the chain call. (The key is read by langchain's OpenAI wrapper.)
if not os.environ.get("OPENAI_API_KEY"):
    st.warning("OPENAI_API_KEY is not set — please configure it before use.")
    st.stop()


# Prompt Templates
title_template = PromptTemplate(
    input_variables=["topic"],
    template="write me a YouTube video title about {topic}",
)

script_template = PromptTemplate(
    input_variables=["title"],
    template="write me a YouTube video script based on this {title}",
)

# Memory — one buffer per chain, keyed on that chain's own input variable.
# (A single memory with input_key="topic" attached to script_chain would
# record the wrong variable, since script_chain's input is "title".)
title_memory = ConversationBufferMemory(input_key="topic", memory_key="chat_history")
script_memory = ConversationBufferMemory(input_key="title", memory_key="chat_history")

# LLMS
# temperature=0.0 keeps outputs deterministic/repeatable for the same prompt.
llm = OpenAI(temperature=0.0)
title_chain = LLMChain(
    llm=llm, prompt=title_template, output_key="title", memory=title_memory, verbose=True
)
script_chain = LLMChain(
    llm=llm, prompt=script_template, output_key="script", memory=script_memory, verbose=True
)
# Runs title_chain first, then feeds its "title" output into script_chain.
sequential_chain = SequentialChain(
    chains=[title_chain, script_chain],
    input_variables=["topic"],
    output_variables=["title", "script"],
    verbose=True,
)

# Screen Output
if prompt:
    response = sequential_chain({"topic": prompt})
    st.write(response["title"])
    st.write(response["script"])

    with st.expander("Title History"):
        st.info(title_memory.buffer)
    with st.expander("Script History"):
        st.info(script_memory.buffer)