Spaces:
Sleeping
Sleeping
Upload 4 files
Browse files

New functionality added in the project
- advance_post.py +79 -0
- app.py +113 -0
- paraphrase_post.py +111 -0
- requirements.txt +11 -0
advance_post.py
ADDED
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
from langchain.output_parsers import ResponseSchema, StructuredOutputParser
|
3 |
+
from langchain.prompts import PromptTemplate
|
4 |
+
from langchain_openai import ChatOpenAI
|
5 |
+
from langchain_community.document_loaders import WebBaseLoader
|
6 |
+
from langchain.prompts import ChatPromptTemplate
|
7 |
+
from langchain_core.output_parsers import StrOutputParser
|
8 |
+
from langchain_groq import ChatGroq
|
9 |
+
|
10 |
+
import nest_asyncio
|
11 |
+
|
12 |
+
def google_search(linkedin_post, openai_api_key, google_api_key, search_engine_id, num_results_per_query=(3, 2, 1)):
    """Extract the top three questions from a LinkedIn post and gather related article links.

    Uses an LLM to pull the three most relevant questions out of the post, then
    queries the Google Custom Search JSON API once per question.

    Args:
        linkedin_post: Text of the LinkedIn post to analyse.
        openai_api_key: OpenAI API key for the question-extraction model.
        google_api_key: Google Custom Search API key.
        search_engine_id: Programmable Search Engine ID (``cx``).
        num_results_per_query: How many results to request for each question,
            in order. An immutable tuple default avoids the shared
            mutable-default-argument pitfall.

    Returns:
        A flat list of result URLs across all queries (possibly empty when the
        searches fail or return nothing).
    """
    response_schemas = [
        ResponseSchema(
            name="answer",
            description="These are the top three relevant questions from the LinkedIn post",
            type="list",
        )
    ]
    output_parser = StructuredOutputParser.from_response_schemas(response_schemas)
    format_instructions = output_parser.get_format_instructions()

    template = """
    You are a helpful question extractor bot. You are provided with LinkedIn post and your task is to extract the top three relevant questions from the post which are related to the topics of the post only.:
    LinkedIn post: {post}
    {format_instructions}

    """
    prompt = PromptTemplate(
        template=template,
        input_variables=["post"],
        partial_variables={"format_instructions": format_instructions},
    )
    model = ChatOpenAI(api_key=openai_api_key, model="gpt-4-turbo-preview", temperature=0)
    chain = prompt | model | output_parser
    result = chain.invoke({"post": linkedin_post})
    questions = result["answer"]

    all_links = []
    for query, num_results in zip(questions, num_results_per_query):
        # Let `requests` URL-encode the query instead of interpolating it into
        # the URL by hand. The previously hard-coded session Cookie header was
        # removed — never commit credentials or session tokens to source.
        params = {
            "key": google_api_key,
            "cx": search_engine_id,
            "q": query,
            "tbm": "nws",
            "num": num_results,
        }
        try:
            response = requests.get(
                "https://www.googleapis.com/customsearch/v1",
                params=params,
                timeout=10,  # avoid hanging indefinitely on a dead endpoint
            )
            search_results = response.json()
        except requests.exceptions.RequestException:
            continue  # best effort: skip queries that fail, keep the rest
        all_links.extend(item["link"] for item in search_results.get("items", []))

    return all_links
|
48 |
+
|
49 |
+
|
50 |
+
# WebBaseLoader may start an asyncio event loop inside Streamlit's running
# loop; nest_asyncio makes that re-entrant.
nest_asyncio.apply()


def advanced_post(all_links, openai_api_key, linkedinpost):
    """Create an enriched LinkedIn post from the content of related articles.

    Args:
        all_links: URLs of articles related to the post.
        openai_api_key: OpenAI API key for the generation model.
        linkedinpost: The original (paraphrased) LinkedIn post text.

    Returns:
        Tuple ``(generated_post, docs)`` — the new post text and the loaded
        article documents. An empty ``docs`` list signals that none of the
        links could be fetched (callers use this as a credentials check).
    """
    loader = WebBaseLoader(all_links, encoding="utf-8")
    loader.requests_per_second = 1  # throttle: be polite to the target sites
    docs = loader.load()

    template = """You are a helpful linkedin post creator . You are provided with LinkedIn post and documents related to the post extracted from different articles from the internet.
    Your task is to create a new linkedin post but content should be taken from the documents according to the semantic similarity of the post content with document content.

    Linkedin post:{post}
    Documents: {content}"""

    prompt = ChatPromptTemplate.from_template(template)
    model = ChatOpenAI(temperature=0, api_key=openai_api_key, model="gpt-4-turbo-preview")
    # NOTE: a commented-out ChatGroq line containing a hard-coded (leaked) API
    # key was removed; rotate that key and load secrets from the environment.
    chain = prompt | model | StrOutputParser()
    result = chain.invoke({"post": linkedinpost, "content": docs})
    return result, docs
|
67 |
+
|
68 |
+
|
69 |
+
|
70 |
+
|
71 |
+
|
72 |
+
|
73 |
+
|
74 |
+
|
75 |
+
|
76 |
+
|
77 |
+
|
78 |
+
|
79 |
+
|
app.py
ADDED
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
import re
|
3 |
+
import openai
|
4 |
+
from paraphrase_post import get_original_url , paraphrased_post
|
5 |
+
from advance_post import google_search , advanced_post
|
6 |
+
|
7 |
+
|
8 |
+
def main():
    """Streamlit UI: paraphrase a LinkedIn post from a URL and optionally
    generate an 'advanced' post enriched with related web content."""
    st.title("LinkedIn Post Creator")

    # Seed session-state keys on first run so later widgets can read them safely.
    session_state = st.session_state
    for key in ("paraphrase", "keywords", "take_aways", "highlights", "advancepost"):
        if key not in session_state:
            session_state[key] = ""

    url = st.sidebar.text_input("Enter URL:", placeholder="Enter URL here...")
    openai_api_key = st.sidebar.text_input("API Key:", placeholder="Enter OpenAI API Key...")
    temperature = st.sidebar.select_slider(
        'How much accurate post you want ?',
        options=['Less accuracy', 9, 8, 7, 6, 5, 4, 3, 2, 1, 'High accuracy'])
    if temperature == 'Less accuracy':
        temperature = 10
    elif temperature == "High accuracy":
        temperature = 0
    temperature = temperature / 10  # map slider choice onto the 0.0-1.0 OpenAI range

    if st.sidebar.button("Submit"):
        if url:
            if openai_api_key:
                original_url = get_original_url(url)
                # Accept only real LinkedIn post/article pages (not login or
                # other pages). Guard against None: expansion of a dead short
                # URL returns None, and re.match(None) would raise TypeError.
                match = original_url and re.match(
                    r"https?://(?:www\.)?linkedin\.com/(posts|feed|pulse)/.*", original_url)
                if match:
                    try:
                        (session_state.paraphrase,
                         session_state.keywords,
                         session_state.take_aways,
                         session_state.highlights) = paraphrased_post(url, openai_api_key, temperature)
                    except openai.AuthenticationError:
                        st.sidebar.error("Enter your valid API key")
                else:
                    st.sidebar.error("Put a valid LinkedIn post url only")
            else:
                st.sidebar.error("Please enter API Key")
        else:
            st.sidebar.error("Please enter url")

    st.text_area("Generated LinkedIn post", value=session_state.paraphrase, height=400)

    if st.sidebar.toggle("Show Details") and session_state.keywords:
        st.write("Keywords:")
        for i, statement in enumerate(session_state.keywords, start=1):
            st.write(f"{i}. {statement}")

        st.write("Take Aways:")
        for i, statement in enumerate(session_state.take_aways, start=1):
            st.write(f"{i}. {statement}")

        st.write("Highlights:")
        for i, statement in enumerate(session_state.highlights, start=1):
            st.write(f"{i}. {statement}")

    # ---------------- Advance LinkedIn post ----------------
    if st.sidebar.toggle("Advance LinkedIn Post"):
        google_api_key = st.sidebar.text_input("Google API Key:", placeholder="Enter Google Search API Key...")
        search_engine_id = st.sidebar.text_input("Search Engine ID:", placeholder="Enter Search Engine ID...")
        # BUG FIX: the user-supplied credentials were previously overwritten by
        # hard-coded (and thereby leaked) values; credentials must come only
        # from user input. Rotate the leaked Google API key.
        if st.sidebar.button("Generate Advance Post"):
            if google_api_key:
                if search_engine_id:
                    all_links = google_search(session_state.paraphrase, openai_api_key,
                                              google_api_key, search_engine_id)
                    session_state.advancepost, docs = advanced_post(all_links, openai_api_key,
                                                                    session_state.paraphrase)
                    # No documents loaded means every search/fetch failed —
                    # the most likely cause is bad credentials.
                    if len(docs) == 0:
                        st.sidebar.error("Please Check your both credentials carefully")
                else:
                    st.sidebar.error("Please enter Search Engine ID")
            else:
                st.sidebar.error("Please enter Google API Key")
        st.text_area("Advance LinkedIn post", value=session_state.advancepost, height=400)


if __name__ == "__main__":
    main()
|
104 |
+
|
105 |
+
|
106 |
+
|
107 |
+
|
108 |
+
|
109 |
+
|
110 |
+
|
111 |
+
|
112 |
+
|
113 |
+
|
paraphrase_post.py
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from langchain_community.document_loaders import WebBaseLoader
|
2 |
+
from langchain.prompts import ChatPromptTemplate
|
3 |
+
from langchain.output_parsers import ResponseSchema
|
4 |
+
from langchain.output_parsers import StructuredOutputParser
|
5 |
+
from langchain_core.output_parsers import StrOutputParser
|
6 |
+
from langchain.chat_models import ChatOpenAI
|
7 |
+
import requests
|
8 |
+
|
9 |
+
|
10 |
+
from langchain_groq import ChatGroq
|
11 |
+
|
12 |
+
|
13 |
+
def is_shortened_url(url):
    """Return True when *url* redirects to a different final URL.

    A differing final URL after following redirects indicates a shortener
    link; network failures are reported and treated as "not shortened".
    """
    try:
        # HEAD keeps the probe cheap; the timeout prevents hanging forever on
        # an unresponsive host (the original call had no timeout).
        response = requests.head(url, allow_redirects=True, timeout=10)
        return response.url != url
    except requests.exceptions.RequestException as e:
        print("Error:", e)
        return False
|
23 |
+
|
24 |
+
def expand_short_url(short_url):
    """Resolve *short_url* to its final destination URL.

    Returns the expanded URL, or None when the URL cannot be expanded
    (non-200 response or network failure).
    """
    try:
        # Timeout added: a hung HEAD request would otherwise block the app.
        response = requests.head(short_url, allow_redirects=True, timeout=10)
        if response.status_code == 200:
            return response.url
        print("Error: Short URL couldn't be expanded.")
        return None
    except requests.exceptions.RequestException as e:
        print("Error:", e)
        return None
|
35 |
+
|
36 |
+
def get_original_url(url):
    """Return the expanded target for shortener links; otherwise the URL unchanged."""
    return expand_short_url(url) if is_shortened_url(url) else url
|
41 |
+
|
42 |
+
|
43 |
+
|
44 |
+
|
45 |
+
# Below function extract the post only content from complete web page content and parraphrase the extracted post
|
46 |
+
|
47 |
+
def paraphrased_post(url, api_key, temperature):
    """Scrape a LinkedIn post page, paraphrase the post, and extract details.

    Two-stage LLM pipeline: stage 1 isolates the post text from the scraped
    page, stage 2 paraphrases it; `extract_data` then pulls structured details.

    Args:
        url: URL of the LinkedIn post page.
        api_key: OpenAI API key.
        temperature: Sampling temperature (0.0 = most deterministic).

    Returns:
        Tuple ``(paraphrased_post, keywords, take_aways, highlights)`` where
        the last three are the top-3 entries of each extracted list.
    """
    loader = WebBaseLoader([url], encoding='utf-8')
    docs = loader.load()

    # Stage 1: extract only the post content from the full page scrape.
    template = """You are a helpful LinkedIn webscrapper. You are provided with a data , extract the content of the post only.
    {docs}"""
    prompt = ChatPromptTemplate.from_template(template)

    # NOTE: a commented-out ChatGroq line containing a hard-coded (leaked) API
    # key was removed; rotate that key and load secrets from the environment.
    model = ChatOpenAI(api_key=api_key, model="gpt-4-turbo-preview", temperature=temperature)

    # Stage 2: paraphrase the extracted post, preserving its formatting.
    template2 = """You are a helpful LinkedIn post paraphraser and plagiarism remover bot. You are provided with LinkedIn post content and your task is to paraphrase it and remove plagiarism .Return the output in the format with spaces or stickers if present.
    {data}"""
    chain = prompt | model | StrOutputParser()
    analysis_prompt = ChatPromptTemplate.from_template(template2)

    # Pipe stage-1 output into stage 2 as its "data" variable.
    composed_chain = {"data": chain} | analysis_prompt | model | StrOutputParser()
    phrased_post = composed_chain.invoke({"docs": docs})

    data2 = extract_data(phrased_post, api_key, temperature)
    keywords = data2['Keywords'][:3]
    take_aways = data2['Take Aways'][:3]
    highlights = data2['Highlights'][:3]
    return phrased_post, keywords, take_aways, highlights
|
72 |
+
|
73 |
+
|
74 |
+
|
75 |
+
|
76 |
+
|
77 |
+
# Below function extract the details such as keywords , Take aways , highlights and questions
|
78 |
+
def extract_data(post_data, api_key, temperature):
    """Extract keywords, take-aways and highlights from a post.

    Args:
        post_data: The (paraphrased) LinkedIn post text.
        api_key: OpenAI API key.
        temperature: Sampling temperature for the extraction model.

    Returns:
        Dict with keys ``"Keywords"``, ``"Take Aways"`` and ``"Highlights"``,
        each mapping to a list ordered by relevance (most relevant first).
    """
    keywords = ResponseSchema(
        name="Keywords",
        description="These are the keywords extracted from LinkedIn post", type="list")
    Take_aways = ResponseSchema(
        name="Take Aways",
        description="These are the take aways extracted from LinkedIn post", type="list")
    Highlights = ResponseSchema(
        name="Highlights",
        description="These are the highlights extracted from LinkedIn post", type="list")

    response_schema = [
        keywords,
        Take_aways,
        Highlights
    ]
    output_parser = StructuredOutputParser.from_response_schemas(response_schema)
    format_instructions = output_parser.get_format_instructions()

    template = """
    You are a helpful keywords , take aways and highlights extractor from the post of LinkedIn Bot. Your task is to extract relevant keywords , take aways and highlights in descending order of their scores in a list, means high relevant should be on the top .
    From the following text message, extract the following information:

    text message: {content}
    {format_instructions}
    """

    prompt_template = ChatPromptTemplate.from_template(template)
    messages = prompt_template.format_messages(content=post_data, format_instructions=format_instructions)
    llm = ChatOpenAI(temperature=temperature, model="gpt-4-turbo-preview", api_key=api_key)
    # `llm(messages)` (direct __call__) is deprecated in recent LangChain
    # releases; `.invoke()` is the supported Runnable entry point.
    response = llm.invoke(messages)
    output_dict = output_parser.parse(response.content)
    return output_dict
|
requirements.txt
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
streamlit
|
2 |
+
langchain_community
|
3 |
+
python-dotenv
|
4 |
+
langchain
|
5 |
+
pyperclip
|
6 |
+
fastapi
|
7 |
+
beautifulsoup4
|
8 |
+
openai
|
9 |
+
langchain_groq
|
10 |
+
langchain_openai
|
11 |
+
nest_asyncio
|