# app.py — Streamlit app that pulls backlink data for a target URL
# from the DataForSEO Backlinks API and displays it as a DataFrame.
import streamlit as st
import pandas as pd
import requests
import base64
def get_backlinks(api_login, api_key, target_url, filters):
    """Fetch backlinks for *target_url* from the DataForSEO Backlinks API.

    Parameters:
        api_login: DataForSEO account login (email).
        api_key: DataForSEO API key, used as the Basic-auth password.
        target_url: URL or domain whose backlinks are requested.
        filters: list of ``[field, operator, value]`` triples, serialized as
            ``"field:operator:value"`` strings for the API; may be empty/None.

    Returns:
        A ``pandas.DataFrame`` of results on success, otherwise ``None``
        (an error message is shown in the Streamlit UI).
    """
    # DataForSEO uses HTTP Basic auth with login:key credentials.
    encoded_credentials = base64.b64encode(f"{api_login}:{api_key}".encode()).decode()
    headers = {
        'Authorization': f'Basic {encoded_credentials}'
    }
    # Build the task payload. Renamed the comprehension variable so it no
    # longer shadows the builtin `filter`.
    post_data = {
        0: {
            "target": target_url,
            "limit": 1000,
            "mode": "as_is",
            "filters": [f"{f[0]}:{f[1]}:{f[2]}" for f in filters] if filters else []
        }
    }
    # Timeout prevents a stalled request from hanging the Streamlit session.
    response = requests.post(
        "https://api.dataforseo.com/v3/backlinks/backlinks/live",
        json=post_data,
        headers=headers,
        timeout=30,
    )
    # Parse the body exactly once; API gateways can return non-JSON error
    # pages, which previously raised an unhandled exception here.
    try:
        payload = response.json()
    except ValueError:
        st.error(f"Error: Code: {response.status_code} Non-JSON response from API.")
        return None
    # Log the full response for debugging.
    st.text("API Response:")
    st.json(payload)
    # NOTE(review): DataForSEO v3 responses normally nest data under
    # payload['tasks'][0]['result'] — confirm 'results' is the right key
    # for this endpoint; an always-missing key would make this always error.
    if response.status_code == 200 and 'results' in payload:
        results = payload['results']
        if results:
            return pd.DataFrame(results)
        st.error("Received empty data from API.")
        return None
    error_message = payload.get('status_message', 'No specific error message provided')
    st.error(f"Error: Code: {response.status_code} Message: {error_message}")
    return None
# --- Sidebar: API credentials ---
st.sidebar.title("DataForSEO API Parameters")
api_login = st.sidebar.text_input("API Login", value="[email protected]")
api_key = st.sidebar.text_input("API Key", type="password")

# --- Sidebar: filter widgets ---
url_from_not_contain = st.sidebar.text_input("URL from does not contain (comma-separated)")
is_lost = st.sidebar.checkbox("Is Lost", value=False)
dofollow = st.sidebar.checkbox("Dofollow", value=True)
backlink_spam_score = st.sidebar.slider("Backlink Spam Score ≤", 0, 100, 10)
page_from_language = st.sidebar.selectbox("Page From Language", ['en', 'other'])

# Translate widget state into [field, operator, value] triples for the API.
# One "not_like" filter per comma-separated URL fragment.
filters = [
    ["url_from", "not_like", fragment.strip()]
    for fragment in url_from_not_contain.split(',')
] if url_from_not_contain else []
if is_lost:
    filters.append(["is_lost", "=", is_lost])
if dofollow:
    filters.append(["dofollow", "=", dofollow])
filters.append(["backlink_spam_score", "<=", backlink_spam_score])
filters.append(["page_from_language", "=", page_from_language])
# --- Main layout: input on the left, results on the right ---
col1, col2 = st.columns(2)
with col1:
    st.header("Input")
    target_url = st.text_input("Enter the target URL")  # Define target_url here
generate_button = st.sidebar.button("Generate All")
reset_button = st.sidebar.button("Reset")

# Fetch backlinks, show the table, and offer the data as a CSV download.
if generate_button and target_url:
    df = get_backlinks(api_login, api_key, target_url, filters)
    if df is not None:
        with col2:
            st.header("Output")
            st.dataframe(df)
            # The original comment promised a CSV download button but never
            # created one; provide it so results can be exported.
            st.download_button(
                label="Download CSV",
                data=df.to_csv(index=False),
                file_name="backlinks.csv",
                mime="text/csv",
            )

# Reset: rerun the script so all widgets return to their defaults.
if reset_button:
    st.experimental_rerun()