import streamlit as st
import pandas as pd
import requests
import base64
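# Requires: streamlit, pandas, requests (e.g. `pip install streamlit pandas requests`)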
def get_backlinks(api_login, api_key, target_url, filters):
    # Encode the credentials for HTTP Basic Authentication
    encoded_credentials = base64.b64encode(f"{api_login}:{api_key}".encode()).decode()

    # Set the headers with Basic Authentication
    headers = {
        'Authorization': f'Basic {encoded_credentials}'
    }

    # Prepare the POST data; DataForSEO expects a collection of tasks
    # keyed by index
    post_data = {
        0: {
            "target": target_url,
            "limit": 1000,
            "mode": "as_is",
            "filters": filters
        }
    }
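    # For reference, the serialized JSON body looks roughly like this
    # (illustrative values; requests serializes the integer key as "0"):
    # {"0": {"target": "example.com", "limit": 1000, "mode": "as_is",
    #        "filters": [["dofollow", "=", true], "and",
    #                    ["backlink_spam_score", "<=", 10]]}}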
    # Make the API request
    response = requests.post("https://api.dataforseo.com/v3/backlinks/backlinks/live", json=post_data, headers=headers)

    # Log the full response for debugging
    st.text("API Response:")
    # st.json(response.json())  # Uncomment to show the full JSON response in the app

    # For debugging: display the status code, headers, and body
    st.text(f"Response Status Code: {response.status_code}")
    st.text(f"Response Headers: {response.headers}")
    try:
        st.text(f"Response Body: {response.json()}")
    except ValueError:
        st.text("Response Body: <Not a JSON response>")
    # Parse the response. DataForSEO v3 nests backlink rows under
    # tasks[0]["result"][0]["items"] and reports success with its own
    # status_code of 20000 (structure per the DataForSEO v3 docs).
    if response.status_code == 200:
        response_data = response.json()
        if response_data.get('status_code') == 20000:
            tasks = response_data.get('tasks') or []
            result = (tasks[0].get('result') or []) if tasks else []
            items = (result[0].get('items') or []) if result else []
            if items:
                # Flatten the nested backlink records into a DataFrame
                df = pd.json_normalize(items)
                return df
            else:
                st.error("Received empty data from API.")
                return None
        else:
            # Handle DataForSEO-internal error codes here
            internal_status_code = response_data.get('status_code')
            internal_status_message = response_data.get('status_message', 'No specific message provided')
            st.error(f"Internal Status Code: {internal_status_code}, Message: {internal_status_message}")
            return None
    else:
        error_message = response.json().get('status_message', 'No specific error message provided')
        st.error(f"Error: Code: {response.status_code} Message: {error_message}")
        return None
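# Illustrative usage of get_backlinks outside Streamlit (placeholder
# credentials and target; the filter list mirrors the shape built below):
# df = get_backlinks("your_login", "your_key", "example.com",
#                    [["dofollow", "=", True]])
# if df is not None:
#     print(df.head())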
def convert_df_to_csv(df):
    # Convert the DataFrame to UTF-8-encoded CSV bytes for download
    return df.to_csv().encode('utf-8')
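# Note: on recent Streamlit versions this converter can be decorated with
# @st.cache_data so the CSV is not re-encoded on every rerun.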
# Streamlit layout
st.sidebar.title("DataForSEO API Parameters")
api_login = st.sidebar.text_input("API Login", value="[email protected]")
api_key = st.sidebar.text_input("API Key", type="password")
# Filters input
url_from_not_contain = st.sidebar.text_input("URL from does not contain (comma-separated)")
is_lost = st.sidebar.checkbox("Is Lost", value=False)
dofollow = st.sidebar.checkbox("Dofollow", value=True)
backlink_spam_score = st.sidebar.slider("Backlink Spam Score ≤", 0, 100, 10)
page_from_language = st.sidebar.selectbox("Page From Language", ['en', 'other'])
# Prepare filters for the API call; DataForSEO expects conditions joined by
# explicit "and"/"or" tokens, e.g. [[...], "and", [...]]
filters = []
if url_from_not_contain:
    for url in url_from_not_contain.split(','):
        # "not_like" treats % as a wildcard, so wrap the fragment to
        # express "does not contain"
        filters.append(["url_from", "not_like", f"%{url.strip()}%"])
        filters.append("and")
if is_lost:
    filters.append(["is_lost", "=", is_lost])
    filters.append("and")
if dofollow:
    filters.append(["dofollow", "=", dofollow])
    filters.append("and")
filters.append(["backlink_spam_score", "<=", backlink_spam_score])
filters.append("and")
filters.append(["page_from_language", "=", page_from_language])

# Drop a trailing "and" if one is left dangling
if filters and filters[-1] == "and":
    filters.pop()
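# Example of the assembled expression with one excluded URL fragment and the
# defaults above (illustrative values):
# [["url_from", "not_like", "%spam.example%"], "and",
#  ["dofollow", "=", True], "and",
#  ["backlink_spam_score", "<=", 10], "and",
#  ["page_from_language", "=", "en"]]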
# Main app layout
col1, col2 = st.columns(2)

with col1:
    st.header("Input")
    target_url = st.text_input("Enter the target URL")

generate_button = st.sidebar.button("Generate All")
reset_button = st.sidebar.button("Reset")
# Generate the CSV and offer it for download
if generate_button and target_url:
    df = get_backlinks(api_login, api_key, target_url, filters)
    if df is not None:
        # Convert the DataFrame to CSV
        csv = convert_df_to_csv(df)
        # Create the download button
        st.download_button(
            label="Download data as CSV",
            data=csv,
            file_name='backlinks.csv',
            mime='text/csv',
        )
# Reset functionality
if reset_button:
    # Note: Streamlit 1.27+ renamed this to st.rerun()
    st.experimental_rerun()