File size: 4,429 Bytes
b2974e8 7119782 7e21d9b 7119782 7a83db7 7e21d9b 7a83db7 7e21d9b d743cd0 7e21d9b d0479df 7e21d9b 7119782 7e21d9b 47e2fc4 53590af d9fe2ef d964699 6abe217 47e2fc4 6097094 6abe217 6097094 47e2fc4 6097094 47e2fc4 e5bd9f1 47e2fc4 7119782 47e2fc4 53590af 7119782 7a83db7 7119782 7a83db7 ba3ff9a 7119782 ba3ff9a 7119782 b7d9073 ba3ff9a 7119782 b7d9073 ba3ff9a 7119782 b7d9073 ba3ff9a b7d9073 ba3ff9a b7d9073 7119782 71194e2 bc5df64 37cf3dc 7119782 71194e2 e5bd9f1 7588655 47e2fc4 53590af 3dbc3bb 7119782 ba3ff9a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 |
import streamlit as st
import pandas as pd
import requests
import base64
def get_backlinks(api_login, api_key, target_url, filters):
    """Fetch backlinks for *target_url* from the DataForSEO Backlinks API.

    Parameters:
        api_login: DataForSEO account login (used for HTTP Basic auth).
        api_key: DataForSEO API key / password.
        target_url: Domain or URL whose backlinks should be retrieved.
        filters: DataForSEO filter expression (list of condition triples
            joined by "and" strings).

    Returns:
        A pandas DataFrame of backlink rows, or None on any error.
        Errors are surfaced to the user via st.error; raw response details
        are echoed with st.text for debugging.
    """
    # HTTP Basic auth: base64("login:key")
    encoded_credentials = base64.b64encode(f"{api_login}:{api_key}".encode()).decode()
    headers = {
        'Authorization': f'Basic {encoded_credentials}'
    }
    # DataForSEO's documented pattern: tasks keyed by index in the payload.
    post_data = {
        0: {
            "target": target_url,
            "limit": 1000,
            "mode": "as_is",
            "filters": filters
        }
    }
    response = requests.post(
        "https://api.dataforseo.com/v3/backlinks/backlinks/live",
        json=post_data,
        headers=headers,
    )

    # Parse the body exactly once; the original re-called response.json()
    # in several places, and an unguarded call raised ValueError on a
    # non-JSON 200 response.
    try:
        response_data = response.json()
    except ValueError:
        response_data = None

    # Debug output in the Streamlit app.
    st.text("API Response:")
    st.text(f"Response Status Code: {response.status_code}")
    st.text(f"Response Headers: {response.headers}")
    if response_data is not None:
        st.text(f"Response Body: {response_data}")
    else:
        st.text("Response Body: <Not a JSON response>")

    if response.status_code != 200:
        error_message = 'No specific error message provided'
        if isinstance(response_data, dict):
            error_message = response_data.get('status_message', error_message)
        st.error(f"Error: Code: {response.status_code} Message: {error_message}")
        return None

    if not isinstance(response_data, dict):
        st.error("Received a non-JSON response from the API.")
        return None

    # DataForSEO v3 nests result rows under tasks[0].result[0].items;
    # keep the original top-level 'results' lookup as a fallback in case
    # of a differing response shape.
    results = response_data.get('results')
    if results is None and 'tasks' in response_data:
        tasks = response_data.get('tasks') or []
        if tasks and tasks[0].get('result'):
            first_result = tasks[0]['result'][0] or {}
            results = first_result.get('items')

    if results:
        # Flatten the nested JSON rows into a tabular DataFrame.
        return pd.json_normalize(results)

    if results is not None:
        # Key was present but the row list was empty.
        st.error("Received empty data from API.")
        return None

    # HTTP 200 but no rows: report the API's internal status instead.
    internal_status_code = response_data.get('status_code', None)
    internal_status_message = response_data.get('status_message', 'No specific message provided')
    st.error(f"Internal Status Code: {internal_status_code}, Message: {internal_status_message}")
    return None
def convert_df_to_csv(df):
    """Serialize *df* to CSV text and return it as UTF-8 bytes."""
    csv_text = df.to_csv()
    return csv_text.encode('utf-8')
# Streamlit layout: credential inputs in the sidebar.
st.sidebar.title("DataForSEO API Parameters")
api_login = st.sidebar.text_input("API Login", value="[email protected]")
api_key = st.sidebar.text_input("API Key", type="password")

# Filter inputs.
url_from_not_contain = st.sidebar.text_input("URL from does not contain (comma-separated)")
is_lost = st.sidebar.checkbox("Is Lost", value=False)
dofollow = st.sidebar.checkbox("Dofollow", value=True)
backlink_spam_score = st.sidebar.slider("Backlink Spam Score ≤", 0, 100, 10)
page_from_language = st.sidebar.selectbox("Page From Language", ['en', 'other'])

# Collect every condition triple first, then interleave the "and"
# separators the DataForSEO filter syntax requires.
conditions = []
if url_from_not_contain:
    conditions.extend(
        ["url_from", "not_like", part.strip()]
        for part in url_from_not_contain.split(',')
    )
if is_lost:
    conditions.append(["is_lost", "=", is_lost])
if dofollow:
    conditions.append(["dofollow", "=", dofollow])
conditions.append(["backlink_spam_score", "<=", backlink_spam_score])
conditions.append(["page_from_language", "=", page_from_language])

filters = []
for clause in conditions:
    if filters:
        filters.append("and")
    filters.append(clause)
# Main app layout: two columns, with the target URL input on the left.
col1, col2 = st.columns(2)
with col1:
    st.header("Input")
    target_url = st.text_input("Enter the target URL")  # Define target_url here

generate_button = st.sidebar.button("Generate All")
reset_button = st.sidebar.button("Reset")

# Fetch backlinks and offer the result as a CSV download.
if generate_button and target_url:
    df = get_backlinks(api_login, api_key, target_url, filters)
    if df is not None:
        csv = convert_df_to_csv(df)
        st.download_button(
            label="Download data as CSV",
            data=csv,
            file_name='backlinks.csv',
            mime='text/csv',
        )

# Reset functionality: rerun the script from the top.
if reset_button:
    # st.experimental_rerun() was deprecated and removed in newer Streamlit
    # releases; prefer st.rerun() when available, fall back for old versions.
    if hasattr(st, "rerun"):
        st.rerun()
    else:
        st.experimental_rerun()
|