"""Streamlit app for downloading backlink data from the DataForSEO API.

Users supply API credentials and filter options in the sidebar, enter a
target URL, and download the returned backlinks as a CSV file.
"""
import base64

import pandas as pd
import requests
import streamlit as st


def get_backlinks(api_login, api_key, target_url, filters):
    """Fetch live backlink records for *target_url* from DataForSEO.

    Parameters
    ----------
    api_login : str
        DataForSEO account login (HTTP Basic auth username).
    api_key : str
        DataForSEO API key (HTTP Basic auth password).
    target_url : str
        URL whose backlinks are requested.
    filters : list
        DataForSEO filter expression: condition triples joined by "and".

    Returns
    -------
    pandas.DataFrame or None
        Normalized backlink rows, or None on any error (the error is
        reported in the Streamlit UI).
    """
    # HTTP Basic auth header: base64("login:key").
    encoded_credentials = base64.b64encode(f"{api_login}:{api_key}".encode()).decode()
    headers = {'Authorization': f'Basic {encoded_credentials}'}

    # DataForSEO accepts a mapping of task-index -> task payload.
    post_data = {
        0: {
            "target": target_url,
            "limit": 1000,
            "mode": "as_is",
            "filters": filters
        }
    }

    response = requests.post(
        "https://api.dataforseo.com/v3/backlinks/backlinks/live",
        json=post_data,
        headers=headers,
        timeout=60,  # don't hang the UI forever on a stalled request
    )

    # Log the full response for debugging.
    st.text("API Response:")
    st.text(f"Response Status Code: {response.status_code}")
    st.text(f"Response Headers: {response.headers}")

    # Parse the body exactly once and reuse it below; the original parsed
    # it up to three times and could raise on a non-JSON error body.
    try:
        response_data = response.json()
        st.text(f"Response Body: {response_data}")
    except ValueError as e:
        response_data = None
        st.text(f"Response Body: \nError: {e}")

    if response.status_code != 200:
        if isinstance(response_data, dict):
            error_message = response_data.get('status_message', 'No specific error message provided')
        else:
            error_message = 'No specific error message provided'
        st.error(f"Error: Code: {response.status_code} Message: {error_message}")
        return None

    if response_data is None:
        st.error("Error: response body was not valid JSON.")
        return None

    # Debugging: show the top-level keys of the response.
    st.text(f"Keys in response JSON: {list(response_data.keys())}")

    if 'results' not in response_data:
        # NOTE: message kept on one line — the original contained a raw
        # newline inside a single-quoted f-string, which is a SyntaxError.
        st.error(f"No 'results' key in response JSON. Full response: {response_data}")
        return None

    results = response_data['results']
    if not results:
        st.error("Received empty 'results' from API.")
        return None

    # Debugging: inspect the first few items (results is a list).
    st.text(f"First few results: {results[:5]}")
    return pd.json_normalize(results)


def convert_df_to_csv(df):
    """Serialize *df* to UTF-8 CSV bytes without the index column."""
    return df.to_csv(index=False).encode('utf-8')


# ---------------------------------------------------------------------------
# Sidebar: credentials and filter inputs
# ---------------------------------------------------------------------------
st.sidebar.title("DataForSEO API Parameters")
api_login = st.sidebar.text_input("API Login", value="josh@expertphotography.com")
api_key = st.sidebar.text_input("API Key", type="password")

url_from_not_contain = st.sidebar.text_input("URL from does not contain (comma-separated)")
is_lost = st.sidebar.checkbox("Is Lost", value=False)
dofollow = st.sidebar.checkbox("Dofollow", value=True)
backlink_spam_score = st.sidebar.slider("Backlink Spam Score ≤", 0, 100, 10)
page_from_language = st.sidebar.selectbox("Page From Language", ['en', 'other'])

# Build the DataForSEO filter expression: triples joined by "and".
filters = []
if url_from_not_contain:
    for url in url_from_not_contain.split(','):
        filters.append(["url_from", "not_like", url.strip()])
        filters.append("and")
if is_lost:
    filters.append(["is_lost", "=", is_lost])
    filters.append("and")
if dofollow:
    filters.append(["dofollow", "=", dofollow])
    filters.append("and")
filters.append(["backlink_spam_score", "<=", backlink_spam_score])
filters.append("and")
filters.append(["page_from_language", "=", page_from_language])

# Drop a trailing "and" so the expression is well-formed.
if filters and filters[-1] == "and":
    filters.pop()

# ---------------------------------------------------------------------------
# Main layout
# ---------------------------------------------------------------------------
col1, col2 = st.columns(2)
with col1:
    st.header("Input")
    target_url = st.text_input("Enter the target URL")

generate_button = st.sidebar.button("Generate All")
reset_button = st.sidebar.button("Reset")

df = None

# Generate the CSV and offer it for download.
if generate_button and target_url:
    df = get_backlinks(api_login, api_key, target_url, filters)
    if df is not None:
        csv = convert_df_to_csv(df)
        st.download_button(
            label="Download data as CSV",
            data=csv,
            file_name='backlinks.csv',
            mime='text/csv',
        )
    else:
        st.error("Failed to generate CSV: No data returned from the API or data processing error.")

# Reset simply re-runs the script, clearing transient UI state.
if reset_button:
    st.experimental_rerun()  # NOTE(review): deprecated in newer Streamlit — use st.rerun() there