import streamlit as st
import pandas as pd
import requests
import base64

def get_backlinks(api_login, api_key, target_url, filters):
    # Encoding credentials
    encoded_credentials = base64.b64encode(f"{api_login}:{api_key}".encode()).decode()

    # Setting headers with Basic Authentication
    headers = {
        'Authorization': f'Basic {encoded_credentials}'
    }
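    # Illustrative values only: base64 of "login:key" is "bG9naW46a2V5",
    # so the header sent would be "Authorization: Basic bG9naW46a2V5"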

    # Prepare the task payload; the v3 endpoints take an array of task objects
    post_data = [{
        "target": target_url,
        "limit": 1000,
        "mode": "as_is",
        "filters": filters
    }]
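    # Illustrative only (an assumption based on the v3 docs, not captured
    # output): with the default sidebar filters the serialized body would
    # look roughly like
    #   [{"target": "example.com", "limit": 1000, "mode": "as_is",
    #     "filters": [["dofollow", "=", true], "and", ...]}]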

    # Making the API request
    response = requests.post("https://api.dataforseo.com/v3/backlinks/backlinks/live", json=post_data, headers=headers)

    # Log the full response for debugging
    st.text("API Response:")
    st.text(f"Response Status Code: {response.status_code}")
    st.text(f"Response Headers: {response.headers}")
    try:
        response_json = response.json()
        st.text(f"Response Body: {response_json}")
    except ValueError as e:
        # Bail out early so later code never touches an undefined response_json
        st.text(f"Response Body: <Not a JSON response>\nError: {e}")
        return None

    # DataForSEO v3 nests the data as tasks -> result -> items, so unwrap
    # each layer defensively rather than looking for a top-level 'results' key
    if response.status_code == 200 and response_json.get('tasks'):
        task = response_json['tasks'][0]
        result = task.get('result') or []
        items = (result[0] or {}).get('items') if result else None
        if items:
            st.text(f"Items: {items[:2]}")  # Debugging line to show the record structure

            # Flatten the nested backlink records into a tabular DataFrame
            df = pd.json_normalize(items)
            return df
        else:
            st.error("Received empty data from API.")
            return None
    else:
        # Handle API errors
        st.error(f"Error: Code: {response.status_code} Message: {response_json.get('status_message', 'No specific error message provided')}")
        return None
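
# Minimal usage sketch outside the UI (hypothetical credentials and target;
# the st.* calls above expect a running Streamlit session):
#   df = get_backlinks("login", "key", "example.com", [["dofollow", "=", True]])
#   if df is not None:
#       df.to_csv("backlinks.csv", index=False)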

def convert_df_to_csv(df):
    # Convert DataFrame to CSV
    return df.to_csv(index=False).encode('utf-8')
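
# e.g. convert_df_to_csv(pd.DataFrame({"a": [1]})) == b"a\n1\n"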

# Streamlit layout
st.sidebar.title("DataForSEO API Parameters")
api_login = st.sidebar.text_input("API Login", value="[email protected]")
api_key = st.sidebar.text_input("API Key", type="password")

# Filters input
url_from_not_contain = st.sidebar.text_input("URL from does not contain (comma-separated)")
is_lost = st.sidebar.checkbox("Is Lost", value=False)
dofollow = st.sidebar.checkbox("Dofollow", value=True)
backlink_spam_score = st.sidebar.slider("Backlink Spam Score ≤", 0, 100, 10)
page_from_language = st.sidebar.selectbox("Page From Language", ['en', 'other'])

# Prepare filters for API call
filters = []

if url_from_not_contain:
    for url in url_from_not_contain.split(','):
        # not_like does substring matching with % wildcards
        filters.append(["url_from", "not_like", f"%{url.strip()}%"])
        filters.append("and")

if is_lost:
    filters.append(["is_lost", "=", is_lost])
    filters.append("and")

if dofollow:
    filters.append(["dofollow", "=", dofollow])
    filters.append("and")

filters.append(["backlink_spam_score", "<=", backlink_spam_score])
filters.append("and")
filters.append(["page_from_language", "=", page_from_language])

# Drop a trailing "and", if any, so the filter expression stays well-formed
if filters and filters[-1] == "and":
    filters.pop()
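
# For example, with the sidebar defaults (Dofollow checked, spam score 10,
# language 'en', the other inputs left empty), the list built above is:
#   [["dofollow", "=", True], "and",
#    ["backlink_spam_score", "<=", 10], "and",
#    ["page_from_language", "=", "en"]]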

# Main app layout
col1, col2 = st.columns(2)

with col1:
    st.header("Input")
    target_url = st.text_input("Enter the target URL")

generate_button = st.sidebar.button("Generate All")
reset_button = st.sidebar.button("Reset")

# Fetch the backlinks and keep the DataFrame in session state so the
# download button below persists across Streamlit reruns
if generate_button and target_url:
    df = get_backlinks(api_login, api_key, target_url, filters)
    if df is not None:
        st.session_state['df'] = df
    else:
        st.error("Failed to generate CSV: No data returned from the API or data processing error.")

# Reset functionality: clear any stored results and rerun the app
if reset_button:
    st.session_state.pop('df', None)
    st.experimental_rerun()

# Show the download button only when a DataFrame is available
if st.session_state.get('df') is not None:
    csv = convert_df_to_csv(st.session_state['df'])
    st.download_button(
        label="Download data as CSV",
        data=csv,
        file_name='backlinks.csv',
        mime='text/csv',
    )
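
# To try the app locally (assuming this file is saved as app.py and the
# streamlit, pandas, and requests packages are installed):
#   streamlit run app.py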