joshuadunlop committed
Commit fe997de · verified · 1 Parent(s): 188966c

Update app.py

Files changed (1):
  1. app.py  +5 -28
app.py CHANGED
@@ -3,7 +3,7 @@ import pandas as pd
 import requests
 import base64
 
-def get_backlinks(api_login, api_key, target_url, filters):
+def get_backlinks(api_login, api_key, target_url, filters, include_subdomains):
     # Encoding credentials
     encoded_credentials = base64.b64encode(f"{api_login}:{api_key}".encode()).decode()
 
@@ -18,7 +18,8 @@ def get_backlinks(api_login, api_key, target_url, filters):
             "target": target_url,
             "limit": 1000,
             "mode": "as_is",
-            "filters": filters
+            "filters": filters,
+            "include_subdomains": include_subdomains  # Adding the include_subdomains filter
         }
     }
 
@@ -79,12 +80,7 @@ is_lost = st.sidebar.checkbox("Is Lost", value=False)
 dofollow = st.sidebar.checkbox("Dofollow", value=True)
 backlink_spam_score = st.sidebar.slider("Backlink Spam Score ≤", 0, 100, 10)
 page_from_language = st.sidebar.selectbox("Page From Language", ['en', 'other'])
-
-# New filters
-include_subdomains = st.sidebar.checkbox("Include Subdomains", value=True)
-page_from_rank = st.sidebar.text_input("Page From Rank (e.g., '<10,>1')")
-domain_from_rank = st.sidebar.text_input("Domain From Rank (e.g., '<10,>1')")
-is_broken = st.sidebar.checkbox("Is Broken", value=False)
+include_subdomains = st.sidebar.checkbox("Include Subdomains", value=True)  # New filter
 
 # Prepare filters for API call
 filters = []
@@ -106,25 +102,6 @@ filters.append(["backlink_spam_score", "<=", backlink_spam_score])
 filters.append("and")
 filters.append(["page_from_language", "=", page_from_language])
 
-# New filters processing
-filters.append(["include_subdomains", "=", include_subdomains])
-filters.append("and")
-
-for rank_condition in page_from_rank.split(','):
-    if rank_condition:
-        operator, value = rank_condition[:1], rank_condition[1:]
-        filters.append(["page_from_rank", operator, value])
-        filters.append("and")
-
-for rank_condition in domain_from_rank.split(','):
-    if rank_condition:
-        operator, value = rank_condition[:1], rank_condition[1:]
-        filters.append(["domain_from_rank", operator, value])
-        filters.append("and")
-
-filters.append(["is_broken", "=", is_broken])
-filters.append("and")
-
 # Remove the last "and" if it's the last element
 if filters and filters[-1] == "and":
     filters.pop()
@@ -143,7 +120,7 @@ df = None
 
 # Generate CSV and download button
 if generate_button and target_url:
-    df = get_backlinks(api_login, api_key, target_url, filters)
+    df = get_backlinks(api_login, api_key, target_url, filters, include_subdomains)
     if df is not None:
         csv = convert_df_to_csv(df)
         st.download_button(
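
For orientation, here is a minimal, self-contained sketch of what this commit amounts to: include_subdomains moves out of the filters list and becomes a sibling of "filters" inside the request item that get_backlinks posts. The stand-in values, the outer dict key (0), and the suggested reason for the move (include_subdomains appears to be a request-level parameter rather than a filterable field) are assumptions; only the item keys and the filter-building and cleanup lines are taken from the hunks above.

import json

# Stand-ins for the Streamlit sidebar inputs shown in the diff
target_url = "example.com"
backlink_spam_score = 10
page_from_language = "en"
include_subdomains = True  # new checkbox, now passed as its own argument

# Filters built the same way app.py builds them for the widgets visible above
filters = []
filters.append(["backlink_spam_score", "<=", backlink_spam_score])
filters.append("and")
filters.append(["page_from_language", "=", page_from_language])
filters.append("and")

# Remove the trailing "and", as app.py does
if filters and filters[-1] == "and":
    filters.pop()

# After this commit, include_subdomains sits next to "filters" in the request
# item instead of being appended to the filters list. The outer key 0 is an
# assumption; only the nested braces are visible in the diff.
post_data = {
    0: {
        "target": target_url,
        "limit": 1000,
        "mode": "as_is",
        "filters": filters,
        "include_subdomains": include_subdomains,
    }
}

print(json.dumps(post_data, indent=2))

The sketch stops before the network call because the endpoint URL and response handling are not part of these hunks; in the app itself, get_backlinks sends this item with requests.post, presumably with a Basic auth header built from the base64-encoded api_login and api_key shown in the context lines.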