Update app.py
app.py CHANGED
@@ -9,28 +9,43 @@ import csv
 
 def get_backlinks(api_login, api_key, target_url, filters):
     encoded_credentials = base64.b64encode(f"{api_login}:{api_key}".encode()).decode()
     headers = {
+        'Authorization': f'Basic {encoded_credentials}'
+    }
+    post_data = {
+        0: {
+            "target": target_url,
+            "limit": 1000,
+            "mode": "as_is",
+            "filters": filters
+        }
+    }
     response = requests.post("https://api.dataforseo.com/v3/backlinks/backlinks/live", json=post_data, headers=headers)
     if response.status_code == 200:
         response_data = response.json()
-        if 'tasks' in response_data
+        if 'tasks' in response_data:
+            task_result = response_data['tasks'][0]['result']
+            if task_result and 'items' in task_result[0]:
+                items = task_result[0]['items']
+                df = pd.json_normalize(items)
+                return df
+            else:
+                st.error("Received empty 'result' from API or missing 'items'.")
+                return None
         else:
+            st.error(f"No 'tasks' key in response JSON. Full response: {response_data}")
+            return None
     else:
+        error_message = response.json().get('status_message', 'No specific error message provided')
+        st.error(f"Error: Code: {response.status_code} Message: {error_message}")
+        return None
 
 def convert_df_to_csv(df):
     return df.to_csv(index=False).encode('utf-8')
 
 class BacklinkWorker(threading.Thread):
     def __init__(self, jobs, results, api_login, api_key):
+        super().__init__()
         self.jobs = jobs
         self.results = results
         self.api_login = api_login
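
For reference, the rewritten get_backlinks() authenticates with HTTP Basic auth and posts a single task object to DataForSEO's live backlinks endpoint. A minimal standalone sketch of the same request outside Streamlit (the helper name fetch_backlinks_raw, the placeholder credentials, and the timeout are assumptions, not part of the commit):

import base64
import requests

def fetch_backlinks_raw(login, password, target):
    # Build the Basic-auth token the same way get_backlinks() does above.
    token = base64.b64encode(f"{login}:{password}".encode()).decode()
    response = requests.post(
        "https://api.dataforseo.com/v3/backlinks/backlinks/live",
        # Mirrors the post_data shape used in the diff: one task keyed by 0.
        json={0: {"target": target, "limit": 1000, "mode": "as_is", "filters": []}},
        headers={"Authorization": f"Basic {token}"},
        timeout=30,  # assumption: keeps a bad call from hanging a worker thread
    )
    response.raise_for_status()
    return response.json()["tasks"][0]["result"]
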
@@ -39,18 +54,16 @@ class BacklinkWorker(threading.Thread):
     def run(self):
         while True:
             job = self.jobs.get()
             if job is None:
-                self.jobs.task_done()
                 break
             url_id, target_url, filters = job
             df = get_backlinks(self.api_login, self.api_key, target_url, filters)
-            self.jobs.task_done()
+            self.results.put((url_id, df))
 
 def process_pasted_data(data):
-    data_io = StringIO(data)
-    reader = csv.reader(data_io, delimiter='\n')
+    data_io = StringIO(data.strip())
+    reader = csv.reader(data_io, delimiter='\n', quotechar='"')
+    return [row[0] for row in reader]
 
 st.sidebar.title("DataForSEO API Parameters")
 api_login = st.sidebar.text_input("API Login", value="[email protected]")
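
The reworked run() loop relies on the standard sentinel pattern: the producer enqueues one None per worker, and a worker exits as soon as it dequeues one. Because the code now waits on worker.join() rather than jobs.join(), the Queue.task_done() calls became dead bookkeeping and were dropped, and results is only drained after every thread has exited, so no locking beyond the Queue itself is needed. A self-contained sketch of the pattern with a trivial stand-in payload:

import threading
from queue import Queue

def worker(jobs: Queue, results: Queue) -> None:
    while True:
        job = jobs.get()
        if job is None:  # sentinel: one per worker signals shutdown
            break
        results.put(job * 2)  # stand-in for the real API call

jobs, results = Queue(), Queue()
threads = [threading.Thread(target=worker, args=(jobs, results)) for _ in range(4)]
for t in threads:
    t.start()
for i in range(10):
    jobs.put(i)
for _ in threads:
    jobs.put(None)  # one sentinel per worker
for t in threads:
    t.join()
print(sorted(results.get() for _ in range(10)))  # [0, 2, 4, ..., 18]
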
@@ -113,49 +126,57 @@ add_row = st.sidebar.button("Add Row")
 reset = st.sidebar.button("Reset")
 
 if paste_data:
+    pasted_urls = process_pasted_data(data_section)
+    for i, url in enumerate(pasted_urls):
+        st.session_state[f"target_url_{i}"] = url
+    st.session_state["row_count"] = len(pasted_urls)
 
 if add_row:
-    else:
-        st.session_state.row_count += 1
+    row_count = st.session_state.get("row_count", 0) + 1
+    st.session_state["row_count"] = row_count
 
 if reset:
     st.session_state.clear()
 
+row_count = st.session_state.get("row_count", 1)
+for i in range(row_count):
+    cols = st.columns(2)
+    target_url_key = f"target_url_{i}"
+    target_url = cols[0].text_input(f"Enter the target URL {i + 1}", key=target_url_key)
+    df_key = f"df_{i}"
+    df = st.session_state.get(df_key)
+    if df is not None:
+        csv = convert_df_to_csv(df)
+        cols[1].download_button(
+            label=f"Download data as CSV for URL {i + 1}",
+            data=csv,
+            file_name=f'backlinks_{i + 1}.csv',
+            mime='text/csv',
+        )
+    elif df is None and generate_button:
+        cols[1].error(f"Failed to generate CSV for URL {i + 1}: No data returned from the API or data processing error.")
+
+generate_button = st.sidebar.button("Generate All")
+
+if generate_button:
     jobs = Queue()
+    results = Queue()
     workers = [BacklinkWorker(jobs, results, api_login, api_key) for _ in range(num_concurrent_calls)]
 
     for worker in workers:
         worker.start()
 
-        target_url = st.session_state.get(f"target_url_{i}")
+    for i in range(row_count):
+        target_url = st.session_state.get(f"target_url_{i}", "")
         if target_url:
             jobs.put((i, target_url, filters))
 
     for _ in workers:
         jobs.put(None)
 
     for worker in workers:
         worker.join()
 
-# Display Download Buttons
-for i in range(st.session_state.get('row_count', 1)):
-    df = st.session_state.get(f'df_{i}')
-    if df is not None and not df.empty:
-        csv = convert_df_to_csv(df)
-        st.download_button(f"Download CSV for URL {i + 1}", csv, f"backlinks_{i + 1}.csv", "text/csv", key=f"download_{i}")
+    while not results.empty():
+        url_id, df = results.get()
+        st.session_state[f"df_{url_id}"] = df