joshuadunlop committed on
Commit
514f0d9
·
verified ·
1 Parent(s): a473a45

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +83 -19
app.py CHANGED
@@ -2,6 +2,10 @@ import streamlit as st
2
  import pandas as pd
3
  import requests
4
  import base64
 
 
 
 
5
 
6
  def get_backlinks(api_login, api_key, target_url, filters):
7
  # Encoding credentials
@@ -55,6 +59,30 @@ def get_backlinks(api_login, api_key, target_url, filters):
55
  def convert_df_to_csv(df):
56
  # Convert DataFrame to CSV
57
  return df.to_csv(index=False).encode('utf-8')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
 
59
  # Streamlit layout
60
  st.sidebar.title("DataForSEO API Parameters")
@@ -112,34 +140,70 @@ filters.append("and")
112
  if filters and filters[-1] == "and":
113
  filters.pop()
114
 
115
- # Main app layout
116
- col1, col2 = st.columns(2)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
117
 
118
- with col1:
119
- st.header("Input")
120
- target_url = st.text_input("Enter the target URL")
121
 
122
- # Variables for control flow
123
- df = None
 
 
 
 
 
124
  generate_button = st.sidebar.button("Generate All")
125
- reset_button = st.sidebar.button("Reset")
126
 
127
- # Generate CSV and download button logic
128
- if generate_button and target_url:
129
- df = get_backlinks(api_login, api_key, target_url, filters)
 
 
 
 
130
 
131
- with col2:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
132
  if df is not None:
133
  csv = convert_df_to_csv(df)
134
  st.download_button(
135
- label="Download data as CSV",
136
  data=csv,
137
- file_name='backlinks.csv',
138
  mime='text/csv',
139
  )
140
  elif df is None and generate_button:
141
- st.error("Failed to generate CSV: No data returned from the API or data processing error.")
142
-
143
- # Reset functionality
144
- if reset_button:
145
- st.experimental_rerun()
 
2
  import pandas as pd
3
  import requests
4
  import base64
5
+ import threading
6
+ from queue import Queue
7
+ from io import StringIO
8
+ import csv
9
 
10
  def get_backlinks(api_login, api_key, target_url, filters):
11
  # Encoding credentials
 
59
def convert_df_to_csv(df):
    """Serialize *df* to CSV bytes (UTF-8, no index column).

    :param df: pandas DataFrame to export
    :return: the CSV text encoded as UTF-8 bytes, ready for download
    """
    csv_text = df.to_csv(index=False)
    return csv_text.encode('utf-8')
62
+
63
# Worker thread to handle concurrent API calls
class BacklinkWorker(threading.Thread):
    """Thread that drains a queue of backlink-fetch jobs.

    Each job is a ``(url_id, target_url, filters)`` tuple; ``None`` is the
    shutdown sentinel. For every job the worker calls ``get_backlinks`` and
    pushes ``(url_id, df)`` onto the results queue.
    """

    def __init__(self, jobs, results, api_login, api_key):
        super().__init__()
        self.jobs = jobs          # inbound queue of job tuples (or None sentinel)
        self.results = results    # outbound queue of (url_id, df) pairs
        self.api_login = api_login
        self.api_key = api_key

    def run(self):
        # iter() with a sentinel keeps pulling jobs until None arrives.
        for url_id, target_url, filters in iter(self.jobs.get, None):
            backlinks_df = get_backlinks(self.api_login, self.api_key, target_url, filters)
            self.results.put((url_id, backlinks_df))
80
+
81
# Function to process pasted data
def process_pasted_data(data):
    """Parse a newline-separated block of pasted URLs into a list.

    Each non-empty line becomes one entry; surrounding double quotes
    (e.g. from a spreadsheet paste) are stripped by the csv reader.

    :param data: raw text pasted into the sidebar text area
    :return: list of URL strings, one per non-empty line
    """
    data_io = StringIO(data.strip())
    reader = csv.reader(data_io, delimiter='\n', quotechar='"')
    # Bug fix: blank interior lines make csv.reader yield an empty row,
    # and row[0] then raises IndexError — skip empty rows.
    return [row[0] for row in reader if row]
86
 
87
  # Streamlit layout
88
  st.sidebar.title("DataForSEO API Parameters")
 
140
# Drop a trailing logical connector left over from filter assembly.
if filters and filters[-1] == "and":
    filters.pop()

# Sidebar options for concurrency and pasting data
num_concurrent_calls = st.sidebar.number_input("Concurrent Calls:", min_value=1, max_value=10, value=5, step=1)
data_section = st.sidebar.text_area("Paste Data:")
paste_data = st.sidebar.button("Paste Data")
add_row = st.sidebar.button("Add Row")
reset = st.sidebar.button("Reset")

# Managing rows and pasted data: pasting replaces the row set wholesale.
if paste_data:
    pasted_urls = process_pasted_data(data_section)
    for idx, url in enumerate(pasted_urls):
        st.session_state[f"target_url_{idx}"] = url
    st.session_state["row_count"] = len(pasted_urls)

if add_row:
    # Grow the visible row count by one (starting from 0 if unset).
    st.session_state["row_count"] = st.session_state.get("row_count", 0) + 1

if reset:
    # Wipe all rows and cached results.
    st.session_state.clear()

# Main app layout: one text input per row, keyed into session state.
row_count = st.session_state.get("row_count", 1)
for idx in range(row_count):
    target_url = st.text_input(f"Enter the target URL {idx + 1}", key=f"target_url_{idx}")

# Generate and reset button logic
generate_button = st.sidebar.button("Generate All")

if generate_button:
    jobs = Queue()
    results = Queue()
    workers = [BacklinkWorker(jobs, results, api_login, api_key) for _ in range(num_concurrent_calls)]
    for w in workers:
        w.start()

    # Queue one job per non-empty URL row.
    for idx in range(row_count):
        url = st.session_state.get(f"target_url_{idx}", "")
        if url:
            jobs.put((idx, url, filters))

    # One None sentinel per worker signals shutdown; then wait for all.
    for _ in workers:
        jobs.put(None)
    for w in workers:
        w.join()

    # Stash each result DataFrame in session state, keyed by row index.
    while not results.empty():
        url_id, df = results.get()
        st.session_state[f"df_{url_id}"] = df

# Display and download logic for each row
for i in range(row_count):
    df = st.session_state.get(f"df_{i}")
    if df is not None:
        csv = convert_df_to_csv(df)
        st.download_button(
            label=f"Download data as CSV for URL {i + 1}",
            data=csv,
            file_name=f'backlinks_{i + 1}.csv',
            mime='text/csv',
        )
    elif df is None and generate_button:
        st.error(f"Failed to generate CSV for URL {i + 1}: No data returned from the API or data processing error.")