Spaces:
Running
Running
Commit
·
67b6792
1
Parent(s):
c2c07f9
Add proxy handling to NLP functions and update .gitignore to exclude JSON files
Browse files- .gitignore +1 -1
- nlp_processes.py +21 -0
- proxy.py +28 -0
.gitignore
CHANGED
@@ -3,5 +3,5 @@ __pycache__
|
|
3 |
/downloads
|
4 |
*.html
|
5 |
.gradio
|
6 |
-
|
7 |
test*
|
|
|
3 |
/downloads
|
4 |
*.html
|
5 |
.gradio
|
6 |
+
*.json
|
7 |
test*
|
nlp_processes.py
CHANGED
@@ -1,8 +1,12 @@
|
|
1 |
from g4f.client import Client
|
2 |
from g4f.Provider import RetryProvider, Blackbox, Airforce, AmigoChat, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI
|
|
|
3 |
import threading
|
4 |
|
5 |
def generate_nlp_summary(temp_summary):
|
|
|
|
|
|
|
6 |
try:
|
7 |
try:
|
8 |
client = Client(
|
@@ -12,6 +16,7 @@ def generate_nlp_summary(temp_summary):
|
|
12 |
),
|
13 |
)
|
14 |
completion = client.chat.completions.create(
|
|
|
15 |
model="llama-3.1-405b",
|
16 |
messages=[
|
17 |
{"role": "system", "content": "You are a helpful research assistant for generating well-formatted summaries from scientific research papers."},
|
@@ -23,6 +28,7 @@ def generate_nlp_summary(temp_summary):
|
|
23 |
print(str(e))
|
24 |
client = Client()
|
25 |
completion = client.chat.completions.create(
|
|
|
26 |
provider=RetryProvider(
|
27 |
providers=[Blackbox, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI],
|
28 |
max_retries=8,
|
@@ -40,6 +46,9 @@ def generate_nlp_summary(temp_summary):
|
|
40 |
return False
|
41 |
|
42 |
def generate_nlp_mindmap(temp_summary):
|
|
|
|
|
|
|
43 |
try:
|
44 |
try:
|
45 |
client = Client(
|
@@ -49,6 +58,7 @@ def generate_nlp_mindmap(temp_summary):
|
|
49 |
),
|
50 |
)
|
51 |
completion = client.chat.completions.create(
|
|
|
52 |
model="llama-3.1-405b",
|
53 |
messages=[
|
54 |
{"role": "system", "content": "You are a helpful research assistant for generating well-formatted mindmaps from scientific research papers."},
|
@@ -60,6 +70,7 @@ def generate_nlp_mindmap(temp_summary):
|
|
60 |
print(str(e))
|
61 |
client = Client()
|
62 |
completion = client.chat.completions.create(
|
|
|
63 |
provider=RetryProvider(
|
64 |
providers=[Blackbox, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI],
|
65 |
max_retries=8,
|
@@ -77,6 +88,9 @@ def generate_nlp_mindmap(temp_summary):
|
|
77 |
return False
|
78 |
|
79 |
def fix_title(title):
|
|
|
|
|
|
|
80 |
try:
|
81 |
try:
|
82 |
client = Client(
|
@@ -86,6 +100,7 @@ def fix_title(title):
|
|
86 |
),
|
87 |
)
|
88 |
completion = client.chat.completions.create(
|
|
|
89 |
model="llama-3.1-405b",
|
90 |
messages=[
|
91 |
{
|
@@ -120,6 +135,7 @@ def fix_title(title):
|
|
120 |
print(str(e))
|
121 |
client = Client()
|
122 |
completion = client.chat.completions.create(
|
|
|
123 |
provider=RetryProvider(
|
124 |
providers=[Blackbox, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI],
|
125 |
max_retries=8,
|
@@ -160,6 +176,9 @@ def fix_title(title):
|
|
160 |
return False
|
161 |
|
162 |
def fix_citation(citation):
|
|
|
|
|
|
|
163 |
try:
|
164 |
try:
|
165 |
client = Client(
|
@@ -169,6 +188,7 @@ def fix_citation(citation):
|
|
169 |
),
|
170 |
)
|
171 |
completion = client.chat.completions.create(
|
|
|
172 |
model="llama-3.1-405b",
|
173 |
messages=[
|
174 |
{
|
@@ -201,6 +221,7 @@ def fix_citation(citation):
|
|
201 |
print(str(e))
|
202 |
client = Client()
|
203 |
completion = client.chat.completions.create(
|
|
|
204 |
provider=RetryProvider(
|
205 |
providers=[Blackbox, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI],
|
206 |
max_retries=8,
|
|
|
1 |
from g4f.client import Client
|
2 |
from g4f.Provider import RetryProvider, Blackbox, Airforce, AmigoChat, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI
|
3 |
+
from proxy import get_proxy
|
4 |
import threading
|
5 |
|
6 |
def generate_nlp_summary(temp_summary):
|
7 |
+
proxy = get_proxy()
|
8 |
+
while not get_proxy():
|
9 |
+
proxy = get_proxy()
|
10 |
try:
|
11 |
try:
|
12 |
client = Client(
|
|
|
16 |
),
|
17 |
)
|
18 |
completion = client.chat.completions.create(
|
19 |
+
proxy=proxy,
|
20 |
model="llama-3.1-405b",
|
21 |
messages=[
|
22 |
{"role": "system", "content": "You are a helpful research assistant for generating well-formatted summaries from scientific research papers."},
|
|
|
28 |
print(str(e))
|
29 |
client = Client()
|
30 |
completion = client.chat.completions.create(
|
31 |
+
proxy=proxy,
|
32 |
provider=RetryProvider(
|
33 |
providers=[Blackbox, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI],
|
34 |
max_retries=8,
|
|
|
46 |
return False
|
47 |
|
48 |
def generate_nlp_mindmap(temp_summary):
|
49 |
+
proxy = get_proxy()
|
50 |
+
while not get_proxy():
|
51 |
+
proxy = get_proxy()
|
52 |
try:
|
53 |
try:
|
54 |
client = Client(
|
|
|
58 |
),
|
59 |
)
|
60 |
completion = client.chat.completions.create(
|
61 |
+
proxy=proxy,
|
62 |
model="llama-3.1-405b",
|
63 |
messages=[
|
64 |
{"role": "system", "content": "You are a helpful research assistant for generating well-formatted mindmaps from scientific research papers."},
|
|
|
70 |
print(str(e))
|
71 |
client = Client()
|
72 |
completion = client.chat.completions.create(
|
73 |
+
proxy=proxy,
|
74 |
provider=RetryProvider(
|
75 |
providers=[Blackbox, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI],
|
76 |
max_retries=8,
|
|
|
88 |
return False
|
89 |
|
90 |
def fix_title(title):
|
91 |
+
proxy = get_proxy()
|
92 |
+
while not get_proxy():
|
93 |
+
proxy = get_proxy()
|
94 |
try:
|
95 |
try:
|
96 |
client = Client(
|
|
|
100 |
),
|
101 |
)
|
102 |
completion = client.chat.completions.create(
|
103 |
+
proxy=proxy,
|
104 |
model="llama-3.1-405b",
|
105 |
messages=[
|
106 |
{
|
|
|
135 |
print(str(e))
|
136 |
client = Client()
|
137 |
completion = client.chat.completions.create(
|
138 |
+
proxy=proxy,
|
139 |
provider=RetryProvider(
|
140 |
providers=[Blackbox, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI],
|
141 |
max_retries=8,
|
|
|
176 |
return False
|
177 |
|
178 |
def fix_citation(citation):
|
179 |
+
proxy = get_proxy()
|
180 |
+
while not get_proxy():
|
181 |
+
proxy = get_proxy()
|
182 |
try:
|
183 |
try:
|
184 |
client = Client(
|
|
|
188 |
),
|
189 |
)
|
190 |
completion = client.chat.completions.create(
|
191 |
+
proxy=proxy,
|
192 |
model="llama-3.1-405b",
|
193 |
messages=[
|
194 |
{
|
|
|
221 |
print(str(e))
|
222 |
client = Client()
|
223 |
completion = client.chat.completions.create(
|
224 |
+
proxy=proxy,
|
225 |
provider=RetryProvider(
|
226 |
providers=[Blackbox, ChatGptEs, ChatGpt, Copilot, DDG, Liaobots, Mhystical, PollinationsAI],
|
227 |
max_retries=8,
|
proxy.py
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from swiftshadow.classes import Proxy
from requests import get, exceptions


def get_proxy():
    """Find a working public proxy and return it as a requests proxies mapping.

    Rotates through swiftshadow's public proxy pool, verifying each candidate
    by routing a request to AWS's IP-echo service and checking that the echoed
    IP matches the proxy's address.

    Returns:
        dict: ``{'http': addr, 'https': addr}`` for the first verified proxy.

    Raises:
        Exception: if no valid proxy is found after ``max_retries`` attempts.
    """
    max_retries = 10  # Maximum number of attempts to find a valid proxy
    attempts = 0
    while attempts < max_retries:
        try:
            # swiftshadow's proxy() result is indexed with [0] below and
            # split on ':' — presumably a sequence whose first element is
            # an "ip:port" string. TODO confirm against swiftshadow docs;
            # a scheme-prefixed value ("http://ip:port") would break the
            # split(':')[0] comparison further down.
            proxy = Proxy(
                autoRotate=True,
                cachePeriod=3,
            ).proxy()
            proxies = {
                'http': proxy[0],
                'https': proxy[0]
            }
            # BUGFIX: the original URL was "checkip.amazonws.com" (missing
            # the "a" in "amazonaws"). That host never resolves, so every
            # attempt raised RequestException and the function always failed.
            resp = get('https://checkip.amazonaws.com', proxies=proxies, timeout=10)
            ip_address = resp.text.strip()
            # The proxy only counts as valid if the echo service saw the
            # proxy's IP rather than ours (i.e. traffic really went through it).
            if ip_address == proxy[0].split(':')[0]:
                print(f"Valid proxy found: {proxy[0]}")
                return proxies
            else:
                print(f"Proxy {proxy[0]} did not match the response IP ({ip_address}). Retrying...")
        except exceptions.RequestException as e:
            print(f"Error testing proxy: {e}. Retrying...")
        attempts += 1  # count both mismatches and request errors as attempts
    raise Exception("Failed to find a valid proxy after multiple attempts.")