Update app.py
app.py CHANGED
@@ -61,114 +61,6 @@ def hf_chat(api_key, model, text):
 
     return stream.choices[0].message.content
 
-
-
-import openai
-
-openai.api_key = os.getenv("HF_TOKEN")
-openai.api_base = "YOUR_API_ENDPOINT"  # e.g. "https://your-api-provider.com/v1"
-model_name = "YOUR_MODEL_NAME"  # e.g. "my-custom-model"
-
-def generate_text(prompt):
-    try:
-        response = openai.Completion.create(
-            engine=model_name,
-            prompt=prompt,
-            n=1,
-            stop=None,
-            temperature=0.9  # randomness (0: deterministic, 1: creative)
-        )
-        return response.choices[0].text.strip()
-    except Exception as e:
-        print(f"Error happend: {e}")
-        return None
-
-class GoogleSearchTool(Tool):
-    name = "web_search"
-    description = """Performs a google web search for your query then returns a string of the top search results."""
-    inputs = {
-        "query": {"type": "string", "description": "The search query to perform."},
-        "filter_year": {
-            "type": "integer",
-            "description": "Optionally restrict results to a certain year",
-            "nullable": True,
-        },
-    }
-    output_type = "string"
-
-    def __init__(self):
-        super().__init__(self)
-        import os
-
-        self.serpapi_key = os.getenv("SERPER_API_KEY")
-
-    def forward(self, query: str, filter_year: Optional[int] = None) -> str:
-        import requests
-
-        if self.serpapi_key is None:
-            raise ValueError("Missing SerpAPI key. Make sure you have 'SERPER_API_KEY' in your env variables.")
-
-        params = {
-            "engine": "google",
-            "q": query,
-            "api_key": self.serpapi_key,
-            "google_domain": "google.com",
-        }
-
-        headers = {
-            'X-API-KEY': self.serpapi_key,
-            'Content-Type': 'application/json'
-        }
-
-        if filter_year is not None:
-            params["tbs"] = f"cdr:1,cd_min:01/01/{filter_year},cd_max:12/31/{filter_year}"
-
-        response = requests.request("POST", "https://google.serper.dev/search", headers=headers, data=json.dumps(params))
-
-
-        if response.status_code == 200:
-            results = response.json()
-        else:
-            raise ValueError(response.json())
-
-        if "organic" not in results.keys():
-            print("REZZZ", results.keys())
-            if filter_year is not None:
-                raise Exception(
-                    f"No results found for query: '{query}' with filtering on year={filter_year}. Use a less restrictive query or do not filter on year."
-                )
-            else:
-                raise Exception(f"No results found for query: '{query}'. Use a less restrictive query.")
-        if len(results["organic"]) == 0:
-            year_filter_message = f" with filter year={filter_year}" if filter_year is not None else ""
-            return f"No results found for '{query}'{year_filter_message}. Try with a more general query, or remove the year filter."
-
-        web_snippets = []
-        if "organic" in results:
-            for idx, page in enumerate(results["organic"]):
-                date_published = ""
-                if "date" in page:
-                    date_published = "\nDate published: " + page["date"]
-
-                source = ""
-                if "source" in page:
-                    source = "\nSource: " + page["source"]
-
-                snippet = ""
-                if "snippet" in page:
-                    snippet = "\n" + page["snippet"]
-
-                redacted_version = f"{idx}. [{page['title']}]({page['link']}){date_published}{source}\n{snippet}"
-
-                redacted_version = redacted_version.replace("Your browser can't play this video.", "")
-                web_snippets.append(redacted_version)
-
-        return "## Search Results\n" + "\n\n".join(web_snippets)
-
-# web_search = GoogleSearchTool()
-
-# print(web_search(query="Donald Trump news"))
-# quit()
 AUTHORIZED_IMPORTS = [
     "requests",
     "zipfile",
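For reference, the deleted GoogleSearchTool wrapped a single POST to the Serper endpoint (https://google.serper.dev/search) authenticated via SERPER_API_KEY, then formatted the "organic" hits. Below is a minimal standalone sketch of that same request; it assumes a valid SERPER_API_KEY is set in the environment, and the helper name run_serper_search and the example query are illustrative, not part of the repository.

import json
import os
from typing import Optional

import requests


def run_serper_search(query: str, filter_year: Optional[int] = None) -> dict:
    # Reproduce the request the removed GoogleSearchTool.forward sent (sketch only).
    api_key = os.getenv("SERPER_API_KEY")
    if api_key is None:
        raise ValueError("Missing 'SERPER_API_KEY' in environment variables.")

    # Same payload fields the removed tool used.
    params = {
        "engine": "google",
        "q": query,
        "api_key": api_key,
        "google_domain": "google.com",
    }
    if filter_year is not None:
        # Custom date-range filter restricting results to a single year.
        params["tbs"] = f"cdr:1,cd_min:01/01/{filter_year},cd_max:12/31/{filter_year}"

    response = requests.post(
        "https://google.serper.dev/search",
        headers={"X-API-KEY": api_key, "Content-Type": "application/json"},
        data=json.dumps(params),
    )
    if response.status_code != 200:
        raise ValueError(response.json())
    return response.json()


# Hypothetical usage, mirroring the commented-out call that was removed:
# results = run_serper_search("Donald Trump news")
# print(results.get("organic", [])[:3])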