Spaces:
Sleeping
Sleeping
Delete app.py
Browse files
app.py
DELETED
@@ -1,980 +0,0 @@
|
|
1 |
-
import gradio as gr
|
2 |
-
from jinja2 import Template
|
3 |
-
import os
|
4 |
-
import json
|
5 |
-
from typing import Dict, List, Any
|
6 |
-
|
7 |
-
# Configuration for the supported AI providers.
# For each provider: the models it offers, the import line, the client
# setup line, and a "call" code template.  The template is later filled
# via str.replace, so {model}/{temperature} are generation-time
# placeholders while {{...}} doubled braces stay literal dict braces in
# the emitted code.
AI_MODELS = {
    "openai": {
        "models": ["gpt-3.5-turbo", "gpt-4", "gpt-4o"],
        "import": "from openai import OpenAI",
        "setup": "client = OpenAI(api_key=api_key)",
        # Template for a chat-completions call in the generated code.
        "call": """response = client.chat.completions.create(
    model="{model}",
    messages=[
        {{"role": "system", "content": "{system_prompt}"}},
        {{"role": "user", "content": "{user_input}"}}
    ],
    temperature={temperature}
)
return response.choices[0].message.content"""
    },
    "deepseek": {
        "models": ["deepseek-coder", "deepseek-chat"],
        "import": "import requests",
        "setup": "headers = {'Authorization': f'Bearer {api_key}', 'Content-Type': 'application/json'}",
        # DeepSeek is called via its raw HTTP endpoint with requests.
        "call": """response = requests.post(
    "https://api.deepseek.com/v1/chat/completions",
    headers=headers,
    json={{
        "model": "{model}",
        "messages": [
            {{"role": "system", "content": "{system_prompt}"}},
            {{"role": "user", "content": "{user_input}"}}
        ],
        "temperature": {temperature}
    }}
)
return response.json()["choices"][0]["message"]["content"]"""
    },
    "anthropic": {
        "models": ["claude-3-opus", "claude-3-sonnet", "claude-3-haiku"],
        "import": "from anthropic import Anthropic",
        "setup": "client = Anthropic(api_key=api_key)",
        # Anthropic takes the system prompt as a top-level argument,
        # not as a message entry.
        "call": """response = client.messages.create(
    model="{model}",
    system="{system_prompt}",
    messages=[
        {{"role": "user", "content": "{user_input}"}}
    ],
    temperature={temperature}
)
return response.content[0].text"""
    }
}
|
56 |
-
|
57 |
-
# Code snippets for the optional assistant features.  Each feature
# provides the import lines and the function/class source that the
# generator splices into the emitted program.
FEATURE_HANDLERS = {
    "file_handling": {
        "imports": """import os
import tempfile
from werkzeug.utils import secure_filename""",
        "functions": """def save_uploaded_file(file):
    if file is None:
        return None
    temp_dir = tempfile.mkdtemp()
    filename = secure_filename(file.name)
    filepath = os.path.join(temp_dir, filename)
    with open(filepath, 'wb') as f:
        f.write(file.read())
    return filepath

def read_file_content(filepath, max_size=100000):
    with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
        content = f.read(max_size)
    return content"""
    },
    "memory": {
        "imports": """import sqlite3
import json""",
        "functions": """class ConversationMemory:
    def __init__(self, db_path="memory.db"):
        self.db_path = db_path
        self._init_db()

    def _init_db(self):
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS conversations (
                id INTEGER PRIMARY KEY,
                session_id TEXT,
                timestamp TEXT,
                user_input TEXT,
                assistant_response TEXT
            )
        ''')
        conn.commit()
        conn.close()

    def save_interaction(self, session_id, user_input, assistant_response):
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        cursor.execute(
            "INSERT INTO conversations (session_id, timestamp, user_input, assistant_response) VALUES (?, datetime('now'), ?, ?)",
            (session_id, user_input, assistant_response)
        )
        conn.commit()
        conn.close()

    def get_conversation_history(self, session_id, limit=10):
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        cursor.execute(
            "SELECT user_input, assistant_response FROM conversations WHERE session_id = ? ORDER BY timestamp DESC LIMIT ?",
            (session_id, limit)
        )
        history = cursor.fetchall()
        conn.close()
        return history"""
    },
    "api_integration": {
        "imports": """import requests
import json""",
        # BUG FIX: the generated function's docstring used triple double
        # quotes, which terminated this enclosing triple-quoted string and
        # made the whole module a SyntaxError.  A plain-quoted docstring
        # keeps the snippet intact.
        "functions": """def call_external_api(url, method="GET", headers=None, data=None, params=None):
    "Call an external API with the specified parameters."
    headers = headers or {}

    if method.upper() == "GET":
        response = requests.get(url, headers=headers, params=params)
    elif method.upper() == "POST":
        response = requests.post(url, headers=headers, json=data if data else None, params=params)
    elif method.upper() == "PUT":
        response = requests.put(url, headers=headers, json=data if data else None, params=params)
    elif method.upper() == "DELETE":
        response = requests.delete(url, headers=headers, params=params)
    else:
        raise ValueError(f"Unsupported HTTP method: {method}")

    if response.status_code >= 200 and response.status_code < 300:
        try:
            return response.json()
        except:
            return response.text
    else:
        return {
            "error": True,
            "status_code": response.status_code,
            "message": response.text
        }"""
    }
}
|
153 |
-
|
154 |
-
def parse_tasks(task_input: str) -> Dict[str, Any]:
    """Parse the task input to determine the required features and configuration.

    Returns a dict with keys: language, api, model, features, web_ui, cli.
    Matching is a simple case-insensitive keyword search over the task text.
    """
    task_input = task_input.lower()
    # Whitespace tokens for short keywords that would otherwise match
    # inside unrelated words (BUG FIX: "js" used to match inside "json",
    # flipping the language to javascript).
    tokens = task_input.split()

    # Determine the programming language (Python is the default).
    if "python" in task_input:
        language = "python"
    elif "php" in task_input:
        language = "php"
    elif "javascript" in task_input or "js" in tokens:
        language = "javascript"
    else:
        language = "python"  # default

    # Determine the AI provider to use (OpenAI is the default).
    if "openai" in task_input:
        api = "openai"
    elif "deepseek" in task_input:
        api = "deepseek"
    elif "anthropic" in task_input or "claude" in task_input:
        api = "anthropic"
    else:
        api = "openai"  # default

    # Determine the model, if one is named explicitly.
    model = None
    for model_name in AI_MODELS[api]["models"]:
        if model_name.lower() in task_input:
            model = model_name
            break

    if not model:
        model = AI_MODELS[api]["models"][0]  # provider's first model as default

    # Collect the requested features (German and English keywords).
    features = []
    if any(x in task_input for x in ["file", "files", "upload", "datei", "dateien"]):
        features.append("file_handling")
    if any(x in task_input for x in ["memory", "history", "gedächtnis", "speicher", "verlauf"]):
        features.append("memory")
    if any(x in task_input for x in ["api", "integration", "external", "extern"]):
        features.append("api_integration")

    # Interface flags.
    web_ui = "web" in task_input or "ui" in task_input or "interface" in task_input
    cli = "cli" in task_input or "command" in task_input or "terminal" in task_input

    return {
        "language": language,
        "api": api,
        "model": model,
        "features": features,
        "web_ui": web_ui,
        "cli": cli
    }
|
209 |
-
|
210 |
-
def generate_code(task_input: str, api_key: str) -> str:
    """Generate the complete assistant source code for the given task.

    Parses the task description, then dispatches to the generator for the
    detected target language.  Returns the generated program, or a German
    user-facing message when the key is missing or the language unsupported.
    """
    if not api_key.strip():
        return "Bitte geben Sie einen gültigen API-Key ein."

    params = parse_tasks(task_input)

    # Dispatch table instead of an if/elif chain.
    generators = {
        "python": generate_python_code,
        "php": generate_php_code,
        "javascript": generate_js_code,
    }
    generator = generators.get(params["language"])
    if generator is None:
        return f"Die Programmiersprache {params['language']} wird noch nicht unterstützt."
    return generator(params, api_key)
|
226 |
-
|
227 |
-
def generate_python_code(params: Dict[str, Any], api_key: str) -> str:
    """Generate Python source code for the AI assistant.

    Args:
        params: Settings produced by parse_tasks (api, model, features,
            web_ui, cli).
        api_key: The user's API key; only its first three characters are
            embedded — the generated program reads the real key from the
            API_KEY environment variable.

    Returns:
        The complete generated program as one string.
    """
    api_info = AI_MODELS[params["api"]]

    imports = [
        "import os",
        "import sys",
        "import json",
        api_info["import"]
    ]

    setup_code = [
        "# API-Setup",
        # Only a short preview of the key is embedded in the output.
        f"api_key = os.environ.get('API_KEY', '{api_key[:3]}...')"
    ]

    # Rewrite the provider setup so it runs inside __init__: store the
    # client/headers on self and read the key from self.api_key.
    setup_line = (
        api_info["setup"]
        .replace("client =", "self.client =")
        .replace("headers =", "self.headers =")
        .replace("api_key", "self.api_key")
    )
    # BUG FIX: the blanket api_key replacement also mangled keyword
    # arguments into "OpenAI(self.api_key=...)"; restore the kwarg name.
    setup_line = setup_line.replace("(self.api_key=", "(api_key=")

    # Fill the provider call template at generation time.
    # BUG FIXES: "{system_prompt}"/"{user_input}" used to survive as
    # literal placeholder strings in the emitted code; client/headers were
    # __init__ locals referenced from ask(); and only the first template
    # line was indented into the method body.
    call_code = (
        api_info["call"]
        .replace("{model}", params["model"])
        .replace("{temperature}", "temperature")
        .replace('"{system_prompt}"', "system_prompt")
        .replace('"{user_input}"', "user_input")
        .replace("client.", "self.client.")
        .replace("headers=headers", "headers=self.headers")
    )
    call_lines = ["        " + line for line in call_code.split("\n")]

    assistant_class = [
        "class AIAssistant:",
        "    def __init__(self, api_key):",
        "        self.api_key = api_key",
        f"        {setup_line}",
        "",
        "    def ask(self, user_input, system_prompt=\"You are a helpful AI assistant.\", temperature=0.7):",
    ]
    assistant_class.extend(call_lines)

    # Append feature-specific code to the class body.
    for feature in params["features"]:
        if feature in FEATURE_HANDLERS:
            imports.append(FEATURE_HANDLERS[feature]["imports"])
            assistant_class.append("")
            assistant_class.append(f"    # {feature.replace('_', ' ').title()} Methods")

            # Indent the feature functions into the class body.
            feature_funcs = FEATURE_HANDLERS[feature]["functions"].split("\n")
            if feature != "memory":  # memory is a class, handled separately below
                feature_funcs = ["    " + line for line in feature_funcs]
                assistant_class.extend(feature_funcs)
            else:
                # Wire the standalone ConversationMemory class into the assistant.
                assistant_class.append("    def initialize_memory(self, db_path=\"memory.db\"):")
                assistant_class.append("        self.memory = ConversationMemory(db_path)")
                assistant_class.append("")
                assistant_class.append("    def ask_with_memory(self, user_input, session_id, system_prompt=\"You are a helpful AI assistant.\", temperature=0.7):")
                assistant_class.append("        response = self.ask(user_input, system_prompt, temperature)")
                assistant_class.append("        self.memory.save_interaction(session_id, user_input, response)")
                assistant_class.append("        return response")

    # Main section, depending on the requested interfaces.
    main_code = ["# Hauptfunktion"]

    if params["web_ui"]:
        imports.append("import gradio as gr")
        main_code.extend([
            "def create_web_interface():",
            "    assistant = AIAssistant(api_key)",
            "",
            "    def process_query(query, history):",
            "        response = assistant.ask(query)",
            "        history.append((query, response))",
            "        return \"\", history",
            "",
            "    with gr.Blocks() as demo:",
            # BUG FIX: interpolate the model name at generation time; the
            # emitted code used to reference an undefined `params` variable.
            f"        gr.Markdown(\"## AI Assistant mit {params['model']}\")",
            "",
            "        chatbot = gr.Chatbot()",
            "        msg = gr.Textbox()",
            "        clear = gr.Button(\"Clear\")",
            "",
            "        msg.submit(process_query, [msg, chatbot], [msg, chatbot])",
            "        clear.click(lambda: None, None, chatbot, queue=False)",
            "",
            "    demo.launch()",
            ""
        ])

    if params["cli"]:
        main_code.extend([
            "def run_cli():",
            "    assistant = AIAssistant(api_key)",
            # BUG FIX: model name interpolated at generation time (see above).
            f"    print(\"AI Assistant mit {params['model']} bereit. Zum Beenden 'exit' eingeben.\")",
            "",
            "    while True:",
            "        user_input = input(\"\\nFrage: \")",
            "        if user_input.lower() in ['exit', 'quit', 'q']:",
            "            print(\"Auf Wiedersehen!\")",
            "            break",
            "",
            "        response = assistant.ask(user_input)",
            "        print(f\"\\nAssistent: {response}\")",
            ""
        ])

    main_code.append("if __name__ == \"__main__\":")
    if params["web_ui"] and params["cli"]:
        main_code.append("    if len(sys.argv) > 1 and sys.argv[1] == '--cli':")
        main_code.append("        run_cli()")
        main_code.append("    else:")
        main_code.append("        create_web_interface()")
    elif params["web_ui"]:
        main_code.append("    create_web_interface()")
    elif params["cli"]:
        main_code.append("    run_cli()")
    else:
        main_code.append("    assistant = AIAssistant(api_key)")
        main_code.append("    response = assistant.ask(\"Hallo, wie geht es dir?\")")
        main_code.append("    print(f\"Antwort: {response}\")")

    # Standalone ConversationMemory class definition, if required.
    memory_class = []
    if "memory" in params["features"]:
        memory_class = FEATURE_HANDLERS["memory"]["functions"].split("\n")

    # Assemble the final program.
    all_sections = [
        "# Generierter AI Assistant",
        f"# API: {params['api'].upper()}",
        f"# Modell: {params['model']}",
        f"# Features: {', '.join(params['features']) if params['features'] else 'Keine zusätzlichen Features'}",
        "",
        "\n".join(list(dict.fromkeys(imports))),  # drop duplicate imports, keep order
        "",
        "\n".join(setup_code),
        "",
        "\n".join(memory_class) if memory_class else "",
        "",
        "\n".join(assistant_class),
        "",
        "\n".join(main_code)
    ]

    return "\n".join(all_sections)
|
359 |
-
|
360 |
-
def generate_php_code(params: Dict[str, Any], api_key: str) -> str:
    """Generate PHP code for the AI assistant (simplified version).

    Args:
        params: Settings from parse_tasks (api, model, features, web_ui, cli).
        api_key: User API key; only a three-character preview is embedded.

    Returns:
        The generated PHP program as a string.
    """
    # One class plus a mode-dependent main block.  Literal PHP braces are
    # doubled because this template goes through str.format below.
    php_template = """<?php
// Generierter AI Assistant
// API: {api}
// Modell: {model}
// Features: {features}

class AIAssistant {{
    private $api_key;
    private $model;

    public function __construct($api_key, $model) {{
        $this->api_key = $api_key;
        $this->model = $model;
    }}

    public function ask($prompt, $system_prompt = "You are a helpful AI assistant.", $temperature = 0.7) {{
        $url = "https://api.{api_endpoint}/v1/chat/completions";

        $headers = [
            "Content-Type: application/json",
            "Authorization: Bearer " . $this->api_key
        ];

        $data = [
            "model" => $this->model,
            "messages" => [
                ["role" => "system", "content" => $system_prompt],
                ["role" => "user", "content" => $prompt]
            ],
            "temperature" => $temperature
        ];

        $ch = curl_init($url);
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($ch, CURLOPT_POST, true);
        curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($data));
        curl_setopt($ch, CURLOPT_HTTPHEADER, $headers);

        $response = curl_exec($ch);
        curl_close($ch);

        $response_data = json_decode($response, true);
        return $response_data["choices"][0]["message"]["content"];
    }}

{feature_methods}
}}

// Hauptcode
$api_key = '{api_key_preview}';
$assistant = new AIAssistant($api_key, '{model}');

{main_code}
?>"""

    # Optional feature methods, appended verbatim into the class body.
    feature_methods = ""
    if "file_handling" in params["features"]:
        feature_methods += """
    public function handleUploadedFile($file) {
        $tempDir = sys_get_temp_dir();
        $filename = basename($file["name"]);
        $filepath = $tempDir . "/" . $filename;

        if (move_uploaded_file($file["tmp_name"], $filepath)) {
            return $filepath;
        }

        return null;
    }

    public function readFileContent($filepath, $maxSize = 100000) {
        if (!file_exists($filepath)) {
            return null;
        }

        return file_get_contents($filepath, false, null, 0, $maxSize);
    }"""

    if "memory" in params["features"]:
        feature_methods += """
    private $db;

    public function initializeMemory($dbPath = "memory.sqlite") {
        $this->db = new SQLite3($dbPath);
        $this->db->exec("CREATE TABLE IF NOT EXISTS conversations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            session_id TEXT,
            timestamp TEXT,
            user_input TEXT,
            assistant_response TEXT
        )");
    }

    public function askWithMemory($prompt, $sessionId, $systemPrompt = "You are a helpful AI assistant.", $temperature = 0.7) {
        $response = $this->ask($prompt, $systemPrompt, $temperature);

        $stmt = $this->db->prepare("INSERT INTO conversations (session_id, timestamp, user_input, assistant_response)
            VALUES (:session_id, datetime('now'), :user_input, :assistant_response)");
        $stmt->bindValue(':session_id', $sessionId, SQLITE3_TEXT);
        $stmt->bindValue(':user_input', $prompt, SQLITE3_TEXT);
        $stmt->bindValue(':assistant_response', $response, SQLITE3_TEXT);
        $stmt->execute();

        return $response;
    }"""

    # Determine the API host for the generated endpoint URL.
    api_endpoint = params["api"]
    if params["api"] == "anthropic":
        api_endpoint = "anthropic.com"
    elif params["api"] == "openai":
        api_endpoint = "openai.com"
    else:
        api_endpoint = "deepseek.com"

    # Main block, depending on the requested interface.
    main_code = ""
    if params["web_ui"]:
        main_code += """
// Web-UI
if ($_SERVER["REQUEST_METHOD"] == "POST") {
    $user_input = $_POST["user_input"] ?? "";

    if (!empty($user_input)) {
        $response = $assistant->ask($user_input);
        echo json_encode(["response" => $response]);
        exit;
    }
}

?>

<!DOCTYPE html>
<html>
<head>
    <title>AI Assistant</title>
    <style>
        body { font-family: Arial, sans-serif; max-width: 800px; margin: 0 auto; padding: 20px; }
        .chat-container { border: 1px solid #ddd; border-radius: 5px; padding: 10px; height: 400px; overflow-y: auto; margin-bottom: 10px; }
        .user-message { background-color: #e6f7ff; padding: 8px; border-radius: 5px; margin-bottom: 10px; }
        .assistant-message { background-color: #f2f2f2; padding: 8px; border-radius: 5px; margin-bottom: 10px; }
        input[type="text"] { width: 80%; padding: 8px; }
        button { padding: 8px 15px; background-color: #4CAF50; color: white; border: none; border-radius: 5px; cursor: pointer; }
    </style>
</head>
<body>
    <h1>AI Assistant mit <?php echo htmlspecialchars('{model}'); ?></h1>

    <div class="chat-container" id="chatContainer"></div>

    <div>
        <input type="text" id="userInput" placeholder="Stellen Sie eine Frage...">
        <button onclick="sendMessage()">Senden</button>
    </div>

    <script>
        function sendMessage() {
            const userInput = document.getElementById('userInput').value;
            if (!userInput) return;

            // Nachricht des Benutzers anzeigen
            addMessage('user', userInput);
            document.getElementById('userInput').value = '';

            // Anfrage an den Server senden
            fetch(window.location.href, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/x-www-form-urlencoded',
                },
                body: 'user_input=' + encodeURIComponent(userInput)
            })
            .then(response => response.json())
            .then(data => {
                addMessage('assistant', data.response);
            })
            .catch(error => {
                console.error('Error:', error);
                addMessage('assistant', 'Es ist ein Fehler aufgetreten.');
            });
        }

        function addMessage(role, content) {
            const chatContainer = document.getElementById('chatContainer');
            const messageDiv = document.createElement('div');
            messageDiv.className = role + '-message';
            messageDiv.textContent = content;
            chatContainer.appendChild(messageDiv);
            chatContainer.scrollTop = chatContainer.scrollHeight;
        }

        // Event-Listener für die Enter-Taste
        document.getElementById('userInput').addEventListener('keypress', function(e) {
            if (e.key === 'Enter') {
                sendMessage();
            }
        });
    </script>
</body>
</html>

<?php
// Verhindere weitere Ausführung
exit;"""
    elif params["cli"]:
        main_code += """
// CLI-Modus
echo "AI Assistant mit {model} bereit. Zum Beenden 'exit' eingeben.\n";

while (true) {
    echo "\nFrage: ";
    $userInput = trim(fgets(STDIN));

    if (in_array(strtolower($userInput), ['exit', 'quit', 'q'])) {
        echo "Auf Wiedersehen!\n";
        break;
    }

    $response = $assistant->ask($userInput);
    echo "\nAssistent: " . $response . "\n";
}"""
    else:
        main_code += """
// Einfacher Test
$response = $assistant->ask("Hallo, wie geht es dir?");
echo "Antwort: " . $response . "\n";"""

    # BUG FIX: main_code is inserted as a format *argument*, so its
    # {model} placeholders were never substituted and the generated PHP
    # displayed the literal text "{model}".  Substitute it here instead.
    main_code = main_code.replace("{model}", params["model"])

    return php_template.format(
        api=params["api"].upper(),
        api_endpoint=api_endpoint,
        model=params["model"],
        features=", ".join(params["features"]) if params["features"] else "Keine zusätzlichen Features",
        feature_methods=feature_methods,
        api_key_preview=api_key[:3] + "...",
        main_code=main_code
    )
|
599 |
-
|
600 |
-
def generate_js_code(params: Dict[str, Any], api_key: str) -> str:
|
601 |
-
"""Generate JavaScript code for the AI assistant."""
|
602 |
-
# JS-Code-Generierung (vereinfachte Version)
|
603 |
-
js_template = """// Generierter AI Assistant
|
604 |
-
// API: {api}
|
605 |
-
// Modell: {model}
|
606 |
-
// Features: {features}
|
607 |
-
|
608 |
-
{imports}
|
609 |
-
|
610 |
-
class AIAssistant {{
|
611 |
-
constructor(apiKey) {{
|
612 |
-
this.apiKey = apiKey;
|
613 |
-
this.model = "{model}";
|
614 |
-
{setup}
|
615 |
-
}}
|
616 |
-
|
617 |
-
async ask(userInput, systemPrompt = "You are a helpful AI assistant.", temperature = 0.7) {{
|
618 |
-
{call_code}
|
619 |
-
}}
|
620 |
-
|
621 |
-
{feature_methods}
|
622 |
-
}}
|
623 |
-
|
624 |
-
{memory_class}
|
625 |
-
|
626 |
-
// Hauptcode
|
627 |
-
{main_code}
|
628 |
-
"""
|
629 |
-
|
630 |
-
# Importe basierend auf der API und den Features
|
631 |
-
imports = []
|
632 |
-
|
633 |
-
if params["api"] == "openai":
|
634 |
-
imports.append("const OpenAI = require('openai');")
|
635 |
-
elif params["api"] == "anthropic":
|
636 |
-
imports.append("const Anthropic = require('@anthropic-ai/sdk');")
|
637 |
-
else:
|
638 |
-
imports.append("const axios = require('axios');")
|
639 |
-
|
640 |
-
if "file_handling" in params["features"]:
|
641 |
-
imports.append("const fs = require('fs');")
|
642 |
-
imports.append("const path = require('path');")
|
643 |
-
imports.append("const os = require('os');")
|
644 |
-
|
645 |
-
if params["web_ui"]:
|
646 |
-
imports.append("const express = require('express');")
|
647 |
-
imports.append("const bodyParser = require('body-parser');")
|
648 |
-
|
649 |
-
# Setup-Code basierend auf der API
|
650 |
-
setup = ""
|
651 |
-
if params["api"] == "openai":
|
652 |
-
setup = "this.client = new OpenAI({ apiKey: this.apiKey });"
|
653 |
-
elif params["api"] == "anthropic":
|
654 |
-
setup = "this.client = new Anthropic({ apiKey: this.apiKey });"
|
655 |
-
|
656 |
-
# API-Aufruf basierend auf der ausgewählten API
|
657 |
-
call_code = ""
|
658 |
-
if params["api"] == "openai":
|
659 |
-
call_code = """
|
660 |
-
try {
|
661 |
-
const response = await this.client.chat.completions.create({
|
662 |
-
model: this.model,
|
663 |
-
messages: [
|
664 |
-
{ role: "system", content: systemPrompt },
|
665 |
-
{ role: "user", content: userInput }
|
666 |
-
],
|
667 |
-
temperature: temperature
|
668 |
-
});
|
669 |
-
return response.choices[0].message.content;
|
670 |
-
} catch (error) {
|
671 |
-
console.error("Error calling OpenAI:", error);
|
672 |
-
return "An error occurred while processing your request.";
|
673 |
-
}"""
|
674 |
-
elif params["api"] == "anthropic":
|
675 |
-
call_code = """
|
676 |
-
try {
|
677 |
-
const response = await this.client.messages.create({
|
678 |
-
model: this.model,
|
679 |
-
system: systemPrompt,
|
680 |
-
messages: [
|
681 |
-
{ role: "user", content: userInput }
|
682 |
-
],
|
683 |
-
temperature: temperature
|
684 |
-
});
|
685 |
-
return response.content[0].text;
|
686 |
-
} catch (error) {
|
687 |
-
console.error("Error calling Anthropic:", error);
|
688 |
-
return "An error occurred while processing your request.";
|
689 |
-
}"""
|
690 |
-
else: # deepseek und andere
|
691 |
-
call_code = """
|
692 |
-
try {
|
693 |
-
const response = await axios.post("https://api.deepseek.com/v1/chat/completions", {
|
694 |
-
model: this.model,
|
695 |
-
messages: [
|
696 |
-
{ role: "system", content: systemPrompt },
|
697 |
-
{ role: "user", content: userInput }
|
698 |
-
],
|
699 |
-
temperature: temperature
|
700 |
-
}, {
|
701 |
-
headers: {
|
702 |
-
"Content-Type": "application/json",
|
703 |
-
"Authorization": `Bearer ${this.apiKey}`
|
704 |
-
}
|
705 |
-
});
|
706 |
-
|
707 |
-
return response.data.choices[0].message.content;
|
708 |
-
} catch (error) {
|
709 |
-
console.error("Error calling API:", error);
|
710 |
-
return "An error occurred while processing your request.";
|
711 |
-
}"""
|
712 |
-
|
713 |
-
# Feature-Methoden
|
714 |
-
feature_methods = ""
|
715 |
-
|
716 |
-
if "file_handling" in params["features"]:
|
717 |
-
feature_methods += """
|
718 |
-
saveUploadedFile(fileData, filename) {
|
719 |
-
const tempDir = os.tmpdir();
|
720 |
-
const filepath = path.join(tempDir, filename);
|
721 |
-
|
722 |
-
return new Promise((resolve, reject) => {
|
723 |
-
fs.writeFile(filepath, fileData, (err) => {
|
724 |
-
if (err) {
|
725 |
-
reject(err);
|
726 |
-
return;
|
727 |
-
}
|
728 |
-
resolve(filepath);
|
729 |
-
});
|
730 |
-
});
|
731 |
-
}
|
732 |
-
|
733 |
-
readFileContent(filepath, maxSize = 100000) {
|
734 |
-
return new Promise((resolve, reject) => {
|
735 |
-
fs.readFile(filepath, 'utf8', (err, data) => {
|
736 |
-
if (err) {
|
737 |
-
reject(err);
|
738 |
-
return;
|
739 |
-
}
|
740 |
-
resolve(data.slice(0, maxSize));
|
741 |
-
});
|
742 |
-
});
|
743 |
-
}"""
|
744 |
-
|
745 |
-
# Memory-Klasse wenn erforderlich
|
746 |
-
memory_class = ""
|
747 |
-
if "memory" in params["features"]:
|
748 |
-
imports.append("const sqlite3 = require('sqlite3').verbose();")
|
749 |
-
|
750 |
-
memory_class = """
|
751 |
-
class ConversationMemory {
|
752 |
-
constructor(dbPath = "memory.db") {
|
753 |
-
this.dbPath = dbPath;
|
754 |
-
this.initDb();
|
755 |
-
}
|
756 |
-
|
757 |
-
initDb() {
|
758 |
-
this.db = new sqlite3.Database(this.dbPath, (err) => {
|
759 |
-
if (err) {
|
760 |
-
console.error("Error opening database:", err);
|
761 |
-
return;
|
762 |
-
}
|
763 |
-
|
764 |
-
this.db.run(`CREATE TABLE IF NOT EXISTS conversations (
|
765 |
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
766 |
-
session_id TEXT,
|
767 |
-
timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
|
768 |
-
user_input TEXT,
|
769 |
-
assistant_response TEXT
|
770 |
-
)`);
|
771 |
-
});
|
772 |
-
}
|
773 |
-
|
774 |
-
saveInteraction(sessionId, userInput, assistantResponse) {
|
775 |
-
return new Promise((resolve, reject) => {
|
776 |
-
const stmt = this.db.prepare(
|
777 |
-
`INSERT INTO conversations (session_id, user_input, assistant_response)
|
778 |
-
VALUES (?, ?, ?)`
|
779 |
-
);
|
780 |
-
|
781 |
-
stmt.run(sessionId, userInput, assistantResponse, function(err) {
|
782 |
-
if (err) {
|
783 |
-
reject(err);
|
784 |
-
return;
|
785 |
-
}
|
786 |
-
resolve(this.lastID);
|
787 |
-
});
|
788 |
-
|
789 |
-
stmt.finalize();
|
790 |
-
});
|
791 |
-
}
|
792 |
-
|
793 |
-
getConversationHistory(sessionId, limit = 10) {
|
794 |
-
return new Promise((resolve, reject) => {
|
795 |
-
this.db.all(
|
796 |
-
`SELECT user_input, assistant_response FROM conversations
|
797 |
-
WHERE session_id = ? ORDER BY timestamp DESC LIMIT ?`,
|
798 |
-
[sessionId, limit],
|
799 |
-
(err, rows) => {
|
800 |
-
if (err) {
|
801 |
-
reject(err);
|
802 |
-
return;
|
803 |
-
}
|
804 |
-
resolve(rows);
|
805 |
-
}
|
806 |
-
);
|
807 |
-
});
|
808 |
-
}
|
809 |
-
}"""feature_methods += """
|
810 |
-
initializeMemory(dbPath = "memory.db") {
|
811 |
-
this.memory = new ConversationMemory(dbPath);
|
812 |
-
}
|
813 |
-
|
814 |
-
async askWithMemory(userInput, sessionId, systemPrompt = "You are a helpful AI assistant.", temperature = 0.7) {
|
815 |
-
const response = await this.ask(userInput, systemPrompt, temperature);
|
816 |
-
await this.memory.saveInteraction(sessionId, userInput, response);
|
817 |
-
return response;
|
818 |
-
}
|
819 |
-
// Hauptcode für den KI-Assistenten
|
820 |
-
const apiKey = process.env.API_KEY || "abc..."; // API-Key über Umgebungsvariable oder Platzhalter
|
821 |
-
|
822 |
-
const assistant = new AIAssistant(apiKey);
|
823 |
-
|
824 |
-
if (process.argv.includes("--cli")) {
|
825 |
-
// CLI-Modus
|
826 |
-
runCli();
|
827 |
-
} else if (process.argv.includes("--web")) {
|
828 |
-
// Web-UI-Modus
|
829 |
-
createWebInterface();
|
830 |
-
} else {
|
831 |
-
// Standard-Modus (einfacher Test)
|
832 |
-
runTest();
|
833 |
-
}
|
834 |
-
|
835 |
-
async function runTest() {
|
836 |
-
try {
|
837 |
-
const response = await assistant.ask("Hallo, wie geht es dir?");
|
838 |
-
console.log(`Antwort: ${response}`);
|
839 |
-
} catch (error) {
|
840 |
-
console.error("Fehler beim Test:", error);
|
841 |
-
}
|
842 |
-
}
|
843 |
-
|
844 |
-
async function runCli() {
|
845 |
-
const readline = require('readline');
|
846 |
-
const rl = readline.createInterface({
|
847 |
-
input: process.stdin,
|
848 |
-
output: process.stdout
|
849 |
-
});
|
850 |
-
|
851 |
-
console.log(`AI Assistant mit ${assistant.model} bereit. Zum Beenden 'exit' eingeben.`);
|
852 |
-
|
853 |
-
function askQuestion() {
|
854 |
-
rl.question("\nFrage: ", async (userInput) => {
|
855 |
-
if (["exit", "quit", "q"].includes(userInput.toLowerCase())) {
|
856 |
-
console.log("Auf Wiedersehen!");
|
857 |
-
rl.close();
|
858 |
-
return;
|
859 |
-
}
|
860 |
-
|
861 |
-
try {
|
862 |
-
const response = await assistant.ask(userInput);
|
863 |
-
console.log(`\nAssistent: ${response}`);
|
864 |
-
} catch (error) {
|
865 |
-
console.error("Fehler:", error);
|
866 |
-
console.log("\nAssistent: Es ist ein Fehler aufgetreten.");
|
867 |
-
}
|
868 |
-
|
869 |
-
askQuestion();
|
870 |
-
});
|
871 |
-
}
|
872 |
-
|
873 |
-
askQuestion();
|
874 |
-
}
|
875 |
-
|
876 |
-
/**
 * Start an Express web UI for the assistant.
 *
 * Serves an inline single-page chat UI at GET / and a JSON endpoint at
 * POST /ask that forwards the request body's `user_input` to the
 * module-level `assistant` and returns `{ response }`.
 * Listens on $PORT or 3000.
 *
 * NOTE(review): assumes `express` and `bodyParser` are imported at the top
 * of the file (outside this view) — confirm before reuse.
 */
function createWebInterface() {
    const app = express();
    const port = process.env.PORT || 3000; // honor platform-assigned port

    // Middleware: parse JSON and form bodies; serve static assets from ./public
    app.use(bodyParser.json());
    app.use(bodyParser.urlencoded({ extended: true }));
    app.use(express.static('public'));

    // Landing page: the whole chat client (markup, styles, fetch logic) is
    // one inline template literal — no separate view files needed.
    app.get('/', (req, res) => {
        res.send(`
            <!DOCTYPE html>
            <html>
            <head>
                <title>AI Assistant</title>
                <style>
                    body { font-family: Arial, sans-serif; max-width: 800px; margin: 0 auto; padding: 20px; }
                    .chat-container { border: 1px solid #ddd; border-radius: 5px; padding: 10px; height: 400px; overflow-y: auto; margin-bottom: 10px; }
                    .user-message { background-color: #e6f7ff; padding: 8px; border-radius: 5px; margin-bottom: 10px; }
                    .assistant-message { background-color: #f2f2f2; padding: 8px; border-radius: 5px; margin-bottom: 10px; }
                    input[type="text"] { width: 80%; padding: 8px; }
                    button { padding: 8px 15px; background-color: #4CAF50; color: white; border: none; border-radius: 5px; cursor: pointer; }
                </style>
            </head>
            <body>
                <h1>AI Assistant mit ${assistant.model}</h1>

                <div class="chat-container" id="chatContainer"></div>

                <div>
                    <input type="text" id="userInput" placeholder="Stellen Sie eine Frage...">
                    <button onclick="sendMessage()">Senden</button>
                </div>

                <script>
                    function sendMessage() {
                        const userInput = document.getElementById('userInput').value;
                        if (!userInput) return;

                        addMessage('user', userInput);
                        document.getElementById('userInput').value = '';

                        fetch('/ask', {
                            method: 'POST',
                            headers: {
                                'Content-Type': 'application/json',
                            },
                            body: JSON.stringify({ user_input: userInput })
                        })
                        .then(response => response.json())
                        .then(data => {
                            addMessage('assistant', data.response);
                        })
                        .catch(error => {
                            console.error('Error:', error);
                            addMessage('assistant', 'Es ist ein Fehler aufgetreten.');
                        });
                    }

                    function addMessage(role, content) {
                        const chatContainer = document.getElementById('chatContainer');
                        const messageDiv = document.createElement('div');
                        messageDiv.className = role + '-message';
                        messageDiv.textContent = content;
                        chatContainer.appendChild(messageDiv);
                        chatContainer.scrollTop = chatContainer.scrollHeight;
                    }

                    document.getElementById('userInput').addEventListener('keypress', function(e) {
                        if (e.key === 'Enter') {
                            sendMessage();
                        }
                    });
                </script>
            </body>
            </html>
        `);
    });

    // JSON API: {user_input} in, {response} out. 400 on missing input,
    // 500 (with server-side log) on any failure from assistant.ask.
    app.post('/ask', async (req, res) => {
        try {
            const userInput = req.body.user_input;

            if (!userInput) {
                return res.status(400).json({ error: "Keine Eingabe vorhanden" });
            }

            const response = await assistant.ask(userInput);
            res.json({ response });
        } catch (error) {
            console.error("Fehler bei der Anfrage:", error);
            res.status(500).json({ error: "Interner Serverfehler" });
        }
    });

    // Start listening; no return value — the process stays alive serving requests.
    app.listen(port, () => {
        console.log(`Server läuft auf http://localhost:${port}`);
    });
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|