File size: 5,222 Bytes
bee0939
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
# processor.py

import logging
import hashlib
import time
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Dict, List

import requests
from langchain_core.messages import AIMessage

from config import ResearchConfig
from knowledge_graph import QuantumKnowledgeGraph

logger = logging.getLogger(__name__)

class CognitiveProcessor:
    """
    Executes API requests to the backend with redundant parallel attempts and
    consolidates the results via a simple consensus mechanism.

    Attributes:
        executor: Thread pool used to fan out concurrent API calls.
        session_id: Short per-instance identifier sent with every request.
    """

    def __init__(self) -> None:
        self.executor = ThreadPoolExecutor(max_workers=ResearchConfig.MAX_CONCURRENT_REQUESTS)
        # 12-hex-char session tag derived from the instantiation timestamp.
        self.session_id = hashlib.sha256(datetime.now().isoformat().encode()).hexdigest()[:12]

    def process_query(self, prompt: str, attempts: int = 3) -> Dict:
        """Submit the prompt several times in parallel and return the consensus result.

        Args:
            prompt: User prompt forwarded to the backend model.
            attempts: Number of redundant requests to issue (default 3,
                preserving the original triple-redundancy behavior).

        Returns:
            The consensus response dict, or ``{"error": ...}`` if every
            request failed.
        """
        futures = [self.executor.submit(self._execute_api_request, prompt) for _ in range(attempts)]
        results: List[Dict] = []
        for future in as_completed(futures):
            try:
                results.append(future.result())
            except Exception:
                # Log the full traceback but keep collecting remaining futures.
                logger.exception("Error during API request execution.")
        return self._consensus_check(results)

    def _execute_api_request(self, prompt: str) -> Dict:
        """Perform one synchronous POST to the chat-completions endpoint.

        Returns:
            The parsed JSON response on success, or ``{"error": <message>}``
            on any requests-level failure (connection, timeout, HTTP status).
        """
        headers = {
            "Authorization": f"Bearer {ResearchConfig.DEEPSEEK_API_KEY}",
            "Content-Type": "application/json",
            "X-Research-Session": self.session_id
        }
        payload = {
            "model": "deepseek-chat",
            "messages": [{
                "role": "user",
                "content": f"Respond as a Senior AI Researcher and Technical Writer:\n{prompt}"
            }],
            "temperature": 0.7,
            "max_tokens": 1500,
            "top_p": 0.9
        }
        try:
            response = requests.post(
                "https://api.deepseek.com/v1/chat/completions",
                headers=headers,
                json=payload,
                timeout=45
            )
            response.raise_for_status()
            logger.info("Backend API request successful.")
            return response.json()
        except requests.exceptions.RequestException as e:
            logger.exception("Backend API request failed.")
            return {"error": str(e)}

    def _consensus_check(self, results: List[Dict]) -> Dict:
        """Pick the best of several candidate responses.

        Filters out failed requests, then returns the response whose first
        choice carries the longest message content — a simple length-based
        consensus heuristic.

        Returns:
            The winning response dict, or ``{"error": ...}`` if no request
            succeeded.
        """
        valid_results = [r for r in results if "error" not in r]
        if not valid_results:
            logger.error("All API requests failed.")
            return {"error": "All API requests failed"}
        # Choose the result with the longest response content as a simple consensus metric
        return max(
            valid_results,
            key=lambda x: len(x.get('choices', [{}])[0].get('message', {}).get('content', ''))
        )

class EnhancedCognitiveProcessor(CognitiveProcessor):
    """
    Extends CognitiveProcessor with multi-model ensemble processing and
    knowledge-graph integration: each query is sent to every ensemble model,
    the best response is selected by the inherited consensus check, and the
    winner is recorded as a node in the knowledge graph.
    """

    def __init__(self) -> None:
        super().__init__()
        self.knowledge_graph = QuantumKnowledgeGraph()
        # Models queried in parallel for every prompt.
        self.ensemble_models = ["deepseek-chat", "deepseek-coder"]

    def process_query(self, prompt: str) -> Dict:
        """Fan the prompt out to every ensemble model and return the best response.

        The winning response is also recorded in the knowledge graph as a
        side effect.

        Returns:
            The consensus response dict, or ``{"error": ...}`` if every
            model request failed.
        """
        futures = [
            self.executor.submit(self._execute_api_request, prompt, model)
            for model in self.ensemble_models
        ]
        results: List[Dict] = []
        for future in as_completed(futures):
            try:
                results.append(future.result())
            except Exception:
                # Keep collecting the other models' results even if one fails;
                # logger.exception records the traceback (consistent with the
                # parent class's error handling).
                logger.exception("Model processing error.")
        best_response = self._consensus_check(results)
        self._update_knowledge_graph(best_response)
        return best_response

    def _execute_api_request(self, prompt: str, model: str = "deepseek-chat") -> Dict:
        """Perform one synchronous POST for the given ensemble model.

        ``model`` defaults to "deepseek-chat" (the parent class's hard-coded
        model) so this override remains call-compatible with the base-class
        signature ``_execute_api_request(self, prompt)``.

        Returns:
            The parsed JSON response, or ``{"error": <message>}`` on any
            requests-level failure.
        """
        headers = {
            "Authorization": f"Bearer {ResearchConfig.DEEPSEEK_API_KEY}",
            "Content-Type": "application/json",
            "X-Research-Session": self.session_id
        }
        payload = {
            "model": model,
            "messages": [{
                "role": "user",
                "content": f"Respond as a Senior AI Researcher and Technical Writer:\n{prompt}"
            }],
            # Per-model sampling parameters come from the ensemble config.
            "temperature": ResearchConfig.ENSEMBLE_MODELS[model]["temp"],
            "max_tokens": ResearchConfig.ENSEMBLE_MODELS[model]["max_tokens"],
            "top_p": 0.9
        }
        try:
            response = requests.post(
                "https://api.deepseek.com/v1/chat/completions",
                headers=headers,
                json=payload,
                timeout=45
            )
            response.raise_for_status()
            logger.info(f"API request successful for model {model}.")
            return response.json()
        except requests.exceptions.RequestException as e:
            logger.exception(f"API request failed for model {model}.")
            return {"error": str(e)}

    def _update_knowledge_graph(self, response: Dict) -> None:
        """Record the winning response as an "analysis" node in the graph.

        When an earlier node exists, the new node is linked to its predecessor
        with an "evolution" relation so the graph tracks how analyses evolve.
        """
        content = response.get('choices', [{}])[0].get('message', {}).get('content', '')
        node_id = self.knowledge_graph.create_node({"content": content}, "analysis")
        # NOTE(review): assumes node ids are assigned sequentially by
        # QuantumKnowledgeGraph, so `node_id - 1` is the previous node —
        # confirm against knowledge_graph.py.
        if self.knowledge_graph.node_counter > 1:
            self.knowledge_graph.create_relation(node_id - 1, node_id, "evolution", strength=0.8)