import multiprocessing
from time import sleep

from omagent_core.engine.automator.task_handler import TaskHandler
from omagent_core.engine.http.models.workflow_status import terminal_status
from omagent_core.engine.workflow.conductor_workflow import ConductorWorkflow
from omagent_core.utils.build import build_from_file
from omagent_core.utils.logger import logging
from omagent_core.utils.registry import registry

registry.import_module()


class ProgrammaticClient:
    """Programmatic client for running ConductorWorkflow instances with local workers."""

    def __init__(
        self,
        processor: ConductorWorkflow = None,
        config_path: str = "./config",
        workers: list = None,
    ) -> None:
        self._processor = processor
        self._config_path = config_path
        # Avoid a shared mutable default argument for the worker list.
        self._workers = workers if workers is not None else []
        self._task_handler_processor = None
        self._task_to_domain = {}

    def start_processor(self):
        """Start the worker processes and launch the workflow with an empty input."""
        worker_config = build_from_file(self._config_path)
        self._task_handler_processor = TaskHandler(
            worker_config=worker_config, workers=self._workers
        )
        self._task_handler_processor.start_processes()
        self._processor.start_workflow_with_input(
            workflow_input={}, task_to_domain=self._task_to_domain
        )

    def start_processor_with_input(self, workflow_input: dict):
        """Start workers on first use, run the workflow with the given input, and return its output."""
        try:
            if self._task_handler_processor is None:
                worker_config = build_from_file(self._config_path)
                self._task_handler_processor = TaskHandler(
                    worker_config=worker_config,
                    workers=self._workers,
                    task_to_domain=self._task_to_domain,
                )
                self._task_handler_processor.start_processes()
            return self._process_workflow(self._processor, workflow_input)
        except Exception as e:
            logging.error(f"Error in start_processor_with_input: {e}")

    def start_batch_processor(self, workflow_input_list: list[dict], max_tasks: int = 10):
        """Run a list of workflow inputs concurrently, keeping at most `max_tasks` workflows in flight.

        Returns a list of outputs in the same order as `workflow_input_list`.
        """
        results = [None] * len(workflow_input_list)
        worker_config = build_from_file(self._config_path)
        if self._task_handler_processor is None:
            self._task_handler_processor = TaskHandler(
                worker_config=worker_config,
                workers=self._workers,
                task_to_domain=self._task_to_domain,
            )
            self._task_handler_processor.start_processes()

        result_queue = multiprocessing.Queue()
        active_processes = []

        for idx, workflow_input in enumerate(workflow_input_list):
            # Wait until a slot frees up before launching another workflow.
            while len(active_processes) >= max_tasks:
                for p in active_processes[:]:
                    if not p.is_alive():
                        p.join()
                        active_processes.remove(p)
                        if not result_queue.empty():
                            task_idx, result = result_queue.get()
                            results[task_idx] = result
                sleep(0.1)

            p = multiprocessing.Process(
                target=self._process_workflow_with_queue,
                args=(self._processor, workflow_input, result_queue, idx),
            )
            p.start()
            active_processes.append(p)

        # Wait for the remaining workflows and drain any results still in the queue.
        for p in active_processes:
            p.join()

        while not result_queue.empty():
            task_idx, result = result_queue.get()
            results[task_idx] = result

        return results

    def stop_processor(self):
        """Stop the worker processes if they were started."""
        if self._task_handler_processor is not None:
            self._task_handler_processor.stop_processes()

    def _process_workflow(self, workflow: ConductorWorkflow, workflow_input: dict):
        """Start a workflow, poll until it reaches a terminal status, and return its output."""
        workflow_instance_id = None
        try:
            workflow_instance_id = workflow.start_workflow_with_input(
                workflow_input=workflow_input, task_to_domain=self._task_to_domain
            )
            # Poll once per second until the workflow reaches a terminal status.
            while True:
                status = workflow.get_workflow(workflow_id=workflow_instance_id).status
                if status in terminal_status:
                    break
                sleep(1)
            return workflow.get_workflow(workflow_id=workflow_instance_id).output
        except KeyboardInterrupt:
            logging.info("\nDetected Ctrl+C, stopping workflow...")
            if workflow_instance_id is not None:
                workflow._executor.terminate(workflow_id=workflow_instance_id)
            raise  # Rethrow the exception so the program can exit normally

    def _process_workflow_with_queue(
        self,
        workflow: ConductorWorkflow,
        workflow_input: dict,
        queue: multiprocessing.Queue,
        task_idx: int,
    ):
        """Run a workflow in a child process and put (task_idx, output) onto the shared queue."""
        try:
            result = self._process_workflow(workflow, workflow_input)
            queue.put((task_idx, result))
        except Exception as e:
            logging.error(f"Error in process workflow: {e}")
            queue.put((task_idx, None))
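
# --- Usage sketch (illustrative only, not part of the original module) ---
# A minimal example of driving ProgrammaticClient, assuming the surrounding
# project defines a ConductorWorkflow instance `example_workflow`, a worker
# list `example_workers`, and a "./config" directory; adjust the names and
# paths to your own setup before running.
#
# if __name__ == "__main__":
#     client = ProgrammaticClient(
#         processor=example_workflow,
#         config_path="./config",
#         workers=example_workers,
#     )
#     try:
#         # Keep at most 5 workflows in flight while processing a small batch.
#         outputs = client.start_batch_processor(
#             workflow_input_list=[{"query": "hello"}, {"query": "world"}],
#             max_tasks=5,
#         )
#         print(outputs)
#     finally:
#         client.stop_processor()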