update worker.py
worker.py CHANGED

@@ -52,9 +52,8 @@ class RabbitMQWorker:
         self.publisher_channel.basic_publish(
             exchange="",
             routing_key="ml_server",
-            body=json.dumps(body_dict),
+            body=json.dumps(body_dict).encode('utf-8'),
             properties=pika.BasicProperties(
-                delivery_mode=2,
                 headers=headers
             )
         )
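
Note on the publish change above: the body is now explicitly encoded to UTF-8 bytes, and delivery_mode=2 is dropped, so published messages are no longer marked persistent. A minimal standalone sketch of an equivalent publish with pika; the connection host, queue declaration, and payload values are illustrative assumptions, not taken from this commit:

    import json
    import pika

    # Illustrative setup; host, headers, and payload are assumptions for the sketch.
    connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
    channel = connection.channel()
    channel.queue_declare(queue="ml_server")

    body_dict = {"pattern": "process_files", "data": {"input_files": []}}
    headers = {"request_id": "example-123"}  # hypothetical header

    channel.basic_publish(
        exchange="",                                  # default exchange: route by queue name
        routing_key="ml_server",
        body=json.dumps(body_dict).encode("utf-8"),   # bytes body, matching the change above
        properties=pika.BasicProperties(headers=headers),
    )
    connection.close()
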
@@ -84,15 +83,15 @@ class RabbitMQWorker:
         logger.info("[Worker %s] Received message: %s, headers: %s", thread_id, body, headers)

         try:
-
-
-            contexts = []
-
-            body_dict = json.loads(body)
+            ch.basic_ack(delivery_tag=method.delivery_tag)
+            contexts = []

-
-
-
+            body_dict = json.loads(body)
+
+            pattern = body_dict.get("pattern")
+            if pattern == "process_files":
+                data = body_dict.get("data")
+                input_files = data.get("input_files")
                 logger.info("[Worker %s] Found %d file(s) to process.", thread_id, len(input_files))

                 for files in input_files:
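
The rewritten callback above acknowledges the delivery before any processing starts (at-most-once handling: the broker will not redeliver if the worker crashes mid-processing) and then dispatches on the message's "pattern" field. A compact sketch of that flow, assuming the standard pika callback signature; the per-file work is elided:

    import json
    import logging

    logger = logging.getLogger(__name__)

    def callback(ch, method, properties, body):
        # Ack first: no redelivery if processing fails afterwards.
        ch.basic_ack(delivery_tag=method.delivery_tag)
        try:
            body_dict = json.loads(body)
            pattern = body_dict.get("pattern")
            if pattern == "process_files":
                data = body_dict.get("data") or {}
                input_files = data.get("input_files") or []
                logger.info("Found %d file(s) to process.", len(input_files))
                # ... per-file processing elided ...
            else:
                logger.warning("Unknown pattern type in headers: %s", pattern)
        except Exception as e:
            logger.error("Error in callback: %s", e)
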
@@ -106,12 +105,13 @@ class RabbitMQWorker:
                         err_str = f"Error processing file {files.get('key', '')}: {e}"
                         logger.error(err_str)
                         contexts.append({"key": files.get("key", ""), "body": err_str})
-
-
+
+
                 data["md_context"] = contexts
                 # topics = data.get("topics", [])
-
-
+
+                body_dict["pattern"] = "question_extraction_update_from_gpu_server"
+                body_dict["data"] = data

                 # Publish results
                 if self.publish_message(body_dict, headers):
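
After processing, the worker attaches the collected contexts to data["md_context"] and rewrites the envelope's "pattern" before republishing. Roughly, the message shapes look like the following; only the key names come from the diff, the field values are illustrative:

    # Incoming message handled by this worker (values are examples).
    incoming = {
        "pattern": "process_files",
        "data": {
            "input_files": [{"key": "uploads/example.pdf"}],  # hypothetical entry
        },
    }

    # Outgoing message published back to the "ml_server" queue.
    outgoing = {
        "pattern": "question_extraction_update_from_gpu_server",
        "data": {
            "input_files": [{"key": "uploads/example.pdf"}],
            "md_context": [{"key": "uploads/example.pdf", "body": "...extracted text..."}],
        },
    }
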
@@ -121,9 +121,10 @@ class RabbitMQWorker:
                     logger.error("[Worker %s] Failed to publish results.", thread_id)

                 logger.info("[Worker %s] Contexts: %s", thread_id, contexts)
+
             else:
                 ch.basic_ack(delivery_tag=method.delivery_tag)
-                logger.warning("[Worker %s] Unknown
+                logger.warning("[Worker %s] Unknown pattern type in headers: %s", thread_id, pattern)

         except Exception as e:
             logger.error("Error in callback: %s", e)
|