import gradio as gr
import threading
from email.header import decode_header
import mysql.connector
from transformers import pipeline  # Assuming you'll use Hugging Face pipeline
import email, imaplib, json, time
import logging

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# Email and database configuration
IMAP_SERVER = 'imap.gmail.com'
EMAIL_ADDRESS = '[email protected]'
PASSWORD = 'gclc wsnx kywt uvqy'  # Store this securely in production
DB_CONFIG = {
    'host': '0.tcp.in.ngrok.io',
    'port': 11329,
    'user': 'root',
    'password': '',  # Add the correct password
    'database': 'shipment_details'
}
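
# insert_data() below assumes a `shipment_details` table whose columns match its
# INSERT statement. A minimal sketch of that schema (column names come from the
# code; the types are assumptions) might look like:
#
#   CREATE TABLE shipment_details (
#       id INT AUTO_INCREMENT PRIMARY KEY,
#       origin VARCHAR(255),
#       destination VARCHAR(255),
#       expected_shipment_datetime DATETIME,
#       types_of_service VARCHAR(50),
#       warehouse VARCHAR(255),
#       description TEXT,
#       quantities VARCHAR(255),
#       carrier_details VARCHAR(255),
#       sender VARCHAR(255),
#       receiver VARCHAR(255),
#       cc TEXT,
#       bcc TEXT,
#       subject VARCHAR(255)
#   );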

# JSON format for extracted shipment details
output_format = {
    "origin": "",
    "destination": "",
    "expected_shipment_datetime": "",
    "types_of_service": "",
    "warehouse": "",
    "description": "",
    "quantities": "",
    "carrier_details": ""
}
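# Note: output_format documents the JSON contract that get_details() is expected
# to return; it is not referenced directly elsewhere in this script.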

# Prompt for LLM to process shipment-related emails
prompt = """
System prompt: You will be provided with an email containing shipment details. Your task is to extract specific information based on the given instructions.
Instructions:
1. Focus only on extracting details about future shipments, ignore irrelevant information.
2. Output should be in JSON format. Missing information should be marked as null.
3. Extract the following:
    - origin
    - destination
    - expected_shipment_datetime (format: yyyy-mm-dd hh:mm:ss)
    - types_of_service (AIR, LCL, FCL)
    - warehouse
    - description (max 100 words)
    - quantities
    - carrier_details
4. The output should be formatted as follows:
{
    "origin": "",
    "destination": "",
    "expected_shipment_datetime": "",
    "types_of_service": "",
    "warehouse": "",
    "description": "",
    "quantities": "",
    "carrier_details": ""
}
"""

# Function to insert extracted shipment details into MySQL database
def insert_data(extracted_details):
    mydb = None
    cursor = None
    try:
        # Initialize MySQL database connection
        mydb = mysql.connector.connect(**DB_CONFIG)
        cursor = mydb.cursor()

        # Skip insertion if all required fields are empty
        required_fields = [
            'origin', 'destination', 'expected_shipment_datetime',
            'types_of_service', 'warehouse', 'description',
            'quantities', 'carrier_details'
        ]
        if all(extracted_details.get(field) in ["", None] for field in required_fields):
            logger.info("Skipping insertion: All extracted values are empty.")
            return

        # Insert data into database
        sql = """
        INSERT INTO shipment_details (
            origin, destination, expected_shipment_datetime, types_of_service, 
            warehouse, description, quantities, carrier_details, 
            sender, receiver, cc, bcc, subject
        ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        values = (
            extracted_details.get('origin'),
            extracted_details.get('destination'),
            extracted_details.get('expected_shipment_datetime'),
            extracted_details.get('types_of_service'),
            extracted_details.get('warehouse'),
            extracted_details.get('description'),
            extracted_details.get('quantities'),
            extracted_details.get('carrier_details'),
            extracted_details.get('sender'),
            extracted_details.get('receiver'),
            extracted_details.get('cc'),
            extracted_details.get('bcc'),
            extracted_details.get('subject')
        )
        cursor.execute(sql, values)
        mydb.commit()
        logger.info("Data inserted successfully.")

    except mysql.connector.Error as db_err:
        logger.error(f"Database error: {db_err}")
    except Exception as ex:
        logger.error(f"Error inserting data: {ex}")
    finally:
        # Always release the cursor and connection
        if cursor is not None:
            cursor.close()
        if mydb is not None:
            mydb.close()

# Function to extract shipment details using an LLM
def get_details(mail):
    try:
        # Initialize LLM model and tokenizer
        # Uncomment below if using Hugging Face models, or load your specific model accordingly
        # pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
        # output = pipe(f"{prompt}\n{mail}", max_new_tokens=200)

        # Assuming Llama model for completion (example)
        response = {  # Placeholder response for testing purposes
            "origin": "New York",
            "destination": "Los Angeles",
            "expected_shipment_datetime": "2024-10-20 12:00:00",
            "types_of_service": "AIR",
            "warehouse": "Warehouse 1",
            "description": "Electronics shipment",
            "quantities": "10",
            "carrier_details": "Carrier XYZ"
        }
        return json.dumps(response)  # Returning mock response for testing

    except Exception as ex:
        logger.error(f"Error generating details from LLM: {ex}")
        return None
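
# A minimal sketch, assuming a Hugging Face text-generation pipeline, of how
# get_details() could call a real model instead of returning the placeholder
# above. The checkpoint name and the JSON-extraction step are assumptions, not
# part of the original code; this helper is not called anywhere by default.
def get_details_llm(mail):
    try:
        # Hypothetical checkpoint; swap in whichever model you actually serve
        pipe = pipeline("text-generation", model="meta-llama/Llama-2-7b-chat-hf")
        output = pipe(f"{prompt}\n{mail}", max_new_tokens=200, return_full_text=False)
        generated = output[0]["generated_text"]
        # Keep only the first JSON object found in the generated text
        start, end = generated.find("{"), generated.rfind("}") + 1
        return generated[start:end] if start != -1 and end > start else None
    except Exception as ex:
        logger.error(f"Error generating details from LLM: {ex}")
        return None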

# Function to read and process unread emails
def read_email():
    logger.info('Ready to read email...')
    try:
        logger.info('Connecting to IMAP server...')
        mail = imaplib.IMAP4_SSL(IMAP_SERVER)
        mail.login(EMAIL_ADDRESS, PASSWORD)
        mail.select('inbox')
        logger.info('Selected inbox')
        status, messages = mail.search(None, 'UNSEEN')
        message_ids = messages[0].split()
        logger.info(f"Total unread emails: {len(message_ids)}")

        for message_id in message_ids:
            try:
                status, data = mail.fetch(message_id, '(RFC822)')
                raw_email = data[0][1]
                email_message = email.message_from_bytes(raw_email)

                # Extract metadata
                sender = email_message['From']
                receiver = email_message['To']
                cc = email_message.get('Cc', '')
                bcc = email_message.get('Bcc', '')
                # Decode MIME-encoded subject headers using decode_header
                subject = ''.join(
                    part.decode(charset or 'utf-8', errors='replace') if isinstance(part, bytes) else part
                    for part, charset in decode_header(email_message.get('Subject', ''))
                )

                # Extract email body (first text/plain part, or the whole payload)
                email_body = ''
                if email_message.is_multipart():
                    for part in email_message.walk():
                        if part.get_content_type() == 'text/plain':
                            email_body = part.get_payload(decode=True).decode('utf-8', errors='replace')
                            break
                else:
                    email_body = email_message.get_payload(decode=True).decode('utf-8', errors='replace')

                # Extract and store details
                extracted_details_str = get_details(email_body)
                if not extracted_details_str:
                    logger.warning(f"No details extracted from email {message_id}; skipping.")
                    continue
                extracted_details = json.loads(extracted_details_str)
                meta_data = {
                    'sender': sender, 'receiver': receiver, 'cc': cc, 'bcc': bcc, 'subject': subject
                }
                extracted_details.update(meta_data)
                insert_data(extracted_details)

            except Exception as e:
                logger.error(f"Error processing email {message_id}: {e}")

        mail.close()
        mail.logout()

    except Exception as e:
        logger.error(f"Error reading emails: {e}")

# Email processing loop
running = False
loop_thread = None 

def email_processing_loop():
    global running
    logger.info("Starting email processing loop...")
    while running:
        read_email()
        time.sleep(10)  # Check for new emails every 10 seconds
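
# Stopping is cooperative: stop_processing() below only clears the `running`
# flag, so the daemon thread exits after finishing its current
# read_email()/sleep cycle rather than immediately.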

def start_processing():
    global running, loop_thread
    if not running:
        running = True
        loop_thread = threading.Thread(target=email_processing_loop, daemon=True)
        loop_thread.start()
    return "Running"

def stop_processing():
    global running
    if running:
        running = False
    return "Stopped"

def update_status():
    return "Running" if running else "Stopped"

# Create Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Email Processing")
    
    status_display = gr.Textbox(label="Email Processing Status", value=update_status(), interactive=False)

    start_button = gr.Button("Start Processing")
    stop_button = gr.Button("Stop Processing")

    start_button.click(start_processing, outputs=status_display)
    stop_button.click(stop_processing, outputs=status_display)

    # Automatically refresh the status every 2 seconds
    # (gr.Timer and its .tick event require a recent Gradio 4.x release)
    status_timer = gr.Timer(2)
    status_timer.tick(update_status, outputs=status_display)

if __name__ == "__main__":
    logger.info('Starting project...')
    demo.launch()