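"""Package a local model folder into model.tar.gz and upload it to S3.

The archive contains the top-level files from models/customer_support_gpt and is
uploaded to s3://customer-support-gpt/models/model.tar.gz. The local archive is
removed once the upload finishes (or fails).
"""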
import boto3
from pathlib import Path
import tarfile
import logging
import os

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def create_model_tar():
    model_path = Path("models/customer_support_gpt")  # Path to your model folder
    tar_path = "model.tar.gz"  # Path for the output tar.gz file

    # Fail early if the model folder is missing instead of silently building an empty archive
    if not model_path.is_dir():
        raise FileNotFoundError(f"Model folder not found: {model_path}")

    # Create a tar.gz archive containing the top-level files in the model folder
    # (subdirectories are not traversed)
    with tarfile.open(tar_path, "w:gz") as tar:
        for file_path in model_path.glob("*"):
            if file_path.is_file():
                logger.info(f"Adding {file_path} to tar archive")
                tar.add(file_path, arcname=file_path.name)
    
    return tar_path

def upload_to_s3(tar_path, bucket_name, s3_key):
    # Initialize S3 client
    s3 = boto3.client("s3")
    
    # Upload tar.gz file to S3
    logger.info(f"Uploading {tar_path} to s3://{bucket_name}/{s3_key}")
    s3.upload_file(tar_path, bucket_name, s3_key)
    logger.info("Upload complete!")

# Main code
tar_path = None  # Defined up front so the finally block can check it safely

try:
    bucket_name = "customer-support-gpt"  # Your S3 bucket name
    s3_key = "models/model.tar.gz"  # S3 key (path in bucket)

    # Create the tar.gz archive
    tar_path = create_model_tar()

    # Upload the tar.gz to S3
    upload_to_s3(tar_path, bucket_name, s3_key)

except Exception as e:
    logger.error(f"An error occurred: {str(e)}")
    raise

finally:
    # Clean up the local tar file if it was created
    if tar_path and os.path.exists(tar_path):
        os.remove(tar_path)
        logger.info(f"Deleted local file: {tar_path}")