File size: 5,177 Bytes
4a190c2
 
 
 
 
 
 
 
 
 
 
 
a73f005
 
4a190c2
a73f005
4a190c2
 
a73f005
4a190c2
a73f005
4a190c2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
from typing import Type, List
import pandas as pd
import boto3
import tempfile
import os
from chatfuncs.helper_functions import get_or_create_env_var

# Type alias for a pandas DataFrame class (note: Type[...] denotes the class
# itself, not an instance — kept as-is to match existing usage).
PandasDataFrame = Type[pd.DataFrame]

# Get AWS credentials if required
# Default S3 bucket name; only (re)populated below when AWS functions are enabled.
bucket_name=""

# "1" enables the AWS session / role-introspection code below; defaults to off.
RUN_AWS_FUNCTIONS = get_or_create_env_var("RUN_AWS_FUNCTIONS", "0")
print(f'The value of RUN_AWS_FUNCTIONS is {RUN_AWS_FUNCTIONS}')

# Region used to build the STS endpoint below; empty by default.
AWS_REGION = get_or_create_env_var('AWS_REGION', '')
print(f'The value of AWS_REGION is {AWS_REGION}')

if RUN_AWS_FUNCTIONS == "1":
    try:
        # NOTE(review): os.environ[''] looks up an EMPTY environment-variable
        # name and will always raise KeyError, so bucket_name stays "" and the
        # session is never created. The intended variable name appears to be
        # missing here — confirm and fill it in.
        bucket_name = os.environ['']
        session = boto3.Session() # profile_name="default"
    except Exception as e:
        # Best-effort: AWS setup failures are logged, not fatal, at import time.
        print(e)

    def get_assumed_role_info():
        """Return (ARN, role name) of the caller identity, via a regional STS endpoint."""
        sts_endpoint = 'https://sts.' + AWS_REGION + '.amazonaws.com'
        sts = boto3.client('sts', region_name=AWS_REGION, endpoint_url=sts_endpoint)
        response = sts.get_caller_identity()

        # Extract ARN of the assumed role
        assumed_role_arn = response['Arn']
        
        # Extract the name of the assumed role from the ARN
        assumed_role_name = assumed_role_arn.split('/')[-1]
        
        return assumed_role_arn, assumed_role_name

    try:
        assumed_role_arn, assumed_role_name = get_assumed_role_info()

        print("Assumed Role ARN:", assumed_role_arn)
        print("Assumed Role Name:", assumed_role_name)

    except Exception as e:
        
        # Identity lookup is informational only; failures are logged and ignored.
        print(e)

# Download direct from S3 - requires login credentials
def download_file_from_s3(bucket_name, key, local_file_path):
    """Fetch a single object from S3 and write it to local_file_path."""
    client = boto3.client('s3')
    client.download_file(bucket_name, key, local_file_path)
    print(f"File downloaded from S3: s3://{bucket_name}/{key} to {local_file_path}")
                         
def download_folder_from_s3(bucket_name, s3_folder, local_folder):
    """
    Download all files from an S3 folder to a local folder.
    """
    client = boto3.client('s3')

    # Everything found under the prefix is mirrored into local_folder.
    listing = client.list_objects_v2(Bucket=bucket_name, Prefix=s3_folder)

    for entry in listing.get('Contents', []):
        object_key = entry['Key']
        # Preserve each object's sub-path relative to the S3 prefix.
        local_file_path = os.path.join(local_folder, os.path.relpath(object_key, s3_folder))

        # Ensure the destination directory exists before writing.
        os.makedirs(os.path.dirname(local_file_path), exist_ok=True)

        # Per-object failures are logged and skipped so one bad key
        # doesn't abort the rest of the folder.
        try:
            client.download_file(bucket_name, object_key, local_file_path)
            print(f"Downloaded 's3://{bucket_name}/{object_key}' to '{local_file_path}'")
        except Exception as e:
            print(f"Error downloading 's3://{bucket_name}/{object_key}':", e)

def download_files_from_s3(bucket_name, s3_folder, local_folder, filenames):
    """
    Download specific files from an S3 folder to a local folder.

    Args:
    - bucket_name: Name of the S3 bucket.
    - s3_folder: Key prefix the files live under in the bucket.
    - local_folder: Local destination directory.
    - filenames: List of file names to fetch, or the string '*' to fetch
      every object found under s3_folder.
    """
    import posixpath  # S3 keys always use '/', regardless of the local OS

    s3 = boto3.client('s3')

    print("Trying to download file: ", filenames)

    if filenames == '*':
        # List all objects in the S3 folder
        print("Trying to download all files in AWS folder: ", s3_folder)
        response = s3.list_objects_v2(Bucket=bucket_name, Prefix=s3_folder)

        print("Found files in AWS folder: ", response.get('Contents', []))

        filenames = [obj['Key'].split('/')[-1] for obj in response.get('Contents', [])]

        print("Found filenames in AWS folder: ", filenames)

    for filename in filenames:
        # Build the key with POSIX separators: the original os.path.join
        # produced backslashes on Windows, which are invalid in S3 keys.
        object_key = posixpath.join(s3_folder, filename)
        local_file_path = os.path.join(local_folder, filename)

        # Create directories if necessary; dirname is '' when local_folder
        # is empty, and makedirs('') would raise.
        local_dir = os.path.dirname(local_file_path)
        if local_dir:
            os.makedirs(local_dir, exist_ok=True)

        # Download the object; log-and-continue so one failure doesn't
        # abort the remaining files.
        try:
            s3.download_file(bucket_name, object_key, local_file_path)
            print(f"Downloaded 's3://{bucket_name}/{object_key}' to '{local_file_path}'")
        except Exception as e:
            print(f"Error downloading 's3://{bucket_name}/{object_key}':", e)

def upload_file_to_s3(local_file_paths:List[str], s3_key:str, s3_bucket:str=bucket_name):
    """
    Uploads a file from local machine to Amazon S3.

    Args:
    - local_file_paths: Local file path(s) of the file(s) to upload.
      A bare string is treated as a single-element list.
    - s3_key: Key (path) prefix for the file in the S3 bucket; each
      file's base name is appended to it.
    - s3_bucket: Name of the S3 bucket. Defaults to the module-level
      bucket_name captured at import time.

    Returns:
    - Message as variable/printed to console ('' for an empty input list).
    """
    final_out_message = []

    s3_client = boto3.client('s3')

    # Allow a single path to be passed as a bare string.
    if isinstance(local_file_paths, str):
        local_file_paths = [local_file_paths]

    for file in local_file_paths:
        try:
            # Get file name off file path
            file_name = os.path.basename(file)

            s3_key_full = s3_key + file_name
            print("S3 key: ", s3_key_full)

            s3_client.upload_file(file, s3_bucket, s3_key_full)
            out_message = "File " + file_name + " uploaded successfully!"
            print(out_message)

        except Exception as e:
            out_message = f"Error uploading file(s): {e}"
            print(out_message)

        final_out_message.append(out_message)

    # Join once, after the loop. The original joined inside the loop and
    # raised UnboundLocalError at return when local_file_paths was empty.
    return '\n'.join(final_out_message)