# opdx/helpers/gcloud.py
import os

import requests
from google.cloud import storage
from googleapiclient import discovery
from tqdm import tqdm

# Module-level clients shared by all helpers below.
service = discovery.build('compute', 'v1')
storage_client = storage.Client()


def download_csv_from_gcloud(bucket_name, object_name, destination_file_path):
    """Download a file from Google Cloud Storage."""
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(object_name)
    # Download the object to the given local path.
    blob.download_to_filename(destination_file_path)
    print(f"File {object_name} downloaded to {destination_file_path}")


def upload_folder_to_gcloud(bucket_name, source_folder_path, destination_folder_name):
    """Upload every file under a local folder to a Google Cloud Storage bucket."""
    print(f"bucket_name={bucket_name}, source_folder_path={source_folder_path}, destination_folder_name={destination_folder_name}", flush=True)
    bucket = storage_client.bucket(bucket_name)
    # Walk the folder tree and upload each file, mirroring the local layout.
    for root, _, files in os.walk(source_folder_path):
        for file_name in files:
            local_file_path = os.path.join(root, file_name)
            # Build the destination blob name; GCS object names always use
            # forward slashes, so normalize os.sep for portability.
            rel_path = os.path.relpath(local_file_path, source_folder_path)
            destination_blob_name = "/".join(
                [destination_folder_name, rel_path.replace(os.sep, "/")]
            )
            print(f"destination_blob_name={destination_blob_name}")
            blob = bucket.blob(destination_blob_name)
            blob.upload_from_filename(local_file_path)
            print(f"File {local_file_path} uploaded to {destination_blob_name}.")


def download_folder(bucket_name, folder_name, destination_directory):
    """
    Download the contents of a folder from a Google Cloud Storage bucket to a local directory.

    Args:
        bucket_name (str): Name of the Google Cloud Storage bucket.
        folder_name (str): Name of the folder in the bucket to download.
        destination_directory (str): Local directory to save the downloaded files.
    """
    bucket = storage_client.get_bucket(bucket_name)
    # List every object under the folder prefix.
    blobs = bucket.list_blobs(prefix=folder_name)
    os.makedirs(destination_directory, exist_ok=True)
    for blob in tqdm(blobs, desc=f'Downloading {folder_name}'):
        # Skip directory placeholder objects (names ending in "/"), which
        # cannot be downloaded to a file.
        if blob.name.endswith('/'):
            continue
        # Map the blob name onto the local directory layout.
        local_file_path = os.path.join(destination_directory, os.path.relpath(blob.name, folder_name))
        # Ensure the parent directory exists before downloading.
        os.makedirs(os.path.dirname(local_file_path) or '.', exist_ok=True)
        blob.download_to_filename(local_file_path)
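
# Hypothetical usage, the inverse of the upload above:
#
#   download_folder("my-bucket", "runs/2024-06-01", "./results")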


def start_vm(project, zone, instance):
    """Start a Compute Engine instance; returns the (asynchronous) Operation."""
    request = service.instances().start(project=project, zone=zone, instance=instance)
    response = request.execute()
    return response


def stop_vm(project, zone, instance):
    """Stop a Compute Engine instance; returns the (asynchronous) Operation."""
    request = service.instances().stop(project=project, zone=zone, instance=instance)
    response = request.execute()
    return response
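

# start_vm/stop_vm return immediately with a long-running zonal Operation.
# A minimal sketch for blocking until it completes (wait_for_operation is an
# addition, not one of the original helpers), assuming the compute v1
# zoneOperations.wait method, which returns once the operation is DONE or
# after an API-side timeout of roughly two minutes, hence the loop:
def wait_for_operation(project, zone, operation_name):
    """Poll a zonal Operation until its status is DONE, then return it."""
    while True:
        result = service.zoneOperations().wait(
            project=project, zone=zone, operation=operation_name
        ).execute()
        if result.get('status') == 'DONE':
            return result

# Hypothetical usage:
#   op = stop_vm("my-project", "us-central1-a", "worker-1")
#   wait_for_operation("my-project", "us-central1-a", op["name"])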


def get_current_instance_name():
    """Return this VM's instance name from the GCE metadata server, or None."""
    # The metadata server is only reachable from within a Compute Engine VM.
    METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance/name"
    HEADERS = {"Metadata-Flavor": "Google"}
    try:
        response = requests.get(METADATA_URL, headers=HEADERS, timeout=5)
        response.raise_for_status()  # Raise an error for bad status codes
        return response.text
    except requests.exceptions.RequestException as e:
        print(f"Error fetching instance name: {e}")
        return None
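

# The metadata server also exposes the project and zone, so a VM can stop
# itself. A minimal sketch under that assumption (_get_metadata and
# stop_current_vm are additions, not original helpers; the zone endpoint
# returns "projects/<num>/zones/<zone>", so we keep the last path segment):
def _get_metadata(path):
    """Fetch one value from the GCE metadata server."""
    response = requests.get(
        f"http://metadata.google.internal/computeMetadata/v1/{path}",
        headers={"Metadata-Flavor": "Google"},
        timeout=5,
    )
    response.raise_for_status()
    return response.text


def stop_current_vm():
    """Stop the VM this code is running on."""
    project = _get_metadata("project/project-id")
    zone = _get_metadata("instance/zone").rsplit("/", 1)[-1]
    return stop_vm(project, zone, get_current_instance_name())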