# Python functions extracted from the Azure/azure-cli repository (MIT license),
# under src/azure-cli/azure/cli/command_modules/storage/.
def table_permission_validator(namespace):
""" A special case for table because the SDK associates the QUERY permission with 'r' """
from azure.data.tables._models import TableSasPermissions
if namespace.permission:
if set(namespace.permission) - set('raud'):
help_string = '(r)ead/query (a)dd (u)pdate (d)elete'
raise ValueError('valid values are {} or a combination thereof.'.format(help_string))
        namespace.permission = TableSasPermissions(_str=namespace.permission)
# source: _validators.py
def get_source_file_or_blob_service_client(cmd, namespace):
"""
    Create the second file service or blob service client for the batch copy command, which is used to
    list the source files or blobs. If both the source account and the source URI are omitted, it
    indicates that the user wants to copy files or blobs within the same storage account; in that case
    the source client is set to None and the command falls back to the destination client.
"""
t_file_svc, t_block_blob_svc = cmd.get_models('file#FileService', 'blob.blockblobservice#BlockBlobService')
usage_string = 'invalid usage: supply only one of the following argument sets:' + \
'\n\t --source-uri [--source-sas]' + \
'\n\tOR --source-container' + \
'\n\tOR --source-container --source-account-name --source-account-key' + \
'\n\tOR --source-container --source-account-name --source-sas' + \
'\n\tOR --source-share --source-account-name --source-account-key' + \
'\n\tOR --source-share --source-account-name --source-account-sas'
ns = vars(namespace)
source_account = ns.pop('source_account_name', None)
source_key = ns.pop('source_account_key', None)
source_uri = ns.pop('source_uri', None)
source_sas = ns.get('source_sas', None)
source_container = ns.get('source_container', None)
source_share = ns.get('source_share', None)
if source_uri and source_account:
raise ValueError(usage_string)
if not source_uri and bool(source_container) == bool(source_share): # must be container or share
raise ValueError(usage_string)
if (not source_account) and (not source_uri):
        # Set source_client to None if neither source_account nor source_uri is given. This
        # tells the command that the source file share or blob container is in the same storage
        # account as the destination file share or blob container.
        #
        # The command itself should create the source service client, since the validator can't
        # access the destination client through the namespace.
        #
        # A few argument checks are made as well to avoid ambiguity.
if source_key or source_sas:
raise ValueError('invalid usage: --source-account-name is missing; the source account is assumed to be the'
' same as the destination account. Do not provide --source-sas or --source-account-key')
ns['source_client'] = None
if 'token_credential' not in ns: # not using oauth
return
# oauth is only possible through destination, must still get source creds
source_account, source_key, source_sas = ns['account_name'], ns['account_key'], ns['sas_token']
if source_account:
if not (source_key or source_sas):
            # when neither the storage account key nor a SAS token is given, try to fetch the key
            # in the current subscription
source_key = _query_account_key(cmd.cli_ctx, source_account)
if source_container:
ns['source_client'] = get_storage_data_service_client(
cmd.cli_ctx, t_block_blob_svc, name=source_account, key=source_key, sas_token=source_sas)
elif source_share:
ns['source_client'] = get_storage_data_service_client(
cmd.cli_ctx, t_file_svc, name=source_account, key=source_key, sas_token=source_sas)
elif source_uri:
if source_key or source_container or source_share:
raise ValueError(usage_string)
from .storage_url_helpers import StorageResourceIdentifier
if source_sas:
source_uri = '{}{}{}'.format(source_uri, '?', source_sas.lstrip('?'))
identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, source_uri)
nor_container_or_share = not identifier.container and not identifier.share
if not identifier.is_url():
raise ValueError('incorrect usage: --source-uri expects a URI')
if identifier.blob or identifier.directory or identifier.filename or nor_container_or_share:
raise ValueError('incorrect usage: --source-uri has to be blob container or file share')
if identifier.sas_token:
ns['source_sas'] = identifier.sas_token
else:
source_key = _query_account_key(cmd.cli_ctx, identifier.account_name)
if identifier.container:
ns['source_container'] = identifier.container
if identifier.account_name != ns.get('account_name'):
ns['source_client'] = get_storage_data_service_client(
cmd.cli_ctx, t_block_blob_svc, name=identifier.account_name, key=source_key,
sas_token=identifier.sas_token)
elif identifier.share:
ns['source_share'] = identifier.share
if identifier.account_name != ns.get('account_name'):
ns['source_client'] = get_storage_data_service_client(
cmd.cli_ctx, t_file_svc, name=identifier.account_name, key=source_key,
                    sas_token=identifier.sas_token)
# source: _validators.py
def get_source_file_or_blob_service_client_track2(cmd, namespace):
"""
    Create the second file service or blob service client for the batch copy command, which is used to
    list the source files or blobs. If both the source account and the source URI are omitted, it
    indicates that the user wants to copy files or blobs within the same storage account.
"""
usage_string = 'invalid usage: supply only one of the following argument sets:' + \
'\n\t --source-uri [--source-sas]' + \
'\n\tOR --source-container' + \
'\n\tOR --source-container --source-account-name --source-account-key' + \
'\n\tOR --source-container --source-account-name --source-sas' + \
'\n\tOR --source-share' + \
'\n\tOR --source-share --source-account-name --source-account-key' + \
'\n\tOR --source-share --source-account-name --source-account-sas'
ns = vars(namespace)
source_account = ns.pop('source_account_name', None)
source_key = ns.pop('source_account_key', None)
source_uri = ns.pop('source_uri', None)
source_sas = ns.get('source_sas', None)
source_container = ns.get('source_container', None)
source_share = ns.get('source_share', None)
token_credential = ns.get('token_credential')
is_oauth = token_credential is not None
if source_uri and source_account:
raise ValueError(usage_string)
if not source_uri and bool(source_container) == bool(source_share): # must be container or share
raise ValueError(usage_string)
if (not source_account) and (not source_uri):
        # Set source_client to None if neither source_account nor source_uri is given. This
        # tells the command that the source file share or blob container is in the same storage
        # account as the destination file share or blob container.
        #
        # The command itself should create the source service client, since the validator can't
        # access the destination client through the namespace.
        #
        # A few argument checks are made as well to avoid ambiguity.
if source_key or source_sas:
raise ValueError('invalid usage: --source-account-name is missing; the source account is assumed to be the'
' same as the destination account. Do not provide --source-sas or --source-account-key')
source_account, source_key, source_sas = ns['account_name'], ns['account_key'], ns['sas_token']
if source_account and not is_oauth:
if not (source_key or source_sas):
# when neither storage account key nor SAS is given, try to fetch the key in the current
# subscription
source_key = _query_account_key(cmd.cli_ctx, source_account)
elif source_uri and not is_oauth:
if source_key or source_container or source_share:
raise ValueError(usage_string)
from .storage_url_helpers import StorageResourceIdentifier
if source_sas:
source_uri = '{}{}{}'.format(source_uri, '?', source_sas.lstrip('?'))
identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, source_uri)
nor_container_or_share = not identifier.container and not identifier.share
if not identifier.is_url():
raise ValueError('incorrect usage: --source-uri expects a URI')
if identifier.blob or identifier.directory or identifier.filename or nor_container_or_share:
raise ValueError('incorrect usage: --source-uri has to be blob container or file share')
source_account = identifier.account_name
source_container = identifier.container
source_share = identifier.share
if identifier.sas_token:
source_sas = identifier.sas_token
else:
source_key = _query_account_key(cmd.cli_ctx, identifier.account_name)
# config source account credential
ns['source_account_name'] = source_account
ns['source_account_key'] = source_key
ns['source_container'] = source_container
ns['source_share'] = source_share
# get sas token for source
if not source_sas and not is_oauth:
from .util import create_short_lived_container_sas_track2, create_short_lived_share_sas_track2
if source_container:
source_sas = create_short_lived_container_sas_track2(cmd, account_name=source_account,
account_key=source_key,
container=source_container)
if source_share:
source_sas = create_short_lived_share_sas_track2(cmd, account_name=source_account,
account_key=source_key,
share=source_share)
ns['source_sas'] = source_sas
client_kwargs = {'account_name': ns['source_account_name'],
'account_key': ns['source_account_key'],
'sas_token': ns['source_sas']}
if is_oauth:
client_kwargs.update({'token_credential': token_credential})
if source_container:
ns['source_client'] = cf_blob_service(cmd.cli_ctx, client_kwargs)
if source_share:
        ns['source_client'] = cf_share_service(cmd.cli_ctx, client_kwargs)
# source: _validators.py
def process_container_delete_parameters(cmd, namespace):
"""Process the parameters for storage container delete command"""
# check whether to use mgmt or data-plane
if namespace.bypass_immutability_policy:
# use management-plane
namespace.processed_account_name = namespace.account_name
namespace.processed_resource_group, namespace.mgmt_client = _query_account_rg(
cmd.cli_ctx, namespace.account_name)
del namespace.auth_mode
else:
# use data-plane, like before
        validate_client_parameters(cmd, namespace)
# source: _validators.py
def process_blob_download_batch_parameters(cmd, namespace):
"""Process the parameters for storage blob download command"""
from azure.cli.core.azclierror import InvalidArgumentValueError
# 1. quick check
if not os.path.exists(namespace.destination) or not os.path.isdir(namespace.destination):
raise InvalidArgumentValueError('incorrect usage: destination must be an existing directory')
# 2. try to extract account name and container name from source string
_process_blob_batch_container_parameters(cmd, namespace)
# 3. Call validators
    add_progress_callback(cmd, namespace)
# source: _validators.py
def process_blob_upload_batch_parameters(cmd, namespace):
"""Process the source and destination of storage blob upload command"""
# 1. quick check
if not os.path.exists(namespace.source) or not os.path.isdir(namespace.source):
raise ValueError('incorrect usage: source must be an existing directory')
# 2. try to extract account name and container name from destination string
_process_blob_batch_container_parameters(cmd, namespace, source=False)
# 3. collect the files to be uploaded
namespace.source = os.path.realpath(namespace.source)
namespace.source_files = list(glob_files_locally(namespace.source, namespace.pattern))
# 4. determine blob type
if namespace.blob_type is None:
vhd_files = [f for f in namespace.source_files if f[0].endswith('.vhd')]
if any(vhd_files) and len(vhd_files) == len(namespace.source_files):
# when all the listed files are vhd files use page
namespace.blob_type = 'page'
elif any(vhd_files):
from azure.cli.core.azclierror import ArgumentUsageError
# source files contain vhd files but not all of them
raise ArgumentUsageError("""Fail to guess the required blob type. Type of the files to be
uploaded are not consistent. Default blob type for .vhd files is "page", while
others are "block". You can solve this problem by either explicitly set the blob
type or ensure the pattern matches a correct set of files.""")
else:
namespace.blob_type = 'block'
# 5. Ignore content-md5 for batch upload
namespace.content_md5 = None
# 6. call other validators
validate_metadata(namespace)
t_blob_content_settings = get_sdk(cmd.cli_ctx, ResourceType.DATA_STORAGE_BLOB, '_models#ContentSettings')
get_content_setting_validator(t_blob_content_settings, update=False)(cmd, namespace)
add_progress_callback(cmd, namespace)
blob_tier_validator_track2(cmd, namespace)
    validate_tags(namespace)
# source: _validators.py
def _process_blob_batch_container_parameters(cmd, namespace, source=True):
"""Process the container parameters for storage blob batch commands before populating args from environment."""
if source:
container_arg, container_name_arg = 'source', 'source_container_name'
else:
# destination
container_arg, container_name_arg = 'destination', 'destination_container_name'
# try to extract account name and container name from source string
from .storage_url_helpers import StorageResourceIdentifier
container_arg_val = getattr(namespace, container_arg) # either a url or name
identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, container_arg_val)
if not identifier.is_url():
setattr(namespace, container_name_arg, container_arg_val)
elif identifier.blob:
raise ValueError('incorrect usage: {} should be either a container URL or name'.format(container_arg))
else:
setattr(namespace, container_name_arg, identifier.container)
if namespace.account_name is None:
namespace.account_name = identifier.account_name
elif namespace.account_name != identifier.account_name:
raise ValueError('The given storage account name is not consistent with the '
'account name in the destination URL')
# if no sas-token is given and the container url contains one, use it
if not namespace.sas_token and identifier.sas_token:
namespace.sas_token = identifier.sas_token
# Finally, grab missing storage connection parameters from environment variables
    validate_client_parameters(cmd, namespace)
# source: _validators.py
def process_file_upload_batch_parameters(cmd, namespace):
"""Process the parameters of storage file batch upload command"""
# 1. quick check
if not os.path.exists(namespace.source):
raise ValueError('incorrect usage: source {} does not exist'.format(namespace.source))
if not os.path.isdir(namespace.source):
raise ValueError('incorrect usage: source must be a directory')
# 2. try to extract account name and container name from destination string
from .storage_url_helpers import StorageResourceIdentifier
identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, namespace.destination)
if identifier.is_url():
if identifier.filename or identifier.directory:
raise ValueError('incorrect usage: destination must be a file share url')
namespace.destination = identifier.share
if not namespace.account_name:
namespace.account_name = identifier.account_name
namespace.source = os.path.realpath(namespace.source)
namespace.share_name = namespace.destination
# Ignore content_md5 for batch upload
    namespace.content_md5 = None
# source: _validators.py
def process_file_download_batch_parameters(cmd, namespace):
"""Process the parameters for storage file batch download command"""
# 1. quick check
if not os.path.exists(namespace.destination) or not os.path.isdir(namespace.destination):
raise ValueError('incorrect usage: destination must be an existing directory')
# 2. try to extract account name and share name from source string
    process_file_batch_source_parameters(cmd, namespace)
# source: _validators.py
def get_datetime_type(to_string):
""" Validates UTC datetime. Examples of accepted forms:
2017-12-31T01:11:59Z,2017-12-31T01:11Z or 2017-12-31T01Z or 2017-12-31 """
from datetime import datetime
def datetime_type(string):
""" Validates UTC datetime. Examples of accepted forms:
2017-12-31T01:11:59Z,2017-12-31T01:11Z or 2017-12-31T01Z or 2017-12-31 """
accepted_date_formats = ['%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%dT%H:%MZ',
'%Y-%m-%dT%HZ', '%Y-%m-%d']
for form in accepted_date_formats:
try:
if to_string:
return datetime.strptime(string, form).strftime(form)
return datetime.strptime(string, form)
except ValueError:
continue
raise ValueError("Input '{}' not valid. Valid example: 2000-12-31T12:59:59Z".format(string))
    return datetime_type
# source: _validators.py
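
# Illustrative usage (added; not part of the original module). Assumes get_datetime_type above
# is in scope. The returned parser accepts the UTC formats listed in its docstring.
def _demo_get_datetime_type():
    parse = get_datetime_type(to_string=False)
    started = parse('2017-12-31T01:11Z')                            # datetime.datetime(2017, 12, 31, 1, 11)
    normalized = get_datetime_type(to_string=True)('2017-12-31')    # '2017-12-31'
    return started, normalized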
def get_api_version_type():
""" Examples of accepted forms: 2017-12-31 """
from datetime import datetime
def api_version_type(string):
""" Validates api version format. Examples of accepted form: 2017-12-31 """
accepted_format = '%Y-%m-%d'
try:
return datetime.strptime(string, accepted_format).strftime(accepted_format)
except ValueError:
from azure.cli.core.azclierror import InvalidArgumentValueError
raise InvalidArgumentValueError("Input '{}' not valid. Valid example: 2008-10-27.".format(string))
    return api_version_type
# source: _validators.py
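
# Illustrative usage (added; not part of the original module). Assumes get_api_version_type above
# is in scope; a malformed value raises InvalidArgumentValueError.
def _demo_get_api_version_type():
    validate = get_api_version_type()
    return validate('2019-07-07')    # '2019-07-07'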
def ipv4_range_type(string):
""" Validates an IPv4 address or address range. """
import re
ip_format = r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}'
if not re.match("^{}$".format(ip_format), string):
if not re.match("^{ip_format}-{ip_format}$".format(ip_format=ip_format), string):
raise CLIError("Please use the following format to specify ip range: '{ip1}-{ip2}'.")
    return string
# source: _validators.py
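
# Illustrative usage (added; not part of the original module). Assumes ipv4_range_type above is
# in scope; a single address or a dash-separated range is returned unchanged, anything else
# raises CLIError.
def _demo_ipv4_range_type():
    single = ipv4_range_type('10.0.0.1')                # '10.0.0.1'
    ip_range = ipv4_range_type('10.0.0.1-10.0.0.255')   # '10.0.0.1-10.0.0.255'
    return single, ip_range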
def resource_type_type(loader):
""" Returns a function which validates that resource types string contains only a combination of service,
container, and object. Their shorthand representations are s, c, and o. """
def impl(string):
t_resources = loader.get_models('common.models#ResourceTypes')
if set(string) - set("sco"):
raise ValueError
return t_resources(_str=''.join(set(string)))
    return impl
# source: _validators.py
def resource_type_type_v2(loader):
""" Returns a function which validates that resource types string contains only a combination of service,
container, and object. Their shorthand representations are s, c, and o. """
def _get_ordered_set(string):
if not string:
return string
result = []
for item in string:
if item not in result:
result.append(item)
return ''.join(result)
def impl(string):
t_resources = loader.get_models('_shared.models#ResourceTypes', resource_type=ResourceType.DATA_STORAGE_BLOB)
if set(string) - set("sco"):
raise ValueError
return t_resources.from_string(_get_ordered_set(string))
    return impl
# source: _validators.py
def services_type(loader):
""" Returns a function which validates that services string contains only a combination of blob, queue, table,
and file. Their shorthand representations are b, q, t, and f. """
def impl(string):
t_services = loader.get_models('common.models#Services')
if set(string) - set("bqtf"):
raise ValueError
return t_services(_str=''.join(set(string)))
    return impl
# source: _validators.py
def services_type_v2():
""" Returns a function which validates that services string contains only a combination of blob, queue, table,
and file. Their shorthand representations are b, q, t, and f. """
def impl(string):
if set(string) - set("bqtf"):
raise ValueError
return ''.join(set(string))
    return impl
# source: _validators.py
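
# Illustrative usage (added; not part of the original module). Assumes services_type_v2 above is
# in scope. Note that set() removes duplicates but does not preserve the input order of the
# service shorthand characters.
def _demo_services_type_v2():
    validate = services_type_v2()
    return validate('bqf')    # some ordering of the characters 'b', 'q' and 'f'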
def get_cors(self, timeout=None, to_string=False):
""" In blob, file, queue service, CorsRule's `allowed_origins`, `allowed_methods`, `allowed_headers` and
`exposed_headers` are string, but in TableCorsRule, they become list.
If `to_string` is True, convert TableCorsRule's properties to string.
"""
r = self.client.get_service_properties(timeout=timeout)['cors']
if not to_string:
return r
if self.name == 'table':
r = [self.table_cors_to_generated(i) for i in r]
for i in r:
# backwards compatibility when migrate multiapi to Track 2
i.allowed_origins = i.allowed_origins.replace(',', ', ')
i.allowed_methods = i.allowed_methods.replace(',', ', ')
i.allowed_headers = i.allowed_headers.replace(',', ', ')
i.exposed_headers = i.exposed_headers.replace(',', ', ')
    return r
# source: services_wrapper.py
def multi_service_properties_factory(cli_ctx, kwargs):
"""Create multiple data services properties instance based on the services option"""
from .services_wrapper_azure_stack import ServiceProperties
t_base_blob_service, t_file_service, t_queue_service, = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
'blob.baseblobservice#BaseBlobService',
'file#FileService', 'queue#QueueService')
t_table_service = get_table_data_type(cli_ctx, 'table', 'TableService')
account_name = kwargs.pop('account_name', None)
account_key = kwargs.pop('account_key', None)
connection_string = kwargs.pop('connection_string', None)
sas_token = kwargs.pop('sas_token', None)
services = kwargs.pop('services', [])
def get_creator(name, service_type):
return lambda: ServiceProperties(cli_ctx, name, service_type, account_name, account_key, connection_string,
sas_token)
creators = {'b': get_creator('blob', t_base_blob_service), 'f': get_creator('file', t_file_service),
'q': get_creator('queue', t_queue_service), 't': get_creator('table', t_table_service)}
    return [creators[s]() for s in services]
# source: _client_factory_azure_stack.py
def transform_acl_list_output(result):
""" Transform to convert SDK output into a form that is more readily
usable by the CLI and tools such as jpterm. """
from collections import OrderedDict
new_result = []
for key in sorted(result.keys()):
new_entry = OrderedDict()
new_entry['Name'] = key
new_entry['Start'] = result[key]['start']
new_entry['Expiry'] = result[key]['expiry']
new_entry['Permissions'] = result[key]['permission']
new_result.append(new_entry)
    return new_result
# source: _transformers.py
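
# Illustrative usage (added; not part of the original module). Assumes transform_acl_list_output
# above is in scope; the input mirrors the stored-access-policy dictionary returned by the SDK.
def _demo_transform_acl_list_output():
    policies = {
        'readpolicy': {'start': '2024-01-01T00:00:00Z',
                       'expiry': '2024-02-01T00:00:00Z',
                       'permission': 'r'},
    }
    return transform_acl_list_output(policies)
    # [OrderedDict([('Name', 'readpolicy'), ('Start', '2024-01-01T00:00:00Z'),
    #               ('Expiry', '2024-02-01T00:00:00Z'), ('Permissions', 'r')])]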
def transform_url(result):
""" Ensures the resulting URL string does not contain extra / characters """
import re
result = re.sub('//', '/', result)
result = re.sub('/', '//', result, count=1)
    return encode_url_path(result)
# source: _transformers.py
def transform_url_without_encode(result):
""" Ensures the resulting URL string does not contain extra / characters """
import re
result = re.sub('//', '/', result)
result = re.sub('/', '//', result, count=1)
    return result
# source: _transformers.py
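
# Illustrative usage (added; not part of the original module). Assumes transform_url_without_encode
# above is in scope; duplicate slashes are collapsed and the protocol separator is then restored.
def _demo_transform_url_without_encode():
    url = 'https://account.blob.core.windows.net//container//dir//blob'
    return transform_url_without_encode(url)
    # 'https://account.blob.core.windows.net/container/dir/blob'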
def transform_fs_access_output(result):
""" Transform to convert SDK output into a form that is more readily
usable by the CLI and tools such as jpterm. """
new_result = {}
useful_keys = ['acl', 'group', 'owner', 'permissions']
for key in useful_keys:
new_result[key] = result[key]
    return new_result
# source: _transformers.py
def transform_fs_public_access_output(result):
""" Transform to convert SDK output into a form that is more readily
usable by the CLI and tools such as jpterm. """
if result.public_access == 'blob':
result.public_access = 'file'
if result.public_access == 'container':
result.public_access = 'filesystem'
    return result
# source: _transformers.py
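
# Illustrative sketch (added; not part of the original module). Assumes transform_fs_public_access_output
# above is in scope; SimpleNamespace stands in for the SDK result object carrying public_access.
def _demo_transform_fs_public_access_output():
    from types import SimpleNamespace
    result = SimpleNamespace(public_access='container')
    return transform_fs_public_access_output(result).public_access    # 'filesystem'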
def transform_fs_list_public_access_output(result):
""" Transform to convert SDK output into a form that is more readily
usable by the CLI and tools such as jpterm. """
new_result = list(result)
for i, item in enumerate(new_result):
new_result[i] = transform_fs_public_access_output(item)
    return new_result
# source: _transformers.py
def transform_response_with_bytearray(response):
""" transform bytearray to string """
from msrest import Serializer
for item in response:
if response[item] and isinstance(response[item], (bytes, bytearray)):
response[item] = Serializer.serialize_bytearray(response[item])
    return response
# source: _transformers.py
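
# Illustrative usage (added; not part of the original module). Assumes transform_response_with_bytearray
# above is in scope and that msrest is installed; bytes values are rendered as base64 strings.
def _demo_transform_response_with_bytearray():
    return transform_response_with_bytearray({'tag': b'\x01\x02', 'name': 'blob1'})
    # {'tag': 'AQI=', 'name': 'blob1'}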
def transform_file_directory_result(cli_ctx):
"""
    Transform the result returned from the file and directory listing API.
    This transformer adds and removes properties on the File and Directory objects in the given list
    in order to align the objects' properties and offer a better view of the file and directory
    listing.
"""
def transformer(result):
if getattr(result, 'next_marker', None):
logger.warning('Next Marker:')
logger.warning(result.next_marker)
t_file, t_dir = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'File', 'Directory', mod='file.models')
return_list = []
for each in result:
if isinstance(each, t_file):
delattr(each, 'content')
setattr(each, 'type', 'file')
elif isinstance(each, t_dir):
setattr(each, 'type', 'dir')
return_list.append(each)
return return_list
    return transformer
# source: _format_azure_stack.py
def collect_blobs(blob_service, container, pattern=None):
"""
    List the blobs in the given blob container, filtering them by comparing their paths to the given pattern.
"""
    return [name for (name, _) in collect_blob_objects(blob_service, container, pattern)]
# source: util.py
def collect_blob_objects(blob_service, container, pattern=None):
"""
    List the blob names and blobs in the given blob container, filtering them by comparing their paths to
    the given pattern.
"""
if not blob_service:
raise ValueError('missing parameter blob_service')
if not container:
raise ValueError('missing parameter container')
if not _pattern_has_wildcards(pattern):
from azure.core.exceptions import ResourceNotFoundError
try:
yield pattern, blob_service.get_blob_client(container, pattern).get_blob_properties()
except ResourceNotFoundError:
return
else:
if hasattr(blob_service, 'list_blobs'):
blobs = blob_service.list_blobs(container)
else:
container_client = blob_service.get_container_client(container=container)
prefix = _get_prefix(pattern)
if prefix:
blobs = container_client.list_blobs(name_starts_with=prefix)
else:
blobs = container_client.list_blobs()
for blob in blobs:
try:
blob_name = blob.name.encode('utf-8') if isinstance(blob.name, unicode) else blob.name
except NameError:
blob_name = blob.name
if not pattern or _match_path(blob_name, pattern):
                yield blob_name, blob
# source: util.py
def collect_files(cmd, file_service, share, pattern=None):
"""
    Search files in the given file share recursively, filtering them by matching their paths against the given pattern.
    Returns an iterable of tuples (dir, name).
"""
if not file_service:
raise ValueError('missing parameter file_service')
if not share:
raise ValueError('missing parameter share')
if not _pattern_has_wildcards(pattern):
return [pattern]
    return glob_files_remotely(cmd, file_service, share, pattern)
# source: util.py
def collect_files_track2(file_service, share, pattern=None):
"""
Search files in the given file share recursively. Filter the files by matching their path to the given pattern.
Returns an iterable of tuple (dir, name).
"""
if not file_service:
raise ValueError('missing parameter file_service')
if not share:
raise ValueError('missing parameter share')
if not _pattern_has_wildcards(pattern):
return [pattern]
    return glob_files_remotely_track2(file_service, share, pattern)
# source: util.py
def glob_files_locally(folder_path, pattern):
"""glob files in local folder based on the given pattern"""
pattern = os.path.join(folder_path, pattern.lstrip('/')) if pattern else None
len_folder_path = len(folder_path) + 1
for root, _, files in os.walk(folder_path):
for f in files:
full_path = os.path.join(root, f)
if not pattern or _match_path(full_path, pattern):
                yield (full_path, full_path[len_folder_path:])
# source: util.py
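
# Illustrative usage (added; not part of the original module). Assumes glob_files_locally above is
# in scope along with the module's `import os`; with pattern=None every file is yielded (pattern
# matching relies on the module-level _match_path helper defined elsewhere in util.py).
def _demo_glob_files_locally():
    import tempfile
    folder = tempfile.mkdtemp()
    with open(os.path.join(folder, 'a.txt'), 'w') as sample:
        sample.write('sample')
    return list(glob_files_locally(folder, pattern=None))
    # [(os.path.join(folder, 'a.txt'), 'a.txt')]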
def glob_files_remotely(cmd, client, share_name, pattern, snapshot=None):
"""glob the files in remote file share based on the given pattern"""
from collections import deque
t_dir, t_file = cmd.get_models('file.models#Directory', 'file.models#File')
queue = deque([""])
while queue:
current_dir = queue.pop()
for f in client.list_directories_and_files(share_name, current_dir, snapshot=snapshot):
if isinstance(f, t_file):
if not pattern or _match_path(os.path.join(current_dir, f.name), pattern):
yield current_dir, f.name
elif isinstance(f, t_dir):
                queue.appendleft(os.path.join(current_dir, f.name))
# source: util.py
def glob_files_remotely_track2(client, share_name, pattern, snapshot=None, is_share_client=False):
"""glob the files in remote file share based on the given pattern"""
from collections import deque
if not is_share_client:
client = client.get_share_client(share_name, snapshot=snapshot)
queue = deque([""])
while queue:
current_dir = queue.pop()
for f in client.list_directories_and_files(current_dir):
if not f['is_directory']:
if not pattern or _match_path(os.path.join(current_dir, f['name']), pattern):
yield current_dir, f['name']
else:
                queue.appendleft(os.path.join(current_dir, f['name']))
# source: util.py
def multi_service_properties_factory(cli_ctx, kwargs):
"""Create multiple data services properties instance based on the services option"""
from .services_wrapper import ServiceProperties
services = kwargs.pop('services', [])
def create_service(service):
service_to_param = {'b': ['blob', cf_blob_service], 'f': ['file', cf_share_service],
'q': ['queue', cf_queue_service],
't': ['table', cf_table_service]}
name, client = service_to_param[service]
return ServiceProperties(cli_ctx, name, client(cli_ctx, ori_kwargs.copy()))
ori_kwargs = kwargs.copy()
for i in ['connection_string', 'account_name', 'account_key', 'sas_token', 'account_url', 'token_credential']:
kwargs.pop(i, None)
    return [create_service(s) for s in services]
# source: _client_factory.py
def storage_command(self, name, method_name=None, command_type=None, oauth=False, generic_update=None, **kwargs):
""" Registers an Azure CLI Storage Data Plane command. These commands always include the four parameters which
can be used to obtain a storage client: account-name, account-key, connection-string, and sas-token. """
if generic_update:
command_name = '{} {}'.format(self.group_name, name) if self.group_name else name
self.generic_update_command(name, **kwargs)
elif command_type:
command_name = self.command(name, method_name, command_type=command_type, **kwargs)
else:
command_name = self.command(name, method_name, **kwargs)
self._register_data_plane_account_arguments(command_name)
if oauth:
self._register_data_plane_oauth_arguments(command_name)
    _merge_new_exception_handler(kwargs, self.account_key_exception_handler())
# source: __init__.py
def get_handler_suppress_some_400(cls):
def handler(ex):
if hasattr(ex, 'status_code') and ex.status_code == 403 and hasattr(ex, 'error_code'):
# TODO: Revisit the logic here once the service team updates their response
if ex.error_code == 'AuthorizationPermissionMismatch':
message = """
You do not have the required permissions needed to perform this operation.
Depending on your operation, you may need to be assigned one of the following roles:
"Storage Blob Data Owner"
"Storage Blob Data Contributor"
"Storage Blob Data Reader"
"Storage Queue Data Contributor"
"Storage Queue Data Reader"
"Storage Table Data Contributor"
"Storage Table Data Reader"
If you want to use the old authentication method and allow querying for the right account key, please use the "--auth-mode" parameter and "key" value.
"""
ex.args = (message,)
elif ex.error_code == 'AuthorizationFailure':
message = """
The request may be blocked by network rules of storage account. Please check network rule set using 'az storage account show -n accountname --query networkRuleSet'.
If you want to change the default action to apply when no rule matches, please use 'az storage account update'.
"""
ex.args = (message,)
elif ex.error_code == 'AuthenticationFailed':
message = """
Authentication failure. This may be caused by either invalid account key, connection string or sas token value provided for your storage account.
"""
ex.args = (message,)
if hasattr(ex, 'status_code') and ex.status_code == 409\
and hasattr(ex, 'error_code') and ex.error_code == 'NoPendingCopyOperation':
pass
    return handler
# source: __init__.py
def _register_data_plane_account_arguments(self, command_name):
""" Add parameters required to create a storage client """
from azure.cli.core.commands.parameters import get_resource_name_completion_list
from azure.cli.command_modules.storage._validators import is_storagev2, validate_client_parameters
command = self.command_loader.command_table.get(command_name, None)
if not command:
return
group_name = 'Storage Account'
command.add_argument('account_name', '--account-name', required=False, default=None,
arg_group=group_name,
completer=get_resource_name_completion_list('Microsoft.Storage/storageAccounts'),
help='Storage account name. Related environment variable: AZURE_STORAGE_ACCOUNT. Must be '
'used in conjunction with either storage account key or a SAS token. If neither are '
'present, the command will try to query the storage account key using the '
'authenticated Azure account. If a large number of storage commands are executed the '
'API quota may be hit')
command.add_argument('connection_string', '--connection-string', required=False, default=None,
validator=validate_client_parameters, arg_group=group_name,
help='Storage account connection string. Environment variable: '
'AZURE_STORAGE_CONNECTION_STRING')
resource_type = command.command_kwargs['resource_type']
if is_storagev2(resource_type.value[0]):
endpoint_argument_dict = {
ResourceType.DATA_STORAGE_BLOB: '--blob-endpoint',
ResourceType.DATA_STORAGE_FILESHARE: '--file-endpoint',
ResourceType.DATA_STORAGE_TABLE: '--table-endpoint',
ResourceType.DATA_STORAGE_QUEUE: '--queue-endpoint',
ResourceType.DATA_STORAGE_FILEDATALAKE: '--blob-endpoint'
}
command.add_argument('account_url', endpoint_argument_dict.get(resource_type, '--service-endpoint'),
required=False, default=None, arg_group=group_name,
help='Storage data service endpoint. Must be used in conjunction with either '
'storage account key or a SAS token. You can find each service primary endpoint '
'with `az storage account show`. '
'Environment variable: AZURE_STORAGE_SERVICE_ENDPOINT')
command.add_argument('account_key', '--account-key', required=False, default=None,
arg_group=group_name,
help='Storage account key. Must be used in conjunction with storage account '
'name or service endpoint. Environment variable: AZURE_STORAGE_KEY')
command.add_argument('sas_token', '--sas-token', required=False, default=None,
arg_group=group_name,
help='A Shared Access Signature (SAS). Must be used in conjunction with storage '
'account name or service endpoint. Environment variable: AZURE_STORAGE_SAS_TOKEN')
else:
command.add_argument('account_key', '--account-key', required=False, default=None,
arg_group=group_name,
help='Storage account key. Must be used in conjunction with storage account name. '
'Environment variable: AZURE_STORAGE_KEY')
command.add_argument('sas_token', '--sas-token', required=False, default=None,
arg_group=group_name,
help='A Shared Access Signature (SAS). Must be used in conjunction with storage '
                              'account name. Environment variable: AZURE_STORAGE_SAS_TOKEN')
# source: __init__.py
def get_handler_suppress_some_400(cls):
def handler(ex):
if hasattr(ex, 'status_code') and ex.status_code == 403:
# TODO: Revisit the logic here once the service team updates their response
if 'AuthorizationPermissionMismatch' in ex.args[0]:
message = """
You do not have the required permissions needed to perform this operation.
Depending on your operation, you may need to be assigned one of the following roles:
"Storage Blob Data Contributor"
"Storage Blob Data Reader"
"Storage Queue Data Contributor"
"Storage Queue Data Reader"
"Storage Table Data Contributor"
"Storage Table Data Reader"
If you want to use the old authentication method and allow querying for the right account key, please use the "--auth-mode" parameter and "key" value.
"""
ex.args = (message,)
elif 'AuthorizationFailure' in ex.args[0]:
message = """
The request may be blocked by network rules of storage account. Please check network rule set using 'az storage account show -n accountname --query networkRuleSet'.
If you want to change the default action to apply when no rule matches, please use 'az storage account update'.
"""
ex.args = (message,)
elif 'AuthenticationFailed' in ex.args[0]:
message = """
Authentication failure. This may be caused by either invalid account key, connection string or sas token value provided for your storage account.
"""
ex.args = (message,)
if hasattr(ex, 'status_code') and ex.status_code == 409 and 'NoPendingCopyOperation' in ex.args[0]:
pass
    return handler
# source: __init__.py
def transform_file_output(result):
""" Transform to convert SDK file/dir list output to something that
more clearly distinguishes between files and directories. """
from collections import OrderedDict
new_result = []
iterable = result if isinstance(result, list) else result.get('items', result)
for item in iterable:
new_entry = OrderedDict()
entity_type = item['type'] # type property is added by transform_file_directory_result
is_dir = entity_type == 'dir'
new_entry['Name'] = item['name'] + '/' if is_dir else item['name']
new_entry['Content Length'] = ' ' if is_dir else item['properties']['contentLength']
new_entry['Type'] = item['type']
new_entry['Last Modified'] = item['properties']['lastModified'] or ' '
new_result.append(new_entry)
    return sorted(new_result, key=lambda k: k['Name'])
# source: _format.py
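
# Illustrative usage (added; not part of the original module). Assumes transform_file_output above
# is in scope; the input mimics a listing after transform_file_directory_result has tagged each
# entry with a 'type'.
def _demo_transform_file_output():
    listing = [
        {'type': 'dir', 'name': 'logs', 'properties': {'contentLength': None, 'lastModified': None}},
        {'type': 'file', 'name': 'a.txt', 'properties': {'contentLength': 12, 'lastModified': '2021-01-01'}},
    ]
    return transform_file_output(listing)
    # rows sorted by Name: a.txt (file, length 12), then logs/ (dir, blank length)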
def transform_file_directory_result(result):
"""
    Transform the result returned from the file and directory listing API.
    This transformer adds and removes properties on the File and Directory objects in the given list
    in order to align the objects' properties and offer a better view of the file and directory
    listing.
"""
from ._transformers import transform_share_directory_json_output, transform_share_file_json_output
return_list = []
for each in result:
if getattr(each, 'is_directory', None):
setattr(each, 'type', 'dir')
each = transform_share_directory_json_output(each)
else:
setattr(each, 'type', 'file')
each = transform_share_file_json_output(each)
return_list.append(each)
    return return_list
# source: _format.py
def get_account_info(self, group, name):
"""Returns the storage account name and key in a tuple"""
    return name, self.get_account_key(group, name)
# source: tests/storage_test_util.py
def storage_blob_copy_batch(cmd, client, source_client, container_name=None,
destination_path=None, source_container=None, source_share=None,
source_sas=None, pattern=None, dryrun=False):
"""Copy a group of blob or files to a blob container."""
if dryrun:
logger.warning('copy files or blobs to blob container')
logger.warning(' account %s', client.account_name)
logger.warning(' container %s', container_name)
logger.warning(' source %s', source_container or source_share)
logger.warning('source type %s', 'blob' if source_container else 'file')
logger.warning(' pattern %s', pattern)
logger.warning(' operations')
if source_container:
# copy blobs for blob container
# if the source client is None, recreate one from the destination client.
source_client = source_client or create_blob_service_from_storage_client(cmd, client)
if not source_sas:
source_sas = create_short_lived_container_sas(cmd, source_client.account_name, source_client.account_key,
source_container)
# pylint: disable=inconsistent-return-statements
def action_blob_copy(blob_name):
if dryrun:
logger.warning(' - copy blob %s', blob_name)
else:
return _copy_blob_to_blob_container(client, source_client, container_name, destination_path,
source_container, source_sas, blob_name)
return list(filter_none(action_blob_copy(blob) for blob in collect_blobs(source_client,
source_container,
pattern)))
if source_share:
# copy blob from file share
# if the source client is None, recreate one from the destination client.
source_client = source_client or create_file_share_from_storage_client(cmd, client)
if not source_sas:
source_sas = create_short_lived_share_sas(cmd, source_client.account_name, source_client.account_key,
source_share)
# pylint: disable=inconsistent-return-statements
def action_file_copy(file_info):
dir_name, file_name = file_info
if dryrun:
logger.warning(' - copy file %s', os.path.join(dir_name, file_name))
else:
return _copy_file_to_blob_container(client, source_client, container_name, destination_path,
source_share, source_sas, dir_name, file_name)
return list(filter_none(action_file_copy(file) for file in collect_files(cmd,
source_client,
source_share,
pattern)))
raise ValueError('Failed to find source. Neither blob container nor file share is specified') | Copy a group of blobs or files to a blob container. | storage_blob_copy_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/blob_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/blob_azure_stack.py | MIT
def transform_blob_type(cmd, blob_type):
"""
get_blob_types() returns ['block', 'page', 'append'];
transform it to BlobType in track2
"""
BlobType = cmd.get_models('_models#BlobType', resource_type=ResourceType.DATA_STORAGE_BLOB)
if blob_type == 'block':
return BlobType.BlockBlob
if blob_type == 'page':
return BlobType.PageBlob
if blob_type == 'append':
return BlobType.AppendBlob
return None | get_blob_types() returns ['block', 'page', 'append'];
transform it to BlobType in track2 | transform_blob_type | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/blob_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/blob_azure_stack.py | MIT |
def upload_blob(cmd, client, container_name, blob_name, file_path, blob_type=None, content_settings=None, metadata=None,
validate_content=False, maxsize_condition=None, max_connections=2, lease_id=None, tier=None,
if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None, timeout=None,
progress_callback=None, encryption_scope=None):
"""Upload a blob to a container."""
if encryption_scope:
count = os.path.getsize(file_path)
with open(file_path, 'rb') as stream:
data = stream.read(count)
from azure.core import MatchConditions
upload_args = {
'content_settings': content_settings,
'metadata': metadata,
'timeout': timeout,
'if_modified_since': if_modified_since,
'if_unmodified_since': if_unmodified_since,
'blob_type': transform_blob_type(cmd, blob_type),
'validate_content': validate_content,
'lease': lease_id,
'max_concurrency': max_connections,
}
if cmd.supported_api_version(min_api='2017-04-17') and tier:
upload_args['premium_page_blob_tier'] = tier
if maxsize_condition:
upload_args['maxsize_condition'] = maxsize_condition
if cmd.supported_api_version(min_api='2016-05-31'):
upload_args['validate_content'] = validate_content
# Precondition Check
if if_match:
if if_match == '*':
upload_args['match_condition'] = MatchConditions.IfPresent
else:
upload_args['etag'] = if_match
upload_args['match_condition'] = MatchConditions.IfNotModified
if if_none_match:
upload_args['etag'] = if_none_match
upload_args['match_condition'] = MatchConditions.IfModified
response = client.upload_blob(data=data, length=count, encryption_scope=encryption_scope, **upload_args)
if response['content_md5'] is not None:
from msrest import Serializer
response['content_md5'] = Serializer.serialize_bytearray(response['content_md5'])
return response
t_content_settings = cmd.get_models('blob.models#ContentSettings')
content_settings = guess_content_type(file_path, content_settings, t_content_settings)
def upload_append_blob():
check_blob_args = {
'container_name': container_name,
'blob_name': blob_name,
'lease_id': lease_id,
'if_modified_since': if_modified_since,
'if_unmodified_since': if_unmodified_since,
'if_match': if_match,
'if_none_match': if_none_match,
'timeout': timeout
}
if client.exists(container_name, blob_name):
# used to check for the preconditions as append_blob_from_path() cannot
client.get_blob_properties(**check_blob_args)
else:
client.create_blob(content_settings=content_settings, metadata=metadata, **check_blob_args)
append_blob_args = {
'container_name': container_name,
'blob_name': blob_name,
'file_path': file_path,
'progress_callback': progress_callback,
'maxsize_condition': maxsize_condition,
'lease_id': lease_id,
'timeout': timeout
}
if cmd.supported_api_version(min_api='2016-05-31'):
append_blob_args['validate_content'] = validate_content
return client.append_blob_from_path(**append_blob_args)
def upload_block_blob():
# increase the block size to 100MB when the block list will contain more than 50,000 blocks
if os.path.isfile(file_path) and os.stat(file_path).st_size > 50000 * 4 * 1024 * 1024:
client.MAX_BLOCK_SIZE = 100 * 1024 * 1024
client.MAX_SINGLE_PUT_SIZE = 256 * 1024 * 1024
create_blob_args = {
'container_name': container_name,
'blob_name': blob_name,
'file_path': file_path,
'progress_callback': progress_callback,
'content_settings': content_settings,
'metadata': metadata,
'max_connections': max_connections,
'lease_id': lease_id,
'if_modified_since': if_modified_since,
'if_unmodified_since': if_unmodified_since,
'if_match': if_match,
'if_none_match': if_none_match,
'timeout': timeout
}
if cmd.supported_api_version(min_api='2017-04-17') and tier:
create_blob_args['premium_page_blob_tier'] = tier
if cmd.supported_api_version(min_api='2016-05-31'):
create_blob_args['validate_content'] = validate_content
return client.create_blob_from_path(**create_blob_args)
type_func = {
'append': upload_append_blob,
'block': upload_block_blob,
'page': upload_block_blob # same implementation
}
return type_func[blob_type]() | Upload a blob to a container. | upload_blob | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/blob_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/blob_azure_stack.py | MIT |
def storage_file_upload_batch(cmd, client, destination, source, destination_path=None, pattern=None, dryrun=False,
validate_content=False, content_settings=None, max_connections=1, metadata=None,
progress_callback=None):
""" Upload local files to Azure Storage File Share in batch """
from azure.cli.command_modules.storage.util import glob_files_locally, normalize_blob_file_path
source_files = list(glob_files_locally(source, pattern))
logger = get_logger(__name__)
settings_class = cmd.get_models('file.models#ContentSettings')
if dryrun:
logger.info('upload files to file share')
logger.info(' account %s', client.account_name)
logger.info(' share %s', destination)
logger.info(' total %d', len(source_files))
return [{'File': client.make_file_url(destination, os.path.dirname(dst) or None, os.path.basename(dst)),
'Type': guess_content_type(src, content_settings, settings_class).content_type} for src, dst in
source_files]
# TODO: Performance improvement
# 1. Upload files in parallel
def _upload_action(src, dst):
dst = normalize_blob_file_path(destination_path, dst)
dir_name = os.path.dirname(dst)
file_name = os.path.basename(dst)
_make_directory_in_files_share(client, destination, dir_name)
create_file_args = {'share_name': destination, 'directory_name': dir_name, 'file_name': file_name,
'local_file_path': src, 'progress_callback': progress_callback,
'content_settings': guess_content_type(src, content_settings, settings_class),
'metadata': metadata, 'max_connections': max_connections}
if cmd.supported_api_version(min_api='2016-05-31'):
create_file_args['validate_content'] = validate_content
logger.warning('uploading %s', src)
client.create_file_from_path(**create_file_args)
return client.make_file_url(destination, dir_name, file_name)
return [_upload_action(src, dst) for src, dst in source_files] | Upload local files to Azure Storage File Share in batch | storage_file_upload_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | MIT |
def storage_file_download_batch(cmd, client, source, destination, pattern=None, dryrun=False, validate_content=False,
max_connections=1, progress_callback=None, snapshot=None):
"""
Download files from file share to local directory in batch
"""
from azure.cli.command_modules.storage.util import glob_files_remotely, mkdir_p
source_files = glob_files_remotely(cmd, client, source, pattern)
if dryrun:
source_files_list = list(source_files)
logger = get_logger(__name__)
logger.warning('download files from file share')
logger.warning(' account %s', client.account_name)
logger.warning(' share %s', source)
logger.warning('destination %s', destination)
logger.warning(' pattern %s', pattern)
logger.warning(' total %d', len(source_files_list))
logger.warning(' operations')
for f in source_files_list:
logger.warning(' - %s/%s => %s', f[0], f[1], os.path.join(destination, *f))
return []
def _download_action(pair):
destination_dir = os.path.join(destination, pair[0])
mkdir_p(destination_dir)
get_file_args = {'share_name': source, 'directory_name': pair[0], 'file_name': pair[1],
'file_path': os.path.join(destination, *pair), 'max_connections': max_connections,
'progress_callback': progress_callback, 'snapshot': snapshot}
if cmd.supported_api_version(min_api='2016-05-31'):
get_file_args['validate_content'] = validate_content
client.get_file_to_path(**get_file_args)
return client.make_file_url(source, *pair)
return [_download_action(f) for f in source_files] | Download files from file share to local directory in batch | storage_file_download_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | MIT |
def storage_file_copy_batch(cmd, client, source_client, destination_share=None, destination_path=None,
source_container=None, source_share=None, source_sas=None, pattern=None, dryrun=False,
metadata=None, timeout=None):
"""
Copy a group of files asynchronously
"""
logger = None
if dryrun:
logger = get_logger(__name__)
logger.warning('copy files or blobs to file share')
logger.warning(' account %s', client.account_name)
logger.warning(' share %s', destination_share)
logger.warning(' path %s', destination_path)
logger.warning(' source %s', source_container or source_share)
logger.warning('source type %s', 'blob' if source_container else 'file')
logger.warning(' pattern %s', pattern)
logger.warning(' operations')
if source_container:
# copy blobs to file share
# if the source client is None, recreate one from the destination client.
source_client = source_client or create_blob_service_from_storage_client(cmd, client)
# the cache of existing directories in the destination file share. the cache helps to avoid
# repeatedly creating existing directories so as to optimize performance.
existing_dirs = set()
if not source_sas:
source_sas = create_short_lived_container_sas(cmd, source_client.account_name, source_client.account_key,
source_container)
# pylint: disable=inconsistent-return-statements
def action_blob_copy(blob_name):
if dryrun:
logger.warning(' - copy blob %s', blob_name)
else:
return _create_file_and_directory_from_blob(client, source_client, destination_share, source_container,
source_sas, blob_name, destination_dir=destination_path,
metadata=metadata, timeout=timeout,
existing_dirs=existing_dirs)
return list(
filter_none(action_blob_copy(blob) for blob in collect_blobs(source_client, source_container, pattern)))
if source_share:
# copy files from share to share
# if the source client is None, assume the file share is in the same storage account as
# destination, therefore client is reused.
source_client = source_client or client
# the cache of existing directories in the destination file share. the cache helps to avoid
# repeatedly creating existing directories so as to optimize performance.
existing_dirs = set()
if not source_sas:
source_sas = create_short_lived_share_sas(cmd, source_client.account_name, source_client.account_key,
source_share)
# pylint: disable=inconsistent-return-statements
def action_file_copy(file_info):
dir_name, file_name = file_info
if dryrun:
logger.warning(' - copy file %s', os.path.join(dir_name, file_name))
else:
return _create_file_and_directory_from_file(client, source_client, destination_share, source_share,
source_sas, dir_name, file_name,
destination_dir=destination_path, metadata=metadata,
timeout=timeout, existing_dirs=existing_dirs)
return list(filter_none(
action_file_copy(file) for file in collect_files(cmd, source_client, source_share, pattern)))
# won't happen, the validator should ensure either source_container or source_share is set
raise ValueError('Failed to find source. Neither blob container nor file share is specified.') | Copy a group of files asynchronously | storage_file_copy_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | MIT
def storage_file_delete_batch(cmd, client, source, pattern=None, dryrun=False, timeout=None):
"""
Delete files from file share in batch
"""
def delete_action(file_pair):
delete_file_args = {'share_name': source, 'directory_name': file_pair[0], 'file_name': file_pair[1],
'timeout': timeout}
return client.delete_file(**delete_file_args)
from azure.cli.command_modules.storage.util import glob_files_remotely
source_files = list(glob_files_remotely(cmd, client, source, pattern))
if dryrun:
logger = get_logger(__name__)
logger.warning('delete files from %s', source)
logger.warning(' pattern %s', pattern)
logger.warning(' share %s', source)
logger.warning(' total %d', len(source_files))
logger.warning(' operations')
for f in source_files:
logger.warning(' - %s/%s', f[0], f[1])
return []
for f in source_files:
delete_action(f) | Delete files from file share in batch | storage_file_delete_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | MIT |
def _create_file_and_directory_from_blob(file_service, blob_service, share, container, sas, blob_name,
destination_dir=None, metadata=None, timeout=None, existing_dirs=None):
"""
Copy a blob to file share and create the directory if needed.
"""
from azure.common import AzureException
from azure.cli.command_modules.storage.util import normalize_blob_file_path
blob_url = blob_service.make_blob_url(container, encode_for_url(blob_name), sas_token=sas)
full_path = normalize_blob_file_path(destination_dir, blob_name)
file_name = os.path.basename(full_path)
dir_name = os.path.dirname(full_path)
_make_directory_in_files_share(file_service, share, dir_name, existing_dirs)
try:
file_service.copy_file(share, dir_name, file_name, blob_url, metadata, timeout)
return file_service.make_file_url(share, dir_name, file_name)
except AzureException:
error_template = 'Failed to copy blob {} to file share {}. Please check if you have permission to read ' \
'the source or set a correct SAS token.'
from knack.util import CLIError
raise CLIError(error_template.format(blob_name, share)) | Copy a blob to file share and create the directory if needed. | _create_file_and_directory_from_blob | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | MIT |
def _create_file_and_directory_from_file(file_service, source_file_service, share, source_share, sas, source_file_dir,
source_file_name, destination_dir=None, metadata=None, timeout=None,
existing_dirs=None):
"""
Copy a file from one file share to another
"""
from azure.common import AzureException
from azure.cli.command_modules.storage.util import normalize_blob_file_path
file_url, source_file_dir, source_file_name = make_encoded_file_url_and_params(source_file_service, source_share,
source_file_dir, source_file_name,
sas_token=sas)
full_path = normalize_blob_file_path(destination_dir, os.path.join(source_file_dir, source_file_name))
file_name = os.path.basename(full_path)
dir_name = os.path.dirname(full_path)
_make_directory_in_files_share(file_service, share, dir_name, existing_dirs)
try:
file_service.copy_file(share, dir_name, file_name, file_url, metadata, timeout)
return file_service.make_file_url(share, dir_name or None, file_name)
except AzureException:
error_template = 'Failed to copy file {} from share {} to file share {}. Please check if ' \
'you have the right permission to read the source or set a correct SAS token.'
from knack.util import CLIError
raise CLIError(error_template.format(file_name, source_share, share)) | Copy a file from one file share to another | _create_file_and_directory_from_file | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | MIT |
def _make_directory_in_files_share(file_service, file_share, directory_path, existing_dirs=None):
"""
Create directories recursively.
This method accepts an existing_dirs set which serves as a cache of existing directories. If the
parameter is given, the method searches the set first to avoid repeatedly creating a directory
which already exists.
"""
from azure.common import AzureHttpError
if not directory_path:
return
parents = [directory_path]
p = os.path.dirname(directory_path)
while p:
parents.append(p)
p = os.path.dirname(p)
for dir_name in reversed(parents):
if existing_dirs and (dir_name in existing_dirs):
continue
try:
file_service.create_directory(share_name=file_share, directory_name=dir_name, fail_on_exist=False)
except AzureHttpError:
from knack.util import CLIError
raise CLIError('Failed to create directory {}'.format(dir_name))
if existing_dirs:
existing_dirs.add(directory_path) | Create directories recursively.
This method accepts an existing_dirs set which serves as a cache of existing directories. If the
parameter is given, the method searches the set first to avoid repeatedly creating a directory
which already exists. | _make_directory_in_files_share | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file_azure_stack.py | MIT |
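The creation order used by the helper above can be seen by running just its path-splitting portion; a minimal sketch with a sample path:
import os
directory_path = 'a/b/c'
parents = [directory_path]
p = os.path.dirname(directory_path)
while p:
    parents.append(p)
    p = os.path.dirname(p)
print(list(reversed(parents)))  # ['a', 'a/b', 'a/b/c'] -- parent directories are created top-down before the leaf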
def storage_blob_copy_batch(cmd, client, source_client, container_name=None, destination_path=None,
source_container=None, source_share=None, source_sas=None, pattern=None, dryrun=False,
source_account_name=None, source_account_key=None, **kwargs):
"""Copy a group of blob or files to a blob container."""
if dryrun:
logger.warning('copy files or blobs to blob container')
logger.warning(' account %s', client.account_name)
logger.warning(' container %s', container_name)
logger.warning(' source %s', source_container or source_share)
logger.warning('source type %s', 'blob' if source_container else 'file')
logger.warning(' pattern %s', pattern)
logger.warning(' operations')
if source_container:
# copy blobs for blob container, skip empty dir
# pylint: disable=inconsistent-return-statements
def action_blob_copy(blob_name):
if dryrun:
logger.warning(' - copy blob %s', blob_name)
else:
return _copy_blob_to_blob_container(cmd, blob_service=client, source_blob_service=source_client,
destination_container=container_name,
destination_path=destination_path,
source_container=source_container,
source_blob_name=blob_name,
source_sas=source_sas,
**kwargs)
return list(filter_none(action_blob_copy(blob) for blob in collect_blobs(source_client,
source_container,
pattern)))
if source_share:
# copy blob from file share, skip empty dir
# pylint: disable=inconsistent-return-statements
def action_file_copy(file_info):
dir_name, file_name = file_info
if dryrun:
logger.warning(' - copy file %s', os.path.join(dir_name, file_name))
else:
return _copy_file_to_blob_container(client, source_client, container_name, destination_path,
source_share, source_sas, dir_name, file_name)
return list(filter_none(action_file_copy(file) for file in collect_files_track2(source_client,
source_share,
pattern)))
raise ValueError('Failed to find source. Neither blob container nor file share is specified') | Copy a group of blobs or files to a blob container. | storage_blob_copy_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/blob.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/blob.py | MIT
def transform_blob_type(cmd, blob_type):
"""
get_blob_types() returns ['block', 'page', 'append'];
transform it to BlobType in track2
"""
BlobType = cmd.get_models('_models#BlobType', resource_type=ResourceType.DATA_STORAGE_BLOB)
if blob_type == 'block':
return BlobType.BlockBlob
if blob_type == 'page':
return BlobType.PageBlob
if blob_type == 'append':
return BlobType.AppendBlob
return None | get_blob_types() returns ['block', 'page', 'append'];
transform it to BlobType in track2 | transform_blob_type | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/blob.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/blob.py | MIT |
def upload_blob(cmd, client, file_path=None, container_name=None, blob_name=None, blob_type=None,
metadata=None, validate_content=False, maxsize_condition=None, max_connections=2, lease_id=None,
if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None,
timeout=None, progress_callback=None, encryption_scope=None, overwrite=None, data=None,
length=None, **kwargs):
"""Upload a blob to a container."""
upload_args = {
'blob_type': transform_blob_type(cmd, blob_type),
'lease': lease_id,
'max_concurrency': max_connections
}
if file_path and 'content_settings' in kwargs:
t_blob_content_settings = cmd.get_models('_models#ContentSettings',
resource_type=ResourceType.DATA_STORAGE_BLOB)
kwargs['content_settings'] = guess_content_type(file_path, kwargs['content_settings'], t_blob_content_settings)
if overwrite is not None:
upload_args['overwrite'] = overwrite
if maxsize_condition:
upload_args['maxsize_condition'] = maxsize_condition
if cmd.supported_api_version(min_api='2016-05-31'):
upload_args['validate_content'] = validate_content
if progress_callback:
upload_args['progress_hook'] = progress_callback
check_blob_args = {
'if_modified_since': if_modified_since,
'if_unmodified_since': if_unmodified_since,
'if_match': if_match,
'if_none_match': if_none_match,
}
# used to check for the preconditions as upload_append_blob() cannot
if blob_type == 'append':
if client.exists(timeout=timeout):
client.get_blob_properties(lease=lease_id, timeout=timeout, **check_blob_args)
upload_args['max_concurrency'] = 1
else:
upload_args['if_modified_since'] = if_modified_since
upload_args['if_unmodified_since'] = if_unmodified_since
upload_args['if_match'] = if_match
upload_args['if_none_match'] = if_none_match
# Because the contents of the uploaded file may be too large, it should be passed in as a stream object;
# upload_blob() reads file data in batches to avoid OOM problems
try:
if file_path:
length = os.path.getsize(file_path)
_adjust_block_blob_size(client, blob_type, length)
with open(file_path, 'rb') as stream:
response = client.upload_blob(data=stream, length=length, metadata=metadata,
encryption_scope=encryption_scope,
**upload_args, **kwargs)
if data is not None:
_adjust_block_blob_size(client, blob_type, length)
try:
response = client.upload_blob(data=data, length=length, metadata=metadata,
encryption_scope=encryption_scope,
**upload_args, **kwargs)
except UnicodeEncodeError:
response = client.upload_blob(data=data.encode('UTF-8', 'ignore').decode('UTF-8'),
length=length, metadata=metadata,
encryption_scope=encryption_scope,
**upload_args, **kwargs)
except ResourceExistsError as ex:
raise AzureResponseError(
"{}\nIf you want to overwrite the existing one, please add --overwrite in your command.".format(ex.message))
# PageBlobChunkUploader verifies the file when uploading the chunk data. If the contents of the file are
# all null bytes ("\x00"), the file will not be uploaded, and the response will be None.
# Therefore, the compatibility logic for response is added to keep it consistent with track 1
if response is None:
return {
"etag": None,
"lastModified": None
}
from msrest import Serializer
if 'content_md5' in response and response['content_md5'] is not None:
response['content_md5'] = Serializer.serialize_bytearray(response['content_md5'])
if 'content_crc64' in response and response['content_crc64'] is not None:
response['content_crc64'] = Serializer.serialize_bytearray(response['content_crc64'])
return response | Upload a blob to a container. | upload_blob | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/blob.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/blob.py | MIT |
def get_block_ids(content_length, block_length):
"""Get the block id arrary from block blob length, block size"""
block_count = 0
if block_length:
block_count = content_length // block_length
if block_count * block_length != content_length:
block_count += 1
block_ids = []
for i in range(block_count):
chunk_offset = i * block_length
block_id = '{0:032d}'.format(chunk_offset)
block_ids.append(block_id)
return block_ids | Get the block id array from block blob length, block size | get_block_ids | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/blob.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/blob.py | MIT
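A worked example of get_block_ids: a 10 MiB blob split into 4 MiB blocks needs three blocks, and each id is the 32-digit, zero-padded chunk offset.
MIB = 1024 * 1024
ids = get_block_ids(10 * MIB, 4 * MIB)
print(len(ids))  # 3
print(ids[1])    # '00000000000000000000000004194304'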
def create_acl_policy(cmd, client, container_name, policy_name, start=None, expiry=None, permission=None, **kwargs):
"""Create a stored access policy on the containing object"""
t_access_policy = cmd.get_models('common.models#AccessPolicy') or cmd.get_models('models#AccessPolicy')
acl = _get_acl(cmd, client, container_name, **kwargs)
acl[policy_name] = t_access_policy(permission, expiry, start)
if hasattr(acl, 'public_access'):
kwargs['public_access'] = getattr(acl, 'public_access')
return _set_acl(cmd, client, container_name, acl, **kwargs) | Create a stored access policy on the containing object | create_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | MIT |
def get_acl_policy(cmd, client, container_name, policy_name, **kwargs):
"""Show a stored access policy on a containing object"""
acl = _get_acl(cmd, client, container_name, **kwargs)
return acl.get(policy_name) | Show a stored access policy on a containing object | get_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | MIT |
def list_acl_policies(cmd, client, container_name, **kwargs):
"""List stored access policies on a containing object"""
return _get_acl(cmd, client, container_name, **kwargs) | List stored access policies on a containing object | list_acl_policies | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | MIT |
def set_acl_policy(cmd, client, container_name, policy_name, start=None, expiry=None, permission=None, **kwargs):
"""Set a stored access policy on a containing object"""
if not (start or expiry or permission):
from knack.util import CLIError
raise CLIError('Must specify at least one property when updating an access policy.')
acl = _get_acl(cmd, client, container_name, **kwargs)
try:
policy = acl[policy_name]
policy.start = start or policy.start
policy.expiry = expiry or policy.expiry
policy.permission = permission or policy.permission
if hasattr(acl, 'public_access'):
kwargs['public_access'] = getattr(acl, 'public_access')
except KeyError:
from knack.util import CLIError
raise CLIError('ACL does not contain {}'.format(policy_name))
return _set_acl(cmd, client, container_name, acl, **kwargs) | Set a stored access policy on a containing object | set_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | MIT |
def delete_acl_policy(cmd, client, container_name, policy_name, **kwargs):
""" Delete a stored access policy on a containing object """
acl = _get_acl(cmd, client, container_name, **kwargs)
del acl[policy_name]
if hasattr(acl, 'public_access'):
kwargs['public_access'] = getattr(acl, 'public_access')
return _set_acl(cmd, client, container_name, acl, **kwargs) | Delete a stored access policy on a containing object | delete_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/acl.py | MIT |
def create_acl_policy(cmd, client, policy_name, start=None, expiry=None, permission=None, **kwargs):
"""Create a stored access policy on the containing object"""
t_access_policy = cmd.get_models('_models#AccessPolicy', resource_type=ResourceType.DATA_STORAGE_BLOB)
acl = _get_acl(cmd, client, **kwargs)
acl[policy_name] = t_access_policy(permission, expiry, start)
if hasattr(acl, 'public_access'):
kwargs['public_access'] = getattr(acl, 'public_access')
return _set_acl(cmd, client, acl, **kwargs) | Create a stored access policy on the containing object | create_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | MIT |
def get_acl_policy(cmd, client, policy_name, **kwargs):
"""Show a stored access policy on a containing object"""
acl = _get_acl(cmd, client, **kwargs)
return acl.get(policy_name) | Show a stored access policy on a containing object | get_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | MIT |
def list_acl_policies(cmd, client, **kwargs):
"""List stored access policies on a containing object"""
return _get_acl(cmd, client, **kwargs) | List stored access policies on a containing object | list_acl_policies | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | MIT |
def set_acl_policy(cmd, client, policy_name, start=None, expiry=None, permission=None, **kwargs):
"""Set a stored access policy on a containing object"""
if not (start or expiry or permission):
from knack.util import CLIError
raise CLIError('Must specify at least one property when updating an access policy.')
acl = _get_acl(cmd, client, **kwargs)
try:
policy = acl[policy_name]
if policy is None:
t_access_policy = cmd.get_models('_models#AccessPolicy', resource_type=ResourceType.DATA_STORAGE_BLOB)
acl[policy_name] = t_access_policy(permission, expiry, start)
else:
policy.start = start if start else policy.start
policy.expiry = expiry if expiry else policy.expiry
policy.permission = permission or policy.permission
if hasattr(acl, 'public_access'):
kwargs['public_access'] = getattr(acl, 'public_access')
except KeyError:
from knack.util import CLIError
raise CLIError('ACL does not contain {}'.format(policy_name))
return _set_acl(cmd, client, acl, **kwargs) | Set a stored access policy on a containing object | set_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | MIT |
def delete_acl_policy(cmd, client, policy_name, **kwargs):
""" Delete a stored access policy on a containing object """
acl = _get_acl(cmd, client, **kwargs)
del acl[policy_name]
if hasattr(acl, 'public_access'):
kwargs['public_access'] = getattr(acl, 'public_access')
return _set_acl(cmd, client, acl, **kwargs) | Delete a stored access policy on a containing object | delete_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/access_policy.py | MIT |
def add_or_rule(cmd, client, resource_group_name, account_name, policy_id,
source_container, destination_container, min_creation_time=None, prefix_match=None):
"""
Add a rule to an object replication (OR) policy
"""
policy_properties = client.get(resource_group_name, account_name, policy_id)
ObjectReplicationPolicyRule, ObjectReplicationPolicyFilter = \
cmd.get_models('ObjectReplicationPolicyRule', 'ObjectReplicationPolicyFilter')
new_or_rule = ObjectReplicationPolicyRule(
source_container=source_container,
destination_container=destination_container,
filters=ObjectReplicationPolicyFilter(prefix_match=prefix_match, min_creation_time=min_creation_time)
)
policy_properties.rules.append(new_or_rule)
return client.create_or_update(resource_group_name, account_name, policy_id, policy_properties) | Add a rule to an object replication (OR) policy | add_or_rule | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/account.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/account.py | MIT
def storage_file_upload_batch(cmd, client, destination, source, destination_path=None, pattern=None, dryrun=False,
validate_content=False, content_settings=None, max_connections=1, metadata=None,
progress_callback=None):
""" Upload local files to Azure Storage File Share in batch """
from azure.cli.command_modules.storage.util import glob_files_locally, normalize_blob_file_path
source_files = list(glob_files_locally(source, pattern))
settings_class = cmd.get_models('_models#ContentSettings', resource_type=ResourceType.DATA_STORAGE_FILESHARE)
if dryrun:
logger.info('upload files to file share')
logger.info(' account %s', client.account_name)
logger.info(' share %s', destination)
logger.info(' total %d', len(source_files))
res = []
for src, dst in source_files:
formatted_dst = normalize_blob_file_path(destination_path, dst)
kwargs = {
'directory_name': os.path.dirname(formatted_dst),
'file_name': os.path.basename(formatted_dst)
}
file = create_file_url(client, **kwargs)
guessed_type = guess_content_type(src, content_settings, settings_class).content_type
res.append({'File': file, 'Type': guessed_type})
return res
def _upload_action(src, dst2):
dst2 = normalize_blob_file_path(destination_path, dst2)
dir_name = os.path.dirname(dst2)
file_name = os.path.basename(dst2)
_make_directory_in_files_share(client, destination, dir_name, V2=True)
logger.warning('uploading %s', src)
storage_file_upload(client.get_file_client(dst2), src, content_settings, metadata, validate_content,
progress_callback, max_connections)
args = {
'directory_name': dir_name,
'file_name': file_name
}
return create_file_url(client, **args)
# 1. Upload files in parallel
# 2. Return the list of uploaded files
return _execute_in_parallel(max_connections, _upload_action, source_files) | Upload local files to Azure Storage File Share in batch | storage_file_upload_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file.py | MIT |
def storage_file_download_batch(client, source, destination, pattern=None, dryrun=False, validate_content=False,
max_connections=1, progress_callback=None):
"""
Download files from file share to local directory in batch
"""
from azure.cli.command_modules.storage.util import glob_files_remotely_track2
source_files = glob_files_remotely_track2(client, source, pattern, is_share_client=True)
if dryrun:
source_files_list = list(source_files)
logger.warning('download files from file share')
logger.warning(' account %s', client.account_name)
logger.warning(' share %s', source)
logger.warning('destination %s', destination)
logger.warning(' pattern %s', pattern)
logger.warning(' total %d', len(source_files_list))
logger.warning(' operations')
for f in source_files_list:
logger.warning(' - %s/%s => %s', f[0], f[1], os.path.join(destination, *f))
return []
def _download_action(pair):
path = os.path.join(*pair)
local_path = os.path.join(destination, *pair)
file_client = client.get_file_client(path)
download_file(file_client, destination_path=local_path, max_connections=max_connections,
progress_callback=progress_callback, validate_content=validate_content)
return file_client.url.replace('%5C', '/')
return list(_download_action(f) for f in source_files) | Download files from file share to local directory in batch | storage_file_download_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file.py | MIT |
def storage_file_copy_batch(cmd, client, source_client, share_name=None, destination_path=None,
source_container=None, source_share=None, source_sas=None, pattern=None, dryrun=False,
metadata=None, timeout=None, **kwargs):
"""
Copy a group of files asynchronously
"""
if dryrun:
logger.warning('copy files or blobs to file share')
logger.warning(' account %s', client.account_name)
logger.warning(' share %s', share_name)
logger.warning(' path %s', destination_path)
logger.warning(' source account %s', kwargs.get('source_account_name', ""))
logger.warning(' source %s', source_container or source_share)
logger.warning('source type %s', 'blob' if source_container else 'file')
logger.warning(' pattern %s', pattern)
logger.warning(' operations')
if source_container:
# copy blobs to file share
# the cache of existing directories in the destination file share. the cache helps to avoid
# repeatedly creating existing directories so as to optimize performance.
existing_dirs = set()
# pylint: disable=inconsistent-return-statements
def action_blob_copy(blob_name):
if dryrun:
logger.warning(' - copy blob %s', blob_name)
else:
return _create_file_and_directory_from_blob(cmd, client, source_client, share_name, source_container,
source_sas, blob_name, destination_dir=destination_path,
metadata=metadata, timeout=timeout,
existing_dirs=existing_dirs)
return list(
filter_none(action_blob_copy(blob) for blob in collect_blobs(source_client, source_container, pattern)))
if source_share:
# copy files from share to share
# the cache of existing directories in the destination file share. the cache helps to avoid
# repeatedly creating existing directories so as to optimize performance.
existing_dirs = set()
# pylint: disable=inconsistent-return-statements
def action_file_copy(file_info):
dir_name, file_name = file_info
if dryrun:
logger.warning(' - copy file %s', os.path.join(dir_name, file_name))
else:
return _create_file_and_directory_from_file(cmd, client, source_client, share_name, source_share,
source_sas, dir_name, file_name,
destination_dir=destination_path, metadata=metadata,
timeout=timeout, existing_dirs=existing_dirs)
return list(filter_none(
action_file_copy(file) for file in collect_files_track2(source_client, source_share, pattern)))
# won't happen, the validator should ensure either source_container or source_share is set
raise ValueError('Failed to find source. Neither blob container nor file share is specified.') | Copy a group of files asynchronously | storage_file_copy_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file.py | MIT
def storage_file_delete_batch(client, source, pattern=None, dryrun=False, timeout=None):
"""
Delete files from file share in batch
"""
def delete_action(pair):
path = os.path.join(*pair)
file_client = client.get_file_client(path)
return file_client.delete_file(timeout=timeout)
from azure.cli.command_modules.storage.util import glob_files_remotely_track2
source_files = list(glob_files_remotely_track2(client, source, pattern, is_share_client=True))
if dryrun:
logger.warning('delete files from %s', source)
logger.warning(' pattern %s', pattern)
logger.warning(' share %s', source)
logger.warning(' total %d', len(source_files))
logger.warning(' operations')
for f in source_files:
logger.warning(' - %s/%s', f[0], f[1])
return []
for f in source_files:
delete_action(f) | Delete files from file share in batch | storage_file_delete_batch | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file.py | MIT |
def _create_file_and_directory_from_blob(cmd, file_service, blob_service, share, container, sas, blob_name,
destination_dir=None, metadata=None, timeout=None, existing_dirs=None):
"""
Copy a blob to file share and create the directory if needed.
"""
from azure.common import AzureException
from azure.cli.command_modules.storage.util import normalize_blob_file_path
t_blob_client = cmd.get_models('_blob_client#BlobClient', resource_type=ResourceType.DATA_STORAGE_BLOB)
source_client = t_blob_client(account_url=blob_service.url, container_name=container,
blob_name=blob_name, credential=sas)
blob_url = source_client.url
full_path = normalize_blob_file_path(destination_dir, blob_name)
dir_name = os.path.dirname(full_path)
_make_directory_in_files_share(file_service, share, dir_name, existing_dirs, V2=True)
try:
file_client = file_service.get_file_client(full_path)
file_client.start_copy_from_url(source_url=blob_url, metadata=metadata, timeout=timeout)
return file_client.url
except AzureException:
error_template = 'Failed to copy blob {} to file share {}. Please check if you have permission to read ' \
'the source or set a correct SAS token.'
from knack.util import CLIError
raise CLIError(error_template.format(blob_name, share)) | Copy a blob to file share and create the directory if needed. | _create_file_and_directory_from_blob | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file.py | MIT |
def _create_file_and_directory_from_file(cmd, file_service, source_file_service, share, source_share, sas,
source_file_dir, source_file_name, destination_dir=None, metadata=None,
timeout=None, existing_dirs=None):
"""
Copy a file from one file share to another
"""
from azure.common import AzureException
from azure.cli.command_modules.storage.util import normalize_blob_file_path
file_path = source_file_name
if source_file_dir:
file_path = source_file_dir + '/' + file_path
t_file_client = cmd.get_models('_file_client#ShareFileClient', resource_type=ResourceType.DATA_STORAGE_FILESHARE)
source_client = t_file_client(account_url=source_file_service.url, share_name=source_share, file_path=file_path,
credential=sas)
file_url = source_client.url
full_path = normalize_blob_file_path(destination_dir, os.path.join(source_file_dir, source_file_name))
file_name = os.path.basename(full_path)
dir_name = os.path.dirname(full_path)
_make_directory_in_files_share(file_service, share, dir_name, existing_dirs, V2=True)
try:
file_client = file_service.get_file_client(full_path)
file_client.start_copy_from_url(source_url=file_url, metadata=metadata, timeout=timeout)
return file_client.url
except AzureException:
error_template = 'Failed to copy file {} from share {} to file share {}. Please check if ' \
'you have the right permission to read the source or set a correct SAS token.'
from knack.util import CLIError
raise CLIError(error_template.format(file_name, source_share, share)) | Copy a file from one file share to another | _create_file_and_directory_from_file | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file.py | MIT |
def _make_directory_in_files_share(file_service, file_share, directory_path, existing_dirs=None, V2=False):
"""
Create directories recursively.
This method accepts an existing_dirs set which serves as a cache of existing directories. If the
parameter is given, the method searches the set first to avoid repeatedly creating a directory
which already exists.
"""
from azure.common import AzureHttpError
if not directory_path:
return
parents = [directory_path]
p = os.path.dirname(directory_path)
while p:
parents.append(p)
p = os.path.dirname(p)
for dir_name in reversed(parents):
if existing_dirs and (dir_name in existing_dirs):
continue
try:
if V2:
file_service.create_directory(directory_name=dir_name)
else:
file_service.create_directory(share_name=file_share, directory_name=dir_name, fail_on_exist=False)
except ResourceExistsError:
pass
except AzureHttpError:
from knack.util import CLIError
raise CLIError('Failed to create directory {}'.format(dir_name))
if existing_dirs:
existing_dirs.add(directory_path) | Create directories recursively.
This method accepts an existing_dirs set which serves as a cache of existing directories. If the
parameter is given, the method searches the set first to avoid repeatedly creating a directory
which already exists. | _make_directory_in_files_share | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/file.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/file.py | MIT |
def create_acl_policy(cmd, client, policy_name, start=None, expiry=None, permission=None, **kwargs):
"""Create a stored access policy on the containing object"""
t_access_policy = cmd.get_models('_models#AccessPolicy', resource_type=ResourceType.DATA_STORAGE_BLOB)
acl = _get_acl(cmd, client, **kwargs)
acl['signed_identifiers'][policy_name] = t_access_policy(permission, expiry, start)
if 'public_access' in acl:
kwargs['public_access'] = acl['public_access']
return _set_acl(cmd, client, acl['signed_identifiers'], **kwargs) | Create a stored access policy on the containing object | create_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | MIT |
def get_acl_policy(cmd, client, policy_name, **kwargs):
"""Show a stored access policy on a containing object"""
acl = _get_acl(cmd, client, **kwargs)
return acl['signed_identifiers'].get(policy_name) | Show a stored access policy on a containing object | get_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | MIT |
def list_acl_policies(cmd, client, **kwargs):
"""List stored access policies on a containing object"""
return _get_acl(cmd, client, **kwargs)['signed_identifiers'] | List stored access policies on a containing object | list_acl_policies | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | MIT |
def set_acl_policy(cmd, client, policy_name, start=None, expiry=None, permission=None, **kwargs):
"""Set a stored access policy on a containing object"""
if not (start or expiry or permission):
from knack.util import CLIError
raise CLIError('Must specify at least one property when updating an access policy.')
acl = _get_acl(cmd, client, **kwargs)
try:
policy = acl['signed_identifiers'][policy_name]
if policy is None:
t_access_policy = cmd.get_models('_models#AccessPolicy', resource_type=ResourceType.DATA_STORAGE_BLOB)
acl['signed_identifiers'][policy_name] = t_access_policy(permission, expiry, start)
else:
policy.start = start if start else policy.start
policy.expiry = expiry if expiry else policy.expiry
policy.permission = permission or policy.permission
if 'public_access' in acl:
kwargs['public_access'] = acl['public_access']
except KeyError:
from knack.util import CLIError
raise CLIError('ACL does not contain {}'.format(policy_name))
return _set_acl(cmd, client, acl['signed_identifiers'], **kwargs) | Set a stored access policy on a containing object | set_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | MIT |
def delete_acl_policy(cmd, client, policy_name, **kwargs):
""" Delete a stored access policy on a containing object """
acl = _get_acl(cmd, client, **kwargs)
try:
del acl['signed_identifiers'][policy_name]
except KeyError:
from knack.util import CLIError
raise CLIError('ACL does not contain {}'.format(policy_name))
if 'public_access' in acl:
kwargs['public_access'] = acl['public_access']
return _set_acl(cmd, client, acl['signed_identifiers'], **kwargs) | Delete a stored access policy on a containing object | delete_acl_policy | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/storage/operations/container_access_policy.py | MIT |
def get_site_availability(cmd, name):
""" This is used by az webapp up to verify if a site needs to be created or should just be deployed"""
client = web_client_factory(cmd.cli_ctx)
availability = client.check_name_availability(name, 'Site')
# check for "." in app name. it is valid for hostnames to contain it, but not allowed for webapp names
if "." in name:
availability.name_available = False
availability.reason = "Invalid"
availability.message = ("Site names only allow alphanumeric characters and hyphens, "
"cannot start or end in a hyphen, and must be less than 64 chars.")
return availability | This is used by az webapp up to verify if a site needs to be created or should just be deployed | get_site_availability | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/appservice/_create_util.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appservice/_create_util.py | MIT |
def validate_timeout_value(namespace):
"""Validates that zip deployment timeout is set to a reasonable min value"""
if isinstance(namespace.timeout, int):
if namespace.timeout <= 29:
raise ArgumentUsageError('--timeout value should be a positive value in seconds and should be at least 30') | Validates that zip deployment timeout is set to a reasonable min value | validate_timeout_value | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/appservice/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appservice/_validators.py | MIT |
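A minimal sketch of the validator's behaviour, with argparse.Namespace standing in for the parsed-argument namespace the CLI would pass:
from argparse import Namespace
validate_timeout_value(Namespace(timeout=300))  # accepted silently
try:
    validate_timeout_value(Namespace(timeout=15))
except Exception as err:  # ArgumentUsageError in the CLI
    print(err)  # --timeout value should be a positive value in seconds and should be at least 30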
def validate_site_create(cmd, namespace):
"""Validate the SiteName that is being used to create is available
This API requires that the RG is already created"""
client = web_client_factory(cmd.cli_ctx)
if isinstance(namespace.name, str) and isinstance(namespace.resource_group_name, str) \
and isinstance(namespace.plan, str):
resource_group_name = namespace.resource_group_name
plan = namespace.plan
if is_valid_resource_id(plan):
parsed_result = parse_resource_id(plan)
plan_info = client.app_service_plans.get(parsed_result['resource_group'], parsed_result['name'])
else:
plan_info = client.app_service_plans.get(resource_group_name, plan)
if not plan_info:
raise ResourceNotFoundError("The plan '{}' doesn't exist in the resource group '{}'".format(
plan, resource_group_name))
# verify that the name is available for create
validation_payload = {
"name": namespace.name,
"type": "Microsoft.Web/sites",
"location": plan_info.location,
"properties": {
"serverfarmId": plan_info.id
}
}
validation = client.validate(resource_group_name, validation_payload)
if validation.status.lower() == "failure" and validation.error.code != 'SiteAlreadyExists':
raise ValidationError(validation.error.message) | Validate that the SiteName being used for creation is available.
This API requires that the RG is already created | validate_site_create | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/appservice/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appservice/_validators.py | MIT |
def update_git_token(cmd, git_token=None):
'''
Update source control token cached in Azure app service. If no token is provided,
the command will clean up the existing token. Note that tokens are now redacted in the result.
'''
client = web_client_factory(cmd.cli_ctx)
from azure.mgmt.web.models import SourceControl
sc = SourceControl(name='not-really-needed', source_control_name='GitHub', token=git_token or '')
response = client.update_source_control('GitHub', sc)
logger.warning('Tokens have been redacted.')
response.refresh_token = None
response.token = None
response.token_secret = None
return response | Update source control token cached in Azure app service. If no token is provided,
the command will clean up the existing token. Note that tokens are now redacted in the result. | update_git_token | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/appservice/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appservice/custom.py | MIT
def set_deployment_user(cmd, user_name, password=None):
'''
Update deployment credentials. (Note, all webapps in your subscription will be impacted)
'''
User = cmd.get_models('User')
client = web_client_factory(cmd.cli_ctx)
user = User(publishing_user_name=user_name)
if password is None:
try:
password = prompt_pass(msg='Password: ', confirm=True)
except NoTTYException:
raise ArgumentUsageError('Please specify both username and password in non-interactive mode.')
user.publishing_password = password
return client.update_publishing_user(user) | Update deployment credentials. (Note, all webapps in your subscription will be impacted) | set_deployment_user | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/appservice/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appservice/custom.py | MIT
def _get_cert(certificate_password, certificate_file):
''' Decrypts the .pfx file and returns the certificate's SHA1 thumbprint '''
cert_password_bytes = certificate_password.encode('utf-8') if certificate_password else None
with open(certificate_file, 'rb') as f:
p12 = pkcs12.load_pkcs12(f.read(), cert_password_bytes)
cert = p12.cert.certificate
thumbprint = cert.fingerprint(hashes.SHA1()).hex().upper()
return thumbprint | Decrypts the .pfx file and returns the certificate's SHA1 thumbprint | _get_cert | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/appservice/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appservice/custom.py | MIT
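A self-contained sketch of the same thumbprint computation with the cryptography package (pkcs12.load_pkcs12 requires cryptography 36 or newer); the file path and password below are placeholders:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.serialization import pkcs12

def pfx_sha1_thumbprint(pfx_path, password=None):
    # load_pkcs12 decrypts the container; the password must be bytes or None.
    password_bytes = password.encode('utf-8') if password else None
    with open(pfx_path, 'rb') as f:
        bundle = pkcs12.load_pkcs12(f.read(), password_bytes)
    # Upper-case hex SHA1 fingerprint of the leaf certificate, i.e. the usual thumbprint format.
    return bundle.cert.certificate.fingerprint(hashes.SHA1()).hex().upper()

# print(pfx_sha1_thumbprint('mycert.pfx', 'my-password'))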
def generate_key(byte_length=32):
"""
Generate cryptographically secure device key.
"""
import secrets
token_bytes = secrets.token_bytes(byte_length)
return base64.b64encode(token_bytes).decode("utf8") | Generate cryptographically secure device key. | generate_key | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/iot/_utils.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/iot/_utils.py | MIT |
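generate_key relies only on the standard library; a quick usage sketch showing that a 32-byte key round-trips through base64:

import base64
import secrets

key = base64.b64encode(secrets.token_bytes(32)).decode('utf8')
assert len(base64.b64decode(key)) == 32  # 44-character base64 string encoding 32 random bytes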
def _ensure_location(cli_ctx, resource_group_name, location):
"""Check to see if a location was provided. If not,
fall back to the resource group location.
:param object cli_ctx: CLI Context
:param str resource_group_name: Resource group name
:param str location: Location to create the resource
"""
if location is None:
resource_group_client = resource_service_factory(cli_ctx).resource_groups
return resource_group_client.get(resource_group_name).location
return location | Check to see if a location was provided. If not,
fall back to the resource group location.
:param object cli_ctx: CLI Context
:param str resource_group_name: Resource group name
:param str location: Location to create the resource | _ensure_location | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/iot/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/iot/custom.py | MIT |
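_ensure_location is a plain "use the explicit value, otherwise look it up" fallback. A generic sketch with a stubbed lookup, since resource_service_factory is CLI-internal (the names below are illustrative):

def ensure_location(location, lookup_resource_group_location, resource_group_name):
    # lookup_resource_group_location is any callable returning the group's location.
    if location is None:
        return lookup_resource_group_location(resource_group_name)
    return location

print(ensure_location(None, lambda rg: 'westus2', 'my-rg'))      # westus2
print(ensure_location('eastus', lambda rg: 'westus2', 'my-rg'))  # eastus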
def _get_iot_central_app_by_name(client, app_name):
"""Search the current subscription for an app with the given name.
:param object client: IoTCentralClient
:param str app_name: App name to search for
"""
all_apps = iot_central_app_list(client)
if all_apps is None:
raise CLIError(
"No IoT Central application found in current subscription.")
try:
target_app = next(
x for x in all_apps if app_name.lower() == x.name.lower())
except StopIteration:
raise CLIError(
"No IoT Central application found with name {} in current subscription.".format(app_name))
return target_app | Search the current subscription for an app with the given name.
:param object client: IoTCentralClient
:param str app_name: App name to search for | _get_iot_central_app_by_name | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/iot/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/iot/custom.py | MIT |
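The lookup above is a case-insensitive next() over a generator with StopIteration handling. A generic, self-contained version of the pattern over plain dicts (the sample names are invented):

def find_by_name(items, name):
    # Case-insensitive match on a 'name' field, with a clear error when nothing matches.
    try:
        return next(x for x in items if x['name'].lower() == name.lower())
    except StopIteration:
        raise LookupError('No item found with name {}.'.format(name)) from None

apps = [{'name': 'Shop-Floor'}, {'name': 'Cold-Chain'}]
print(find_by_name(apps, 'cold-chain'))  # {'name': 'Cold-Chain'}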
def signed_session(self, session=None):
"""Create requests session with SAS auth headers.
:rtype: requests.Session.
"""
session = session or super().signed_session()
session.headers['Authorization'] = self.generate_sas_token()
return session | Create requests session with SAS auth headers.
:rtype: requests.Session. | signed_session | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/iot/sas_token_auth.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/iot/sas_token_auth.py | MIT |
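A sketch of attaching a precomputed SAS token to a requests session; requests is assumed to be installed, and the token string and URL in the commented usage are placeholders for whatever generate_sas_token produces:

import requests

def signed_session(sas_token, session=None):
    # Reuse an existing session if given; every request sent through it carries the SAS header.
    session = session or requests.Session()
    session.headers['Authorization'] = sas_token
    return session

# s = signed_session('SharedAccessSignature sr=...&sig=...&se=...&skn=...')
# s.get('https://my-hub.azure-devices.net/devices?api-version=2021-04-12')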
def validate_api_properties(ns):
""" Extracts JSON format or 'a=b c=d' format as api properties """
api_properties = ns.api_properties
if api_properties is None:
return
if len(api_properties) > 1:
ns.api_properties = extract_key_values_pairs(api_properties)
else:
string = api_properties[0]
try:
ns.api_properties = shell_safe_json_parse(string)
return
except CLIError:
result = extract_key_values_pairs([string])
if _is_suspected_json(string):
logger.warning('API properties look like JSON but are not valid; interpreted as key=value pairs:'
' %s', str(result))
ns.api_properties = result
return | Extracts JSON format or 'a=b c=d' format as api properties | validate_api_properties | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/cognitiveservices/_params.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/cognitiveservices/_params.py | MIT |
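The validator accepts either a JSON document or space-separated key=value pairs. A self-contained sketch of that dual parse using json.loads as the strict path and a naive splitter as the fallback (the CLI's shell_safe_json_parse and extract_key_values_pairs helpers are not reproduced here):

import json

def parse_api_properties(values):
    # values: strings from the command line, e.g. ['{"qnaRuntimeEndpoint": "https://example"}']
    # or ['qnaRuntimeEndpoint=https://example', 'statisticsEnabled=true'].
    if len(values) > 1:
        return dict(v.split('=', 1) for v in values)
    try:
        return json.loads(values[0])
    except ValueError:
        return dict(v.split('=', 1) for v in values)

print(parse_api_properties(['a=b', 'c=d']))   # {'a': 'b', 'c': 'd'}
print(parse_api_properties(['{"a": "b"}']))   # {'a': 'b'}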
def _is_suspected_json(string):
""" If the string looks like a JSON """
if string.startswith('{') or string.startswith('\'{') or string.startswith('\"{'):
return True
if string.startswith('[') or string.startswith('\'[') or string.startswith('\"['):
return True
if re.match(r"^['\"\s]*{.+}|\[.+\]['\"\s]*$", string):
return True
return False | If the string looks like a JSON | _is_suspected_json | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/cognitiveservices/_params.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/cognitiveservices/_params.py | MIT |
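The heuristic only suspects JSON; it does not validate it. A compact version of the same checks with a couple of sample inputs:

import re

def is_suspected_json(string):
    # True when the string merely looks like a JSON object/array, possibly wrapped in quotes.
    if string.startswith(('{', "'{", '"{', '[', "'[", '"[')):
        return True
    return bool(re.match(r"^['\"\s]*{.+}|\[.+\]['\"\s]*$", string))

print(is_suspected_json('{"a": 1'))   # True, even though the JSON is invalid
print(is_suspected_json('a=b c=d'))   # False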
def _sku_filter(cmd, namespace):
"""
Get a list of ResourceSku and filter by existing conditions: 'kind', 'location' and 'sku_name'
"""
kind = getattr(namespace, 'kind', None)
location = getattr(namespace, 'location', None)
sku_name = getattr(namespace, 'sku_name', None)
def _filter_sku(_sku):
if sku_name is not None:
if _sku.name != sku_name:
return False
if kind is not None:
if _sku.kind != kind:
return False
if location is not None:
if location.lower() not in [x.lower() for x in _sku.locations]:
return False
return True
return [x for x in cf_resource_skus(cmd.cli_ctx).list() if _filter_sku(x)] | Get a list of ResourceSku and filter by existing conditions: 'kind', 'location' and 'sku_name' | _sku_filter | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/cognitiveservices/_params.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/cognitiveservices/_params.py | MIT |
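_sku_filter applies each constraint only when it was supplied. A generic sketch over plain dicts instead of SDK ResourceSku objects (the sample SKU data is invented):

def filter_skus(skus, kind=None, location=None, sku_name=None):
    def keep(sku):
        # Every check is skipped when its argument is None.
        if sku_name is not None and sku['name'] != sku_name:
            return False
        if kind is not None and sku['kind'] != kind:
            return False
        if location is not None and location.lower() not in [loc.lower() for loc in sku['locations']]:
            return False
        return True
    return [s for s in skus if keep(s)]

skus = [
    {'name': 'S0', 'kind': 'TextAnalytics', 'locations': ['WestUS2', 'EastUS']},
    {'name': 'F0', 'kind': 'TextAnalytics', 'locations': ['EastUS']},
]
print(filter_skus(skus, location='westus2'))  # only the S0 entry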
def list_resources(client, resource_group_name=None):
"""
List all Azure Cognitive Services accounts.
"""
if resource_group_name:
return client.list_by_resource_group(resource_group_name)
return client.list() | List all Azure Cognitive Services accounts. | list_resources | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | MIT |
def recover(client, location, resource_group_name, account_name):
"""
Recover a deleted Azure Cognitive Services account.
"""
properties = CognitiveServicesAccountProperties()
properties.restore = True
params = CognitiveServicesAccount(properties=properties)
params.location = location
return client.begin_create(resource_group_name, account_name, params) | Recover a deleted Azure Cognitive Services account. | recover | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | MIT |
def list_usages(client, resource_group_name, account_name):
"""
List usages for Azure Cognitive Services account.
"""
return client.list_usages(resource_group_name, account_name).value | List usages for Azure Cognitive Services account. | list_usages | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | MIT |
def list_kinds(client):
"""
List all valid kinds for Azure Cognitive Services account.
:param client: the ResourceSkusOperations
:return: a list
"""
# The client should be ResourceSkusOperations, and list() should return a list of SKUs for all regions.
# The sku will have "kind" and we use that to extract full list of kinds.
kinds = {x.kind for x in client.list()}
return sorted(list(kinds)) | List all valid kinds for Azure Cognitive Services account.
:param client: the ResourceSkusOperations
:return: a list | list_kinds | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | MIT |
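list_kinds boils down to deduplicating with a set comprehension and sorting. A tiny standalone sketch over dicts (the kind values are just examples):

def unique_sorted_kinds(skus):
    return sorted({sku['kind'] for sku in skus})

print(unique_sorted_kinds([{'kind': 'OpenAI'}, {'kind': 'Face'}, {'kind': 'OpenAI'}]))
# ['Face', 'OpenAI']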
def list_skus(cmd, kind=None, location=None, resource_group_name=None, account_name=None):
"""
List skus for Azure Cognitive Services account.
"""
if resource_group_name is not None or account_name is not None:
logger.warning(
'list-skus with an existing account has been deprecated and will be removed in a future release.')
if resource_group_name is None:
# account_name must not be None
raise CLIError('--resource-group is required when --name is specified.')
# keep the original behavior to avoid breaking changes
return cf_accounts(cmd.cli_ctx).list_skus(resource_group_name, account_name)
# in other cases, use kind and location to filter SKUs
def _filter_sku(_sku):
if kind is not None:
if _sku.kind != kind:
return False
if location is not None:
if location.lower() not in [x.lower() for x in _sku.locations]:
return False
return True
return [x for x in cf_resource_skus(cmd.cli_ctx).list() if _filter_sku(x)] | List skus for Azure Cognitive Services account. | list_skus | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/cognitiveservices/custom.py | MIT |