code
stringlengths 26
870k
| docstring
stringlengths 1
65.6k
| func_name
stringlengths 1
194
| language
stringclasses 1
value | repo
stringlengths 8
68
| path
stringlengths 5
194
| url
stringlengths 46
254
| license
stringclasses 4
values |
---|---|---|---|---|---|---|---|
def _get_storage_account_key(cli_ctx, account_name, account_key):
"""Returns account key for the given storage account.
:param str account_name: storage account name.
:param str or None account_key: account key provide as command line argument.
"""
if account_key:
return account_key
storage_client = _get_storage_management_client(cli_ctx)
account = [a.id for a in list(storage_client.storage_accounts.list()) if a.name == account_name]
if not account:
raise CLIError('Cannot find "{0}" storage account.'.format(account_name))
resource_group = parse_resource_id(account[0])['resource_group']
keys_list_result = storage_client.storage_accounts.list_keys(resource_group, account_name)
if not keys_list_result or not keys_list_result.keys:
raise CLIError('Cannot find a key for "{0}" storage account.'.format(account_name))
return keys_list_result.keys[0].value | Returns account key for the given storage account.
:param str account_name: storage account name.
:param str or None account_key: account key provide as command line argument. | _get_storage_account_key | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _get_effective_storage_account_name_and_key(cli_ctx, account_name, account_key):
"""Returns storage account name and key to be used.
:param str or None account_name: storage account name provided as command line argument.
:param str or None account_key: storage account key provided as command line argument.
"""
if account_name:
return account_name, _get_storage_account_key(cli_ctx, account_name, account_key) or ''
return cli_ctx.config.get('batchai', 'storage_account', ''), cli_ctx.config.get('batchai', 'storage_key', '') | Returns storage account name and key to be used.
:param str or None account_name: storage account name provided as command line argument.
:param str or None account_key: storage account key provided as command line argument. | _get_effective_storage_account_name_and_key | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _get_account_name_from_azure_file_url(azure_file_url):
"""Extracts account name from Azure File URL
:param str azure_file_url: Azure File URL
:return str: account name
"""
if not azure_file_url:
raise CLIError('Azure File URL cannot absent or be empty')
o = urlparse(azure_file_url)
try:
account, _ = o.netloc.split('.', 1)
return account
except ValueError:
raise CLIError('Ill-formed Azure File URL "{0}"'.format(azure_file_url)) | Extracts account name from Azure File URL
:param str azure_file_url: Azure File URL
:return str: account name | _get_account_name_from_azure_file_url | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _get_effective_credentials(cli_ctx, existing_credentials, account_name):
"""Returns AzureStorageCredentialInfo for the account
:param models.AzureStorageCredentialsInfo existing_credentials: known credentials
:param str account_name: storage account name
:return models.AzureStorageCredentialsInfo: credentials to be used
"""
if existing_credentials and (existing_credentials.account_key or existing_credentials.account_key_secret_reference):
return existing_credentials
return models.AzureStorageCredentialsInfo(
account_key=_get_storage_account_key(cli_ctx, account_name, account_key=None)) | Returns AzureStorageCredentialInfo for the account
:param models.AzureStorageCredentialsInfo existing_credentials: known credentials
:param str account_name: storage account name
:return models.AzureStorageCredentialsInfo: credentials to be used | _get_effective_credentials | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _patch_mount_volumes(cli_ctx, volumes, account_name=None, account_key=None):
    """Patches mount volumes by replacing placeholders and adding credentials information.

    Returns a patched deep copy; the input volumes object is not mutated.

    :param models.MountVolumes or None volumes: mount volumes.
    :param str or None account_name: name of the storage account provided as command line argument.
    :param str or None account_key: storage account key provided as command line argument.
    :return models.MountVolumes or None: updated mount volumes.
    :raise CLIError: if a placeholder is used but the storage account/key is not configured,
        or a blob file system reference has no account name.
    """
    if volumes is None:
        return None
    result = copy.deepcopy(volumes)  # type: models.MountVolumes
    storage_account_name, storage_account_key = _get_effective_storage_account_name_and_key(
        cli_ctx, account_name, account_key)
    # Only complain about missing account/key configuration if a placeholder
    # actually required it - tracked via these two flags.
    require_storage_account = False
    require_storage_account_key = False
    # Patch parameters of azure file share.
    if result.azure_file_shares:
        for ref in result.azure_file_shares:
            # Populate account name if it was not provided
            if not ref.account_name:
                ref.account_name = _get_account_name_from_azure_file_url(ref.azure_file_url)
            # Replace placeholders
            if ref.account_name == AZURE_BATCHAI_STORAGE_ACCOUNT_PLACEHOLDER:
                require_storage_account = True
                ref.account_name = storage_account_name
            if ref.azure_file_url and AZURE_BATCHAI_STORAGE_ACCOUNT_PLACEHOLDER in ref.azure_file_url:
                require_storage_account = True
                ref.azure_file_url = ref.azure_file_url.replace(
                    AZURE_BATCHAI_STORAGE_ACCOUNT_PLACEHOLDER, storage_account_name)
            if ref.credentials and ref.credentials.account_key == AZURE_BATCHAI_STORAGE_KEY_PLACEHOLDER:
                require_storage_account_key = True
                ref.credentials.account_key = storage_account_key
            # No explicit credentials, but the share lives on the configured
            # account - reuse the configured key.
            if not ref.credentials and ref.account_name == storage_account_name:
                require_storage_account_key = True
                ref.credentials = models.AzureStorageCredentialsInfo(account_key=storage_account_key)
            # Fill in any still-missing key by querying the management API.
            if ref.account_name:
                ref.credentials = _get_effective_credentials(cli_ctx, ref.credentials, ref.account_name)
    # Patch parameters of blob file systems.
    if result.azure_blob_file_systems:
        for ref in result.azure_blob_file_systems:
            # Replace placeholders
            if ref.account_name == AZURE_BATCHAI_STORAGE_ACCOUNT_PLACEHOLDER:
                require_storage_account = True
                ref.account_name = storage_account_name
            if ref.credentials and ref.credentials.account_key == AZURE_BATCHAI_STORAGE_KEY_PLACEHOLDER:
                require_storage_account_key = True
                ref.credentials.account_key = storage_account_key
            if not ref.credentials and ref.account_name == storage_account_name:
                require_storage_account_key = True
                ref.credentials = models.AzureStorageCredentialsInfo(account_key=storage_account_key)
            # Populate the rest of credentials based on the account name
            if not ref.account_name:
                raise CLIError('Ill-formed Azure Blob File System reference in the configuration file - no account '
                               'name provided.')
            if ref.account_name:
                ref.credentials = _get_effective_credentials(cli_ctx, ref.credentials, ref.account_name)
    if require_storage_account and not storage_account_name:
        raise CLIError(MSG_CONFIGURE_STORAGE_ACCOUNT)
    if require_storage_account_key and not storage_account_key:
        raise CLIError(MSG_CONFIGURE_STORAGE_KEY)
    return result
def _update_user_account_settings(params, admin_user_name, ssh_key, password):
    """Update account settings of cluster or file server creation parameters.

    Returns an updated deep copy; the input params object is not mutated.

    :param models.ClusterCreateParameters or models.FileServerCreateParameters params: params to update.
    :param str or None admin_user_name: name of admin user to create.
    :param str or None ssh_key: ssh public key value or path to the file containing the key.
    :param str or None password: password.
    :return models.ClusterCreateParameters or models.FileServerCreateParameters: updated parameters.
    :raise CLIError: if the ssh key is invalid, or no user name / no credentials can be determined.
    """
    result = copy.deepcopy(params)
    # Cluster params carry user_account_settings directly; file server params
    # nest them under ssh_configuration.
    if hasattr(result, 'user_account_settings'):
        parent = result
    else:
        if result.ssh_configuration is None:
            result.ssh_configuration = models.SshConfiguration(user_account_settings=None)
        parent = result.ssh_configuration
    if parent.user_account_settings is None:
        parent.user_account_settings = models.UserAccountSettings(admin_user_name=None)
    # Get effective user name, password and key trying them in the following order: provided via command line,
    # provided in the config file, current user name and his default public ssh key.
    effective_user_name = admin_user_name or parent.user_account_settings.admin_user_name or get_default_admin_username()  # pylint: disable=line-too-long
    effective_password = password or parent.user_account_settings.admin_user_password
    # Use default ssh public key only if no password is configured.
    effective_key = (ssh_key or parent.user_account_settings.admin_user_ssh_public_key or
                     (None if effective_password else _get_default_ssh_public_key_location()))
    if effective_key:
        # The value may be a path to a key file rather than the key text itself.
        if os.path.exists(os.path.expanduser(effective_key)):
            with open(os.path.expanduser(effective_key)) as f:
                effective_key = f.read()
    try:
        if effective_key and not keys.is_valid_ssh_rsa_public_key(effective_key):
            raise CLIError('Incorrect ssh public key value.')
    except Exception:
        # is_valid_ssh_rsa_public_key may throw on malformed input; normalize
        # any failure to a single user-facing CLIError.
        raise CLIError('Incorrect ssh public key value.')
    parent.user_account_settings.admin_user_name = effective_user_name
    parent.user_account_settings.admin_user_ssh_public_key = effective_key
    parent.user_account_settings.admin_user_password = effective_password
    if not parent.user_account_settings.admin_user_name:
        raise CLIError('Please provide admin user name.')
    if (not parent.user_account_settings.admin_user_ssh_public_key and
            not parent.user_account_settings.admin_user_password):
        raise CLIError('Please provide admin user password or ssh key.')
    return result
def _add_nfs_to_mount_volumes(volumes, file_server_id, mount_path):
    """Adds NFS to the mount volumes.

    :param models.MountVolumes or None volumes: existing mount volumes.
    :param str file_server_id: resource id of the file server.
    :param str mount_path: relative mount path for the file server.
    :return models.MountVolumes: updated mount volumes (the input is not mutated).
    :raise CLIError: if the mount path is empty.
    """
    if not mount_path:
        raise CLIError('File server relative mount path cannot be empty.')
    result = copy.deepcopy(volumes) if volumes else models.MountVolumes()
    if result.file_servers is None:
        result.file_servers = []
    reference = models.FileServerReference(
        relative_mount_path=mount_path,
        file_server=models.ResourceId(id=file_server_id),
        mount_options="rw")
    result.file_servers.append(reference)
    return result
def _get_azure_file_url(cli_ctx, account_name, azure_file_share):
"""Returns Azure File URL for the given account
:param str account_name: account name
:param str azure_file_share: name of the share
:return str: Azure File URL to be used in mount volumes
"""
return 'https://{0}.file.{1}/{2}'.format(account_name, cli_ctx.cloud.suffixes.storage_endpoint, azure_file_share) | Returns Azure File URL for the given account
:param str account_name: account name
:param str azure_file_share: name of the share
:return str: Azure File URL to be used in mount volumes | _get_azure_file_url | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _add_azure_file_share_to_mount_volumes(cli_ctx, volumes, azure_file_share, mount_path, account_name=None,
                                           account_key=None):
    """Add Azure File share to the mount volumes.

    :param models.MountVolumes volumes: existing mount volumes.
    :param str azure_file_share: name of the azure file share.
    :param str mount_path: relative mount path for Azure File share.
    :param str or None account_name: storage account name provided as command line argument.
    :param str or None account_key: storage account key provided as command line argument.
    :return models.MountVolumes: updated mount volumes (the input is not mutated).
    :raise CLIError: if the mount path is empty or no storage account/key is configured.
    """
    result = copy.deepcopy(volumes) if volumes else models.MountVolumes()
    if not mount_path:
        raise CLIError('Azure File share relative mount path cannot be empty.')
    name, key = _get_effective_storage_account_name_and_key(cli_ctx, account_name, account_key)
    if not name:
        raise CLIError(MSG_CONFIGURE_STORAGE_ACCOUNT)
    if not key:
        raise CLIError(MSG_CONFIGURE_STORAGE_KEY)
    if result.azure_file_shares is None:
        result.azure_file_shares = []
    share_reference = models.AzureFileShareReference(
        relative_mount_path=mount_path,
        account_name=name,
        azure_file_url=_get_azure_file_url(cli_ctx, name, azure_file_share),
        credentials=models.AzureStorageCredentialsInfo(account_key=key))
    result.azure_file_shares.append(share_reference)
    return result
def _add_azure_container_to_mount_volumes(cli_ctx, volumes, container_name, mount_path, account_name=None,
                                          account_key=None):
    """Add Azure Storage container to the mount volumes.

    :param models.MountVolumes volumes: existing mount volumes.
    :param str container_name: container name.
    :param str mount_path: relative mount path for the container.
    :param str or None account_name: storage account name provided as command line argument.
    :param str or None account_key: storage account key provided as command line argument.
    :return models.MountVolumes: updated mount volumes (the input is not mutated).
    :raise CLIError: if the mount path is empty or no storage account/key is configured.
    """
    result = copy.deepcopy(volumes) if volumes else models.MountVolumes()
    if not mount_path:
        raise CLIError('Azure Storage Container relative mount path cannot be empty.')
    name, key = _get_effective_storage_account_name_and_key(cli_ctx, account_name, account_key)
    if not name:
        raise CLIError(MSG_CONFIGURE_STORAGE_ACCOUNT)
    if not key:
        raise CLIError(MSG_CONFIGURE_STORAGE_KEY)
    if result.azure_blob_file_systems is None:
        result.azure_blob_file_systems = []
    result.azure_blob_file_systems.append(models.AzureBlobFileSystemReference(
        relative_mount_path=mount_path,
        account_name=name,
        container_name=container_name,
        credentials=models.AzureStorageCredentialsInfo(account_key=key)))
    return result
def _get_image_reference(image, custom_image):
    """Returns image reference for the given image and custom image.

    :param str or None image: image alias or full "publisher:offer:sku:version" spec.
    :param str or None custom_image: resource id of the custom image.
    :return models.ImageReference: the resolved image reference.
    :raise CLIError: if the image with given alias was not found or the spec is ill-formed.
    """
    if custom_image and not image:
        raise CLIError('You need to specify --image argument with information about the custom image')
    if custom_image and not is_valid_resource_id(custom_image):
        raise CLIError('Ill-formed custom image resource id')
    if ':' in image:
        # Full image specification "publisher:offer:sku:version" is provided.
        try:
            publisher, offer, sku, version = image.split(':')
        except ValueError:
            raise CLIError('--image must have format "publisher:offer:sku:version" or "publisher:offer:sku:"')
        for value, label in ((publisher, 'publisher'), (offer, 'offer'), (sku, 'sku')):
            if not value:
                raise CLIError('Image {0} must be provided in --image argument'.format(label))
        return models.ImageReference(
            publisher=publisher,
            offer=offer,
            sku=sku,
            version=version or None,
            virtual_machine_image_id=custom_image)
    # Otherwise treat the value as an image alias (case-insensitive lookup).
    matched = None
    for alias, value in SUPPORTED_IMAGE_ALIASES.items():
        if alias.lower() == image.lower():
            matched = value
    if not matched:
        raise CLIError('Unsupported image alias "{0}", supported aliases are {1}'.format(
            image, ', '.join(SUPPORTED_IMAGE_ALIASES.keys())))
    result = copy.deepcopy(matched)
    result.virtual_machine_image_id = custom_image
    return result
def _get_scale_settings(initial_count, min_count, max_count):
"""Returns scale settings for a cluster with given parameters"""
if not initial_count and not min_count and not max_count:
# Get from the config file
return None
if sum(1 if v is not None else 0 for v in (min_count, max_count)) == 1:
raise CLIError('You need to either provide both min and max node counts or not provide any of them')
if min_count is not None and max_count is not None and min_count > max_count:
raise CLIError('Maximum nodes count must be greater or equal to minimum nodes count')
if min_count == max_count:
if min_count is None or initial_count == min_count:
return models.ScaleSettings(
manual=models.ManualScaleSettings(target_node_count=initial_count))
if initial_count is None:
return models.ScaleSettings(
manual=models.ManualScaleSettings(target_node_count=min_count)
)
return models.ScaleSettings(
auto_scale=models.AutoScaleSettings(
minimum_node_count=min_count,
maximum_node_count=max_count,
initial_node_count=initial_count or 0)) | Returns scale settings for a cluster with given parameters | _get_scale_settings | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _update_nodes_information(params, image, custom_image, vm_size, vm_priority, target, min_nodes, max_nodes):
    """Updates cluster's nodes information.

    :param models.ClusterCreateParameters params: cluster create parameters.
    :param str or None image: image.
    :param str or None custom_image: custom image resource id.
    :param str or None vm_size: VM size.
    :param str vm_priority: Priority.
    :param int or None target: initial number of nodes.
    :param int or None min_nodes: min number of nodes.
    :param int or None max_nodes: max number of nodes.
    :return models.ClusterCreateParameters: updated deep copy of the parameters.
    :raise CLIError: if no VM size or no scale settings can be determined.
    """
    updated = copy.deepcopy(params)
    # Command-line values override whatever came from the configuration file.
    if vm_size:
        updated.vm_size = vm_size
    if not updated.vm_size:
        raise CLIError('Please provide VM size')
    if vm_priority:
        updated.vm_priority = vm_priority
    if image or custom_image:
        updated.virtual_machine_configuration = models.VirtualMachineConfiguration(
            image_reference=_get_image_reference(image, custom_image))
    scale = _get_scale_settings(target, min_nodes, max_nodes)
    if scale:
        updated.scale_settings = scale
    has_valid_scale = updated.scale_settings and (
        updated.scale_settings.manual or updated.scale_settings.auto_scale)
    if not has_valid_scale:
        raise CLIError('Please provide scale setting for the cluster via command line or configuration file')
    return updated
def _configure_auto_storage(cli_ctx, location):
    """Configures auto storage account for the cluster.

    Finds or creates the auto-storage resource group and a storage account in the
    given location, and ensures the default file share and blob container exist.

    :param str location: location for the auto-storage account.
    :return (str, str): a tuple with auto storage account name and key.
    """
    ResourceGroup = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES, 'ResourceGroup', mod='models')
    BlockBlobService, FileService = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
                                            'blob#BlockBlobService', 'file#FileService')
    resource_group = _get_auto_storage_resource_group()
    resource_client = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
    if resource_client.resource_groups.check_existence(resource_group):
        logger.warning('BatchAI will use existing %s resource group for auto-storage account',
                       resource_group)
    else:
        logger.warning('Creating %s resource group for auto-storage account', resource_group)
        resource_client.resource_groups.create_or_update(
            resource_group, ResourceGroup(location=location))
    storage_client = _get_storage_management_client(cli_ctx)
    account = None
    # Reuse an existing account in the same location when possible.
    for a in storage_client.storage_accounts.list_by_resource_group(resource_group):
        # Compare against the normalized location name (e.g. 'East US' -> 'eastus').
        if a.primary_location == location.lower().replace(' ', ''):
            account = a.name
            logger.warning('Using existing %s storage account as an auto-storage account', account)
            break
    if account is None:
        account = _create_auto_storage_account(storage_client, resource_group, location)
        logger.warning('Created auto storage account %s', account)
    key = _get_storage_account_key(cli_ctx, account, None)
    # Ensure the well-known share/container exist; fail_on_exist=False makes this idempotent.
    file_service = FileService(account, key)
    file_service.create_share(AUTO_STORAGE_SHARE_NAME, fail_on_exist=False)
    blob_service = BlockBlobService(account, key)
    blob_service.create_container(AUTO_STORAGE_CONTAINER_NAME, fail_on_exist=False)
    return account, key
def _generate_auto_storage_account_name():
    """Generates unique name for auto storage account."""
    # Shuffle a large pool of lowercase letters and take a 12-character suffix.
    letter_pool = list(string.ascii_lowercase * 12)
    shuffle(letter_pool)
    random_suffix = ''.join(letter_pool[:12])
    return AUTO_STORAGE_ACCOUNT_PREFIX + random_suffix
def _create_auto_storage_account(storage_client, resource_group, location):
    """Creates new auto storage account in the given resource group and location.

    :param StorageManagementClient storage_client: storage client.
    :param str resource_group: name of the resource group.
    :param str location: location.
    :return str: name of the created storage account.
    """
    from azure.mgmt.storage.models import Kind, Sku, SkuName
    # Keep generating random names until an available one is found.
    # BUG FIX: the original assigned the boolean `name_available` to `check`
    # inside the loop, so a second name collision crashed with AttributeError
    # when evaluating `check.name_available` on a bool.
    name = _generate_auto_storage_account_name()
    while not storage_client.storage_accounts.check_name_availability(name).name_available:
        name = _generate_auto_storage_account_name()
    # `create` returns a poller; block until the account is provisioned.
    storage_client.storage_accounts.create(resource_group, name, {
        'sku': Sku(name=SkuName.standard_lrs),
        'kind': Kind.storage,
        'location': location}).result()
    return name
def _add_setup_task(cmd_line, output, cluster):
"""Adds a setup task with given command line and output destination to the cluster.
:param str cmd_line: node setup command line.
:param str output: output destination.
:param models.ClusterCreateParameters cluster: cluster creation parameters.
"""
if cmd_line is None:
return cluster
if output is None:
raise CLIError('--setup-task requires providing of --setup-task-output')
cluster = copy.deepcopy(cluster)
cluster.node_setup = cluster.node_setup or models.NodeSetup()
cluster.node_setup.setup_task = models.SetupTask(
command_line=cmd_line,
std_out_err_path_prefix=output,
run_elevated=False)
return cluster | Adds a setup task with given command line and output destination to the cluster.
:param str cmd_line: node setup command line.
:param str output: output destination.
:param models.ClusterCreateParameters cluster: cluster creation parameters. | _add_setup_task | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _generate_ssh_keys():
    """Generates ssh keys pair."""
    # Find-or-create the default RSA key pair under ~/.ssh.
    ssh_dir = os.path.join(os.path.expanduser('~'), '.ssh')
    private_key_path = os.path.join(ssh_dir, 'id_rsa')
    public_key_path = os.path.join(ssh_dir, 'id_rsa.pub')
    keys.generate_ssh_keys(private_key_path, public_key_path)
    logger.warning('Attempted to find or generate SSH key files id_rsa and id_rsa.pub under ~/.ssh to allow SSH access '
                   'to the nodes. If using machines without permanent storage, back up your keys to a safe location.')
def _get_effective_resource_parameters(name_or_id, resource_group, workspace):
"""Returns effective resource group, workspace and name for the given resource"""
if not name_or_id:
return None, None, None
if is_valid_resource_id(name_or_id):
parts = parse_resource_id(name_or_id)
return parts['resource_group'], parts['name'], parts['resource_name']
return resource_group, workspace, name_or_id | Returns effective resource group, workspace and name for the given resource | _get_effective_resource_parameters | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _is_on_mount_point(path, mount_path):
"""Checks if path is on mount_path"""
path = os.path.normpath(path).replace('\\', '/')
mount_path = os.path.normpath(mount_path).replace('\\', '/')
return path == mount_path or os.path.commonprefix([path, mount_path + '/']) == mount_path + '/' | Checks if path is on mount_path | _is_on_mount_point | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _list_node_setup_files_for_cluster(cli_ctx, cluster, path, expiry):
    """Lists node setup task's log files for the given cluster.

    :param models.Cluster cluster: the cluster.
    :param str path: relative path under cluster node setup task's output directory.
    :param int expiry: time in minutes for how long generated SASes will remain valid
        (passed through to the storage helpers which use timedelta(minutes=...)).
    :return list: files/directories found under the path.
    :raise CLIError: if the setup task output is not stored on a supported mounted
        file system, or the cluster does not report the output path suffix.
    """
    unsupported_location = 'List files is supported only for clusters with startup task configure to store its ' \
                           'output on Azure File Share or Azure Blob Container'
    if cluster.node_setup is None or cluster.node_setup.setup_task is None:
        # Nothing to check or return if there is no setup task.
        return []
    prefix = cluster.node_setup.setup_task.std_out_err_path_prefix
    if not _is_on_mount_point(prefix, '$AZ_BATCHAI_MOUNT_ROOT'):
        # The stdouterr directory must be on $AZ_BATCHAI_MOUNT_ROOT
        raise CLIError(unsupported_location)
    suffix = cluster.node_setup.setup_task.std_out_err_path_suffix
    if not suffix:
        # Clusters created with older API version do not report the path suffix, so we cannot find their files.
        raise CLIError('List files is not supported for this cluster')
    relative_mount_path = prefix[len('$AZ_BATCHAI_MOUNT_ROOT/'):]
    if cluster.node_setup.mount_volumes is None:
        # If nothing is mounted, the files were stored somewhere else and we cannot find them.
        raise CLIError(unsupported_location)
    # try mounted Azure file shares
    for afs in cluster.node_setup.mount_volumes.azure_file_shares or []:
        if _is_on_mount_point(relative_mount_path, afs.relative_mount_path):
            return _get_files_from_afs(cli_ctx, afs, os.path.join(suffix, path), expiry)
    # try mounted blob containers
    for bfs in cluster.node_setup.mount_volumes.azure_blob_file_systems or []:
        if _is_on_mount_point(relative_mount_path, bfs.relative_mount_path):
            return _get_files_from_bfs(cli_ctx, bfs, os.path.join(suffix, path), expiry)
    # the folder on some other file system or on local disk
    raise CLIError(unsupported_location)
def _get_files_from_bfs(cli_ctx, bfs, path, expiry):
"""Returns a list of files and directories under given path on mounted blob container.
:param models.AzureBlobFileSystemReference bfs: blob file system reference.
:param str path: path to list files from.
:param int expiry: SAS expiration time in minutes.
"""
BlockBlobService = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'blob#BlockBlobService')
Blob = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'blob#Blob')
BlobPermissions = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'blob#BlobPermissions')
result = []
service = BlockBlobService(bfs.account_name, _get_storage_account_key(cli_ctx, bfs.account_name, None))
effective_path = _get_path_for_storage(path)
folders = set()
for b in service.list_blobs(bfs.container_name, effective_path + '/', delimiter='/'):
if isinstance(b, Blob):
name = os.path.basename(b.name)
sas = service.generate_blob_shared_access_signature(
bfs.container_name, b.name, BlobPermissions(read=True),
expiry=datetime.datetime.utcnow() + datetime.timedelta(minutes=expiry))
result.append(
LogFile(
name, service.make_blob_url(bfs.container_name, b.name, 'https', sas),
False, b.properties.content_length))
else:
name = b.name.split('/')[-2]
folders.add(name)
result.append(LogFile(name, None, True, None))
result = [f for f in result if f.is_directory or f.name not in folders]
return result | Returns a list of files and directories under given path on mounted blob container.
:param models.AzureBlobFileSystemReference bfs: blob file system reference.
:param str path: path to list files from.
:param int expiry: SAS expiration time in minutes. | _get_files_from_bfs | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _get_path_for_storage(path):
"""Returns a path in format acceptable for passing to storage"""
result = os.path.normpath(path).replace('\\', '/')
if result.endswith('/.'):
result = result[:-2]
return result | Returns a path in format acceptable for passing to storage | _get_path_for_storage | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _get_files_from_afs(cli_ctx, afs, path, expiry):
"""Returns a list of files and directories under given path on mounted Azure File share.
:param models.AzureFileShareReference afs: Azure file share reference.
:param str path: path to list files from.
:param int expiry: SAS expiration time in minutes.
"""
FileService, File, FilePermissions = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
'file#FileService', 'file.models#File', 'file.models#FilePermissions')
result = []
service = FileService(afs.account_name, _get_storage_account_key(cli_ctx, afs.account_name, None))
share_name = afs.azure_file_url.split('/')[-1]
effective_path = _get_path_for_storage(path)
if not service.exists(share_name, effective_path):
return result
for f in service.list_directories_and_files(share_name, effective_path):
if isinstance(f, File):
sas = service.generate_file_shared_access_signature(
share_name, effective_path, f.name, permission=FilePermissions(read=True),
expiry=datetime.datetime.utcnow() + datetime.timedelta(minutes=expiry))
result.append(
LogFile(
f.name, service.make_file_url(share_name, effective_path, f.name, 'https', sas),
False, f.properties.content_length))
else:
result.append(LogFile(f.name, None, True, None))
return result | Returns a list of files and directories under given path on mounted Azure File share.
:param models.AzureFileShareReference afs: Azure file share reference.
:param str path: path to list files from.
:param int expiry: SAS expiration time in minutes. | _get_files_from_afs | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _log_failed_job(resource_group, job):
"""Logs information about failed job
:param str resource_group: resource group name
:param models.Job job: failed job.
"""
logger.warning('The job "%s" in resource group "%s" failed.', job.name, resource_group)
info = job.execution_info # type: models.JobPropertiesExecutionInfo
if info:
logger.warning('Job failed with exit code %d at %s; execution took %s', info.exit_code,
str(info.end_time), str(info.end_time - info.start_time))
errors = info.errors
if errors:
for e in errors:
details = '<none>'
if e.details:
details = '\n' + '\n'.join(['{0}: {1}'.format(d.name, d.value) for d in e.details])
logger.warning('Error message: %s\nDetails:\n %s', e.message, details)
sys.exit(info.exit_code)
logger.warning('Failed job has no execution info') | Logs information about failed job
:param str resource_group: resource group name
:param models.Job job: failed job. | _log_failed_job | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _get_available_local_port():
"""
Gets a random, available local port
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # pylint: disable=no-member
s.bind(('', 0))
s.listen(1)
port = s.getsockname()[1]
s.close()
return port | Gets a random, available local port | _get_available_local_port | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _create_tunnel(remote_host, port, username, password, ssh_private_key, local_addresses, remote_addresses, func):
"""Creates a tunnel to the remote host and runs provided func under the tunnel.
:param str remote_host: ip or address of the remote host
:param int port: the ssh port number
:param str username: username to login under
:param str or None password: the user password
:param str or None ssh_private_key: the path to private ssh key
:param local_addresses: local addresses to be forwarded
:param remote_addresses: target addresses
:param func: a function to run on the remote host. The forwarding is stopped as soon as func completes execution.
"""
from sshtunnel import SSHTunnelForwarder
local_addresses = [(a[0], a[1] if a[1] != 0 else _get_available_local_port()) for a in local_addresses]
with SSHTunnelForwarder((remote_host, port),
ssh_username=username,
ssh_password=password,
ssh_pkey=ssh_private_key,
remote_bind_addresses=remote_addresses,
local_bind_addresses=local_addresses):
func() | Creates a tunnel to the remote host and runs provided func under the tunnel.
:param str remote_host: ip or address of the remote host
:param int port: the ssh port number
:param str username: username to login under
:param str or None password: the user password
:param str or None ssh_private_key: the path to private ssh key
:param local_addresses: local addresses to be forwarded
:param remote_addresses: target addresses
:param func: a function to run on the remote host. The forwarding is stopped as soon as func completes execution. | _create_tunnel | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def _ssh_exec(ip, port, cmdline, username, password, ssh_private_key):
"""Executes the given cmdline on the provided host under given credentials.
:param str ip: id address
:param int port: the ssh port number
:param str cmdline: command line to execute
:param str username: username to login
:param str or None password: the user password
:param str or None ssh_private_key: the path to the private ssh key
"""
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(ip, port, username, password=password, key_filename=ssh_private_key)
transport = ssh.get_transport()
transport.set_keepalive(15)
_, out, err = ssh.exec_command('bash -ilc "{}"'.format(cmdline), get_pty=True)
output_lock = threading.Lock()
def _worker(s):
for item in s:
with output_lock:
print(item, end='')
threads = [threading.Thread(target=_worker, args=(s,)) for s in [out, err]]
for t in threads:
t.start()
# On Windows thread.join() call prevents the master thread from handling Ctrl-C, so we are joining with timeout.
while True:
for t in threads:
t.join(timeout=1)
if not t.is_alive():
return | Executes the given cmdline on the provided host under given credentials.
:param str ip: id address
:param int port: the ssh port number
:param str cmdline: command line to execute
:param str username: username to login
:param str or None password: the user password
:param str or None ssh_private_key: the path to the private ssh key | _ssh_exec | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/custom.py | MIT |
def workspace_list_table_format(result):
"""Format workspace list as a table"""
table = []
for item in result:
table.append(workspace_show_table_format(item))
return table | Format workspace list as a table | workspace_list_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def workspace_show_table_format(workspace):
"""Format the workspace as a table"""
row = OrderedDict()
row['Name'] = workspace['name']
row['Resource Group'] = workspace['resourceGroup']
row['Location'] = workspace['location']
row['State'] = workspace['provisioningState']
return row | Format the workspace as a table | workspace_show_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def cluster_list_table_format(result):
"""Format cluster list as a table."""
table = []
for item in result:
table.append(cluster_show_table_format(item))
return table | Format cluster list as a table. | cluster_list_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def cluster_show_table_format(result):
"""Format cluster as a table."""
from azure.mgmt.core.tools import parse_resource_id
row = OrderedDict()
row['Name'] = result['name']
row['Resource Group'] = result['resourceGroup']
row['Workspace'] = parse_resource_id(result['id'])['name']
row['VM Size'] = result['vmSize']
if result['provisioningState'] == 'deleting':
row['State'] = 'deleting'
else:
row['State'] = result['allocationState']
row['Idle'] = str(result['nodeStateCounts']['idleNodeCount'])
row['Running'] = str(result['nodeStateCounts']['runningNodeCount'])
row['Preparing'] = str(result['nodeStateCounts']['preparingNodeCount'])
row['Leaving'] = str(result['nodeStateCounts']['leavingNodeCount'])
row['Unusable'] = str(result['nodeStateCounts']['unusableNodeCount'])
return row | Format cluster as a table. | cluster_show_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def experiment_list_table_format(result):
"""Format experiment list as a table"""
table = []
for item in result:
table.append(experiment_show_table_format(item))
return table | Format experiment list as a table | experiment_list_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def experiment_show_table_format(experiment):
"""Format the experiment as a table"""
from azure.mgmt.core.tools import parse_resource_id
row = OrderedDict()
row['Name'] = experiment['name']
row['Resource Group'] = experiment['resourceGroup']
row['Workspace'] = parse_resource_id(experiment['id'])['name']
row['State'] = experiment['provisioningState']
return row | Format the experiment as a table | experiment_show_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def job_list_table_format(result):
"""Format job list as a table."""
table = []
for item in result:
table.append(job_show_table_format(item))
return table | Format job list as a table. | job_list_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def job_show_table_format(job):
"""Format job as a table."""
from azure.mgmt.core.tools import parse_resource_id
row = OrderedDict()
row['Name'] = job['name']
cluster = parse_resource_id(job['cluster']['id'])
row['Cluster'] = cluster['resource_name']
row['Cluster RG'] = job['cluster']['resourceGroup']
row['Cluster Workspace'] = cluster['name']
row['Tool'] = job['toolType']
row['Nodes'] = job['nodeCount']
if job['provisioningState'] == 'deleting':
row['State'] = 'deleting'
else:
row['State'] = job['executionState']
if job['executionInfo'] and \
job['executionInfo']['exitCode'] is not None:
row['Exit code'] = str(job['executionInfo']['exitCode'])
else:
row['Exit code'] = ''
return row | Format job as a table. | job_show_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def file_list_table_format(result):
"""Format file list as a table."""
table = []
for item in result:
row = OrderedDict()
row['Name'] = item['name']
row['Type'] = item['fileType']
row['Size'] = '' if item['fileType'] == 'directory' else str(item['contentLength'])
row['Modified'] = item['lastModified'] or ' '
table.append(row)
return table | Format file list as a table. | file_list_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def file_server_list_table_format(result):
"""Format file server list as a table."""
table = []
for item in result:
table.append(file_server_show_table_format(item))
return table | Format file server list as a table. | file_server_list_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def file_server_show_table_format(result):
"""Format file server list as a table."""
row = OrderedDict()
row['Name'] = result['name']
row['Resource Group'] = result['resourceGroup']
row['Size'] = result['vmSize']
disks = result['dataDisks']
if disks:
row['Disks'] = '{0} x {1} Gb'.format(disks['diskCount'], disks['diskSizeInGb'])
mount_settings = result['mountSettings']
if mount_settings:
row['Public IP'] = mount_settings['fileServerPublicIp']
row['Internal IP'] = mount_settings['fileServerInternalIp']
row['Mount Point'] = mount_settings['mountPoint']
return row | Format file server list as a table. | file_server_show_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def remote_login_table_format(result):
"""Format remote login info list as a table."""
table = []
for item in result:
row = OrderedDict()
row['ID'] = item['nodeId']
row['IP'] = item['ipAddress']
row['SSH Port'] = int(item['port'])
table.append(row)
return table | Format remote login info list as a table. | remote_login_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def usage_table_format(result):
"""Format usage information as a table."""
table = []
for item in result:
row = OrderedDict()
row['Value'] = item['name']['localizedValue']
row['Usage'] = item['currentValue'] or "0"
row['Limit'] = item['limit'] or "0"
table.append(row)
return table | Format usage information as a table. | usage_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def node_setup_files_list_table_format(result):
"""Format list of node setup task files"""
table = []
for item in result:
row = OrderedDict()
row['Name'] = item['name']
row['Is directory'] = 'yes' if item['is_directory'] else 'no'
row['Size'] = '' if item['size'] is None else (item['size'] or '0')
table.append(row)
return table | Format list of node setup task files | node_setup_files_list_table_format | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/_format.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/_format.py | MIT |
def _get_mock_storage_accounts_and_keys(accounts_and_keys):
"""Creates a mock storage client which knows about given storage accounts and keys"""
Endpoints = collections.namedtuple('Endpoints', 'file')
Account = collections.namedtuple('Account', 'id name primary_endpoints')
mock_storage_client = MagicMock()
mock_storage_client.storage_accounts.list = MagicMock(
return_value=[Account(
'/subscriptions/000/resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/{0}'.format(a),
a, Endpoints('https://{0}.file.core.windows.net/'.format(a)))
for a in accounts_and_keys.keys()])
Keys = collections.namedtuple('Keys', 'keys')
Key = collections.namedtuple('Key', 'value')
mock_storage_client.storage_accounts.list_keys = MagicMock(
side_effect=lambda _, account: Keys([Key(accounts_and_keys.get(account, None))]))
return mock_storage_client | Creates a mock storage client which knows about given storage accounts and keys | _get_mock_storage_accounts_and_keys | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/batchai/tests/latest/test_batchai_custom.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/batchai/tests/latest/test_batchai_custom.py | MIT |
def test_sb_namespace(self, resource_group):
self.kwargs.update({
'namespacename': self.create_random_name(prefix='sb-nscli', length=20),
'namespacename1': self.create_random_name(prefix='sb-nscli1', length=20),
'namespacename2': self.create_random_name(prefix='sb-nscli2', length=20),
'namespacename3': self.create_random_name(prefix='sb-nscli3', length=20),
'identity1': self.create_random_name(prefix='sb-identity1', length=20),
'identity2': self.create_random_name(prefix='sb-identity2', length=20),
'tags': 'tag1=value1',
'tags2': 'tag2=value2',
'loc': 'East US',
'loc1': 'Australiaeast',
'loc2': 'TaiwanNorth'
})
identity1 = self.cmd('identity create --name {identity1} --resource-group {rg}').get_output_in_json()
self.assertEqual(identity1['name'], self.kwargs['identity1'])
self.kwargs.update({'id1': identity1['id']})
identity2 = self.cmd('identity create --name {identity2} --resource-group {rg}').get_output_in_json()
self.assertEqual(identity2['name'], self.kwargs['identity2'])
self.kwargs.update({'id2': identity2['id']})
# Create standard namespace with disableLocalAuth enabled
namespace = self.cmd('servicebus namespace create --name {namespacename} --resource-group {rg} '
'--sku Standard --location eastus --tags tag1=value1 tag2=value2 '
'--disable-local-auth --minimum-tls-version 1.1').get_output_in_json()
self.assertEqual('Standard', namespace['sku']['name'])
self.assertEqual(self.kwargs['loc'].strip().replace(' ', '').lower(), namespace['location'].strip().replace(' ', '').lower())
self.assertTrue(namespace['disableLocalAuth'])
self.assertFalse(namespace['zoneRedundant'])
self.assertEqual(2, len(namespace['tags']))
# Create Premium namespace with Sku Capacity 2
namespace = self.cmd('servicebus namespace create --name {namespacename1} --resource-group {rg} '
'--sku Premium --location eastus').get_output_in_json()
self.assertEqual(1, namespace['sku']['capacity'])
self.assertEqual('Premium', namespace['sku']['name'])
self.assertEqual('1.2', namespace['minimumTlsVersion'])
self.assertEqual(self.kwargs['loc'].strip().replace(' ', '').lower(), namespace['location'].strip().replace(' ', '').lower())
self.assertFalse(namespace['disableLocalAuth'])
self.assertFalse(namespace['zoneRedundant'])
self.assertEqual(0, len(namespace['tags']))
# Update Capacity of Premium namespace
namespace = self.cmd('servicebus namespace update --name {namespacename1} --resource-group {rg} '
'--capacity 4 --tags {tags} {tags2}').get_output_in_json()
self.assertEqual(4, namespace['sku']['capacity'])
self.assertEqual('Premium', namespace['sku']['name'])
self.assertEqual('1.2', namespace['minimumTlsVersion'])
self.assertEqual(self.kwargs['loc'].strip().replace(' ', '').lower(), namespace['location'].strip().replace(' ', '').lower())
self.assertFalse(namespace['disableLocalAuth'])
self.assertFalse(namespace['zoneRedundant'])
self.assertEqual(2, len(namespace['tags']))
# Set disableLocalAuth to False using update command
namespace = self.cmd('servicebus namespace update --name {namespacename1} --resource-group {rg} '
'--disable-local-auth').get_output_in_json()
self.assertEqual('Premium', namespace['sku']['name'])
self.assertEqual('1.2', namespace['minimumTlsVersion'])
self.assertEqual(self.kwargs['loc'].strip().replace(' ', '').lower(), namespace['location'].strip().replace(' ', '').lower())
self.assertTrue(namespace['disableLocalAuth'])
self.assertFalse(namespace['zoneRedundant'])
self.assertEqual(2, len(namespace['tags']))
# Create premium namespace with SystemAssigned and UserAssigned Identity
namespace = self.cmd('servicebus namespace create --resource-group {rg} --name {namespacename2} '
'--location eastus --sku Premium --mi-system-assigned --mi-user-assigned {id1} {id2} '
'--capacity 2 --zone-redundant --premium-messaging-partitions 2 ').get_output_in_json()
self.assertEqual(2, namespace['sku']['capacity'])
self.assertEqual('Premium', namespace['sku']['name'])
self.assertEqual('1.2', namespace['minimumTlsVersion'])
self.assertEqual(self.kwargs['loc'].strip().replace(' ', '').lower(), namespace['location'].strip().replace(' ', '').lower())
self.assertFalse(namespace['disableLocalAuth'])
self.assertTrue(namespace['zoneRedundant'])
self.assertEqual(2, namespace['premiumMessagingPartitions'])
self.assertEqual(0, len(namespace['tags']))
# List Namespace within ResourceGroup
self.cmd('servicebus namespace list --resource-group {rg}')
# List all Namespace within subscription
self.cmd('servicebus namespace list')
# create a namespace with geo-replication enable
namespace = self.cmd('servicebus namespace create --resource-group {rg} --name {namespacename3} '
'--location {loc1} --sku Premium --geo-data-replication-config role-type=Primary location-name={loc1} '
'--geo-data-replication-config role-type=Secondary location-name={loc2}').get_output_in_json()
time.sleep(200)
'''namespace = self.cmd('servicebus namespace replica add --resource-group {rg} --name {namespacename3} '
'--geo-data-replication-config role-type=Secondary location-name={loc2} ').get_output_in_json()'''
self.assertEqual(2, len(namespace['geoDataReplication']['locations']))
namespace = self.cmd('servicebus namespace update --resource-group {rg} --name {namespacename3} '
'--max-replication-lag-duration-in-seconds 300').get_output_in_json()
self.assertEqual(300, namespace['geoDataReplication']['maxReplicationLagDurationInSeconds'])
time.sleep(600)
namespace = self.cmd('servicebus namespace failover --name {namespacename3} --resource-group {rg} '
'--primary-location {loc2} ').get_output_in_json()
'''namespace = self.cmd('servicebus namespace replica remove --resource-group {rg} --name {namespacename3} '
'--geo-data-replication-config cluster-arm-id={clusterid2} role-type=Secondary location-name={loc2} ').get_output_in_json()'''
# Delete Namespace list by ResourceGroup
self.cmd('servicebus namespace delete --resource-group {rg} --name {namespacename} ')
self.cmd('servicebus namespace delete --resource-group {rg} --name {namespacename1} ')
self.cmd('servicebus namespace delete --resource-group {rg} --name {namespacename2} ') | namespace = self.cmd('servicebus namespace replica add --resource-group {rg} --name {namespacename3} '
'--geo-data-replication-config role-type=Secondary location-name={loc2} ').get_output_in_json() | test_sb_namespace | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/servicebus/tests/latest/test_servicebus_namespace_commands.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/servicebus/tests/latest/test_servicebus_namespace_commands.py | MIT |
def test_redis_cache_export_import(self, resource_group):
randName = self.create_random_name(prefix=name_prefix, length=24)
self.kwargs = {
'rg': resource_group,
'name': randName,
'location': 'westus',
'sku': premium_sku,
'size': premium_size,
'prefix': "redistest",
'containersasURL': "https://####",
'filesasURL': "https://####",
'storageName': "str"+randName[:-3],
'containerName': "testcontainer",
'userIdentity': user_identity,
'storageSubscriptionId': "6364f508-1150-4431-b973-c3f133466e56", # replace it with your subscription id while running tests in live mode
'startTime': (datetime.datetime.utcnow() - datetime.timedelta(minutes=60)).strftime(f"%Y-%m-%dT%H:%MZ"),
'expiryTime': (datetime.datetime.utcnow() + datetime.timedelta(minutes=200)).strftime(f"%Y-%m-%dT%H:%MZ")
}
if self.is_live :
storage = self.cmd('az storage account create -n {storageName} -g {rg} -l {location} --sku Standard_LRS').get_output_in_json()
self.cmd('az storage container create -n {containerName} --account-name {storageName}')
containersasToken = self.cmd('az storage container generate-sas -n {containerName} --account-name {storageName} --permissions dlrw --expiry {expiryTime} --start {startTime}').output
containersasURL = storage['primaryEndpoints']['blob'] + self.kwargs['containerName'] + "?" + containersasToken[1:-2]
filesasURL = storage['primaryEndpoints']['blob'] + self.kwargs['containerName'] + "/"+ self.kwargs["prefix"] + "?" + containersasToken[1:-2]
self.kwargs['containersasURL'] = containersasURL
self.kwargs['filesasURL'] = filesasURL
self.cmd('az redis create -n {name} -g {rg} -l {location} --sku {sku} --vm-size {size}')
'''
self.cmd('az redis export -n {name} -g {rg} --prefix {prefix} --container \'{containersasURL}\' --preferred-data-archive-auth-method SAS')
if self.is_live:
time.sleep(5 * 60)
self.cmd('az redis import-method -n {name} -g {rg} --files "{filesasURL}" --preferred-data-archive-auth-method SAS')
if self.is_live:
time.sleep(5 * 60)
self.cmd('az redis import -n {name} -g {rg} --files "{filesasURL}" --preferred-data-archive-auth-method SAS')
if self.is_live:
time.sleep(5 * 60)
'''
# Test import/export with managed identity
if self.is_live:
# Setup storage account and cache with managed identity
self.cmd('az redis identity assign -g {rg} -n {name} --mi-user-assigned {userIdentity}')
identities = self.cmd('az identity list').get_output_in_json()
identity = list(filter(lambda x:x['id']==user_identity,identities))[0]
principal_id = identity["principalId"]
storage_id = self.cmd('az storage account show -g {rg} -n {storageName}').get_output_in_json()['id']
self.cmd(f'az role assignment create --assignee-object-id {principal_id} --role "Storage Blob Data Contributor" --scope {storage_id}')
#Remove SAS token from URLs (not necessary with managed identity)
self.kwargs['containersasURL'] = self.kwargs['containersasURL'].split('?')[0]
self.kwargs['filesasURL'] = self.kwargs['filesasURL'].split('?')[0]
self.cmd('az redis export -n {name} -g {rg} --prefix {prefix} --container \'{containersasURL}\' --preferred-data-archive-auth-method ManagedIdentity --storage-subscription-id {storageSubscriptionId}')
# TODO: un comment after July DP release
# self.cmd('az redis import -n {name} -g {rg} --files {filesasURL} --preferred-data-archive-auth-method ManagedIdentity --storage-subscription-id {storageSubscriptionId}')
self.cmd('az redis delete -n {name} -g {rg} -y') | self.cmd('az redis export -n {name} -g {rg} --prefix {prefix} --container \'{containersasURL}\' --preferred-data-archive-auth-method SAS')
if self.is_live:
time.sleep(5 * 60)
self.cmd('az redis import-method -n {name} -g {rg} --files "{filesasURL}" --preferred-data-archive-auth-method SAS')
if self.is_live:
time.sleep(5 * 60)
self.cmd('az redis import -n {name} -g {rg} --files "{filesasURL}" --preferred-data-archive-auth-method SAS')
if self.is_live:
time.sleep(5 * 60) | test_redis_cache_export_import | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/redis/tests/latest/test_redis_scenario.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/redis/tests/latest/test_redis_scenario.py | MIT |
def provision(self):
'''Create the target resource, and return the parameters for connection creation.
'''
target_type = self._get_target_type()
creation_steps = AddonConfig.get(target_type).get('create')
logger.warning('Start creating a new %s', target_type.value)
for cnt, step in enumerate(creation_steps):
# apply parmeters to format the command
cmd = step.format(**self._params)
try:
run_cli_cmd(cmd)
except CLIInternalError as err:
logger.warning('Creation failed, start rolling back')
self.rollback(cnt)
raise CLIInternalError('Provision failed, please create the target resource manually '
'and then create the connection. Error details: {}'.format(str(err)))
target_id = self.get_target_id()
logger.warning('Created, the resource id is: %s', target_id)
auth_info = self.get_auth_info()
logger.warning('The auth info used to create connection is: %s', str(auth_info))
return target_id, auth_info | Create the target resource, and return the parameters for connection creation. | provision | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def get_target_id(self):
'''Get the resource id of the provisioned target resource.
'''
target_type = self._get_target_type()
target_resource = TARGET_RESOURCES.get(target_type)
return target_resource.format(**self._params) | Get the resource id of the provisioned target resource. | get_target_id | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def get_auth_info(self):
'''Get the auth info for the provisioned target resource
'''
source_type = self._get_source_type()
target_type = self._get_target_type()
default_auth_type = SUPPORTED_AUTH_TYPE.get(source_type).get(target_type)[0]
if default_auth_type == AUTH_TYPE.SystemIdentity:
return {'auth_type': 'systemAssignedIdentity'}
# Override the method when default auth type is not system identity
raise CLIInternalError('The method get_auth_info should be overridden '
'when default auth type is not system identity.') | Get the auth info for the provisioned target resource | get_auth_info | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def rollback(self, cnt=None):
'''Delete the created resources if creation fails
:param cnt: Step in which the creation fails
'''
target_type = self._get_target_type()
deletion_steps = AddonConfig.get(target_type).get('delete')
logger.warning('Start deleting the %s', target_type.value)
if cnt is None:
cnt = len(deletion_steps)
# deletion should be in reverse order
for index in range(cnt - 1, -1, -1):
# apply parameters to format the command
cmd = deletion_steps[index].format(**self._params)
try:
run_cli_cmd(cmd)
except CLIInternalError:
logger.warning('Rollback failed, please manually check and delete the unintended resources '
'in resource group: %s. You may use this command: %s',
self._params.get('source_resource_group'), cmd)
return
logger.warning('Rollback succeeded, the created resources were successfully deleted') | Delete the created resources if creation fails
:param cnt: Step in which the creation fails | rollback | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def _prepare_params(self):
'''Prepare the parameters used in CLI command
'''
params = {'subscription': get_subscription_id(self._cmd.cli_ctx)}
target_type = self._get_target_type()
for arg, val in AddonConfig.get(target_type).get('params').items():
func = getattr(self, val, None)
if func:
val = func()
params[arg] = val
return params | Prepare the parameters used in CLI command | _prepare_params | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def _retrive_source_rg(self):
'''Retrieve the resource group name in source resource id
'''
if not is_valid_resource_id(self._source_id):
e = InvalidArgumentValueError('The source resource id is invalid: {}'.format(self._source_id))
telemetry.set_exception(e, "source-id-invalid")
raise e
segments = parse_resource_id(self._source_id)
return segments.get('resource_group') | Retrieve the resource group name in source resource id | _retrive_source_rg | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def _retrive_source_loc(self):
'''Retrieve the location of source resource group
'''
rg = self._retrive_source_rg()
output = run_cli_cmd('az group show -n "{}" -o json'.format(rg))
return output.get('location') | Retrieve the location of source resource group | _retrive_source_loc | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def _get_source_type(self):
'''Get source resource type
'''
from ._validators import get_resource_regex
for _type, resource in SOURCE_RESOURCES.items():
matched = re.match(get_resource_regex(resource), self._source_id)
if matched:
return _type
e = InvalidArgumentValueError('The source resource id is invalid: {}'.format(self._source_id))
telemetry.set_exception(e, "source-id-invalid")
raise e | Get source resource type | _get_source_type | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def _get_target_type(self):
'''Get target resource type
'''
from ._validators import get_target_resource_name
return get_target_resource_name(self._cmd) | Get target resource type | _get_target_type | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_addon_factory.py | MIT |
def should_load_source(source):
'''Check whether to load `az {source} connection`
If {source} is an extension (e.g, spring-cloud), load the command group only when {source} is installed
:param source: the source resource type
'''
from azure.cli.core.extension.operations import list_extensions
from ._resource_config import SOURCE_RESOURCES_IN_EXTENSION
# names of CLI installed extensions
installed_extensions = [item.get('name') for item in list_extensions()]
# if source resource is released as an extension, load our command groups
# only when the extension is installed
if source not in SOURCE_RESOURCES_IN_EXTENSION or source.value in installed_extensions:
return True
return False | Check whether to load `az {source} connection`
If {source} is an extension (e.g, spring-cloud), load the command group only when {source} is installed
:param source: the source resource type | should_load_source | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | MIT |
def generate_random_string(length=5, prefix='', lower_only=False, ensure_complexity=False):
'''Generate a random string
:param length: the length of generated random string, not including the prefix
:param prefix: the prefix string
:param lower_only: ensure the generated string only includes lower case characters
:param ensure_complexity: ensure the generated string satisfy complexity requirements
'''
import random
import string
if lower_only and ensure_complexity:
raise CLIInternalError(
'lower_only and ensure_complexity can not both be specified to True')
if ensure_complexity and length < 8:
raise CLIInternalError('ensure_complexity needs length >= 8')
character_set = string.ascii_letters + string.digits
if lower_only:
character_set = string.ascii_lowercase
while True:
randstr = '{}{}'.format(prefix, ''.join(
random.sample(character_set, length)))
lowers = [c for c in randstr if c.islower()]
uppers = [c for c in randstr if c.isupper()]
numbers = [c for c in randstr if c.isnumeric()]
if not ensure_complexity or (lowers and uppers and numbers):
break
return randstr | Generate a random string
:param length: the length of generated random string, not including the prefix
:param prefix: the prefix string
:param lower_only: ensure the generated string only includes lower case characters
:param ensure_complexity: ensure the generated string satisfy complexity requirements | generate_random_string | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | MIT |
def run_cli_cmd(cmd, retry=0, interval=0, should_retry_func=None):
'''Run a CLI command
:param cmd: The CLI command to be executed
:param retry: The times to re-try
:param interval: The seconds wait before retry
'''
output = _in_process_execute(cmd)
if output.error or (should_retry_func and should_retry_func(output)):
if retry:
time.sleep(interval)
return run_cli_cmd(cmd, retry - 1, interval)
raise CLIInternalError('Command execution failed, command is: '
'{}, error message is: \n {}'.format(cmd, output.error))
return output.result | Run a CLI command
:param cmd: The CLI command to be executed
:param retry: The times to re-try
:param interval: The seconds wait before retry | run_cli_cmd | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | MIT |
def set_user_token_header(client, cli_ctx):
'''Set user token header to work around OBO'''
# pylint: disable=protected-access
# HACK: set custom header to work around OBO
# profile = Profile(cli_ctx=cli_ctx)
# creds, _, _ = profile.get_raw_token()
# client._client._config.headers_policy._headers['x-ms-serviceconnector-user-token'] = creds[1]
# HACK: hide token header
client._config.logging_policy.headers_to_redact.append(
'x-ms-serviceconnector-user-token')
return client | Set user token header to work around OBO | set_user_token_header | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | MIT |
def set_user_token_by_source_and_target(client, cli_ctx, source, target):
'''Set user token header to work around OBO according to source and target'''
if source in SOURCE_RESOURCES_USERTOKEN or target in TARGET_RESOURCES_USERTOKEN:
return set_user_token_header(client, cli_ctx)
return client | Set user token header to work around OBO according to source and target | set_user_token_by_source_and_target | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_utils.py | MIT |
def get_source_resource_name(cmd):
'''Get source resource name
e.g, az webapp connection list: => RESOURCE.WebApp
'''
source = None
source_name = cmd.name.split(' ')[0]
if source_name == RESOURCE.Local.value.lower():
return RESOURCE.Local
for item in SOURCE_RESOURCES:
if item.value.lower() == source_name.lower():
source = item
return source | Get source resource name
e.g, az webapp connection list: => RESOURCE.WebApp | get_source_resource_name | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_target_resource_name(cmd):
'''Get target resource name
e.g, az webapp connection create postgres: => RESOURCE.Postgres
'''
target = None
target_name = cmd.name.split(' ')[-1]
for item in TARGET_RESOURCES:
if item.value.lower() == target_name.lower():
target = item
return target | Get target resource name
e.g, az webapp connection create postgres: => RESOURCE.Postgres | get_target_resource_name | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_resource_type_by_id(resource_id):
'''Get source or target resource type by resource id
'''
target_type = None
all_resources = {}
all_resources.update(SOURCE_RESOURCES)
all_resources.update(TARGET_RESOURCES)
for _type, _id in all_resources.items():
matched = re.match(get_resource_regex(_id), resource_id, re.IGNORECASE)
if matched:
target_type = _type
return target_type | Get source or target resource type by resource id | get_resource_type_by_id | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_resource_regex(resource):
'''Replace '{...}' with '[^/]*' for regex matching
'''
regex = resource
for item in re.findall(r'(\{[^\{\}]*\})', resource):
regex = regex.replace(item, '[^/]*')
return regex | Replace '{...}' with '[^/]*' for regex matching | get_resource_regex | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def check_required_args(resource, cmd_arg_values):
'''Check whether a resource's required arguments are in cmd_arg_values
'''
args = re.findall(r'\{([^\{\}]*)\}', resource)
if 'subscription' in args:
args.remove('subscription')
for arg in args:
if not cmd_arg_values.get(arg, None):
return False
return True | Check whether a resource's required arguments are in cmd_arg_values | check_required_args | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def generate_connection_name(cmd):
'''Generate connection name for users if not provided
'''
target = get_target_resource_name(cmd).value.replace('-', '')
randstr = ''.join(random.sample(string.ascii_lowercase + string.digits, 5))
name = '{}_{}'.format(target, randstr)
logger.warning('Connection name is not specified, use generated one: --connection %s', name)
return name | Generate connection name for users if not provided | generate_connection_name | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_client_type(cmd, namespace):
'''Infer client type from source resource
'''
def _infer_webapp(source_id):
value_type_map = {
'python': CLIENT_TYPE.Python,
'dotnet': CLIENT_TYPE.Dotnet,
'dotnetcore': CLIENT_TYPE.Dotnet,
'php': CLIENT_TYPE.Php,
'java': CLIENT_TYPE.Java,
'nodejs': CLIENT_TYPE.Nodejs,
'node': CLIENT_TYPE.Nodejs,
'ruby': CLIENT_TYPE.Ruby,
}
prop_type_map = {
'javaVersion': CLIENT_TYPE.Java,
'netFrameworkVersion': CLIENT_TYPE.Dotnet,
'pythonVersion': CLIENT_TYPE.Python,
'nodeVersion': CLIENT_TYPE.Nodejs,
'phpVersion': CLIENT_TYPE.Php,
}
client_type = None
try:
output = run_cli_cmd('az webapp show --id "{}" -o json'.format(source_id))
prop = output.get('siteConfig').get('linuxFxVersion', None) or\
output.get('siteConfig').get('windowsFxVersion', None)
# use 'linuxFxVersion' and 'windowsFxVersion' property to decide
if prop:
language = prop.split('|')[0].lower()
client_type = value_type_map.get(language, None)
# use '*Version' property to decide
if client_type is None:
for prop, _type in prop_type_map.items():
if output.get('siteConfig', {}).get(prop, None) is not None:
client_type = _type
break
except Exception: # pylint: disable=broad-except
pass
return client_type
def _infer_springcloud(source_id):
client_type = CLIENT_TYPE.SpringBoot
try:
segments = parse_resource_id(source_id)
output = run_cli_cmd('az spring app show -g "{}" -s "{}" -n "{}"'
' -o json'.format(segments.get('resource_group'), segments.get('name'),
segments.get('child_name_1')))
prop_val = output.get('properties')\
.get('activeDeployment')\
.get('properties')\
.get('deploymentSettings')\
.get('runtimeVersion')
language = prop_val.split('_')[0].lower()
if language == 'java':
client_type = CLIENT_TYPE.Java
elif language == 'netcore':
client_type = CLIENT_TYPE.Dotnet
except Exception: # pylint: disable=broad-except
pass
return client_type
# fallback to use None as client type
client_type = None
if 'webapp' in cmd.name:
client_type = _infer_webapp(namespace.source_id)
elif 'spring-cloud' in cmd.name or 'spring' in cmd.name:
client_type = _infer_springcloud(namespace.source_id)
method = 'detected'
if client_type is None:
client_type = CLIENT_TYPE.Blank
method = 'default'
logger.warning('Client type is not specified, use %s one: --client-type %s', method, client_type.value)
return client_type.value | Infer client type from source resource | get_client_type | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def interactive_input(arg, hint):
'''Get interactive inputs from users
'''
value = None
cmd_value = None
if arg == 'secret_auth_info':
name = prompt('User name of database (--secret name=): ')
secret = prompt_pass('Password of database (--secret secret=): ')
value = {
'name': name,
'secret_info': {
'secret_type': 'rawValue',
'value': secret
},
'auth_type': 'secret'
}
cmd_value = 'name={} secret={}'.format(name, '*' * len(secret))
elif arg == 'service_principal_auth_info_secret':
client_id = prompt('ServicePrincipal client-id (--service-principal client_id=): ')
object_id = prompt('Enterprise Application object-id (--service-principal object-id=): ')
secret = prompt_pass('ServicePrincipal secret (--service-principal secret=): ')
value = {
'client_id': client_id,
'object-id': object_id,
'secret': secret,
'auth_type': 'servicePrincipalSecret'
}
cmd_value = 'client-id={} principal-id={} secret={}'.format(client_id, object_id, '*' * len(secret))
elif arg == 'user_identity_auth_info':
client_id = prompt('UserAssignedIdentity client-id (--user-identity client_id=): ')
subscription_id = prompt('UserAssignedIdentity subscription-id (--user-identity subs_id=): ')
value = {
'client_id': client_id,
'subscription_id': subscription_id,
'auth_type': 'userAssignedIdentity'
}
cmd_value = 'client-id={} subscription-id={}'.format(client_id, subscription_id)
elif arg == 'user_account_auth_info':
object_id = prompt(
'User Account object-id (--user-account object-id=): ')
value = {
'auth_type': 'userAccount',
'principal_id': object_id
}
cmd_value = 'object-id={}'.format(object_id)
else:
value = prompt('{}: '.format(hint))
cmd_value = value
# check blank value
if isinstance(value, dict):
for sub_val in value.values():
if not sub_val:
raise RequiredArgumentMissingError('{} should not be blank'.format(hint))
elif not value:
raise RequiredArgumentMissingError('{} should not be blank'.format(hint))
return value, cmd_value | Get interactive inputs from users | interactive_input | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_local_context_value(cmd, arg):
'''Get local context values
'''
groups = ['all', 'cupertino', 'serviceconnector', 'postgres']
arg_map = {
'source_resource_group': ['resource_group_name'],
'target_resource_group': ['resource_group_name'],
'server': ['server_name', "server"],
'database': ['database_name', "database"],
'site': ['webapp_name']
}
for group in groups:
possible_args = arg_map.get(arg, [arg])
for item in possible_args:
if cmd.cli_ctx.local_context.get(group, item):
return cmd.cli_ctx.local_context.get(group, item)
return None | Get local context values | get_local_context_value | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def opt_out_auth(namespace):
'''Validate if config and auth are both opted out
'''
opt_out_list = getattr(namespace, 'opt_out_list', None)
if opt_out_list is not None and \
OPT_OUT_OPTION.AUTHENTICATION.value in opt_out_list:
return True
return False | Validate if config and auth are both opted out | opt_out_auth | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def intelligent_experience(cmd, namespace, missing_args):
'''Use local context and interactive inputs to get arg values
'''
cmd_arg_values = {}
# use commandline source/target resource args
for arg in missing_args:
if getattr(namespace, arg, None) is not None:
cmd_arg_values[arg] = getattr(namespace, arg)
# for auth info without additional parameters
if 'secret_auth_info_auto' in missing_args:
cmd_arg_values['secret_auth_info_auto'] = {
'auth_type': 'secret'
}
logger.warning('Auth info is not specified, use default one: --secret')
elif 'system_identity_auth_info' in missing_args:
cmd_arg_values['system_identity_auth_info'] = {
'auth_type': 'systemAssignedIdentity'
}
logger.warning('Auth info is not specified, use default one: --system-identity')
if opt_out_auth(namespace):
logger.warning('Auth info is only used to generate configurations. %s',
'Skip enabling identity and role assignments.')
elif 'user_account_auth_info' in missing_args:
cmd_arg_values['user_account_auth_info'] = {
'auth_type': 'userAccount'
}
logger.warning(
'Auth info is not specified, use default one: --user-account')
if cmd.cli_ctx.local_context.is_on:
# arguments found in local context
context_arg_values = {}
for arg in missing_args:
if arg not in cmd_arg_values:
if get_local_context_value(cmd, arg):
context_arg_values[arg] = get_local_context_value(cmd, arg)
# apply local context arguments
param_str = ''
for k, v in context_arg_values.items():
option = missing_args[k].get('options')[0]
value = v
param_str += '{} {} '.format(option, value)
if param_str:
logger.warning('Apply local context arguments: %s', param_str.strip())
cmd_arg_values.update(context_arg_values)
# arguments from interactive inputs
param_str = ''
for arg in missing_args:
if arg not in cmd_arg_values:
hint = '{} ({})'.format(missing_args[arg].get('help'), '/'.join(missing_args[arg].get('options')))
value, cmd_value = interactive_input(arg, hint)
cmd_arg_values[arg] = value
# show applied params
option = missing_args[arg].get('options')[0]
param_str += '{} {} '.format(option, cmd_value)
if param_str:
logger.warning('Apply interactive input arguments: %s', param_str.strip())
return cmd_arg_values | Use local context and interactive inputs to get arg values | intelligent_experience | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def validate_source_resource_id(cmd, namespace):
'''Validate resource id of a source resource
'''
if getattr(namespace, 'source_id', None):
if not is_valid_resource_id(namespace.source_id):
e = InvalidArgumentValueError('Resource id is invalid: {}'.format(namespace.source_id))
telemetry.set_exception(e, 'source-id-invalid')
raise e
source = get_source_resource_name(cmd)
# For Web App, match slot pattern first:
if source == RESOURCE.WebApp:
slotPattern = WEB_APP_SLOT_RESOURCE
matched = re.match(get_resource_regex(slotPattern), namespace.source_id, re.IGNORECASE)
if matched:
namespace.source_id = matched.group()
return True
if source == RESOURCE.SpringCloud:
deploymentPattern = SPRING_APP_DEPLOYMENT_RESOURCE
matched = re.match(get_resource_regex(deploymentPattern), namespace.source_id, re.IGNORECASE)
if matched:
namespace.source_id = matched.group()
return True
# For other source and Web App which cannot match slot pattern
pattern = SOURCE_RESOURCES.get(source)
matched = re.match(get_resource_regex(pattern),
namespace.source_id, re.IGNORECASE)
if matched:
namespace.source_id = matched.group()
return True
e = InvalidArgumentValueError(
'Unsupported source resource id: {}. '
'Source id pattern should be: {}'.format(namespace.source_id, pattern))
telemetry.set_exception(e, 'source-id-unsupported')
raise e
return False | Validate resource id of a source resource | validate_source_resource_id | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def validate_connection_id(namespace):
'''Validate resource id of a connection
'''
if getattr(namespace, 'indentifier', None):
matched = False
for resource in list(SOURCE_RESOURCES.values()) + [WEB_APP_SLOT_RESOURCE, SPRING_APP_DEPLOYMENT_RESOURCE]:
regex = '({})/providers/Microsoft.ServiceLinker/linkers/([^/]*)'.format(get_resource_regex(resource))
matched = re.match(regex, namespace.indentifier, re.IGNORECASE)
if matched:
namespace.source_id = matched.group(1)
namespace.connection_name = matched.group(2)
return True
if not matched:
e = InvalidArgumentValueError('Connection id is invalid: {}'.format(namespace.indentifier))
telemetry.set_exception(e, 'connection-id-invalid')
raise e
return False | Validate resource id of a connection | validate_connection_id | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def validate_target_resource_id(cmd, namespace):
'''Validate resource id of a target resource
'''
if getattr(namespace, 'target_id', None):
target = get_target_resource_name(cmd)
if not (target == RESOURCE.FabricSql) and not is_valid_resource_id(namespace.target_id):
e = InvalidArgumentValueError('Resource id is invalid: {}'.format(namespace.target_id))
telemetry.set_exception(e, 'target-id-invalid')
raise e
pattern = TARGET_RESOURCES.get(target)
matched = re.match(get_resource_regex(pattern), namespace.target_id, re.IGNORECASE)
if matched:
namespace.target_id = matched.group()
return True
e = InvalidArgumentValueError('Target resource id is invalid: {}. '
'Target id pattern should be: {}'.format(namespace.target_id, pattern))
telemetry.set_exception(e, 'target-id-unsupported')
raise e
return False | Validate resource id of a target resource | validate_target_resource_id | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def validate_opt_out_auth_and_config(namespace):
'''Validate if config and auth are both opted out
'''
opt_out_list = getattr(namespace, 'opt_out_list', None)
if opt_out_list is not None and \
OPT_OUT_OPTION.AUTHENTICATION.value in opt_out_list and \
OPT_OUT_OPTION.CONFIGURATION_INFO.value in opt_out_list:
return True
return False | Validate if config and auth are both opted out | validate_opt_out_auth_and_config | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_missing_source_args(cmd, namespace):
'''Get source resource related args
'''
source = get_source_resource_name(cmd)
missing_args = {}
for arg, content in SOURCE_RESOURCES_PARAMS.get(source, {}).items():
missing_args[arg] = content
# For WebApp, slot may needed
args = SOURCE_RESOURCES_OPTIONAL_PARAMS.get(source)
if args:
for arg, content in args.items():
if getattr(namespace, arg, None):
missing_args[arg] = content
return missing_args | Get source resource related args | get_missing_source_args | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_missing_source_create_args(cmd, namespace):
'''Get source resource related args in create
'''
source = get_source_resource_name(cmd)
missing_args = {}
args = SOURCE_RESOURCES_CREATE_PARAMS.get(source)
if args:
for arg, content in args.items():
if not getattr(namespace, arg, None):
missing_args[arg] = content
return missing_args | Get source resource related args in create | get_missing_source_create_args | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_missing_target_args(cmd):
'''Get target resource related args
'''
target = get_target_resource_name(cmd)
missing_args = {}
if target in TARGET_RESOURCES_PARAMS:
for arg, content in TARGET_RESOURCES_PARAMS.get(target).items():
missing_args[arg] = content
return missing_args | Get target resource related args | get_missing_target_args | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_missing_auth_args(cmd, namespace):
'''Get auth info related args user didn't provide in command line
'''
source = get_source_resource_name(cmd)
target = get_target_resource_name(cmd)
missing_args = {}
# check if there are auth_info related params
auth_param_exist = False
for _, params in AUTH_TYPE_PARAMS.items():
for arg in params:
if getattr(namespace, arg, None):
auth_param_exist = True
break
if target == RESOURCE.ConfluentKafka:
return missing_args
# when keyvault csi is enabled, auth_type is userIdentity without subs_id and client_id
if source == RESOURCE.KubernetesCluster and target == RESOURCE.KeyVault:
if getattr(namespace, 'enable_csi', None):
if auth_param_exist:
logger.warning('When CSI driver is enabled (--enable-csi), \
Service Connector uses the user assigned managed identity generated by AKS \
azure-keyvault-secrets-provider add-on to authenticate. \
Additional auth info is ignored.')
setattr(namespace, 'user_identity_auth_info', {
'auth_type': 'userAssignedIdentity'
})
return missing_args
if not auth_param_exist:
setattr(namespace, 'enable_csi', True)
setattr(namespace, 'user_identity_auth_info', {
'auth_type': 'userAssignedIdentity'
})
logger.warning('Auth info is not specified, use secrets store csi driver as default: --enable-csi')
return missing_args
# ACA as target use null auth
if target == RESOURCE.ContainerApp:
return missing_args
if source and target and not auth_param_exist:
default_auth_type = SUPPORTED_AUTH_TYPE.get(source, {}).get(target, {})[0]
for arg, content in AUTH_TYPE_PARAMS.get(default_auth_type).items():
if getattr(namespace, arg, None) is None:
missing_args[arg] = content
return missing_args | Get auth info related args user didn't provide in command line | get_missing_auth_args | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_missing_connection_name(namespace):
'''Get connection_name arg if user didn't provide it in command line
'''
missing_args = {}
if getattr(namespace, 'connection_name', None) is None:
missing_args['connection_name'] = {
'help': 'The connection name',
'options': ['--connection']
}
return missing_args | Get connection_name arg if user didn't provide it in command line | get_missing_connection_name | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def get_missing_client_type(namespace):
'''Get client_type arg if user didn't provide it in command line
'''
missing_args = {}
if getattr(namespace, 'client_type', None) is None:
missing_args['client_type'] = {
'help': 'Client type of the connection',
'options': ['--client-type']
}
return missing_args | Get client_type arg if user didn't provide it in command line | get_missing_client_type | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def validate_local_default_params(cmd, namespace): # pylint: disable=unused-argument
'''Get missing args of local connection command
'''
missing_args = {}
if getattr(namespace, 'id', None):
namespace.id = namespace.id.lower()
if not is_valid_resource_id(namespace.id):
raise InvalidArgumentValueError(
'Resource id is invalid: {}'.format(namespace.id))
resource = LOCAL_CONNECTION_RESOURCE.lower()
for item in re.findall(r'(\{[^\{\}]*\})', resource):
resource = resource.replace(item, '([^/]*)')
matched = re.match(resource, namespace.id)
if matched:
namespace.resource_group_name = matched.group(2)
namespace.location = matched.group(3)
namespace.connection_name = matched.group(4)
else:
raise InvalidArgumentValueError(
'Unsupported resource id: {}'.format(namespace.id))
else:
if not getattr(namespace, 'resource_group_name', None):
missing_args.update(
{'resource_group_name': LOCAL_CONNECTION_PARAMS.get("resource_group_name")})
return missing_args | Get missing args of local connection command | validate_local_default_params | python | Azure/azure-cli | src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py | MIT |
def validate_list_params(cmd, namespace):
    '''Collect the args still missing for the list command
    '''
    args_to_prompt = {}
    if not validate_source_resource_id(cmd, namespace):
        args_to_prompt.update(get_missing_source_args(cmd, namespace))
    return args_to_prompt
def validate_create_params(cmd, namespace):
    '''Collect the args still missing for the create command
    '''
    args_to_prompt = {}
    if not validate_source_resource_id(cmd, namespace):
        args_to_prompt.update(get_missing_source_args(cmd, namespace))
        args_to_prompt.update(get_missing_source_create_args(cmd, namespace))
    if not validate_target_resource_id(cmd, namespace):
        args_to_prompt.update(get_missing_target_args(cmd))
    # Auth info is only prompted when the user did not opt out of auth/config
    if not validate_opt_out_auth_and_config(namespace):
        args_to_prompt.update(get_missing_auth_args(cmd, namespace))
    return args_to_prompt
def validate_local_create_params(cmd, namespace):
    '''Collect the args still missing for the local connection create command
    '''
    args_to_prompt = {}
    if not validate_target_resource_id(cmd, namespace):
        args_to_prompt.update(get_missing_target_args(cmd))
    args_to_prompt.update(get_missing_auth_args(cmd, namespace))
    return args_to_prompt
def validate_addon_params(cmd, namespace):
    '''Collect the args still missing for the add command used with '--new'
    '''
    args_to_prompt = {}
    if not validate_source_resource_id(cmd, namespace):
        args_to_prompt.update(get_missing_source_args(cmd, namespace))
    args_to_prompt.update(get_missing_auth_args(cmd, namespace))
    return args_to_prompt
def validate_update_params(cmd, namespace):
    '''Collect the args still missing for the update command
    '''
    args_to_prompt = {}
    if not validate_connection_id(namespace) and not validate_source_resource_id(cmd, namespace):
        args_to_prompt.update(get_missing_source_args(cmd, namespace))
    # NOTE: auth args are deliberately not re-prompted on update
    args_to_prompt.update(get_missing_connection_name(namespace))
    return args_to_prompt
def validate_local_update_params(cmd, namespace):  # pylint: disable=unused-argument
    '''Collect the args still missing for the local connection update command
    '''
    # NOTE: auth args are deliberately not re-prompted on update, so
    # nothing is ever missing here.
    return {}
def validate_default_params(cmd, namespace):
    '''Collect the args still missing for commands other than list and create
    '''
    args_to_prompt = {}
    if not validate_connection_id(namespace):
        args_to_prompt.update(get_missing_source_args(cmd, namespace))
    args_to_prompt.update(get_missing_connection_name(namespace))
    return args_to_prompt
def apply_source_args(cmd, namespace, arg_values):
    '''Fill in namespace.source_id from the collected arg_values
    '''
    source = get_source_resource_name(cmd)
    id_template = SOURCE_RESOURCES.get(source)
    if check_required_args(id_template, arg_values):
        namespace.source_id = id_template.format(
            subscription=get_subscription_id(cmd.cli_ctx),
            **arg_values
        )
    # Optional slot/deployment args may refine the id further
    apply_source_optional_args(cmd, namespace, arg_values)
def apply_source_optional_args(cmd, namespace, arg_values):
    '''Refine namespace.source_id with the optional slot/deployment arg_values
    '''
    source = get_source_resource_name(cmd)
    # A source is only one resource type, so the two cases are exclusive.
    if source == RESOURCE.WebApp and arg_values.get('slot', None):
        id_template = WEB_APP_SLOT_RESOURCE
        if check_required_args(id_template, arg_values):
            namespace.source_id = id_template.format(
                subscription=get_subscription_id(cmd.cli_ctx),
                **arg_values
            )
    elif source == RESOURCE.SpringCloud and arg_values.get('deployment', None):
        id_template = SPRING_APP_DEPLOYMENT_RESOURCE
        if check_required_args(id_template, arg_values):
            namespace.source_id = id_template.format(
                subscription=get_subscription_id(cmd.cli_ctx),
                **arg_values
            )
def apply_source_create_args(cmd, namespace, arg_values):
    '''Copy source-resource create args from arg_values onto the namespace
    '''
    source = get_source_resource_name(cmd)
    create_params = SOURCE_RESOURCES_CREATE_PARAMS.get(source, {})
    for param in create_params:
        if param in arg_values:
            setattr(namespace, param, arg_values.get(param, None))
def apply_target_args(cmd, namespace, arg_values):
    '''Fill in namespace.target_id from the collected arg_values
    '''
    target = get_target_resource_name(cmd)
    id_template = TARGET_RESOURCES.get(target)
    if check_required_args(id_template, arg_values):
        namespace.target_id = id_template.format(
            subscription=get_subscription_id(cmd.cli_ctx),
            **arg_values
        )
def apply_auth_args(cmd, namespace, arg_values):
    '''Copy auth-related args from arg_values onto the namespace
    '''
    source = get_source_resource_name(cmd)
    target = get_target_resource_name(cmd)
    if not (source and target):
        return
    for auth_type in SUPPORTED_AUTH_TYPE.get(source, {}).get(target, {}):
        if auth_type == AUTH_TYPE.Null:
            continue  # no parameters to copy for the null auth type
        for param in AUTH_TYPE_PARAMS.get(auth_type):
            if param in arg_values:
                setattr(namespace, param, arg_values.get(param, None))
                if param == 'workload_identity_auth_info':
                    apply_workload_identity(namespace, arg_values)
def apply_connection_name(namespace, arg_values):
    '''Fill in connection_name from arg_values when the user did not supply one
    '''
    current = getattr(namespace, 'connection_name', None)
    if current is None:
        namespace.connection_name = arg_values.get('connection_name', None)
def apply_client_type(namespace, arg_values):
    '''Fill in client_type from arg_values when the user did not supply one
    '''
    current = getattr(namespace, 'client_type', None)
    if current is None:
        namespace.client_type = arg_values.get('client_type', None)
def apply_list_params(cmd, namespace, arg_values):
    '''Apply the interactively collected args for the list command
    '''
    apply_source_args(cmd, namespace, arg_values)
def apply_create_params(cmd, namespace, arg_values):
    '''Apply the interactively collected args for the create command
    '''
    apply_source_args(cmd, namespace, arg_values)
    apply_source_create_args(cmd, namespace, arg_values)
    apply_target_args(cmd, namespace, arg_values)
    apply_auth_args(cmd, namespace, arg_values)
def apply_local_create_params(cmd, namespace, arg_values):
    '''Apply the interactively collected args for the local connection create command
    '''
    apply_target_args(cmd, namespace, arg_values)
    apply_auth_args(cmd, namespace, arg_values)
def apply_addon_params(cmd, namespace, arg_values):
    '''Apply the interactively collected args for the addon command
    '''
    apply_source_args(cmd, namespace, arg_values)
    apply_auth_args(cmd, namespace, arg_values)
def apply_update_params(cmd, namespace, arg_values):
    '''Apply the interactively collected args for the update command
    '''
    apply_source_args(cmd, namespace, arg_values)
    apply_connection_name(namespace, arg_values)
    apply_auth_args(cmd, namespace, arg_values)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.