Dataset schema (each record below lists these fields in order, one per line):
  code       string, 26 to 870k characters
  docstring  string, 1 to 65.6k characters
  func_name  string, 1 to 194 characters
  language   string, 1 distinct value
  repo       string, 8 to 68 characters
  path       string, 5 to 194 characters
  url        string, 46 to 254 characters
  license    string, 4 distinct values
def apply_local_update_params(cmd, namespace, arg_values): '''Set update command missing args ''' apply_auth_args(cmd, namespace, arg_values)
Set update command missing args
apply_local_update_params
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
MIT
def apply_default_params(cmd, namespace, arg_values): '''Set missing args of commands except for list, create ''' apply_source_args(cmd, namespace, arg_values) apply_connection_name(namespace, arg_values)
Set missing args of commands except for list, create
apply_default_params
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
MIT
def validate_local_params(cmd, namespace): '''Validate command parameters ''' def _validate_and_apply(validate, apply): missing_args = validate(cmd, namespace) if missing_args: arg_values = intelligent_experience(cmd, namespace, missing_args) apply(cmd, namespace, arg_values) # for command: 'list' if cmd.name.endswith(' list'): _validate_and_apply(validate_local_list_params, apply_local_default_params) else: _validate_and_apply(validate_local_default_params, apply_local_default_params) # for command: 'create' if 'create' in cmd.name: # if --new is specified if getattr(namespace, 'connection_name', None) is None: namespace.connection_name = generate_connection_name(cmd) else: validate_connection_name(namespace.connection_name) if getattr(namespace, 'new_addon', None): _validate_and_apply(validate_addon_params, apply_addon_params) else: _validate_and_apply(validate_local_create_params, apply_local_create_params) if getattr(namespace, 'client_type', None) is None: namespace.client_type = get_client_type(cmd, namespace) # for command: update elif 'update' in cmd.name: _validate_and_apply(validate_local_update_params, apply_local_update_params)
Validate command parameters
validate_local_params
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
MIT
def validate_params(cmd, namespace): '''Validate command parameters ''' def _validate_and_apply(validate, apply): missing_args = validate(cmd, namespace) if missing_args: arg_values = intelligent_experience(cmd, namespace, missing_args) apply(cmd, namespace, arg_values) # for command: 'list' if cmd.name.endswith(' list'): _validate_and_apply(validate_list_params, apply_list_params) # for command: 'create' elif 'create' in cmd.name: # if --new is specified if getattr(namespace, 'connection_name', None) is None: namespace.connection_name = generate_connection_name(cmd) else: validate_connection_name(namespace.connection_name) if getattr(namespace, 'new_addon', None): _validate_and_apply(validate_addon_params, apply_addon_params) else: _validate_and_apply(validate_create_params, apply_create_params) if getattr(namespace, 'client_type', None) is None: namespace.client_type = get_client_type(cmd, namespace) # for command: update elif 'update' in cmd.name: _validate_and_apply(validate_update_params, apply_update_params) # for command: all others else: _validate_and_apply(validate_default_params, apply_default_params)
Validate command parameters
validate_params
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
MIT
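Both validators above funnel through the same validate-then-apply closure: a validator reports which arguments are still missing, an interactive step resolves them, and an apply function writes the resolved values back onto the namespace. The following self-contained sketch (illustrative only; none of these names are azure-cli APIs) shows that flow in isolation:

    # Illustrative sketch of the validate-then-apply flow used above.
    # All names below are placeholders, not azure-cli APIs.
    def _validate_and_apply(cmd, namespace, validate, apply, prompt_for_missing):
        missing_args = validate(cmd, namespace)   # returns the args still unset
        if missing_args:
            arg_values = prompt_for_missing(cmd, namespace, missing_args)
            apply(cmd, namespace, arg_values)     # writes resolved values back onto the namespace

    class _Namespace:                             # stand-in for argparse.Namespace
        connection_name = None

    def _validate(cmd, ns):
        return {} if ns.connection_name else {'connection_name': 'required'}

    def _apply(cmd, ns, values):
        ns.connection_name = values['connection_name']

    ns = _Namespace()
    _validate_and_apply(None, ns, _validate, _apply,
                        lambda cmd, ns, missing: {'connection_name': 'demo_connection'})
    assert ns.connection_name == 'demo_connection'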
def validate_service_state(linker_parameters): '''Validate whether user provided params are applicable to service state ''' target_type = linker_parameters.get('target_service', {}).get('type') # AzureResource and other types (e.g., FabricResource, SelfHostedResource) if target_type == "AzureResource": target_id = linker_parameters.get('target_service', {}).get('id') else: target_id = linker_parameters.get('target_service', {}).get('endpoint') for target, resource_id in TARGET_RESOURCES.items(): matched = re.match(get_resource_regex(resource_id), target_id, re.IGNORECASE) if matched: target_type = target if target_type == RESOURCE.AppConfig and linker_parameters.get('auth_info', {}).get('auth_type') == 'secret': segments = parse_resource_id(target_id) rg = segments.get('resource_group') name = segments.get('name') sub = segments.get('subscription') if not rg or not name: return output = run_cli_cmd('az appconfig show -g "{}" -n "{}" --subscription "{}"'.format(rg, name, sub)) if output and output.get('disableLocalAuth') is True: raise ValidationError('Secret as auth type is not allowed when local auth is disabled for the ' 'specified appconfig, you may use service principal or managed identity.') if target_type == RESOURCE.Redis: auth_type = linker_parameters.get('auth_info', {}).get('auth_type') if auth_type == AUTH_TYPE.Secret.value or auth_type == AUTH_TYPE.SecretAuto.value: return redis = run_cli_cmd('az redis show --ids "{}"'.format(target_id)) if redis.get('redisConfiguration', {}).get('aadEnabled', 'False') != "True": raise ValidationError('Please enable Microsoft Entra Authentication on your Redis first. ' 'Note that it will cause your cache instances to reboot to load new ' 'configuration and result in a failover. Consider performing the ' 'operation during low traffic or outside of business hours.')
Validate whether user provided params are applicable to service state
validate_service_state
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/serviceconnector/_validators.py
MIT
def get_custom_sdk(custom_module, client_factory, table_transformer): """Returns a CliCommandType instance with specified operation template based on the given custom module name. This is useful when the command is not defined in the default 'custom' module but instead in a module under 'operations' package.""" return CliCommandType( operations_tmpl='azure.cli.command_modules.appconfig.{}#'.format(custom_module) + '{}', client_factory=client_factory, table_transformer=table_transformer )
Returns a CliCommandType instance with specified operation template based on the given custom module name. This is useful when the command is not defined in the default 'custom' module but instead in a module under 'operations' package.
load_command_table.get_custom_sdk
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/commands.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/commands.py
MIT
def load_command_table(self, _):
    configstore_custom_util = CliCommandType(
        operations_tmpl='azure.cli.command_modules.appconfig.custom#{}',
        table_transformer=configstore_output_format,
        client_factory=cf_configstore
    )

    configstore_identity_util = CliCommandType(
        operations_tmpl='azure.cli.command_modules.appconfig.custom#{}',
        table_transformer=configstore_identity_format,
        client_factory=cf_configstore
    )

    configstore_credential_util = CliCommandType(
        operations_tmpl='azure.cli.command_modules.appconfig.custom#{}',
        table_transformer=configstore_credential_format,
        client_factory=cf_configstore
    )

    configstore_keyvalue_util = CliCommandType(
        operations_tmpl='azure.cli.command_modules.appconfig.keyvalue#{}',
        table_transformer=keyvalue_entry_format,
        client_factory=cf_configstore_operations
    )

    configstore_replica_util = CliCommandType(
        operations_tmpl='azure.cli.command_modules.appconfig.custom#{}',
        table_transformer=configstore_replica_output_format,
        client_factory=cf_replicas
    )

    configstore_snapshot_util = CliCommandType(
        operations_tmpl='azure.cli.command_modules.appconfig.snapshot#{}',
        table_transformer=configstore_snapshot_output_format
    )

    def get_custom_sdk(custom_module, client_factory, table_transformer):
        """Returns a CliCommandType instance with specified operation template based on the given custom module name.
        This is useful when the command is not defined in the default 'custom' module but instead in a module under
        'operations' package."""
        return CliCommandType(
            operations_tmpl='azure.cli.command_modules.appconfig.{}#'.format(custom_module) + '{}',
            client_factory=client_factory,
            table_transformer=table_transformer
        )

    # Management Plane Commands
    with self.command_group('appconfig', configstore_custom_util) as g:
        g.command('create', 'create_configstore')
        g.command('delete', 'delete_configstore', confirmation=True)
        g.command('update', 'update_configstore')
        g.command('list', 'list_configstore')
        g.command('list-deleted', 'list_deleted_configstore', table_transformer=deleted_configstore_output_format)
        g.command('recover', 'recover_deleted_configstore', confirmation=True)
        g.command('purge', 'purge_deleted_configstore', confirmation=True)
        g.show_command('show', 'show_configstore')
        g.show_command('show-deleted', 'show_deleted_configstore', table_transformer=deleted_configstore_output_format)

    with self.command_group('appconfig replica', configstore_replica_util) as g:
        g.command('list', 'list_replica')
        g.command('create', 'create_replica')
        g.command('delete', 'delete_replica')
        g.show_command('show', 'show_replica')

    with self.command_group('appconfig credential', configstore_credential_util) as g:
        g.command('list', 'list_credential')
        g.command('regenerate', 'regenerate_credential')

    with self.command_group('appconfig identity', configstore_identity_util) as g:
        g.command('assign', 'assign_managed_identity')
        g.command('remove', 'remove_managed_identity')
        g.show_command('show', 'show_managed_identity')

    with self.command_group('appconfig revision', configstore_keyvalue_util) as g:
        g.command('list', 'list_revision')

    # Data Plane Commands
    with self.command_group('appconfig kv', configstore_keyvalue_util) as g:
        g.command('set', 'set_key')
        g.command('delete', 'delete_key')
        g.show_command('show', 'show_key')
        g.command('list', 'list_key')
        g.command('lock', 'lock_key')
        g.command('unlock', 'unlock_key')
        g.command('restore', 'restore_key')
        g.command('import', 'import_config')
        g.command('export', 'export_config')
        g.command('set-keyvault', 'set_keyvault')

    # FeatureManagement Commands
    with self.command_group('appconfig feature',
                            custom_command_type=get_custom_sdk('feature',
                                                               cf_configstore_operations,
                                                               featureflag_entry_format)) as g:
        g.custom_command('set', 'set_feature')
        g.custom_command('delete', 'delete_feature')
        g.custom_show_command('show', 'show_feature')
        g.custom_command('list', 'list_feature')
        g.custom_command('lock', 'lock_feature')
        g.custom_command('unlock', 'unlock_feature')
        g.custom_command('enable', 'enable_feature')
        g.custom_command('disable', 'disable_feature')

    with self.command_group('appconfig feature filter',
                            custom_command_type=get_custom_sdk('feature',
                                                               cf_configstore_operations,
                                                               featurefilter_entry_format)) as g:
        g.custom_command('add', 'add_filter')
        g.custom_command('update', 'update_filter')
        g.custom_command('delete', 'delete_filter')
        g.custom_show_command('show', 'show_filter')
        g.custom_command('list', 'list_filter')

    # Snapshot Commands
    with self.command_group('appconfig snapshot', configstore_snapshot_util) as g:
        g.command('create', 'create_snapshot')
        g.show_command('show', 'show_snapshot')
        g.command('list', 'list_snapshots')
        g.command('archive', 'archive_snapshot')
        g.command('recover', 'recover_snapshot')
Returns a CliCommandType instance with specified operation template based on the given custom module name. This is useful when the command is not defined in the default 'custom' module but instead in a module under 'operations' package.
load_command_table
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/commands.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/commands.py
MIT
def __update_existing_key_value(azconfig_client, retrieved_kv, updated_value, correlation_request_id=None): ''' To update the value of a pre-existing KeyValue Args: azconfig_client - AppConfig client making calls to the service retrieved_kv - Pre-existing ConfigurationSetting object updated_value - Value string to be updated Return: KeyValue object ''' set_kv = ConfigurationSetting(key=retrieved_kv.key, value=updated_value, label=retrieved_kv.label, tags=retrieved_kv.tags, content_type=FeatureFlagConstants.FEATURE_FLAG_CONTENT_TYPE, read_only=retrieved_kv.read_only, etag=retrieved_kv.etag, last_modified=retrieved_kv.last_modified) try: new_kv = azconfig_client.set_configuration_setting(set_kv, match_condition=MatchConditions.IfNotModified, headers={HttpHeaders.CORRELATION_REQUEST_ID: correlation_request_id}) return convert_configurationsetting_to_keyvalue(new_kv) except ResourceReadOnlyError: raise CLIError("Failed to update read only feature flag. Unlock the feature flag before updating it.")
To update the value of a pre-existing KeyValue Args: azconfig_client - AppConfig client making calls to the service retrieved_kv - Pre-existing ConfigurationSetting object updated_value - Value string to be updated Return: KeyValue object
__update_existing_key_value
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/feature.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/feature.py
MIT
def __list_all_keyvalues(azconfig_client, key_filter, label=None, correlation_request_id=None): ''' To get all keys by name or pattern Args: azconfig_client - AppConfig client making calls to the service key_filter - Filter for the key of the feature flag label - Feature label or pattern Return: List of KeyValue objects ''' # We dont support listing comma separated keys and ned to fail with appropriate error # (?<!\\) Matches if the preceding character is not a backslash # (?:\\\\)* Matches any number of occurrences of two backslashes # , Matches a comma unescaped_comma_regex = re.compile(r'(?<!\\)(?:\\\\)*,') if unescaped_comma_regex.search(key_filter): raise CLIError("Comma separated feature names are not supported. Please provide escaped string if your feature name contains comma. \nSee \"az appconfig feature list -h\" for correct usage.") label = prep_label_filter_for_url_encoding(label) try: configsetting_iterable = azconfig_client.list_configuration_settings(key_filter=key_filter, label_filter=label, headers={HttpHeaders.CORRELATION_REQUEST_ID: correlation_request_id}) except HttpResponseError as exception: raise CLIErrors.AzureResponseError('Failed to read feature flag(s) that match the specified feature and label. ' + str(exception)) try: retrieved_kv = [convert_configurationsetting_to_keyvalue(x) for x in configsetting_iterable] valid_features = [] for kv in retrieved_kv: if kv.content_type == FeatureFlagConstants.FEATURE_FLAG_CONTENT_TYPE: valid_features.append(kv) return valid_features except Exception as exception: raise CLIError(str(exception))
To get all keys by name or pattern Args: azconfig_client - AppConfig client making calls to the service key_filter - Filter for the key of the feature flag label - Feature label or pattern Return: List of KeyValue objects
__list_all_keyvalues
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/feature.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/feature.py
MIT
def get_serializer(level): ''' Helper method that returns a serializer method called in formatting a string representation of a key-value. ''' source_modes = ("appconfig", "appservice", "file") kvset_modes = ("kvset", "restore") if level not in source_modes and level not in kvset_modes: raise CLIError("Invalid argument '{}' supplied. level argument should be on of the following: {}".format(level, source_modes + kvset_modes)) def serialize(obj): if isinstance(obj, dict): return json.JSONEncoder().default(obj) # Feature flag format: {"feature": <feature-name>, "state": <on/off>, "conditions": <conditions-dict>} if is_feature_flag(obj): # State property doesn't make sense in feature flag version 2 schema beacuse of the added properties - variants, allocation, telemetry # The State property only exists in the CLI, we should move to showing enabled property instead as the other clients # As we move to showing the enabled property, we will show the state property in the CLI only if compatibility mode is true env_compatibility_mode = os.environ.get("AZURE_APPCONFIG_FM_COMPATIBLE", True) compatibility_mode = str(env_compatibility_mode).lower() == "true" feature = map_keyvalue_to_featureflag(obj, hide_enabled=compatibility_mode) # name feature_json = {'feature': feature.name} # state if hasattr(feature, 'state'): feature_json['state'] = feature.state # enabled if hasattr(feature, 'enabled'): feature_json['enabled'] = feature.enabled # description if feature.description is not None: feature_json['description'] = feature.description # conditions if feature.conditions: feature_json['conditions'] = custom_serialize_conditions(feature.conditions) # allocation if feature.allocation: feature_json['allocation'] = custom_serialize_allocation(feature.allocation) # variants if feature.variants: feature_json['variants'] = custom_serialize_variants(feature.variants) # telemetry if feature.telemetry: feature_json['telemetry'] = custom_serialize_telemetry(feature.telemetry) return feature_json res = {'key': obj.key, 'value': obj.value} # import/export key, value, content_type and tags (same level as key-value): {"key": <key>, "value": <value>, "AppService:SlotSetting": <true/false>} if level == 'appservice': if obj.tags: slot_setting = obj.tags.get(AppServiceConstants.APPSVC_SLOT_SETTING_KEY, 'false') res[AppServiceConstants.APPSVC_SLOT_SETTING_KEY] = slot_setting # import/export key, value, content-type, and tags (as a sub group): {"key": <key>, "value": <value>, "label": <label> "content_type": <content_type>, "tags": <tags_dict>} elif level == 'appconfig': res["label"] = obj.label res["content_type"] = obj.content_type # tags tag_json = {} if obj.tags: for tag_k, tag_v in obj.tags.items(): tag_json[tag_k] = tag_v res['tags'] = tag_json return res def serialize_kvset(kv): if level == "kvset": # File import with kvset profile kv_json = { 'key': kv.key, 'value': kv.value, 'label': kv.label, 'content_type': kv.content_type } else: # Restore preview format kv_json = { 'key': kv.key, 'value': kv.value, 'label': kv.label, 'locked': kv.locked, 'last modified': kv.last_modified, 'content type': kv.content_type } # tags tag_json = {} if kv.tags: for tag_k, tag_v in kv.tags.items(): tag_json[tag_k] = tag_v kv_json['tags'] = tag_json return kv_json return serialize if level in source_modes else serialize_kvset
Helper method that returns a serializer method called in formatting a string representation of a key-value.
get_serializer
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_diff_utils.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_diff_utils.py
MIT
def validate_datetime(namespace): ''' valid datetime format: YYYY-MM-DDThh:mm:ss["Z"/±hh:mm]''' supported_formats = ["%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M:%Sz", "%Y-%m-%dT%H:%M:%S%z"] if namespace.datetime is not None: for supported_format in supported_formats: if __tryparse_datetime(namespace.datetime, supported_format): return raise InvalidArgumentValueError('The input datetime is invalid. Correct format should be YYYY-MM-DDThh:mm:ss["Z"/±hh:mm].')
valid datetime format: YYYY-MM-DDThh:mm:ss["Z"/±hh:mm]
validate_datetime
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
MIT
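validate_datetime above delegates to a private __tryparse_datetime helper that is not included in this record. A minimal sketch of such a helper, assuming it simply returns the parsed datetime on success and None on failure, could look like this:

    # Hypothetical helper, not shown in the record above: returns a parsed datetime
    # on success and None on failure, so the validator can test for truthiness.
    from datetime import datetime

    def _tryparse_datetime(value, fmt):
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            return None

    assert _tryparse_datetime("2024-01-02T03:04:05", "%Y-%m-%dT%H:%M:%S") is not None
    assert _tryparse_datetime("not-a-date", "%Y-%m-%dT%H:%M:%S") is None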
def validate_connection_string(cmd, namespace): ''' Endpoint=https://example.azconfig.io;Id=xxxxx;Secret=xxxx''' # Only use defaults when both name and connection string not specified if not (namespace.connection_string or namespace.name): namespace.connection_string = cmd.cli_ctx.config.get('defaults', 'appconfig_connection_string', None) or cmd.cli_ctx.config.get('appconfig', 'connection_string', None) namespace.name = cmd.cli_ctx.config.get('defaults', 'app_configuration_store', None) connection_string = namespace.connection_string if connection_string: if not is_valid_connection_string(connection_string): raise CLIError('''The connection string is invalid. \ Correct format should be Endpoint=https://example.azconfig.io;Id=xxxxx;Secret=xxxx ''')
Endpoint=https://example.azconfig.io;Id=xxxxx;Secret=xxxx
validate_connection_string
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
MIT
def validate_filter_parameters(namespace): """ Extracts multiple space-separated filter paramters in name[=value] format """ if isinstance(namespace.filter_parameters, list): filter_parameters_dict = {} for item in namespace.filter_parameters: param_tuple = validate_filter_parameter(item) if param_tuple: # pylint: disable=unbalanced-tuple-unpacking param_name, param_value = param_tuple # If param_name already exists, error out if param_name in filter_parameters_dict: raise CLIError('Filter parameter name "{}" cannot be duplicated.'.format(param_name)) filter_parameters_dict.update({param_name: param_value}) namespace.filter_parameters = filter_parameters_dict
Extracts multiple space-separated filter paramters in name[=value] format
validate_filter_parameters
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
MIT
def validate_filter_parameter(string): """ Extracts a single filter parameter in name[=value] format """ result = () if string: comps = string.split('=', 1) if comps[0]: if len(comps) > 1: # In the portal, if value textbox is blank we store the value as empty string. # In CLI, we should allow inputs like 'name=', which correspond to empty string value. # But there is no way to differentiate between CLI inputs 'name=' and 'name=""'. # So even though "" is invalid JSON escaped string, we will accept it and set the value as empty string. filter_param_value = '\"\"' if comps[1] == "" else comps[1] try: # Ensure that provided value of this filter parameter is valid JSON. Error out if value is invalid JSON. filter_param_value = json.loads(filter_param_value) except ValueError: raise InvalidArgumentValueError('Filter parameter value must be a JSON escaped string. "{}" is not a valid JSON object.'.format(filter_param_value)) result = (comps[0], filter_param_value) else: result = (string, '') else: # Error out on invalid arguments like '=value' or '=' raise InvalidArgumentValueError('Invalid filter parameter "{}". Parameter name cannot be empty.'.format(string)) return result
Extracts a single filter parameter in name[=value] format
validate_filter_parameter
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
MIT
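To make the name[=value] rules above concrete, here is a small self-contained sketch (not the azure-cli function itself) that mirrors the same parsing behaviour on a few representative inputs:

    import json

    def parse_filter_parameter(s):
        # Mirrors the rules above: value must be JSON; bare 'name' means empty value;
        # 'name=' is treated the same as 'name=""'.
        name, sep, value = s.partition('=')
        if not name:
            raise ValueError('Parameter name cannot be empty: %r' % s)
        if not sep:
            return (name, '')
        return (name, json.loads(value if value else '""'))

    assert parse_filter_parameter('Percentage') == ('Percentage', '')
    assert parse_filter_parameter('Value=30') == ('Value', 30)
    assert parse_filter_parameter('Audience={"Groups":["ring0"]}') == ('Audience', {'Groups': ['ring0']})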
def validate_secret_identifier(namespace): """ Validate the format of keyvault reference secret identifier """ from azure.keyvault.secrets._shared import parse_key_vault_id identifier = getattr(namespace, 'secret_identifier', None) try: # this throws an exception for invalid format of secret identifier parse_key_vault_id(source_id=identifier) except Exception as e: raise CLIError("Received an exception while validating the format of secret identifier.\n{0}".format(str(e)))
Validate the format of keyvault reference secret identifier
validate_secret_identifier
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_validators.py
MIT
def convert_from_dict(cls, variant_dict): """ Convert dict to FeatureVariant object Args: dictionary - {string, Any} Return: FeatureVariant object """ name = variant_dict.get(FeatureFlagConstants.NAME, None) if not name: raise ValidationError( "Feature variant must contain required '%s' attribute: \n%s" % (FeatureFlagConstants.NAME, json.dumps(variant_dict, indent=2, ensure_ascii=False)) ) configuration_value = variant_dict.get( FeatureFlagConstants.VARIANT_CONFIGURATION_VALUE, None ) status_override = variant_dict.get( FeatureFlagConstants.VARIANT_STATUS_OVERRIDE, None ) return cls( name=name, configuration_value=configuration_value, status_override=status_override, )
Convert dict to FeatureVariant object Args: dictionary - {string, Any} Return: FeatureVariant object
convert_from_dict
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def convert_from_dict(cls, user_allocation_dict): """ Convert dict to FeatureUserAllocation object Args: dictionary - {string, Any} Return: FeatureUserAllocation object """ variant = user_allocation_dict.get(FeatureFlagConstants.VARIANT, None) users = user_allocation_dict.get(FeatureFlagConstants.USERS, None) if not variant or not users: raise ValidationError( "User variant allocation must contain required '%s' and '%s' attributes: \n%s" % (FeatureFlagConstants.VARIANT, FeatureFlagConstants.USERS, json.dumps(user_allocation_dict, indent=2, ensure_ascii=False)) ) return cls(variant=variant, users=users)
Convert dict to FeatureUserAllocation object Args: dictionary - {string, Any} Return: FeatureUserAllocation object
convert_from_dict
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def convert_from_dict(cls, group_allocation_dict): """ Convert dict to FeatureGroupAllocation object Args: dictionary - {string, Any} Return: FeatureGroupAllocation object """ variant = group_allocation_dict.get(FeatureFlagConstants.VARIANT, None) groups = group_allocation_dict.get(FeatureFlagConstants.GROUPS, None) if not variant or not groups: raise ValidationError( "Group variant allocation must contain required '%s' and '%s' attributes: \n%s" % (FeatureFlagConstants.VARIANT, FeatureFlagConstants.GROUPS, json.dumps(group_allocation_dict, indent=2, ensure_ascii=False)) ) return cls(variant=variant, groups=groups)
Convert dict to FeatureGroupAllocation object Args: dictionary - {string, Any} Return: FeatureGroupAllocation object
convert_from_dict
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def convert_from_dict(cls, percentile_allocation_dict): """ Convert dict to FeaturePercentileAllocation object Args: dictionary - {string, Any} Return: FeaturePercentileAllocation object """ variant = percentile_allocation_dict.get(FeatureFlagConstants.VARIANT, None) percentile_from = percentile_allocation_dict.get(FeatureFlagConstants.FROM, None) percentile_to = percentile_allocation_dict.get(FeatureFlagConstants.TO, None) if not variant or percentile_from is None or percentile_to is None: raise ValidationError( "Percentile allocation must contain required '%s', '%s' and '%s' attributes: \n%s" % (FeatureFlagConstants.VARIANT, FeatureFlagConstants.TO, FeatureFlagConstants.FROM, json.dumps(percentile_allocation_dict, indent=2, ensure_ascii=False)) ) if not isinstance(percentile_from, int) or not isinstance(percentile_to, int): raise ValidationError( "Percentile allocation '%s' and '%s' must be integers: \n%s" % (FeatureFlagConstants.FROM, FeatureFlagConstants.TO, json.dumps(percentile_allocation_dict, indent=2, ensure_ascii=False)) ) if ( percentile_from < 0 or percentile_from > 100 or percentile_to < 0 or percentile_to > 100 ): raise ValidationError( "Percentile allocation '%s' and '%s' must be between 0 and 100: \n%s" % (FeatureFlagConstants.TO, FeatureFlagConstants.FROM, json.dumps(percentile_allocation_dict, indent=2, ensure_ascii=False)) ) if percentile_from >= percentile_to: raise ValidationError( "Percentile allocation '%s' must be less than '%s': \n%s" % (FeatureFlagConstants.FROM, FeatureFlagConstants.TO, json.dumps(percentile_allocation_dict, indent=2, ensure_ascii=False)) ) return cls(variant=variant, from_=percentile_from, to=percentile_to)
Convert dict to FeaturePercentileAllocation object Args: dictionary - {string, Any} Return: FeaturePercentileAllocation object
convert_from_dict
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def convert_from_dict(cls, allocation_dict): """ Convert dict to FeatureAllocation object Args: dict {string, Any} Return: FeatureAllocation object """ default_when_disabled = allocation_dict.get( FeatureFlagConstants.DEFAULT_WHEN_DISABLED, None ) default_when_enabled = allocation_dict.get( FeatureFlagConstants.DEFAULT_WHEN_ENABLED, None ) seed = allocation_dict.get( FeatureFlagConstants.SEED, None ) allocation = cls( default_when_enabled=default_when_enabled, default_when_disabled=default_when_disabled, seed=seed ) allocation_user = allocation_dict.get(FeatureFlagConstants.USER, None) # Convert all users to FeatureUserAllocation object if allocation_user: allocation_user_list = [] for user in allocation_user: if user: feature_user_allocation = FeatureUserAllocation.convert_from_dict(user) allocation_user_list.append(feature_user_allocation) allocation.user = allocation_user_list # Convert all groups to FeatureGroupAllocation object allocation_group = allocation_dict.get(FeatureFlagConstants.GROUP, None) if allocation_group: allocation_group_list = [] for group in allocation_group: if group: feature_group_allocation = FeatureGroupAllocation.convert_from_dict(group) allocation_group_list.append(feature_group_allocation) allocation.group = allocation_group_list # Convert all percentile to FeatureAllocationPercentile object allocation_percentile = allocation_dict.get( FeatureFlagConstants.PERCENTILE, None ) if allocation_percentile: allocation_percentile_list = [] for percentile in allocation_percentile: if percentile: feature_percentile_allocation = FeaturePercentileAllocation.convert_from_dict(percentile) allocation_percentile_list.append(feature_percentile_allocation) allocation.percentile = allocation_percentile_list return allocation
Convert dict to FeatureAllocation object Args: dict {string, Any} Return: FeatureAllocation object
convert_from_dict
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def convert_from_dict(cls, feature_telemetry_dict, feature_name): """ Convert dict to FeatureTelemetry object Args: dictionary - {string, Any} Return: FeatureTelemetry object """ enabled = None telemetry_enabled = feature_telemetry_dict.get(FeatureFlagConstants.ENABLED, None) if telemetry_enabled is not None: enabled = convert_string_to_bool(telemetry_enabled, feature_name) metadata = feature_telemetry_dict.get(FeatureFlagConstants.METADATA, None) return cls(enabled=enabled, metadata=metadata)
Convert dict to FeatureTelemetry object Args: dictionary - {string, Any} Return: FeatureTelemetry object
convert_from_dict
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def custom_serialize_conditions(conditions_dict): """ Helper Function to serialize Conditions Args: conditions_dict - Dictionary of {str, Any} Return: JSON serializable Dictionary """ featurefilterdict = {} for key, value in conditions_dict.items(): if key == FeatureFlagConstants.CLIENT_FILTERS: if value is not None: featurefilterdict[key] = [ feature_filter.__dict__ for feature_filter in value ] else: featurefilterdict[key] = value return featurefilterdict
Helper Function to serialize Conditions Args: conditions_dict - Dictionary of {str, Any} Return: JSON serializable Dictionary
custom_serialize_conditions
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def custom_serialize_allocation(allocation): """ Helper Function to serialize Allocation Args: allocation_dict - FeatureAllocation object Return: JSON serializable Dictionary """ featureallocationdict = {} if allocation.user: featureallocationdict[FeatureFlagConstants.USER] = [ user.__dict__ for user in allocation.user ] if allocation.group: featureallocationdict[FeatureFlagConstants.GROUP] = [ group.__dict__ for group in allocation.group ] if allocation.percentile: featureallocationdict[FeatureFlagConstants.PERCENTILE] = [ percentile.__dict__ for percentile in allocation.percentile ] if allocation.default_when_enabled: featureallocationdict[FeatureFlagConstants.DEFAULT_WHEN_ENABLED] = allocation.default_when_enabled if allocation.default_when_disabled: featureallocationdict[FeatureFlagConstants.DEFAULT_WHEN_DISABLED] = allocation.default_when_disabled if allocation.seed: featureallocationdict[FeatureFlagConstants.SEED] = allocation.seed return featureallocationdict
Helper Function to serialize Allocation Args: allocation_dict - FeatureAllocation object Return: JSON serializable Dictionary
custom_serialize_allocation
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def custom_serialize_variants(variants_list): """ Helper Function to serialize Variants Args: variants_list - List of FeatureVariant objects Return: JSON serializable List """ featurevariants = [] for variant in variants_list: variant_dict = {} variant_dict[FeatureFlagConstants.NAME] = variant.name variant_dict[FeatureFlagConstants.VARIANT_CONFIGURATION_VALUE] = variant.configuration_value if variant.status_override: variant_dict[FeatureFlagConstants.VARIANT_STATUS_OVERRIDE] = variant.status_override featurevariants.append(variant_dict) return featurevariants
Helper Function to serialize Variants Args: variants_list - List of FeatureVariant objects Return: JSON serializable List
custom_serialize_variants
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def custom_serialize_telemetry(telemetry): """ Helper Function to serialize Telemetry Args: telemetry - FeatureTelemetry object Return: JSON serializable dictionary """ feature_telemetry_dict = {} if telemetry.enabled is not None: feature_telemetry_dict[FeatureFlagConstants.ENABLED] = telemetry.enabled if telemetry.metadata: feature_telemetry_dict[FeatureFlagConstants.METADATA] = telemetry.metadata return feature_telemetry_dict
Helper Function to serialize Telemetry Args: telemetry - FeatureTelemetry object Return: JSON serializable dictionary
custom_serialize_telemetry
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def map_featureflag_to_keyvalue(featureflag): """ Helper Function to convert FeatureFlag object to KeyValue object Args: featureflag - FeatureFlag object to be converted Return: KeyValue object """ try: enabled = False if featureflag.state in ("on", "conditional"): enabled = True feature_flag_value = FeatureFlagValue( id_=featureflag.name, description=featureflag.description, enabled=enabled, conditions=featureflag.conditions, display_name=featureflag.display_name, allocation=featureflag.allocation, variants=featureflag.variants, telemetry=featureflag.telemetry, ) set_kv = KeyValue( key=featureflag.key, label=featureflag.label, value=json.dumps( feature_flag_value, default=lambda o: {k: v for k, v in o.__dict__.items() if v is not None}, ensure_ascii=False ), content_type=FeatureFlagConstants.FEATURE_FLAG_CONTENT_TYPE, tags={}, ) set_kv.locked = featureflag.locked set_kv.last_modified = featureflag.last_modified except ValueError as exception: error_msg = "Exception while converting feature flag to key value: {0}\n{1}".format(featureflag.key, exception) raise ValueError(error_msg) except Exception as exception: error_msg = "Exception while converting feature flag to key value: {0}\n{1}".format(featureflag.key, exception) raise Exception(error_msg) # pylint: disable=broad-exception-raised return set_kv
Helper Function to convert FeatureFlag object to KeyValue object Args: featureflag - FeatureFlag object to be converted Return: KeyValue object
map_featureflag_to_keyvalue
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def map_keyvalue_to_featureflag(keyvalue, show_all_details=True, hide_enabled=True): """ Helper Function to convert KeyValue object to FeatureFlag object for display Args: keyvalue - KeyValue object to be converted show_all_details - Boolean for controlling whether we want to display "Conditions", "Allocation", "Variants", "Telemetry" or not hide_enabled - Boolean for controlling whether we want to hide the "Enabled" field or not Return: FeatureFlag object """ feature_flag_value = map_keyvalue_to_featureflagvalue(keyvalue) feature_name = feature_flag_value.id state = FeatureState.OFF if feature_flag_value.enabled: state = FeatureState.ON conditions = feature_flag_value.conditions filters = None # if conditions["client_filters"] list is not empty, make state conditional if conditions and FeatureFlagConstants.CLIENT_FILTERS in conditions: filters = conditions[FeatureFlagConstants.CLIENT_FILTERS] if ( filters or feature_flag_value.allocation or feature_flag_value.variants or feature_flag_value.telemetry ) and state == FeatureState.ON: state = FeatureState.CONDITIONAL feature_flag = FeatureFlag( name=feature_name, key=keyvalue.key, label=keyvalue.label, state=state, enabled=feature_flag_value.enabled, description=feature_flag_value.description, conditions=conditions, locked=keyvalue.locked, display_name=feature_flag_value.display_name, last_modified=keyvalue.last_modified, allocation=feature_flag_value.allocation, variants=feature_flag_value.variants, telemetry=feature_flag_value.telemetry ) # By Default, we will try to show conditions unless the user has # specifically filtered them using --fields arg. # But in some operations like 'Delete feature', we don't want # to display all the conditions as a result of delete operation if not show_all_details: del feature_flag.conditions del feature_flag.allocation del feature_flag.variants del feature_flag.telemetry if hide_enabled: del feature_flag.enabled else: del feature_flag.state return feature_flag
Helper Function to convert KeyValue object to FeatureFlag object for display Args: keyvalue - KeyValue object to be converted show_all_details - Boolean for controlling whether we want to display "Conditions", "Allocation", "Variants", "Telemetry" or not hide_enabled - Boolean for controlling whether we want to hide the "Enabled" field or not Return: FeatureFlag object
map_keyvalue_to_featureflag
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def map_keyvalue_to_featureflagvalue(keyvalue):
    """
    Helper Function to convert value string to a valid FeatureFlagValue.
    Throws Exception if value is an invalid JSON.

    Args:
        keyvalue - KeyValue object

    Return:
        Valid FeatureFlagValue object
    """
    try:
        # Make sure value string is a valid json
        feature_flag_dict = shell_safe_json_parse(keyvalue.value)
        feature_flag_version = FeatureFlagVersion.V1  # Default to v1 feature flags

        # Make sure value json has all the fields we support in the backend
        valid_fields = {
            FeatureFlagConstants.ID,
            FeatureFlagConstants.DESCRIPTION,
            FeatureFlagConstants.ENABLED,
            FeatureFlagConstants.CONDITIONS,
            FeatureFlagConstants.ALLOCATION,
            FeatureFlagConstants.VARIANTS,
            FeatureFlagConstants.TELEMETRY,
            FeatureFlagConstants.DISPLAY_NAME
        }
        if valid_fields.union(feature_flag_dict.keys()) != valid_fields:
            logger.debug(
                "'%s' feature flag is missing required values or it contains ",
                keyvalue.key + "unsupported values. Unsupported values will be ignored.\n",
            )

        # check if feature flag is version v1 or v2.
        # We always default values for v1 feature flags but avoid defaults for v2 feature flags.
        if (
            FeatureFlagConstants.ALLOCATION in feature_flag_dict or
            FeatureFlagConstants.VARIANTS in feature_flag_dict or
            FeatureFlagConstants.TELEMETRY in feature_flag_dict
        ):
            feature_flag_version = FeatureFlagVersion.V2

        feature_name = feature_flag_dict.get(FeatureFlagConstants.ID, "")
        if feature_flag_version == FeatureFlagVersion.V1:
            feature_flag_dict[FeatureFlagConstants.DESCRIPTION] = feature_flag_dict.get(
                FeatureFlagConstants.DESCRIPTION, "")  # assign empty string as default

        if not feature_name:
            raise ValueError("Feature flag 'id' cannot be empty.")

        conditions = feature_flag_dict.get(FeatureFlagConstants.CONDITIONS, None)
        default_conditions = {FeatureFlagConstants.CLIENT_FILTERS: []}
        if conditions:
            client_filters = conditions.get(FeatureFlagConstants.CLIENT_FILTERS, None)
            # Convert all filters to FeatureFilter objects
            if client_filters:
                client_filters_list = []
                for client_filter in client_filters:
                    # If there is a filter, it should always have a name
                    # In case it doesn't, ignore this filter
                    lowercase_filter = {k.lower(): v for k, v in client_filter.items()}
                    name = lowercase_filter.get(FeatureFlagConstants.NAME, None)
                    if name:
                        params = lowercase_filter.get(
                            FeatureFlagConstants.FILTER_PARAMETERS, {}
                        )
                        client_filters_list.append(FeatureFilter(name, params))
                    else:
                        logger.warning(
                            "Ignoring this filter without the %s attribute:\n%s",
                            FeatureFlagConstants.FILTER_NAME,
                            json.dumps(client_filter, indent=2, ensure_ascii=False)
                        )
                conditions[FeatureFlagConstants.CLIENT_FILTERS] = client_filters_list

            requirement_type = conditions.get(
                FeatureFlagConstants.REQUIREMENT_TYPE, None
            )
            if requirement_type:
                if requirement_type.lower() not in (
                    FeatureFlagConstants.REQUIREMENT_TYPE_ALL,
                    FeatureFlagConstants.REQUIREMENT_TYPE_ANY,
                ):
                    raise ValidationError(
                        f"Feature '{feature_name}' must have an any/all requirement type."
                    )
                conditions[FeatureFlagConstants.REQUIREMENT_TYPE] = requirement_type

            # Backend returns conditions: {client_filters: None} for flags with no conditions.
            # No need to write empty conditions to key-values.
            if client_filters or requirement_type:
                feature_flag_dict[FeatureFlagConstants.CONDITIONS] = conditions
            else:
                feature_flag_dict[FeatureFlagConstants.CONDITIONS] = default_conditions
        elif feature_flag_version == FeatureFlagVersion.V1:
            feature_flag_dict[FeatureFlagConstants.CONDITIONS] = default_conditions

        # Allocation
        allocation = feature_flag_dict.get(FeatureFlagConstants.ALLOCATION, None)
        if allocation:
            feature_flag_dict[FeatureFlagConstants.ALLOCATION] = FeatureAllocation.convert_from_dict(allocation)

        # Variants
        variants = feature_flag_dict.get(FeatureFlagConstants.VARIANTS, None)
        if variants:
            variant_list = []
            for variant in variants:
                if variant:
                    feature_variant = FeatureVariant.convert_from_dict(variant)
                    variant_list.append(feature_variant)
            feature_flag_dict[FeatureFlagConstants.VARIANTS] = variant_list

        # Telemetry
        telemetry = feature_flag_dict.get(FeatureFlagConstants.TELEMETRY, None)
        if telemetry:
            feature_flag_dict[FeatureFlagConstants.TELEMETRY] = FeatureTelemetry.convert_from_dict(telemetry, feature_name)

        feature_flag_value = FeatureFlagValue(
            id_=feature_name,
            description=feature_flag_dict.get(FeatureFlagConstants.DESCRIPTION, None),
            enabled=feature_flag_dict.get(FeatureFlagConstants.ENABLED, False),
            display_name=feature_flag_dict.get(FeatureFlagConstants.DISPLAY_NAME, None),
            conditions=feature_flag_dict.get(FeatureFlagConstants.CONDITIONS, None),
            allocation=feature_flag_dict.get(FeatureFlagConstants.ALLOCATION, None),
            variants=feature_flag_dict.get(FeatureFlagConstants.VARIANTS, None),
            telemetry=feature_flag_dict.get(FeatureFlagConstants.TELEMETRY, None),
        )
    except (InvalidArgumentValueError, TypeError, ValueError) as exception:
        error_msg = (
            "Invalid value. Unable to decode the following JSON value: \n" +
            "key:{0} value:{1}\nFull exception: \n{2}".format(
                keyvalue.key, keyvalue.value, str(exception)
            )
        )
        raise ValueError(error_msg)
    except:
        logger.error(
            "Exception while parsing feature flag. key:%s value:%s.",
            keyvalue.key,
            keyvalue.value,
        )
        raise
    return feature_flag_value
Helper Function to convert value string to a valid FeatureFlagValue. Throws Exception if value is an invalid JSON. Args: keyvalue - KeyValue object Return: Valid FeatureFlagValue object
map_keyvalue_to_featureflagvalue
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def is_feature_flag(kv): # pylint: disable=line-too-long """ Helper function used to determine if a key-value is a feature flag """ if ( kv and kv.key and isinstance(kv.key, str) and kv.content_type and isinstance(kv.content_type, str) ): return ( kv.key.startswith(FeatureFlagConstants.FEATURE_FLAG_PREFIX) and kv.content_type == FeatureFlagConstants.FEATURE_FLAG_CONTENT_TYPE ) return False
Helper function used to determine if a key-value is a feature flag
is_feature_flag
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
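is_feature_flag above relies on two App Configuration conventions: feature flags live under a reserved key prefix and carry a dedicated content type. A standalone sketch of that check, with the constant values assumed here rather than taken from FeatureFlagConstants, and a stand-in KeyValue type:

    from dataclasses import dataclass

    # Assumed conventional feature-flag markers (in the real module these come
    # from FeatureFlagConstants).
    FEATURE_FLAG_PREFIX = ".appconfig.featureflag/"
    FEATURE_FLAG_CONTENT_TYPE = "application/vnd.microsoft.appconfig.ff+json;charset=utf-8"

    @dataclass
    class FakeKeyValue:          # stand-in for the module's KeyValue model
        key: str
        content_type: str

    def looks_like_feature_flag(kv):
        return (bool(kv and isinstance(kv.key, str) and isinstance(kv.content_type, str)) and
                kv.key.startswith(FEATURE_FLAG_PREFIX) and
                kv.content_type == FEATURE_FLAG_CONTENT_TYPE)

    assert looks_like_feature_flag(FakeKeyValue(FEATURE_FLAG_PREFIX + "Beta", FEATURE_FLAG_CONTENT_TYPE))
    assert not looks_like_feature_flag(FakeKeyValue("ordinary-key", "application/json"))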
def convert_string_to_bool(value, feature_name): """ Convert the value to a boolean if it is a string. :param Union[str, bool] enabled: Value to be converted. :return: Converted value. :rtype: bool """ if isinstance(value, bool): return value if value.lower() == "true": return True if value.lower() == "false": return False raise ValueError( f"Invalid setting 'enabled' with value '{value}' for feature '{feature_name}'." )
Convert the value to a boolean if it is a string. :param Union[str, bool] enabled: Value to be converted. :return: Converted value. :rtype: bool
convert_string_to_bool
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_featuremodels.py
MIT
def get_appconfig_service_client(cli_ctx, api_version=None): ''' Returns the client for managing configuration stores.''' from azure.cli.core.commands.client_factory import get_mgmt_service_client from azure.mgmt.appconfiguration import AppConfigurationManagementClient client = get_mgmt_service_client(cli_ctx, AppConfigurationManagementClient, api_version=api_version) return client
Returns the client for managing configuration stores.
get_appconfig_service_client
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_client_factory.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_client_factory.py
MIT
def _get_value(item, *args): """Recursively get a nested value from a dict. :param dict item: The dict object """ try: for arg in args: item = item[arg] return _EvenLadder(item) if item is not None else ' ' except (KeyError, TypeError, IndexError): return ' '
Recursively get a nested value from a dict. :param dict item: The dict object
_get_value
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_format.py
MIT
def _get_value_by_names(item, property_names): """ CLI core converts KeyValue object field names to camelCase (eg: content_type becomes contentType) But when customers specify field filters, we return a dict of requested fields instead of KeyValue object In that case, field names are not converted to camelCase. We need to check for both content_type and contentType We iterate through given names and return the first value that is found """ # pylint: disable=line-too-long # This is the value that will be returned if no value is found or the value is None. This maintains consistency with the default table transformer logic # https://github.com/Azure/azure-cli/blob/fdb9aa742b404a433f207b573eea37341769020c/src/azure-cli-core/azure/cli/core/commands/transform.py#L91C1-L98C30 empty_value = ' ' property_value = empty_value for property_name in property_names: property_value = _get_value(item, property_name) if property_value != empty_value: break return property_value
CLI core converts KeyValue object field names to camelCase (eg: content_type becomes contentType) But when customers specify field filters, we return a dict of requested fields instead of KeyValue object In that case, field names are not converted to camelCase. We need to check for both content_type and contentType We iterate through given names and return the first value that is found
_get_value_by_names
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/_format.py
MIT
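_get_value_by_names above exists because the same field can surface as content_type (raw dict returned for --fields filtering) or contentType (after CLI core's camelCase transform). A self-contained sketch of that first-match-wins lookup (names here are illustrative, not the module's API):

    def get_first_present(item, *names, default=' '):
        # Return the first non-missing, non-None value among candidate field names.
        for name in names:
            try:
                value = item[name]
            except (KeyError, TypeError, IndexError):
                continue
            if value is not None:
                return value
        return default

    row_from_sdk = {'contentType': 'application/json'}
    row_from_field_filter = {'content_type': 'application/json'}
    assert get_first_present(row_from_sdk, 'content_type', 'contentType') == 'application/json'
    assert get_first_present(row_from_field_filter, 'content_type', 'contentType') == 'application/json'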
def test_azconfig_json_content_type(self, resource_group, location):
    src_config_store_prefix = get_resource_name_prefix('Source')
    dest_config_store_prefix = get_resource_name_prefix('Destination')
    src_config_store_name = self.create_random_name(prefix=src_config_store_prefix, length=36)
    dest_config_store_name = self.create_random_name(prefix=dest_config_store_prefix, length=36)
    location = 'eastus'
    sku = 'standard'
    self.kwargs.update({
        'config_store_name': src_config_store_name,
        'rg_loc': location,
        'rg': resource_group,
        'sku': sku
    })
    create_config_store(self, self.kwargs)

    # Get src connection string
    credential_list = self.cmd(
        'appconfig credential list -n {config_store_name} -g {rg}').get_output_in_json()
    self.kwargs.update({
        'src_connection_string': credential_list[0]['connectionString'],
        'config_store_name': dest_config_store_name
    })
    create_config_store(self, self.kwargs)

    # Get dest connection string
    credential_list = self.cmd(
        'appconfig credential list -n {config_store_name} -g {rg}').get_output_in_json()
    self.kwargs.update({
        'dest_connection_string': credential_list[0]['connectionString']
    })

    """
    Test Scenario 1: Create settings with JSON Content Type
        - Create settings in Src AppConfig store with JSON Content type
        - Make sure that input value is in valid JSON format
    """
    entry_key = "Key01"
    entry_value = '\\"Red\\"'
    appconfig_value = entry_value.replace('\\', '')
    json_content_type_01 = 'application/json'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_01})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', appconfig_value),
                     self.check('contentType', json_content_type_01)])

    entry_key = "Key02"
    entry_value = '\\"Red\\Robin\\Hood\\"'
    appconfig_value = entry_value.replace('\\', '')
    json_content_type_02 = 'application/json;charset=utf-8'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_02})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', appconfig_value),
                     self.check('contentType', json_content_type_02)])

    entry_key = "Key03"
    entry_value = 'true'
    json_content_type_03 = 'application/boolean+json;'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_03})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', entry_value),
                     self.check('contentType', json_content_type_03)])

    entry_key = "Key04"
    entry_value = '45.6'
    json_content_type_04 = 'application/json+text+number;charset=utf-8;param1=value1'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_04})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', entry_value),
                     self.check('contentType', json_content_type_04)])

    entry_key = "Key05"
    entry_value = '\\"true\\"'
    appconfig_value = entry_value.replace('\\', '')
    json_content_type_05 = 'application/string+json;'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_05})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', appconfig_value),
                     self.check('contentType', json_content_type_05)])

    entry_key = "Key06"
    entry_value = '\\"999\\"'
    appconfig_value = entry_value.replace('\\', '')
    self.kwargs.update({'key': entry_key, 'value': entry_value})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', appconfig_value),
                     self.check('contentType', json_content_type_05)])

    entry_key = "Key07"
    entry_value = 'null'
    json_content_type_07 = 'application/json+null;charset=utf-8;'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_07})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', entry_value),
                     self.check('contentType', json_content_type_07)])

    entry_key = "Key08"
    entry_value = '[1,2,3,4]'
    json_content_type_08 = 'application/vnd.numericarray+json'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_08})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', entry_value),
                     self.check('contentType', json_content_type_08)])

    entry_key = "Key09"
    entry_value = '[\\"abc\\",\\"def\\"]'
    appconfig_value = entry_value.replace('\\', '')
    json_content_type_09 = 'application/vnd.stringarray+json'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_09})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', appconfig_value),
                     self.check('contentType', json_content_type_09)])

    entry_key = "Key10"
    entry_value = '[\\"text\\",true,null]'
    appconfig_value = entry_value.replace('\\', '')
    json_content_type_10 = 'application/json+hybridarray'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_10})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', appconfig_value),
                     self.check('contentType', json_content_type_10)])

    entry_key = "Key11"
    entry_value = '{\\"Name\\":\\"Value\\"}'
    appconfig_value = entry_value.replace('\\', '')
    json_content_type_11 = 'application/json'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'appconfig_value': appconfig_value, 'content_type': json_content_type_11})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', '{appconfig_value}'),
                     self.check('contentType', json_content_type_11)])

    entry_key = "Key12"
    entry_value = '{\\"MyNullValue\\":null,\\"MyObject\\":{\\"Property\\":{\\"Name\\":{\\"Name1\\":\\"Value1\\",\\"Name2\\":[\\"qqq\\",\\"rrr\\"]}}},\\"MyArray\\":[1,2,3]}'
    appconfig_value = entry_value.replace('\\', '')
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'appconfig_value': appconfig_value})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', '{appconfig_value}'),
                     self.check('contentType', json_content_type_11)])

    # Treat missing value argument as null value
    entry_key = "Key13"
    appconfig_value = "null"
    json_content_type_13 = 'application/null+json+empty'
    self.kwargs.update({'key': entry_key, 'content_type': json_content_type_13})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --content-type {content_type} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', appconfig_value),
                     self.check('contentType', json_content_type_13)])

    # Validate that input value is in JSON format
    entry_key = "Key14"
    entry_value = 'Red'
    self.kwargs.update({'key': entry_key, 'value': entry_value, 'content_type': json_content_type_01})
    with self.assertRaisesRegex(CLIError, "is not a valid JSON object, which conflicts with the content type."):
        self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y')

    self.kwargs.update({'value': '[abc,def]'})
    with self.assertRaisesRegex(CLIError, "is not a valid JSON object, which conflicts with the content type."):
        self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y')

    self.kwargs.update({'value': 'True'})
    with self.assertRaisesRegex(CLIError, "is not a valid JSON object, which conflicts with the content type."):
        self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} --content-type {content_type} -y')

    # Create a non-JSON key-value and update its content type in subsequent command
    self.kwargs.update({'value': entry_value})
    self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --value {value} -y',
             checks=[self.check('key', entry_key),
                     self.check('value', entry_value)])
    with self.assertRaisesRegex(CLIError, "Set the value again in valid JSON format."):
        self.cmd('appconfig kv set --connection-string {src_connection_string} --key {key} --content-type {content_type} -y')

    """
    Test Scenario 2: AppConfig <--> AppConfig Import/Export
        - Add Feature Flag and Key vault Reference
        - Import settings from Src to Dest AppConfig store with JSON content type
        - Export to JSON file from src config store
        - Export to JSON file from dest config store
        - Compare both exported files
        - Delete all settings from dest config store
        - Export settings from Src to Dest AppConfig store
        - Export to JSON file from src config store
        - Export to JSON file from dest config store
        - Compare both exported files
        - Delete all settings from both stores
    """
    # Add a new feature flag
    entry_feature = 'Beta'
    internal_feature_key = FeatureFlagConstants.FEATURE_FLAG_PREFIX + entry_feature
    default_description = ""
    default_conditions = "{{\'client_filters\': []}}"
    default_locked = False
    default_state = "off"
    self.kwargs.update({'feature': entry_feature})
    self.cmd('appconfig feature set --connection-string {src_connection_string} --feature {feature} -y',
             checks=[self.check('locked', default_locked),
                     self.check('name', entry_feature),
                     self.check('key', internal_feature_key),
                     self.check('description', default_description),
                     self.check('state', default_state),
                     self.check('conditions', default_conditions)])

    # Add new KeyVault reference
    keyvault_key = "HostSecrets"
    keyvault_id = "https://fake.vault.azure.net/secrets/fakesecret"
    keyvault_value = f"{{{json.dumps({'uri': keyvault_id})}}}"
    self.kwargs.update({'key': keyvault_key, 'secret_identifier': keyvault_id})
    self.cmd('appconfig kv set-keyvault --connection-string {src_connection_string} --key {key} --secret-identifier {secret_identifier} -y',
             checks=[self.check('contentType', KeyVaultConstants.KEYVAULT_CONTENT_TYPE),
                     self.check('key', keyvault_key),
                     self.check('value', keyvault_value)])

    # Test IMPORT function by importing all settings from src config store to dest config store
    self.kwargs.update({'import_source': 'appconfig'})
    self.cmd(
        'appconfig kv import --connection-string {dest_connection_string} -s {import_source} --src-connection-string {src_connection_string} --content-type {content_type} -y')

    # Export to JSON file from src config store
    exported_src_file_path = os.path.join(TEST_DIR, 'json_export_src.json')
    self.kwargs.update({
        'export_dest': 'file',
        'export_format': 'json',
        'separator': ':',
        'exported_file_path': exported_src_file_path,
    })
    self.cmd(
        'appconfig kv export --connection-string {src_connection_string} -d {export_dest} --path "{exported_file_path}" --format {export_format} --separator {separator} -y')

    # Export to JSON file from dest config store
    exported_dest_file_path = os.path.join(TEST_DIR, 'json_export_dest.json')
    self.kwargs.update({'exported_file_path': exported_dest_file_path})
    self.cmd(
        'appconfig kv export --connection-string {dest_connection_string} -d {export_dest} --path "{exported_file_path}" --format {export_format} --separator {separator} -y')

    with open(exported_src_file_path) as json_file:
        src_kvs = json.load(json_file)
    with open(exported_dest_file_path) as json_file:
        dest_kvs = json.load(json_file)
    assert src_kvs == dest_kvs

    # Delete all settings from dest config store
    any_key_pattern = '*'
    any_label_pattern = '*'
    self.kwargs.update({'key': any_key_pattern, 'label': any_label_pattern})
    self.cmd('appconfig kv delete --connection-string {dest_connection_string} --key {key} --label {label} -y')

    # Test EXPORT function by exporting all settings from src config store to dest config store
    self.cmd(
        'appconfig kv export --connection-string {src_connection_string} -d {import_source} --dest-connection-string {dest_connection_string} -y')

    # Export to JSON file from src config store
    exported_src_file_path = os.path.join(TEST_DIR, 'json_export_src.json')
    self.kwargs.update({
        'export_dest': 'file',
        'export_format': 'json',
        'separator': ':',
        'exported_file_path': exported_src_file_path,
    })
    self.cmd(
        'appconfig kv export --connection-string {src_connection_string} -d {export_dest} --path "{exported_file_path}" --format {export_format} --separator {separator} -y')

    # Export to JSON file from dest config store
    exported_dest_file_path = os.path.join(TEST_DIR, 'json_export_dest.json')
    self.kwargs.update({'exported_file_path': exported_dest_file_path})
    self.cmd(
        'appconfig kv export --connection-string {dest_connection_string} -d {export_dest} --path "{exported_file_path}" --format {export_format} --separator {separator} -y')

    with open(exported_src_file_path) as json_file:
        src_kvs = json.load(json_file)
    with open(exported_dest_file_path) as json_file:
        dest_kvs = json.load(json_file)
    assert src_kvs == dest_kvs

    os.remove(exported_dest_file_path)
    os.remove(exported_src_file_path)

    # Delete all settings from both config stores
    self.cmd('appconfig kv delete --connection-string {src_connection_string} --key {key} --label {label} -y')
    self.cmd('appconfig kv delete --connection-string {dest_connection_string} --key {key} --label {label} -y')

    """ Test Scenario 3:
File <--> AppConfig Import/Export - Import settings to config store from JSON file with JSON content type - Export settings from config store to JSON file - Compare imported and exported files - Delete all settings from both stores """ os.environ['AZURE_APPCONFIG_FM_COMPATIBLE'] = 'True' imported_file_path = os.path.join(TEST_DIR, 'json_import.json') exported_file_path = os.path.join(TEST_DIR, 'json_export.json') self.kwargs.update({ 'import_source': 'file', 'imported_format': 'json', 'separator': ':', 'imported_file_path': imported_file_path, 'exported_file_path': exported_file_path }) self.cmd( 'appconfig kv import --connection-string {src_connection_string} -s {import_source} --path "{imported_file_path}" --format {imported_format} --separator {separator} --content-type {content_type} -y') self.cmd( 'appconfig kv export --connection-string {src_connection_string} -d {import_source} --path "{exported_file_path}" --format {imported_format} --separator {separator} -y') with open(exported_file_path) as json_file: exported_file = json.load(json_file) with open(imported_file_path) as json_file: imported_file = json.load(json_file) assert exported_file == imported_file """ Test Scenario 4: JSON Content Type and YAML files - Import settings from YAML file with JSON content type should fail - Export settings to YAML file should not fail even though settings have JSON content type - Compare previously exported settings in json format with the newly exported settings in YAML format - Delete all settings from config store """ imported_file_path = os.path.join(TEST_DIR, 'yaml_import.json') exported_yaml_file_path = os.path.join(TEST_DIR, 'yaml_export.json') self.kwargs.update({ 'imported_format': 'yaml', 'imported_file_path': imported_file_path, 'exported_file_path': exported_yaml_file_path }) with self.assertRaisesRegex(CLIError, "Please provide JSON file format to match your content type."): self.cmd('appconfig kv import --connection-string {src_connection_string} -s {import_source} --path "{imported_file_path}" --format {imported_format} --separator {separator} --content-type {content_type} -y') self.cmd( 'appconfig kv export --connection-string {src_connection_string} -d {import_source} --path "{exported_file_path}" --format {imported_format} --separator {separator} -y') exported_yaml_file = {} exported_json_file = {} with open(exported_yaml_file_path) as yaml_file: for yaml_data in list(yaml.safe_load_all(yaml_file)): exported_yaml_file.update(yaml_data) with open(exported_file_path) as json_file: exported_json_file = json.load(json_file) assert exported_yaml_file == exported_json_file os.remove(exported_yaml_file_path) os.remove(exported_file_path)
Test Scenario 1: Create settings with JSON Content Type - Create settings in Src AppConfig store with JSON Content type - Make sure that input value is in valid JSON format
test_azconfig_json_content_type
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/tests/latest/test_appconfig_json_content_type.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/tests/latest/test_appconfig_json_content_type.py
MIT
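The test above exercises the rule that a key-value whose content type is a JSON media type must carry a value that parses as JSON. Below is a minimal sketch of that check; _is_json_content_type is a simplified approximation written for illustration only, not the CLI's exact media-type parser.

import json

def _is_json_content_type(content_type):
    # Simplified assumption: any 'application/...' media type whose subtype mentions 'json'
    # is treated as JSON (covers cases like 'application/json+text+number;charset=utf-8'
    # used in the test above).
    media_type = content_type.split(';')[0].strip().lower()
    return media_type.startswith('application/') and 'json' in media_type

def check_value_matches_content_type(value, content_type):
    if _is_json_content_type(content_type):
        try:
            json.loads(value)
        except ValueError:
            raise ValueError(
                "The value is not a valid JSON object, which conflicts with the content type.")

check_value_matches_content_type('"Red"', 'application/json')                       # passes
check_value_matches_content_type('[1,2,3,4]', 'application/vnd.numericarray+json')  # passes
# check_value_matches_content_type('Red', 'application/json')                       # would raise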
def test_azconfig_import_export(self, resource_group, location): store_name_prefix = get_resource_name_prefix('ImportTest') config_store_name = self.create_random_name(prefix=store_name_prefix, length=36) location = 'eastus' sku = 'standard' self.kwargs.update({ 'config_store_name': config_store_name, 'rg_loc': location, 'rg': resource_group, 'sku': sku }) create_config_store(self, self.kwargs) # File <--> AppConfig tests imported_file_path = os.path.join(TEST_DIR, 'import.json') imported_json_array = os.path.join(TEST_DIR, 'import_json_array.json') imported_plain_string_file_path = os.path.join(TEST_DIR, 'import_invalid_plain_string.json') exported_file_path = os.path.join(TEST_DIR, 'export.json') exported_json_object = os.path.join(TEST_DIR, 'export_changed_json.json') exported_json_object_reference = os.path.join(TEST_DIR, 'export_changed_json_ref.json') self.kwargs.update({ 'import_source': 'file', 'imported_format': 'json', 'separator': '/', 'imported_file_path': imported_file_path, 'exported_file_path': exported_file_path }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --separator {separator} -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --separator {separator} -y') with open(imported_file_path) as json_file: imported_kvs = json.load(json_file) with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert imported_kvs == exported_kvs # ignore already existing kvs ignore_match_file_path = os.path.join(TEST_DIR, 'ignore_match_import.json') key_name = 'BackgroundColor' self.kwargs.update({ 'key': key_name, 'imported_file_path': ignore_match_file_path }) background_color_kv = self.cmd('appconfig kv show -n {config_store_name} --key {key}').get_output_in_json() self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --separator {separator} -y') # Confirm that the key has the same etag after re-importing self.cmd('appconfig kv show -n {config_store_name} --key {key}', checks=[ self.check('key', key_name), self.check('etag', background_color_kv['etag']), ]) self.kwargs.update({ 'imported_file_path': imported_file_path }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --separator {separator} -y --import-mode all') updated_background_color_kv = self.cmd('appconfig kv show -n {config_store_name} --key {key}').get_output_in_json() self.assertNotEqual(background_color_kv['etag'], updated_background_color_kv['etag']) # skip key vault reference while exporting self.kwargs.update({ 'key': "key_vault_reference", 'secret_identifier': "https://testkeyvault.vault.azure.net/secrets/mysecret" }) self.cmd( 'appconfig kv set-keyvault -n {config_store_name} --key {key} --secret-identifier {secret_identifier} -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --separator {separator} --skip-keyvault -y') with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert imported_kvs == exported_kvs os.remove(exported_file_path) # Error out when importing plain string. 
self.kwargs.update({ 'imported_file_path': imported_plain_string_file_path }) with self.assertRaisesRegex(CLIError, "The input is not a well formatted json file.\nException: Json object required but type 'str' was given."): self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format}') ''' 1. Import configuration from JSON file which has a key "arr" with array values. Assign label "array_test" and separator "/" 2. Add new value with the same prefix as the array data key but not continuous with array indices. (e.g., "arr/foo") 3. Export configurations with the same label to a JSON file. 4. Confirm that the value of "arr" is now a JSON object. ''' self.kwargs.update({ 'imported_file_path': imported_json_array, 'exported_file_path': exported_json_object, 'key': 'arr/foo', 'value': 'bar', 'label': 'array_test' }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --separator {separator} --label {label} -y') self.cmd( 'appconfig kv set -n {config_store_name} --key {key} --value {value} --label {label} -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --separator {separator} --label {label} -y') with open(exported_json_object) as json_file: exported_kvs = json.load(json_file) with open(exported_json_object_reference) as json_file: expected_exported_kvs = json.load(json_file) assert expected_exported_kvs == exported_kvs os.remove(exported_json_object) # Feature flags test imported_file_path = os.path.join(TEST_DIR, 'import_features.json') exported_file_path = os.path.join(TEST_DIR, 'export_features.json') key_filtered_features_file_path = os.path.join(TEST_DIR, 'key_filtered_features.json') prefix_added_features_file_path = os.path.join(TEST_DIR, 'prefix_added_features.json') skipped_features_file_path = os.path.join(TEST_DIR, 'skipped_features.json') export_separator_features_file_path = os.path.join(TEST_DIR, 'export_separator_features.json') import_separator_features_file_path = os.path.join(TEST_DIR, 'import_separator_features.json') import_features_alt_syntax_file_path = os.path.join(TEST_DIR, 'import_features_alt_syntax.json') import_features_random_conditions_file_path = os.path.join(TEST_DIR, 'import_features_random_conditions.json') os.environ['AZURE_APPCONFIG_FM_COMPATIBLE'] = 'True' self.kwargs.update({ 'label': 'KeyValuesWithFeatures', 'imported_file_path': imported_file_path, 'exported_file_path': exported_file_path }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --label {label} -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} -y') with open(imported_file_path) as json_file: imported_kvs = json.load(json_file) with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert imported_kvs == exported_kvs # skip features while exporting self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} --skip-features -y') with open(skipped_features_file_path) as json_file: only_kvs = json.load(json_file) with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert only_kvs == exported_kvs # skip features while importing self.kwargs.update({ 'label': 
'SkipFeatures' }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --label {label} --skip-features -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} -y') with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert only_kvs == exported_kvs # Prefix addition test self.kwargs.update({ 'label': 'PrefixTest', 'prefix': 'Test' }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --label {label} --prefix {prefix} -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} -y') with open(prefix_added_features_file_path) as json_file: prefix_added_kvs = json.load(json_file) with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert prefix_added_kvs == exported_kvs # Prefix trimming test self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} --prefix {prefix} -y') with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert imported_kvs == exported_kvs # Key filtering test self.kwargs.update({ 'label': 'KeyValuesWithFeatures', 'key': 'Col*' }) self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} --key {key} -y') with open(key_filtered_features_file_path) as json_file: key_filtered_features = json.load(json_file) with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert key_filtered_features == exported_kvs # Separator test self.kwargs.update({ 'label': 'SeparatorTest', 'separator': ':', 'imported_file_path': import_separator_features_file_path }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --label {label} --separator {separator} -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} --separator {separator} -y') with open(export_separator_features_file_path) as json_file: imported_kvs = json.load(json_file) with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert imported_kvs == exported_kvs # Support alternative syntax for always ON/OFF features self.kwargs.update({ 'label': 'AltSyntaxTest', 'imported_file_path': import_features_alt_syntax_file_path }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path "{imported_file_path}" --format {imported_format} --label {label} --separator {separator} -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} --separator {separator} -y') with open(imported_file_path) as json_file: imported_kvs = json.load(json_file) with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert imported_kvs == exported_kvs # Support including all properties in the feature flag conditions self.kwargs.update({ 'imported_file_path': import_features_random_conditions_file_path, 'label': 'RandomConditionsTest', }) self.cmd( 'appconfig kv import -n {config_store_name} -s {import_source} --path 
"{imported_file_path}" --format {imported_format} --label {label} -y') self.cmd( 'appconfig kv export -n {config_store_name} -d {import_source} --path "{exported_file_path}" --format {imported_format} --label {label} -y') with open(import_features_random_conditions_file_path) as json_file: imported_kvs = json.load(json_file) with open(exported_file_path) as json_file: exported_kvs = json.load(json_file) assert imported_kvs == exported_kvs os.remove(exported_file_path)
1. Import configuration from JSON file which has a key "arr" with array values. Assign label "array_test" and separator "/" 2. Add new value with the same prefix as the array data key but not continuous with array indices. (e.g., "arr/foo") 3. Export configurations with the same label to a JSON file. 4. Confirm that the value of "arr" is now a JSON object.
test_azconfig_import_export
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/appconfig/tests/latest/test_appconfig_kv_import_export_commands.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/appconfig/tests/latest/test_appconfig_kv_import_export_commands.py
MIT
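Outside the test harness, the same round-trip comparison the test performs can be scripted directly against the CLI. A minimal sketch, assuming 'az' is on PATH and using the same export flags as the test above:

import json
import subprocess

def export_to_file(connection_string, path):
    # Flags mirror the ones used in the test: export to a JSON file with ':' separator.
    subprocess.run(
        ['az', 'appconfig', 'kv', 'export', '--connection-string', connection_string,
         '-d', 'file', '--path', path, '--format', 'json', '--separator', ':', '-y'],
        check=True)

def stores_match(src_connection_string, dest_connection_string):
    export_to_file(src_connection_string, 'src.json')
    export_to_file(dest_connection_string, 'dest.json')
    with open('src.json') as f_src, open('dest.json') as f_dest:
        return json.load(f_src) == json.load(f_dest)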
def validate_tenant(cmd, namespace):
    """
    Make sure tenant is a GUID. If domain name is provided, resolve to GUID.
    https://learn.microsoft.com/en-us/entra/identity-platform/v2-protocols-oidc#find-your-apps-openid-configuration-document-uri
    """
    from azure.cli.core.util import is_guid
    if namespace.tenant is not None and not is_guid(namespace.tenant):
        import requests
        active_directory_endpoint = cmd.cli_ctx.cloud.endpoints.active_directory
        url = '{}/{}/v2.0/.well-known/openid-configuration'.format(active_directory_endpoint, namespace.tenant)
        response = requests.get(url, verify=not should_disable_connection_verify())

        if response.status_code != 200:
            from knack.util import CLIError
            raise CLIError("Failed to resolve tenant '{}'.\n\nError detail: {}".format(namespace.tenant,
                                                                                       response.text))

        # Example issuer: https://login.microsoftonline.com/72f988bf-86f1-41af-91ab-2d7cd011db47/v2.0
        tenant_id = response.json()['issuer'].split("/")[3]
        logger.debug('Resolved tenant domain name %s to GUID %s', namespace.tenant, tenant_id)
        namespace.tenant = tenant_id
Make sure tenant is a GUID. If domain name is provided, resolve to GUID. https://learn.microsoft.com/en-us/entra/identity-platform/v2-protocols-oidc#find-your-apps-openid-configuration-document-uri
validate_tenant
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/_validators.py
MIT
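The same domain-to-GUID resolution can be reproduced outside the CLI by querying the tenant's OpenID configuration document directly. A standalone sketch, assuming the public Azure cloud authority https://login.microsoftonline.com:

import requests

def resolve_tenant_id(tenant_domain, authority='https://login.microsoftonline.com'):
    # The well-known OpenID configuration document embeds the tenant GUID in its 'issuer' URL,
    # e.g. https://login.microsoftonline.com/<tenant-guid>/v2.0
    url = f'{authority}/{tenant_domain}/v2.0/.well-known/openid-configuration'
    response = requests.get(url)
    response.raise_for_status()
    return response.json()['issuer'].split('/')[3]

# resolve_tenant_id('contoso.onmicrosoft.com')  ->  the tenant's GUID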
def list_subscriptions(cmd, all=False, refresh=False):  # pylint: disable=redefined-builtin
    """List the imported subscriptions."""
    from azure.cli.core.api import load_subscriptions

    subscriptions = load_subscriptions(cmd.cli_ctx, all_clouds=all, refresh=refresh)
    if not subscriptions:
        logger.warning('Please run "az login" to access your accounts.')
    for sub in subscriptions:
        sub['cloudName'] = sub.pop('environmentName', None)
    if not all:
        enabled_ones = [s for s in subscriptions if s.get('state') == 'Enabled']
        if len(enabled_ones) != len(subscriptions):
            logger.warning("A few accounts are skipped as they don't have 'Enabled' state. "
                           "Use '--all' to display them.")
        subscriptions = enabled_ones
    return subscriptions
List the imported subscriptions.
list_subscriptions
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/custom.py
MIT
def get_access_token(cmd, subscription=None, resource=None, scopes=None, resource_type=None, tenant=None):
    """
    Get an AAD token to access a specified resource.
    Use 'az cloud show' command for other Azure resources.
    """
    if resource is None and resource_type:
        endpoints_attr_name = cloud_resource_type_mappings[resource_type]
        resource = getattr(cmd.cli_ctx.cloud.endpoints, endpoints_attr_name)
    profile = Profile(cli_ctx=cmd.cli_ctx)
    creds, subscription, tenant = profile.get_raw_token(subscription=subscription, resource=resource, scopes=scopes,
                                                        tenant=tenant)
    result = {
        'tokenType': creds[0],
        'accessToken': creds[1],
        'expires_on': creds[2]['expires_on'],
        'expiresOn': creds[2]['expiresOn'],
        'tenant': tenant
    }
    if subscription:
        result['subscription'] = subscription
    return result
Get an AAD token to access a specified resource. Use 'az cloud show' command for other Azure resources.
get_access_token
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/custom.py
MIT
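One common use of the token returned above is calling an Azure REST API directly. A minimal sketch, assuming the token is obtained for the ARM resource via the CLI; the subscription ID and api-version are illustrative placeholders:

import json
import subprocess

import requests

# Fetch an ARM access token from the logged-in CLI session.
token = json.loads(subprocess.run(
    ['az', 'account', 'get-access-token'], capture_output=True, check=True).stdout)

subscription_id = '00000000-0000-0000-0000-000000000000'  # placeholder for illustration
resp = requests.get(
    f'https://management.azure.com/subscriptions/{subscription_id}/resourcegroups'
    '?api-version=2021-04-01',
    headers={'Authorization': f"{token['tokenType']} {token['accessToken']}"})
print(resp.status_code)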
def set_active_subscription(cmd, subscription):
    """Set the current subscription"""
    profile = Profile(cli_ctx=cmd.cli_ctx)
    if not subscription:
        raise CLIError('Please provide subscription id or unique name.')
    profile.set_active_subscription(subscription)
Set the current subscription
set_active_subscription
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/custom.py
MIT
def account_clear(cmd):
    """Clear all stored subscriptions. To clear individual, use 'logout'"""
    _remove_adal_token_cache()

    if in_cloud_console():
        logger.warning(_CLOUD_CONSOLE_LOGOUT_WARNING)

    profile = Profile(cli_ctx=cmd.cli_ctx)
    profile.logout_all()
Clear all stored subscriptions. To clear individual, use 'logout'
account_clear
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/custom.py
MIT
def login(cmd, username=None, password=None, tenant=None, scopes=None, allow_no_subscriptions=False, # Device code flow use_device_code=False, # Service principal service_principal=None, certificate=None, use_cert_sn_issuer=None, client_assertion=None, # Managed identity identity=False, client_id=None, object_id=None, resource_id=None): """Log in to access Azure subscriptions""" # quick argument usage check if any([password, service_principal, tenant]) and identity: raise CLIError("usage error: '--identity' is not applicable with other arguments") if any([password, service_principal, username, identity]) and use_device_code: raise CLIError("usage error: '--use-device-code' is not applicable with other arguments") if use_cert_sn_issuer and not service_principal: raise CLIError("usage error: '--use-sn-issuer' is only applicable with a service principal") if service_principal and not username: raise CLIError('usage error: --service-principal --username NAME --password SECRET --tenant TENANT') if username and not service_principal and not identity: logger.warning(USERNAME_PASSWORD_DEPRECATION_WARNING) interactive = False profile = Profile(cli_ctx=cmd.cli_ctx) if identity: if in_cloud_console(): return profile.login_in_cloud_shell() return profile.login_with_managed_identity( identity_id=username, client_id=client_id, object_id=object_id, resource_id=resource_id, allow_no_subscriptions=allow_no_subscriptions) if in_cloud_console(): # tell users they might not need login logger.warning(_CLOUD_CONSOLE_LOGIN_WARNING) if username: if not (password or client_assertion or certificate): try: password = prompt_pass('Password: ') except NoTTYException: raise CLIError('Please specify both username and password in non-interactive mode.') else: interactive = True if service_principal: from azure.cli.core.auth.identity import ServicePrincipalAuth password = ServicePrincipalAuth.build_credential( client_secret=password, certificate=certificate, use_cert_sn_issuer=use_cert_sn_issuer, client_assertion=client_assertion) login_experience_v2 = cmd.cli_ctx.config.getboolean('core', 'login_experience_v2', fallback=True) # Send login_experience_v2 config to telemetry from azure.cli.core.telemetry import set_login_experience_v2 set_login_experience_v2(login_experience_v2) select_subscription = interactive and sys.stdin.isatty() and sys.stdout.isatty() and login_experience_v2 subscriptions = profile.login( interactive, username, password, service_principal, tenant, scopes=scopes, use_device_code=use_device_code, allow_no_subscriptions=allow_no_subscriptions, use_cert_sn_issuer=use_cert_sn_issuer, show_progress=select_subscription) # Launch interactive account selection. No JSON output. if select_subscription: from ._subscription_selector import SubscriptionSelector from azure.cli.core._profile import _SUBSCRIPTION_ID selected = SubscriptionSelector(subscriptions)() profile.set_active_subscription(selected[_SUBSCRIPTION_ID]) print(LOGIN_ANNOUNCEMENT) logger.warning(LOGIN_OUTPUT_WARNING) return all_subscriptions = list(subscriptions) for sub in all_subscriptions: sub['cloudName'] = sub.pop('environmentName', None) return all_subscriptions
Log in to access Azure subscriptions
login
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/custom.py
MIT
def logout(cmd, username=None):
    """Log out to remove access to Azure subscriptions"""
    _remove_adal_token_cache()

    if in_cloud_console():
        logger.warning(_CLOUD_CONSOLE_LOGOUT_WARNING)

    profile = Profile(cli_ctx=cmd.cli_ctx)
    if not username:
        username = profile.get_current_account_user()

    profile.logout(username)
Log out to remove access to Azure subscriptions
logout
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/custom.py
MIT
def _remove_adal_token_cache():
    """Remove ADAL token cache file ~/.azure/accessTokens.json, as it is no longer needed by MSAL-based Azure CLI.
    """
    from azure.cli.core._environment import get_config_dir
    adal_token_cache = os.path.join(get_config_dir(), 'accessTokens.json')
    try:
        os.remove(adal_token_cache)
        return True  # Deleted
    except FileNotFoundError:
        return False  # Not exist
Remove ADAL token cache file ~/.azure/accessTokens.json, as it is no longer needed by MSAL-based Azure CLI.
_remove_adal_token_cache
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/custom.py
MIT
def __call__(self):
    """Select a subscription.

    NOTE: The original subscription list (isDefault property) is not modified. Call
    Profile.set_active_subscription to modify it.
    """
    from knack.prompting import prompt

    print(f'\n[Tenant and subscription selection]\n\n{self._table_str}\n')
    tenant_string = self._get_tenant_string(self._active_one)
    print(f"The default is marked with an {self.DEFAULT_ROW_MARKER}; "
          f"the default tenant is '{tenant_string}' and subscription is "
          f"'{self._active_one[_SUBSCRIPTION_NAME]}' ({self._active_one[_SUBSCRIPTION_ID]}).\n")

    selected = self._active_one

    # Keep prompting until the user inputs a valid index
    while True:
        select_index = prompt('Select a subscription and tenant (Type a number or Enter for no changes): ')

        # Nothing is typed, keep current selection
        if select_index == '':
            break

        if select_index in self._index_to_subscription_map:
            selected = self._index_to_subscription_map[select_index]
            break

        logger.warning("Invalid selection.")  # Let retry

    # Echo the selection
    tenant_string = self._get_tenant_string(selected)
    print(f"\nTenant: {tenant_string}\n"
          f"Subscription: {selected[_SUBSCRIPTION_NAME]} ({selected[_SUBSCRIPTION_ID]})\n")
    return selected
Select a subscription. NOTE: The original subscription list (isDefault property) is not modified. Call Profile.set_active_subscription to modify it.
__call__
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/_subscription_selector.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/_subscription_selector.py
MIT
def test_conditional_access_mfa(self): """ This test should be run using a user account that - doesn't require MFA for ARM - requires MFA for data-plane resource The result ATs are checked per Microsoft identity platform access tokens https://learn.microsoft.com/en-us/azure/active-directory/develop/access-tokens Following claims are checked: - aud (Audience): https://tools.ietf.org/html/rfc7519#section-4.1.3 - amr (Authentication Method Reference): https://tools.ietf.org/html/rfc8176 """ scope = 'https://pas.windows.net/CheckMyAccess/Linux/.default' self.kwargs['scope'] = scope # region non-MFA session # Login to ARM (MFA not required) # In the browser, if the user already exists, make sure to logout first and re-login to clear browser cache self.cmd('az login') # Getting ARM AT and check claims result = self.cmd('az account get-access-token').get_output_in_json() decoded = decode_access_token(result['accessToken']) assert decoded['aud'] == self.cli_ctx.cloud.endpoints.active_directory_resource_id assert decoded['amr'] == ['pwd'] # Getting data-plane AT with ARM RT (step-up) fails with self.assertRaises(AuthenticationError) as cm: self.cmd('az account get-access-token --scope {scope}') # Check re-login recommendation re_login_command = 'az login --scope {scope}'.format(**self.kwargs) assert 'AADSTS50076' in cm.exception.error_msg assert re_login_command in cm.exception.recommendations[0] # endregion # region MFA session # Re-login with data-plane scope (MFA required) # Getting ARM AT with data-plane RT (step-down) succeeds self.cmd(re_login_command) # Getting ARM AT and check claims result = self.cmd('az account get-access-token').get_output_in_json() decoded = decode_access_token(result['accessToken']) assert decoded['aud'] == self.cli_ctx.cloud.endpoints.active_directory_resource_id assert decoded['amr'] == ['pwd'] # Getting data-plane AT and check claims result = self.cmd('az account get-access-token --scope {scope}').get_output_in_json() decoded = decode_access_token(result['accessToken']) assert decoded['aud'] in scope assert decoded['amr'] == ['pwd', 'mfa'] self.cmd('logout')
This test should be run using a user account that - doesn't require MFA for ARM - requires MFA for data-plane resource The result ATs are checked per Microsoft identity platform access tokens https://learn.microsoft.com/en-us/azure/active-directory/develop/access-tokens Following claims are checked: - aud (Audience): https://tools.ietf.org/html/rfc7519#section-4.1.3 - amr (Authentication Method Reference): https://tools.ietf.org/html/rfc8176
test_conditional_access_mfa
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/tests/latest/test_auth_e2e.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/tests/latest/test_auth_e2e.py
MIT
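The claim checks in the test above depend on decoding the access token's JWT payload. A minimal sketch of such a decoder, assuming the token is a plain (unencrypted) JWT; the signature is not verified here, which is fine for inspecting one's own token:

import base64
import json

def decode_jwt_payload(access_token):
    # A JWT is header.payload.signature; the payload is base64url-encoded JSON.
    payload = access_token.split('.')[1]
    payload += '=' * (-len(payload) % 4)  # restore the stripped base64 padding
    return json.loads(base64.urlsafe_b64decode(payload))

# claims = decode_jwt_payload(result['accessToken'])
# claims['aud'], claims.get('amr')  ->  audience and authentication-method claims checked above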
def invoke_subscription_selector():
    """This method provides a way of directly testing subscription selection, without running an actual `az login`.

    Invoke this method with:
    python -m azure.cli.command_modules.profile.tests.latest.test_subscription_selector
    """
    result = SubscriptionSelector(DUMMY_SUBSCRIPTIONS)()
    print("Result:", result)
This method provides a way of directly testing subscription selection, without running an actual `az login`. Invoke this method with: python -m azure.cli.command_modules.profile.tests.latest.test_subscription_selector
invoke_subscription_selector
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/profile/tests/latest/test_subscription_selector.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/profile/tests/latest/test_subscription_selector.py
MIT
def create_or_update_project(client,
                             project_name,
                             service_name,
                             resource_group_name,
                             location,
                             source_platform,
                             target_platform,
                             tags=None):
    """This implementation eschews the source and target connection details and the database list. This is because
    this generally only helps in a GUI context--to guide the user more easily through creating a task. Since this
    info is necessary at the Task level, there is no need to include it at the Project level where for CLI it is
    more of a useless redundancy."""

    # Set inputs to lowercase
    source_platform = source_platform.lower()
    target_platform = target_platform.lower()

    scenario_handled_in_core = core_handles_scenario(source_platform, target_platform)

    # Validation: Test scenario eligibility
    if not scenario_handled_in_core:
        raise CLIError("The provided source-platform, target-platform combination is not appropriate. \n\
Please refer to the help file 'az dms project create -h' for the supported scenarios.")

    parameters = Project(location=location,
                         source_platform=source_platform,
                         target_platform=target_platform,
                         tags=tags)

    return client.create_or_update(parameters=parameters,
                                   group_name=resource_group_name,
                                   service_name=service_name,
                                   project_name=project_name)
This implementation eschews the source and target connection details and the database list. This is because this generally only helps in a GUI context--to guide the user more easily through creating a task. Since this info is necessary at the Task level, there is no need to include it at the Project level where for CLI it is more of a useless redundancy.
create_or_update_project
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/dms/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/dms/custom.py
MIT
def run_cli_cmd(cmd, retry=0):
    '''Run a CLI command
    :param cmd: The CLI command to be executed
    :param retry: The times to re-try
    '''
    import json
    import subprocess

    output = subprocess.run(cmd, shell=True, check=False, capture_output=True)
    if output.returncode != 0:
        if retry:
            return run_cli_cmd(cmd, retry - 1)
        raise CLIInternalError('Command execution failed, command is: '
                               '{}, error message is: {}'.format(cmd, output.stderr))
    return json.loads(output.stdout) if output.stdout else None
Run a CLI command :param cmd: The CLI command to be executed :param retry: The times to re-try
run_cli_cmd
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/security/_utils.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/security/_utils.py
MIT
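A usage sketch for the helper above; the command string is an example and the call assumes the az CLI is installed and logged in:

# Returns the parsed JSON output of the command, retrying up to 2 extra times on failure.
account = run_cli_cmd('az account show --output json', retry=2)
print(account['id'] if account else 'no output')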
def _environment_variables_type(value):
    """Space-separated values in 'key=value' format."""
    try:
        env_name, env_value = value.split('=', 1)
        return {'name': env_name, 'value': env_value}
    except ValueError:
        message = ("Incorrectly formatted environment settings. "
                   "Argument values should be in the format a=b c=d")
        raise CLIError(message)
Space-separated values in 'key=value' format.
_environment_variables_type
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_params.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_params.py
MIT
def _secure_environment_variables_type(value):
    """Space-separated values in 'key=value' format."""
    try:
        env_name, env_secure_value = value.split('=', 1)
        return {'name': env_name, 'secureValue': env_secure_value}
    except ValueError:
        message = ("Incorrectly formatted secure environment settings. "
                   "Argument values should be in the format a=b c=d")
        raise CLIError(message)
Space-separated values in 'key=value' format.
_secure_environment_variables_type
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_params.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_params.py
MIT
def _config_map_type(key_value_pair):
    """Space-separated values in 'key=value' format."""
    try:
        key, value = key_value_pair.split('=', 1)
        return {'key': key, 'value': value}
    except ValueError:
        message = ("Incorrectly formatted config map key-value pairs. "
                   "Argument values should be in the format a=b c=d")
        raise CLIError(message)
Space-separated values in 'key=value' format.
_config_map_type
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_params.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_params.py
MIT
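The three 'key=value' parsers above are meant to be used as argument types so that space-separated values arrive pre-parsed. A minimal, standalone argparse sketch of the same pattern (the CLI itself wires these through knack, but the mechanics are the same; the names here are illustrative):

import argparse

def environment_variables_type(value):
    name, sep, val = value.partition('=')
    if not sep:
        raise argparse.ArgumentTypeError('values should be in the format a=b c=d')
    return {'name': name, 'value': val}

parser = argparse.ArgumentParser()
parser.add_argument('--environment-variables', nargs='+', type=environment_variables_type)
args = parser.parse_args(['--environment-variables', 'FOO=bar', 'DEBUG=1'])
print(args.environment_variables)
# [{'name': 'FOO', 'value': 'bar'}, {'name': 'DEBUG', 'value': '1'}]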
def validate_secrets(ns):
    """ Extracts multiple space-separated secrets in key=value format """
    if isinstance(ns.secrets, list):
        secrets_dict = {}
        for item in ns.secrets:
            secrets_dict.update(validate_secret(item))
        ns.secrets = secrets_dict
Extracts multiple space-separated secrets in key=value format
validate_secrets
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_validators.py
MIT
def validate_secret(string):
    """ Extracts a single secret in key=value format """
    result = {}
    if string:
        comps = string.split('=', 1)
        if len(comps) != 2:
            raise CLIError("Secrets need to be specified in key=value format.")
        result = {comps[0]: b64encode(comps[1].encode('ascii')).decode('ascii')}
    return result
Extracts a single secret in key=value format
validate_secret
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_validators.py
MIT
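A quick illustration of the behavior of the secret validators above: each secret's value is base64-encoded before being placed in the secret volume. The helper below is a self-contained mirror of validate_secret for demonstration:

from base64 import b64encode

def encode_secret(string):
    key, value = string.split('=', 1)
    return {key: b64encode(value.encode('ascii')).decode('ascii')}

print(encode_secret('mysecret=secretvalue1'))
# {'mysecret': 'c2VjcmV0dmFsdWUx'}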
def list_containers(client, resource_group_name=None):
    """List all container groups in a resource group. """
    if resource_group_name is None:
        return client.list()
    return client.list_by_resource_group(resource_group_name)
List all container groups in a resource group.
list_containers
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def get_container(client, resource_group_name, name):
    """Show details of a container group. """
    return client.get(resource_group_name, name)
Show details of a container group.
get_container
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def delete_container(client, resource_group_name, name, **kwargs):
    """Delete a container group. """
    return client.begin_delete(resource_group_name, name)
Delete a container group.
delete_container
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def create_container(cmd, resource_group_name, name=None, image=None, location=None, cpu=None, memory=None, config_map=None, restart_policy=None, ports=None, protocol=None, os_type=None, ip_address=None, dns_name_label=None, command_line=None, environment_variables=None, secure_environment_variables=None, registry_login_server=None, registry_username=None, registry_password=None, azure_file_volume_share_name=None, azure_file_volume_account_name=None, azure_file_volume_account_key=None, azure_file_volume_mount_path=None, log_analytics_workspace=None, log_analytics_workspace_key=None, vnet=None, vnet_name=None, vnet_address_prefix='10.0.0.0/16', subnet=None, subnet_address_prefix='10.0.0.0/24', gitrepo_url=None, gitrepo_dir='.', gitrepo_revision=None, gitrepo_mount_path=None, secrets=None, secrets_mount_path=None, file=None, assign_identity=None, identity_scope=None, identity_role='Contributor', no_wait=False, acr_identity=None, zone=None, priority=None, sku=None, cce_policy=None, add_capabilities=None, drop_capabilities=None, privileged=False, allow_privilege_escalation=False, run_as_group=None, run_as_user=None, seccomp_profile=None, container_group_profile_id=None, container_group_profile_revision=None, standby_pool_profile_id=None, fail_container_group_create_on_reuse_failure=False): """Create a container group. """ if file: return _create_update_from_file(cmd.cli_ctx, resource_group_name, name, location, file, no_wait) # Image is no longer a required parameter if not name: raise CLIError("error: the --name/-n argument is required unless specified with a passed in file.") container_group_profile_reference = _create_container_group_profile_reference(container_group_profile_id=container_group_profile_id, container_group_profile_revision=container_group_profile_revision) standby_pool_profile_reference = _create_standby_pool_profile_reference(standby_pool_profile_id=standby_pool_profile_id, fail_container_group_create_on_reuse_failure=fail_container_group_create_on_reuse_failure) ports = ports or [80] protocol = protocol or ContainerGroupNetworkProtocol.tcp config_map = _create_config_map(config_map) container_resource_requirements = _create_resource_requirements(cpu=cpu, memory=memory) image_registry_credentials = _create_image_registry_credentials(cmd=cmd, resource_group_name=resource_group_name, registry_login_server=registry_login_server, registry_username=registry_username, registry_password=registry_password, image=image, identity=acr_identity) command = shlex.split(command_line) if command_line else None volumes = [] mounts = [] azure_file_volume = _create_azure_file_volume(azure_file_volume_share_name=azure_file_volume_share_name, azure_file_volume_account_name=azure_file_volume_account_name, azure_file_volume_account_key=azure_file_volume_account_key) azure_file_volume_mount = _create_azure_file_volume_mount(azure_file_volume=azure_file_volume, azure_file_volume_mount_path=azure_file_volume_mount_path) if azure_file_volume: volumes.append(azure_file_volume) mounts.append(azure_file_volume_mount) secrets_volume = _create_secrets_volume(secrets) secrets_volume_mount = _create_secrets_volume_mount(secrets_volume=secrets_volume, secrets_mount_path=secrets_mount_path) if secrets_volume: volumes.append(secrets_volume) mounts.append(secrets_volume_mount) diagnostics = None tags = {} if log_analytics_workspace and log_analytics_workspace_key: log_analytics = LogAnalytics( workspace_id=log_analytics_workspace, workspace_key=log_analytics_workspace_key) diagnostics = 
ContainerGroupDiagnostics( log_analytics=log_analytics ) elif log_analytics_workspace and not log_analytics_workspace_key: diagnostics, tags = _get_diagnostics_from_workspace( cmd.cli_ctx, log_analytics_workspace) if not diagnostics: raise CLIError('Log Analytics workspace "' + log_analytics_workspace + '" not found.') elif not log_analytics_workspace and log_analytics_workspace_key: raise CLIError('"--log-analytics-workspace-key" requires "--log-analytics-workspace".') gitrepo_volume = _create_gitrepo_volume(gitrepo_url=gitrepo_url, gitrepo_dir=gitrepo_dir, gitrepo_revision=gitrepo_revision) gitrepo_volume_mount = _create_gitrepo_volume_mount(gitrepo_volume=gitrepo_volume, gitrepo_mount_path=gitrepo_mount_path) if gitrepo_volume: volumes.append(gitrepo_volume) mounts.append(gitrepo_volume_mount) # Concatenate secure and standard environment variables if environment_variables and secure_environment_variables: environment_variables = environment_variables + secure_environment_variables else: environment_variables = environment_variables or secure_environment_variables identity = None if assign_identity is not None: identity = _build_identities_info(assign_identity) # Set up VNET and subnet if needed subnet_id = None cgroup_subnet = None if subnet: subnet_id = _get_subnet_id(cmd, location, resource_group_name, vnet, vnet_address_prefix, subnet, subnet_address_prefix) cgroup_subnet = [ContainerGroupSubnetId(id=subnet_id)] cgroup_ip_address = _create_ip_address(ip_address, ports, protocol, dns_name_label, subnet_id) # Setup zones, validation done in control plane so check is not needed here zones = None if zone: zones = [zone] # Set up Priority of the Container Group. if priority == "Spot": priority = ContainerGroupPriority.Spot elif priority == "Regular": priority = ContainerGroupPriority.Regular # Set up Container Group Sku. 
confidential_compute_properties = None security_context = None if sku == "Confidential": sku = ContainerGroupSku.Confidential confidential_compute_properties = ConfidentialComputeProperties(cce_policy=cce_policy) security_context_capabilities = SecurityContextCapabilitiesDefinition(add=add_capabilities, drop=drop_capabilities) security_context = SecurityContextDefinition(privileged=privileged, allow_privilege_escalation=allow_privilege_escalation, capabilities=security_context_capabilities, run_as_group=run_as_group, run_as_user=run_as_user, seccomp_profile=seccomp_profile) container = Container(name=name, image=image, resources=container_resource_requirements, config_map=config_map, command=command, ports=[ContainerPort( port=p, protocol=protocol) for p in ports] if cgroup_ip_address else None, environment_variables=environment_variables, volume_mounts=mounts or None, security_context=security_context) cgroup = ContainerGroup(location=location, identity=identity, containers=[container], os_type=os_type, container_group_profile=container_group_profile_reference, standby_pool_profile=standby_pool_profile_reference, restart_policy=restart_policy, ip_address=cgroup_ip_address, image_registry_credentials=image_registry_credentials, volumes=volumes or None, subnet_ids=cgroup_subnet, diagnostics=diagnostics, tags=tags, zones=zones, priority=priority, sku=sku, confidential_compute_properties=confidential_compute_properties) container_group_client = cf_container_groups(cmd.cli_ctx) lro = sdk_no_wait(no_wait, container_group_client.begin_create_or_update, resource_group_name, name, cgroup) if assign_identity is not None and identity_scope: from azure.cli.core.commands.arm import assign_identity cg = container_group_client.get(resource_group_name, name) assign_identity(cmd.cli_ctx, lambda: cg, lambda cg: cg, identity_role, identity_scope) return lro
Create a container group.
create_container
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
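One detail of the create_container implementation above worth illustrating: --command-line is tokenized with shlex before being handed to the container, so quoting follows shell rules.

import shlex

command_line = '/bin/sh -c "echo hello && sleep 3600"'
print(shlex.split(command_line))
# ['/bin/sh', '-c', 'echo hello && sleep 3600']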
def list_container_group_profiles(client, resource_group_name=None):
    """List all container group profiles in a resource group. """
    if resource_group_name is None:
        return client.list()
    return client.list_by_resource_group(resource_group_name)
List all container group profiles in a resource group.
list_container_group_profiles
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def get_container_group_profile(client, resource_group_name, name):
    """Show details of a container group profile. """
    return client.get(resource_group_name, name)
Show details of a container group profile.
get_container_group_profile
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def delete_container_group_profile(client, resource_group_name, name, **kwargs):
    """Delete a container group profile. """
    return client.delete(resource_group_name, name)
Delete a container group profile.
delete_container_group_profile
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def create_container_group_profile(cmd, resource_group_name, name=None, image=None, location=None, cpu=1, memory=1.5, config_map=None, restart_policy=None, ports=None, protocol=None, os_type='Linux', ip_address=None, command_line=None, environment_variables=None, secure_environment_variables=None, registry_login_server=None, registry_username=None, registry_password=None, azure_file_volume_share_name=None, azure_file_volume_account_name=None, azure_file_volume_account_key=None, azure_file_volume_mount_path=None, log_analytics_workspace=None, log_analytics_workspace_key=None, gitrepo_url=None, gitrepo_dir='.', gitrepo_revision=None, gitrepo_mount_path=None, secrets=None, secrets_mount_path=None, file=None, no_wait=False, acr_identity=None, zone=None, priority=None, sku=None, cce_policy=None, add_capabilities=None, drop_capabilities=None, privileged=False, allow_privilege_escalation=False, run_as_group=None, run_as_user=None, seccomp_profile=None): """Create a container group profile. """ if file: return _create_update_from_file(cmd.cli_ctx, resource_group_name, name, location, file, no_wait) if not name: raise CLIError("error: the --name/-n argument is required unless specified with a passed in file.") if not image: raise CLIError("error: the --image argument is required unless specified with a passed in file.") ports = ports or [80] protocol = protocol or ContainerGroupNetworkProtocol.tcp config_map = _create_config_map(config_map) container_resource_requirements = _create_resource_requirements(cpu=cpu, memory=memory) image_registry_credentials = _create_image_registry_credentials(cmd=cmd, resource_group_name=resource_group_name, registry_login_server=registry_login_server, registry_username=registry_username, registry_password=registry_password, image=image, identity=acr_identity) command = shlex.split(command_line) if command_line else None volumes = [] mounts = [] azure_file_volume = _create_azure_file_volume(azure_file_volume_share_name=azure_file_volume_share_name, azure_file_volume_account_name=azure_file_volume_account_name, azure_file_volume_account_key=azure_file_volume_account_key) azure_file_volume_mount = _create_azure_file_volume_mount(azure_file_volume=azure_file_volume, azure_file_volume_mount_path=azure_file_volume_mount_path) if azure_file_volume: volumes.append(azure_file_volume) mounts.append(azure_file_volume_mount) secrets_volume = _create_secrets_volume(secrets) secrets_volume_mount = _create_secrets_volume_mount(secrets_volume=secrets_volume, secrets_mount_path=secrets_mount_path) if secrets_volume: volumes.append(secrets_volume) mounts.append(secrets_volume_mount) diagnostics = None tags = {} if log_analytics_workspace and log_analytics_workspace_key: log_analytics = LogAnalytics( workspace_id=log_analytics_workspace, workspace_key=log_analytics_workspace_key) diagnostics = ContainerGroupDiagnostics( log_analytics=log_analytics ) elif log_analytics_workspace and not log_analytics_workspace_key: diagnostics, tags = _get_diagnostics_from_workspace( cmd.cli_ctx, log_analytics_workspace) if not diagnostics: raise CLIError('Log Analytics workspace "' + log_analytics_workspace + '" not found.') elif not log_analytics_workspace and log_analytics_workspace_key: raise CLIError('"--log-analytics-workspace-key" requires "--log-analytics-workspace".') gitrepo_volume = _create_gitrepo_volume(gitrepo_url=gitrepo_url, gitrepo_dir=gitrepo_dir, gitrepo_revision=gitrepo_revision) gitrepo_volume_mount = _create_gitrepo_volume_mount(gitrepo_volume=gitrepo_volume, 
gitrepo_mount_path=gitrepo_mount_path) if gitrepo_volume: volumes.append(gitrepo_volume) mounts.append(gitrepo_volume_mount) # Concatenate secure and standard environment variables if environment_variables and secure_environment_variables: environment_variables = environment_variables + secure_environment_variables else: environment_variables = environment_variables or secure_environment_variables cgroup_ip_address = _create_ip_address_cg_profile(ip_address, ports, protocol) # Setup zones, validation done in control plane so check is not needed here zones = None if zone: zones = [zone] # Set up Priority of the Container Group. if priority == "Spot": priority = ContainerGroupPriority.Spot elif priority == "Regular": priority = ContainerGroupPriority.Regular # Set up Container Group Sku. confidential_compute_properties = None security_context = None if sku == "Confidential": sku = ContainerGroupSku.Confidential confidential_compute_properties = ConfidentialComputeProperties(cce_policy=cce_policy) security_context_capabilities = SecurityContextCapabilitiesDefinition(add=add_capabilities, drop=drop_capabilities) security_context = SecurityContextDefinition(privileged=privileged, allow_privilege_escalation=allow_privilege_escalation, capabilities=security_context_capabilities, run_as_group=run_as_group, run_as_user=run_as_user, seccomp_profile=seccomp_profile) container = Container(name=name, image=image, resources=container_resource_requirements, config_map=config_map, command=command, ports=[ContainerPort( port=p, protocol=protocol) for p in ports] if cgroup_ip_address else None, environment_variables=environment_variables, volume_mounts=mounts or None, security_context=security_context) cgroupprofile = ContainerGroupProfile(location=location, containers=[container], os_type=os_type, restart_policy=restart_policy, ip_address=cgroup_ip_address, image_registry_credentials=image_registry_credentials, volumes=volumes or None, diagnostics=diagnostics, tags=tags, zones=zones, priority=priority, sku=sku, confidential_compute_properties=confidential_compute_properties) container_group_profile_client = cf_container_group_profiles(cmd.cli_ctx) lro = sdk_no_wait(no_wait, container_group_profile_client.create_or_update, resource_group_name, name, cgroupprofile) return lro
Create a container group profile.
create_container_group_profile
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def list_container_group_profile_revisions(client, resource_group_name, name):
    """List all revisions for a container group profile. """
    return client.list_all_revisions(resource_group_name, name)
List all revisions for a container group profile.
list_container_group_profile_revisions
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def get_container_group_profile_revision(client, resource_group_name, name, revision):
    """Show details of a container group profile revision. """
    return client.get_by_revision_number(resource_group_name, name, revision)
Show details of a container group profile revision.
get_container_group_profile_revision
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def yaml_env_var_constructor(loader, node):
    ''' Extract the matched value, expand env variable, and replace the match '''
    env_matcher = re.compile(r"\$\{([^}^{]+)\}")
    value = node.value
    match = env_matcher.findall(value)
    if match:
        full_value = value
        for env_var in match:
            full_value = full_value.replace(
                f'${{{env_var}}}', os.environ.get(env_var, env_var)
            )
        return full_value
    return value
Extract the matched value, expand env variable, and replace the match
yaml_env_var_constructor
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
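The constructor above only performs the substitution; it still has to be registered with a YAML loader before ${VAR} placeholders in a deployment file get expanded. Below is a minimal sketch of one way to wire it up with PyYAML; attaching it to the plain-string tag is an illustrative choice, not necessarily how custom.py registers it.

import os
import re
import yaml

def yaml_env_var_constructor(loader, node):
    # Same expansion logic as above: replace ${VAR} with os.environ['VAR'],
    # falling back to the literal variable name when it is not set.
    env_matcher = re.compile(r"\$\{([^}^{]+)\}")
    value = node.value
    for env_var in env_matcher.findall(value):
        value = value.replace(f'${{{env_var}}}', os.environ.get(env_var, env_var))
    return value

# Route every plain string scalar through the constructor (illustrative choice).
yaml.SafeLoader.add_constructor('tag:yaml.org,2002:str', yaml_env_var_constructor)

os.environ['IMAGE_TAG'] = '1.0'
doc = yaml.safe_load('image: myregistry.azurecr.io/app:${IMAGE_TAG}')
print(doc['image'])  # myregistry.azurecr.io/app:1.0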
def _create_resource_requirements(cpu, memory):
    """Create resource requirements. """
    if cpu or memory:
        container_resource_requests = ResourceRequests(memory_in_gb=memory, cpu=cpu)
        return ResourceRequirements(requests=container_resource_requests)
Create resource requirements.
_create_resource_requirements
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_config_map(key_value_pairs):
    """Create config map. """
    config_map = ConfigMap(key_value_pairs={})

    if key_value_pairs:
        key_value_dict = {}
        for pair in key_value_pairs:
            key_value_dict[pair['key']] = pair['value']
        config_map = ConfigMap(key_value_pairs=key_value_dict)

    return config_map
Create config map.
_create_config_map
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
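For context, _create_config_map expects its input as a list of {'key': ..., 'value': ...} dictionaries; that is the only shape the helper indexes. A hypothetical pre-processing step that turns raw KEY=VALUE tokens into that shape might look like the sketch below. parse_config_map_pairs is an illustrative name, not a helper that exists in custom.py.

def parse_config_map_pairs(tokens):
    # Turn CLI-style 'KEY=VALUE' tokens into the list of
    # {'key': ..., 'value': ...} dicts that _create_config_map consumes.
    pairs = []
    for token in tokens:
        key, _, value = token.partition('=')
        pairs.append({'key': key, 'value': value})
    return pairs

print(parse_config_map_pairs(['LOG_LEVEL=debug', 'REGION=westus']))
# [{'key': 'LOG_LEVEL', 'value': 'debug'}, {'key': 'REGION', 'value': 'westus'}]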
def _create_container_group_profile_reference(container_group_profile_id, container_group_profile_revision):
    """Create container group profile reference. """
    if container_group_profile_id and container_group_profile_revision:
        container_group_profile_reference = ContainerGroupProfileReferenceDefinition(
            id=container_group_profile_id,
            revision=container_group_profile_revision)
        return container_group_profile_reference
Create container group profile reference.
_create_container_group_profile_reference
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_standby_pool_profile_reference(standby_pool_profile_id, fail_container_group_create_on_reuse_failure):
    """Create standby pool profile reference. """
    if standby_pool_profile_id:
        standby_pool_profile_reference = StandbyPoolProfileDefinition(
            id=standby_pool_profile_id,
            fail_container_group_create_on_reuse_failure=fail_container_group_create_on_reuse_failure)
        return standby_pool_profile_reference
Create standby pool profile reference.
_create_standby_pool_profile_reference
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_azure_file_volume(azure_file_volume_share_name, azure_file_volume_account_name, azure_file_volume_account_key):
    """Create Azure File volume. """
    azure_file_volume = None
    if azure_file_volume_share_name:
        if not azure_file_volume_account_name:
            raise CLIError('Please specify --azure-file-volume-account-name in order to use Azure File volume.')
        if not azure_file_volume_account_key:
            try:
                azure_file_volume_account_key = prompt_pass(msg='Azure File storage account key: ')
            except NoTTYException:
                raise CLIError('Please specify --azure-file-volume-account-key in order to use Azure File volume.')

        azure_file_volume = AzureFileVolume(share_name=azure_file_volume_share_name,
                                            storage_account_name=azure_file_volume_account_name,
                                            storage_account_key=azure_file_volume_account_key)

    return Volume(name=AZURE_FILE_VOLUME_NAME, azure_file=azure_file_volume) if azure_file_volume else None
Create Azure File volume.
_create_azure_file_volume
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_secrets_volume(secrets):
    """Create secrets volume. """
    return Volume(name=SECRETS_VOLUME_NAME, secret=secrets) if secrets else None
Create secrets volume.
_create_secrets_volume
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_gitrepo_volume(gitrepo_url, gitrepo_dir, gitrepo_revision):
    """Create Git Repo volume. """
    gitrepo_volume = GitRepoVolume(repository=gitrepo_url, directory=gitrepo_dir, revision=gitrepo_revision)
    return Volume(name=GITREPO_VOLUME_NAME, git_repo=gitrepo_volume) if gitrepo_url else None
Create Git Repo volume.
_create_gitrepo_volume
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_azure_file_volume_mount(azure_file_volume, azure_file_volume_mount_path):
    """Create Azure File volume mount. """
    if azure_file_volume_mount_path:
        if not azure_file_volume:
            raise CLIError('Please specify --azure-file-volume-share-name --azure-file-volume-account-name --azure-file-volume-account-key '
                           'to enable Azure File volume mount.')
        return VolumeMount(name=AZURE_FILE_VOLUME_NAME, mount_path=azure_file_volume_mount_path)
Create Azure File volume mount.
_create_azure_file_volume_mount
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_secrets_volume_mount(secrets_volume, secrets_mount_path):
    """Create secrets volume mount. """
    if secrets_volume:
        if not secrets_mount_path:
            raise CLIError('Please specify --secrets --secrets-mount-path '
                           'to enable secrets volume mount.')
        return VolumeMount(name=SECRETS_VOLUME_NAME, mount_path=secrets_mount_path)
Create secrets volume mount.
_create_secrets_volume_mount
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_gitrepo_volume_mount(gitrepo_volume, gitrepo_mount_path):
    """Create Git Repo volume mount. """
    if gitrepo_mount_path:
        if not gitrepo_volume:
            raise CLIError('Please specify --gitrepo-url (--gitrepo-dir --gitrepo-revision) '
                           'to enable Git Repo volume mount.')
        return VolumeMount(name=GITREPO_VOLUME_NAME, mount_path=gitrepo_mount_path)
Create Git Repo volume mount.
_create_gitrepo_volume_mount
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_ip_address(ip_address, ports, protocol, dns_name_label, subnet_id):
    """Create IP address. """
    if (ip_address and ip_address.lower() == 'public') or dns_name_label:
        return IpAddress(ports=[Port(protocol=protocol, port=p) for p in ports],
                         dns_name_label=dns_name_label, type=ContainerGroupIpAddressType.public)
    if subnet_id:
        return IpAddress(ports=[Port(protocol=protocol, port=p) for p in ports],
                         type=ContainerGroupIpAddressType.private)
Create IP address.
_create_ip_address
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _create_ip_address_cg_profile(ip_address, ports, protocol):
    """Create IP address. """
    if (ip_address and ip_address.lower() == 'public'):
        return IpAddress(ports=[Port(protocol=protocol, port=p) for p in ports],
                         type=ContainerGroupIpAddressType.public)
    if (ip_address and ip_address.lower() == 'private'):
        return IpAddress(ports=[Port(protocol=protocol, port=p) for p in ports],
                         type=ContainerGroupIpAddressType.private)
Create IP address.
_create_ip_address_cg_profile
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def container_logs(cmd, resource_group_name, name, container_name=None, follow=False):
    """Tail a container instance log. """
    container_client = cf_container(cmd.cli_ctx)
    container_group_client = cf_container_groups(cmd.cli_ctx)
    container_group = container_group_client.get(resource_group_name, name)

    # If container name is not present, use the first container.
    if container_name is None:
        container_name = container_group.containers[0].name

    if not follow:
        log = container_client.list_logs(resource_group_name, name, container_name)
        print(log.content)
    else:
        _start_streaming(
            terminate_condition=_is_container_terminated,
            terminate_condition_args=(container_group_client, resource_group_name, name, container_name),
            shupdown_grace_period=5,
            stream_target=_stream_logs,
            stream_args=(container_client, resource_group_name, name, container_name, container_group.restart_policy))
Tail a container instance log.
container_logs
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def container_exec(cmd, resource_group_name, name, exec_command, container_name=None):
    """Start exec for a container. """
    container_client = cf_container(cmd.cli_ctx)
    container_group_client = cf_container_groups(cmd.cli_ctx)
    container_group = container_group_client.get(resource_group_name, name)

    if container_name or container_name is None and len(container_group.containers) == 1:
        # If only one container in container group, use that container.
        if container_name is None:
            container_name = container_group.containers[0].name

        try:
            terminalsize = os.get_terminal_size()
        except OSError:
            terminalsize = os.terminal_size((80, 24))
        terminal_size = ContainerExecRequestTerminalSize(rows=terminalsize.lines, cols=terminalsize.columns)
        exec_request = ContainerExecRequest(command=exec_command, terminal_size=terminal_size)

        execContainerResponse = container_client.execute_command(resource_group_name, name, container_name, exec_request)

        # Compare by value rather than identity: platform.system() returns a freshly
        # built string, so an 'is' check against WINDOWS_NAME is unreliable.
        if platform.system() == WINDOWS_NAME:
            _start_exec_pipe_windows(execContainerResponse.web_socket_uri, execContainerResponse.password)
        else:
            _start_exec_pipe_linux(execContainerResponse.web_socket_uri, execContainerResponse.password)
    else:
        raise CLIError('--container-name required when container group has more than one container.')
Start exec for a container.
container_exec
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
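One detail worth calling out in container_exec is the terminal-size probe: os.get_terminal_size() raises OSError when stdout is not attached to a real terminal (for example, when the CLI runs in a pipeline), so the code falls back to a conventional 80x24 screen. The standalone snippet below reproduces just that pattern.

import os

try:
    size = os.get_terminal_size()
except OSError:
    # Not attached to a terminal (e.g. output is piped); assume a classic 80x24 screen.
    size = os.terminal_size((80, 24))

print(f'rows={size.lines} cols={size.columns}')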
def attach_to_container(cmd, resource_group_name, name, container_name=None):
    """Attach to a container. """
    container_client = cf_container(cmd.cli_ctx)
    container_group_client = cf_container_groups(cmd.cli_ctx)
    container_group = container_group_client.get(resource_group_name, name)

    # If container name is not present, use the first container.
    if container_name is None:
        container_name = container_group.containers[0].name

    _start_streaming(
        terminate_condition=_is_container_terminated,
        terminate_condition_args=(container_group_client, resource_group_name, name, container_name),
        shupdown_grace_period=5,
        stream_target=_stream_container_events_and_logs,
        stream_args=(container_group_client, container_client, resource_group_name, name, container_name))
Attach to a container.
attach_to_container
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _start_streaming(terminate_condition, terminate_condition_args, shupdown_grace_period, stream_target, stream_args):
    """Start streaming for the stream target. """
    import colorama
    colorama.init()

    try:
        t = threading.Thread(target=stream_target, args=stream_args)
        t.daemon = True
        t.start()

        while not terminate_condition(*terminate_condition_args) and t.is_alive():
            time.sleep(10)

        time.sleep(shupdown_grace_period)
    finally:
        colorama.deinit()
Start streaming for the stream target.
_start_streaming
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
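Stripped of the Azure SDK plumbing, _start_streaming is a generic pattern: run the blocking stream on a daemon thread, poll a terminate condition from the main thread, then give the stream a short grace period to flush. A self-contained toy version, with a fake stream target and a time-based terminate condition standing in for the container-state check, could look like this sketch (names are illustrative only):

import threading
import time

def start_streaming_demo(terminate_condition, stream_target, grace_period=2):
    # Run the stream target on a daemon thread and poll the terminate
    # condition until it fires or the worker exits on its own.
    worker = threading.Thread(target=stream_target, daemon=True)
    worker.start()
    while not terminate_condition() and worker.is_alive():
        time.sleep(1)
    time.sleep(grace_period)  # let the last output flush before returning

def fake_stream():
    for i in range(10):
        print(f'log line {i}')
        time.sleep(1)

deadline = time.monotonic() + 5
start_streaming_demo(lambda: time.monotonic() > deadline, fake_stream)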
def _stream_logs(client, resource_group_name, name, container_name, restart_policy):
    """Stream logs for a container. """
    lastOutputLines = 0
    while True:
        log = client.list_logs(resource_group_name, name, container_name)
        lines = log.content.split('\n')
        currentOutputLines = len(lines)

        # Should only happen when the container restarts.
        if currentOutputLines < lastOutputLines and restart_policy != 'Never':
            print("Warning: you're having '--restart-policy={}'; the container '{}' was just restarted; the tail of the current log might be missing. Exiting...".format(restart_policy, container_name))
            break

        _move_console_cursor_up(lastOutputLines)
        print(log.content)

        lastOutputLines = currentOutputLines
        time.sleep(2)
Stream logs for a container.
_stream_logs
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _stream_container_events_and_logs(container_group_client, container_client, resource_group_name, name, container_name):
    """Stream container events and logs. """
    lastOutputLines = 0
    lastContainerState = None

    while True:
        container_group, container = _find_container(container_group_client, resource_group_name, name, container_name)

        container_state = 'Unknown'
        if container.instance_view and container.instance_view.current_state and container.instance_view.current_state.state:
            container_state = container.instance_view.current_state.state

        _move_console_cursor_up(lastOutputLines)
        if container_state != lastContainerState:
            print("Container '{}' is in state '{}'...".format(container_name, container_state))

        currentOutputLines = 0
        if container.instance_view and container.instance_view.events:
            for event in sorted(container.instance_view.events, key=lambda e: e.last_timestamp):
                print('(count: {}) (last timestamp: {}) {}'.format(event.count, event.last_timestamp, event.message))
                currentOutputLines += 1

        lastOutputLines = currentOutputLines
        lastContainerState = container_state

        if container_state == 'Running':
            print('\nStart streaming logs:')
            break

        time.sleep(2)

    _stream_logs(container_client, resource_group_name, name, container_name, container_group.restart_policy)
Stream container events and logs.
_stream_container_events_and_logs
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _is_container_terminated(client, resource_group_name, name, container_name):
    """Check if a container should be considered terminated. """
    container_group, container = _find_container(client, resource_group_name, name, container_name)

    # If a container group is terminated, assume the container is also terminated.
    if container_group.instance_view and container_group.instance_view.state:
        if container_group.instance_view.state == 'Succeeded' or container_group.instance_view.state == 'Failed':
            return True

    # If the restart policy is Always, assume the container will be restarted.
    if container_group.restart_policy:
        if container_group.restart_policy == 'Always':
            return False

    # Only assume the container is terminated if its state is Terminated.
    if container.instance_view and container.instance_view.current_state and container.instance_view.current_state.state == 'Terminated':
        return True

    return False
Check if a container should be considered terminated.
_is_container_terminated
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _find_container(client, resource_group_name, name, container_name):
    """Find a container in a container group. """
    container_group = client.get(resource_group_name, name)
    containers = [c for c in container_group.containers if c.name == container_name]

    if len(containers) != 1:
        raise CLIError("Found 0 or more than 1 container with name '{}'".format(container_name))

    return container_group, containers[0]
Find a container in a container group.
_find_container
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
def _move_console_cursor_up(lines):
    """Move console cursor up. """
    if lines > 0:
        # Use stdout.write to support Python 2
        sys.stdout.write('\033[{}A\033[K\033[J'.format(lines))
Move console cursor up.
_move_console_cursor_up
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/custom.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/custom.py
MIT
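The escape sequence used by _move_console_cursor_up is plain ANSI: ESC[<n>A moves the cursor up n lines, ESC[K erases to the end of the current line, and ESC[J erases everything below the cursor, which is what lets the log streamer repaint its output in place. A tiny standalone demo of the same sequence on a VT-capable terminal:

import sys
import time

for i in range(3):
    print(f'old line {i}')
time.sleep(1)

# Jump up three lines, clear from there to the bottom of the screen,
# then repaint -- the same trick _move_console_cursor_up relies on.
sys.stdout.write('\033[{}A\033[K\033[J'.format(3))
print('repainted output')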
def enable_vt_mode():
    """Enables virtual terminal mode for Windows 10 console.

    Windows 10 supports VT (virtual terminal) / ANSI escape sequences since version 1607.

    cmd.exe enables VT mode, but only for itself. It disables VT mode before starting other programs,
    and also at shutdown (See: https://bugs.python.org/issue30075).
    """
    try:
        _update_conout_mode()
        _update_conin_mode()
    except OSError as e:
        if e.errno == ERROR_INVALID_PARAMETER:
            logger.debug("Unable to enable virtual terminal processing for legacy Windows terminal.")
        else:
            logger.debug("Unable to enable virtual terminal processing: %s.", e.errno)
Enables virtual terminal mode for Windows 10 console. Windows 10 supports VT (virtual terminal) / ANSI escape sequences since version 1607. cmd.exe enables VT mode, but only for itself. It disables VT mode before starting other programs, and also at shutdown (See: https://bugs.python.org/issue30075).
enable_vt_mode
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_vt_helper.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_vt_helper.py
MIT
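The _update_conout_mode and _update_conin_mode helpers are not reproduced in this excerpt. On the output side, enabling VT processing typically comes down to OR-ing ENABLE_VIRTUAL_TERMINAL_PROCESSING (0x0004) into the console mode through the Win32 console API; the ctypes sketch below shows that general shape and is an illustration, not the module's actual helper.

import ctypes

ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
STD_OUTPUT_HANDLE = -11

def enable_vt_on_stdout():
    # Windows-only sketch: fetch the current console mode for stdout and
    # OR in the VT-processing flag so ANSI escape sequences are honored.
    kernel32 = ctypes.windll.kernel32
    handle = kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
    mode = ctypes.c_uint32()
    if not kernel32.GetConsoleMode(handle, ctypes.byref(mode)):
        raise ctypes.WinError()
    if not kernel32.SetConsoleMode(handle, mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING):
        raise ctypes.WinError()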
def _get_images(container_group):
    """Get all images of a container group. """
    containers = container_group.get('containers')
    if containers is not None and containers:
        images = set()
        for container in containers:
            images.add(container['image'])
        return ','.join(images)
    return None
Get all images of a container group.
_get_images
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_format.py
MIT
def _format_cpu_memory(container_group):
    """Format CPU and memory. """
    containers = container_group.get('containers')
    if containers is not None and containers:
        total_cpu = 0
        total_memory = 0
        for container in containers:
            resources = container.get('resources')
            if resources is not None:
                resources_requests = resources.get('requests')
                if resources_requests is not None:
                    total_cpu += resources_requests.get('cpu', 0)
                    total_memory += resources_requests.get('memoryInGb', 0)
        return '{0} core/{1} gb'.format(total_cpu, total_memory)
    return None
Format CPU and memory.
_format_cpu_memory
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_format.py
MIT
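Because the formatter works on the CLI's dictionary output rather than SDK models, it is easy to exercise by hand. The sample below assumes _format_cpu_memory from above is in scope and uses a hand-built dictionary whose shape mirrors only the keys the formatter reads, not the full container group payload:

sample_group = {
    'containers': [
        {'resources': {'requests': {'cpu': 1.0, 'memoryInGb': 1.5}}},
        {'resources': {'requests': {'cpu': 0.5, 'memoryInGb': 0.5}}},
    ]
}

# Sums the per-container requests: (1.0 + 0.5) cores and (1.5 + 0.5) GB.
print(_format_cpu_memory(sample_group))  # 1.5 core/2.0 gb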
def _format_ip_address(container_group):
    """Format IP address. """
    ip_address = container_group.get('ipAddress')
    if ip_address:
        ports = ip_address['ports'] or []
        if ip_address['type'] == 'Private':
            for container in container_group.get('containers'):
                ports += container.get('ports')
        ports = ','.join(str(p['port']) for p in ports)
        return '{0}:{1}'.format(ip_address.get('ip'), ports)
    return None
Format IP address.
_format_ip_address
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_format.py
MIT
def transform_container_group(result):
    """Transform a container group to table output. """
    return OrderedDict([('Name', result['name']),
                        ('ResourceGroup', result['resourceGroup']),
                        ('Status', _format_status(result)),
                        ('Image', _get_images(result)),
                        ('IP:ports', _format_ip_address(result)),
                        ('Network', _format_network(result)),
                        ('CPU/Memory', _format_cpu_memory(result)),
                        ('OsType', result.get('osType')),
                        ('Location', result['location'])])
Transform a container group to table output.
transform_container_group
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_format.py
MIT
def transform_container_group_list(result):
    """Transform a container group list to table output. """
    return [transform_container_group(container_group) for container_group in result]
Transform a container group list to table output.
transform_container_group_list
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_format.py
MIT
def transform_container_group_profile(result):
    """Transform a container group profile to table output. """
    return OrderedDict([('Name', result['name']),
                        ('ResourceGroup', result['resourceGroup']),
                        ('Image', _get_images(result)),
                        ('IP:ports', _format_ip_address(result)),
                        ('CPU/Memory', _format_cpu_memory(result)),
                        ('OsType', result.get('osType')),
                        ('Location', result['location'])])
Transform a container group profile to table output.
transform_container_group_profile
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_format.py
MIT
def transform_container_group_profile_list(result):
    """Transform a container group profile list to table output. """
    return [transform_container_group_profile(container_group_profile) for container_group_profile in result]
Transform a container group profile list to table output.
transform_container_group_profile_list
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/container/_format.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/container/_format.py
MIT
def __init__(self, *, allowed_audiences: Optional[List[str]] = None, **kwargs: Any) -> None:
    """
    :keyword allowed_audiences: The configuration settings of the allowed list of audiences from which to
     validate the JWT token.
    :paramtype allowed_audiences: list[str]
    """
    super().__init__(**kwargs)
    self.allowed_audiences = allowed_audiences
:keyword allowed_audiences: The configuration settings of the allowed list of audiences from which to validate the JWT token. :paramtype allowed_audiences: list[str]
__init__
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
MIT
def __init__(
    self, *, groups: Optional[List[str]] = None, identities: Optional[List[str]] = None, **kwargs: Any
) -> None:
    """
    :keyword groups: The list of the allowed groups.
    :paramtype groups: list[str]
    :keyword identities: The list of the allowed identities.
    :paramtype identities: list[str]
    """
    super().__init__(**kwargs)
    self.groups = groups
    self.identities = identities
:keyword groups: The list of the allowed groups. :paramtype groups: list[str] :keyword identities: The list of the allowed identities. :paramtype identities: list[str]
__init__
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
MIT
def __init__(
    self,
    *,
    enabled: Optional[bool] = None,
    registration: Optional["AppleRegistration"] = None,
    login: Optional["LoginScopes"] = None,
    **kwargs: Any
) -> None:
    """
    :keyword enabled: :code:`<code>false</code>` if the Apple provider should not be enabled despite the
     set registration; otherwise, :code:`<code>true</code>`.
    :paramtype enabled: bool
    :keyword registration: The configuration settings of the Apple registration.
    :paramtype registration: ~azure.mgmt.appcontainers.models.AppleRegistration
    :keyword login: The configuration settings of the login flow.
    :paramtype login: ~azure.mgmt.appcontainers.models.LoginScopes
    """
    super().__init__(**kwargs)
    self.enabled = enabled
    self.registration = registration
    self.login = login
:keyword enabled: :code:`<code>false</code>` if the Apple provider should not be enabled despite the set registration; otherwise, :code:`<code>true</code>`. :paramtype enabled: bool :keyword registration: The configuration settings of the Apple registration. :paramtype registration: ~azure.mgmt.appcontainers.models.AppleRegistration :keyword login: The configuration settings of the login flow. :paramtype login: ~azure.mgmt.appcontainers.models.LoginScopes
__init__
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
MIT
def __init__(
    self, *, client_id: Optional[str] = None, client_secret_setting_name: Optional[str] = None, **kwargs: Any
) -> None:
    """
    :keyword client_id: The Client ID of the app used for login.
    :paramtype client_id: str
    :keyword client_secret_setting_name: The app setting name that contains the client secret.
    :paramtype client_secret_setting_name: str
    """
    super().__init__(**kwargs)
    self.client_id = client_id
    self.client_secret_setting_name = client_secret_setting_name
:keyword client_id: The Client ID of the app used for login. :paramtype client_id: str :keyword client_secret_setting_name: The app setting name that contains the client secret. :paramtype client_secret_setting_name: str
__init__
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
MIT
def __init__(
    self,
    *,
    destination: Optional[str] = None,
    log_analytics_configuration: Optional["LogAnalyticsConfiguration"] = None,
    **kwargs: Any
) -> None:
    """
    :keyword destination: Logs destination, can be 'log-analytics', 'azure-monitor' or 'none'.
    :paramtype destination: str
    :keyword log_analytics_configuration: Log Analytics configuration, must only be provided when
     destination is configured as 'log-analytics'.
    :paramtype log_analytics_configuration: ~azure.mgmt.appcontainers.models.LogAnalyticsConfiguration
    """
    super().__init__(**kwargs)
    self.destination = destination
    self.log_analytics_configuration = log_analytics_configuration
:keyword destination: Logs destination, can be 'log-analytics', 'azure-monitor' or 'none'. :paramtype destination: str :keyword log_analytics_configuration: Log Analytics configuration, must only be provided when destination is configured as 'log-analytics'. :paramtype log_analytics_configuration: ~azure.mgmt.appcontainers.models.LogAnalyticsConfiguration
__init__
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
MIT
def __init__(
    self, *, app_id: Optional[str] = None, app_secret_setting_name: Optional[str] = None, **kwargs: Any
) -> None:
    """
    :keyword app_id: The App ID of the app used for login.
    :paramtype app_id: str
    :keyword app_secret_setting_name: The app setting name that contains the app secret.
    :paramtype app_secret_setting_name: str
    """
    super().__init__(**kwargs)
    self.app_id = app_id
    self.app_secret_setting_name = app_secret_setting_name
:keyword app_id: The App ID of the app used for login. :paramtype app_id: str :keyword app_secret_setting_name: The app setting name that contains the app secret. :paramtype app_secret_setting_name: str
__init__
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
MIT
def __init__(
    self,
    *,
    platform: Optional["AuthPlatform"] = None,
    global_validation: Optional["GlobalValidation"] = None,
    identity_providers: Optional["IdentityProviders"] = None,
    login: Optional["Login"] = None,
    http_settings: Optional["HttpSettings"] = None,
    encryption_settings: Optional["EncryptionSettings"] = None,
    **kwargs: Any
) -> None:
    """
    :keyword platform: The configuration settings of the platform of ContainerApp Service
     Authentication/Authorization.
    :paramtype platform: ~azure.mgmt.appcontainers.models.AuthPlatform
    :keyword global_validation: The configuration settings that determines the validation flow of
     users using Service Authentication/Authorization.
    :paramtype global_validation: ~azure.mgmt.appcontainers.models.GlobalValidation
    :keyword identity_providers: The configuration settings of each of the identity providers used
     to configure ContainerApp Service Authentication/Authorization.
    :paramtype identity_providers: ~azure.mgmt.appcontainers.models.IdentityProviders
    :keyword login: The configuration settings of the login flow of users using ContainerApp
     Service Authentication/Authorization.
    :paramtype login: ~azure.mgmt.appcontainers.models.Login
    :keyword http_settings: The configuration settings of the HTTP requests for authentication and
     authorization requests made against ContainerApp Service Authentication/Authorization.
    :paramtype http_settings: ~azure.mgmt.appcontainers.models.HttpSettings
    :keyword encryption_settings: The configuration settings of the secrets references of encryption
     key and signing key for ContainerApp Service Authentication/Authorization.
    :paramtype encryption_settings: ~azure.mgmt.appcontainers.models.EncryptionSettings
    """
    super().__init__(**kwargs)
    self.platform = platform
    self.global_validation = global_validation
    self.identity_providers = identity_providers
    self.login = login
    self.http_settings = http_settings
    self.encryption_settings = encryption_settings
:keyword platform: The configuration settings of the platform of ContainerApp Service Authentication/Authorization. :paramtype platform: ~azure.mgmt.appcontainers.models.AuthPlatform :keyword global_validation: The configuration settings that determines the validation flow of users using Service Authentication/Authorization. :paramtype global_validation: ~azure.mgmt.appcontainers.models.GlobalValidation :keyword identity_providers: The configuration settings of each of the identity providers used to configure ContainerApp Service Authentication/Authorization. :paramtype identity_providers: ~azure.mgmt.appcontainers.models.IdentityProviders :keyword login: The configuration settings of the login flow of users using ContainerApp Service Authentication/Authorization. :paramtype login: ~azure.mgmt.appcontainers.models.Login :keyword http_settings: The configuration settings of the HTTP requests for authentication and authorization requests made against ContainerApp Service Authentication/Authorization. :paramtype http_settings: ~azure.mgmt.appcontainers.models.HttpSettings :keyword encryption_settings: The configuration settings of the secrets references of encryption key and signing key for ContainerApp Service Authentication/Authorization. :paramtype encryption_settings: ~azure.mgmt.appcontainers.models.EncryptionSettings
__init__
python
Azure/azure-cli
src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli/azure/cli/command_modules/containerapp/_sdk_models.py
MIT