Dataset columns (one field per record, in this order):
code: string (26 to 870k characters)
docstring: string (1 to 65.6k characters)
func_name: string (1 to 194 characters)
language: string (1 distinct value)
repo: string (8 to 68 characters)
path: string (5 to 194 characters)
url: string (46 to 254 characters)
license: string (4 distinct values)
def validate_file_or_dict(string): """Parse string as a JSON file or in-line JSON string.""" import os string = os.path.expanduser(string) try: if os.path.exists(string): from azure.cli.core.util import get_file_json # Error 1: 'string' is an existing file path, but the file contains invalid JSON string # ex has no recommendation return get_file_json(string) # Error 2: If string ends with '.json', it can't be a JSON string, since a JSON string must ends with # ], }, or ", so it must be JSON file, and we don't allow parsing it as in-line string if string.endswith('.json'): raise CLIError("JSON file does not exist: '{}'".format(string)) from azure.cli.core.util import shell_safe_json_parse # Error 3: string is a non-existing file path or invalid JSON string # ex has recommendations for shell interpretation return shell_safe_json_parse(string) except CLIError as ex: from azure.cli.core.azclierror import InvalidArgumentValueError new_ex = InvalidArgumentValueError(ex, recommendation=JSON_RECOMMENDATION_MESSAGE) # Preserve the recommendation if hasattr(ex, "recommendations"): new_ex.set_recommendation(ex.recommendations) raise new_ex from ex
Parse string as a JSON file or in-line JSON string.
validate_file_or_dict
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/validators.py
MIT
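To make the branching in validate_file_or_dict above easier to follow, here is a minimal standalone sketch of the same file-or-inline-JSON logic. It uses only the standard library; the name parse_file_or_inline_json is hypothetical, and plain ValueError stands in for the Azure CLI error types.

```python
import json
import os


def parse_file_or_inline_json(value):
    """Parse value as a path to a JSON file if it exists, otherwise as an in-line JSON string."""
    value = os.path.expanduser(value)
    if os.path.exists(value):
        # Existing file path: parse the file contents as JSON.
        with open(value) as f:
            return json.load(f)
    if value.endswith('.json'):
        # A JSON string never ends with '.json', so this must be a missing file.
        raise ValueError("JSON file does not exist: '{}'".format(value))
    # Fall back to treating the value as an in-line JSON string.
    return json.loads(value)


print(parse_file_or_inline_json('{"name": "demo"}'))  # {'name': 'demo'}
```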
def validate_parameter_set(namespace, required, forbidden, dest_to_options=None, description=None): """ validates that a given namespace contains the specified required parameters and does not contain any of the provided forbidden parameters (unless the value came from a default). """ missing_required = [x for x in required if not getattr(namespace, x)] included_forbidden = [x for x in forbidden if getattr(namespace, x) and not hasattr(getattr(namespace, x), 'is_default')] if missing_required or included_forbidden: def _dest_to_option(dest): try: return dest_to_options[dest] except (KeyError, TypeError): # assume the default dest to option return '--{}'.format(dest).replace('_', '-') error = 'invalid usage{}{}'.format(' for ' if description else ':', description) if missing_required: missing_string = ', '.join(_dest_to_option(x) for x in missing_required) error = '{}\n\tmissing: {}'.format(error, missing_string) if included_forbidden: forbidden_string = ', '.join(_dest_to_option(x) for x in included_forbidden) error = '{}\n\tnot applicable: {}'.format(error, forbidden_string) raise CLIError(error)
validates that a given namespace contains the specified required parameters and does not contain any of the provided forbidden parameters (unless the value came from a default).
validate_parameter_set
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/validators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/validators.py
MIT
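A hedged usage sketch for validate_parameter_set: it assumes azure-cli-core (and knack, which provides CLIError) is installed, and the destination names vnet_name, subnet, and subnet_address_prefix are made up for the example.

```python
import argparse

from knack.util import CLIError
from azure.cli.core.commands.validators import validate_parameter_set

ns = argparse.Namespace(vnet_name='myvnet', subnet=None, subnet_address_prefix='10.0.0.0/24')
try:
    # 'subnet' is required but empty; 'subnet_address_prefix' is forbidden but set.
    validate_parameter_set(ns, required=['vnet_name', 'subnet'],
                           forbidden=['subnet_address_prefix'],
                           description='existing subnet')
except CLIError as err:
    print(err)
    # invalid usage for existing subnet
    #     missing: --subnet
    #     not applicable: --subnet-address-prefix
```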
def __init__(self, cli_ctx, object_type='', redact=True, sensitive_keys=None, target=None, tag_func=None, message_func=None, **kwargs): """ Create a collection of sensitive metadata. :param cli_ctx: The CLI context associated with the sensitive item. :type cli_ctx: knack.cli.CLI :param object_type: A label describing the type of object containing sensitive info. :type: object_type: str :param redact: Whether or not to redact the sensitive information. :type redact: bool :param target: The name of the object containing sensitive info. :type target: str :param tag_func: Callable which returns the desired unformatted tag string for the sensitive item. Omit to use the default. :type tag_func: callable :param message_func: Callable which returns the desired unformatted message string for the sensitive item. Omit to use the default. :type message_func: callable """ def _default_get_message(self): from ..credential_helper import sensitive_data_detailed_warning_message, sensitive_data_warning_message if self.sensitive_keys: return sensitive_data_detailed_warning_message.format(', '.join(self.sensitive_keys)) return sensitive_data_warning_message super().__init__( cli_ctx=cli_ctx, object_type=object_type, target=target, color='\x1b[33m', tag_func=tag_func or (lambda _: _SENSITIVE_TAG), message_func=message_func or _default_get_message ) self.redact = redact self.sensitive_keys = sensitive_keys if sensitive_keys else []
Create a collection of sensitive metadata. :param cli_ctx: The CLI context associated with the sensitive item. :type cli_ctx: knack.cli.CLI :param object_type: A label describing the type of object containing sensitive info. :type object_type: str :param redact: Whether or not to redact the sensitive information. :type redact: bool :param target: The name of the object containing sensitive info. :type target: str :param tag_func: Callable which returns the desired unformatted tag string for the sensitive item. Omit to use the default. :type tag_func: callable :param message_func: Callable which returns the desired unformatted message string for the sensitive item. Omit to use the default. :type message_func: callable
__init__
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/sensitive.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/sensitive.py
MIT
def cli_ctx(self): """ Return the cli_ctx of command or command_loader """ return self.cmd.cli_ctx if self.cmd else self.command_loader.cli_ctx
Return the cli_ctx of command or command_loader
cli_ctx
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def handler(self, command_args): """ Callback function of CLICommand handler """ raise NotImplementedError()
Callback function of CLICommand handler
handler
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def arguments_loader(self): """ Callback function of CLICommand arguments_loader """ raise NotImplementedError()
Callback function of CLICommand arguments_loader
arguments_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def description_loader(self): """ Callback function of CLICommand description_loader """ raise NotImplementedError()
Callback function of CLICommand description_loader
description_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def get_op_handler(self, op_path): """ Import and load the operation handler by path """ # Patch the unversioned sdk path to include the appropriate API version for the # resource type in question. from importlib import import_module import types from azure.cli.core.profiles import AZURE_API_PROFILES from azure.cli.core.profiles._shared import get_versioned_sdk_path for rt in AZURE_API_PROFILES[self.cli_ctx.cloud.profile]: if op_path.startswith(rt.import_prefix + '.'): op_path = op_path.replace(rt.import_prefix, get_versioned_sdk_path(self.cli_ctx.cloud.profile, rt, operation_group=self.operation_group)) try: mod_to_import, attr_path = op_path.split('#') handler = import_module(mod_to_import) for part in attr_path.split('.'): handler = getattr(handler, part) if isinstance(handler, types.FunctionType): return handler return handler.__func__ except (ValueError, AttributeError): raise ValueError("The operation '{}' is invalid.".format(op_path))
Import and load the operation handler by path
get_op_handler
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def load_getter_op_arguments(self, getter_op_path, cmd_args=None): """ Load arguments from function signature of getter command op """ op = self.get_op_handler(getter_op_path) getter_args = dict(extract_args_from_signature(op, excluded_params=EXCLUDED_PARAMS)) cmd_args = cmd_args or {} cmd_args.update(getter_args) # The cmd argument is required when calling self.handler function. cmd_args['cmd'] = CLICommandArgument('cmd', arg_type=ignore_type) return cmd_args
Load arguments from function signature of getter command op
load_getter_op_arguments
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def load_op_handler_description(self, handler=None): """ Load the description from function signature of command op """ if handler is None: def default_handler(): """""" # Use empty __doc__ property here, which is required in extract_full_summary_from_signature handler = default_handler self.apply_doc_string(handler) return extract_full_summary_from_signature(handler)
Load the description from function signature of command op
load_op_handler_description
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def handler(self, command_args): """ Callback function of CLICommand handler """ from azure.cli.core.util import get_arg_list, augment_no_wait_handler_args op = self.get_op_handler(self.op_path) op_args = get_arg_list(op) self.cmd = command_args.get('cmd') if 'cmd' in op_args else command_args.pop('cmd') client = self.client_factory(self.cli_ctx, command_args) if self.client_factory else None supports_no_wait = self.merged_kwargs.get('supports_no_wait', None) if supports_no_wait: no_wait_enabled = command_args.pop('no_wait', False) augment_no_wait_handler_args(no_wait_enabled, op, command_args) if client: client_arg_name = self.resolve_client_arg_name(self.op_path) if client_arg_name in op_args: command_args[client_arg_name] = client return op(**command_args)
Callback function of CLICommand handler
handler
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def arguments_loader(self): """ Callback function of CLICommand arguments_loader """ op = self.get_op_handler(self.op_path) self.apply_doc_string(op) cmd_args = list(extract_args_from_signature( op, excluded_params=self.command_loader.excluded_command_handler_args)) return cmd_args
Callback function of CLICommand arguments_loader
arguments_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def description_loader(self): """ Callback function of CLICommand description_loader """ op = self.get_op_handler(self.op_path) return self.load_op_handler_description(op)
Callback function of CLICommand description_loader
description_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def handler(self, command_args): # pylint: disable=too-many-locals, too-many-statements, too-many-branches """ Callback function of CLICommand handler """ from knack.util import CLIError from azure.cli.core.commands import cached_get, cached_put, _is_poller from azure.cli.core.util import find_child_item, augment_no_wait_handler_args from azure.cli.core.commands.arm import add_usage, remove_usage, set_usage, \ add_properties, remove_properties, set_properties self.cmd = command_args.get('cmd') force_string = command_args.get('force_string', False) ordered_arguments = command_args.pop('ordered_arguments', []) dest_names = self.child_arg_name.split('.') child_names = [command_args.get(key, None) for key in dest_names] for item in ['properties_to_add', 'properties_to_set', 'properties_to_remove']: if command_args[item]: raise CLIError("Unexpected '{}' was not empty.".format(item)) del command_args[item] getter, getterargs = self._extract_op_handler_and_args(command_args, self.getter_op_path) if self.child_collection_prop_name: parent = cached_get(self.cmd, getter, **getterargs) instance = find_child_item( parent, *child_names, path=self.child_collection_prop_name, key_path=self.child_collection_key) else: parent = None instance = cached_get(self.cmd, getter, **getterargs) # pass instance to the custom_function, if provided if self.custom_function_op_path: custom_function, custom_func_args = self._extract_op_handler_and_args( command_args, self.custom_function_op_path) if self.child_collection_prop_name: parent = custom_function(instance=instance, parent=parent, **custom_func_args) else: instance = custom_function(instance=instance, **custom_func_args) # apply generic updates after custom updates setter, setterargs = self._extract_op_handler_and_args(command_args, self.setter_op_path) for arg in ordered_arguments: arg_type, arg_values = arg if arg_type == '--set': try: for expression in arg_values: set_properties(instance, expression, force_string) except ValueError: raise CLIError('invalid syntax: {}'.format(set_usage)) elif arg_type == '--add': try: add_properties(instance, arg_values, force_string) except ValueError: raise CLIError('invalid syntax: {}'.format(add_usage)) elif arg_type == '--remove': try: remove_properties(instance, arg_values) except ValueError: raise CLIError('invalid syntax: {}'.format(remove_usage)) # Done... update the instance! setterargs[self.setter_arg_name] = parent if self.child_collection_prop_name else instance # Handle no-wait supports_no_wait = self.cmd.command_kwargs.get('supports_no_wait', None) if supports_no_wait: no_wait_enabled = command_args.get('no_wait', False) augment_no_wait_handler_args(no_wait_enabled, setter, setterargs) else: no_wait_param = self.cmd.command_kwargs.get('no_wait_param', None) if no_wait_param: setterargs[no_wait_param] = command_args[no_wait_param] if self.setter_arg_name == 'parameters': result = cached_put(self.cmd, setter, **setterargs) else: result = cached_put(self.cmd, setter, setterargs[self.setter_arg_name], setter_arg_name=self.setter_arg_name, **setterargs) if supports_no_wait and no_wait_enabled: return None no_wait_param = self.cmd.command_kwargs.get('no_wait_param', None) if no_wait_param and setterargs.get(no_wait_param, None): return None if _is_poller(result): result = result.result() if self.child_collection_prop_name: result = find_child_item( result, *child_names, path=self.child_collection_prop_name, key_path=self.child_collection_key) return result
Callback function of CLICommand handler
handler
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def arguments_loader(self): """ Callback function of CLICommand arguments_loader """ from azure.cli.core.commands.arm import set_usage, add_usage, remove_usage arguments = self.load_getter_op_arguments(self.getter_op_path) arguments.update(self.load_setter_op_arguments()) arguments.update(self.load_custom_function_op_arguments()) arguments.pop('instance', None) # inherited from custom_function(instance, ...) arguments.pop('parent', None) arguments.pop('expand', None) # possibly inherited from the getter arguments.pop(self.setter_arg_name, None) # Add the generic update parameters group_name = 'Generic Update' arguments['properties_to_set'] = CLICommandArgument( 'properties_to_set', options_list=['--set'], nargs='+', action=self.OrderedArgsAction, default=[], help='Update an object by specifying a property path and value to set. Example: {}'.format(set_usage), metavar='KEY=VALUE', arg_group=group_name ) arguments['properties_to_add'] = CLICommandArgument( 'properties_to_add', options_list=['--add'], nargs='+', action=self.OrderedArgsAction, default=[], help='Add an object to a list of objects by specifying a path and ' 'key value pairs. Example: {}'.format(add_usage), metavar='LIST KEY=VALUE', arg_group=group_name ) arguments['properties_to_remove'] = CLICommandArgument( 'properties_to_remove', options_list=['--remove'], nargs='+', action=self.OrderedArgsAction, default=[], help='Remove a property or an element from a list. Example: {}'.format(remove_usage), metavar='LIST INDEX', arg_group=group_name ) arguments['force_string'] = CLICommandArgument( 'force_string', action='store_true', arg_group=group_name, help="When using 'set' or 'add', preserve string literals instead of attempting to convert to JSON." ) return list(arguments.items())
Callback function of CLICommand arguments_loader
arguments_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def description_loader(self): """ Callback function of CLICommand description_loader """ return self.load_op_handler_description()
Callback function of CLICommand description_loader
description_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def handler(self, command_args): """ Callback function of CLICommand handler """ from azure.cli.core.commands.arm import show_exception_handler, EXCLUDED_NON_CLIENT_PARAMS op = self.get_op_handler(self.op_path) getter_args = dict(extract_args_from_signature(op, excluded_params=EXCLUDED_NON_CLIENT_PARAMS)) self.cmd = command_args.get('cmd') if 'cmd' in getter_args else command_args.pop('cmd') client_arg_name = self.resolve_client_arg_name(self.op_path) try: client = self.client_factory(self.cli_ctx) if self.client_factory else None except TypeError: client = self.client_factory(self.cli_ctx, command_args) if self.client_factory else None if client and (client_arg_name in getter_args): command_args[client_arg_name] = client op = self.get_op_handler(self.op_path) # Fetch op handler again after cmd property is set try: return op(**command_args) except Exception as ex: # pylint: disable=broad-except show_exception_handler(ex)
Callback function of CLICommand handler
handler
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def arguments_loader(self): """ Callback function of CLICommand arguments_loader """ cmd_args = self.load_getter_op_arguments(self.op_path) return list(cmd_args.items())
Callback function of CLICommand arguments_loader
arguments_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def description_loader(self): """ Callback function of CLICommand description_loader """ op = self.get_op_handler(self.op_path) return self.load_op_handler_description(op)
Callback function of CLICommand description_loader
description_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def handler(self, command_args): # pylint: disable=too-many-statements, too-many-locals """ Callback function of CLICommand handler """ from azure.cli.core.commands.arm import EXCLUDED_NON_CLIENT_PARAMS op = self.get_op_handler(self.op_path) getter_args = dict(extract_args_from_signature(op, excluded_params=EXCLUDED_NON_CLIENT_PARAMS)) self.cmd = command_args.get('cmd') if 'cmd' in getter_args else command_args.pop('cmd') client_arg_name = self.resolve_client_arg_name(self.op_path) try: client = self.client_factory(self.cli_ctx) if self.client_factory else None except TypeError: client = self.client_factory(self.cli_ctx, command_args) if self.client_factory else None if client and (client_arg_name in getter_args): command_args[client_arg_name] = client getter = self.get_op_handler(self.op_path) # Fetch op handler again after cmd property is set return self.wait(command_args, cli_ctx=self.cli_ctx, getter=getter)
Callback function of CLICommand handler
handler
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def arguments_loader(self): """ Callback function of CLICommand arguments_loader """ cmd_args = self.load_getter_op_arguments(self.op_path) cmd_args.update(self.wait_args()) return list(cmd_args.items())
Callback function of CLICommand arguments_loader
arguments_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def description_loader(self): """ Callback function of CLICommand description_loader """ return self.load_op_handler_description()
Callback function of CLICommand description_loader
description_loader
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/command_operation.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/command_operation.py
MIT
def resource_exists(cli_ctx, subscription, resource_group, name, namespace, type, **_): # pylint: disable=redefined-builtin ''' Checks if the given resource exists. ''' odata_filter = "resourceGroup eq '{}' and name eq '{}'" \ " and resourceType eq '{}/{}'".format(resource_group, name, namespace, type) # Support cross subscription resource existence check client = get_mgmt_service_client( cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES, subscription_id=subscription).resources existing = len(list(client.list(filter=odata_filter))) == 1 return existing
Checks if the given resource exists.
resource_exists
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/arm.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/arm.py
MIT
def _find_split(): """ Find the first = sign to split on (that isn't in [brackets])""" key = [] value = [] brackets = False chars = list(expression) while chars: c = chars.pop(0) if c == '=' and not brackets: # keys done the rest is value value = chars break if c == '[': brackets = True key += c elif c == ']' and brackets: brackets = False key += c else: # normal character key += c return ''.join(key), ''.join(value)
Find the first = sign to split on (that isn't in [brackets])
_split_key_value_pair._find_split
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/arm.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/arm.py
MIT
def _split_key_value_pair(expression): def _find_split(): """ Find the first = sign to split on (that isn't in [brackets])""" key = [] value = [] brackets = False chars = list(expression) while chars: c = chars.pop(0) if c == '=' and not brackets: # keys done the rest is value value = chars break if c == '[': brackets = True key += c elif c == ']' and brackets: brackets = False key += c else: # normal character key += c return ''.join(key), ''.join(value) equals_count = expression.count('=') if equals_count == 1: return expression.split('=', 1) return _find_split()
Find the first = sign to split on (that isn't in [brackets])
_split_key_value_pair
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/arm.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/arm.py
MIT
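Below is a standalone illustration of the split rule in _split_key_value_pair: the first '=' outside of [brackets] separates key from value. The helper split_key_value_pair here mirrors the logic rather than importing the azure-cli function.

```python
def split_key_value_pair(expression):
    # Fast path: exactly one '=' sign.
    if expression.count('=') == 1:
        return expression.split('=', 1)
    key, brackets = [], False
    chars = list(expression)
    while chars:
        c = chars.pop(0)
        if c == '=' and not brackets:
            # Key is done; whatever remains is the value.
            return ''.join(key), ''.join(chars)
        if c == '[':
            brackets = True
        elif c == ']' and brackets:
            brackets = False
        key.append(c)
    return ''.join(key), ''


print(split_key_value_pair("tags.env=prod"))               # ['tags.env', 'prod']
print(split_key_value_pair("rules[name=allow].port=443"))  # ('rules[name=allow].port', '443')
```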
def write(self, args): """ writes the progress """ raise NotImplementedError
writes the progress
write
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def flush(self): """ flushes the message out the pipeline""" raise NotImplementedError
flushes the message out of the pipeline
flush
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def clear(self): """ resets the view to neutral """ pass # pylint: disable=unnecessary-pass
resets the view to neutral
clear
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def add(self, **kwargs): """ adds a progress report :param kwargs: dictionary containing 'message', 'total_val', 'value' """ message = kwargs.get('message', self.message) total_val = kwargs.get('total_val', self.total_val) value = kwargs.get('value', self.value) if value and total_val: assert 0 <= value <= total_val self.closed = value == total_val self.total_val = total_val self.value = value self.message = message
adds a progress report :param kwargs: dictionary containing 'message', 'total_val', 'value'
add
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def report(self): """ report the progress """ percent = self.value / self.total_val if self.value is not None and self.total_val else None return {'message': self.message, 'percent': percent}
report the progress
report
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def init_progress(self, progress_view): """ activate a view """ self.active_progress = progress_view
activate a view
init_progress
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def add(self, **kwargs): """ adds a progress report """ self.reporter.add(**kwargs) self.update()
adds a progress report
add
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def update(self): """ updates the view with the progress """ self.active_progress.write(self.reporter.report()) self.active_progress.flush()
updates the view with the progress
update
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def stop(self): """ if there is an abrupt stop before ending """ self.reporter.closed = True self.add(message='Interrupted') self.active_progress.clear()
if there is an abrupt stop before ending
stop
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def begin(self, **kwargs): """ start reporting progress """ kwargs['message'] = kwargs.get('message', 'Starting') self.add(**kwargs) self.reporter.closed = False
start reporting progress
begin
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def end(self, **kwargs): """ ending reporting of progress """ kwargs['message'] = kwargs.get('message', 'Finished') self.reporter.closed = True self.add(**kwargs) self.active_progress.clear()
ending reporting of progress
end
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def is_running(self): """ whether progress is continuing """ return not self.reporter.closed
whether progress is continuing
is_running
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def write(self, args): """ writes the progress :param args: dictionary containing key 'message' """ if self.spinner is None: self.spinner = Spinner( # pylint: disable=no-member label='In Progress', stream=self.out, hide_cursor=False) msg = args.get('message', 'In Progress') try: self.spinner.step(label=msg) except OSError: pass
writes the progress :param args: dictionary containing key 'message'
write
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def write(self, args): """ writes the progress :param args: args is a dictionary containing 'percent', 'message' """ percent = args.get('percent', 0) message = args.get('message', '') if percent: progress = _format_value(message, percent) self.out.write(progress)
writes the progress :param args: args is a dictionary containing 'percent', 'message'
write
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def get_progress_view(determinant=False, outstream=sys.stderr, spinner=None): """ gets your view """ if determinant: return DeterminateStandardOut(out=outstream) return IndeterminateStandardOut(out=outstream, spinner=spinner)
gets your view
get_progress_view
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/progress.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/progress.py
MIT
def get_folded_parameter_help_string( display_name, allow_none=False, allow_new=False, default_none=False, other_required_option=None, allow_cross_sub=True): """ Assembles a parameterized help string for folded parameters. """ quotes = '""' if platform.system() == 'Windows' else "''" if default_none and not allow_none: raise CLIError('Cannot use default_none=True and allow_none=False') if not allow_new and not allow_none and not default_none: help_text = 'Name or ID of an existing {}.'.format(display_name) elif not allow_new and allow_none and not default_none: help_text = 'Name or ID of an existing {}, or {} for none.'.format(display_name, quotes) elif allow_new and not allow_none and not default_none: help_text = 'Name or ID of the {}. Will create resource if it does not exist.'.format( display_name) elif allow_new and allow_none and not default_none: help_text = 'Name or ID of the {}, or {} for none. Uses existing resource if available or will create a new ' \ 'resource with defaults if omitted.' help_text = help_text.format(display_name, quotes) elif not allow_new and allow_none and default_none: help_text = 'Name or ID of an existing {}, or none by default.'.format(display_name) elif allow_new and allow_none and default_none: help_text = 'Name or ID of a {}. Uses existing resource or creates new if specified, or none if omitted.' help_text = help_text.format(display_name) # add parent name option string (if applicable) if other_required_option: help_text = '{} If name specified, also specify {}.'.format(help_text, other_required_option) extra_sub_text = " or subscription" if allow_cross_sub else "" help_text = '{} If you want to use an existing {display_name} in other resource group{append_sub}, ' \ 'please provide the ID instead of the name of the {display_name}'.format(help_text, display_name=display_name, append_sub=extra_sub_text) return help_text
Assembles a parameterized help string for folded parameters.
get_folded_parameter_help_string
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/template_create.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/template_create.py
MIT
def _explode_list_args(args): '''Iterate through each attribute member of args and create a copy with the IterateValues 'flattened' to only contain a single value Ex. { a1:'x', a2:IterateValue(['y', 'z']) } => [{ a1:'x', a2:'y'),{ a1:'x', a2:'z'}] ''' from azure.cli.core.commands.validators import IterateValue list_args = {argname: argvalue for argname, argvalue in vars(args).items() if isinstance(argvalue, IterateValue)} if not list_args: yield args else: values = list(zip(*list_args.values())) for key in list_args: delattr(args, key) for value in values: new_ns = argparse.Namespace(**vars(args)) for key_index, key in enumerate(list_args.keys()): setattr(new_ns, key, value[key_index]) yield new_ns
Iterate through each attribute member of args and create a copy with the IterateValues 'flattened' to only contain a single value Ex. { a1:'x', a2:IterateValue(['y', 'z']) } => [{ a1:'x', a2:'y' }, { a1:'x', a2:'z' }]
_explode_list_args
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/__init__.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/__init__.py
MIT
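To show what the 'flattened' copies produced by _explode_list_args look like, here is a standalone sketch. IterateValue in azure-cli is a list subclass used to mark multi-value arguments; a stand-in subclass is defined here so the example runs without azure-cli-core.

```python
import argparse


class IterateValue(list):
    """Stand-in for azure.cli.core.commands.validators.IterateValue."""


def explode_list_args(args):
    list_args = {k: v for k, v in vars(args).items() if isinstance(v, IterateValue)}
    if not list_args:
        yield args
        return
    values = list(zip(*list_args.values()))
    for key in list_args:
        delattr(args, key)
    for value in values:
        new_ns = argparse.Namespace(**vars(args))
        for idx, key in enumerate(list_args.keys()):
            setattr(new_ns, key, value[idx])
        yield new_ns


ns = argparse.Namespace(a1='x', a2=IterateValue(['y', 'z']))
print([vars(n) for n in explode_list_args(ns)])
# [{'a1': 'x', 'a2': 'y'}, {'a1': 'x', 'a2': 'z'}]
```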
def _add_vscode_extension_metadata(self, arg, overrides): """ Adds metadata for use by the VSCode CLI extension. Do not remove or modify without contacting the VSCode team. """ if not hasattr(arg.type, 'required_tooling'): required = arg.type.settings.get('required', False) setattr(arg.type, 'required_tooling', required) if 'configured_default' in overrides.settings: def_config = overrides.settings.get('configured_default', None) setattr(arg.type, 'default_name_tooling', def_config)
Adds metadata for use by the VSCode CLI extension. Do not remove or modify without contacting the VSCode team.
_add_vscode_extension_metadata
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/__init__.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/__init__.py
MIT
def cached_put(cmd_obj, operation, parameters, *args, setter_arg_name='parameters', **kwargs): """ setter_arg_name: The name of the argument in the setter which corresponds to the object being updated. In track2, unknown kwargs will raise, so we should not pass 'parameters" for operation when the name of the argument in the setter which corresponds to the object being updated is not 'parameters'. """ def _put_operation(): result = None if args: extended_args = args + (parameters,) result = operation(*extended_args) elif kwargs is not None: kwargs[setter_arg_name] = parameters result = operation(**kwargs) del kwargs[setter_arg_name] return result # early out if the command does not use the cache if not cmd_obj.command_kwargs.get('supports_local_cache', False): return _put_operation() use_cache = cmd_obj.cli_ctx.data.get('_cache', False) if not use_cache: result = _put_operation() # allow overriding model path, e.g. for extensions model_path = cmd_obj.command_kwargs.get('model_path', None) cache_obj = CacheObject(cmd_obj, parameters.serialize(), operation, model_path=model_path) if use_cache: cache_obj.save(args, kwargs) return cache_obj # for a successful PUT, attempt to delete the cache file obj_dir, obj_file = cache_obj.path(args, kwargs) obj_path = os.path.join(obj_dir, obj_file) try: os.remove(obj_path) except OSError: # FileNotFoundError introduced in Python 3 pass return result
setter_arg_name: The name of the argument in the setter which corresponds to the object being updated. In Track 2 SDKs, unknown kwargs raise an error, so we should not pass 'parameters' to the operation when the setter's argument for the object being updated is not named 'parameters'.
cached_put
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/__init__.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/__init__.py
MIT
def _generate_template_progress(self, correlation_id): # pylint: disable=no-self-use """ gets the progress for template deployments """ from azure.cli.core.commands.client_factory import get_mgmt_service_client from azure.mgmt.monitor import MonitorManagementClient if correlation_id is not None: # pylint: disable=too-many-nested-blocks formatter = "eventTimestamp ge {}" end_time = datetime.datetime.utcnow() start_time = end_time - datetime.timedelta(seconds=DEFAULT_QUERY_TIME_RANGE) odata_filters = formatter.format(start_time.strftime('%Y-%m-%dT%H:%M:%SZ')) odata_filters = "{} and {} eq '{}'".format(odata_filters, 'correlationId', correlation_id) activity_log = get_mgmt_service_client( self.cli_ctx, MonitorManagementClient).activity_logs.list(filter=odata_filters) results = [] max_events = 50 # default max value for events in list_activity_log for index, item in enumerate(activity_log): if index < max_events: results.append(item) else: break if results: for event in results: update = False long_name = event.resource_id.split('/')[-1] if long_name not in self.deploy_dict: self.deploy_dict[long_name] = {} update = True deploy_values = self.deploy_dict[long_name] checked_values = { str(event.resource_type.value): 'type', str(event.status.value): 'status value', str(event.event_name.value): 'request', } try: checked_values[str(event.properties.get('statusCode', ''))] = 'status' except AttributeError: pass if deploy_values.get('timestamp', None) is None or \ event.event_timestamp > deploy_values.get('timestamp'): for k, v in checked_values.items(): if deploy_values.get(v, None) != k: update = True deploy_values[v] = k deploy_values['timestamp'] = event.event_timestamp # don't want to show the timestamp json_val = deploy_values.copy() json_val.pop('timestamp', None) status_val = deploy_values.get('status value', None) if status_val and status_val != 'Started': result = deploy_values['status value'] + ': ' + long_name result += ' (' + deploy_values.get('type', '') + ')' if update: logger.info(result)
gets the progress for template deployments
_generate_template_progress
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/__init__.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/__init__.py
MIT
def __init__(self, command_loader, group_name, **kwargs): """ :param command_loader: The command loader that commands will be registered into :type command_loader: azure.cli.core.AzCommandsLoader :param group_name: The name of the group of commands in the command hierarchy :type group_name: str """ merged_kwargs = self._merge_kwargs(kwargs, base_kwargs=command_loader.module_kwargs) operations_tmpl = merged_kwargs.pop('operations_tmpl', None) super().__init__(command_loader, group_name, operations_tmpl, **merged_kwargs) self.group_kwargs = merged_kwargs if operations_tmpl: self.group_kwargs['operations_tmpl'] = operations_tmpl self.is_stale = False
:param command_loader: The command loader that commands will be registered into :type command_loader: azure.cli.core.AzCommandsLoader :param group_name: The name of the group of commands in the command hierarchy :type group_name: str
__init__
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/__init__.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/__init__.py
MIT
def command(self, name, handler_name=None, **kwargs): """ Register a CLI command. :param name: Name of the command as it will be called on the command line :type name: str :param handler_name: Name of the method the command maps to :type handler_name: str :param kwargs: Keyword arguments. Supported keyword arguments include: - client_factory: Callable which returns a client needed to access the underlying command method. (function) - confirmation: Prompt prior to the action being executed. This is useful if the action would cause a loss of data. (bool) - exception_handler: Exception handler for handling non-standard exceptions (function) - supports_no_wait: The command supports no wait. (bool) - no_wait_param: [deprecated] The name of a boolean parameter that will be exposed as `--no-wait` to skip long running operation polling. (string) - transform: Transform function for transforming the output of the command (function) - table_transformer: Transform function or JMESPath query to be applied to table output to create a better output format for tables. (function or string) - resource_type: The ResourceType enum value to use with min or max API. (ResourceType) - min_api: Minimum API version required for commands within the group (string) - max_api: Maximum API version required for commands within the group (string) :rtype: None """ return self._command(name, method_name=handler_name, **kwargs)
Register a CLI command. :param name: Name of the command as it will be called on the command line :type name: str :param handler_name: Name of the method the command maps to :type handler_name: str :param kwargs: Keyword arguments. Supported keyword arguments include: - client_factory: Callable which returns a client needed to access the underlying command method. (function) - confirmation: Prompt prior to the action being executed. This is useful if the action would cause a loss of data. (bool) - exception_handler: Exception handler for handling non-standard exceptions (function) - supports_no_wait: The command supports no wait. (bool) - no_wait_param: [deprecated] The name of a boolean parameter that will be exposed as `--no-wait` to skip long running operation polling. (string) - transform: Transform function for transforming the output of the command (function) - table_transformer: Transform function or JMESPath query to be applied to table output to create a better output format for tables. (function or string) - resource_type: The ResourceType enum value to use with min or max API. (ResourceType) - min_api: Minimum API version required for commands within the group (string) - max_api: Maximum API version required for commands within the group (string) :rtype: None
command
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/__init__.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/__init__.py
MIT
def custom_command(self, name, method_name=None, **kwargs): """ Register a CLI command. :param name: Name of the command as it will be called on the command line :type name: str :param method_name: Name of the method the command maps to :type method_name: str :param kwargs: Keyword arguments. Supported keyword arguments include: - client_factory: Callable which returns a client needed to access the underlying command method. (function) - confirmation: Prompt prior to the action being executed. This is useful if the action would cause a loss of data. (bool) - exception_handler: Exception handler for handling non-standard exceptions (function) - supports_no_wait: The command supports no wait. (bool) - no_wait_param: [deprecated] The name of a boolean parameter that will be exposed as `--no-wait` to skip long running operation polling. (string) - transform: Transform function for transforming the output of the command (function) - table_transformer: Transform function or JMESPath query to be applied to table output to create a better output format for tables. (function or string) - resource_type: The ResourceType enum value to use with min or max API. (ResourceType) - min_api: Minimum API version required for commands within the group (string) - max_api: Maximum API version required for commands within the group (string) :rtype: None """ return self._command(name, method_name=method_name, custom_command=True, **kwargs)
Register a CLI command. :param name: Name of the command as it will be called on the command line :type name: str :param method_name: Name of the method the command maps to :type method_name: str :param kwargs: Keyword arguments. Supported keyword arguments include: - client_factory: Callable which returns a client needed to access the underlying command method. (function) - confirmation: Prompt prior to the action being executed. This is useful if the action would cause a loss of data. (bool) - exception_handler: Exception handler for handling non-standard exceptions (function) - supports_no_wait: The command supports no wait. (bool) - no_wait_param: [deprecated] The name of a boolean parameter that will be exposed as `--no-wait` to skip long running operation polling. (string) - transform: Transform function for transforming the output of the command (function) - table_transformer: Transform function or JMESPath query to be applied to table output to create a better output format for tables. (function or string) - resource_type: The ResourceType enum value to use with min or max API. (ResourceType) - min_api: Minimum API version required for commands within the group (string) - max_api: Maximum API version required for commands within the group (string) :rtype: None
custom_command
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/__init__.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/__init__.py
MIT
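The kwargs listed in the command/custom_command docstrings above are typically supplied when a command module registers its command table. The fragment below is a hedged sketch of that pattern: it assumes azure-cli-core is installed, it is meant to live inside an AzCommandsLoader subclass, and the 'mymod widget' group, operations template, and handler names are hypothetical.

```python
def load_command_table(self, _):
    from azure.cli.core.commands import CliCommandType

    # Hypothetical SDK operations template for this command group.
    mymod_sdk = CliCommandType(operations_tmpl='azure.mgmt.mymod.operations#WidgetsOperations.{}')

    with self.command_group('mymod widget', mymod_sdk) as g:
        # SDK-backed command with a table transformer (JMESPath query).
        g.command('list', 'list_by_resource_group',
                  table_transformer='[].{Name:name, State:properties.state}')
        # Custom commands; assumes a custom_command_type was configured on the loader.
        g.custom_command('create', 'create_widget', supports_no_wait=True)
        g.custom_command('delete', 'delete_widget', confirmation=True)
```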
def test_custom_credential(self): from azure.cli.core.auth.util import AccessToken class AccessTokenCredential: """Simple access token authentication. Return the access token as-is. """ def __init__(self, access_token): self.access_token = access_token self.get_token_called = False def get_token(self, *scopes, **kwargs): self.get_token_called = True import time # Assume the access token expires in 1 year return AccessToken(self.access_token, int(time.time()) + 31536000) cli = DummyCli() cred = AccessTokenCredential("mock_token") client = get_mgmt_service_client(cli, ResourceType.MGMT_RESOURCE_RESOURCES, subscription_id=MOCKED_SUBSCRIPTION_ID, credential=cred) assert isinstance(client._config.credential, AccessTokenCredential) from azure.core.exceptions import ResourceNotFoundError try: client.resource_groups.get('test_rg') except ResourceNotFoundError: # Error is expected since we are using a dummy subscription ID pass assert cred.get_token_called # Error if no subscription is provided with self.assertRaises(ValueError): get_mgmt_service_client(cli, ResourceType.MGMT_RESOURCE_RESOURCES, credential=cred)
Simple access token authentication. Return the access token as-is.
test_custom_credential
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/commands/tests/test_client_factory.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/commands/tests/test_client_factory.py
MIT
def __init__(self, headers_to_redact=None): """ :param list[str] headers_to_redact: headers that should be redacted from the log. Default to 'Authorization', 'x-ms-authorization-auxiliary'. """ if headers_to_redact is not None: self.headers_to_redact = headers_to_redact else: self.headers_to_redact = ['authorization', 'x-ms-authorization-auxiliary']
:param list[str] headers_to_redact: headers that should be redacted from the log. Default to 'Authorization', 'x-ms-authorization-auxiliary'.
__init__
python
Azure/azure-cli
src/azure-cli-core/azure/cli/core/sdk/policies.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-core/azure/cli/core/sdk/policies.py
MIT
def find_recording_dir(test_file): """ Find the directory containing the recording of given test file based on current profile. """ return os.path.join(os.path.dirname(test_file), 'recordings')
Find the directory containing the recording of given test file based on current profile.
find_recording_dir
python
Azure/azure-cli
src/azure-cli-testsdk/azure/cli/testsdk/utilities.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-testsdk/azure/cli/testsdk/utilities.py
MIT
def serial_test(): """ Mark the test as serial """ return pytest.mark.serial()
Mark the test as serial
serial_test
python
Azure/azure-cli
src/azure-cli-testsdk/azure/cli/testsdk/decorators.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-testsdk/azure/cli/testsdk/decorators.py
MIT
def create_temp_file(self, size_kb, full_random=False): """ Create a temporary file for testing. The test harness will delete the file during tearing down. :param float size_kb: specify the generated file size in kb. """ fd, path = tempfile.mkstemp() os.close(fd) self.addCleanup(lambda: os.remove(path)) with open(path, mode='r+b') as f: if full_random: chunk = os.urandom(1024) else: chunk = bytearray([0] * 1024) for _ in range(int(size_kb)): f.write(chunk) chunk = os.urandom(int(1024 * (size_kb % 1))) f.write(chunk) return path
Create a temporary file for testing. The test harness will delete the file during teardown. :param float size_kb: the generated file size in KB.
create_temp_file
python
Azure/azure-cli
src/azure-cli-testsdk/azure/cli/testsdk/scenario_tests/base.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-testsdk/azure/cli/testsdk/scenario_tests/base.py
MIT
def create_temp_dir(self): """ Create a temporary directory for testing. The test harness will delete the directory during tearing down. """ temp_dir = tempfile.mkdtemp() self.addCleanup(lambda: shutil.rmtree(temp_dir, ignore_errors=True)) return temp_dir
Create a temporary directory for testing. The test harness will delete the directory during teardown.
create_temp_dir
python
Azure/azure-cli
src/azure-cli-testsdk/azure/cli/testsdk/scenario_tests/base.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-testsdk/azure/cli/testsdk/scenario_tests/base.py
MIT
def _custom_request_query_matcher(cls, r1, r2): """ Ensure method, path, and query parameters match. """ from urllib.parse import urlparse, parse_qs url1 = urlparse(r1.uri) url2 = urlparse(r2.uri) q1 = parse_qs(url1.query) q2 = parse_qs(url2.query) shared_keys = set(q1.keys()).intersection(set(q2.keys())) if len(shared_keys) != len(q1) or len(shared_keys) != len(q2): return False for key in shared_keys: if q1[key][0].lower() != q2[key][0].lower(): return False return True
Ensure method, path, and query parameters match.
_custom_request_query_matcher
python
Azure/azure-cli
src/azure-cli-testsdk/azure/cli/testsdk/scenario_tests/base.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-testsdk/azure/cli/testsdk/scenario_tests/base.py
MIT
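Here is a standalone illustration of the matching rule in _custom_request_query_matcher: two request URIs match only when they share the same set of query keys and each key's first value is equal case-insensitively. The helper query_params_match mirrors the query comparison without the vcrpy recording machinery.

```python
from urllib.parse import parse_qs, urlparse


def query_params_match(uri1, uri2):
    q1, q2 = parse_qs(urlparse(uri1).query), parse_qs(urlparse(uri2).query)
    shared = set(q1) & set(q2)
    # Both requests must have exactly the same query keys...
    if len(shared) != len(q1) or len(shared) != len(q2):
        return False
    # ...and each key's first value must match, ignoring case.
    return all(q1[k][0].lower() == q2[k][0].lower() for k in shared)


print(query_params_match('https://example/?api-version=2022-01-01&a=B',
                         'https://example/?a=b&api-version=2022-01-01'))  # True
print(query_params_match('https://example/?api-version=2022-01-01',
                         'https://example/?api-version=2023-01-01'))      # False
```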
def save_payload(config_dir, payload): """ Save a telemetry payload to the telemetry cache directory under the given configuration directory """ logger = logging.getLogger('telemetry.save') if payload: cache_saver, cache_dir = _create_rotate_file_logger(config_dir) if cache_saver: cache_saver.info(payload) logger.info('Save telemetry record of length %d in cache file under %s', len(payload), cache_dir) return cache_dir return None
Save a telemetry payload to the telemetry cache directory under the given configuration directory
save_payload
python
Azure/azure-cli
src/azure-cli-telemetry/azure/cli/telemetry/util.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-telemetry/azure/cli/telemetry/util.py
MIT
def get_last_sent(self): """ Read the timestamp of the last sent telemetry record from the telemetry note file. """ raw = 'N/A' fallback = datetime.datetime.min try: raw = self.fh.read().strip() last_send = datetime.datetime.strptime(raw, '%Y-%m-%dT%H:%M:%S') self._logger.info("Read timestamp from the note. The last send was %s.", last_send) return last_send except (OSError, AttributeError, ValueError) as err: self._logger.warning("Fail to parse or read the timestamp '%s' in the note file. Set the last send time " "to minimal. Reason: %s", raw, err) return fallback
Read the timestamp of the last sent telemetry record from the telemetry note file.
get_last_sent
python
Azure/azure-cli
src/azure-cli-telemetry/azure/cli/telemetry/components/telemetry_note.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-telemetry/azure/cli/telemetry/components/telemetry_note.py
MIT
def snapshot_and_read(self): """ Scan the telemetry cache files. """ if not os.path.isdir(self._cache_dir): return # Collect all cache/cache.x files candidates = [(fn, os.stat(os.path.join(self._cache_dir, fn))) for fn in os.listdir(self._cache_dir)] # sort the cache files base on their last modification time. candidates = [(fn, file_stat) for fn, file_stat in candidates if stat.S_ISREG(file_stat.st_mode)] candidates.sort(key=lambda pair: pair[1].st_mtime, reverse=True) # move the newer cache file first if not candidates: self._logger.info('No cache to be uploaded.') return self._logger.info('%d cache files to upload.', len(candidates)) for each in os.listdir(self._cache_dir): self._read_file(os.path.join(self._cache_dir, each)) shutil.rmtree(self._cache_dir, ignore_errors=True) self._logger.info('Remove directory %s', self._cache_dir)
Scan the telemetry cache files.
snapshot_and_read
python
Azure/azure-cli
src/azure-cli-telemetry/azure/cli/telemetry/components/records_collection.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-telemetry/azure/cli/telemetry/components/records_collection.py
MIT
def _read_file(self, path): """ Read content of a telemetry cache file and parse them into records. """ try: with open(path, mode='r') as fh: for line in fh.readlines(): self._add_record(line) self._logger.info("Processed file %s into %d records.", path, len(self._records)) except OSError as err: self._logger.warning("Fail to open file %s. Reason: %s.", path, err)
Read the content of a telemetry cache file and parse its lines into records.
_read_file
python
Azure/azure-cli
src/azure-cli-telemetry/azure/cli/telemetry/components/records_collection.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-telemetry/azure/cli/telemetry/components/records_collection.py
MIT
def _add_record(self, content_line): """ Parse a line in the recording file. """ try: _, content = content_line.split(',', 1) self._records.append(content) except ValueError as err: self._logger.warning("Fail to parse a line of the record %s. Error %s.", content_line, err)
Parse a line in the recording file.
_add_record
python
Azure/azure-cli
src/azure-cli-telemetry/azure/cli/telemetry/components/records_collection.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-telemetry/azure/cli/telemetry/components/records_collection.py
MIT
def config_logging_for_upload(config_dir): """Set up a logging handler for the logger during the upload process. The upload process is an independent process apart from the main CLI process. Its stderr and stdout are both redirected to /dev/null. Therefore stream handler is not applicable. This method will set up a telemetry logging file under the user profile configuration dir, which is specified by the `config_dir` parameter, to save the logging records. The method should be called once in the entry of the upload process. """ folder = _ensure_telemetry_log_folder(config_dir) if folder: handler = logging.handlers.RotatingFileHandler(os.path.join(folder, TELEMETRY_LOG_NAME), maxBytes=10 * 1024 * 1024, backupCount=5) del logging.root.handlers[:] formatter = logging.Formatter('%(process)d : %(asctime)s : %(levelname)s : %(name)s : %(message)s', None) handler.setFormatter(formatter) logging.root.addHandler(handler) logging.root.setLevel(logging.DEBUG)
Set up a logging handler for the logger during the upload process. The upload process is an independent process apart from the main CLI process. Its stderr and stdout are both redirected to /dev/null. Therefore stream handler is not applicable. This method will set up a telemetry logging file under the user profile configuration dir, which is specified by the `config_dir` parameter, to save the logging records. The method should be called once in the entry of the upload process.
config_logging_for_upload
python
Azure/azure-cli
src/azure-cli-telemetry/azure/cli/telemetry/components/telemetry_logging.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-telemetry/azure/cli/telemetry/components/telemetry_logging.py
MIT
def get_logger(section_name): """Returns a logger for the given sub section. The logger's name will reflect that this logger is from the telemetry module.""" return logging.getLogger('{}.{}'.format(LOGGER_NAME, section_name))
Returns a logger for the given sub section. The logger's name will reflect that this logger is from the telemetry module.
get_logger
python
Azure/azure-cli
src/azure-cli-telemetry/azure/cli/telemetry/components/telemetry_logging.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-telemetry/azure/cli/telemetry/components/telemetry_logging.py
MIT
def send(self, data_to_send):
    """ Override the default resend mechanism in SenderBase. Stop resend when it fails."""
    request_payload = json.dumps([a.write() for a in data_to_send])
    content = bytearray(request_payload, 'utf-8')
    begin = datetime.datetime.now()
    request = http_client_t.Request(self._service_endpoint_uri, content,
                                    {'Accept': 'application/json',
                                     'Content-Type': 'application/json; charset=utf-8'})
    try:
        http_client_t.urlopen(request, timeout=10)
        self._logger.info('Sending %d bytes', len(content))
    except HTTPError as e:
        self._logger.error('Upload failed. HTTPError: %s', e)
    except OSError as e:  # socket timeout
        # stop retry during socket timeout
        self._logger.error('Upload failed. OSError: %s', e)
    except Exception as e:  # pylint: disable=broad-except
        self._logger.error('Unexpected exception: %s', e)
    finally:
        self._logger.info('Finish uploading in %f seconds.', (datetime.datetime.now() - begin).total_seconds())
Override the default resend mechanism in SenderBase. Stop resend when it fails.
send
python
Azure/azure-cli
src/azure-cli-telemetry/azure/cli/telemetry/components/telemetry_client.py
https://github.com/Azure/azure-cli/blob/master/src/azure-cli-telemetry/azure/cli/telemetry/components/telemetry_client.py
MIT
def calculate_folder_size(start_path):
    """Calculate total size of a folder and file count."""
    # https://stackoverflow.com/questions/1392413/calculating-a-directorys-size-using-python
    total_size = 0
    total_count = 0
    for dirpath, dirnames, filenames in os.walk(start_path):
        for f in filenames:
            fp = os.path.join(dirpath, f)
            # skip if it is symbolic link
            if not os.path.islink(fp):
                total_count += 1
                total_size += os.path.getsize(fp)
    return total_size, total_count
Calculate total size of a folder and file count.
calculate_folder_size
python
Azure/azure-cli
scripts/trim_sdk.py
https://github.com/Azure/azure-cli/blob/master/scripts/trim_sdk.py
MIT
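A minimal usage sketch for the calculate_folder_size helper above. The target directory is hypothetical, and the helper is assumed to be importable from the trim script.

import os

# Hypothetical path; calculate_folder_size (defined above) is assumed to be in scope.
target = os.path.expanduser("~/.azure/cliextensions")
size_bytes, file_count = calculate_folder_size(target)
print("{} files, {:.1f} MiB".format(file_count, size_bytes / (1024 * 1024)))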
def find_code_piece_in_parent(self, key, code):
    """find code piece in frozen namespaces"""
    if key not in self._code_pieces:
        return
    for namespace, value in self._code_pieces[key].items():
        if namespace not in self._parent_namespaces:
            continue
        if value["max_count_code"] == code and value["codes"][code] > 1:
            return namespace
find code piece in frozen namespaces
find_code_piece_in_parent
python
Azure/azure-cli
scripts/compact_aaz.py
https://github.com/Azure/azure-cli/blob/master/scripts/compact_aaz.py
MIT
def get_repo_root():
    """
    Returns the root path to this repository. The root is where .git folder is.
    """
    import os.path
    here = os.path.dirname(os.path.realpath(__file__))
    while not os.path.exists(os.path.join(here, '.git')):
        here = os.path.dirname(here)
    return here
Returns the root path to this repository. The root is where .git folder is.
get_repo_root
python
Azure/azure-cli
scripts/_common.py
https://github.com/Azure/azure-cli/blob/master/scripts/_common.py
MIT
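A self-contained sketch of the walk-up-until-.git pattern used by get_repo_root above, written against an arbitrary starting path; the example path is hypothetical and a stop condition is added so the loop cannot spin at the filesystem root.

import os.path

def find_repo_root(start_path):
    # Walk parent directories until a .git folder is found (same idea as get_repo_root above).
    here = os.path.realpath(start_path)
    while not os.path.exists(os.path.join(here, '.git')):
        parent = os.path.dirname(here)
        if parent == here:  # reached the filesystem root without finding .git
            raise FileNotFoundError('no .git folder above ' + start_path)
        here = parent
    return here

# Example (hypothetical path):
# print(find_repo_root('/home/me/azure-cli/scripts'))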
def __init__(self):
    """
    self.jobs: Record the test time of each module
    self.modules: All modules and core, ignore extensions
    self.serial_modules: All modules which need to execute in serial mode
    self.works: Record which modules each worker needs to test
    self.instance_cnt: The total number of concurrent automation full test pipeline instance with specify python version
        Because we share the vm pool with azure-sdk team, so we can't set the number of concurrency arbitrarily
        Best practice is to keep the number of concurrent tasks below 50
        If you set a larger number of concurrency, it will cause many instances to be in the waiting state
        And the network module has the largest number of test cases and can only be tested serially for now, so setting instance_cnt = 8 is sufficient
        Total concurrent number:
        AutomationTest20200901 * 3 + AutomationTest20190301 * 3 + AutomationTest20180301 * 3 + AutomationFullTest * 8 * 3 (python_version) = 33
    self.instance_idx: The index of concurrent automation full test pipeline instance with specify python version
        For example:
        instance_cnt = 8, instance_idx = 3: means we have 8 instances totally, and now we are scheduling modules on third instance
        instance_cnt = 1, instance_idx = 1: means we only have 1 instance, so we don't need to schedule modules
    """
    self.jobs = []
    self.modules = {}
    self.serial_modules = serial_modules
    self.works = []
    self.instance_cnt = instance_cnt
    self.instance_idx = instance_idx
    for i in range(self.instance_cnt):
        worker = {}
        self.works.append(worker)
    self.profile = profile
self.jobs: Record the test time of each module self.modules: All modules and core, ignore extensions self.serial_modules: All modules which need to execute in serial mode self.works: Record which modules each worker needs to test self.instance_cnt: The total number of concurrent automation full test pipeline instance with specify python version Because we share the vm pool with azure-sdk team, so we can't set the number of concurrency arbitrarily Best practice is to keep the number of concurrent tasks below 50 If you set a larger number of concurrency, it will cause many instances to be in the waiting state And the network module has the largest number of test cases and can only be tested serially for now, so setting instance_cnt = 8 is sufficient Total concurrent number: AutomationTest20200901 * 3 + AutomationTest20190301 * 3 + AutomationTest20180301 * 3 + AutomationFullTest * 8 * 3 (python_version) = 33 self.instance_idx: The index of concurrent automation full test pipeline instance with specify python version For example: instance_cnt = 8, instance_idx = 3: means we have 8 instances totally, and now we are scheduling modules on third instance instance_cnt = 1, instance_idx = 1: means we only have 1 instance, so we don't need to schedule modules
__init__
python
Azure/azure-cli
scripts/ci/automation_full_test.py
https://github.com/Azure/azure-cli/blob/master/scripts/ci/automation_full_test.py
MIT
def get_worker(self):
    """
    Use greedy algorithm to distribute jobs to each worker
    For each job, we assign it to the worker with the fewest jobs currently
    :return worker number
    """
    for idx, worker in enumerate(self.works):
        tmp_time = sum(worker.values()) if sum(worker.values()) else 0
        if idx == 0:
            worker_time = tmp_time
            worker_num = idx
        if tmp_time < worker_time:
            worker_time = tmp_time
            worker_num = idx
    return worker_num
Use greedy algorithm to distribute jobs to each worker For each job, we assign it to the worker with the fewest jobs currently :return worker number
get_worker
python
Azure/azure-cli
scripts/ci/automation_full_test.py
https://github.com/Azure/azure-cli/blob/master/scripts/ci/automation_full_test.py
MIT
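A small, self-contained sketch of the greedy rule behind get_worker above: each job goes to the worker whose accumulated test time is currently the lowest. Module names and durations below are hypothetical.

# Hypothetical (module, minutes) pairs; assignment mirrors get_worker's rule.
jobs = [("network", 90), ("storage", 40), ("vm", 60), ("acr", 10)]
workers = [{}, {}]  # two workers, each maps module -> minutes

for module, minutes in jobs:
    # pick the worker with the smallest accumulated time so far
    target = min(range(len(workers)), key=lambda i: sum(workers[i].values()))
    workers[target][module] = minutes

print(workers)
# -> [{'network': 90, 'acr': 10}, {'storage': 40, 'vm': 60}]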
def get_container_name():
    """
    Generate container name in storage account. It is also an identifier of the pipeline run.
    :return:
    """
    logger.info('Enter get_container_name()')
    name = BUILD_ID
    logger.info('Exit get_container_name()')
    return name
Generate container name in storage account. It is also an identifier of the pipeline run. :return:
get_container_name
python
Azure/azure-cli
scripts/live_test/sendemail.py
https://github.com/Azure/azure-cli/blob/master/scripts/live_test/sendemail.py
MIT
def upload_files(container):
    """
    Upload html and json files to container
    :param container:
    :return:
    """
    logger.info('Enter upload_files()')

    # Create container
    cmd = 'az storage container create -n {} --account-name clitestresultstac --public-access container --auth-mode login'.format(container)
    os.system(cmd)

    # Upload files
    for root, dirs, files in os.walk(ARTIFACT_DIR):
        for name in files:
            if name.endswith('html') or name.endswith('json'):
                fullpath = os.path.join(root, name)
                cmd = 'az storage blob upload -f {} -c {} -n {} --account-name clitestresultstac --auth-mode login'.format(fullpath, container, name)
                os.system(cmd)

    logger.info('Exit upload_files()')
Upload html and json files to container :param container: :return:
upload_files
python
Azure/azure-cli
scripts/live_test/sendemail.py
https://github.com/Azure/azure-cli/blob/master/scripts/live_test/sendemail.py
MIT
def generate(container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_TARGET, ACCOUNT_KEY, USER_REPO_EXT, USER_BRANCH_EXT): """ Generate index.html. Upload it to storage account :param container: :param container_url: :return: a HTML string """ logger.warning('Enter generate()') # [{'name': name, 'url': url}] data = [] url = container_url + '?restype=container&comp=list' content = requests.get(url).content logger.warning(content) root = ET.fromstring(content) for blobs in root: for blob in blobs: name = url = '' for e in blob: if e.tag == 'Name': name = e.text if e.tag == 'Url': url = e.text if name == '' or url == '': logger.warning('[Warning] Blob\'s name or url is empty, name: {}, url: {}'.format(name, url)) if name.endswith('.html'): data.append({'name': name, 'url': url}) break logger.warning(data) html = render(data, container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_REPO_EXT, USER_BRANCH_EXT) with open('index.html', 'w') as f: f.write(html) # Upload to storage account cmd = 'az storage blob upload -f index.html -c {} -n index.html --account-name clitestresultstac --auth-mode login --overwrite'.format(container) logger.warning('Running: ' + cmd) os.system(cmd) # Upload to latest container if it is a full live test of official repo dev branch if USER_TARGET.lower() in ['all', ''] \ and USER_REPO == 'https://github.com/Azure/azure-cli.git' \ and USER_REPO_EXT == 'https://github.com/Azure/azure-cli-extensions.git' \ and USER_BRANCH == 'dev' and USER_BRANCH_EXT == 'main' and USER_LIVE == '--live': cmd = 'az storage blob upload -f index.html -c latest -n index.html --account-name clitestresultstac --auth-mode login --overwrite' logger.warning('Running: ' + cmd) os.system(cmd) logger.warning('Exit generate()') return html
Generate index.html. Upload it to storage account :param container: :param container_url: :return: a HTML string
generate
python
Azure/azure-cli
scripts/live_test/generate_index.py
https://github.com/Azure/azure-cli/blob/master/scripts/live_test/generate_index.py
MIT
def render(data, container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_REPO_EXT, USER_BRANCH_EXT): """ Return a HTML string :param data: :param container: :param container_url: :param testdata: :param USER_REPO: :param USER_BRANCH: :param COMMIT_ID: :param USER_LIVE: :return: """ logger.warning('Enter render()') content = """ <!DOCTYPE html> <html> <head> <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css" integrity="sha384-TX8t27EcRE3e/ihU7zmQxVncDAy5uIKz4rEkgIXeMed4M0jlfIDPvg6uqKI2xXr2" crossorigin="anonymous"> <style> table, th, td { border: 1px solid black; border-collapse: collapse; } </style> </head> <body> <h2>Testing results of Azure CLI</h2> """ live = 'True' if USER_LIVE == '--live' else 'False' date = datetime.date.today() content += """ <p> Repository: {}<br> Branch: {}<br> Repository of extension: {}<br> Branch of extension: {}<br> Commit: {}<br> Live: {}<br> Date: {} </p> """.format(USER_REPO, USER_BRANCH, USER_REPO_EXT, USER_BRANCH_EXT, COMMIT_ID, live, date) content += """ <p> <b>Pass: {}, Fail: {}, Pass rate: {}</b> </p> """.format(testdata.total[1], testdata.total[2], testdata.total[3]) content += """ <p> <a href=https://microsoft-my.sharepoint.com/:w:/p/fey/EZGC9LwrN3RAscVS5ylG4HMBX9h7W0ZSA7CDrhXN5Lvx6g?e=V8HUmd>User Manual of Live Test Pipeline</a> (Please read it) <br> <a href=https://microsoft-my.sharepoint.com/:w:/p/fey/EcgPLHSkef9Mi14Rjx79N9sBvyVDO4b_V97BMcoI1HTq-A?e=Ioap3B>Upgrading API Versions in Azure CLI Live Test Pipeline</a> (Advanced feature) <br> <a href=https://msit.powerbi.com/groups/8de24d49-e97c-4672-9bfc-45fee0ec58f7/reports/65dfcfce-5d59-4dc9-8bc5-3726443c8fe1/ReportSection>Power BI Report</a> (History data, beautiful charts and tables) </p> """ table = """ <p><b>Test results summary</b></p> <table> <tr> <th>Module</th> <th>Pass</th> <th>Fail</th> <th>Pass rate</th> <th>Reports</th> </tr> """ table += """ <tr> <td>Total</td> <td>{}</td> <td>{}</td> <td>{}</td> <td>N/A</td> </tr> """.format(testdata.total[1], testdata.total[2], testdata.total[3]) sorted_modules = sorted(testdata.modules, key=sort_by_module_name) for module, passed, failed, rate in sorted_modules: reports = '' for x in data: name = x['name'] url = x['url'] if name.startswith(module + '.'): display_name = 'report' # if 'parallel' in name: # display_name = 'parallel' # elif 'sequential' in name: # display_name = 'sequential' try: html = requests.get(url).text pattern = re.compile('\\d+ tests ran in') match = pattern.search(html) number = match.group().split()[0] if number.isdigit(): display_name += '(' + number + ')' except: logger.exception(traceback.print_exc()) reports += '<a href="{}">{}</a> '.format(url, display_name) table += """ <tr> <td>{}</td> <td>{}</td> <td>{}</td> <td>{}</td> <td>{}</td> </tr> """.format(module, passed, failed, rate, reports) table += """ </table> """ content += table # content += """ # <p><b>Reports</b></p> # """ # # for item in data: # name = item['name'] # url = item['url'] # content += """ # <a href={}>{}</a><br> # """.format(url, name) content += """ </body> </html> """ logger.warning(content) logger.warning('Exit render()') return content
Return a HTML string :param data: :param container: :param container_url: :param testdata: :param USER_REPO: :param USER_BRANCH: :param COMMIT_ID: :param USER_LIVE: :return:
render
python
Azure/azure-cli
scripts/live_test/generate_index.py
https://github.com/Azure/azure-cli/blob/master/scripts/live_test/generate_index.py
MIT
def collect(self):
    """
    Collect data
    :return:
    """
    print('Generating summary...')
    data_dict = {}
    for root, dirs, files in os.walk(self.artifact_dir):
        for name in files:
            if name.endswith('json'):
                try:
                    print('Reading {}'.format(name))
                    module = name.split('.')[0]
                    with open(os.path.join(root, name)) as f:
                        result = json.loads(f.read())
                        passed = failed = 0
                        if 'passed' in result['summary']:
                            passed = result['summary']['passed']
                        if 'failed' in result['summary']:
                            failed = result['summary']['failed']
                        if module in data_dict:
                            values = data_dict[module]
                            data_dict[module] = (values[0] + passed, values[1] + failed)
                        else:
                            data_dict[module] = (passed, failed)
                except Exception:
                    print(traceback.format_exc())

    passed_sum = failed_sum = 0
    for k in data_dict:
        v = data_dict[k]
        passed = v[0]
        failed = v[1]
        total = passed + failed
        rate = 1 if total == 0 else passed / total
        rate = '{:.2%}'.format(rate)
        self.modules.append((k, passed, failed, rate))
        print('module: {}, passed: {}, failed: {}, rate: {}'.format(k, passed, failed, rate))
        passed_sum += passed
        failed_sum += failed

    print(self.modules)
    print('Sorting...')
    sorted_modules = sorted(self.modules, key=lambda x: x[0])
    self.modules = sorted_modules
    print(self.modules)

    total_sum = passed_sum + failed_sum
    rate_sum = 1 if total_sum == 0 else passed_sum / total_sum
    rate_sum = '{:.2%}'.format(rate_sum)
    self.total = ('Total', passed_sum, failed_sum, rate_sum)
    print('module: Total, passed: {}, failed: {}, rate: {}'.format(passed_sum, failed_sum, rate_sum))
Collect data :return:
collect
python
Azure/azure-cli
scripts/live_test/test_data.py
https://github.com/Azure/azure-cli/blob/master/scripts/live_test/test_data.py
MIT
def get_modules():
    """
    :return: str[]
    """
    path = 'azure-cli/src/azure-cli/azure/cli/command_modules'
    modules = [m for m in os.listdir(path) if os.path.isdir(os.path.join(path, m))]
    if EXTENSION:
        path = 'azure-cli-extensions/src'
        modules.extend(m for m in os.listdir(path) if os.path.isdir(os.path.join(path, m)))
    return modules
:return: str[]
get_modules
python
Azure/azure-cli
scripts/live_test/gen.py
https://github.com/Azure/azure-cli/blob/master/scripts/live_test/gen.py
MIT
def mean(data):
    """Return the sample arithmetic mean of data."""
    n = len(data)
    if n < 1:
        raise ValueError('len < 1')
    return sum(data)/float(n)
Return the sample arithmetic mean of data.
mean
python
Azure/azure-cli
scripts/performance/measure.py
https://github.com/Azure/azure-cli/blob/master/scripts/performance/measure.py
MIT
def sq_deviation(data):
    """Return sum of square deviations of sequence data."""
    c = mean(data)
    return sum((x-c)**2 for x in data)
Return sum of square deviations of sequence data.
sq_deviation
python
Azure/azure-cli
scripts/performance/measure.py
https://github.com/Azure/azure-cli/blob/master/scripts/performance/measure.py
MIT
def pstdev(data):
    """Calculates the population standard deviation."""
    n = len(data)
    if n < 2:
        raise ValueError('len < 2')
    ss = sq_deviation(data)
    return (ss/n) ** 0.5
Calculates the population standard deviation.
pstdev
python
Azure/azure-cli
scripts/performance/measure.py
https://github.com/Azure/azure-cli/blob/master/scripts/performance/measure.py
MIT
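A short worked example for the three statistics helpers above (mean, sq_deviation, pstdev), using made-up command latencies in seconds; the helpers are assumed to be in scope.

# Hypothetical timing samples (seconds).
samples = [1.2, 1.4, 1.3, 1.7, 1.4]

m = mean(samples)           # (1.2 + 1.4 + 1.3 + 1.7 + 1.4) / 5 = 1.4
ss = sq_deviation(samples)  # 0.04 + 0.0 + 0.01 + 0.09 + 0.0 = 0.14
sd = pstdev(samples)        # sqrt(0.14 / 5) ≈ 0.167

print(f"mean={m:.3f}s, pstdev={sd:.3f}s")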
def parse_input(input_parameter):
    """From a syntax like package_name#submodule, build a package name and complete module name.
    """
    split_package_name = input_parameter.split('#')
    package_name = split_package_name[0]
    module_name = package_name.replace("-", ".")
    if len(split_package_name) >= 2:
        module_name = ".".join([module_name, split_package_name[1]])
    return package_name, module_name
From a syntax like package_name#submodule, build a package name and complete module name.
parse_input
python
Azure/azure-cli
scripts/sdk_process/patch_models.py
https://github.com/Azure/azure-cli/blob/master/scripts/sdk_process/patch_models.py
MIT
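A quick usage sketch for parse_input above; the package names are examples only, and the function is assumed to be in scope.

print(parse_input("azure-mgmt-compute"))
# -> ('azure-mgmt-compute', 'azure.mgmt.compute')
print(parse_input("azure-mgmt-network#v2020_06_01"))
# -> ('azure-mgmt-network', 'azure.mgmt.network.v2020_06_01')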
def solve_one_model(models_module, output_folder, track2=False):
    """Will build the compacted models in the output_folder"""
    models_classes = [
        (len(model_class.__mro__), inspect.getfile(model_class), model_class)
        for model_name, model_class in vars(models_module).items()
        if model_name[0].isupper() and Model in model_class.__mro__
    ]
    # Only sort based on the first element in the tuple
    models_classes.sort(key=lambda x: x[0])

    paged_models_classes = [
        (inspect.getfile(model_class), model_class)
        for model_name, model_class in vars(models_module).items()
        if model_name[0].isupper() and Paged in model_class.__mro__
    ]

    enum_models_classes = [
        (inspect.getfile(model_class), model_class)
        for model_name, model_class in vars(models_module).items()
        if model_name[0].isupper() and Enum in model_class.__mro__
    ]
    if enum_models_classes:
        enum_file = Path(enum_models_classes[0][0])
        shutil.copyfile(enum_file, Path(output_folder, enum_file.name))
        enum_file_module_name = enum_file.with_suffix('').name
    else:
        enum_file_module_name = None

    write_model_file(Path(output_folder, "models_py3.py"), models_classes, track2=track2)
    write_paging_file(Path(output_folder, "paged_models.py"), paged_models_classes)
    write_init(
        Path(output_folder, "__init__.py"),
        "models_py3",
        "models",
        "paged_models",
        enum_file_module_name
    )
Will build the compacted models in the output_folder
solve_one_model
python
Azure/azure-cli
scripts/sdk_process/patch_models.py
https://github.com/Azure/azure-cli/blob/master/scripts/sdk_process/patch_models.py
MIT
def find_models_to_change(module_name):
    """Will figure out if the package is a multi-api one, and understand what to generate.
    """
    main_module = importlib.import_module(module_name)
    try:
        models_module = main_module.models
        models_module.__path__
        # It didn't fail, that's a single API package
        return [models_module]
    except AttributeError:
        # This means I loaded the fake module "models"
        # and it's multi-api, load all models
        return [
            importlib.import_module('.' + label + '.models', main_module.__name__)
            for (_, label, ispkg) in pkgutil.iter_modules(main_module.__path__)
            if ispkg and label != 'aio'
        ]
Will figure out if the package is a multi-api one, and understand what to generate.
find_models_to_change
python
Azure/azure-cli
scripts/sdk_process/patch_models.py
https://github.com/Azure/azure-cli/blob/master/scripts/sdk_process/patch_models.py
MIT
def find_autorest_generated_folder(module_prefix="azure.mgmt"):
    """Find all Autorest generated code in that module prefix.
    This actually looks for a "models" package only. We could be smarter if necessary.
    """
    _LOGGER.info("Looking for Autorest generated package in %s", module_prefix)
    result = []
    prefix_module = importlib.import_module(module_prefix)
    for _, sub_package, ispkg in pkgutil.iter_modules(prefix_module.__path__, module_prefix + "."):
        try:
            _LOGGER.debug("Try %s", sub_package)
            importlib.import_module(".models", sub_package)
            # If not exception, we found it
            _LOGGER.info("Found %s", sub_package)
            result.append(sub_package)
        except ModuleNotFoundError:
            # No model, might dig deeper
            if ispkg:
                result += find_autorest_generated_folder(sub_package)
    return result
Find all Autorest generated code in that module prefix. This actually looks for a "models" package only. We could be smarter if necessary.
find_autorest_generated_folder
python
Azure/azure-cli
scripts/sdk_process/patch_models.py
https://github.com/Azure/azure-cli/blob/master/scripts/sdk_process/patch_models.py
MIT
def get_repo_root():
    """
    Returns the root path to this repository. The root is where .git folder is.
    """
    import os.path
    here = os.path.dirname(os.path.realpath(__file__))
    while not os.path.exists(os.path.join(here, '.git')):
        here = os.path.dirname(here)
    return here
Returns the root path to this repository. The root is where .git folder is.
get_repo_root
python
Azure/azure-cli
scripts/temp_help/convert_all.py
https://github.com/Azure/azure-cli/blob/master/scripts/temp_help/convert_all.py
MIT
def generate_formula_with_template() -> str:
    """Generate a brew formula by using a template"""
    template_path = os.path.join(os.path.dirname(__file__), TEMPLATE_FILE_NAME)
    with open(template_path, mode='r') as fq:
        template_content = fq.read()

    template = jinja2.Template(template_content)

    content = template.render(
        cli_version=CLI_VERSION,
        upstream_url=HOMEBREW_UPSTREAM_URL,
        upstream_sha=compute_sha256(HOMEBREW_UPSTREAM_URL),
        resources=collect_resources(),
        bottle_hash=last_bottle_hash()
    )

    if not content.endswith('\n'):
        content += '\n'
    return content
Generate a brew formula by using a template
generate_formula_with_template
python
Azure/azure-cli
scripts/release/homebrew/docker/formula_generate.py
https://github.com/Azure/azure-cli/blob/master/scripts/release/homebrew/docker/formula_generate.py
MIT
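A minimal, self-contained sketch of the Jinja2 rendering pattern used by generate_formula_with_template above; the template string and values here are invented for illustration and are not the real formula template.

import jinja2

# Invented template; the real formula template ships with the release scripts.
template = jinja2.Template('url "{{ upstream_url }}"\nsha256 "{{ upstream_sha }}"\n')
print(template.render(upstream_url="https://example.invalid/azure-cli.tar.gz",
                      upstream_sha="0" * 64))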
def last_bottle_hash():
    """Fetch the bottle do ... end from the latest brew formula"""
    resp = requests.get(HOMEBREW_FORMULAR_LATEST)
    resp.raise_for_status()
    lines = resp.text.split('\n')
    look_for_end = False
    start = 0
    end = 0
    for idx, content in enumerate(lines):
        if look_for_end:
            if 'end' in content:
                end = idx
                break
        else:
            if 'bottle do' in content:
                start = idx
                look_for_end = True
    return '\n'.join(lines[start: end + 1])
Fetch the bottle do ... end from the latest brew formula
last_bottle_hash
python
Azure/azure-cli
scripts/release/homebrew/docker/formula_generate.py
https://github.com/Azure/azure-cli/blob/master/scripts/release/homebrew/docker/formula_generate.py
MIT
def update_formula() -> str: """Generate a brew formula by updating the existing one""" nodes = collect_resources_dict() resp = requests.get(HOMEBREW_FORMULAR_LATEST) resp.raise_for_status() text = resp.text # update python version text = re.sub('depends_on "python@.*"', f'depends_on "python@{PYTHON_VERSION}"', text, 1) venv_str = f'venv = virtualenv_create(libexec, "python{PYTHON_VERSION}", system_site_packages: false)' text = re.sub(r'venv = virtualenv_create.*', venv_str, text, 1) # update url and sha256 of azure-cli text = re.sub('url ".*"', 'url "{}"'.format(HOMEBREW_UPSTREAM_URL), text, 1) upstream_sha = compute_sha256(HOMEBREW_UPSTREAM_URL) text = re.sub('sha256 ".*"', 'sha256 "{}"'.format(upstream_sha), text, 1) text = re.sub('.*revision.*\n', '', text, 1) # remove revision for previous version if exists pack = None packs_to_remove = set() lines = text.split('\n') node_index_dict = OrderedDict() line_idx_to_remove = set() upgrade = False for idx, line in enumerate(lines): # In released formula, the url is in the release tag format, such as # "https://github.com/Azure/azure-cli/archive/azure-cli-2.17.1.tar.gz". # version is extracted from url. During build, the url is in the format like # "https://codeload.github.com/Azure/azure-cli/legacy.tar.gz/7e09fd50c9ef02e1ed7d4709c7ab1a71acd3840b". # We need to add the version explicitly after url. # We will change the url in our release pipeline and remove version. if line.startswith(" url"): lines[idx] = lines[idx] + '\n' + ' version "{}"'.format(CLI_VERSION) elif line.strip().startswith("resource"): m = re.search(r'resource "(.*)" do', line) if m is not None: pack = m.group(1) node_index_dict[pack] = idx elif pack is not None: if line.startswith(" url"): # update the url of package if pack in nodes.keys(): url_match = re.search(r'url "(.*)"', line) if url_match is not None and nodes[pack]['url'] != url_match.group(1): lines[idx] = re.sub('url ".*"', 'url "{}"'.format(nodes[pack]['url']), line, 1) upgrade = True else: packs_to_remove.add(pack) elif line.startswith(" sha256"): # update the sha256 of package if pack in nodes.keys(): lines[idx] = re.sub('sha256 ".*"', 'sha256 "{}"'.format(nodes[pack]['checksum']), line, 1) del nodes[pack] elif line.startswith(" end"): pack = None upgrade = False elif upgrade: # In case of upgrading, remove any patch following url and sha256 but before end line_idx_to_remove.add(idx) elif line.strip().startswith('def install'): if nodes: # add new dependency packages for node_name, node in nodes.items(): # find the right place to insert the new resource per alphabetic order i = bisect.bisect_left(list(node_index_dict.keys()), node_name) line_idx = list(node_index_dict.items())[i][1] resource = RESOURCE_TEMPLATE.render(resource=node) lines[line_idx] = resource + '\n\n' + lines[line_idx] lines = [line for idx, line in enumerate(lines) if idx not in line_idx_to_remove] new_text = "\n".join(lines) # remove dependency packages that are no longer needed for pack in packs_to_remove: new_text = re.sub(r'resource "{}" do.*?\n end\n\s+'.format(pack), '', new_text, flags=re.DOTALL) return new_text
Generate a brew formula by updating the existing one
update_formula
python
Azure/azure-cli
scripts/release/homebrew/docker/formula_generate.py
https://github.com/Azure/azure-cli/blob/master/scripts/release/homebrew/docker/formula_generate.py
MIT
def split_paragraphs(
        text: str, normalizer: Callable[[str], str], remove_empty: bool = True
) -> Tuple[TextSlice]:
    """
    This function is adapted from dolma: https://github.com/allenai/dolma

    Split a string into paragraphs. A paragraph is defined as a sequence of zero or more characters, followed
    by a newline character, or a sequence of one or more characters, followed by the end of the string.
    """
    text_slices = tuple(
        TextSlice(normalizer(text[match.start():match.end()]), match.start(), match.end())
        for match in re.finditer(r"([^\n]*\n|[^\n]+$)", text)
    )

    if remove_empty is True:
        text_slices = tuple(
            text_slice for text_slice in text_slices if text_slice[0].strip()
        )

    return text_slices
This function is adapted from dolma: https://github.com/allenai/dolma Split a string into paragraphs. A paragraph is defined as a sequence of zero or more characters, followed by a newline character, or a sequence of one or more characters, followed by the end of the string.
split_paragraphs
python
togethercomputer/RedPajama-Data
app/src/core/document.py
https://github.com/togethercomputer/RedPajama-Data/blob/master/app/src/core/document.py
Apache-2.0
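A small demonstration of the paragraph-splitting regex used above, without the TextSlice wrapper; the sample text is made up.

import re

sample = "first paragraph\n\nsecond paragraph\nlast line without newline"
for m in re.finditer(r"([^\n]*\n|[^\n]+$)", sample):
    print(m.start(), m.end(), repr(m.group()))
# Blank lines show up as "\n" chunks; split_paragraphs drops them when remove_empty=True.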
def sha1_hash32(data: bytes) -> int:
    """
    A 32-bit hash function based on SHA1.
    Note: This implementation is copied from datasketch to avoid dependency.

    Args:
        data (bytes): the data to generate 32-bit integer hash from.

    Returns:
        int: an integer hash value that can be encoded using 32 bits.
    """
    return struct.unpack("<I", hashlib.sha1(data).digest()[:4])[0]
A 32-bit hash function based on SHA1. Note: This implementation is copied from datasketch to avoid dependency. Args: data (bytes): the data to generate 32-bit integer hash from. Returns: int: an integer hash value that can be encoded using 32 bits.
sha1_hash32
python
togethercomputer/RedPajama-Data
app/src/dedupe/utils.py
https://github.com/togethercomputer/RedPajama-Data/blob/master/app/src/dedupe/utils.py
Apache-2.0
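A self-contained check of the 32-bit SHA1 hash above: take the first four bytes of the SHA1 digest and read them as a little-endian unsigned integer.

import hashlib
import struct

def sha1_hash32(data: bytes) -> int:
    # Same construction as above: first 4 digest bytes, little-endian uint32.
    return struct.unpack("<I", hashlib.sha1(data).digest()[:4])[0]

value = sha1_hash32(b"hello world")
assert 0 <= value < 2 ** 32
print(value)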
def normalize(
        text: str,
        remove_punct: bool = True,
        lowercase: bool = True,
        nfd_unicode: bool = True,
        white_space: bool = True
) -> str:
    """ Normalize the text by lowercasing and removing punctuation. """
    # remove punctuation
    if remove_punct:
        text = text.translate(TRANSLATION_TABLE_PUNCTUATION)

    # lowercase
    if lowercase:
        text = text.lower()

    if white_space:
        text = text.strip()
        text = re.sub(r"\s+", " ", text)

    # NFD unicode normalization
    if nfd_unicode:
        text = unicodedata.normalize("NFD", text)

    return text
Normalize the text by lowercasing and removing punctuation.
normalize
python
togethercomputer/RedPajama-Data
app/src/utilities/text/normalization.py
https://github.com/togethercomputer/RedPajama-Data/blob/master/app/src/utilities/text/normalization.py
Apache-2.0
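A runnable approximation of the normalize routine above. TRANSLATION_TABLE_PUNCTUATION is defined elsewhere in the RedPajama utilities; here it is assumed to be a table that simply deletes ASCII punctuation.

import re
import string
import unicodedata

# Assumption: the real TRANSLATION_TABLE_PUNCTUATION maps punctuation characters to None.
TRANSLATION_TABLE_PUNCTUATION = str.maketrans("", "", string.punctuation)

text = "  Héllo,   World!! "
text = text.translate(TRANSLATION_TABLE_PUNCTUATION)   # drop punctuation
text = text.lower()                                    # lowercase
text = re.sub(r"\s+", " ", text.strip())               # collapse whitespace
text = unicodedata.normalize("NFD", text)              # NFD normalization
print(text)  # "héllo world" (accent decomposed into base letter + combining mark)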
def parser_schedulers_config(scheduler_id: str):
    """
    add extra config parameter to scheduler
    """
    kwargs = {}
    if "karras" in scheduler_id:
        kwargs["use_karras_sigmas"] = True
    return kwargs
add extra config parameter to scheduler
parser_schedulers_config
python
ddPn08/Radiata
lib/diffusers/scheduler.py
https://github.com/ddPn08/Radiata/blob/master/lib/diffusers/scheduler.py
Apache-2.0
def get_unweighted_text_embeddings(
    self,
    text_input: torch.Tensor,
    chunk_length: int,
    no_boseos_middle: Optional[bool] = False,
) -> torch.Tensor:
    """
    When the length of tokens is a multiple of the capacity of the text encoder,
    it should be split into chunks and sent to the text encoder individually.
    """
    max_embeddings_multiples = (text_input.shape[1] - 2) // (chunk_length - 2)
    if max_embeddings_multiples > 1:
        text_embeddings = []
        for i in range(max_embeddings_multiples):
            # extract the i-th chunk
            text_input_chunk = text_input[
                :, i * (chunk_length - 2) : (i + 1) * (chunk_length - 2) + 2
            ].clone()

            # cover the head and the tail by the starting and the ending tokens
            text_input_chunk[:, 0] = text_input[0, 0]
            text_input_chunk[:, -1] = text_input[0, -1]
            text_embedding = self.text_encoder(text_input_chunk)[0]

            if no_boseos_middle:
                if i == 0:
                    # discard the ending token
                    text_embedding = text_embedding[:, :-1]
                elif i == max_embeddings_multiples - 1:
                    # discard the starting token
                    text_embedding = text_embedding[:, 1:]
                else:
                    # discard both starting and ending tokens
                    text_embedding = text_embedding[:, 1:-1]

            text_embeddings.append(text_embedding)
        text_embeddings = torch.concat(text_embeddings, axis=1)
    else:
        text_embeddings = self.text_encoder(text_input)[0]
    return text_embeddings
When the length of tokens is a multiple of the capacity of the text encoder, it should be split into chunks and sent to the text encoder individually.
get_unweighted_text_embeddings
python
ddPn08/Radiata
modules/diffusion/pipelines/lpw.py
https://github.com/ddPn08/Radiata/blob/master/modules/diffusion/pipelines/lpw.py
Apache-2.0
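A small, framework-free sketch of the chunk arithmetic used above: with a CLIP-style capacity of 77 tokens (75 content tokens plus BOS/EOS), a 152-token prompt splits into two 77-token slices whose boundaries overlap by the BOS/EOS positions. The numbers below are illustrative only.

# Illustrative only: mimic the slicing math from get_unweighted_text_embeddings.
chunk_length = 77    # text encoder capacity including BOS/EOS
prompt_length = 152  # padded prompt length including BOS/EOS (hypothetical)

max_embeddings_multiples = (prompt_length - 2) // (chunk_length - 2)  # -> 2
for i in range(max_embeddings_multiples):
    start = i * (chunk_length - 2)
    end = (i + 1) * (chunk_length - 2) + 2
    print(f"chunk {i}: tokens[{start}:{end}] ({end - start} tokens)")
# chunk 0: tokens[0:77] (77 tokens)
# chunk 1: tokens[75:152] (77 tokens)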
def __init__(self, filename, split_number):
    '''
    Getting the file name and the split index
    Initializing the output directory, if present then truncate it.
    Getting the file extension
    '''
    self.file_name = filename
    self.directory = "file_split"
    self.split = int(split_number)
    if os.path.exists(self.directory):
        shutil.rmtree(self.directory)
    os.mkdir(self.directory)
    if self.file_name.endswith('.txt'):
        self.file_extension = '.txt'
    else:
        self.file_extension = '.csv'
    self.file_number = 1
Getting the file name and the split index Initializing the output directory, if present then truncate it. Getting the file extension
__init__
python
Python-World/python-mini-projects
projects/Split_File/split_files.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/Split_File/split_files.py
MIT
def split_data(self):
    '''
    Splitting the input csv/txt file according to the index provided
    '''
    data = pd.read_csv(self.file_name, header=None)
    data.index += 1

    split_frame = pd.DataFrame()
    output_file = f"{self.directory}/split_file{self.file_number}{self.file_extension}"

    for i in range(1, len(data)+1):
        split_frame = split_frame.append(data.iloc[i-1])
        if i % self.split == 0:
            output_file = f"{self.directory}/split_file{self.file_number}{self.file_extension}"
            if self.file_extension == '.txt':
                split_frame.to_csv(output_file, header=False, index=False, sep=' ')
            else:
                split_frame.to_csv(output_file, header=False, index=False)
            split_frame.drop(split_frame.index, inplace=True)
            self.file_number += 1

    if not split_frame.empty:
        output_file = f"{self.directory}/split_file{self.file_number}{self.file_extension}"
        split_frame.to_csv(output_file, header=False, index=False)
Splitting the input csv/txt file according to the index provided
split_data
python
Python-World/python-mini-projects
projects/Split_File/split_files.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/Split_File/split_files.py
MIT
def start(update, context):
    ''' Start '''
    context.bot.send_message(update.message.chat_id,
                             "Welcome! to simple telegram bot",
                             parse_mode=ParseMode.HTML)

    ''' We can call other commands, without it being activated in the chat (/ help). '''
    coin(update, context)
Start
start
python
Python-World/python-mini-projects
projects/telegram_bot/main.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/telegram_bot/main.py
MIT
def coin(update, context):
    '''
    ⚪️ / ⚫️ Coin
    Generate a random number between 1 and 2.
    '''
    cid = update.message.chat_id
    msg = "⚫️ face " if random.randint(1, 2) == 1 else "⚪️ cross"

    ''' Reply directly on the channel where the bot was spoken to. '''
    update.message.reply_text(msg)
⚪️ / ⚫️ Coin Generate a random number between 1 and 2.
coin
python
Python-World/python-mini-projects
projects/telegram_bot/main.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/telegram_bot/main.py
MIT
def main():
    TOKEN = "1914536904:AAF4ZnqNvyg1pk-1pCPzTqhDYggAyf-1CF8"

    updater = Updater(TOKEN, use_context=True)
    dp = updater.dispatcher

    ''' Events that will activate our bot. '''
    dp.add_handler(CommandHandler('start', start))
    dp.add_handler(CommandHandler('coin', coin))
    dp.add_error_handler(error_callback)

    ''' The bot starts '''
    updater.start_polling()

    ''' or leave listening. Keep it from stopping. '''
    updater.idle()
Events that will activate our bot.
main
python
Python-World/python-mini-projects
projects/telegram_bot/main.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/telegram_bot/main.py
MIT
def readFile(src: Path):
    '''
    if the given path is a directory
    ERROR the path is a directory
    '''
    if src.is_dir():
        logger.error(f'The path {src}: is a directory')
    else:
        with open(src, 'r') as f:
            for lines in f:
                print(lines, end='')
if the given path is a directory ERROR the path is a directory
readFile
python
Python-World/python-mini-projects
projects/cat_command/cat.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/cat_command/cat.py
MIT
def __init__(self) -> None:
    """initialization function
    """
    self.login_user: Optional[Contact] = None
    super().__init__()
initialization function
__init__
python
Python-World/python-mini-projects
projects/chatbot/bot.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/chatbot/bot.py
MIT
async def on_ready(self, payload: EventReadyPayload) -> None:
    """listen for on-ready event"""
    logger.info('ready event %s...', payload)
listen for on-ready event
on_ready
python
Python-World/python-mini-projects
projects/chatbot/bot.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/chatbot/bot.py
MIT
async def on_message(self, msg: Message) -> None: """ listen for message event """ from_contact: Contact = msg.talker() text: str = msg.text() room: Optional[Room] = msg.room() msg_type: MessageType = msg.type() file_box: Optional[FileBox] = None if text == 'ding': conversation: Union[ Room, Contact] = from_contact if room is None else room await conversation.ready() await conversation.say('dong') file_box = FileBox.from_url( 'https://ss3.bdstatic.com/70cFv8Sh_Q1YnxGkpoWK1HF6hhy/it/' 'u=1116676390,2305043183&fm=26&gp=0.jpg', name='ding-dong.jpg') await conversation.say(file_box) elif msg_type == MessageType.MESSAGE_TYPE_IMAGE: logger.info('receving image file') # file_box: FileBox = await msg.to_file_box() image: Image = msg.to_image() hd_file_box: FileBox = await image.hd() await hd_file_box.to_file('./hd-image.jpg', overwrite=True) thumbnail_file_box: FileBox = await image.thumbnail() await thumbnail_file_box.to_file('./thumbnail-image.jpg', overwrite=True) artwork_file_box: FileBox = await image.artwork() await artwork_file_box.to_file('./artwork-image.jpg', overwrite=True) # reply the image await msg.say(hd_file_box) # pylint: disable=C0301 elif msg_type in [MessageType.MESSAGE_TYPE_AUDIO, MessageType.MESSAGE_TYPE_ATTACHMENT, MessageType.MESSAGE_TYPE_VIDEO]: logger.info('receving file ...') file_box = await msg.to_file_box() if file_box: await file_box.to_file(file_box.name) elif msg_type == MessageType.MESSAGE_TYPE_MINI_PROGRAM: logger.info('receving mini-program ...') mini_program: Optional[MiniProgram] = await msg.to_mini_program() if mini_program: await msg.say(mini_program) elif text == 'get room members' and room: logger.info('get room members ...') room_members: List[Contact] = await room.member_list() names: List[str] = [ room_member.name for room_member in room_members] await msg.say('\n'.join(names)) elif text.startswith('remove room member:'): logger.info('remove room member:') if not room: await msg.say('this is not room zone') return room_member_name = text[len('remove room member:') + 1:] room_member: Optional[Contact] = await room.member( query=RoomMemberQueryFilter(name=room_member_name) ) if room_member: if self.login_user and self.login_user.contact_id in room.payload.admin_ids: await room.delete(room_member) else: await msg.say('登录用户不是该群管理员...') else: await msg.say(f'can not fine room member by name<{room_member_name}>') elif text.startswith('get room topic'): logger.info('get room topic') if room: topic: Optional[str] = await room.topic() if topic: await msg.say(topic) elif text.startswith('rename room topic:'): logger.info('rename room topic ...') if room: new_topic = text[len('rename room topic:') + 1:] await msg.say(new_topic) elif text.startswith('add new friend:'): logger.info('add new friendship ...') identity_info = text[len('add new friend:'):] weixin_contact: Optional[Contact] = await self.Friendship.search(weixin=identity_info) phone_contact: Optional[Contact] = await self.Friendship.search(phone=identity_info) contact: Optional[Contact] = weixin_contact or phone_contact if contact: await self.Friendship.add(contact, 'hello world ...') elif text.startswith('at me'): if room: talker = msg.talker() await room.say('hello', mention_ids=[talker.contact_id]) elif text.startswith('my alias'): talker = msg.talker() alias = await talker.alias() await msg.say('your alias is:' + (alias or '')) elif text.startswith('set alias:'): talker = msg.talker() new_alias = text[len('set alias:'):] # set your new alias alias = await talker.alias(new_alias) # get your new alias 
alias = await talker.alias() await msg.say('your new alias is:' + (alias or '')) elif text.startswith('find friends:'): friend_name: str = text[len('find friends:'):] friend = await self.Contact.find(friend_name) if friend: logger.info('find only one friend <%s>', friend) friends: List[Contact] = await self.Contact.find_all(friend_name) logger.info('find friend<%d>', len(friends)) logger.info(friends) else: pass if msg.type() == MessageType.MESSAGE_TYPE_UNSPECIFIED: talker = msg.talker() assert isinstance(talker, Contact)
listen for message event
on_message
python
Python-World/python-mini-projects
projects/chatbot/bot.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/chatbot/bot.py
MIT
async def on_login(self, contact: Contact) -> None:
    """login event

    Args:
        contact (Contact): the account logined
    """
    logger.info('Contact<%s> has logined ...', contact)
    self.login_user = contact
login event Args: contact (Contact): the account logined
on_login
python
Python-World/python-mini-projects
projects/chatbot/bot.py
https://github.com/Python-World/python-mini-projects/blob/master/projects/chatbot/bot.py
MIT