<SYSTEM_TASK:> Set the resource that will be used for displaying QuickCharts in dataset preview <END_TASK> <USER_TASK:> Description: def set_quickchart_resource(self, resource): # type: (Union[hdx.data.resource.Resource,Dict,str,int]) -> bool """Set the resource that will be used for displaying QuickCharts in dataset preview Args: resource (Union[hdx.data.resource.Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position Returns: bool: Returns True if resource for QuickCharts in dataset preview set or False if not """
if isinstance(resource, int) and not isinstance(resource, bool):
    resource = self.get_resources()[resource]
if isinstance(resource, hdx.data.resource.Resource) or isinstance(resource, dict):
    res = resource.get('id')
    if res is None:
        resource = resource['name']
    else:
        resource = res
elif not isinstance(resource, str):
    raise hdx.data.hdxobject.HDXError('Resource id cannot be found in type %s!' % type(resource).__name__)
if is_valid_uuid(resource) is True:
    search = 'id'
else:
    search = 'name'
changed = False
for dataset_resource in self.resources:
    if dataset_resource[search] == resource:
        dataset_resource.enable_dataset_preview()
        self.preview_resource()
        changed = True
    else:
        dataset_resource.disable_dataset_preview()
return changed
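A minimal usage sketch (the dataset name and resource name below are hypothetical, and persisting the toggled preview flags with update_in_hdx is an assumption about typical usage, not taken from this source):

from hdx.data.dataset import Dataset

dataset = Dataset.read_from_hdx('hypothetical-dataset-name')
if dataset.set_quickchart_resource('QuickCharts data'):
    dataset.update_in_hdx()  # assumed follow-up step to persist the preview flags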
<SYSTEM_TASK:> Create default resource views for all resources in dataset <END_TASK> <USER_TASK:> Description: def create_default_views(self, create_datastore_views=False): # type: (bool) -> None """Create default resource views for all resources in dataset Args: create_datastore_views (bool): Whether to try to create resource views that point to the datastore Returns: None """
package = deepcopy(self.data)
if self.resources:
    package['resources'] = self._convert_hdxobjects(self.resources)
data = {'package': package, 'create_datastore_views': create_datastore_views}
self._write_to_hdx('create_default_views', data, 'package')
<SYSTEM_TASK:> Return HDX site username and password <END_TASK> <USER_TASK:> Description: def _get_credentials(self): # type: () -> Optional[Tuple[str, str]] """ Return HDX site username and password Returns: Optional[Tuple[str, str]]: HDX site username and password or None """
site = self.data[self.hdx_site]
username = site.get('username')
if username:
    return b64decode(username).decode('utf-8'), b64decode(site['password']).decode('utf-8')
else:
    return None
<SYSTEM_TASK:> Calls the remote CKAN <END_TASK> <USER_TASK:> Description: def call_remoteckan(self, *args, **kwargs): # type: (Any, Any) -> Dict """ Calls the remote CKAN Args: *args: Arguments to pass to remote CKAN call_action method **kwargs: Keyword arguments to pass to remote CKAN call_action method Returns: Dict: The response from the remote CKAN call_action method """
requests_kwargs = kwargs.get('requests_kwargs', dict())
credentials = self._get_credentials()
if credentials:
    requests_kwargs['auth'] = credentials
kwargs['requests_kwargs'] = requests_kwargs
apikey = kwargs.get('apikey', self.get_api_key())
kwargs['apikey'] = apikey
return self.remoteckan().call_action(*args, **kwargs)
<SYSTEM_TASK:> Create remote CKAN instance from configuration <END_TASK> <USER_TASK:> Description: def create_remoteckan(cls, site_url, user_agent=None, user_agent_config_yaml=None, user_agent_lookup=None, session=None, **kwargs): # type: (str, Optional[str], Optional[str], Optional[str], requests.Session, Any) -> ckanapi.RemoteCKAN """ Create remote CKAN instance from configuration Args: site_url (str): Site url. user_agent (Optional[str]): User agent string. HDXPythonLibrary/X.X.X- is prefixed. user_agent_config_yaml (Optional[str]): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml. user_agent_lookup (Optional[str]): Lookup key for YAML. Ignored if user_agent supplied. session (requests.Session): requests Session object to use. Defaults to calling hdx.utilities.session.get_session() Returns: ckanapi.RemoteCKAN: Remote CKAN instance """
if not session:
    session = get_session(user_agent, user_agent_config_yaml, user_agent_lookup,
                          prefix=Configuration.prefix,
                          method_whitelist=frozenset(['HEAD', 'TRACE', 'GET', 'POST', 'PUT', 'OPTIONS', 'DELETE']),
                          **kwargs)
    ua = session.headers['User-Agent']
else:
    ua = kwargs.get('full_agent')
    if not ua:
        ua = UserAgent.get(user_agent, user_agent_config_yaml, user_agent_lookup,
                           prefix=Configuration.prefix, **kwargs)
return ckanapi.RemoteCKAN(site_url, user_agent=ua, session=session)
<SYSTEM_TASK:> Set up remote CKAN from provided CKAN or by creating from configuration <END_TASK> <USER_TASK:> Description: def setup_remoteckan(self, remoteckan=None, **kwargs): # type: (Optional[ckanapi.RemoteCKAN], Any) -> None """ Set up remote CKAN from provided CKAN or by creating from configuration Args: remoteckan (Optional[ckanapi.RemoteCKAN]): CKAN instance. Defaults to setting one up from configuration. Returns: None """
if remoteckan is None:
    self._remoteckan = self.create_remoteckan(self.get_hdx_site_url(),
                                              full_agent=self.get_user_agent(), **kwargs)
else:
    self._remoteckan = remoteckan
<SYSTEM_TASK:> Set up the HDX configuration <END_TASK> <USER_TASK:> Description: def setup(cls, configuration=None, **kwargs): # type: (Optional['Configuration'], Any) -> None """ Set up the HDX configuration Args: configuration (Optional[Configuration]): Configuration instance. Defaults to setting one up from passed arguments. **kwargs: See below user_agent (str): User agent string. HDXPythonLibrary/X.X.X- is prefixed. Must be supplied if remoteckan is not. user_agent_config_yaml (str): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml. user_agent_lookup (str): Lookup key for YAML. Ignored if user_agent supplied. hdx_url (str): HDX url to use. Overrides hdx_site. hdx_site (str): HDX site to use eg. prod, test. hdx_read_only (bool): Whether to access HDX in read only mode. Defaults to False. hdx_key (str): Your HDX key. Ignored if hdx_read_only = True. hdx_config_dict (dict): HDX configuration dictionary to use instead of above 3 parameters OR hdx_config_json (str): Path to JSON HDX configuration OR hdx_config_yaml (str): Path to YAML HDX configuration project_config_dict (dict): Project configuration dictionary OR project_config_json (str): Path to JSON Project configuration OR project_config_yaml (str): Path to YAML Project configuration hdx_base_config_dict (dict): HDX base configuration dictionary OR hdx_base_config_json (str): Path to JSON HDX base configuration OR hdx_base_config_yaml (str): Path to YAML HDX base configuration. Defaults to library's internal hdx_base_configuration.yml. Returns: None """
if configuration is None:
    cls._configuration = Configuration(**kwargs)
else:
    cls._configuration = configuration
<SYSTEM_TASK:> Create HDX configuration <END_TASK> <USER_TASK:> Description: def _create(cls, configuration=None, remoteckan=None, **kwargs): # type: (Optional['Configuration'], Optional[ckanapi.RemoteCKAN], Any) -> str """ Create HDX configuration Args: configuration (Optional[Configuration]): Configuration instance. Defaults to setting one up from passed arguments. remoteckan (Optional[ckanapi.RemoteCKAN]): CKAN instance. Defaults to setting one up from configuration. **kwargs: See below user_agent (str): User agent string. HDXPythonLibrary/X.X.X- is prefixed. Must be supplied if remoteckan is not. user_agent_config_yaml (str): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml. user_agent_lookup (str): Lookup key for YAML. Ignored if user_agent supplied. hdx_url (str): HDX url to use. Overrides hdx_site. hdx_site (str): HDX site to use eg. prod, test. hdx_read_only (bool): Whether to access HDX in read only mode. Defaults to False. hdx_key (str): Your HDX key. Ignored if hdx_read_only = True. hdx_config_dict (dict): HDX configuration dictionary to use instead of above 3 parameters OR hdx_config_json (str): Path to JSON HDX configuration OR hdx_config_yaml (str): Path to YAML HDX configuration project_config_dict (dict): Project configuration dictionary OR project_config_json (str): Path to JSON Project configuration OR project_config_yaml (str): Path to YAML Project configuration hdx_base_config_dict (dict): HDX base configuration dictionary OR hdx_base_config_json (str): Path to JSON HDX base configuration OR hdx_base_config_yaml (str): Path to YAML HDX base configuration. Defaults to library's internal hdx_base_configuration.yml. Returns: str: HDX site url """
kwargs = cls._environment_variables(**kwargs)
cls.setup(configuration, **kwargs)
cls._configuration.setup_remoteckan(remoteckan, **kwargs)
return cls._configuration.get_hdx_site_url()
<SYSTEM_TASK:> Convert a dictionary into a string with assignments <END_TASK> <USER_TASK:> Description: def kwargs_to_variable_assignment(kwargs: dict, value_representation=repr, assignment_operator: str = ' = ', statement_separator: str = '\n', statement_per_line: bool = False) -> str: """ Convert a dictionary into a string with assignments Each assignment is constructed based on: key assignment_operator value_representation(value) statement_separator, where key and value are the key and value of the dictionary. Moreover one can seprate the assignment statements by new lines. Parameters ---------- kwargs : dict assignment_operator: str, optional: Assignment operator (" = " in python) value_representation: str, optinal How to represent the value in the assignments (repr function in python) statement_separator : str, optional: Statement separator (new line in python) statement_per_line: bool, optional Insert each statement on a different line Returns ------- str All the assignemnts. >>> kwargs_to_variable_assignment({'a': 2, 'b': "abc"}) "a = 2\\nb = 'abc'\\n" >>> kwargs_to_variable_assignment({'a':2 ,'b': "abc"}, statement_per_line=True) "a = 2\\n\\nb = 'abc'\\n" >>> kwargs_to_variable_assignment({'a': 2}) 'a = 2\\n' >>> kwargs_to_variable_assignment({'a': 2}, statement_per_line=True) 'a = 2\\n' """
code = []
join_str = '\n' if statement_per_line else ''
for key, value in kwargs.items():
    code.append(key + assignment_operator + value_representation(value) + statement_separator)
return join_str.join(code)
<SYSTEM_TASK:> Simple wrapper of json.load and json.loads. <END_TASK> <USER_TASK:> Description: def decode_json(json_input: Union[str, None] = None): """ Simple wrapper of json.load and json.loads. If json_input is None the output is an empty dictionary. If the input is a string that ends in .json it is decoded using json.load. Otherwise it is decoded using json.loads. Parameters ---------- json_input : str, None, optional input json object Returns ------- Decoded json object >>> decode_json() {} >>> decode_json('{"flag":true}') {'flag': True} >>> decode_json('{"value":null}') {'value': None} """
if json_input is None:
    return {}
else:
    if isinstance(json_input, str) is False:
        raise TypeError()
    elif json_input[-5:] == ".json":
        with open(json_input) as f:
            decoded_json = json.load(f)
    else:
        decoded_json = json.loads(json_input)
    return decoded_json
<SYSTEM_TASK:> Check if an object is jsonable. <END_TASK> <USER_TASK:> Description: def is_jsonable(obj) -> bool: """ Check if an object is jsonable. An object is jsonable if it is json serialisable and by loading its json representation the same object is recovered. Parameters ---------- obj : Python object Returns ------- bool >>> is_jsonable([1,2,3]) True >>> is_jsonable((1,2,3)) False >>> is_jsonable({'a':True,'b':1,'c':None}) True """
try:
    return obj == json.loads(json.dumps(obj))
except TypeError:
    return False
except:
    raise
<SYSTEM_TASK:> Check if an expresion can be literal_eval. <END_TASK> <USER_TASK:> Description: def is_literal_eval(node_or_string) -> tuple: """ Check if an expresion can be literal_eval. ---------- node_or_string : Input Returns ------- tuple (bool,python object) If it can be literal_eval the python object is returned. Otherwise None it is returned. >>> is_literal_eval('[1,2,3]') (True, [1, 2, 3]) >>> is_literal_eval('a') (False, None) """
try:
    obj = ast.literal_eval(node_or_string)
    return (True, obj)
except:
    return (False, None)
<SYSTEM_TASK:> Return the duplicates in a list. <END_TASK> <USER_TASK:> Description: def find_duplicates(l: list) -> set: """ Return the duplicates in a list. The function relies on https://stackoverflow.com/questions/9835762/find-and-list-duplicates-in-a-list . Parameters ---------- l : list Name Returns ------- set Duplicated values >>> find_duplicates([1,2,3]) set() >>> find_duplicates([1,2,1]) {1} """
return set([x for x in l if l.count(x) > 1])
<SYSTEM_TASK:> Sort a dictionary by key or value. <END_TASK> <USER_TASK:> Description: def sort_dict(d: dict, by: str = 'key', allow_duplicates: bool = True) -> collections.OrderedDict: """ Sort a dictionary by key or value. The function relies on https://docs.python.org/3/library/collections.html#collections.OrderedDict . The dulicated are determined based on https://stackoverflow.com/questions/9835762/find-and-list-duplicates-in-a-list . Parameters ---------- d : dict Input dictionary by : ['key','value'], optional By what to sort the input dictionary allow_duplicates : bool, optional Flag to indicate if the duplicates are allowed. Returns ------- collections.OrderedDict Sorted dictionary. >>> sort_dict({2: 3, 1: 2, 3: 1}) OrderedDict([(1, 2), (2, 3), (3, 1)]) >>> sort_dict({2: 3, 1: 2, 3: 1}, by='value') OrderedDict([(3, 1), (1, 2), (2, 3)]) >>> sort_dict({'2': 3, '1': 2}, by='value') OrderedDict([('1', 2), ('2', 3)]) >>> sort_dict({2: 1, 1: 2, 3: 1}, by='value', allow_duplicates=False) Traceback (most recent call last): ... ValueError: There are duplicates in the values: {1} >>> sort_dict({1:1,2:3},by=True) Traceback (most recent call last): ... ValueError: by can be 'key' or 'value'. """
if by == 'key':
    i = 0
elif by == 'value':
    values = list(d.values())
    if len(values) != len(set(values)) and not allow_duplicates:
        duplicates = find_duplicates(values)
        raise ValueError("There are duplicates in the values: {}".format(duplicates))
    i = 1
else:
    raise ValueError("by can be 'key' or 'value'.")
return collections.OrderedDict(sorted(d.items(), key=lambda t: t[i]))
<SYSTEM_TASK:> Group a dictionary by values. <END_TASK> <USER_TASK:> Description: def group_dict_by_value(d: dict) -> dict: """ Group a dictionary by values. Parameters ---------- d : dict Input dictionary Returns ------- dict Output dictionary. The keys are the values of the initial dictionary and the values ae given by a list of keys corresponding to the value. >>> group_dict_by_value({2: 3, 1: 2, 3: 1}) {3: [2], 2: [1], 1: [3]} >>> group_dict_by_value({2: 3, 1: 2, 3: 1, 10:1, 12: 3}) {3: [2, 12], 2: [1], 1: [3, 10]} """
d_out = {}
for k, v in d.items():
    if v in d_out:
        d_out[v].append(k)
    else:
        d_out[v] = [k]
return d_out
<SYSTEM_TASK:> Find the possible parameters and "global" variables from a python code. <END_TASK> <USER_TASK:> Description: def variable_status(code: str, exclude_variable: Union[set, None] = None, jsonable_parameter: bool = True) -> tuple: """ Find the possible parameters and "global" variables from a python code. This is achieved by parsing the abstract syntax tree. Parameters ---------- code : str Input code as string. exclude_variable : set, None, optional Variable to exclude. jsonable_parameter: bool, True, optional Consider only jsonable parameter Returns ------- tuple (a set of possible parameter, a set of parameter to exclude, a dictionary of possible parameter ) A variable is a possible parameter if 1) it is not in the input exclude_variable, 2) the code contains only assignments, and 3) it is used only to bound objects. The set of parameter to exclude is the union of the input exclude_variable and all names that looks like a global variable. The dictionary of possible parameter {parameter name, parameter value} is available only if jsonable_parameter is True. >>> variable_status("a=3") ({'a'}, {'a'}, {'a': 3}) >>> variable_status("a=3",jsonable_parameter=False) ({'a'}, {'a'}, {}) >>> variable_status("a += 1") (set(), {'a'}, {}) >>> variable_status("def f(x,y=3):\\n\\t pass") (set(), {'f'}, {}) >>> variable_status("class C(A):\\n\\t pass") (set(), {'C'}, {}) >>> variable_status("import f") (set(), {'f'}, {}) >>> variable_status("import f as g") (set(), {'g'}, {}) >>> variable_status("from X import f") (set(), {'f'}, {}) >>> variable_status("from X import f as g") (set(), {'g'}, {}) """
if exclude_variable is None:
    exclude_variable = set()
else:
    exclude_variable = copy.deepcopy(exclude_variable)
root = ast.parse(code)
store_variable_name = set()
assign_only = True
dict_parameter = {}
for node in ast.iter_child_nodes(root):
    if isinstance(node, ast.Assign):
        for assign_node in ast.walk(node):
            if isinstance(assign_node, ast.Name):
                if isinstance(assign_node.ctx, ast.Store):
                    if jsonable_parameter is False:
                        store_variable_name |= {assign_node.id}
                else:
                    exclude_variable |= {assign_node.id}
        _is_literal_eval, _value = is_literal_eval(node.value)
        if jsonable_parameter is True:
            for assign_node in ast.iter_child_nodes(node):
                if isinstance(assign_node, ast.Tuple):
                    i = 0
                    for assign_tuple_node in ast.iter_child_nodes(assign_node):
                        if isinstance(assign_tuple_node, ast.Name):
                            # note: collections.Iterable was removed in Python 3.10;
                            # use collections.abc.Iterable there
                            if isinstance(_value, (collections.Iterable)) and is_jsonable(_value[i]) and _is_literal_eval:
                                dict_parameter[assign_tuple_node.id] = _value[i]
                                store_variable_name |= {assign_tuple_node.id}
                            else:
                                exclude_variable |= {assign_tuple_node.id}
                        i += 1
                else:
                    if isinstance(assign_node, ast.Name):
                        if is_jsonable(_value) and _is_literal_eval:
                            dict_parameter[assign_node.id] = _value
                            store_variable_name |= {assign_node.id}
                        else:
                            exclude_variable |= {assign_node.id}
    elif isinstance(node, ast.AugAssign):
        for assign_node in ast.walk(node):
            if isinstance(assign_node, ast.Name):
                exclude_variable |= {assign_node.id}
    # class and function
    elif isinstance(node, (ast.FunctionDef, ast.ClassDef)):
        assign_only = False
        exclude_variable |= {node.name}
    # import
    elif isinstance(node, ast.Import):
        assign_only = False
        for node1 in ast.iter_child_nodes(node):
            if node1.asname is not None:
                exclude_variable |= {node1.asname}
            else:
                exclude_variable |= {node1.name}
    # import from
    elif isinstance(node, ast.ImportFrom):
        assign_only = False
        for node1 in ast.iter_child_nodes(node):
            if node1.asname is not None:
                exclude_variable |= {node1.asname}
            else:
                exclude_variable |= {node1.name}
    else:
        assign_only = False
if assign_only is True:
    possible_parameter = store_variable_name - exclude_variable
    if jsonable_parameter is True:
        dict_parameter = {k: dict_parameter[k] for k in possible_parameter}
    return (possible_parameter, store_variable_name | exclude_variable, dict_parameter)
return set(), store_variable_name | exclude_variable, {}
<SYSTEM_TASK:> Increment the name where the incremental part is given by parameters. <END_TASK> <USER_TASK:> Description: def increment_name(name: str, start_marker: str = " (", end_marker: str = ")") -> str: """ Increment the name where the incremental part is given by parameters. Parameters ---------- name : str, nbformat.notebooknode.NotebookNode Name start_marker : str The marker used before the incremental end_marker : str The marker after the incrementa Returns ------- str Incremented name. >>> increment_name('abc') 'abc (1)' >>> increment_name('abc(1)') 'abc(1) (1)' >>> increment_name('abc (123)') 'abc (124)' >>> increment_name('abc-1',start_marker='-',end_marker='') 'abc-2' >>> increment_name('abc[2]',start_marker='[',end_marker=']') 'abc[3]' >>> increment_name('abc1',start_marker='',end_marker='') Traceback (most recent call last): ... ValueError: start_marker can not be the empty string. """
if start_marker == '':
    raise ValueError("start_marker can not be the empty string.")
a = name
start = len(a) - a[::-1].find(start_marker[::-1])
if (a[len(a)-len(end_marker):len(a)] == end_marker and
        start < (len(a)-len(end_marker)) and
        a[start-len(start_marker):start] == start_marker and
        a[start:len(a)-len(end_marker)].isdigit()):
    old_int = int(a[start:len(a)-len(end_marker)])
    new_int = old_int + 1
    new_name = a[:start] + str(new_int) + end_marker
else:
    new_name = a + start_marker + '1' + end_marker
return new_name
<SYSTEM_TASK:> Reads the resource view given by identifier from HDX and returns ResourceView object <END_TASK> <USER_TASK:> Description: def read_from_hdx(identifier, configuration=None): # type: (str, Optional[Configuration]) -> Optional['ResourceView'] """Reads the resource view given by identifier from HDX and returns ResourceView object Args: identifier (str): Identifier of resource view configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[ResourceView]: ResourceView object if successful read, None if not """
resourceview = ResourceView(configuration=configuration)
result = resourceview._load_from_hdx('resource view', identifier)
if result:
    return resourceview
return None
<SYSTEM_TASK:> Read all resource views for a resource given by identifier from HDX and returns list of ResourceView objects <END_TASK> <USER_TASK:> Description: def get_all_for_resource(identifier, configuration=None): # type: (str, Optional[Configuration]) -> List['ResourceView'] """Read all resource views for a resource given by identifier from HDX and returns list of ResourceView objects Args: identifier (str): Identifier of resource configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: List[ResourceView]: List of ResourceView objects """
resourceview = ResourceView(configuration=configuration)
success, result = resourceview._read_from_hdx('resource view', identifier, 'id', ResourceView.actions()['list'])
resourceviews = list()
if success:
    for resourceviewdict in result:
        resourceview = ResourceView(resourceviewdict, configuration=configuration)
        resourceviews.append(resourceview)
return resourceviews
<SYSTEM_TASK:> Check if resource view exists in HDX and if so, update it, otherwise create resource view <END_TASK> <USER_TASK:> Description: def create_in_hdx(self): # type: () -> None """Check if resource view exists in HDX and if so, update it, otherwise create resource view Returns: None """
self.check_required_fields()
if not self._update_resource_view(log=True):
    self._save_to_hdx('create', 'title')
<SYSTEM_TASK:> Get tags cleanup dictionaries <END_TASK> <USER_TASK:> Description: def tagscleanupdicts(configuration=None, url=None, keycolumn=5, failchained=True): # type: (Optional[Configuration], Optional[str], int, bool) -> Tuple[Dict,List] """ Get tags cleanup dictionaries Args: configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. url (Optional[str]): Url of tags cleanup spreadsheet. Defaults to None (internal configuration parameter). keycolumn (int): Column number of tag column in spreadsheet. Defaults to 5. failchained (bool): Fail if chained rules found. Defaults to True. Returns: Tuple[Dict,List]: Returns (Tags dictionary, Wildcard tags list) """
if not Tags._tags_dict:
    if configuration is None:
        configuration = Configuration.read()
    with Download(full_agent=configuration.get_user_agent()) as downloader:
        if url is None:
            url = configuration['tags_cleanup_url']
        Tags._tags_dict = downloader.download_tabular_rows_as_dicts(url, keycolumn=keycolumn)
    keys = Tags._tags_dict.keys()
    chainerror = False
    for i, tag in enumerate(keys):
        whattodo = Tags._tags_dict[tag]
        action = whattodo[u'action']
        final_tags = whattodo[u'final tags (semicolon separated)']
        for final_tag in final_tags.split(';'):
            if final_tag in keys:
                index = list(keys).index(final_tag)
                if index != i:
                    whattodo2 = Tags._tags_dict[final_tag]
                    action2 = whattodo2[u'action']
                    if action2 != 'OK' and action2 != 'Other':
                        final_tags2 = whattodo2[u'final tags (semicolon separated)']
                        if final_tag not in final_tags2.split(';'):
                            chainerror = True
                            if failchained:
                                logger.error('Chained rules: %s (%s -> %s) | %s (%s -> %s)' %
                                             (action, tag, final_tags, action2, final_tag, final_tags2))
    if failchained and chainerror:
        raise ChainRuleError('Chained rules for tags detected!')
    Tags._wildcard_tags = list()
    for tag in Tags._tags_dict:
        if '*' in tag:
            Tags._wildcard_tags.append(tag)
return Tags._tags_dict, Tags._wildcard_tags
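A hedged usage sketch, assuming this function is exposed as a static method on the Tags class, as the signature and caching on Tags._tags_dict suggest; results are cached on first call:

tags_dict, wildcard_tags = Tags.tagscleanupdicts(failchained=False)
print('%d cleanup rules, %d wildcard tags' % (len(tags_dict), len(wildcard_tags)))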
<SYSTEM_TASK:> Reads the user given by identifier from HDX and returns User object <END_TASK> <USER_TASK:> Description: def read_from_hdx(identifier, configuration=None): # type: (str, Optional[Configuration]) -> Optional['User'] """Reads the user given by identifier from HDX and returns User object Args: identifier (str): Identifier of user configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[User]: User object if successful read, None if not """
user = User(configuration=configuration)
result = user._load_from_hdx('user', identifier)
if result:
    return user
return None
<SYSTEM_TASK:> Check if user exists in HDX and if so, update it, otherwise create user <END_TASK> <USER_TASK:> Description: def create_in_hdx(self): # type: () -> None """Check if user exists in HDX and if so, update it, otherwise create user Returns: None """
capacity = self.data.get('capacity')
if capacity is not None:
    del self.data['capacity']
self._create_in_hdx('user', 'id', 'name')
if capacity is not None:
    self.data['capacity'] = capacity
<SYSTEM_TASK:> Get all users in HDX <END_TASK> <USER_TASK:> Description: def get_all_users(configuration=None, **kwargs): # type: (Optional[Configuration], Any) -> List['User'] """Get all users in HDX Args: configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. **kwargs: See below q (str): Restrict to names containing a string. Defaults to all users. order_by (str): Field by which to sort - any user field or edits (number_of_edits). Defaults to 'name'. Returns: List[User]: List of all users in HDX """
user = User(configuration=configuration)
user['id'] = 'all users'  # only for error message if produced
result = user._write_to_hdx('list', kwargs, 'id')
users = list()
if result:
    for userdict in result:
        user = User(userdict, configuration=configuration)
        users.append(user)
else:
    logger.debug(result)
return users
<SYSTEM_TASK:> Get organizations in HDX that this user is a member of. <END_TASK> <USER_TASK:> Description: def get_organizations(self, permission='read'): # type: (str) -> List['Organization'] """Get organizations in HDX that this user is a member of. Args: permission (str): Permission to check for. Defaults to 'read'. Returns: List[Organization]: List of organizations in HDX that this user is a member of """
success, result = self._read_from_hdx('user', self.data['name'], 'id', self.actions()['listorgs'], permission=permission)
organizations = list()
if success:
    for organizationdict in result:
        organization = hdx.data.organization.Organization.read_from_hdx(organizationdict['id'])
        organizations.append(organization)
return organizations
<SYSTEM_TASK:> Facade to simplify project setup that calls project main function <END_TASK> <USER_TASK:> Description: def facade(projectmainfn, **kwargs): # (Callable[[None], None], Any) -> None """Facade to simplify project setup that calls project main function Args: projectmainfn ((None) -> None): main function of project **kwargs: configuration parameters to pass to HDX Configuration class Returns: None """
#
# Setting up configuration
#
site_url = Configuration._create(**kwargs)
logger.info('--------------------------------------------------')
logger.info('> Using HDX Python API Library %s' % Configuration.apiversion)
logger.info('> HDX Site: %s' % site_url)
UserAgent.user_agent = Configuration.read().user_agent
projectmainfn()
<SYSTEM_TASK:> Tries loading the config from the given path. If no path is specified, the default config path <END_TASK> <USER_TASK:> Description: def get_lint_config(config_path=None): """ Tries loading the config from the given path. If no path is specified, the default config path is tried, and if that is not specified, we the default config is returned. """
# config path specified
if config_path:
    config = LintConfig.load_from_file(config_path)
    click.echo("Using config from {0}".format(config_path))
# default config path
elif os.path.exists(DEFAULT_CONFIG_FILE):
    config = LintConfig.load_from_file(DEFAULT_CONFIG_FILE)
    click.echo("Using config from {0}".format(DEFAULT_CONFIG_FILE))
# no config file
else:
    config = LintConfig()
return config
<SYSTEM_TASK:> Markdown lint tool, checks your markdown for styling issues <END_TASK> <USER_TASK:> Description: def cli(list_files, config, ignore, path): """ Markdown lint tool, checks your markdown for styling issues """
files = MarkdownFileFinder.find_files(path)
if list_files:
    echo_files(files)
lint_config = get_lint_config(config)
lint_config.apply_on_csv_string(ignore, lint_config.disable_rule)
linter = MarkdownLinter(lint_config)
error_count = linter.lint_files(files)
exit(error_count)
<SYSTEM_TASK:> Run the daemon <END_TASK> <USER_TASK:> Description: def run(self, check_interval=300): """ Run the daemon :type check_interval: int :param check_interval: Delay in seconds between checks """
while True:
    # Read configuration from the config file if present, else fall
    # back to command line options
    if args.config:
        config = config_file_parser.get_configuration(args.config)
        access_key_id = config['access-key-id']
        secret_access_key = config['secret-access-key']
        region = config['region']
    else:
        access_key_id = args.access_key_id
        secret_access_key = args.secret_access_key
        region = args.region

    # Connect to AWS
    connection = connection_manager.connect_to_ec2(
        region, access_key_id, secret_access_key)

    snapshot_manager.run(connection)

    logger.info('Sleeping {} seconds until next check'.format(
        check_interval))
    time.sleep(check_interval)
<SYSTEM_TASK:> Iterates over the lines in a given markdown string and applies all the enabled line rules to each line <END_TASK> <USER_TASK:> Description: def _apply_line_rules(self, markdown_string): """ Iterates over the lines in a given markdown string and applies all the enabled line rules to each line """
all_violations = []
lines = markdown_string.split("\n")
line_rules = self.line_rules
line_nr = 1
ignoring = False
for line in lines:
    if ignoring:
        if line.strip() == '<!-- markdownlint:enable -->':
            ignoring = False
    else:
        if line.strip() == '<!-- markdownlint:disable -->':
            ignoring = True
            continue
        for rule in line_rules:
            violation = rule.validate(line)
            if violation:
                violation.line_nr = line_nr
                all_violations.append(violation)
    line_nr += 1
return all_violations
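To illustrate the markers this loop honours, a hedged sketch of input that suppresses line rules for one line ('linter' is a hypothetical MarkdownLinter instance):

markdown = '\n'.join([
    'A checked line',
    '<!-- markdownlint:disable -->',
    'this line is skipped by all line rules',
    '<!-- markdownlint:enable -->',
    'Checked again',
])
violations = linter._apply_line_rules(markdown)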
<SYSTEM_TASK:> Read values from a byte stream. <END_TASK> <USER_TASK:> Description: def ReadFrom(self, byte_stream): """Read values from a byte stream. Args: byte_stream (bytes): byte stream. Returns: tuple[object, ...]: values copies from the byte stream. Raises: IOError: if byte stream cannot be read. OSError: if byte stream cannot be read. """
try:
    return self._struct.unpack_from(byte_stream)
except (TypeError, struct.error) as exception:
    raise IOError('Unable to read byte stream with error: {0!s}'.format(
        exception))
<SYSTEM_TASK:> Writes values to a byte stream. <END_TASK> <USER_TASK:> Description: def WriteTo(self, values): """Writes values to a byte stream. Args: values (tuple[object, ...]): values to copy to the byte stream. Returns: bytes: byte stream. Raises: IOError: if byte stream cannot be written. OSError: if byte stream cannot be read. """
try:
    return self._struct.pack(*values)
except (TypeError, struct.error) as exception:
    raise IOError('Unable to write stream with error: {0!s}'.format(
        exception))
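For context, a self-contained sketch of the struct round trip these two wrappers delegate to; the '<IH' format string is an arbitrary example, not this library's actual layout:

import struct

fmt = struct.Struct('<IH')        # little-endian uint32 followed by uint16
stream = fmt.pack(1024, 7)        # the WriteTo direction
values = fmt.unpack_from(stream)  # the ReadFrom direction
assert values == (1024, 7)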
<SYSTEM_TASK:> Ensure that we have snapshots for a given volume <END_TASK> <USER_TASK:> Description: def run(connection): """ Ensure that we have snapshots for a given volume :type connection: boto.ec2.connection.EC2Connection :param connection: EC2 connection object :returns: None """
volumes = volume_manager.get_watched_volumes(connection)
for volume in volumes:
    _ensure_snapshot(connection, volume)
    _remove_old_snapshots(connection, volume)
<SYSTEM_TASK:> Ensure that a given volume has an appropriate snapshot <END_TASK> <USER_TASK:> Description: def _ensure_snapshot(connection, volume): """ Ensure that a given volume has an appropriate snapshot :type connection: boto.ec2.connection.EC2Connection :param connection: EC2 connection object :type volume: boto.ec2.volume.Volume :param volume: Volume to check :returns: None """
if 'AutomatedEBSSnapshots' not in volume.tags:
    logger.warning(
        'Missing tag AutomatedEBSSnapshots for volume {}'.format(
            volume.id))
    return

interval = volume.tags['AutomatedEBSSnapshots']
if volume.tags['AutomatedEBSSnapshots'] not in VALID_INTERVALS:
    logger.warning(
        '"{}" is not a valid snapshotting interval for volume {}'.format(
            interval, volume.id))
    return

snapshots = connection.get_all_snapshots(filters={'volume-id': volume.id})

# Create a snapshot if we don't have any
if not snapshots:
    _create_snapshot(volume)
    return

min_delta = 3600*24*365*10  # 10 years :)
for snapshot in snapshots:
    timestamp = datetime.datetime.strptime(
        snapshot.start_time, '%Y-%m-%dT%H:%M:%S.000Z')
    delta_seconds = int(
        (datetime.datetime.utcnow() - timestamp).total_seconds())
    if delta_seconds < min_delta:
        min_delta = delta_seconds

logger.info('The newest snapshot for {} is {} seconds old'.format(
    volume.id, min_delta))

if interval == 'hourly' and min_delta > 3600:
    _create_snapshot(volume)
elif interval == 'daily' and min_delta > 3600*24:
    _create_snapshot(volume)
elif interval == 'weekly' and min_delta > 3600*24*7:
    _create_snapshot(volume)
elif interval == 'monthly' and min_delta > 3600*24*30:
    _create_snapshot(volume)
elif interval == 'yearly' and min_delta > 3600*24*365:
    _create_snapshot(volume)
else:
    logger.info('No need for a new snapshot of {}'.format(volume.id))
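As a hedged illustration of the tag this function looks for, a volume could be opted in with a boto call like the following (the volume ID is hypothetical):

volume = connection.get_all_volumes(volume_ids=['vol-12345678'])[0]
volume.add_tag('AutomatedEBSSnapshots', 'daily')  # hypothetical volume, valid interval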
<SYSTEM_TASK:> Remove old snapshots <END_TASK> <USER_TASK:> Description: def _remove_old_snapshots(connection, volume): """ Remove old snapshots :type connection: boto.ec2.connection.EC2Connection :param connection: EC2 connection object :type volume: boto.ec2.volume.Volume :param volume: Volume to check :returns: None """
if 'AutomatedEBSSnapshotsRetention' not in volume.tags:
    logger.warning(
        'Missing tag AutomatedEBSSnapshotsRetention for volume {}'.format(
            volume.id))
    return

retention = int(volume.tags['AutomatedEBSSnapshotsRetention'])
snapshots = connection.get_all_snapshots(filters={'volume-id': volume.id})

# Sort the list based on the start time
snapshots.sort(key=lambda x: x.start_time)

# Remove snapshots we want to keep
snapshots = snapshots[:-int(retention)]

if not snapshots:
    logger.info('No old snapshots to remove')
    return

for snapshot in snapshots:
    logger.info('Deleting snapshot {}'.format(snapshot.id))
    try:
        snapshot.delete()
    except EC2ResponseError as error:
        logger.warning('Could not remove snapshot: {}'.format(
            error.message))

logger.info('Done deleting snapshots')
<SYSTEM_TASK:> List watched EBS volumes <END_TASK> <USER_TASK:> Description: def list(connection): """ List watched EBS volumes :type connection: boto.ec2.connection.EC2Connection :param connection: EC2 connection object :returns: None """
volumes = get_watched_volumes(connection)

if not volumes:
    logger.info('No watched volumes found')
    return

logger.info(
    '+-----------------------'
    '+----------------------'
    '+--------------'
    '+------------+')
logger.info(
    '| {volume:<21} '
    '| {volume_name:<20.20} '
    '| {interval:<12} '
    '| {retention:<10} |'.format(
        volume='Volume ID',
        volume_name='Volume name',
        interval='Interval',
        retention='Retention'))
logger.info(
    '+-----------------------'
    '+----------------------'
    '+--------------'
    '+------------+')

for volume in volumes:
    if 'AutomatedEBSSnapshots' not in volume.tags:
        interval = 'Interval tag not found'
    elif volume.tags['AutomatedEBSSnapshots'] not in VALID_INTERVALS:
        interval = 'Invalid interval'
    else:
        interval = volume.tags['AutomatedEBSSnapshots']

    if 'AutomatedEBSSnapshotsRetention' not in volume.tags:
        retention = 0
    else:
        retention = volume.tags['AutomatedEBSSnapshotsRetention']

    # Get the volume name
    try:
        volume_name = volume.tags['Name']
    except KeyError:
        volume_name = ''

    logger.info(
        '| {volume_id:<14} '
        '| {volume_name:<20.20} '
        '| {interval:<12} '
        '| {retention:<10} |'.format(
            volume_id=volume.id,
            volume_name=volume_name,
            interval=interval,
            retention=retention))

logger.info(
    '+-----------------------'
    '+----------------------'
    '+--------------'
    '+------------+')
<SYSTEM_TASK:> Remove watching of a volume <END_TASK> <USER_TASK:> Description: def unwatch(connection, volume_id): """ Remove watching of a volume :type connection: boto.ec2.connection.EC2Connection :param connection: EC2 connection object :type volume_id: str :param volume_id: VolumeID to add to the watchlist :returns: bool - True if the watch was successful """
try:
    volume = connection.get_all_volumes(volume_ids=[volume_id])[0]
    volume.remove_tag('AutomatedEBSSnapshots')
except EC2ResponseError:
    pass

logger.info('Removed {} from the watchlist'.format(volume_id))
return True
<SYSTEM_TASK:> Get Volume ID from the given volume. Input can be volume id <END_TASK> <USER_TASK:> Description: def get_volume_id(connection, volume): """ Get Volume ID from the given volume. Input can be volume id or its Name tag. :type connection: boto.ec2.connection.EC2Connection :param connection: EC2 connection object :type volume: str :param volume: Volume ID or Volume Name :returns: Volume ID or None if the given volume does not exist """
# Regular expression to check whether input is a volume id
volume_id_pattern = re.compile(r'vol-\w{8}')  # raw string avoids an invalid-escape warning

if volume_id_pattern.match(volume):
    # input is volume id
    try:
        # Check whether it exists
        connection.get_all_volumes(volume_ids=[volume])
        volume_id = volume
    except EC2ResponseError:
        logger.warning('Volume {} not found'.format(volume))
        return None
else:
    # input is volume name
    name_filter = {'tag-key': 'Name', 'tag-value': volume}
    volumes = connection.get_all_volumes(filters=name_filter)
    if not volumes:
        logger.warning('Volume {} not found'.format(volume))
        return None
    if len(volumes) > 1:
        logger.warning('Volume {} not unique'.format(volume))
    volume_id = volumes[0].id

return volume_id
<SYSTEM_TASK:> List all snapshots for the volume <END_TASK> <USER_TASK:> Description: def list_snapshots(connection, volume): """ List all snapshots for the volume :type connection: boto.ec2.connection.EC2Connection :param connection: EC2 connection object :type volume: str :param volume: Volume ID or Volume Name :returns: None """
logger.info(
    '+----------------'
    '+----------------------'
    '+---------------------------+')
logger.info(
    '| {snapshot:<14} '
    '| {snapshot_name:<20.20} '
    '| {created:<25} |'.format(
        snapshot='Snapshot ID',
        snapshot_name='Snapshot name',
        created='Created'))
logger.info(
    '+----------------'
    '+----------------------'
    '+---------------------------+')

vid = get_volume_id(connection, volume)
if vid:
    vol = connection.get_all_volumes(volume_ids=[vid])[0]
    for snap in vol.snapshots():
        logger.info(
            '| {snapshot:<14} '
            '| {snapshot_name:<20.20} '
            '| {created:<25} |'.format(
                snapshot=snap.id,
                snapshot_name=snap.tags.get('Name', ''),
                created=snap.start_time))

logger.info(
    '+----------------'
    '+----------------------'
    '+---------------------------+')
<SYSTEM_TASK:> Get stems for the words using a given parser <END_TASK> <USER_TASK:> Description: def stem(self, words, parser, **kwargs): """ Get stems for the words using a given parser Example: from .parsing import ListParser parser = ListParser() stemmer = Morfologik() stemmer.stem(['ja tańczę a ona śpi], parser) [ ('ja': ['ja']), ('tańczę': ['tańczyć']), ('a': ['a']), ('ona': ['on']), ('śpi': ['spać']) ] """
output = self._run_morfologik(words)
return parser.parse(output, **kwargs)
<SYSTEM_TASK:> Runs morfologik java jar and assumes that input and output is <END_TASK> <USER_TASK:> Description: def _run_morfologik(self, words): """ Runs morfologik java jar and assumes that input and output is UTF-8 encoded. """
p = subprocess.Popen(
    ['java', '-jar', self.jar_path, 'plstem', '-ie', 'UTF-8', '-oe', 'UTF-8'],
    bufsize=-1,
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT)
out, _ = p.communicate(input=bytes("\n".join(words), "utf-8"))
return decode(out, 'utf-8')
<SYSTEM_TASK:> Reads the showcase given by identifier from HDX and returns Showcase object <END_TASK> <USER_TASK:> Description: def read_from_hdx(identifier, configuration=None): # type: (str, Optional[Configuration]) -> Optional['Showcase'] """Reads the showcase given by identifier from HDX and returns Showcase object Args: identifier (str): Identifier of showcase configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[Showcase]: Showcase object if successful read, None if not """
showcase = Showcase(configuration=configuration)
result = showcase._load_from_hdx('showcase', identifier)
if result:
    return showcase
return None
<SYSTEM_TASK:> Add a dataset <END_TASK> <USER_TASK:> Description: def add_dataset(self, dataset, datasets_to_check=None): # type: (Union[hdx.data.dataset.Dataset,Dict,str], List[hdx.data.dataset.Dataset]) -> bool """Add a dataset Args: dataset (Union[Dataset,Dict,str]): Either a dataset id or dataset metadata either from a Dataset object or a dictionary datasets_to_check (List[Dataset]): List of datasets against which to check existence of dataset. Defaults to datasets in showcase. Returns: bool: True if the dataset was added, False if already present """
showcase_dataset = self._get_showcase_dataset_dict(dataset)
if datasets_to_check is None:
    datasets_to_check = self.get_datasets()
for dataset in datasets_to_check:
    if showcase_dataset['package_id'] == dataset['id']:
        return False
self._write_to_hdx('associate', showcase_dataset, 'package_id')
return True
<SYSTEM_TASK:> Add multiple datasets <END_TASK> <USER_TASK:> Description: def add_datasets(self, datasets, datasets_to_check=None): # type: (List[Union[hdx.data.dataset.Dataset,Dict,str]], List[hdx.data.dataset.Dataset]) -> bool """Add multiple datasets Args: datasets (List[Union[Dataset,Dict,str]]): A list of either dataset ids or dataset metadata from Dataset objects or dictionaries datasets_to_check (List[Dataset]): List of datasets against which to check existence of dataset. Defaults to datasets in showcase. Returns: bool: True if all datasets added or False if any already present """
if datasets_to_check is None:
    datasets_to_check = self.get_datasets()
alldatasetsadded = True
for dataset in datasets:
    if not self.add_dataset(dataset, datasets_to_check=datasets_to_check):
        alldatasetsadded = False
return alldatasetsadded
<SYSTEM_TASK:> Join values and convert to string <END_TASK> <USER_TASK:> Description: def join(self, joiner, formatter=lambda s, t: t.format(s), template="{}"): """Join values and convert to string Example: >>> from ww import l >>> lst = l('012') >>> lst.join(',') u'0,1,2' >>> lst.join(',', template="{}#") u'0#,1#,2#' >>> string = lst.join(',',\ formatter = lambda x, y: str(int(x) ** 2)) >>> string u'0,1,4' """
return ww.s(joiner).join(self, formatter, template)
<SYSTEM_TASK:> Append values at the end of the list <END_TASK> <USER_TASK:> Description: def append(self, *values): """Append values at the end of the list Allow chaining. Args: values: values to be appened at the end. Example: >>> from ww import l >>> lst = l([]) >>> lst.append(1) [1] >>> lst [1] >>> lst.append(2, 3).append(4,5) [1, 2, 3, 4, 5] >>> lst [1, 2, 3, 4, 5] """
for value in values:
    list.append(self, value)
return self
<SYSTEM_TASK:> Add all values of all iterables at the end of the list <END_TASK> <USER_TASK:> Description: def extend(self, *iterables): """Add all values of all iterables at the end of the list Args: iterables: iterable which content to add at the end Example: >>> from ww import l >>> lst = l([]) >>> lst.extend([1, 2]) [1, 2] >>> lst [1, 2] >>> lst.extend([3, 4]).extend([5, 6]) [1, 2, 3, 4, 5, 6] >>> lst [1, 2, 3, 4, 5, 6] """
for value in iterables:
    list.extend(self, value)
return self
<SYSTEM_TASK:> Process value for writing into a cell. <END_TASK> <USER_TASK:> Description: def normalize_cell_value(value): """Process value for writing into a cell. Args: value: any type of variable Returns: json serialized value if value is list or dict, else value """
if isinstance(value, dict) or isinstance(value, list):
    return json.dumps(value)
return value
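A quick demonstration of the two paths through this helper:

assert normalize_cell_value({'a': 1}) == '{"a": 1}'
assert normalize_cell_value([1, 2]) == '[1, 2]'
assert normalize_cell_value(42) == 42  # scalars pass through unchanged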
<SYSTEM_TASK:> Read addresses from input file into list of tuples. <END_TASK> <USER_TASK:> Description: def get_addresses_from_input_file(input_file_name): """Read addresses from input file into list of tuples. This only supports address and zipcode headers """
mode = 'r'
if sys.version_info[0] < 3:
    mode = 'rb'
with io.open(input_file_name, mode) as input_file:
    reader = csv.reader(input_file, delimiter=',', quotechar='"')
    addresses = list(map(tuple, reader))
if len(addresses) == 0:
    raise Exception('No addresses found in input file')
header_columns = list(column.lower() for column in addresses.pop(0))
try:
    address_index = header_columns.index('address')
    zipcode_index = header_columns.index('zipcode')
except ValueError:
    raise Exception("""The first row of the input CSV must be a header that contains \
a column labeled 'address' and a column labeled 'zipcode'.""")
return list((row[address_index], row[zipcode_index]) for row in addresses)
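A hedged sketch of an input file this reader accepts; extra columns are tolerated because the address and zipcode values are picked out by header index:

# input.csv (hypothetical contents):
#   address,zipcode
#   "43 Valmonte Plaza",90274
#   "244 S Altadena Dr",91107
addresses = get_addresses_from_input_file('input.csv')
# -> [('43 Valmonte Plaza', '90274'), ('244 S Altadena Dr', '91107')]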
<SYSTEM_TASK:> Read identifiers from input file into list of dicts with the header row values <END_TASK> <USER_TASK:> Description: def get_identifiers_from_input_file(input_file_name): """Read identifiers from input file into list of dicts with the header row values as keys, and the rest of the rows as values. """
valid_identifiers = ['address', 'zipcode', 'unit', 'city', 'state', 'slug', 'block_id',
                     'msa', 'num_bins', 'property_type', 'client_value',
                     'client_value_sqft', 'meta']
mode = 'r'
if sys.version_info[0] < 3:
    mode = 'rb'
with io.open(input_file_name, mode) as input_file:
    result = [{identifier: val for identifier, val in list(row.items())
               if identifier in valid_identifiers}
              for row in csv.DictReader(input_file, skipinitialspace=True)]
return result
<SYSTEM_TASK:> For a json response, check if there was any error and throw exception. <END_TASK> <USER_TASK:> Description: def process_json_response(self, response): """For a json response, check if there was any error and throw exception. Otherwise, create a housecanary.response.Response."""
response_json = response.json()

# handle errors
code_key = "code"
if code_key in response_json and response_json[code_key] != constants.HTTP_CODE_OK:
    code = response_json[code_key]
    message = response_json
    if "message" in response_json:
        message = response_json["message"]
    elif "code_description" in response_json:
        message = response_json["code_description"]

    if code == constants.HTTP_FORBIDDEN:
        raise housecanary.exceptions.UnauthorizedException(code, message)
    if code == constants.HTTP_TOO_MANY_REQUESTS:
        raise housecanary.exceptions.RateLimitException(code, message, response)
    else:
        raise housecanary.exceptions.RequestException(code, message)

request_url = response.request.url
endpoint_name = self._parse_endpoint_name_from_url(request_url)

return Response.create(endpoint_name, response_json, response)
<SYSTEM_TASK:> Find short paths of given paths. <END_TASK> <USER_TASK:> Description: def _find_short_paths(self, paths): """ Find short paths of given paths. E.g. if both `/home` and `/home/aoik` exist, only keep `/home`. :param paths: Paths. :return: Set of short paths. """
# Split each path to parts.
# E.g. '/home/aoik' to ['', 'home', 'aoik']
path_parts_s = [path.split(os.path.sep) for path in paths]

# Root node
root_node = {}

# Sort these path parts by length, with the longest being the first.
#
# Longer paths appear first so that their extra parts are discarded
# when a shorter path is found at 5TQ8L.
#
# Then for each path's parts.
for parts in sorted(path_parts_s, key=len, reverse=True):
    # Start from the root node
    node = root_node

    # For each part of the path
    for part in parts:
        # Create node of the path
        node = node.setdefault(part, {})

    # 5TQ8L
    # Clear the last path part's node's child nodes.
    #
    # This aims to keep only the shortest path that needs be watched.
    #
    node.clear()

# Short paths
short_path_s = set()

# Collect leaf paths
self._collect_leaf_paths(
    node=root_node,
    path_parts=(),
    leaf_paths=short_path_s,
)

# Return short paths
return short_path_s
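The trie idea can be seen in isolation with this standalone sketch (a plain function, not part of the class; the assert assumes POSIX-style separators):

import os

def shortest_cover(paths):
    root = {}
    for parts in sorted((p.split(os.path.sep) for p in paths), key=len, reverse=True):
        node = root
        for part in parts:
            node = node.setdefault(part, {})
        node.clear()  # a shorter path subsumes everything below it
    out = set()

    def walk(node, parts):
        if not node:
            out.add(os.path.sep.join(parts))
        for part, child in node.items():
            walk(child, parts + (part,))

    walk(root, ())
    return out

assert shortest_cover({'/home', '/home/aoik'}) == {'/home'}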
<SYSTEM_TASK:> Collect paths of leaf nodes. <END_TASK> <USER_TASK:> Description: def _collect_leaf_paths(self, node, path_parts, leaf_paths): """ Collect paths of leaf nodes. :param node: Starting node. Type is dict. Key is child node's path part. Value is child node. :param path_parts: The starting node's path parts. Type is tuple. :param leaf_paths: Leaf path list. :return: None. """
# If the node is leaf node
if not node:
    # Get node path
    node_path = '/'.join(path_parts)

    # Add to list
    leaf_paths.add(node_path)

# If the node is not leaf node
else:
    # For each child node
    for child_path_part, child_node in node.items():
        # Get the child node's path parts
        child_path_part_s = path_parts + (child_path_part,)

        # Visit the child node
        self._collect_leaf_paths(
            node=child_node,
            path_parts=child_path_part_s,
            leaf_paths=leaf_paths,
        )
<SYSTEM_TASK:> Dispatch file system event. <END_TASK> <USER_TASK:> Description: def dispatch(self, event): """ Dispatch file system event. Callback called when there is a file system event. Hooked at 2KGRW. This function overrides `FileSystemEventHandler.dispatch`. :param event: File system event object. :return: None. """
# Get file path
file_path = event.src_path

# If the file path is in extra paths
if file_path in self._extra_paths:
    # Call `reload`
    self.reload()

# If the file path ends with `.pyc` or `.pyo`
if file_path.endswith(('.pyc', '.pyo')):
    # Get `.py` file path
    file_path = file_path[:-1]

# If the file path ends with `.py`
if file_path.endswith('.py'):
    # Get the file's directory path
    file_dir = os.path.dirname(file_path)

    # If the file's directory path starts with any of the watch paths
    if file_dir.startswith(tuple(self._watch_paths)):
        # Call `reload`
        self.reload()
<SYSTEM_TASK:> Reload the program. <END_TASK> <USER_TASK:> Description: def reload(self): """ Reload the program. :return: None. """
# Get reload mode
reload_mode = self._reload_mode

# If reload mode is `exec`
if self._reload_mode == self.RELOAD_MODE_V_EXEC:
    # Call `reload_using_exec`
    self.reload_using_exec()

# If reload mode is `spawn_exit`
elif self._reload_mode == self.RELOAD_MODE_V_SPAWN_EXIT:
    # Call `reload_using_spawn_exit`
    self.reload_using_spawn_exit()

# If reload mode is `spawn_wait`
elif self._reload_mode == self.RELOAD_MODE_V_SPAWN_WAIT:
    # Call `reload_using_spawn_wait`
    self.reload_using_spawn_wait()

# If reload mode is none of above
else:
    # Get error message
    error_msg = 'Invalid reload mode: {}.'.format(repr(reload_mode))

    # Raise error
    raise ValueError(error_msg)
<SYSTEM_TASK:> Reload the program process. <END_TASK> <USER_TASK:> Description: def reload_using_exec(self): """ Reload the program process. :return: None. """
# Create command parts
cmd_parts = [sys.executable] + sys.argv

# Get env dict copy
env_copy = os.environ.copy()

# Reload the program process
os.execvpe(
    # Program file path
    sys.executable,
    # Command parts
    cmd_parts,
    # Env dict
    env_copy,
)
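The same mechanism outside the class, as a self-contained hedged sketch of an exec-based in-place restart:

import os
import sys

def restart_in_place():
    # Replace the current process image with a fresh interpreter running
    # the same command line and environment; this call does not return.
    os.execvpe(sys.executable, [sys.executable] + sys.argv, os.environ.copy())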
<SYSTEM_TASK:> Spawn a subprocess and exit the current process. <END_TASK> <USER_TASK:> Description: def reload_using_spawn_exit(self): """ Spawn a subprocess and exit the current process. :return: None. """
# Create command parts
cmd_parts = [sys.executable] + sys.argv

# Get env dict copy
env_copy = os.environ.copy()

# Spawn subprocess
subprocess.Popen(cmd_parts, env=env_copy, close_fds=True)

# If need force exit
if self._force_exit:
    # Force exit
    os._exit(0)  # pylint: disable=protected-access

# If not need force exit
else:
    # Send interrupt to main thread
    interrupt_main()

    # Set the flag
    self._watcher_to_stop = True

    # Exit the watcher thread
    sys.exit(0)
<SYSTEM_TASK:> Spawn a subprocess and wait until it finishes. <END_TASK> <USER_TASK:> Description: def reload_using_spawn_wait(self): """ Spawn a subprocess and wait until it finishes. :return: None. """
# Create command parts
cmd_parts = [sys.executable] + sys.argv

# Get env dict copy
env_copy = os.environ.copy()

# Send interrupt to main thread
interrupt_main()

# Spawn subprocess and wait until it finishes
subprocess.call(cmd_parts, env=env_copy, close_fds=True)

# Exit the watcher thread
sys.exit(0)
<SYSTEM_TASK:> Show details about a keypair resource policy. When `name` option is omitted, the <END_TASK> <USER_TASK:> Description: def resource_policy(name): """ Show details about a keypair resource policy. When `name` option is omitted, the resource policy for the current access_key will be returned. """
fields = [
    ('Name', 'name'),
    ('Created At', 'created_at'),
    ('Default for Unspecified', 'default_for_unspecified'),
    ('Total Resource Slot', 'total_resource_slots'),
    ('Max Concurrent Sessions', 'max_concurrent_sessions'),
    ('Max Containers per Session', 'max_containers_per_session'),
    ('Max vFolder Count', 'max_vfolder_count'),
    ('Max vFolder Size', 'max_vfolder_size'),
    ('Idle Timeout', 'idle_timeout'),
    ('Allowed vFolder Hosts', 'allowed_vfolder_hosts'),
]
with Session() as session:
    try:
        rp = session.ResourcePolicy(session.config.access_key)
        info = rp.info(name, fields=(item[1] for item in fields))
    except Exception as e:
        print_error(e)
        sys.exit(1)
    rows = []
    if info is None:
        print('No such resource policy.')
        sys.exit(1)
    for name, key in fields:
        rows.append((name, info[key]))
    print(tabulate(rows, headers=('Field', 'Value')))
<SYSTEM_TASK:> Run a local proxy to a service provided by Backend.AI compute sessions. <END_TASK> <USER_TASK:> Description: def app(session_id, app, bind, port): """ Run a local proxy to a service provided by Backend.AI compute sessions. The type of proxy depends on the app definition: plain TCP or HTTP. \b SESSID: The compute session ID. APP: The name of service provided by the given session. """
api_session = None
runner = None

async def app_setup():
    nonlocal api_session, runner
    loop = current_loop()
    api_session = AsyncSession()
    # TODO: generalize protocol using service ports metadata
    protocol = 'http'
    runner = ProxyRunner(api_session, session_id, app,
                         protocol, bind, port, loop=loop)
    await runner.ready()
    print_info(
        "A local proxy to the application \"{0}\" ".format(app) +
        "provided by the session \"{0}\" ".format(session_id) +
        "is available at: {0}://{1}:{2}"
        .format(protocol, bind, port)
    )

async def app_shutdown():
    nonlocal api_session, runner
    print_info("Shutting down....")
    await runner.close()
    await api_session.close()
    print_info("The local proxy to \"{}\" has terminated."
               .format(app))

asyncio_run_forever(app_setup(), app_shutdown(),
                    stop_signals={signal.SIGINT, signal.SIGTERM})
<SYSTEM_TASK:> Creates an Excel file containing data returned by the Analytics API <END_TASK> <USER_TASK:> Description: def export_analytics_data_to_excel(data, output_file_name, result_info_key, identifier_keys): """Creates an Excel file containing data returned by the Analytics API Args: data: Analytics API data as a list of dicts output_file_name: File name for output Excel file (use .xlsx extension). """
workbook = create_excel_workbook(data, result_info_key, identifier_keys)
workbook.save(output_file_name)
print('Saved Excel file to {}'.format(output_file_name))
<SYSTEM_TASK:> Creates CSV files containing data returned by the Analytics API. <END_TASK> <USER_TASK:> Description: def export_analytics_data_to_csv(data, output_folder, result_info_key, identifier_keys): """Creates CSV files containing data returned by the Analytics API. Creates one file per requested endpoint and saves it into the specified output_folder Args: data: Analytics API data as a list of dicts output_folder: Path to a folder to save the CSV files into """
workbook = create_excel_workbook(data, result_info_key, identifier_keys)

suffix = '.csv'

if not os.path.exists(output_folder):
    os.makedirs(output_folder)

for worksheet in workbook.worksheets:
    file_name = utilities.convert_title_to_snake_case(worksheet.title)
    file_path = os.path.join(output_folder, file_name + suffix)

    mode = 'w'
    if sys.version_info[0] < 3:
        mode = 'wb'

    with io.open(file_path, mode) as output_file:
        csv_writer = csv.writer(output_file)
        for row in worksheet.rows:
            csv_writer.writerow([cell.value for cell in row])

print('Saved CSV files to {}'.format(output_folder))
<SYSTEM_TASK:> Creates an Excel file made up of combining the Value Report or Rental Report Excel <END_TASK> <USER_TASK:> Description: def concat_excel_reports(addresses, output_file_name, endpoint, report_type, retry, api_key, api_secret, files_path): """Creates an Excel file made up of combining the Value Report or Rental Report Excel output for the provided addresses. Args: addresses: A list of (address, zipcode) tuples output_file_name: A file name for the Excel output endpoint: One of 'value_report' or 'rental_report' report_type: One of 'full' or 'summary' retry: optional boolean to retry if rate limit is reached api_key: optional API Key api_secret: optional API Secret files_path: Path to save individual files. If None, don't save files """
# create the master workbook to output master_workbook = openpyxl.Workbook() if api_key is not None and api_secret is not None: client = ApiClient(api_key, api_secret) else: client = ApiClient() errors = [] # for each address, call the API and load the xlsx content in a workbook. for index, addr in enumerate(addresses): print('Processing {}'.format(addr[0])) result = _get_excel_report( client, endpoint, addr[0], addr[1], report_type, retry) if not result['success']: print('Error retrieving report for {}'.format(addr[0])) print(result['content']) errors.append({'address': addr[0], 'message': result['content']}) continue orig_wb = openpyxl.load_workbook(filename=io.BytesIO(result['content'])) _save_individual_file(orig_wb, files_path, addr[0]) # for each worksheet for this address for sheet_name in orig_wb.get_sheet_names(): # if worksheet doesn't exist in master workbook, create it if sheet_name in master_workbook.get_sheet_names(): master_ws = master_workbook.get_sheet_by_name(sheet_name) else: master_ws = master_workbook.create_sheet(sheet_name) # get all the rows in the address worksheet orig_rows = orig_wb.get_sheet_by_name(sheet_name).rows if sheet_name == 'Summary' or sheet_name == 'Chart Data': _process_non_standard_sheet(master_ws, orig_rows, addr, index) continue _process_standard_sheet(master_ws, orig_rows, addr, index) # remove the first sheet which will be empty master_workbook.remove(master_workbook.worksheets[0]) # if any errors occurred, write them to an "Errors" worksheet if len(errors) > 0: errors_sheet = master_workbook.create_sheet('Errors') for error_idx, error in enumerate(errors): errors_sheet.cell(row=error_idx+1, column=1, value=error['address']) errors_sheet.cell(row=error_idx+1, column=2, value=error['message']) # save the master workbook to output_file_name adjust_column_width_workbook(master_workbook) output_file_path = os.path.join(files_path, output_file_name) master_workbook.save(output_file_path) print('Saved output to {}'.format(output_file_path))
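A hedged usage sketch for concat_excel_reports; the addresses, file names and output folder are placeholders, and passing None for the API credentials relies on the client's own credential fallback.

addresses = [
    ("43 Valmonte Plaza", "90274"),
    ("244 S Altadena Dr", "91107"),
]

concat_excel_reports(
    addresses,
    output_file_name="value_reports.xlsx",
    endpoint="value_report",
    report_type="summary",
    retry=True,
    api_key=None,          # None falls back to the ApiClient's configured credentials
    api_secret=None,
    files_path="reports",  # individual per-address workbooks are also saved here
)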
<SYSTEM_TASK:> Calls the analytics_data_excel module to create the Workbook <END_TASK> <USER_TASK:> Description: def create_excel_workbook(data, result_info_key, identifier_keys): """Calls the analytics_data_excel module to create the Workbook"""
workbook = analytics_data_excel.get_excel_workbook(data, result_info_key, identifier_keys) adjust_column_width_workbook(workbook) return workbook
<SYSTEM_TASK:> Adjust column width in worksheet. <END_TASK> <USER_TASK:> Description: def adjust_column_width(worksheet): """Adjust column width in worksheet. Args: worksheet: worksheet to be adjusted """
dims = {} padding = 1 for row in worksheet.rows: for cell in row: if not cell.value: continue dims[cell.column] = max( dims.get(cell.column, 0), len(str(cell.value)) ) for col, value in list(dims.items()): worksheet.column_dimensions[col].width = value + padding
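The adjust_column_width_workbook helper called by the functions above is not shown in this section. A plausible sketch, assuming it simply applies adjust_column_width to every worksheet:

def adjust_column_width_workbook(workbook):
    """Apply adjust_column_width to every worksheet in the workbook (sketch)."""
    for worksheet in workbook.worksheets:
        adjust_column_width(worksheet)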
<SYSTEM_TASK:>
Function takes input of ipaddress to issue a RESTful call to HP IMC
<END_TASK>
<USER_TASK:>
Description:
def get_ap_info(ipaddress, auth, url):
    """
    Function takes input of ipaddress to issue a RESTful call to HP IMC

    :param ipaddress: The current IP address of the Access Point at time of query.

    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class

    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass

    :return: Dictionary object with the details of the target access point

    :rtype: dict

    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.wsm.apinfo import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> ap_info = get_ap_info('10.101.0.170',auth.creds, auth.url)
    >>> assert type(ap_info) is dict
    >>> assert len(ap_info) == 20
    >>> assert 'acDevId' in ap_info
    >>> assert 'acIpAddress' in ap_info
    >>> assert 'acLabel' in ap_info
    >>> assert 'apAlias' in ap_info
    >>> assert 'connectType' in ap_info
    >>> assert 'hardwareVersion' in ap_info
    >>> assert 'ipAddress' in ap_info
    >>> assert 'isFit' in ap_info
    >>> assert 'label' in ap_info
    >>> assert 'location' in ap_info
    >>> assert 'locationList' in ap_info
    >>> assert 'macAddress' in ap_info
    >>> assert 'onlineClientCount' in ap_info
    >>> assert 'serialId' in ap_info
    >>> assert 'softwareVersion' in ap_info
    >>> assert 'ssids' in ap_info
    >>> assert 'status' in ap_info
    >>> assert 'sysName' in ap_info
    >>> assert 'type' in ap_info
    """
get_ap_info_url = "/imcrs/wlan/apInfo/queryApBasicInfoByCondition?ipAddress=" + str(ipaddress)
    f_url = url + get_ap_info_url
    try:
        # Issue the GET request against the composed URL
        r = requests.get(f_url, auth=auth, headers=HEADERS)
        if r.status_code == 200:
            if len(r.text) > 0:
                return json.loads(r.text)['apBasicInfo']
    except requests.exceptions.RequestException as e:
        return "Error:\n" + str(e) + " get_ap_info: An Error has occurred"
<SYSTEM_TASK:> Gets a list of business error message strings <END_TASK> <USER_TASK:> Description: def get_object_errors(self): """Gets a list of business error message strings for each of the requested objects that had a business error. If there was no error, returns an empty list Returns: List of strings """
if self._object_errors is None: self._object_errors = [{str(o): o.get_errors()} for o in self.objects() if o.has_error()] return self._object_errors
<SYSTEM_TASK:> Returns true if any requested object had a business logic error, <END_TASK> <USER_TASK:> Description: def has_object_error(self): """Returns true if any requested object had a business logic error, otherwise returns false Returns: boolean """
if self._has_object_error is None: # scan the objects for any business error codes self._has_object_error = next( (True for o in self.objects() if o.has_error()), False) return self._has_object_error
<SYSTEM_TASK:>
Function takes input of an auth object and URL and returns a BOOL of True
<END_TASK>
<USER_TASK:>
Description:
def check_imc_creds(auth, url):
    """Function takes input of an auth object and URL and returns a BOOL of True
    if the authentication was successful.

    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")

    >>> check_imc_creds(auth.creds, auth.url)
    True
    """
test_url = '/imcrs'
    f_url = url + test_url
    try:
        response = requests.get(f_url, auth=auth, headers=HEADERS, verify=False)
        return bool(response.status_code == 200)
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " check_imc_creds: An Error has occurred"
<SYSTEM_TASK:> Get given virtual environment's `python` program path. <END_TASK> <USER_TASK:> Description: def get_python_path(venv_path): """ Get given virtual environment's `python` program path. :param venv_path: Virtual environment directory path. :return: `python` program path. """
# Get `bin` directory path bin_path = get_bin_path(venv_path) # Get `python` program path program_path = os.path.join(bin_path, 'python') # If the platform is Windows if sys.platform.startswith('win'): # Add `.exe` suffix to the `python` program path program_path = program_path + '.exe' # Return the `python` program path return program_path
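A small usage sketch for get_python_path; the `.venv` directory name is a placeholder, and the exact bin directory comes from get_bin_path, which is not shown here.

venv_python = get_python_path('.venv')
# Typically '.venv/bin/python' on POSIX or '.venv\\Scripts\\python.exe' on Windows,
# depending on what get_bin_path returns for the platform.
print(venv_python)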
<SYSTEM_TASK:> Prepend given path to environment variable PYTHONPATH. <END_TASK> <USER_TASK:> Description: def add_pythonpath(path): """ Prepend given path to environment variable PYTHONPATH. :param path: Path to add to PYTHONPATH. :return: New PYTHONPATH value. """
# Get PYTHONPATH value. Default is empty string. pythonpath = os.environ.setdefault('PYTHONPATH', '') # If given path is not in PYTHONPATH if path not in pythonpath.split(os.pathsep): # Prepend given path to PYTHONPATH pythonpath = os.environ['PYTHONPATH'] = \ (path + os.pathsep + pythonpath) if pythonpath else path # Return new PYTHONPATH value return pythonpath
<SYSTEM_TASK:> Wrap given path as relative path relative to top directory. <END_TASK> <USER_TASK:> Description: def mark_path(path): """ Wrap given path as relative path relative to top directory. Wrapper object will be handled specially in \ :paramref:`create_cmd_task.parts`. :param path: Relative path relative to top directory. :return: Wrapper object. """
# If given path is not string, # or given path is absolute path. if not isinstance(path, str) or os.path.isabs(path): # Get error message msg = 'Error (2D9ZA): Given path is not relative path: {0}.'.format( path ) # Raise error raise ValueError(msg) # If given path is string, # and given path is not absolute path. # Wrap given path return _ItemWrapper(type='path', item=path)
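A brief sketch of how the wrapper behaves: relative paths are accepted and wrapped, absolute paths are rejected.

# Relative paths are accepted and wrapped for later node resolution.
wrapped = mark_path('src/setup.py')

# Absolute paths raise a ValueError.
try:
    mark_path('/abs/path/setup.py')
except ValueError as exc:
    print(exc)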
<SYSTEM_TASK:> Wrap given item as input or output target that should be added to task. <END_TASK> <USER_TASK:> Description: def _mark_target(type, item): """ Wrap given item as input or output target that should be added to task. Wrapper object will be handled specially in \ :paramref:`create_cmd_task.parts`. :param type: Target type. Allowed values: - 'input' - 'output' :param item: Item to mark as input or output target. Allowed values: - Relative path relative to top directory. - Node object. - List of these. :return: Wrapper object. """
# If given type is not valid if type not in ('input', 'output'): # Get error message msg = 'Error (7D74X): Type is not valid: {0}'.format(type) # Raise error raise ValueError(msg) # If given type is valid. # Store given item orig_item = item # If given path is list if isinstance(item, list): # Use it as items list item_s = item # If given path is not list else: # Create items list containing given path item_s = [item] # For the items list's each item for item in item_s: # If the item is string, # and the item is absolute path. if isinstance(item, str) and os.path.isabs(item): # Get error message msg = ( 'Error (5VWOZ): Given path is not relative path: {0}.' ).format(item) # Raise error raise ValueError(msg) # Wrap given item return _ItemWrapper(type=type, item=orig_item)
<SYSTEM_TASK:> Normalize given items. <END_TASK> <USER_TASK:> Description: def _normalize_items( ctx, items, str_to_node=False, node_to_str=False, allow_task=False, ): """ Normalize given items. Do several things: - Ignore None. - Flatten list. - Unwrap wrapped item in `_ItemWrapper`. :param ctx: BuildContext object. :param items: Items list to normalize. :param str_to_node: Convert string to node. :param node_to_str: Convert node to absolute path. :param allow_task: Whether allow task item. :return: Normalized tuples list. Tuple format is: :: ( normalized_item, # Normalized item. wrapper_type, # Original `_ItemWrapper` type. ) """
# Ensure given context object is BuildContext object _ensure_build_context(ctx) # Normalized tuples list norm_tuple_s = [] # If given items list is empty if not items: # Return empty list return norm_tuple_s # If given items list is not empty. # For given items list's each item for item in items: # If the item is item wrapper if isinstance(item, _ItemWrapper): # Get wrapper type wrapper_type = item.type() # Get real item item = item.item() # If the item is not item wrapper else: # Set wrapper type be None wrapper_type = None # Use the item as real item item = item # If the real item is list if isinstance(item, list): # Use the real item as real items list real_item_s = item # If the real item is not list else: # Create real items list containing the real item real_item_s = [item] # For each real item for real_item in real_item_s: # If the real item is None if real_item is None: # Ignore None continue # If the real item is not None. # If the real item is string elif isinstance(real_item, str): # If need convert string to node if (wrapper_type is not None) or str_to_node: # If the path string is absolute path if os.path.isabs(real_item): # Get error message msg = ( 'Error (7MWU9): Given path is not relative path:' ' {0}.' ).format(real_item) # Raise error raise ValueError(msg) # If the path string is not absolute path. # Create node as normalized item norm_item = create_node(ctx, real_item) # If need convert node to absolute path if node_to_str: # Convert the node to absolute path norm_item = norm_item.abspath() # If not need convert string to node else: # Use the string as normalized item norm_item = real_item # Create normalized tuple norm_tuple = (norm_item, wrapper_type) # If the real item is not string. # If the real item is node elif isinstance(real_item, Node): # If need convert node to absolute path if node_to_str: # Convert the node to absolute path real_item = real_item.abspath() # Create normalized tuple norm_tuple = (real_item, wrapper_type) # If the real item is not node. # If the real item is task elif isinstance(real_item, Task): # If allow task item if allow_task: # Create normalized tuple norm_tuple = (real_item, wrapper_type) # If not allow task item else: # Get error message msg = 'Error (6PVMG): Item type is not valid: {0}.'.format( real_item ) # Raise error raise ValueError(msg) # If the real item is not task. # If the real item is not None, string, node, or task else: # Get error message msg = 'Error (63KUG): Item type is not valid: {0}.'.format( real_item ) # Raise error raise ValueError(msg) # Add the normalized tuple to list norm_tuple_s.append(norm_tuple) # Return the normalized tuples list return norm_tuple_s
<SYSTEM_TASK:> Update touch file at given path. <END_TASK> <USER_TASK:> Description: def update_touch_file( ctx, path, check_import=False, check_import_module=None, check_import_python=None, always=False, ): """ Update touch file at given path. Do two things: - Create touch file if it not exists. - Update touch file if import checking fails. The returned touch file node is used as task's output target for dirty checking. Task will run if the touch file changes. :param ctx: BuildContext instance. :param path: Touch file relative path relative to top directory. :param check_import: Whether import module for dirty checking. :param check_import_module: Module name to import for dirty checking. :param check_import_python: Python program to use for dirty checking. :param always: Whether always run. :return: A two-item tuple. Tuple format is: :: ( touch_file_node, # Touch file node. task_needs_run, # Whether task needs run. ) """
# Ensure given context object is BuildContext object _ensure_build_context(ctx) # Print title print_title('Update touch file: {}'.format(path)) # Create touch node touch_node = create_node(ctx, path) # Whether task needs run need_run = False # If the touch file not exists, # or `always` flag is on. if not touch_node.exists() or always: # Set `need_run` flag on need_run = True # If the touch file exists, # and `always` flag is off. else: # If need import module for dirty checking, # and module name to import is given. if check_import and check_import_module: # Get import statement. # Notice `from` import ensures the imported module is not imported # as `__main__` module. And `__name__` exists in any module. import_stmt = 'from {} import __name__'.format(check_import_module) # Print info print_text('Check import: {}'.format(import_stmt)) # If Python program to check import is not given if check_import_python is None: # Get error message msg = ( 'Error (3BKFW): Python program to check import is not' ' given.' ) # Raise error raise ValueError(msg) # If Python program to check import is given. # Normalize given Python program path check_import_python, _ = _normalize_items( ctx=ctx, items=[check_import_python], # Convert node to absolute path node_to_str=True, )[0] # If the Python program path is not string if not isinstance(check_import_python, str): # Get error message msg = ( 'Error (39FQE): Given Python program to check import is' ' not string or node: {0}.' ).format(check_import_python) # Raise error raise ValueError(msg) # If the Python program path is string. # If the Python program path is not absolute path if not os.path.isabs(check_import_python): # Convert the Python program path to absolute path check_import_python = \ create_node(ctx, check_import_python).abspath() # The Python program path is absolute path now. # Get command parts cmd_part_s = [ # Python program absolute path check_import_python, # Run code '-c', # Code to run import_stmt ] # Print the command in multi-line format print_text(_format_multi_line_command(cmd_part_s)) # try: # Run the command subprocess.check_output(cmd_part_s) # If not have error, # it means the module can be imported. # Set `need_run` flag off. need_run = False # If have error, # it means the module can not be imported. # # Notice the program may not exist. So catch general exception. except Exception: # pylint: disable=W0703 # Set `need_run` flag on need_run = True # If task needs run if need_run: # If the touch file's parent directory not exists if not touch_node.parent.exists(): # Create the touch file's parent directory touch_node.parent.mkdir() # Write current time to the touch file to force content change. # This will fail dirty-checking and cause task to run. touch_node.write('{0}\n'.format(datetime.utcnow())) # Print info print_text('Updated.') # If task not needs run else: # Print info print_text('Skipped.') # Print end title print_title('Update touch file: {}'.format(path), is_end=True) # Return a two-item tuple return touch_node, need_run
<SYSTEM_TASK:> Chain given tasks. Set each task to run after its previous task. <END_TASK> <USER_TASK:> Description: def chain_tasks(tasks): """ Chain given tasks. Set each task to run after its previous task. :param tasks: Tasks list. :return: Given tasks list. """
# If given tasks list is not empty if tasks: # Previous task previous_task = None # For given tasks list's each task for task in tasks: # If the task is not None. # Task can be None to allow code like ``task if _PY2 else None``. if task is not None: # If previous task is not None if previous_task is not None: # Set the task to run after the previous task task.set_run_after(previous_task) # Set the task as previous task for the next task previous_task = task # Return given tasks list. return tasks
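A usage sketch for chain_tasks, assuming task_a, task_b and task_c are waf Task objects created earlier (for example by create_cmd_task).

# task_a, task_b and task_c are assumed pre-existing Task objects.
chain_tasks([task_a, task_b, task_c])
# Equivalent to:
#   task_b.set_run_after(task_a)
#   task_c.set_run_after(task_b)
# None entries are allowed in the list and are simply skipped.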
<SYSTEM_TASK:> Decorator that makes decorated function use BuildContext instead of \ <END_TASK> <USER_TASK:> Description: def build_ctx(pythonpath=None): """ Decorator that makes decorated function use BuildContext instead of \ Context instance. BuildContext instance has more methods. :param pythonpath: Path or list of paths to add to environment variable PYTHONPATH. Each path can be absolute path, or relative path relative to top directory. Notice if this decorator is used without arguments, argument `pythonpath` is the decorated function. :return: Two situations: - If decorator arguments are given, return no-argument decorator. - If decorator arguments are not given, return wrapper function. """
# If argument `pythonpath` is string if isinstance(pythonpath, str): # Create paths list containing the string path_s = [pythonpath] # If argument `pythonpath` is list elif isinstance(pythonpath, list): # Use the list as paths list path_s = pythonpath # If argument `pythonpath` is not string or list, # it means the decorator is used without arguments. else: # Set paths list be None path_s = None # Create no-argument decorator def _noarg_decorator(func): """ No-argument decorator. :param func: Decorated function. :return: Wrapper function. """ # Create BuildContext subclass class _BuildContext(BuildContext): # Set command name for the context class cmd = func.__name__ # Set function name for the context class fun = func.__name__ # Create wrapper function @wraps(func) def _new_func(ctx, *args, **kwargs): """ Wrapper function. :param ctx: BuildContext object. :param \\*args: Other arguments passed to decorated function. :param \\*\\*kwargs: Other keyword arguments passed to decorated function. :return: Decorated function's call result. """ # If paths list is not empty if path_s: # For each path for path in path_s: # If the path is absolute path if os.path.isabs(path): # Use the path as absolute path abs_path = path # If the path is not absolute path, # it means relative path relative to top directory. else: # Create path node path_node = create_node(ctx, path) # Get absolute path abs_path = path_node.abspath() # Add the absolute path to environment variable PYTHONPATH add_pythonpath(abs_path) # Call the decorated function result = func(ctx, *args, **kwargs) # Return the call result return result # Store the created context class with the wrapper function _new_func._context_class = _BuildContext # pylint: disable=W0212 # Return the wrapper function return _new_func # If decorator arguments are given if path_s is not None: # Return no-argument decorator return _noarg_decorator # If decorator arguments are not given else: # Argument `pythonpath` is the decorated function _func = pythonpath # Call the no-argument decorator to create wrapper function wrapper_func = _noarg_decorator(_func) # Return the wrapper function return wrapper_func
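A sketch of both decorator forms as they might appear in a wscript file; the command names setup_dev and lint and the src path are placeholders.

# With arguments: prepend `src` (relative to the top directory) to PYTHONPATH.
@build_ctx(pythonpath='src')
def setup_dev(ctx):
    # `ctx` is a BuildContext here, so node helpers such as ctx.path are available.
    print(ctx.path.abspath())

# Without arguments: the decorated function itself is passed in as `pythonpath`.
@build_ctx
def lint(ctx):
    print(ctx.path.abspath())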
<SYSTEM_TASK:> Decorator that makes decorated function use ConfigurationContext instead \ <END_TASK> <USER_TASK:> Description: def config_ctx(func): """ Decorator that makes decorated function use ConfigurationContext instead \ of Context instance. :param func: Decorated function. :return: Decorated function. """
# Create ConfigurationContext subclass class _ConfigurationContext(ConfigurationContext): # Set command name for the context class cmd = func.__name__ # Set function name for the context class fun = func.__name__ # Store the created context class with the decorated function func._context_class = _ConfigurationContext # pylint: disable=W0212 # Return the decorated function return func
<SYSTEM_TASK:> Create task that sets up `virtualenv` package. <END_TASK> <USER_TASK:> Description: def virtualenv_setup( ctx, python, inputs=None, outputs=None, touch=None, check_import=False, pip_setup_file=None, pip_setup_touch=None, cache_key=None, always=False, ): """ Create task that sets up `virtualenv` package. :param ctx: BuildContext object. :param python: Python program path. :param inputs: Input items list to add to created task. See :paramref:`create_cmd_task.inputs` for allowed item types. :param outputs: Output items list to add to created task. See :paramref:`create_cmd_task.outputs` for allowed item types. :param touch: Touch file path for dirty checking. :param check_import: Whether import module for dirty checking. :param pip_setup_file: `get-pip.py` file path for `pip_setup` task. :param pip_setup_touch: Touch file path for `pip_setup` task. :param cache_key: Task cache key. :param always: Whether always run. :return: Created task. """
# Ensure given context object is BuildContext object _ensure_build_context(ctx) # If `get-pip.py` file path is not given if pip_setup_file is None: # Not create task that sets up `pip` pip_setup_task = None # If `get-pip.py` file path is given else: # Create task that sets up `pip` pip_setup_task = pip_setup( # Context ctx=ctx, # Python program path python=python, # `get-pip.py` file path setup_file=pip_setup_file, # Touch file path touch=pip_setup_touch, # Whether import module for dirty checking check_import=check_import, # Whether always run always=always, ) # If touch file path is not given if touch is None: # Not update touch file touch_node = None else: # Update touch file touch_node, always = update_touch_file( # Context ctx=ctx, # Touch file path path=touch, # Whether import module for dirty checking check_import=check_import, # Module name to import for dirty checking check_import_module='virtualenv', # Python program path for dirty checking check_import_python=python, # Whether always run always=always, ) # Create task that sets up `virtualenv` package task = create_cmd_task( # Context ctx=ctx, # Command parts parts=[ # Python program path python, # Run module '-m', # Module name 'pip', # Install package 'install', # Package name 'virtualenv', ], # Input items list inputs=[ # Run after the task that sets up `pip` pip_setup_task, # Given input items list inputs, ], # Output items list outputs=[ # Use the touch node as output target for dirty checking touch_node, # Given output items list outputs, ], # Whether always run always=always, # Task cache key cache_key=cache_key or (python, 'virtualenv'), ) # Return the created task return task
<SYSTEM_TASK:> Create task that sets up virtual environment. <END_TASK> <USER_TASK:> Description: def create_venv( ctx, python, venv_path, inputs=None, outputs=None, pip_setup_file=None, pip_setup_touch=None, virtualenv_setup_touch=None, task_name=None, cache_key=None, always=False, ): """ Create task that sets up virtual environment. :param ctx: BuildContext object. :param python: Python program path. :param venv_path: Virtual environment directory relative path relative to top directory. :param inputs: Input items list to add to created task. See :paramref:`create_cmd_task.inputs` for allowed item types. :param outputs: Output items list to add to created task. See :paramref:`create_cmd_task.outputs` for allowed item types. :param pip_setup_file: `get-pip.py` file path for `pip_setup` task. :param pip_setup_touch: Touch file path for `pip_setup` task. :param virtualenv_setup_touch: Touch file path for `virtualenv_setup` task. :param task_name: Task name for display purpose. :param cache_key: Task cache key. :param always: Whether always run. :return: Created task. """
# Ensure given context object is BuildContext object _ensure_build_context(ctx) # Create task that sets up `virtualenv` package virtualenv_setup_task = virtualenv_setup( # Context ctx=ctx, # Python program path python=python, # Touch file path touch=virtualenv_setup_touch, # `get-pip.py` file path for `pip_setup` task. pip_setup_file=pip_setup_file, # Touch file path for `pip_setup` task. pip_setup_touch=pip_setup_touch, ) # Get virtual environment directory path node venv_path_node, _ = _normalize_items( ctx=ctx, items=[venv_path], # Convert path string to node str_to_node=True )[0] # Create task that sets up virtual environment. task = create_cmd_task( # Context ctx=ctx, # Command parts parts=[ # Python program path python, # Run module '-m', # Module name 'virtualenv', # Virtual environment directory absolute path venv_path_node.abspath(), ], # Input items list inputs=[ # Run after the task that sets up `virtualenv` package virtualenv_setup_task, # Given input items list inputs, ], # Output items list outputs=[ # Add the virtual environment's `python` program path as output # target for dirty checking get_python_path(venv_path), # Add the virtual environment's `pip` program path as output target # for dirty checking get_pip_path(venv_path), # Given output items list outputs, ], # Whether always run always=always, # Task name task_name=task_name, # Task cache key cache_key=cache_key or (python, venv_path), ) # Return the created task return task
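A usage sketch for create_venv inside a waf command; the interpreter name, virtual environment directory and touch-file paths are placeholders chosen to match the conventions used above.

@build_ctx
def venv_init(ctx):
    create_venv(
        ctx=ctx,
        python='python3',
        venv_path='dev_venv',
        pip_setup_file='tools/get-pip.py',
        pip_setup_touch='.touch/pip_setup.touch',
        virtualenv_setup_touch='.touch/virtualenv_setup.touch',
        task_name='Create venv `dev_venv`',
    )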
<SYSTEM_TASK:> Create task that uses given virtual environment's `pip` to sets up \ <END_TASK> <USER_TASK:> Description: def pip_ins_req( ctx, python, req_path, venv_path=None, inputs=None, outputs=None, touch=None, check_import=False, check_import_module=None, pip_setup_file=None, pip_setup_touch=None, virtualenv_setup_touch=None, always=False, ): """ Create task that uses given virtual environment's `pip` to sets up \ packages listed in given requirements file. :param ctx: BuildContext object. :param python: Python program path used to set up `pip` and `virtualenv`. :param req_path: Requirements file relative path relative to top directory. :param venv_path: Virtual environment directory relative path relative to top directory. If given, will create the virtual environment and set up packages listed in given requirements file in the virtual environment. If not given, will set up packages listed in given requirements file in given Python program's environment. :param inputs: Input items list to add to created task. See :paramref:`create_cmd_task.inputs` for allowed item types. :param outputs: Output items list to add to created task. See :paramref:`create_cmd_task.outputs` for allowed item types. :param touch: Touch file path for dirty checking. :param check_import: Whether import module for dirty checking. :param check_import_module: Module name to import for dirty checking. :param pip_setup_file: `get-pip.py` file path for `pip_setup` task. :param pip_setup_touch: Touch file path for `pip_setup` task. :param virtualenv_setup_touch: Touch file path for `virtualenv_setup` task. :param always: Whether always run. :return: Created task. """
# Ensure given context object is BuildContext object _ensure_build_context(ctx) # If virtual environment directory path is not given if venv_path is None: # Use given Python program path venv_python = python # If virtual environment directory path is given else: # Get Python program path in the virtual environment venv_python = get_python_path(venv_path) # Mark the path as input target venv_python = mark_input(venv_python) # If virtual environment directory path is not given, # it means not create virtual environment. if venv_path is None: # Create task that sets up `pip` pip_setup_task = pip_setup( # Context ctx=ctx, # Python program path python=python, # `get-pip.py` file path setup_file=pip_setup_file, # Touch file path touch=pip_setup_touch, # Whether import module for dirty checking always=always, ) # Not create virtual environment venv_task = None # If virtual environment directory path is given else: # Not create task that sets up `pip` here because `create_venv` # function below will do pip_setup_task = None # Create task that sets up virtual environment venv_task = create_venv( # Context ctx=ctx, # Python program path python=python, # Virtual environment directory path venv_path=venv_path, # Output items list outputs=[ # Add the virtual environment's `python` program path as output # target for dirty checking get_python_path(venv_path), # Add the virtual environment's `pip` program path as output # target for dirty checking get_pip_path(venv_path), ], # Whether always run always=always, # Task name task_name='Create venv `{}`'.format(venv_path), # `get-pip.py` file path for `pip_setup` task pip_setup_file=pip_setup_file, # Touch file path for `pip_setup` task pip_setup_touch=pip_setup_touch, # Touch file path for `virtualenv_setup` task virtualenv_setup_touch=virtualenv_setup_touch, ) # If touch file path is not given if not touch: # Not update touch file touch_node = None # If touch file path is given else: # Update touch file touch_node, always = update_touch_file( # Context ctx=ctx, # Touch file path path=touch, # Whether import module for dirty checking check_import=check_import, # Module name to import for dirty checking check_import_module=check_import_module, # Python program path for dirty checking check_import_python=venv_python, # Whether always run always=always, ) # Create task that sets up packages task = create_cmd_task( # Context ctx=ctx, # Command parts parts=[ # Python program path venv_python, # Run module '-m', # Module name 'pip', # Install package 'install', # Read package names from requirements file '-r', # Requirements file path. Mark as input target. mark_input(req_path), ], # Input items list inputs=inputs, # Output items list outputs=[ # Use the touch node as output target for dirty checking touch_node, # Given output items list outputs, ], # Whether always run always=always, ) # Chain these tasks to run one after another chain_tasks([ pip_setup_task, venv_task, task, ]) # Return the created task return task
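A usage sketch for pip_ins_req combining a requirements file with a virtual environment; all paths are placeholders, and pytest as the check-import module is only an example choice.

@build_ctx
def deps_install(ctx):
    pip_ins_req(
        ctx=ctx,
        python='python3',
        req_path='requirements.txt',
        venv_path='dev_venv',
        touch='.touch/deps_install.touch',
        check_import=True,
        check_import_module='pytest',          # assumed module used for dirty checking
        pip_setup_file='tools/get-pip.py',
        pip_setup_touch='.touch/pip_setup.touch',
        virtualenv_setup_touch='.touch/virtualenv_setup.touch',
    )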
<SYSTEM_TASK:>
Takes template_name as input to issue a RESTful call to HP IMC which will delete the specific
<END_TASK>
<USER_TASK:>
Description:
def delete_telnet_template(auth, url, template_name=None, template_id=None):
    """
    Takes template_name as input to issue a RESTful call to HP IMC which will delete the
    specific telnet template from the IMC system

    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class

    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass

    :param template_name: str value of template name

    :param template_id: str value of the template id

    :return: int HTTP response code

    :rtype: int

    """
try:
        if template_id is None:
            if template_name is None:
                # Without a name or an id there is nothing to look up
                raise ValueError("Either template_name or template_id must be supplied")
            telnet_templates = get_telnet_template(auth, url)
            # Resolve the template id from the template name
            for template in telnet_templates:
                if template['name'] == template_name:
                    template_id = template['id']
        f_url = url + "/imcrs/plat/res/telnet/%s/delete" % template_id
        response = requests.delete(f_url, auth=auth, headers=HEADERS)
        return response.status_code
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " delete_telnet_template: An Error has occurred"
<SYSTEM_TASK:>
Takes template_name as input to issue a RESTful call to HP IMC which will delete the specific
<END_TASK>
<USER_TASK:>
Description:
def delete_ssh_template(auth, url, template_name=None, template_id=None):
    """
    Takes template_name as input to issue a RESTful call to HP IMC which will delete the
    specific ssh template from the IMC system

    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class

    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass

    :param template_name: str value of template name

    :param template_id: str value of the template id

    :return: int HTTP response code

    :rtype: int

    """
try:
        if template_id is None:
            if template_name is None:
                # Without a name or an id there is nothing to look up
                raise ValueError("Either template_name or template_id must be supplied")
            ssh_templates = get_ssh_template(auth, url)
            # Resolve the template id from the template name
            for template in ssh_templates:
                if template['name'] == template_name:
                    template_id = template['id']
        f_url = url + "/imcrs/plat/res/ssh/%s/delete" % template_id
        response = requests.delete(f_url, auth=auth, headers=HEADERS)
        return response.status_code
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " delete_ssh_template: An Error has occurred"
<SYSTEM_TASK:>
Takes template_name as input to issue a RESTful call to HP IMC which will delete the specific
<END_TASK>
<USER_TASK:>
Description:
def delete_snmp_template(auth, url, template_name=None, template_id=None):
    """
    Takes template_name as input to issue a RESTful call to HP IMC which will delete the
    specific snmp template from the IMC system

    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class

    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass

    :param template_name: str value of template name

    :param template_id: str value of the template id

    :return: int HTTP response code

    :rtype: int

    """
try:
        if template_id is None:
            if template_name is None:
                # Without a name or an id there is nothing to look up
                raise ValueError("Either template_name or template_id must be supplied")
            snmp_templates = get_snmp_templates(auth, url)
            # Resolve the template id from the template name
            for template in snmp_templates:
                if template['name'] == template_name:
                    template_id = template['id']
        f_url = url + "/imcrs/plat/res/snmp/%s/delete" % template_id
        response = requests.delete(f_url, auth=auth, headers=HEADERS)
        return response.status_code
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " delete_snmp_template: An Error has occurred"
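A usage sketch for the three delete helpers above. The server address and credentials mirror the doctests elsewhere in this module, the template name is a placeholder, and the 204 success code is an assumption based on the other IMC REST helpers rather than something confirmed here.

from pyhpeimc.auth import IMCAuth

auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")

# Delete by template name; the helper resolves the matching template id first.
status = delete_telnet_template(auth.creds, auth.url, template_name='default_telnet')
print(status)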
<SYSTEM_TASK:> Run a non-encrypted non-authorized API proxy server. <END_TASK> <USER_TASK:> Description: def proxy(ctx, bind, port): """ Run a non-encrypted non-authorized API proxy server. Use this only for development and testing! """
app = web.Application() app.on_startup.append(startup_proxy) app.on_cleanup.append(cleanup_proxy) app.router.add_route("GET", r'/stream/{path:.*$}', websocket_handler) app.router.add_route("GET", r'/wsproxy/{path:.*$}', websocket_handler) app.router.add_route('*', r'/{path:.*$}', web_handler) if getattr(ctx.args, 'testing', False): return app web.run_app(app, host=bind, port=port)
<SYSTEM_TASK:>
Takes string input of IP address to issue a RESTful call to HP IMC
<END_TASK>
<USER_TASK:>
Description:
def get_dev_details(ip_address, auth, url):
    """Takes string input of IP address to issue a RESTful call to HP IMC

    :param ip_address: string object of dotted decimal notation of IPv4 address

    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class

    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass

    :return: dictionary of device details

    :rtype: dict

    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.device import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> dev_1 = get_dev_details('10.101.0.221', auth.creds, auth.url)
    >>> assert type(dev_1) is dict
    >>> assert 'sysName' in dev_1
    >>> dev_2 = get_dev_details('8.8.8.8', auth.creds, auth.url)
    Device not found
    >>> assert type(dev_2) is str
    """
get_dev_details_url = "/imcrs/plat/res/device?resPrivilegeFilter=false&ip=" + \
                          str(ip_address) + "&start=0&size=1000&orderBy=id&desc=false&total=false"
    f_url = url + get_dev_details_url
    response = requests.get(f_url, auth=auth, headers=HEADERS)
    try:
        if response.status_code == 200:
            dev_details = json.loads(response.text)
            if len(dev_details) == 0:
                print("Device not found")
                return "Device not found"
            elif isinstance(dev_details['device'], list):
                for i in dev_details['device']:
                    if i['ip'] == ip_address:
                        dev_details = i
                        return dev_details
            elif isinstance(dev_details['device'], dict):
                return dev_details['device']
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " get_dev_details: An Error has occurred"
<SYSTEM_TASK:>
Function takes devid and ifindex of a specific device and interface and issues a RESTful call
<END_TASK>
<USER_TASK:>
Description:
def set_inteface_up(ifindex, auth, url, devid=None, devip=None):
    """
    Function takes devid and ifindex of a specific device and interface and issues a RESTful call
    to "undo shut" the specified interface on the target device.

    :param devid: int or str value of the target device

    :param devip: ipv4 address of the target devices

    :param ifindex: int or str value of the target interface

    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class

    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass

    :return: HTTP status code 204 with no values.

    :rtype: int

    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.device import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> int_down_response = set_interface_down( '9', auth.creds, auth.url, devid = '10')
    204
    >>> int_up_response = set_inteface_up( '9', auth.creds, auth.url, devid = '10')
    >>> int_down_response = set_interface_down( '9', auth.creds, auth.url, devid = '10')
    204
    >>> int_up_response = set_inteface_up('9', auth.creds, auth.url, devip = '10.101.0.221')
    >>> assert type(int_up_response) is int
    >>> assert int_up_response is 204
    """
if devip is not None:
        devid = get_dev_details(devip, auth, url)['id']
    set_int_up_url = "/imcrs/plat/res/device/" + str(devid) + "/interface/" + str(ifindex) + "/up"
    f_url = url + set_int_up_url
    try:
        response = requests.put(f_url, auth=auth, headers=HEADERS)
        if response.status_code == 204:
            return response.status_code
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " set_inteface_up: An Error has occurred"
<SYSTEM_TASK:> Deletes an existing keypair with given ACCESSKEY. <END_TASK> <USER_TASK:> Description: async def delete(cls, access_key: str): """ Deletes an existing keypair with given ACCESSKEY. """
q = 'mutation($access_key: String!) {' \ ' delete_keypair(access_key: $access_key) {' \ ' ok msg' \ ' }' \ '}' variables = { 'access_key': access_key, } rqst = Request(cls.session, 'POST', '/admin/graphql') rqst.set_json({ 'query': q, 'variables': variables, }) async with rqst.fetch() as resp: data = await resp.json() return data['delete_keypair']
<SYSTEM_TASK:>
This function prompts user for IMC server information and credentials and stores
<END_TASK>
<USER_TASK:>
Description:
def set_imc_creds(h_url=None, imc_server=None, imc_port=None, imc_user=None, imc_pw=None):
    """ This function prompts user for IMC server information and credentials and stores
    values in url and auth global variables"""
global auth, url
    if h_url is None:
        imc_protocol = input(
            "What protocol would you like to use to connect to the IMC server: \n Press 1 for HTTP: \n Press 2 for HTTPS:")
        if imc_protocol == "1":
            h_url = 'http://'
        else:
            h_url = 'https://'
        imc_server = input("What is the ip address of the IMC server?")
        imc_port = input("What is the port number of the IMC server?")
        imc_user = input("What is the username of the IMC eAPI user?")
        imc_pw = input('''What is the password of the IMC eAPI user?''')
    url = h_url + imc_server + ":" + imc_port
    auth = requests.auth.HTTPDigestAuth(imc_user, imc_pw)
    test_url = '/imcrs'
    f_url = url + test_url
    try:
        r = requests.get(f_url, auth=auth, headers=headers, verify=False)
        print(r.status_code)
    # checks for requests exceptions
    except requests.exceptions.RequestException as e:
        print("Error:\n" + str(e))
        print("\n\nThe IMC server address is invalid. Please try again\n\n")
        # re-prompt and return the result so execution does not fall through with `r` undefined
        return set_imc_creds()
    if r.status_code != 200:
        # checks for valid IMC credentials
        print("Error: \n Your credentials are invalid. Please try again\n\n")
        return set_imc_creds()
    else:
        print("You've successfully accessed the IMC eAPI")
        return auth
<SYSTEM_TASK:> Load and return the contents of version.json. <END_TASK> <USER_TASK:> Description: def get_version(root): """ Load and return the contents of version.json. :param root: The root path that the ``version.json`` file will be opened :type root: str :returns: Content of ``version.json`` or None :rtype: dict or None """
version_json = os.path.join(root, 'version.json') if os.path.exists(version_json): with open(version_json, 'r') as version_json_file: return json.load(version_json_file) return None
<SYSTEM_TASK:> Calls this instance's request_client's post method with the <END_TASK> <USER_TASK:> Description: def fetch(self, endpoint_name, identifier_input, query_params=None): """Calls this instance's request_client's post method with the specified component endpoint Args: - endpoint_name (str) - The endpoint to call like "property/value". - identifier_input - One or more identifiers to request data for. An identifier can be in one of these forms: - A list of property identifier dicts: - A property identifier dict can contain the following keys: (address, zipcode, unit, city, state, slug, meta). One of 'address' or 'slug' is required. Ex: [{"address": "82 County Line Rd", "zipcode": "72173", "meta": "some ID"}] A slug is a URL-safe string that identifies a property. These are obtained from HouseCanary. Ex: [{"slug": "123-Example-St-San-Francisco-CA-94105"}] - A list of dicts representing a block: - A block identifier dict can contain the following keys: (block_id, num_bins, property_type, meta). 'block_id' is required. Ex: [{"block_id": "060750615003005", "meta": "some ID"}] - A list of dicts representing a zipcode: Ex: [{"zipcode": "90274", "meta": "some ID"}] - A list of dicts representing an MSA: Ex: [{"msa": "41860", "meta": "some ID"}] The "meta" field is always optional. Returns: A Response object, or the output of a custom OutputGenerator if one was specified in the constructor. """
endpoint_url = constants.URL_PREFIX + "/" + self._version + "/" + endpoint_name if query_params is None: query_params = {} if len(identifier_input) == 1: # If only one identifier specified, use a GET request query_params.update(identifier_input[0]) return self._request_client.get(endpoint_url, query_params) # when more than one address, use a POST request return self._request_client.post(endpoint_url, identifier_input, query_params)
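A sketch of the identifier_input shapes this method accepts, assuming `client` is an ApiClient instance; the addresses and slug are placeholders taken from the docstring examples.

# One identifier: fetch() issues a GET with the identifier merged into query_params.
single = [{"address": "82 County Line Rd", "zipcode": "72173", "meta": "prop-1"}]

# More than one identifier: fetch() switches to a POST with the list as the body.
multiple = [
    {"address": "82 County Line Rd", "zipcode": "72173"},
    {"slug": "123-Example-St-San-Francisco-CA-94105"},
]

response = client.fetch("property/value", multiple)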
<SYSTEM_TASK:> Calls this instance's request_client's get method with the <END_TASK> <USER_TASK:> Description: def fetch_synchronous(self, endpoint_name, query_params=None): """Calls this instance's request_client's get method with the specified component endpoint"""
endpoint_url = constants.URL_PREFIX + "/" + self._version + "/" + endpoint_name if query_params is None: query_params = {} return self._request_client.get(endpoint_url, query_params)
<SYSTEM_TASK:> Convert the various formats of input identifier_data into <END_TASK> <USER_TASK:> Description: def get_identifier_input(self, identifier_data): """Convert the various formats of input identifier_data into the proper json format expected by the ApiClient fetch method, which is a list of dicts."""
identifier_input = [] if isinstance(identifier_data, list) and len(identifier_data) > 0: # if list, convert each item in the list to json for address in identifier_data: identifier_input.append(self._convert_to_identifier_json(address)) else: identifier_input.append(self._convert_to_identifier_json(identifier_data)) return identifier_input
<SYSTEM_TASK:> Common method for handling parameters before passing to api_client <END_TASK> <USER_TASK:> Description: def fetch_identifier_component(self, endpoint_name, identifier_data, query_params=None): """Common method for handling parameters before passing to api_client"""
if query_params is None: query_params = {} identifier_input = self.get_identifier_input(identifier_data) return self._api_client.fetch(endpoint_name, identifier_input, query_params)
<SYSTEM_TASK:> Convert input address data into json format <END_TASK> <USER_TASK:> Description: def _convert_to_identifier_json(self, address_data): """Convert input address data into json format"""
if isinstance(address_data, str): # allow just passing a slug string. return {"slug": address_data} if isinstance(address_data, tuple) and len(address_data) > 0: address_json = {"address": address_data[0]} if len(address_data) > 1: address_json["zipcode"] = address_data[1] if len(address_data) > 2: address_json["meta"] = address_data[2] return address_json if isinstance(address_data, dict): allowed_keys = ["address", "zipcode", "unit", "city", "state", "slug", "meta", "client_value", "client_value_sqft"] # ensure the dict does not contain any unallowed keys for key in address_data: if key not in allowed_keys: msg = "Key in address input not allowed: " + key raise housecanary.exceptions.InvalidInputException(msg) # ensure it contains an "address" key if "address" in address_data or "slug" in address_data: return address_data # if we made it here, the input was not valid. msg = ("Input is invalid. Must be a list of (address, zipcode) tuples, or a dict or list" " of dicts with each item containing at least an 'address' or 'slug' key.") raise housecanary.exceptions.InvalidInputException((msg))
<SYSTEM_TASK:> Call the value_report component <END_TASK> <USER_TASK:> Description: def value_report(self, address, zipcode, report_type="full", format_type="json"): """Call the value_report component Value Report only supports a single address. Args: - address - zipcode Kwargs: - report_type - "full" or "summary". Default is "full". - format_type - "json", "pdf", "xlsx" or "all". Default is "json". """
query_params = { "report_type": report_type, "format": format_type, "address": address, "zipcode": zipcode } return self._api_client.fetch_synchronous("property/value_report", query_params)
<SYSTEM_TASK:> Call the rental_report component <END_TASK> <USER_TASK:> Description: def rental_report(self, address, zipcode, format_type="json"): """Call the rental_report component Rental Report only supports a single address. Args: - address - zipcode Kwargs: - format_type - "json", "xlsx" or "all". Default is "json". """
# only json is supported by rental report. query_params = { "format": format_type, "address": address, "zipcode": zipcode } return self._api_client.fetch_synchronous("property/rental_report", query_params)
<SYSTEM_TASK:> Call the zip component_mget endpoint <END_TASK> <USER_TASK:> Description: def component_mget(self, zip_data, components): """Call the zip component_mget endpoint Args: - zip_data - As described in the class docstring. - components - A list of strings for each component to include in the request. Example: ["zip/details", "zip/volatility"] """
if not isinstance(components, list): print("Components param must be a list") return query_params = {"components": ",".join(components)} return self.fetch_identifier_component( "zip/component_mget", zip_data, query_params)
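A usage sketch, assuming `client` is an ApiClient whose `zip` attribute exposes this wrapper (an assumption, since the accessor wiring is not shown here); the zipcodes and component names follow the docstring example.

zip_data = [
    {"zipcode": "90274", "meta": "west"},
    {"zipcode": "01960", "meta": "east"},
]
result = client.zip.component_mget(zip_data, ["zip/details", "zip/volatility"])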
<SYSTEM_TASK:> Returns the contents of version.json or a 404. <END_TASK> <USER_TASK:> Description: def version(request): """ Returns the contents of version.json or a 404. """
version_json = import_string(version_callback)(settings.BASE_DIR) if version_json is None: return HttpResponseNotFound('version.json not found') else: return JsonResponse(version_json)
<SYSTEM_TASK:> Runs all the Django checks and returns a JsonResponse with either <END_TASK> <USER_TASK:> Description: def heartbeat(request): """ Runs all the Django checks and returns a JsonResponse with either a status code of 200 or 500 depending on the results of the checks. Any check that returns a warning or worse (error, critical) will return a 500 response. """
all_checks = checks.registry.registry.get_checks( include_deployment_checks=not settings.DEBUG, ) details = {} statuses = {} level = 0 for check in all_checks: detail = heartbeat_check_detail(check) statuses[check.__name__] = detail['status'] level = max(level, detail['level']) if detail['level'] > 0: details[check.__name__] = detail if level < checks.messages.WARNING: status_code = 200 heartbeat_passed.send(sender=heartbeat, level=level) else: status_code = 500 heartbeat_failed.send(sender=heartbeat, level=level) payload = { 'status': level_to_text(level), 'checks': statuses, 'details': details, } return JsonResponse(payload, status=status_code)
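A sketch of wiring the version and heartbeat views into a Django URLconf. The URL paths and the import module are assumptions following common Dockerflow-style conventions, not a documented contract.

# urls.py (sketch)
from django.urls import path

from myapp.views import heartbeat, version  # hypothetical import path

urlpatterns = [
    path('__version__', version, name='version'),
    path('__heartbeat__', heartbeat, name='heartbeat'),
]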