source_code
stringlengths
45
7.24k
function_name
stringlengths
1
111
messages
list
def x(path): assert isinstance(path, string_types + (list,) ), '\n{} no es un `path` valido'.format(path) if isinstance(path, list): if helpers.is_list_of_matching_dicts(path): return path else: raise ValueError( """ La lista ingresada no esta formada por diccionarios con las mismas claves.""" ) suffix = path.split('.')[-1] if suffix == 'csv': return _read_csv_table(path) elif suffix == 'xlsx': return _read_xlsx_table(path) else: raise ValueError( '\n{} no es un sufijo reconocido. Pruebe con .csv o .xlsx'. format(suffix))
read_table
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(path):\n assert isinstance(path, string_types + (list,)\n ), '\\n{} no es un `path` valido'.format(path)\n if isinstance(path, list):\n if helpers.is_list_of_matching_dicts(path):\n return path\n else:\n raise ValueError(\n \"\"\"\nLa lista ingresada no esta formada por diccionarios con las mismas claves.\"\"\"\n )\n suffix = path.split('.')[-1]\n if suffix == 'csv':\n return _read_csv_table(path)\n elif suffix == 'xlsx':\n return _read_xlsx_table(path)\n else:\n raise ValueError(\n '\\n{} no es un sufijo reconocido. Pruebe con .csv o .xlsx'.\n format(suffix))\n", "role": "user" }, { "content": "read_table", "role": "assistant" } ]
def x(path): with open(path, 'rb') as csvfile: reader = csv.DictReader(csvfile) table = list(reader) return table
read_csv_table
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(path):\n with open(path, 'rb') as csvfile:\n reader = csv.DictReader(csvfile)\n table = list(reader)\n return table\n", "role": "user" }, { "content": "read_csv_table", "role": "assistant" } ]
def x(path): workbook = pyxl.load_workbook(path) worksheet = workbook.active table = helpers.sheet_to_table(worksheet) return table
read_xlsx_table
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(path):\n workbook = pyxl.load_workbook(path)\n worksheet = workbook.active\n table = helpers.sheet_to_table(worksheet)\n return table\n", "role": "user" }, { "content": "read_xlsx_table", "role": "assistant" } ]
def x(file_path: str) ->str: if not os.path.isfile(file_path): raise ConfigurationError( f'If you provide a model specification file, it must be a file. You provided {file_path}' ) extension = file_path.split('.')[-1] if extension not in ['yaml', 'yml']: raise ConfigurationError( f'Model specification files must be in a yaml format. You provided {extension}' ) yaml.full_load(file_path) return file_path
validate_model_specification_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(file_path: str) ->str:\n if not os.path.isfile(file_path):\n raise ConfigurationError(\n f'If you provide a model specification file, it must be a file. You provided {file_path}'\n )\n extension = file_path.split('.')[-1]\n if extension not in ['yaml', 'yml']:\n raise ConfigurationError(\n f'Model specification files must be in a yaml format. You provided {extension}'\n )\n yaml.full_load(file_path)\n return file_path\n", "role": "user" }, { "content": "validate_model_specification_file", "role": "assistant" } ]
def x(self, layer=None): if layer: return self._values[layer] for layer in reversed(self._layers): if layer in self._values: return self._values[layer] raise KeyError(layer)
get_value_with_source
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, layer=None):\n if layer:\n return self._values[layer]\n for layer in reversed(self._layers):\n if layer in self._values:\n return self._values[layer]\n raise KeyError(layer)\n", "role": "user" }, { "content": "get_value_with_source", "role": "assistant" } ]
def x(self): result = [] for layer in self._layers: if layer in self._values: result.append({'layer': layer, 'value': self._values[layer][1], 'source': self._values[layer][0], 'default': layer == self. _layers[-1]}) return result
metadata
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n result = []\n for layer in self._layers:\n if layer in self._values:\n result.append({'layer': layer, 'value': self._values[layer][1],\n 'source': self._values[layer][0], 'default': layer == self.\n _layers[-1]})\n return result\n", "role": "user" }, { "content": "metadata", "role": "assistant" } ]
def x(self, value, layer=None, source=None): if self._frozen: raise TypeError('Frozen ConfigNode does not support assignment') if not layer: layer = self._layers[-1] self._values[layer] = source, value
set_value
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value, layer=None, source=None):\n if self._frozen:\n raise TypeError('Frozen ConfigNode does not support assignment')\n if not layer:\n layer = self._layers[-1]\n self._values[layer] = source, value\n", "role": "user" }, { "content": "set_value", "role": "assistant" } ]
def x(self, layer): if self._frozen: raise TypeError('Frozen ConfigNode does not support modification') self.reset_layer(layer) self._layers.remove(layer)
drop_layer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, layer):\n if self._frozen:\n raise TypeError('Frozen ConfigNode does not support modification')\n self.reset_layer(layer)\n self._layers.remove(layer)\n", "role": "user" }, { "content": "drop_layer", "role": "assistant" } ]
def x(self, layer): if self._frozen: raise TypeError('Frozen ConfigNode does not support modification') if layer in self._values: del self._values[layer]
reset_layer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, layer):\n if self._frozen:\n raise TypeError('Frozen ConfigNode does not support modification')\n if layer in self._values:\n del self._values[layer]\n", "role": "user" }, { "content": "reset_layer", "role": "assistant" } ]
def x(self): self.__dict__['_frozen'] = True for child in self._children.values(): child.freeze()
freeze
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n self.__dict__['_frozen'] = True\n for child in self._children.values():\n child.freeze()\n", "role": "user" }, { "content": "freeze", "role": "assistant" } ]
def x(self, name, layer=None): if name not in self._children: if self._frozen: raise KeyError(name) self._children[name] = ConfigTree(layers=self._layers) child = self._children[name] if isinstance(child, ConfigNode): return child.get_value(layer) else: return child
get_from_layer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name, layer=None):\n if name not in self._children:\n if self._frozen:\n raise KeyError(name)\n self._children[name] = ConfigTree(layers=self._layers)\n child = self._children[name]\n if isinstance(child, ConfigNode):\n return child.get_value(layer)\n else:\n return child\n", "role": "user" }, { "content": "get_from_layer", "role": "assistant" } ]
def x(self, name, value, layer=None, source=None): if self._frozen: raise TypeError('Frozen ConfigTree does not support assignment') if isinstance(value, dict): if name not in self._children or not isinstance(self._children[name ], ConfigTree): self._children[name] = ConfigTree(layers=list(self._layers)) self._children[name].update(value, layer, source) else: if name not in self._children or not isinstance(self._children[name ], ConfigNode): self._children[name] = ConfigNode(list(self._layers)) child = self._children[name] child.set_value(value, layer, source)
set_with_metadata
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name, value, layer=None, source=None):\n if self._frozen:\n raise TypeError('Frozen ConfigTree does not support assignment')\n if isinstance(value, dict):\n if name not in self._children or not isinstance(self._children[name\n ], ConfigTree):\n self._children[name] = ConfigTree(layers=list(self._layers))\n self._children[name].update(value, layer, source)\n else:\n if name not in self._children or not isinstance(self._children[name\n ], ConfigNode):\n self._children[name] = ConfigNode(list(self._layers))\n child = self._children[name]\n child.set_value(value, layer, source)\n", "role": "user" }, { "content": "set_with_metadata", "role": "assistant" } ]
def x(self, data: Union[Mapping, str, bytes], layer: str=None, source: str=None ): if isinstance(data, dict): self._read_dict(data, layer, source) elif isinstance(data, ConfigTree): self._read_dict(data.to_dict(), layer, source) elif isinstance(data, str): if data.endswith(('.yaml', '.yml')): source = source if source else data self._load(data, layer, source) else: try: self._loads(data, layer, source) except AttributeError: raise ValueError( 'The string data should be yaml formated string or path to .yaml/.yml file' ) elif data is None: pass else: raise ValueError( f'Update must be called with dictionary, string, or ConfigTree. You passed in {type(data)}' )
update
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data: Union[Mapping, str, bytes], layer: str=None, source: str=None\n ):\n if isinstance(data, dict):\n self._read_dict(data, layer, source)\n elif isinstance(data, ConfigTree):\n self._read_dict(data.to_dict(), layer, source)\n elif isinstance(data, str):\n if data.endswith(('.yaml', '.yml')):\n source = source if source else data\n self._load(data, layer, source)\n else:\n try:\n self._loads(data, layer, source)\n except AttributeError:\n raise ValueError(\n 'The string data should be yaml formated string or path to .yaml/.yml file'\n )\n elif data is None:\n pass\n else:\n raise ValueError(\n f'Update must be called with dictionary, string, or ConfigTree. You passed in {type(data)}'\n )\n", "role": "user" }, { "content": "update", "role": "assistant" } ]
def x(self, data_dict, layer=None, source=None): for k, v in data_dict.items(): self._set_with_metadata(k, v, layer, source)
read_dict
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data_dict, layer=None, source=None):\n for k, v in data_dict.items():\n self._set_with_metadata(k, v, layer, source)\n", "role": "user" }, { "content": "read_dict", "role": "assistant" } ]
def x(self, data_string, layer=None, source=None): data_dict = yaml.full_load(data_string) self._read_dict(data_dict, layer, source)
loads
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data_string, layer=None, source=None):\n data_dict = yaml.full_load(data_string)\n self._read_dict(data_dict, layer, source)\n", "role": "user" }, { "content": "loads", "role": "assistant" } ]
def x(self, f, layer=None, source=None): if hasattr(f, 'read'): self._loads(f.read(), layer=layer, source=source) else: with open(f) as f: self._loads(f.read(), layer=layer, source=source)
load
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, f, layer=None, source=None):\n if hasattr(f, 'read'):\n self._loads(f.read(), layer=layer, source=source)\n else:\n with open(f) as f:\n self._loads(f.read(), layer=layer, source=source)\n", "role": "user" }, { "content": "load", "role": "assistant" } ]
def x(self, name): if name in self._children: return self._children[name].metadata() else: head, _, tail = name.partition('.') if head in self._children: return self._children[head].metadata(key=tail) else: raise KeyError(name)
metadata
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name):\n if name in self._children:\n return self._children[name].metadata()\n else:\n head, _, tail = name.partition('.')\n if head in self._children:\n return self._children[head].metadata(key=tail)\n else:\n raise KeyError(name)\n", "role": "user" }, { "content": "metadata", "role": "assistant" } ]
def x(self, layer, preserve_keys=()): self._reset_layer(layer, [k.split('.') for k in preserve_keys], prefix=[])
reset_layer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, layer, preserve_keys=()):\n self._reset_layer(layer, [k.split('.') for k in preserve_keys], prefix=[])\n", "role": "user" }, { "content": "reset_layer", "role": "assistant" } ]
def x(self, layer): if self._frozen: raise TypeError('Frozen ConfigTree does not support modification') for child in self._children.values(): child.drop_layer(layer) self._layers.remove(layer)
drop_layer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, layer):\n if self._frozen:\n raise TypeError('Frozen ConfigTree does not support modification')\n for child in self._children.values():\n child.drop_layer(layer)\n self._layers.remove(layer)\n", "role": "user" }, { "content": "drop_layer", "role": "assistant" } ]
def x(self): unused = set() for k, c in self._children.items(): if isinstance(c, ConfigNode): if not c.has_been_accessed(): unused.add(k) else: for ck in c.unused_keys(): unused.add(k + '.' + ck) return unused
unused_keys
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n unused = set()\n for k, c in self._children.items():\n if isinstance(c, ConfigNode):\n if not c.has_been_accessed():\n unused.add(k)\n else:\n for ck in c.unused_keys():\n unused.add(k + '.' + ck)\n return unused\n", "role": "user" }, { "content": "unused_keys", "role": "assistant" } ]
def x(schema_filename=None, schema_dir=None): schema_filename = schema_filename or DEFAULT_CATALOG_SCHEMA_FILENAME schema_dir = schema_dir or ABSOLUTE_SCHEMA_DIR schema_path = os.path.join(schema_dir, schema_filename) schema = readers.read_json(schema_path) if platform.system() == 'Windows': base_uri = 'file:///' + schema_path.replace('\\', '/') else: base_uri = 'file://' + schema_path resolver = jsonschema.RefResolver(base_uri=base_uri, referrer=schema) format_checker = jsonschema.FormatChecker() validator = jsonschema.Draft4Validator(schema=schema, resolver=resolver, format_checker=format_checker) return validator
create_validator
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(schema_filename=None, schema_dir=None):\n schema_filename = schema_filename or DEFAULT_CATALOG_SCHEMA_FILENAME\n schema_dir = schema_dir or ABSOLUTE_SCHEMA_DIR\n schema_path = os.path.join(schema_dir, schema_filename)\n schema = readers.read_json(schema_path)\n if platform.system() == 'Windows':\n base_uri = 'file:///' + schema_path.replace('\\\\', '/')\n else:\n base_uri = 'file://' + schema_path\n resolver = jsonschema.RefResolver(base_uri=base_uri, referrer=schema)\n format_checker = jsonschema.FormatChecker()\n validator = jsonschema.Draft4Validator(schema=schema, resolver=resolver,\n format_checker=format_checker)\n return validator\n", "role": "user" }, { "content": "create_validator", "role": "assistant" } ]
def x(catalog, validator=None): catalog = readers.read_catalog(catalog) if not validator: if hasattr(catalog, 'validator'): validator = catalog.validator else: validator = create_validator() jsonschema_res = validator.is_valid(catalog) custom_errors = iter_custom_errors(catalog) return jsonschema_res and len(list(custom_errors)) == 0
is_valid_catalog
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, validator=None):\n catalog = readers.read_catalog(catalog)\n if not validator:\n if hasattr(catalog, 'validator'):\n validator = catalog.validator\n else:\n validator = create_validator()\n jsonschema_res = validator.is_valid(catalog)\n custom_errors = iter_custom_errors(catalog)\n return jsonschema_res and len(list(custom_errors)) == 0\n", "role": "user" }, { "content": "is_valid_catalog", "role": "assistant" } ]
def x(catalog, only_errors=False, fmt='dict', export_path=None, validator=None ): catalog = readers.read_catalog(catalog) default_response = {'status': 'OK', 'error': {'catalog': {'status': 'OK', 'title': catalog.get('title'), 'errors': []}, 'dataset': [{ 'status': 'OK', 'title': dataset.get('title'), 'identifier': dataset.get('identifier'), 'list_index': index, 'errors': []} for index, dataset in enumerate(catalog['dataset'])] if 'dataset' in catalog and isinstance(catalog['dataset'], list) else None}} if not validator: if hasattr(catalog, 'validator'): validator = catalog.validator else: validator = create_validator() jsonschema_errors = list(validator.iter_errors(catalog)) custom_errors = list(iter_custom_errors(catalog)) errors = jsonschema_errors + custom_errors response = default_response.copy() for error in errors: response = _update_validation_response(error, response) if only_errors: response['error']['dataset'] = [dataset for dataset in response[ 'error']['dataset'] if dataset['status'] == 'ERROR'] if export_path: validation_lists = _catalog_validation_to_list(response) alignment = Alignment(wrap_text=True, shrink_to_fit=True, vertical= 'center') column_styles = {'catalog': {'catalog_status': {'width': 20}, 'catalog_error_location': {'width': 40}, 'catalog_error_message': {'width': 40}, 'catalog_title': { 'width': 20}}, 'dataset': {'dataset_error_location': {'width': 20}, 'dataset_identifier': {'width': 40}, 'dataset_status': { 'width': 20}, 'dataset_title': {'width': 40}, 'dataset_list_index': {'width': 20}, 'dataset_error_message': { 'width': 40}}} cell_styles = {'catalog': [{'alignment': Alignment(vertical= 'center')}, {'row': 1, 'font': Font(bold=True)}], 'dataset': [{ 'alignment': Alignment(vertical='center')}, {'row': 1, 'font': Font(bold=True)}]} writers.write_tables(tables=validation_lists, path=export_path, column_styles=column_styles, cell_styles=cell_styles) elif fmt == 'dict': return response elif fmt == 'list': return _catalog_validation_to_list(response) else: raise Exception('No se reconoce el formato {}'.format(fmt))
validate_catalog
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, only_errors=False, fmt='dict', export_path=None, validator=None\n ):\n catalog = readers.read_catalog(catalog)\n default_response = {'status': 'OK', 'error': {'catalog': {'status':\n 'OK', 'title': catalog.get('title'), 'errors': []}, 'dataset': [{\n 'status': 'OK', 'title': dataset.get('title'), 'identifier':\n dataset.get('identifier'), 'list_index': index, 'errors': []} for \n index, dataset in enumerate(catalog['dataset'])] if 'dataset' in\n catalog and isinstance(catalog['dataset'], list) else None}}\n if not validator:\n if hasattr(catalog, 'validator'):\n validator = catalog.validator\n else:\n validator = create_validator()\n jsonschema_errors = list(validator.iter_errors(catalog))\n custom_errors = list(iter_custom_errors(catalog))\n errors = jsonschema_errors + custom_errors\n response = default_response.copy()\n for error in errors:\n response = _update_validation_response(error, response)\n if only_errors:\n response['error']['dataset'] = [dataset for dataset in response[\n 'error']['dataset'] if dataset['status'] == 'ERROR']\n if export_path:\n validation_lists = _catalog_validation_to_list(response)\n alignment = Alignment(wrap_text=True, shrink_to_fit=True, vertical=\n 'center')\n column_styles = {'catalog': {'catalog_status': {'width': 20},\n 'catalog_error_location': {'width': 40},\n 'catalog_error_message': {'width': 40}, 'catalog_title': {\n 'width': 20}}, 'dataset': {'dataset_error_location': {'width': \n 20}, 'dataset_identifier': {'width': 40}, 'dataset_status': {\n 'width': 20}, 'dataset_title': {'width': 40},\n 'dataset_list_index': {'width': 20}, 'dataset_error_message': {\n 'width': 40}}}\n cell_styles = {'catalog': [{'alignment': Alignment(vertical=\n 'center')}, {'row': 1, 'font': Font(bold=True)}], 'dataset': [{\n 'alignment': Alignment(vertical='center')}, {'row': 1, 'font':\n Font(bold=True)}]}\n writers.write_tables(tables=validation_lists, path=export_path,\n column_styles=column_styles, cell_styles=cell_styles)\n elif fmt == 'dict':\n return response\n elif fmt == 'list':\n return _catalog_validation_to_list(response)\n else:\n raise Exception('No se reconoce el formato {}'.format(fmt))\n", "role": "user" }, { "content": "validate_catalog", "role": "assistant" } ]
def x(catalog): try: if 'themeTaxonomy' in catalog: theme_ids = [theme['id'] for theme in catalog['themeTaxonomy']] dups = _find_dups(theme_ids) if len(dups) > 0: yield ce.ThemeIdRepeated(dups) for dataset_idx, dataset in enumerate(catalog['dataset']): for distribution_idx, distribution in enumerate(dataset[ 'distribution']): for attribute in ['downloadURL', 'fileName']: if not format_matches_extension(distribution, attribute): yield ce.ExtensionError(dataset_idx, distribution_idx, distribution, attribute) except Exception: logger.exception('Error de validación.')
iter_custom_errors
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog):\n try:\n if 'themeTaxonomy' in catalog:\n theme_ids = [theme['id'] for theme in catalog['themeTaxonomy']]\n dups = _find_dups(theme_ids)\n if len(dups) > 0:\n yield ce.ThemeIdRepeated(dups)\n for dataset_idx, dataset in enumerate(catalog['dataset']):\n for distribution_idx, distribution in enumerate(dataset[\n 'distribution']):\n for attribute in ['downloadURL', 'fileName']:\n if not format_matches_extension(distribution, attribute):\n yield ce.ExtensionError(dataset_idx,\n distribution_idx, distribution, attribute)\n except Exception:\n logger.exception('Error de validación.')\n", "role": "user" }, { "content": "iter_custom_errors", "role": "assistant" } ]
def x(response): rows_catalog = [] validation_result = {'catalog_title': response['error']['catalog'][ 'title'], 'catalog_status': response['error']['catalog']['status']} for error in response['error']['catalog']['errors']: catalog_result = dict(validation_result) catalog_result.update({'catalog_error_message': error['message'], 'catalog_error_location': ', '.join(error['path'])}) rows_catalog.append(catalog_result) if len(response['error']['catalog']['errors']) == 0: catalog_result = dict(validation_result) catalog_result.update({'catalog_error_message': None, 'catalog_error_location': None}) rows_catalog.append(catalog_result) rows_dataset = [] for dataset in response['error']['dataset']: validation_result = {'dataset_title': dataset['title'], 'dataset_identifier': dataset['identifier'], 'dataset_list_index': dataset['list_index'], 'dataset_status': dataset['status']} for error in dataset['errors']: dataset_result = dict(validation_result) dataset_result.update({'dataset_error_message': error['message' ], 'dataset_error_location': error['path'][-1]}) rows_dataset.append(dataset_result) if len(dataset['errors']) == 0: dataset_result = dict(validation_result) dataset_result.update({'dataset_error_message': None, 'dataset_error_location': None}) rows_dataset.append(dataset_result) return {'catalog': rows_catalog, 'dataset': rows_dataset}
catalog_validation_to_list
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(response):\n rows_catalog = []\n validation_result = {'catalog_title': response['error']['catalog'][\n 'title'], 'catalog_status': response['error']['catalog']['status']}\n for error in response['error']['catalog']['errors']:\n catalog_result = dict(validation_result)\n catalog_result.update({'catalog_error_message': error['message'],\n 'catalog_error_location': ', '.join(error['path'])})\n rows_catalog.append(catalog_result)\n if len(response['error']['catalog']['errors']) == 0:\n catalog_result = dict(validation_result)\n catalog_result.update({'catalog_error_message': None,\n 'catalog_error_location': None})\n rows_catalog.append(catalog_result)\n rows_dataset = []\n for dataset in response['error']['dataset']:\n validation_result = {'dataset_title': dataset['title'],\n 'dataset_identifier': dataset['identifier'],\n 'dataset_list_index': dataset['list_index'], 'dataset_status':\n dataset['status']}\n for error in dataset['errors']:\n dataset_result = dict(validation_result)\n dataset_result.update({'dataset_error_message': error['message'\n ], 'dataset_error_location': error['path'][-1]})\n rows_dataset.append(dataset_result)\n if len(dataset['errors']) == 0:\n dataset_result = dict(validation_result)\n dataset_result.update({'dataset_error_message': None,\n 'dataset_error_location': None})\n rows_dataset.append(dataset_result)\n return {'catalog': rows_catalog, 'dataset': rows_dataset}\n", "role": "user" }, { "content": "catalog_validation_to_list", "role": "assistant" } ]
def x(distribution, attribute): if attribute in distribution and 'format' in distribution: if '/' in distribution['format']: possible_format_extensions = mimetypes.guess_all_extensions( distribution['format']) else: possible_format_extensions = ['.' + distribution['format'].lower()] file_name = urlparse(distribution[attribute]).path extension = os.path.splitext(file_name)[-1].lower() if attribute == 'downloadURL' and not extension: return True if extension.lower().replace('.', '') in EXTENSIONS_EXCEPTIONS: return True if extension not in possible_format_extensions: return False return True
format_matches_extension
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(distribution, attribute):\n if attribute in distribution and 'format' in distribution:\n if '/' in distribution['format']:\n possible_format_extensions = mimetypes.guess_all_extensions(\n distribution['format'])\n else:\n possible_format_extensions = ['.' + distribution['format'].lower()]\n file_name = urlparse(distribution[attribute]).path\n extension = os.path.splitext(file_name)[-1].lower()\n if attribute == 'downloadURL' and not extension:\n return True\n if extension.lower().replace('.', '') in EXTENSIONS_EXCEPTIONS:\n return True\n if extension not in possible_format_extensions:\n return False\n return True\n", "role": "user" }, { "content": "format_matches_extension", "role": "assistant" } ]
def x(catalog): res = validate_catalog(catalog, only_errors=True, fmt='list', export_path=None, validator=None) print('') print('=== Errores a nivel de CATALOGO ===') pprint(res['catalog']) print('') print('=== Errores a nivel de DATASET ===') pprint(res['dataset'])
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog):\n res = validate_catalog(catalog, only_errors=True, fmt='list',\n export_path=None, validator=None)\n print('')\n print('=== Errores a nivel de CATALOGO ===')\n pprint(res['catalog'])\n print('')\n print('=== Errores a nivel de DATASET ===')\n pprint(res['dataset'])\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(catalog, owner_org, dataset_origin_identifier, portal_url, apikey, catalog_id=None, demote_superThemes=True, demote_themes=True, download_strategy=None, generate_new_access_url=None): dataset = catalog.get_dataset(dataset_origin_identifier) ckan_portal = RemoteCKAN(portal_url, apikey=apikey) package = map_dataset_to_package(catalog, dataset, owner_org, catalog_id, demote_superThemes, demote_themes) if dataset.get('license'): license_list = ckan_portal.call_action('license_list') try: ckan_license = next(license_item for license_item in license_list if license_item['title'] == dataset['license'] or license_item['url'] == dataset['license']) package['license_id'] = ckan_license['id'] except StopIteration: package['license_id'] = 'notspecified' else: package['license_id'] = 'notspecified' try: pushed_package = ckan_portal.call_action('package_update', data_dict=package) except NotFound: pushed_package = ckan_portal.call_action('package_create', data_dict=package) with resource_files_download(catalog, dataset.get('distribution', []), download_strategy) as resource_files: resources_update(portal_url, apikey, dataset.get('distribution', [] ), resource_files, generate_new_access_url, catalog_id) ckan_portal.close() return pushed_package['id']
push_dataset_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, owner_org, dataset_origin_identifier, portal_url, apikey,\n catalog_id=None, demote_superThemes=True, demote_themes=True,\n download_strategy=None, generate_new_access_url=None):\n dataset = catalog.get_dataset(dataset_origin_identifier)\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n package = map_dataset_to_package(catalog, dataset, owner_org,\n catalog_id, demote_superThemes, demote_themes)\n if dataset.get('license'):\n license_list = ckan_portal.call_action('license_list')\n try:\n ckan_license = next(license_item for license_item in\n license_list if license_item['title'] == dataset['license'] or\n license_item['url'] == dataset['license'])\n package['license_id'] = ckan_license['id']\n except StopIteration:\n package['license_id'] = 'notspecified'\n else:\n package['license_id'] = 'notspecified'\n try:\n pushed_package = ckan_portal.call_action('package_update',\n data_dict=package)\n except NotFound:\n pushed_package = ckan_portal.call_action('package_create',\n data_dict=package)\n with resource_files_download(catalog, dataset.get('distribution', []),\n download_strategy) as resource_files:\n resources_update(portal_url, apikey, dataset.get('distribution', []\n ), resource_files, generate_new_access_url, catalog_id)\n ckan_portal.close()\n return pushed_package['id']\n", "role": "user" }, { "content": "push_dataset_to_ckan", "role": "assistant" } ]
def x(portal_url, apikey, distributions, resource_files, generate_new_access_url=None, catalog_id=None): ckan_portal = RemoteCKAN(portal_url, apikey=apikey) result = [] generate_new_access_url = generate_new_access_url or [] for distribution in distributions: updated = False resource_id = catalog_id + '_' + distribution['identifier' ] if catalog_id else distribution['identifier'] fields = {'id': resource_id} if distribution['identifier'] in generate_new_access_url: fields.update({'accessURL': ''}) updated = True if distribution['identifier'] in resource_files: fields.update({'resource_type': 'file.upload', 'upload': open( resource_files[distribution['identifier']], 'rb')}) updated = True if updated: try: pushed = ckan_portal.action.resource_patch(**fields) result.append(pushed['id']) except CKANAPIError as e: logger.exception( 'Error subiendo recurso {} a la distribución {}: {}'. format(resource_files[distribution['identifier']], resource_files, str(e))) return result
resources_update
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(portal_url, apikey, distributions, resource_files,\n generate_new_access_url=None, catalog_id=None):\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n result = []\n generate_new_access_url = generate_new_access_url or []\n for distribution in distributions:\n updated = False\n resource_id = catalog_id + '_' + distribution['identifier'\n ] if catalog_id else distribution['identifier']\n fields = {'id': resource_id}\n if distribution['identifier'] in generate_new_access_url:\n fields.update({'accessURL': ''})\n updated = True\n if distribution['identifier'] in resource_files:\n fields.update({'resource_type': 'file.upload', 'upload': open(\n resource_files[distribution['identifier']], 'rb')})\n updated = True\n if updated:\n try:\n pushed = ckan_portal.action.resource_patch(**fields)\n result.append(pushed['id'])\n except CKANAPIError as e:\n logger.exception(\n 'Error subiendo recurso {} a la distribución {}: {}'.\n format(resource_files[distribution['identifier']],\n resource_files, str(e)))\n return result\n", "role": "user" }, { "content": "resources_update", "role": "assistant" } ]
def x(portal_url, apikey, filter_in=None, filter_out=None, only_time_series =False, organization=None): ckan_portal = RemoteCKAN(portal_url, apikey=apikey) identifiers = [] datajson_filters = filter_in or filter_out or only_time_series if datajson_filters: identifiers += get_datasets(portal_url + '/data.json', filter_in= filter_in, filter_out=filter_out, only_time_series= only_time_series, meta_field='identifier') if organization: query = 'organization:"' + organization + '"' search_result = ckan_portal.call_action('package_search', data_dict ={'q': query, 'rows': 500, 'start': 0}) org_identifiers = [dataset['id'] for dataset in search_result[ 'results']] start = 500 while search_result['count'] > start: search_result = ckan_portal.call_action('package_search', data_dict={'q': query, 'rows': 500, 'start': start}) org_identifiers += [dataset['id'] for dataset in search_result[ 'results']] start += 500 if datajson_filters: identifiers = set(identifiers).intersection(set(org_identifiers)) else: identifiers = org_identifiers for identifier in identifiers: ckan_portal.call_action('dataset_purge', data_dict={'id': identifier})
remove_datasets_from_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(portal_url, apikey, filter_in=None, filter_out=None, only_time_series\n =False, organization=None):\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n identifiers = []\n datajson_filters = filter_in or filter_out or only_time_series\n if datajson_filters:\n identifiers += get_datasets(portal_url + '/data.json', filter_in=\n filter_in, filter_out=filter_out, only_time_series=\n only_time_series, meta_field='identifier')\n if organization:\n query = 'organization:\"' + organization + '\"'\n search_result = ckan_portal.call_action('package_search', data_dict\n ={'q': query, 'rows': 500, 'start': 0})\n org_identifiers = [dataset['id'] for dataset in search_result[\n 'results']]\n start = 500\n while search_result['count'] > start:\n search_result = ckan_portal.call_action('package_search',\n data_dict={'q': query, 'rows': 500, 'start': start})\n org_identifiers += [dataset['id'] for dataset in search_result[\n 'results']]\n start += 500\n if datajson_filters:\n identifiers = set(identifiers).intersection(set(org_identifiers))\n else:\n identifiers = org_identifiers\n for identifier in identifiers:\n ckan_portal.call_action('dataset_purge', data_dict={'id': identifier})\n", "role": "user" }, { "content": "remove_datasets_from_ckan", "role": "assistant" } ]
def x(catalog, portal_url, apikey, identifier=None, label=None): ckan_portal = RemoteCKAN(portal_url, apikey=apikey) theme = catalog.get_theme(identifier=identifier, label=label) group = map_theme_to_group(theme) pushed_group = ckan_portal.call_action('group_create', data_dict=group) return pushed_group['name']
push_theme_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, portal_url, apikey, identifier=None, label=None):\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n theme = catalog.get_theme(identifier=identifier, label=label)\n group = map_theme_to_group(theme)\n pushed_group = ckan_portal.call_action('group_create', data_dict=group)\n return pushed_group['name']\n", "role": "user" }, { "content": "push_theme_to_ckan", "role": "assistant" } ]
def x(catalog, owner_org, dataset_origin_identifier, portal_url, apikey, download_strategy=None, generate_new_access_url=None): return push_dataset_to_ckan(catalog, owner_org, dataset_origin_identifier, portal_url, apikey, None, False, False, download_strategy, generate_new_access_url)
restore_dataset_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, owner_org, dataset_origin_identifier, portal_url, apikey,\n download_strategy=None, generate_new_access_url=None):\n return push_dataset_to_ckan(catalog, owner_org,\n dataset_origin_identifier, portal_url, apikey, None, False, False,\n download_strategy, generate_new_access_url)\n", "role": "user" }, { "content": "restore_dataset_to_ckan", "role": "assistant" } ]
def x(catalog, owner_org, dataset_origin_identifier, portal_url, apikey, catalog_id, download_strategy=None): return push_dataset_to_ckan(catalog, owner_org, dataset_origin_identifier, portal_url, apikey, catalog_id= catalog_id, download_strategy=download_strategy)
harvest_dataset_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, owner_org, dataset_origin_identifier, portal_url, apikey,\n catalog_id, download_strategy=None):\n return push_dataset_to_ckan(catalog, owner_org,\n dataset_origin_identifier, portal_url, apikey, catalog_id=\n catalog_id, download_strategy=download_strategy)\n", "role": "user" }, { "content": "harvest_dataset_to_ckan", "role": "assistant" } ]
def x(catalog, portal_url, apikey, catalog_id, dataset_list=None, owner_org =None, download_strategy=None): harvested = [] if dataset_list is None: try: dataset_list = [ds['identifier'] for ds in catalog.datasets] except KeyError: logger.exception('Hay datasets sin identificadores') return harvested owner_org = owner_org or catalog_id errors = {} for dataset_id in dataset_list: try: harvested_id = harvest_dataset_to_ckan(catalog, owner_org, dataset_id, portal_url, apikey, catalog_id, download_strategy) harvested.append(harvested_id) except Exception as e: msg = ( 'Error federando catalogo: %s, dataset: %s al portal: %s\n' % (catalog_id, dataset_id, portal_url)) msg += str(e) logger.error(msg) errors[dataset_id] = str(e) return harvested, errors
harvest_catalog_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, portal_url, apikey, catalog_id, dataset_list=None, owner_org\n =None, download_strategy=None):\n harvested = []\n if dataset_list is None:\n try:\n dataset_list = [ds['identifier'] for ds in catalog.datasets]\n except KeyError:\n logger.exception('Hay datasets sin identificadores')\n return harvested\n owner_org = owner_org or catalog_id\n errors = {}\n for dataset_id in dataset_list:\n try:\n harvested_id = harvest_dataset_to_ckan(catalog, owner_org,\n dataset_id, portal_url, apikey, catalog_id, download_strategy)\n harvested.append(harvested_id)\n except Exception as e:\n msg = (\n 'Error federando catalogo: %s, dataset: %s al portal: %s\\n' %\n (catalog_id, dataset_id, portal_url))\n msg += str(e)\n logger.error(msg)\n errors[dataset_id] = str(e)\n return harvested, errors\n", "role": "user" }, { "content": "harvest_catalog_to_ckan", "role": "assistant" } ]
def x(catalog, portal_url, apikey): ckan_portal = RemoteCKAN(portal_url, apikey=apikey) existing_themes = ckan_portal.call_action('group_list') new_themes = [theme['id'] for theme in catalog['themeTaxonomy'] if theme['id'] not in existing_themes] pushed_names = [] for new_theme in new_themes: name = push_theme_to_ckan(catalog, portal_url, apikey, identifier= new_theme) pushed_names.append(name) return pushed_names
push_new_themes
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, portal_url, apikey):\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n existing_themes = ckan_portal.call_action('group_list')\n new_themes = [theme['id'] for theme in catalog['themeTaxonomy'] if \n theme['id'] not in existing_themes]\n pushed_names = []\n for new_theme in new_themes:\n name = push_theme_to_ckan(catalog, portal_url, apikey, identifier=\n new_theme)\n pushed_names.append(name)\n return pushed_names\n", "role": "user" }, { "content": "push_new_themes", "role": "assistant" } ]
def x(portal_url, org_id): ckan_portal = RemoteCKAN(portal_url) return ckan_portal.call_action('organization_show', data_dict={'id': org_id})
get_organization_from_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(portal_url, org_id):\n ckan_portal = RemoteCKAN(portal_url)\n return ckan_portal.call_action('organization_show', data_dict={'id':\n org_id})\n", "role": "user" }, { "content": "get_organization_from_ckan", "role": "assistant" } ]
def x(portal_url, apikey, org_tree, parent=None): created = [] for node in org_tree: pushed_org = push_organization_to_ckan(portal_url, apikey, node, parent=parent) if pushed_org['success']: pushed_org['children'] = push_organization_tree_to_ckan(portal_url, apikey, node['children'], parent=node['name']) created.append(pushed_org) return created
push_organization_tree_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(portal_url, apikey, org_tree, parent=None):\n created = []\n for node in org_tree:\n pushed_org = push_organization_to_ckan(portal_url, apikey, node,\n parent=parent)\n if pushed_org['success']:\n pushed_org['children'] = push_organization_tree_to_ckan(portal_url,\n apikey, node['children'], parent=node['name'])\n created.append(pushed_org)\n return created\n", "role": "user" }, { "content": "push_organization_tree_to_ckan", "role": "assistant" } ]
def x(portal_url, apikey, organization, parent=None): portal = RemoteCKAN(portal_url, apikey=apikey) if parent: organization['groups'] = [{'name': parent}] try: pushed_org = portal.call_action('organization_create', data_dict= organization) pushed_org['success'] = True except Exception as e: logger.exception('Ocurrió un error creando la organización {}: {}'. format(organization['title'], str(e))) pushed_org = {'name': organization, 'success': False} return pushed_org
push_organization_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(portal_url, apikey, organization, parent=None):\n portal = RemoteCKAN(portal_url, apikey=apikey)\n if parent:\n organization['groups'] = [{'name': parent}]\n try:\n pushed_org = portal.call_action('organization_create', data_dict=\n organization)\n pushed_org['success'] = True\n except Exception as e:\n logger.exception('Ocurrió un error creando la organización {}: {}'.\n format(organization['title'], str(e)))\n pushed_org = {'name': organization, 'success': False}\n return pushed_org\n", "role": "user" }, { "content": "push_organization_to_ckan", "role": "assistant" } ]
def x(portal_url, apikey, organization_id): portal = RemoteCKAN(portal_url, apikey=apikey) try: portal.call_action('organization_purge', data_dict={'id': organization_id}) except Exception as e: logger.exception('Ocurrió un error borrando la organización {}: {}' .format(organization_id, str(e)))
remove_organization_from_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(portal_url, apikey, organization_id):\n portal = RemoteCKAN(portal_url, apikey=apikey)\n try:\n portal.call_action('organization_purge', data_dict={'id':\n organization_id})\n except Exception as e:\n logger.exception('Ocurrió un error borrando la organización {}: {}'\n .format(organization_id, str(e)))\n", "role": "user" }, { "content": "remove_organization_from_ckan", "role": "assistant" } ]
def x(portal_url, apikey, organization_list): for org in organization_list: remove_organization_from_ckan(portal_url, apikey, org)
remove_organizations_from_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(portal_url, apikey, organization_list):\n for org in organization_list:\n remove_organization_from_ckan(portal_url, apikey, org)\n", "role": "user" }, { "content": "remove_organizations_from_ckan", "role": "assistant" } ]
def x(catalog, owner_org, portal_url, apikey, dataset_list=None, download_strategy=None, generate_new_access_url=None): push_new_themes(catalog, portal_url, apikey) restored = [] if dataset_list is None: try: dataset_list = [ds['identifier'] for ds in catalog.datasets] except KeyError: logger.exception('Hay datasets sin identificadores') return restored for dataset_id in dataset_list: try: restored_id = restore_dataset_to_ckan(catalog, owner_org, dataset_id, portal_url, apikey, download_strategy, generate_new_access_url) restored.append(restored_id) except (CKANAPIError, KeyError, AttributeError) as e: logger.exception('Ocurrió un error restaurando el dataset {}: {}' .format(dataset_id, str(e))) return restored
restore_organization_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, owner_org, portal_url, apikey, dataset_list=None,\n download_strategy=None, generate_new_access_url=None):\n push_new_themes(catalog, portal_url, apikey)\n restored = []\n if dataset_list is None:\n try:\n dataset_list = [ds['identifier'] for ds in catalog.datasets]\n except KeyError:\n logger.exception('Hay datasets sin identificadores')\n return restored\n for dataset_id in dataset_list:\n try:\n restored_id = restore_dataset_to_ckan(catalog, owner_org,\n dataset_id, portal_url, apikey, download_strategy,\n generate_new_access_url)\n restored.append(restored_id)\n except (CKANAPIError, KeyError, AttributeError) as e:\n logger.exception('Ocurrió un error restaurando el dataset {}: {}'\n .format(dataset_id, str(e)))\n return restored\n", "role": "user" }, { "content": "restore_organization_to_ckan", "role": "assistant" } ]
def x(catalog, origin_portal_url, destination_portal_url, apikey, download_strategy=None, generate_new_access_url=None): catalog['homepage'] = catalog.get('homepage') or origin_portal_url res = {} origin_portal = RemoteCKAN(origin_portal_url) try: org_list = origin_portal.action.organization_list() except CKANAPIError as e: logger.exception( 'Ocurrió un error buscando las organizaciones del portal {}: {}' .format(origin_portal_url, str(e))) print(e) return res for org in org_list: print('Restaurando organizacion {}'.format(org)) response = origin_portal.action.organization_show(id=org, include_datasets=True) datasets = [package['id'] for package in response['packages']] pushed_datasets = restore_organization_to_ckan(catalog, org, destination_portal_url, apikey, dataset_list=datasets, download_strategy=download_strategy, generate_new_access_url= generate_new_access_url) res[org] = pushed_datasets return res
restore_catalog_to_ckan
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, origin_portal_url, destination_portal_url, apikey,\n download_strategy=None, generate_new_access_url=None):\n catalog['homepage'] = catalog.get('homepage') or origin_portal_url\n res = {}\n origin_portal = RemoteCKAN(origin_portal_url)\n try:\n org_list = origin_portal.action.organization_list()\n except CKANAPIError as e:\n logger.exception(\n 'Ocurrió un error buscando las organizaciones del portal {}: {}'\n .format(origin_portal_url, str(e)))\n print(e)\n return res\n for org in org_list:\n print('Restaurando organizacion {}'.format(org))\n response = origin_portal.action.organization_show(id=org,\n include_datasets=True)\n datasets = [package['id'] for package in response['packages']]\n pushed_datasets = restore_organization_to_ckan(catalog, org,\n destination_portal_url, apikey, dataset_list=datasets,\n download_strategy=download_strategy, generate_new_access_url=\n generate_new_access_url)\n res[org] = pushed_datasets\n return res\n", "role": "user" }, { "content": "restore_catalog_to_ckan", "role": "assistant" } ]
def x(components: List, input_config: Mapping=None, plugin_config: Mapping=None ) ->InteractiveContext: config = build_simulation_configuration() config.update(input_config) plugin_manager = PluginManager(plugin_config) return InteractiveContext(config, components, plugin_manager)
initialize_simulation
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(components: List, input_config: Mapping=None, plugin_config: Mapping=None\n ) ->InteractiveContext:\n config = build_simulation_configuration()\n config.update(input_config)\n plugin_manager = PluginManager(plugin_config)\n return InteractiveContext(config, components, plugin_manager)\n", "role": "user" }, { "content": "initialize_simulation", "role": "assistant" } ]
def x(components: List, input_config: Mapping=None, plugin_config: Mapping=None ) ->InteractiveContext: simulation = initialize_simulation(components, input_config, plugin_config) simulation.setup() return simulation
setup_simulation
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(components: List, input_config: Mapping=None, plugin_config: Mapping=None\n ) ->InteractiveContext:\n simulation = initialize_simulation(components, input_config, plugin_config)\n simulation.setup()\n return simulation\n", "role": "user" }, { "content": "setup_simulation", "role": "assistant" } ]
def x(model_specification_file: str) ->InteractiveContext: model_specification = build_model_specification(model_specification_file) plugin_config = model_specification.plugins component_config = model_specification.components simulation_config = model_specification.configuration plugin_manager = PluginManager(plugin_config) component_config_parser = plugin_manager.get_plugin( 'component_configuration_parser') components = component_config_parser.get_components(component_config) return InteractiveContext(simulation_config, components, plugin_manager)
initialize_simulation_from_model_specification
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(model_specification_file: str) ->InteractiveContext:\n model_specification = build_model_specification(model_specification_file)\n plugin_config = model_specification.plugins\n component_config = model_specification.components\n simulation_config = model_specification.configuration\n plugin_manager = PluginManager(plugin_config)\n component_config_parser = plugin_manager.get_plugin(\n 'component_configuration_parser')\n components = component_config_parser.get_components(component_config)\n return InteractiveContext(simulation_config, components, plugin_manager)\n", "role": "user" }, { "content": "initialize_simulation_from_model_specification", "role": "assistant" } ]
def x(model_specification_file: str) ->InteractiveContext: simulation = initialize_simulation_from_model_specification( model_specification_file) simulation.setup() return simulation
setup_simulation_from_model_specification
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(model_specification_file: str) ->InteractiveContext:\n simulation = initialize_simulation_from_model_specification(\n model_specification_file)\n simulation.setup()\n return simulation\n", "role": "user" }, { "content": "setup_simulation_from_model_specification", "role": "assistant" } ]
def x(self): super().setup() self._start_time = self.clock.time self.initialize_simulants()
setup
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n super().setup()\n self._start_time = self.clock.time\n self.initialize_simulants()\n", "role": "user" }, { "content": "setup", "role": "assistant" } ]
def x(self): super().initialize_simulants() self._initial_population = self.population.get_population(True)
initialize_simulants
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n super().initialize_simulants()\n self._initial_population = self.population.get_population(True)\n", "role": "user" }, { "content": "initialize_simulants", "role": "assistant" } ]
def x(self): warnings.warn( 'This reset method is very crude. It should work for many simple simulations, but we make no guarantees. In particular, if you have components that manage their own state in any way, this might not work.' ) self.population._population = self._initial_population self.clock._time = self._start_time
reset
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n warnings.warn(\n 'This reset method is very crude. It should work for many simple simulations, but we make no guarantees. In particular, if you have components that manage their own state in any way, this might not work.'\n )\n self.population._population = self._initial_population\n self.clock._time = self._start_time\n", "role": "user" }, { "content": "reset", "role": "assistant" } ]
def x(self, with_logging: bool=True) ->int: return self.run_until(self.clock.stop_time, with_logging=with_logging)
run
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, with_logging: bool=True) ->int:\n return self.run_until(self.clock.stop_time, with_logging=with_logging)\n", "role": "user" }, { "content": "run", "role": "assistant" } ]
def x(self, duration: Timedelta, with_logging: bool=True) ->int: return self.run_until(self.clock.time + duration, with_logging=with_logging )
run_for
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, duration: Timedelta, with_logging: bool=True) ->int:\n return self.run_until(self.clock.time + duration, with_logging=with_logging\n )\n", "role": "user" }, { "content": "run_for", "role": "assistant" } ]
def x(self, end_time: Time, with_logging=True) ->int: if not isinstance(end_time, type(self.clock.time)): raise ValueError( f'Provided time must be an instance of {type(self.clock.time)}') iterations = int(ceil((end_time - self.clock.time) / self.clock.step_size)) self.take_steps(number_of_steps=iterations, with_logging=with_logging) assert self.clock.time - self.clock.step_size < end_time <= self.clock.time return iterations
run_until
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, end_time: Time, with_logging=True) ->int:\n if not isinstance(end_time, type(self.clock.time)):\n raise ValueError(\n f'Provided time must be an instance of {type(self.clock.time)}')\n iterations = int(ceil((end_time - self.clock.time) / self.clock.step_size))\n self.take_steps(number_of_steps=iterations, with_logging=with_logging)\n assert self.clock.time - self.clock.step_size < end_time <= self.clock.time\n return iterations\n", "role": "user" }, { "content": "run_until", "role": "assistant" } ]
def x(self, step_size: Timedelta=None): old_step_size = self.clock.step_size if step_size is not None: if not isinstance(step_size, type(self.clock.step_size)): raise ValueError( f'Provided time must be an instance of {type(self.clock.step_size)}' ) self.clock._step_size = step_size super().step() self.clock._step_size = old_step_size
step
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, step_size: Timedelta=None):\n old_step_size = self.clock.step_size\n if step_size is not None:\n if not isinstance(step_size, type(self.clock.step_size)):\n raise ValueError(\n f'Provided time must be an instance of {type(self.clock.step_size)}'\n )\n self.clock._step_size = step_size\n super().step()\n self.clock._step_size = old_step_size\n", "role": "user" }, { "content": "step", "role": "assistant" } ]
def x(self, number_of_steps: int=1, step_size: Timedelta=None, with_logging:
    bool=True):
    if not isinstance(number_of_steps, int):
        raise ValueError('Number of steps must be an integer.')
    if run_from_ipython() and with_logging:
        for _ in log_progress(range(number_of_steps), name='Step'):
            self.step(step_size)
    else:
        for _ in range(number_of_steps):
            self.step(step_size)
take_steps
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, number_of_steps: int=1, step_size: Timedelta=None, with_logging:\n bool=True):\n if not isinstance(number_of_steps, int):\n raise ValueError('Number of steps must be an integer.')\n if run_from_ipython() and with_logging:\n for _ in log_progress(range(number_of_steps), name='Step'):\n self.step(step_size)\n else:\n for _ in range(number_of_steps):\n self.step(step_size)\n", "role": "user" }, { "content": "take_steps", "role": "assistant" } ]
def x(self, untracked: bool=False) ->pd.DataFrame:
    return self.population.get_population(untracked)
get_population
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, untracked: bool=False) ->pd.DataFrame:\n return self.population.get_population(untracked)\n", "role": "user" }, { "content": "get_population", "role": "assistant" } ]
def x(self, event_type: str) ->List[Callable]:
    if event_type not in self.events:
        raise ValueError(f'No event {event_type} in system.')
    return self.events.get_listeners(event_type)
get_listeners
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, event_type: str) ->List[Callable]:\n if event_type not in self.events:\n raise ValueError(f'No event {event_type} in system.')\n return self.events.get_listeners(event_type)\n", "role": "user" }, { "content": "get_listeners", "role": "assistant" } ]
def x(self, event_type: str) ->Callable:
    if event_type not in self.events:
        raise ValueError(f'No event {event_type} in system.')
    return self.events.get_emitter(event_type)
get_emitter
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, event_type: str) ->Callable:\n if event_type not in self.events:\n raise ValueError(f'No event {event_type} in system.')\n return self.events.get_emitter(event_type)\n", "role": "user" }, { "content": "get_emitter", "role": "assistant" } ]
def x(self) ->List:
    return [component for component in self.component_manager._components +
        self.component_manager._managers]
get_components
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->List:\n return [component for component in self.component_manager._components +\n self.component_manager._managers]\n", "role": "user" }, { "content": "get_components", "role": "assistant" } ]
def x(self, data, key_columns=('sex',), parameter_columns=(['age',
    'age_group_start', 'age_group_end'], ['year', 'year_start', 'year_end']
    ), value_columns=None) ->LookupTable:
    return self._lookup_table_manager.build_table(data, key_columns,
        parameter_columns, value_columns)
build_table
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data, key_columns=('sex',), parameter_columns=(['age',\n 'age_group_start', 'age_group_end'], ['year', 'year_start', 'year_end']\n ), value_columns=None) ->LookupTable:\n return self._lookup_table_manager.build_table(data, key_columns,\n parameter_columns, value_columns)\n", "role": "user" }, { "content": "build_table", "role": "assistant" } ]
def x(data, parameter_columns):
    param_edges = [p[1:] for p in parameter_columns if isinstance(p, (Tuple,
        List))]
    for p in param_edges:
        other_params = [p_ed[0] for p_ed in param_edges if p_ed != p]
        if other_params:
            sub_tables = data.groupby(list(other_params))
        else:
            sub_tables = {None: data}.items()
        n_p_total = len(set(data[p[0]]))
        for _, table in sub_tables:
            param_data = table[[p[0], p[1]]].copy().sort_values(by=p[0])
            start, end = param_data[p[0]].reset_index(drop=True), param_data[p
                [1]].reset_index(drop=True)
            if len(set(start)) < n_p_total:
                raise ValueError(
                    f'You must provide a value for every combination of {parameter_columns}.'
                    )
            if len(start) <= 1:
                continue
            for i in range(1, len(start)):
                e = end[i - 1]
                s = start[i]
                if e > s or s == start[i - 1]:
                    raise ValueError(
                        f'Parameter data must not contain overlaps. Parameter {p} contains overlapping data.'
                        )
                if e < s:
                    raise NotImplementedError(
                        f'Interpolation only supported for parameter columns with continuous bins. Parameter {p} contains non-continuous bins.'
                        )
check_data_complete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(data, parameter_columns):\n param_edges = [p[1:] for p in parameter_columns if isinstance(p, (Tuple,\n List))]\n for p in param_edges:\n other_params = [p_ed[0] for p_ed in param_edges if p_ed != p]\n if other_params:\n sub_tables = data.groupby(list(other_params))\n else:\n sub_tables = {None: data}.items()\n n_p_total = len(set(data[p[0]]))\n for _, table in sub_tables:\n param_data = table[[p[0], p[1]]].copy().sort_values(by=p[0])\n start, end = param_data[p[0]].reset_index(drop=True), param_data[p\n [1]].reset_index(drop=True)\n if len(set(start)) < n_p_total:\n raise ValueError(\n f'You must provide a value for every combination of {parameter_columns}.'\n )\n if len(start) <= 1:\n continue\n for i in range(1, len(start)):\n e = end[i - 1]\n s = start[i]\n if e > s or s == start[i - 1]:\n raise ValueError(\n f'Parameter data must not contain overlaps. Parameter {p} contains overlapping data.'\n )\n if e < s:\n raise NotImplementedError(\n f'Interpolation only supported for parameter columns with continuous bins. Parameter {p} contains non-continuous bins.'\n )\n", "role": "user" }, { "content": "check_data_complete", "role": "assistant" } ]
def x(self, key, path):
    sub_dir_path = os.path.join(self.results_root, path)
    os.makedirs(sub_dir_path, exist_ok=True)
    self._directories[key] = sub_dir_path
    return sub_dir_path
add_sub_directory
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, key, path):\n sub_dir_path = os.path.join(self.results_root, path)\n os.makedirs(sub_dir_path, exist_ok=True)\n self._directories[key] = sub_dir_path\n return sub_dir_path\n", "role": "user" }, { "content": "add_sub_directory", "role": "assistant" } ]
def x(self, data, file_name, key=None):
    path = os.path.join(self._directories[key], file_name)
    extension = file_name.split('.')[-1]
    if extension == 'yaml':
        with open(path, 'w') as f:
            yaml.dump(data, f)
    elif extension == 'hdf':
        categorical_columns = data.dtypes[data.dtypes == 'category'].index
        data.loc[:, categorical_columns] = data.loc[:, categorical_columns
            ].astype('object')
        data.to_hdf(path + 'update', 'data')
        if os.path.exists(path):
            os.remove(path)
        os.rename(path + 'update', path)
    else:
        raise NotImplementedError(
            f"Only 'yaml' and 'hdf' file types are supported. You requested {extension}"
            )
write_output
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, data, file_name, key=None):\n path = os.path.join(self._directories[key], file_name)\n extension = file_name.split('.')[-1]\n if extension == 'yaml':\n with open(path, 'w') as f:\n yaml.dump(data, f)\n elif extension == 'hdf':\n categorical_columns = data.dtypes[data.dtypes == 'category'].index\n data.loc[:, categorical_columns] = data.loc[:, categorical_columns\n ].astype('object')\n data.to_hdf(path + 'update', 'data')\n if os.path.exists(path):\n os.remove(path)\n os.rename(path + 'update', path)\n else:\n raise NotImplementedError(\n f\"Only 'yaml' and 'hdf' file types are supported. You requested {extension}\"\n )\n", "role": "user" }, { "content": "write_output", "role": "assistant" } ]
def x(self, src_path, file_name, key=None):
    path = os.path.join(self._directories[key], file_name)
    shutil.copyfile(src_path, path)
copy_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, src_path, file_name, key=None):\n path = os.path.join(self._directories[key], file_name)\n shutil.copyfile(src_path, path)\n", "role": "user" }, { "content": "copy_file", "role": "assistant" } ]
def x(self):
    daychunks = self.__split_file()
    if daychunks:
        maxcount = len(self.__splitpointers)
        for i in range(maxcount):
            start = self.__splitpointers[i]
            end = None
            if i < maxcount - 1:
                end = self.__splitpointers[i + 1]
            chunk = self.__get_chunk(start, end)
            parser = sarparse.Parser()
            cpu_usage, mem_usage, swp_usage, io_usage = parser._parse_file(
                parser._split_file(chunk))
            self.__sarinfos[self.__get_part_date(chunk)] = {'cpu':
                cpu_usage, 'mem': mem_usage, 'swap': swp_usage, 'io': io_usage}
            del cpu_usage
            del mem_usage
            del swp_usage
            del io_usage
            del parser
    return True
load_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n daychunks = self.__split_file()\n if daychunks:\n maxcount = len(self.__splitpointers)\n for i in range(maxcount):\n start = self.__splitpointers[i]\n end = None\n if i < maxcount - 1:\n end = self.__splitpointers[i + 1]\n chunk = self.__get_chunk(start, end)\n parser = sarparse.Parser()\n cpu_usage, mem_usage, swp_usage, io_usage = parser._parse_file(\n parser._split_file(chunk))\n self.__sarinfos[self.__get_part_date(chunk)] = {'cpu':\n cpu_usage, 'mem': mem_usage, 'swap': swp_usage, 'io': io_usage}\n del cpu_usage\n del mem_usage\n del swp_usage\n del io_usage\n del parser\n return True\n", "role": "user" }, { "content": "load_file", "role": "assistant" } ]
def x(self, start=0, end=None):
    piece = False
    if self.__filename and os.access(self.__filename, os.R_OK):
        fhandle = None
        try:
            fhandle = os.open(self.__filename, os.O_RDONLY)
        except OSError:
            print("Couldn't open file %s" % self.__filename)
            fhandle = None
        if fhandle:
            try:
                sarmap = mmap.mmap(fhandle, length=0, prot=mmap.PROT_READ)
            except (TypeError, IndexError):
                os.close(fhandle)
                traceback.print_exc()
                return False
            if not end:
                end = sarmap.size()
            try:
                sarmap.seek(start)
                piece = sarmap.read(end - start)
            except:
                traceback.print_exc()
            os.close(fhandle)
    return piece
get_chunk
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, start=0, end=None):\n piece = False\n if self.__filename and os.access(self.__filename, os.R_OK):\n fhandle = None\n try:\n fhandle = os.open(self.__filename, os.O_RDONLY)\n except OSError:\n print(\"Couldn't open file %s\" % self.__filename)\n fhandle = None\n if fhandle:\n try:\n sarmap = mmap.mmap(fhandle, length=0, prot=mmap.PROT_READ)\n except (TypeError, IndexError):\n os.close(fhandle)\n traceback.print_exc()\n return False\n if not end:\n end = sarmap.size()\n try:\n sarmap.seek(start)\n piece = sarmap.read(end - start)\n except:\n traceback.print_exc()\n os.close(fhandle)\n return piece\n", "role": "user" }, { "content": "get_chunk", "role": "assistant" } ]
def x(self):
    if self.__filename and os.access(self.__filename, os.R_OK):
        fhandle = None
        try:
            fhandle = os.open(self.__filename, os.O_RDONLY)
        except OSError:
            print("Couldn't open file %s" % self.__filename)
            fhandle = None
        if fhandle:
            try:
                sarmap = mmap.mmap(fhandle, length=0, prot=mmap.PROT_READ)
            except (TypeError, IndexError):
                os.close(fhandle)
                traceback.print_exc()
                return False
            sfpos = sarmap.find(PATTERN_MULTISPLIT, 0)
            while sfpos > -1:
                """Split by day found"""
                self.__splitpointers.append(sfpos)
                try:
                    sfpos = sarmap.find(PATTERN_MULTISPLIT, sfpos + 1)
                except ValueError:
                    print('ValueError on mmap.find()')
                    return True
    if self.__splitpointers:
        return True
    return False
split_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n if self.__filename and os.access(self.__filename, os.R_OK):\n fhandle = None\n try:\n fhandle = os.open(self.__filename, os.O_RDONLY)\n except OSError:\n print(\"Couldn't open file %s\" % self.__filename)\n fhandle = None\n if fhandle:\n try:\n sarmap = mmap.mmap(fhandle, length=0, prot=mmap.PROT_READ)\n except (TypeError, IndexError):\n os.close(fhandle)\n traceback.print_exc()\n return False\n sfpos = sarmap.find(PATTERN_MULTISPLIT, 0)\n while sfpos > -1:\n \"\"\"Split by day found\"\"\"\n self.__splitpointers.append(sfpos)\n try:\n sfpos = sarmap.find(PATTERN_MULTISPLIT, sfpos + 1)\n except ValueError:\n print('ValueError on mmap.find()')\n return True\n if self.__splitpointers:\n return True\n return False\n", "role": "user" }, { "content": "split_file", "role": "assistant" } ]
def x(self, part=''):
    if type(part) is not StringType:
        return False
    firstline = part.split('\n')[0]
    info = firstline.split()
    datevalue = ''
    try:
        datevalue = info[3]
    except KeyError:
        datevalue = False
    except:
        traceback.print_exc()
        datevalue = False
    return datevalue
get_part_date
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, part=''):\n if type(part) is not StringType:\n return False\n firstline = part.split('\\n')[0]\n info = firstline.split()\n datevalue = ''\n try:\n datevalue = info[3]\n except KeyError:\n datevalue = False\n except:\n traceback.print_exc()\n datevalue = False\n return datevalue\n", "role": "user" }, { "content": "get_part_date", "role": "assistant" } ]
def x(portal_url):
    portal = RemoteCKAN(portal_url)
    try:
        status = portal.call_action('status_show', requests_kwargs={
            'verify': False})
        packages_list = portal.call_action('package_list', requests_kwargs=
            {'verify': False})
        groups_list = portal.call_action('group_list', requests_kwargs={
            'verify': False})
        packages = []
        num_packages = len(packages_list)
        for index, pkg in enumerate(packages_list):
            msg = 'Leyendo dataset {} de {}'.format(index + 1, num_packages)
            logger.info(msg)
            packages.append(portal.call_action('package_show', {'id': pkg},
                requests_kwargs={'verify': False}))
            time.sleep(0.2)
        groups = [portal.call_action('group_show', {'id': grp},
            requests_kwargs={'verify': False}) for grp in groups_list]
        catalog = map_status_to_catalog(status)
        catalog['dataset'] = map_packages_to_datasets(packages, portal_url)
        catalog['themeTaxonomy'] = map_groups_to_themes(groups)
    except (CKANAPIError, RequestException) as e:
        logger.exception('Error al procesar el portal %s', portal_url,
            exc_info=True)
        raise NonParseableCatalog(portal_url, e)
    return catalog
read_ckan_catalog
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(portal_url):\n portal = RemoteCKAN(portal_url)\n try:\n status = portal.call_action('status_show', requests_kwargs={\n 'verify': False})\n packages_list = portal.call_action('package_list', requests_kwargs=\n {'verify': False})\n groups_list = portal.call_action('group_list', requests_kwargs={\n 'verify': False})\n packages = []\n num_packages = len(packages_list)\n for index, pkg in enumerate(packages_list):\n msg = 'Leyendo dataset {} de {}'.format(index + 1, num_packages)\n logger.info(msg)\n packages.append(portal.call_action('package_show', {'id': pkg},\n requests_kwargs={'verify': False}))\n time.sleep(0.2)\n groups = [portal.call_action('group_show', {'id': grp},\n requests_kwargs={'verify': False}) for grp in groups_list]\n catalog = map_status_to_catalog(status)\n catalog['dataset'] = map_packages_to_datasets(packages, portal_url)\n catalog['themeTaxonomy'] = map_groups_to_themes(groups)\n except (CKANAPIError, RequestException) as e:\n logger.exception('Error al procesar el portal %s', portal_url,\n exc_info=True)\n raise NonParseableCatalog(portal_url, e)\n return catalog\n", "role": "user" }, { "content": "read_ckan_catalog", "role": "assistant" } ]
def x(status):
    catalog = dict()
    catalog_mapping = {'site_title': 'title', 'site_description': 'description'
        }
    for status_key, catalog_key in iteritems(catalog_mapping):
        try:
            catalog[catalog_key] = status[status_key]
        except BaseException:
            logger.exception(
                """
            La clave '%s' no está en el endpoint de status. No se puede
            completar catalog['%s']."""
                , status_key, catalog_key)
    publisher_mapping = {'site_title': 'name', 'error_emails_to': 'mbox'}
    if any([(k in status) for k in publisher_mapping.keys()]):
        catalog['publisher'] = dict()
        for status_key, publisher_key in iteritems(publisher_mapping):
            try:
                catalog['publisher'][publisher_key] = status[status_key]
            except BaseException:
                logger.exception(
                    """
                La clave '%s' no está en el endpoint de status. No se puede
                completar catalog['publisher'['%s']."""
                    , status_key, publisher_key)
    else:
        logger.info(
            """
        No hay ninguna información sobre catalog['publisher'] en el endpoint
        de 'status'."""
            )
    catalog['superThemeTaxonomy'
        ] = 'http://datos.gob.ar/superThemeTaxonomy.json'
    return catalog
map_status_to_catalog
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(status):\n catalog = dict()\n catalog_mapping = {'site_title': 'title', 'site_description': 'description'\n }\n for status_key, catalog_key in iteritems(catalog_mapping):\n try:\n catalog[catalog_key] = status[status_key]\n except BaseException:\n logger.exception(\n \"\"\"\n La clave '%s' no está en el endpoint de status. No se puede\n completar catalog['%s'].\"\"\"\n , status_key, catalog_key)\n publisher_mapping = {'site_title': 'name', 'error_emails_to': 'mbox'}\n if any([(k in status) for k in publisher_mapping.keys()]):\n catalog['publisher'] = dict()\n for status_key, publisher_key in iteritems(publisher_mapping):\n try:\n catalog['publisher'][publisher_key] = status[status_key]\n except BaseException:\n logger.exception(\n \"\"\"\n La clave '%s' no está en el endpoint de status. No se puede\n completar catalog['publisher'['%s'].\"\"\"\n , status_key, publisher_key)\n else:\n logger.info(\n \"\"\"\n No hay ninguna información sobre catalog['publisher'] en el endpoint\n de 'status'.\"\"\"\n )\n catalog['superThemeTaxonomy'\n ] = 'http://datos.gob.ar/superThemeTaxonomy.json'\n return catalog\n", "role": "user" }, { "content": "map_status_to_catalog", "role": "assistant" } ]
def x(package, portal_url):
    dataset = dict()
    resources = package['resources']
    groups = package['groups']
    tags = package['tags']
    dataset_mapping = {'title': 'title', 'notes': 'description',
        'metadata_created': 'issued', 'metadata_modified': 'modified',
        'license_title': 'license', 'id': 'identifier', 'url': 'landingPage'}
    for package_key, dataset_key in iteritems(dataset_mapping):
        try:
            dataset[dataset_key] = package[package_key]
        except BaseException:
            logger.exception(
                """
            La clave '%s' no está en el endpoint 'package_show' para el
            package '%s'. No se puede completar dataset['%s']."""
                , package_key, package['name'], dataset_key)
    publisher_mapping = {'author': 'name', 'author_email': 'mbox'}
    if any([(k in package) for k in publisher_mapping.keys()]):
        dataset['publisher'] = dict()
        for package_key, publisher_key in iteritems(publisher_mapping):
            try:
                dataset['publisher'][publisher_key] = package[package_key]
            except BaseException:
                logger.exception(
                    """
                La clave '%s' no está en el endpoint 'package_show' para el
                package '%s'. No se puede completar
                dataset['publisher']['%s']."""
                    , package_key, package['name'], publisher_key)
    contact_point_mapping = {'maintainer': 'fn', 'maintainer_email': 'hasEmail'
        }
    if any([(k in package) for k in contact_point_mapping.keys()]):
        dataset['contactPoint'] = dict()
        for package_key, contact_key in iteritems(contact_point_mapping):
            try:
                dataset['contactPoint'][contact_key] = package[package_key]
            except BaseException:
                logger.exception(
                    """
                La clave '%s' no está en el endpoint 'package_show' para el
                package '%s'. No se puede completar
                dataset['contactPoint']['%s']."""
                    , package_key, package['name'], contact_key)
    if 'extras' in package:
        add_accrualPeriodicity(dataset, package)
        add_superTheme(dataset, package)
        add_temporal(dataset, package)
    dataset['distribution'] = map_resources_to_distributions(resources,
        portal_url)
    dataset['theme'] = [grp['name'] for grp in groups]
    dataset['keyword'] = [tag['name'] for tag in tags]
    return dataset
map_package_to_dataset
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(package, portal_url):\n dataset = dict()\n resources = package['resources']\n groups = package['groups']\n tags = package['tags']\n dataset_mapping = {'title': 'title', 'notes': 'description',\n 'metadata_created': 'issued', 'metadata_modified': 'modified',\n 'license_title': 'license', 'id': 'identifier', 'url': 'landingPage'}\n for package_key, dataset_key in iteritems(dataset_mapping):\n try:\n dataset[dataset_key] = package[package_key]\n except BaseException:\n logger.exception(\n \"\"\"\n La clave '%s' no está en el endpoint 'package_show' para el\n package '%s'. No se puede completar dataset['%s'].\"\"\"\n , package_key, package['name'], dataset_key)\n publisher_mapping = {'author': 'name', 'author_email': 'mbox'}\n if any([(k in package) for k in publisher_mapping.keys()]):\n dataset['publisher'] = dict()\n for package_key, publisher_key in iteritems(publisher_mapping):\n try:\n dataset['publisher'][publisher_key] = package[package_key]\n except BaseException:\n logger.exception(\n \"\"\"\n La clave '%s' no está en el endpoint 'package_show' para el\n package '%s'. No se puede completar\n dataset['publisher']['%s'].\"\"\"\n , package_key, package['name'], publisher_key)\n contact_point_mapping = {'maintainer': 'fn', 'maintainer_email': 'hasEmail'\n }\n if any([(k in package) for k in contact_point_mapping.keys()]):\n dataset['contactPoint'] = dict()\n for package_key, contact_key in iteritems(contact_point_mapping):\n try:\n dataset['contactPoint'][contact_key] = package[package_key]\n except BaseException:\n logger.exception(\n \"\"\"\n La clave '%s' no está en el endpoint 'package_show' para el\n package '%s'. No se puede completar\n dataset['contactPoint']['%s'].\"\"\"\n , package_key, package['name'], contact_key)\n if 'extras' in package:\n add_accrualPeriodicity(dataset, package)\n add_superTheme(dataset, package)\n add_temporal(dataset, package)\n dataset['distribution'] = map_resources_to_distributions(resources,\n portal_url)\n dataset['theme'] = [grp['name'] for grp in groups]\n dataset['keyword'] = [tag['name'] for tag in tags]\n return dataset\n", "role": "user" }, { "content": "map_package_to_dataset", "role": "assistant" } ]
def x(resource, portal_url):
    distribution = dict()
    distribution_mapping = {'url': 'downloadURL', 'name': 'title',
        'created': 'issued', 'description': 'description', 'format':
        'format', 'last_modified': 'modified', 'mimetype': 'mediaType',
        'size': 'byteSize', 'id': 'identifier'}
    for resource_key, distribution_key in iteritems(distribution_mapping):
        try:
            distribution[distribution_key] = resource[resource_key]
        except BaseException:
            logger.exception(
                """
            La clave '%s' no está en la metadata del 'resource' '%s'. No
            se puede completar distribution['%s']."""
                , resource_key, resource['name'], distribution_key)
    if 'attributesDescription' in resource:
        try:
            distribution['field'] = json.loads(resource[
                'attributesDescription'])
        except BaseException:
            logger.exception("Error parseando los fields del resource '%s'",
                resource['name'])
    url_path = ['dataset', resource['package_id'], 'resource', resource['id']]
    distribution['accessURL'] = urljoin(portal_url, '/'.join(url_path))
    return distribution
map_resource_to_distribution
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(resource, portal_url):\n distribution = dict()\n distribution_mapping = {'url': 'downloadURL', 'name': 'title',\n 'created': 'issued', 'description': 'description', 'format':\n 'format', 'last_modified': 'modified', 'mimetype': 'mediaType',\n 'size': 'byteSize', 'id': 'identifier'}\n for resource_key, distribution_key in iteritems(distribution_mapping):\n try:\n distribution[distribution_key] = resource[resource_key]\n except BaseException:\n logger.exception(\n \"\"\"\n La clave '%s' no está en la metadata del 'resource' '%s'. No\n se puede completar distribution['%s'].\"\"\"\n , resource_key, resource['name'], distribution_key)\n if 'attributesDescription' in resource:\n try:\n distribution['field'] = json.loads(resource[\n 'attributesDescription'])\n except BaseException:\n logger.exception(\"Error parseando los fields del resource '%s'\",\n resource['name'])\n url_path = ['dataset', resource['package_id'], 'resource', resource['id']]\n distribution['accessURL'] = urljoin(portal_url, '/'.join(url_path))\n return distribution\n", "role": "user" }, { "content": "map_resource_to_distribution", "role": "assistant" } ]
def x(group):
    theme = dict()
    theme_mapping = {'name': 'id', 'title': 'label', 'description':
        'description'}
    for group_key, theme_key in iteritems(theme_mapping):
        try:
            theme[theme_key] = group[group_key]
        except BaseException:
            logger.exception(
                """
            La clave '%s' no está en la metadata del 'group' '%s'. No
            se puede completar theme['%s']."""
                , group_key, theme['name'], theme_key)
    return theme
map_group_to_theme
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(group):\n theme = dict()\n theme_mapping = {'name': 'id', 'title': 'label', 'description':\n 'description'}\n for group_key, theme_key in iteritems(theme_mapping):\n try:\n theme[theme_key] = group[group_key]\n except BaseException:\n logger.exception(\n \"\"\"\n La clave '%s' no está en la metadata del 'group' '%s'. No\n se puede completar theme['%s'].\"\"\"\n , group_key, theme['name'], theme_key)\n return theme\n", "role": "user" }, { "content": "map_group_to_theme", "role": "assistant" } ]
def x(tables, path, column_styles=None, cell_styles=None, tables_fields=
    None, tables_names=None):
    assert isinstance(path, string_types), '`path` debe ser un string'
    assert isinstance(tables, dict), '`table` es dict de listas de dicts'
    suffix = path.split('.')[-1]
    if suffix == 'csv':
        for table_name, table in tables:
            root_path = ''.join(path.split('.')[:-1])
            table_path = '{}_{}.csv'.format(root_path, table_name)
            _write_csv_table(table, table_path)
    elif suffix == 'xlsx':
        return _write_xlsx_table(tables, path, column_styles, cell_styles,
            tables_fields=tables_fields, tables_names=tables_names)
    else:
        raise ValueError(
            '\n{} no es un sufijo reconocido. Pruebe con .csv o.xlsx'.
            format(suffix))
write_tables
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(tables, path, column_styles=None, cell_styles=None, tables_fields=\n None, tables_names=None):\n assert isinstance(path, string_types), '`path` debe ser un string'\n assert isinstance(tables, dict), '`table` es dict de listas de dicts'\n suffix = path.split('.')[-1]\n if suffix == 'csv':\n for table_name, table in tables:\n root_path = ''.join(path.split('.')[:-1])\n table_path = '{}_{}.csv'.format(root_path, table_name)\n _write_csv_table(table, table_path)\n elif suffix == 'xlsx':\n return _write_xlsx_table(tables, path, column_styles, cell_styles,\n tables_fields=tables_fields, tables_names=tables_names)\n else:\n raise ValueError(\n '\\n{} no es un sufijo reconocido. Pruebe con .csv o.xlsx'.\n format(suffix))\n", "role": "user" }, { "content": "write_tables", "role": "assistant" } ]
def x(table, path, column_styles=None, cell_styles=None):
    assert isinstance(path, string_types), '`path` debe ser un string'
    assert isinstance(table, list), '`table` debe ser una lista de dicts'
    if len(table) == 0:
        logger.warning('Tabla vacia: no se genera ninguna archivo.')
        return
    if not helpers.is_list_of_matching_dicts(table):
        raise ValueError(
            """
La lista ingresada no esta formada por diccionarios con las mismas claves."""
            )
    suffix = path.split('.')[-1]
    if suffix == 'csv':
        return _write_csv_table(table, path)
    elif suffix == 'xlsx':
        return _write_xlsx_table(table, path, column_styles, cell_styles)
    else:
        raise ValueError(
            '\n{} no es un sufijo reconocido. Pruebe con .csv o.xlsx'.
            format(suffix))
write_table
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(table, path, column_styles=None, cell_styles=None):\n assert isinstance(path, string_types), '`path` debe ser un string'\n assert isinstance(table, list), '`table` debe ser una lista de dicts'\n if len(table) == 0:\n logger.warning('Tabla vacia: no se genera ninguna archivo.')\n return\n if not helpers.is_list_of_matching_dicts(table):\n raise ValueError(\n \"\"\"\nLa lista ingresada no esta formada por diccionarios con las mismas claves.\"\"\"\n )\n suffix = path.split('.')[-1]\n if suffix == 'csv':\n return _write_csv_table(table, path)\n elif suffix == 'xlsx':\n return _write_xlsx_table(table, path, column_styles, cell_styles)\n else:\n raise ValueError(\n '\\n{} no es un sufijo reconocido. Pruebe con .csv o.xlsx'.\n format(suffix))\n", "role": "user" }, { "content": "write_table", "role": "assistant" } ]
def x(obj, path):
    obj_str = text_type(json.dumps(obj, indent=4, separators=(',', ': '),
        ensure_ascii=False))
    helpers.ensure_dir_exists(os.path.dirname(path))
    with io.open(path, 'w', encoding='utf-8') as target:
        target.write(obj_str)
write_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(obj, path):\n obj_str = text_type(json.dumps(obj, indent=4, separators=(',', ': '),\n ensure_ascii=False))\n helpers.ensure_dir_exists(os.path.dirname(path))\n with io.open(path, 'w', encoding='utf-8') as target:\n target.write(obj_str)\n", "role": "user" }, { "content": "write_json", "role": "assistant" } ]
def x(catalog, path, xlsx_fields=None):
    xlsx_fields = xlsx_fields or XLSX_FIELDS
    catalog_dict = {}
    catalog_dict['catalog'] = [_tabulate_nested_dict(catalog.
        get_catalog_metadata(exclude_meta_fields=['themeTaxonomy']), 'catalog')
        ]
    catalog_dict['dataset'] = _generate_dataset_table(catalog)
    catalog_dict['distribution'] = _generate_distribution_table(catalog)
    catalog_dict['field'] = _generate_field_table(catalog)
    catalog_dict['theme'] = _generate_theme_table(catalog)
    write_tables(catalog_dict, path, tables_fields=xlsx_fields,
        tables_names=['catalog', 'dataset', 'distribution', 'field', 'theme'])
write_xlsx_catalog
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(catalog, path, xlsx_fields=None):\n xlsx_fields = xlsx_fields or XLSX_FIELDS\n catalog_dict = {}\n catalog_dict['catalog'] = [_tabulate_nested_dict(catalog.\n get_catalog_metadata(exclude_meta_fields=['themeTaxonomy']), 'catalog')\n ]\n catalog_dict['dataset'] = _generate_dataset_table(catalog)\n catalog_dict['distribution'] = _generate_distribution_table(catalog)\n catalog_dict['field'] = _generate_field_table(catalog)\n catalog_dict['theme'] = _generate_theme_table(catalog)\n write_tables(catalog_dict, path, tables_fields=xlsx_fields,\n tables_names=['catalog', 'dataset', 'distribution', 'field', 'theme'])\n", "role": "user" }, { "content": "write_xlsx_catalog", "role": "assistant" } ]
def x(url, tries=DEFAULT_TRIES, retry_delay=RETRY_DELAY, try_timeout=None,
    proxies=None, verify=True):
    for i in range(tries):
        try:
            return requests.get(url, timeout=try_timeout, proxies=proxies,
                verify=verify).content
        except Exception as e:
            download_exception = e
            if i < tries - 1:
                time.sleep(retry_delay)
    raise download_exception
download
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(url, tries=DEFAULT_TRIES, retry_delay=RETRY_DELAY, try_timeout=None,\n proxies=None, verify=True):\n for i in range(tries):\n try:\n return requests.get(url, timeout=try_timeout, proxies=proxies,\n verify=verify).content\n except Exception as e:\n download_exception = e\n if i < tries - 1:\n time.sleep(retry_delay)\n raise download_exception\n", "role": "user" }, { "content": "download", "role": "assistant" } ]
def x(url, file_path, **kwargs):
    content = download(url, **kwargs)
    with open(file_path, 'wb') as f:
        f.write(content)
download_to_file
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(url, file_path, **kwargs):\n content = download(url, **kwargs)\n with open(file_path, 'wb') as f:\n f.write(content)\n", "role": "user" }, { "content": "download_to_file", "role": "assistant" } ]
def x(value, mutator, *args, **kwargs):
    args = list(args) + [value]
    return mutator(*args, **kwargs)
replace_combiner
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(value, mutator, *args, **kwargs):\n args = list(args) + [value]\n return mutator(*args, **kwargs)\n", "role": "user" }, { "content": "replace_combiner", "role": "assistant" } ]
def x(value, mutator, *args, **kwargs):
    value.add(mutator(*args, **kwargs))
    return value
set_combiner
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(value, mutator, *args, **kwargs):\n value.add(mutator(*args, **kwargs))\n return value\n", "role": "user" }, { "content": "set_combiner", "role": "assistant" } ]
def x(value, mutator, *args, **kwargs):
    value.append(mutator(*args, **kwargs))
    return value
list_combiner
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(value, mutator, *args, **kwargs):\n value.append(mutator(*args, **kwargs))\n return value\n", "role": "user" }, { "content": "list_combiner", "role": "assistant" } ]
def x(a, _):
    if len(a) == 1:
        return a[0]
    product = 1
    for v in a:
        new_value = 1 - v
        product = product * new_value
    joint_value = 1 - product
    return joint_value
joint_value_post_processor
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(a, _):\n if len(a) == 1:\n return a[0]\n product = 1\n for v in a:\n new_value = 1 - v\n product = product * new_value\n joint_value = 1 - product\n return joint_value\n", "role": "user" }, { "content": "joint_value_post_processor", "role": "assistant" } ]
def x(self, value_name: str, source: Callable[..., pd.DataFrame]=None,
    preferred_combiner: Callable=replace_combiner, preferred_post_processor:
    Callable[..., pd.DataFrame]=None) ->Pipeline:
    return self._value_manager.register_value_producer(value_name, source,
        preferred_combiner, preferred_post_processor)
register_value_producer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value_name: str, source: Callable[..., pd.DataFrame]=None,\n preferred_combiner: Callable=replace_combiner, preferred_post_processor:\n Callable[..., pd.DataFrame]=None) ->Pipeline:\n return self._value_manager.register_value_producer(value_name, source,\n preferred_combiner, preferred_post_processor)\n", "role": "user" }, { "content": "register_value_producer", "role": "assistant" } ]
def x(self, rate_name: str, source: Callable[..., pd.DataFrame]=None
    ) ->Pipeline:
    return self._value_manager.register_rate_producer(rate_name, source)
register_rate_producer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, rate_name: str, source: Callable[..., pd.DataFrame]=None\n ) ->Pipeline:\n return self._value_manager.register_rate_producer(rate_name, source)\n", "role": "user" }, { "content": "register_rate_producer", "role": "assistant" } ]
def x(self, value_name: str, modifier: Callable, priority: int=5):
    self._value_manager.register_value_modifier(value_name, modifier, priority)
register_value_modifier
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value_name: str, modifier: Callable, priority: int=5):\n self._value_manager.register_value_modifier(value_name, modifier, priority)\n", "role": "user" }, { "content": "register_value_modifier", "role": "assistant" } ]
def x(model_specification, results_directory, verbose, log, with_debugger):
    log_level = logging.DEBUG if verbose else logging.ERROR
    logging.basicConfig(filename=log, level=log_level)
    try:
        run_simulation(model_specification, results_directory)
    except (BdbQuit, KeyboardInterrupt):
        raise
    except Exception as e:
        if with_debugger:
            import pdb
            import traceback
            traceback.print_exc()
            pdb.post_mortem()
        else:
            logging.exception('Uncaught exception {}'.format(e))
            raise
run
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(model_specification, results_directory, verbose, log, with_debugger):\n log_level = logging.DEBUG if verbose else logging.ERROR\n logging.basicConfig(filename=log, level=log_level)\n try:\n run_simulation(model_specification, results_directory)\n except (BdbQuit, KeyboardInterrupt):\n raise\n except Exception as e:\n if with_debugger:\n import pdb\n import traceback\n traceback.print_exc()\n pdb.post_mortem()\n else:\n logging.exception('Uncaught exception {}'.format(e))\n raise\n", "role": "user" }, { "content": "run", "role": "assistant" } ]
def x(model_specification, results_directory, process):
    model_specification = Path(model_specification)
    results_directory = Path(results_directory)
    out_stats_file = results_directory / f'{model_specification.name}'.replace(
        'yaml', 'stats')
    command = f'run_simulation("{model_specification}", "{results_directory}")'
    cProfile.runctx(command, globals=globals(), locals=locals(), filename=
        out_stats_file)
    if process:
        out_txt_file = results_directory / (out_stats_file.name + '.txt')
        with open(out_txt_file, 'w') as f:
            p = pstats.Stats(str(out_stats_file), stream=f)
            p.sort_stats('cumulative')
            p.print_stats()
profile
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(model_specification, results_directory, process):\n model_specification = Path(model_specification)\n results_directory = Path(results_directory)\n out_stats_file = results_directory / f'{model_specification.name}'.replace(\n 'yaml', 'stats')\n command = f'run_simulation(\"{model_specification}\", \"{results_directory}\")'\n cProfile.runctx(command, globals=globals(), locals=locals(), filename=\n out_stats_file)\n if process:\n out_txt_file = results_directory / (out_stats_file.name + '.txt')\n with open(out_txt_file, 'w') as f:\n p = pstats.Stats(str(out_stats_file), stream=f)\n p.sort_stats('cumulative')\n p.print_stats()\n", "role": "user" }, { "content": "profile", "role": "assistant" } ]
def x(self, new_index):
    new_event = Event(new_index, self.user_data)
    new_event.time = self.time
    new_event.step_size = self.step_size
    return new_event
split
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, new_index):\n new_event = Event(new_index, self.user_data)\n new_event.time = self.time\n new_event.step_size = self.step_size\n return new_event\n", "role": "user" }, { "content": "split", "role": "assistant" } ]
def x(self, event):
    if hasattr(event, 'time'):
        event.step_size = self.manager.step_size()
        event.time = self.manager.clock() + self.manager.step_size()
    for priority_bucket in self.listeners:
        for listener in sorted(priority_bucket, key=lambda x: x.__name__):
            listener(event)
    return event
emit
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, event):\n if hasattr(event, 'time'):\n event.step_size = self.manager.step_size()\n event.time = self.manager.clock() + self.manager.step_size()\n for priority_bucket in self.listeners:\n for listener in sorted(priority_bucket, key=lambda x: x.__name__):\n listener(event)\n return event\n", "role": "user" }, { "content": "emit", "role": "assistant" } ]
def x(self, builder):
    self.clock = builder.time.clock()
    self.step_size = builder.time.step_size()
setup
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, builder):\n self.clock = builder.time.clock()\n self.step_size = builder.time.step_size()\n", "role": "user" }, { "content": "setup", "role": "assistant" } ]
def x(self, name, listener, priority=5):
    self._event_types[name].listeners[priority].append(listener)
register_listener
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name, listener, priority=5):\n self._event_types[name].listeners[priority].append(listener)\n", "role": "user" }, { "content": "register_listener", "role": "assistant" } ]
def x(self, name: str) ->Callable[[Event], Event]:
    return self._event_manager.get_emitter(name)
get_emitter
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name: str) ->Callable[[Event], Event]:\n return self._event_manager.get_emitter(name)\n", "role": "user" }, { "content": "get_emitter", "role": "assistant" } ]
def x(self, name: str, listener: Callable[[Event], None], priority: int=5
    ) ->None:
    self._event_manager.register_listener(name, listener, priority)
register_listener
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, name: str, listener: Callable[[Event], None], priority: int=5\n ) ->None:\n self._event_manager.register_listener(name, listener, priority)\n", "role": "user" }, { "content": "register_listener", "role": "assistant" } ]
def x(self, builder):
    super().setup(builder)
    self.clock = builder.time.clock()
    self.excess_mortality_rate = builder.value.register_rate_producer(
        f'{self.state_id}.excess_mortality_rate', source=self.
        risk_deleted_excess_mortality_rate)
    self.excess_mortality_rate_paf = builder.value.register_value_producer(
        f'{self.state_id}.excess_mortality_rate.population_attributable_fraction'
        , source=lambda index: [pd.Series(0, index=index)],
        preferred_combiner=list_combiner, preferred_post_processor=
        joint_value_post_processor)
    builder.value.register_value_modifier('mortality_rate', self.
        add_in_excess_mortality)
    self.population_view = builder.population.get_view([self._model], query
        =f"alive == 'alive' and {self._model} == '{self.state_id}'")
setup
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, builder):\n super().setup(builder)\n self.clock = builder.time.clock()\n self.excess_mortality_rate = builder.value.register_rate_producer(\n f'{self.state_id}.excess_mortality_rate', source=self.\n risk_deleted_excess_mortality_rate)\n self.excess_mortality_rate_paf = builder.value.register_value_producer(\n f'{self.state_id}.excess_mortality_rate.population_attributable_fraction'\n , source=lambda index: [pd.Series(0, index=index)],\n preferred_combiner=list_combiner, preferred_post_processor=\n joint_value_post_processor)\n builder.value.register_value_modifier('mortality_rate', self.\n add_in_excess_mortality)\n self.population_view = builder.population.get_view([self._model], query\n =f\"alive == 'alive' and {self._model} == '{self.state_id}'\")\n", "role": "user" }, { "content": "setup", "role": "assistant" } ]
def x(self, index: pd.Index, query: str='', omit_missing_columns: bool=False
    ) ->pd.DataFrame:
    pop = self.manager.get_population(True).loc[index]
    if self._query:
        pop = pop.query(self._query)
    if query:
        pop = pop.query(query)
    if not self._columns:
        return pop
    else:
        if omit_missing_columns:
            columns = list(set(self._columns).intersection(pop.columns))
        else:
            columns = self._columns
        try:
            return pop[columns].copy()
        except KeyError:
            non_existent_columns = set(columns) - set(pop.columns)
            raise PopulationError(
                f'Requested column(s) {non_existent_columns} not in population table.'
                )
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, index: pd.Index, query: str='', omit_missing_columns: bool=False\n ) ->pd.DataFrame:\n pop = self.manager.get_population(True).loc[index]\n if self._query:\n pop = pop.query(self._query)\n if query:\n pop = pop.query(query)\n if not self._columns:\n return pop\n else:\n if omit_missing_columns:\n columns = list(set(self._columns).intersection(pop.columns))\n else:\n columns = self._columns\n try:\n return pop[columns].copy()\n except KeyError:\n non_existent_columns = set(columns) - set(pop.columns)\n raise PopulationError(\n f'Requested column(s) {non_existent_columns} not in population table.'\n )\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, pop: Union[pd.DataFrame, pd.Series]):
    if not pop.empty:
        if isinstance(pop, pd.Series):
            if pop.name in self._columns:
                affected_columns = [pop.name]
            elif len(self._columns) == 1:
                affected_columns = self._columns
            else:
                raise PopulationError(
                    'Cannot update with a Series unless the series name equals a column name or there is only a single column in the view'
                    )
        else:
            affected_columns = set(pop.columns)
        affected_columns = set(affected_columns).intersection(self._columns)
        state_table = self.manager.get_population(True)
        if not self.manager.growing:
            affected_columns = set(affected_columns).intersection(state_table
                .columns)
        for c in affected_columns:
            if c in state_table:
                v = state_table[c].values
                if isinstance(pop, pd.Series):
                    v2 = pop.values
                else:
                    v2 = pop[c].values
                v[pop.index] = v2
                if v.dtype != v2.dtype:
                    if not self.manager.growing:
                        raise PopulationError(
                            'Component corrupting population table. Old column type: {} New column type: {}'
                            .format(v.dtype, v2.dtype))
                    v = v.astype(v2.dtype)
            elif isinstance(pop, pd.Series):
                v = pop.values
            else:
                v = pop[c].values
            self.manager._population[c] = v
update
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, pop: Union[pd.DataFrame, pd.Series]):\n if not pop.empty:\n if isinstance(pop, pd.Series):\n if pop.name in self._columns:\n affected_columns = [pop.name]\n elif len(self._columns) == 1:\n affected_columns = self._columns\n else:\n raise PopulationError(\n 'Cannot update with a Series unless the series name equals a column name or there is only a single column in the view'\n )\n else:\n affected_columns = set(pop.columns)\n affected_columns = set(affected_columns).intersection(self._columns)\n state_table = self.manager.get_population(True)\n if not self.manager.growing:\n affected_columns = set(affected_columns).intersection(state_table\n .columns)\n for c in affected_columns:\n if c in state_table:\n v = state_table[c].values\n if isinstance(pop, pd.Series):\n v2 = pop.values\n else:\n v2 = pop[c].values\n v[pop.index] = v2\n if v.dtype != v2.dtype:\n if not self.manager.growing:\n raise PopulationError(\n 'Component corrupting population table. Old column type: {} New column type: {}'\n .format(v.dtype, v2.dtype))\n v = v.astype(v2.dtype)\n elif isinstance(pop, pd.Series):\n v = pop.values\n else:\n v = pop[c].values\n self.manager._population[c] = v\n", "role": "user" }, { "content": "update", "role": "assistant" } ]
def x(self, columns: Sequence[str], query: str=None) ->PopulationView:
    if 'tracked' not in columns:
        query_with_track = (query + 'and tracked == True' if query else
            'tracked == True')
        return PopulationView(self, columns, query_with_track)
    return PopulationView(self, columns, query)
get_view
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, columns: Sequence[str], query: str=None) ->PopulationView:\n if 'tracked' not in columns:\n query_with_track = (query + 'and tracked == True' if query else\n 'tracked == True')\n return PopulationView(self, columns, query_with_track)\n return PopulationView(self, columns, query)\n", "role": "user" }, { "content": "get_view", "role": "assistant" } ]
def x(self, columns: Sequence[str], query: str=None) ->PopulationView:
    return self._population_manager.get_view(columns, query)
get_view
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, columns: Sequence[str], query: str=None) ->PopulationView:\n return self._population_manager.get_view(columns, query)\n", "role": "user" }, { "content": "get_view", "role": "assistant" } ]
def x(self) ->Callable[[int, Union[Mapping[str, Any], None]], pd.Index]:
    return self._population_manager.get_simulant_creator()
get_simulant_creator
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->Callable[[int, Union[Mapping[str, Any], None]], pd.Index]:\n return self._population_manager.get_simulant_creator()\n", "role": "user" }, { "content": "get_simulant_creator", "role": "assistant" } ]
def x(self, initializer: Callable[[SimulantData], None], creates_columns:
    Sequence[str]=(), requires_columns: Sequence[str]=()):
    self._population_manager.register_simulant_initializer(initializer,
        creates_columns, requires_columns)
initializes_simulants
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, initializer: Callable[[SimulantData], None], creates_columns:\n Sequence[str]=(), requires_columns: Sequence[str]=()):\n self._population_manager.register_simulant_initializer(initializer,\n creates_columns, requires_columns)\n", "role": "user" }, { "content": "initializes_simulants", "role": "assistant" } ]