source_code (string, lengths 45-7.24k) | function_name (string, lengths 1-111) | messages (list)
---|---|---|
def x(title, decode=True, max_len=None, use_complete_words=True):
if decode:
title = unidecode(title)
title = title.lower()
filtered_title = re.sub('[^a-z0-9- ]+', '', title)
normalized_title = '-'.join([word for word in filtered_title.split() if
word not in STOP_WORDS])
if max_len and len(normalized_title) > max_len:
if use_complete_words:
last_word_index = normalized_title.rindex('-', 0, max_len)
normalized_title = normalized_title[:last_word_index]
else:
normalized_title = normalized_title[:max_len]
return normalized_title
|
title_to_name
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(title, decode=True, max_len=None, use_complete_words=True):\n if decode:\n title = unidecode(title)\n title = title.lower()\n filtered_title = re.sub('[^a-z0-9- ]+', '', title)\n normalized_title = '-'.join([word for word in filtered_title.split() if\n word not in STOP_WORDS])\n if max_len and len(normalized_title) > max_len:\n if use_complete_words:\n last_word_index = normalized_title.rindex('-', 0, max_len)\n normalized_title = normalized_title[:last_word_index]\n else:\n normalized_title = normalized_title[:max_len]\n return normalized_title\n",
"role": "user"
},
{
"content": "title_to_name",
"role": "assistant"
}
] |
def x(uri_string):
try:
result = urlparse(uri_string)
has_elements = all([result.scheme, result.netloc, result.path])
is_http = result.scheme == 'http' or result.scheme == 'https'
return True if has_elements and is_http else False
except Exception:
return False
|
validate_url
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(uri_string):\n try:\n result = urlparse(uri_string)\n has_elements = all([result.scheme, result.netloc, result.path])\n is_http = result.scheme == 'http' or result.scheme == 'https'\n return True if has_elements and is_http else False\n except Exception:\n return False\n",
"role": "user"
},
{
"content": "validate_url",
"role": "assistant"
}
] |
def x(directory):
if directory and not os.path.exists(directory):
os.makedirs(directory)
|
ensure_dir_exists
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(directory):\n if directory and not os.path.exists(directory):\n os.makedirs(directory)\n",
"role": "user"
},
{
"content": "ensure_dir_exists",
"role": "assistant"
}
] |
def x(dicc, keys, default_value=None):
for key in keys:
if isinstance(dicc, dict) and key in dicc:
dicc = dicc[key]
elif isinstance(dicc, list) and isinstance(key, int) and key < len(dicc
):
dicc = dicc[key]
else:
return default_value
return dicc
|
traverse_dict
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(dicc, keys, default_value=None):\n for key in keys:\n if isinstance(dicc, dict) and key in dicc:\n dicc = dicc[key]\n elif isinstance(dicc, list) and isinstance(key, int) and key < len(dicc\n ):\n dicc = dicc[key]\n else:\n return default_value\n return dicc\n",
"role": "user"
},
{
"content": "traverse_dict",
"role": "assistant"
}
] |
def x(list_of_dicts, expected_keys=None):
if isinstance(list_of_dicts, list) and len(list_of_dicts) == 0:
return False
is_not_list_msg = '\n{} no es una lista. Debe ingresar una lista'.format(
list_of_dicts)
assert isinstance(list_of_dicts, list), is_not_list_msg
not_all_dicts_msg = (
"""
No todos los elementos de {} son diccionarios. Ingrese una lista compuesta
solo por diccionarios."""
.format(list_of_dicts))
assert all([isinstance(d, dict) for d in list_of_dicts]), not_all_dicts_msg
expected_keys = expected_keys or set(list_of_dicts[0].keys())
elements = [(set(d.keys()) == expected_keys) for d in list_of_dicts]
return all(elements)
|
is_list_of_matching_dicts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(list_of_dicts, expected_keys=None):\n if isinstance(list_of_dicts, list) and len(list_of_dicts) == 0:\n return False\n is_not_list_msg = '\\n{} no es una lista. Debe ingresar una lista'.format(\n list_of_dicts)\n assert isinstance(list_of_dicts, list), is_not_list_msg\n not_all_dicts_msg = (\n \"\"\"\nNo todos los elementos de {} son diccionarios. Ingrese una lista compuesta\nsolo por diccionarios.\"\"\"\n .format(list_of_dicts))\n assert all([isinstance(d, dict) for d in list_of_dicts]), not_all_dicts_msg\n expected_keys = expected_keys or set(list_of_dicts[0].keys())\n elements = [(set(d.keys()) == expected_keys) for d in list_of_dicts]\n return all(elements)\n",
"role": "user"
},
{
"content": "is_list_of_matching_dicts",
"role": "assistant"
}
] |
def x(cell):
value = cell.value
if isinstance(value, string_types):
value = value.strip()
if isinstance(value, datetime):
value = value.isoformat()
return value
|
parse_value
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cell):\n value = cell.value\n if isinstance(value, string_types):\n value = value.strip()\n if isinstance(value, datetime):\n value = value.isoformat()\n return value\n",
"role": "user"
},
{
"content": "parse_value",
"role": "assistant"
}
] |
def x(worksheet):
headers = []
value_rows = []
for row_i, row in enumerate(worksheet.iter_rows()):
if row_i == 0:
for header_cell in row:
if header_cell.value:
headers.append(parse_value(header_cell))
else:
break
continue
row_cells = [parse_value(cell) for index, cell in enumerate(row) if
index < len(headers)]
if any(row_cells):
value_rows.append(row_cells)
else:
break
table = [{k: v for k, v in zip(headers, row) if v is not None} for row in
value_rows]
return table
|
sheet_to_table
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(worksheet):\n headers = []\n value_rows = []\n for row_i, row in enumerate(worksheet.iter_rows()):\n if row_i == 0:\n for header_cell in row:\n if header_cell.value:\n headers.append(parse_value(header_cell))\n else:\n break\n continue\n row_cells = [parse_value(cell) for index, cell in enumerate(row) if\n index < len(headers)]\n if any(row_cells):\n value_rows.append(row_cells)\n else:\n break\n table = [{k: v for k, v in zip(headers, row) if v is not None} for row in\n value_rows]\n return table\n",
"role": "user"
},
{
"content": "sheet_to_table",
"role": "assistant"
}
] |
def x(string, sep=',', filter_empty=False):
return [value.strip() for value in string.split(sep) if not
filter_empty or value]
|
string_to_list
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(string, sep=',', filter_empty=False):\n return [value.strip() for value in string.split(sep) if not\n filter_empty or value]\n",
"role": "user"
},
{
"content": "string_to_list",
"role": "assistant"
}
] |
def x(one_dict, other_dict):
result = other_dict.copy()
for k, v in one_dict.items():
if v is None:
v = 0
if isinstance(v, dict):
result[k] = add_dicts(v, other_dict.get(k, {}))
else:
other_value = result.get(k, 0)
if other_value is None:
other_value = 0
result[k] = other_value + v
return result
|
add_dicts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(one_dict, other_dict):\n result = other_dict.copy()\n for k, v in one_dict.items():\n if v is None:\n v = 0\n if isinstance(v, dict):\n result[k] = add_dicts(v, other_dict.get(k, {}))\n else:\n other_value = result.get(k, 0)\n if other_value is None:\n other_value = 0\n result[k] = other_value + v\n return result\n",
"role": "user"
},
{
"content": "add_dicts",
"role": "assistant"
}
] |
def x(date_str):
intervals = {'Y': 365, 'M': 30, 'W': 7, 'D': 1, 'H': 0, 'S': 0}
if date_str.find('R/P') != 0:
return 0
date_str = date_str.strip('R/P')
days = 0
index = 0
for interval in intervals:
value_end = date_str.find(interval)
if value_end < 0:
continue
try:
days += int(float(date_str[index:value_end]) * intervals[interval])
except ValueError:
continue
index = value_end
return max(days, 1)
|
parse_repeating_time_interval_to_days
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(date_str):\n intervals = {'Y': 365, 'M': 30, 'W': 7, 'D': 1, 'H': 0, 'S': 0}\n if date_str.find('R/P') != 0:\n return 0\n date_str = date_str.strip('R/P')\n days = 0\n index = 0\n for interval in intervals:\n value_end = date_str.find(interval)\n if value_end < 0:\n continue\n try:\n days += int(float(date_str[index:value_end]) * intervals[interval])\n except ValueError:\n continue\n index = value_end\n return max(days, 1)\n",
"role": "user"
},
{
"content": "parse_repeating_time_interval_to_days",
"role": "assistant"
}
] |
def x(date_str):
with open(os.path.join(ABSOLUTE_SCHEMA_DIR, 'accrualPeriodicity.json'), 'r'
) as f:
freqs_map = {freq['id']: freq['description'] for freq in json.load(f)}
return freqs_map[date_str]
|
parse_repeating_time_interval_to_str
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(date_str):\n with open(os.path.join(ABSOLUTE_SCHEMA_DIR, 'accrualPeriodicity.json'), 'r'\n ) as f:\n freqs_map = {freq['id']: freq['description'] for freq in json.load(f)}\n return freqs_map[date_str]\n",
"role": "user"
},
{
"content": "parse_repeating_time_interval_to_str",
"role": "assistant"
}
] |
def x(wb, name):
if isinstance(wb, string_types):
wb = load_workbook(wb, read_only=True, data_only=True)
for sheetname in wb.sheetnames:
if sheetname.lower() == name.lower():
return sheetname
raise Exception('No existe la hoja {}'.format(name))
|
find_ws_name
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(wb, name):\n if isinstance(wb, string_types):\n wb = load_workbook(wb, read_only=True, data_only=True)\n for sheetname in wb.sheetnames:\n if sheetname.lower() == name.lower():\n return sheetname\n raise Exception('No existe la hoja {}'.format(name))\n",
"role": "user"
},
{
"content": "find_ws_name",
"role": "assistant"
}
] |
def x(dataset, other, fields_dataset=None, fields_distribution=None,
return_diff=False):
dataset_is_equal = True
dataset_diff = []
if not fields_dataset:
fields_dataset = ['title', ['publisher', 'name']]
for field_dataset in fields_dataset:
if isinstance(field_dataset, list):
value = traverse_dict(dataset, field_dataset)
other_value = traverse_dict(other, field_dataset)
else:
value = dataset.get(field_dataset)
other_value = other.get(field_dataset)
if value != other_value:
dataset_diff.append({'error_location': field_dataset,
'dataset_value': value, 'other_value': other_value})
dataset_is_equal = False
if fields_distribution:
dataset_distributions = dataset.get('distribution')
other_distributions = other.get('distribution')
if len(dataset_distributions) != len(other_distributions):
logger.info('{} distribuciones en origen y {} en destino'.
format(len(dataset_distributions), len(other_distributions)))
dataset_is_equal = False
distributions_equal = True
for dataset_distribution, other_distribution in zip(
dataset_distributions, other_distributions):
for field_distribution in fields_distribution:
if isinstance(field_distribution, list):
value = traverse_dict(dataset_distribution,
field_distribution)
other_value = traverse_dict(other_distribution,
field_distribution)
else:
value = dataset_distribution.get(field_distribution)
other_value = other_distribution.get(field_distribution)
if value != other_value:
dataset_diff.append({'error_location': '{} ({})'.format
(field_distribution, dataset_distribution.get(
'title')), 'dataset_value': value, 'other_value':
other_value})
distributions_equal = False
if not distributions_equal:
dataset_is_equal = False
if return_diff:
return dataset_diff
else:
return dataset_is_equal
|
datasets_equal
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(dataset, other, fields_dataset=None, fields_distribution=None,\n return_diff=False):\n dataset_is_equal = True\n dataset_diff = []\n if not fields_dataset:\n fields_dataset = ['title', ['publisher', 'name']]\n for field_dataset in fields_dataset:\n if isinstance(field_dataset, list):\n value = traverse_dict(dataset, field_dataset)\n other_value = traverse_dict(other, field_dataset)\n else:\n value = dataset.get(field_dataset)\n other_value = other.get(field_dataset)\n if value != other_value:\n dataset_diff.append({'error_location': field_dataset,\n 'dataset_value': value, 'other_value': other_value})\n dataset_is_equal = False\n if fields_distribution:\n dataset_distributions = dataset.get('distribution')\n other_distributions = other.get('distribution')\n if len(dataset_distributions) != len(other_distributions):\n logger.info('{} distribuciones en origen y {} en destino'.\n format(len(dataset_distributions), len(other_distributions)))\n dataset_is_equal = False\n distributions_equal = True\n for dataset_distribution, other_distribution in zip(\n dataset_distributions, other_distributions):\n for field_distribution in fields_distribution:\n if isinstance(field_distribution, list):\n value = traverse_dict(dataset_distribution,\n field_distribution)\n other_value = traverse_dict(other_distribution,\n field_distribution)\n else:\n value = dataset_distribution.get(field_distribution)\n other_value = other_distribution.get(field_distribution)\n if value != other_value:\n dataset_diff.append({'error_location': '{} ({})'.format\n (field_distribution, dataset_distribution.get(\n 'title')), 'dataset_value': value, 'other_value':\n other_value})\n distributions_equal = False\n if not distributions_equal:\n dataset_is_equal = False\n if return_diff:\n return dataset_diff\n else:\n return dataset_is_equal\n",
"role": "user"
},
{
"content": "datasets_equal",
"role": "assistant"
}
] |
def x(catalog):
for dataset in catalog.get('dataset', []):
for distribution_index, distribution in enumerate(dataset.get(
'distribution', [])):
if 'identifier' not in distribution:
distribution['identifier'] = '{}_{}'.format(dataset[
'identifier'], distribution_index)
|
generate_distribution_ids
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog):\n for dataset in catalog.get('dataset', []):\n for distribution_index, distribution in enumerate(dataset.get(\n 'distribution', [])):\n if 'identifier' not in distribution:\n distribution['identifier'] = '{}_{}'.format(dataset[\n 'identifier'], distribution_index)\n",
"role": "user"
},
{
"content": "generate_distribution_ids",
"role": "assistant"
}
] |
def x(catalog, theme):
try:
label = catalog.get_theme(identifier=theme)['label']
except BaseException:
try:
label = catalog.get_theme(label=theme)['label']
except BaseException:
raise ce.ThemeNonExistentError(theme)
label = re.sub('[^\\wá-úÁ-ÚñÑ .-]+', '', label, flags=re.UNICODE)
return label
|
get_theme_label
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog, theme):\n try:\n label = catalog.get_theme(identifier=theme)['label']\n except BaseException:\n try:\n label = catalog.get_theme(label=theme)['label']\n except BaseException:\n raise ce.ThemeNonExistentError(theme)\n label = re.sub('[^\\\\wá-úÁ-ÚñÑ .-]+', '', label, flags=re.UNICODE)\n return label\n",
"role": "user"
},
{
"content": "get_theme_label",
"role": "assistant"
}
] |
def x(catalogs, local_catalogs_dir='', include_metadata=True, include_data=
True, include_metadata_xlsx=False, use_short_path=False):
if isinstance(catalogs, list):
for catalog in catalogs:
try:
make_catalog_backup(catalog, local_catalogs_dir=
local_catalogs_dir, include_metadata=include_metadata,
include_metadata_xlsx=include_metadata_xlsx,
include_data=include_data, use_short_path=use_short_path)
except Exception:
logger.exception('ERROR en {}'.format(catalog))
elif isinstance(catalogs, dict):
for catalog_id, catalog in catalogs.iteritems():
try:
make_catalog_backup(catalog, catalog_id, local_catalogs_dir
=local_catalogs_dir, include_metadata=include_metadata,
include_metadata_xlsx=include_metadata_xlsx,
include_data=include_data, use_short_path=use_short_path)
except Exception:
logger.exception('ERROR en {} ({})'.format(catalog, catalog_id)
)
|
make_catalogs_backup
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalogs, local_catalogs_dir='', include_metadata=True, include_data=\n True, include_metadata_xlsx=False, use_short_path=False):\n if isinstance(catalogs, list):\n for catalog in catalogs:\n try:\n make_catalog_backup(catalog, local_catalogs_dir=\n local_catalogs_dir, include_metadata=include_metadata,\n include_metadata_xlsx=include_metadata_xlsx,\n include_data=include_data, use_short_path=use_short_path)\n except Exception:\n logger.exception('ERROR en {}'.format(catalog))\n elif isinstance(catalogs, dict):\n for catalog_id, catalog in catalogs.iteritems():\n try:\n make_catalog_backup(catalog, catalog_id, local_catalogs_dir\n =local_catalogs_dir, include_metadata=include_metadata,\n include_metadata_xlsx=include_metadata_xlsx,\n include_data=include_data, use_short_path=use_short_path)\n except Exception:\n logger.exception('ERROR en {} ({})'.format(catalog, catalog_id)\n )\n",
"role": "user"
},
{
"content": "make_catalogs_backup",
"role": "assistant"
}
] |
def x(catalog, catalog_id=None, local_catalogs_dir='', include_metadata=
True, include_data=True, include_datasets=None,
include_distribution_formats=['CSV', 'XLS'], include_metadata_xlsx=True,
use_short_path=False):
catalog = pydatajson.DataJson(catalog)
catalog_identifier = catalog_id if catalog_id else catalog['identifier']
if include_metadata:
logger.info('Descargando catálogo {}'.format(catalog_identifier.
ljust(30)))
catalog_path = get_catalog_path(catalog_identifier, local_catalogs_dir)
ensure_dir_exists(os.path.dirname(catalog_path))
catalog.to_json(catalog_path)
if include_metadata_xlsx:
catalog_path = get_catalog_path(catalog_identifier,
local_catalogs_dir, fmt='xlsx')
ensure_dir_exists(os.path.dirname(catalog_path))
catalog.to_xlsx(catalog_path)
if include_data:
distributions = catalog.distributions
distributions_num = len(distributions)
for index, distribution in enumerate(distributions):
logger.info('Descargando distribución {} de {} ({})'.format(
index + 1, distributions_num, catalog_identifier))
dataset_id = distribution['dataset_identifier']
if include_datasets and dataset_id not in include_datasets:
pass
else:
distribution_id = distribution['identifier']
distribution_download_url = distribution['downloadURL']
distribution_file_name = distribution.get('fileName',
distribution_download_url[distribution_download_url.
rfind('/') + 1:])
distribution_format = distribution.get('format',
distribution_file_name[distribution_file_name.rfind('.'
) + 1:])
if (include_distribution_formats and distribution_format not in
include_distribution_formats):
pass
else:
file_path = get_distribution_path(catalog_identifier,
dataset_id, distribution_id, distribution_file_name,
local_catalogs_dir, use_short_path=use_short_path)
ensure_dir_exists(os.path.dirname(file_path))
download_to_file(distribution_download_url, file_path)
|
make_catalog_backup
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog, catalog_id=None, local_catalogs_dir='', include_metadata=\n True, include_data=True, include_datasets=None,\n include_distribution_formats=['CSV', 'XLS'], include_metadata_xlsx=True,\n use_short_path=False):\n catalog = pydatajson.DataJson(catalog)\n catalog_identifier = catalog_id if catalog_id else catalog['identifier']\n if include_metadata:\n logger.info('Descargando catálogo {}'.format(catalog_identifier.\n ljust(30)))\n catalog_path = get_catalog_path(catalog_identifier, local_catalogs_dir)\n ensure_dir_exists(os.path.dirname(catalog_path))\n catalog.to_json(catalog_path)\n if include_metadata_xlsx:\n catalog_path = get_catalog_path(catalog_identifier,\n local_catalogs_dir, fmt='xlsx')\n ensure_dir_exists(os.path.dirname(catalog_path))\n catalog.to_xlsx(catalog_path)\n if include_data:\n distributions = catalog.distributions\n distributions_num = len(distributions)\n for index, distribution in enumerate(distributions):\n logger.info('Descargando distribución {} de {} ({})'.format(\n index + 1, distributions_num, catalog_identifier))\n dataset_id = distribution['dataset_identifier']\n if include_datasets and dataset_id not in include_datasets:\n pass\n else:\n distribution_id = distribution['identifier']\n distribution_download_url = distribution['downloadURL']\n distribution_file_name = distribution.get('fileName',\n distribution_download_url[distribution_download_url.\n rfind('/') + 1:])\n distribution_format = distribution.get('format',\n distribution_file_name[distribution_file_name.rfind('.'\n ) + 1:])\n if (include_distribution_formats and distribution_format not in\n include_distribution_formats):\n pass\n else:\n file_path = get_distribution_path(catalog_identifier,\n dataset_id, distribution_id, distribution_file_name,\n local_catalogs_dir, use_short_path=use_short_path)\n ensure_dir_exists(os.path.dirname(file_path))\n download_to_file(distribution_download_url, file_path)\n",
"role": "user"
},
{
"content": "make_catalog_backup",
"role": "assistant"
}
] |
def x(catalog_id, dataset_id, distribution_id, catalogs_dir=CATALOGS_DIR,
use_short_path=False):
if use_short_path:
catalog_path = os.path.join(catalogs_dir, 'catalog', catalog_id)
distribution_dir = os.path.join(catalog_path, dataset_id)
else:
catalog_path = os.path.join(catalogs_dir, 'catalog', catalog_id)
dataset_path = os.path.join(catalog_path, 'dataset', dataset_id)
distribution_dir = os.path.join(dataset_path, 'distribution',
distribution_id)
return os.path.abspath(distribution_dir)
|
get_distribution_dir
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog_id, dataset_id, distribution_id, catalogs_dir=CATALOGS_DIR,\n use_short_path=False):\n if use_short_path:\n catalog_path = os.path.join(catalogs_dir, 'catalog', catalog_id)\n distribution_dir = os.path.join(catalog_path, dataset_id)\n else:\n catalog_path = os.path.join(catalogs_dir, 'catalog', catalog_id)\n dataset_path = os.path.join(catalog_path, 'dataset', dataset_id)\n distribution_dir = os.path.join(dataset_path, 'distribution',\n distribution_id)\n return os.path.abspath(distribution_dir)\n",
"role": "user"
},
{
"content": "get_distribution_dir",
"role": "assistant"
}
] |
def x(catalog_id, dataset_id, distribution_id, distribution_file_name,
catalogs_dir=CATALOGS_DIR, use_short_path=False):
if use_short_path:
distribution_dir = get_distribution_dir(catalog_id, dataset_id,
distribution_id, catalogs_dir, use_short_path=True)
distribution_file_path = os.path.join(distribution_dir,
distribution_file_name)
else:
distribution_dir = get_distribution_dir(catalog_id, dataset_id,
distribution_id, catalogs_dir, use_short_path=False)
distribution_file_path = os.path.join(distribution_dir, 'download',
distribution_file_name)
return os.path.abspath(distribution_file_path)
|
get_distribution_path
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog_id, dataset_id, distribution_id, distribution_file_name,\n catalogs_dir=CATALOGS_DIR, use_short_path=False):\n if use_short_path:\n distribution_dir = get_distribution_dir(catalog_id, dataset_id,\n distribution_id, catalogs_dir, use_short_path=True)\n distribution_file_path = os.path.join(distribution_dir,\n distribution_file_name)\n else:\n distribution_dir = get_distribution_dir(catalog_id, dataset_id,\n distribution_id, catalogs_dir, use_short_path=False)\n distribution_file_path = os.path.join(distribution_dir, 'download',\n distribution_file_name)\n return os.path.abspath(distribution_file_path)\n",
"role": "user"
},
{
"content": "get_distribution_path",
"role": "assistant"
}
] |
def x(catalog_id, catalogs_dir=CATALOGS_DIR, fmt='json'):
base_path = os.path.join(catalogs_dir, 'catalog', catalog_id)
if fmt == 'json':
return os.path.join(base_path, 'data.json')
elif fmt == 'xlsx':
return os.path.join(base_path, 'catalog.xlsx')
else:
raise NotImplementedError('El formato {} no está implementado.'.
format(fmt))
|
get_catalog_path
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog_id, catalogs_dir=CATALOGS_DIR, fmt='json'):\n base_path = os.path.join(catalogs_dir, 'catalog', catalog_id)\n if fmt == 'json':\n return os.path.join(base_path, 'data.json')\n elif fmt == 'xlsx':\n return os.path.join(base_path, 'catalog.xlsx')\n else:\n raise NotImplementedError('El formato {} no está implementado.'.\n format(fmt))\n",
"role": "user"
},
{
"content": "get_catalog_path",
"role": "assistant"
}
] |
def x(catalogs, include_data=True, use_short_path=True):
include_data = bool(int(include_data))
make_catalogs_backup(catalogs.split(','), include_data=include_data,
use_short_path=use_short_path)
|
main
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalogs, include_data=True, use_short_path=True):\n include_data = bool(int(include_data))\n make_catalogs_backup(catalogs.split(','), include_data=include_data,\n use_short_path=use_short_path)\n",
"role": "user"
},
{
"content": "main",
"role": "assistant"
}
] |
def x(catalog, export_path=None):
if isinstance(catalog, string_types):
catalog_path_or_url = catalog
else:
catalog_path_or_url = None
catalog = read_catalog(catalog)
validation = validate_catalog(catalog)
indicators = generate_catalogs_indicators(catalog, CENTRAL_CATALOG)[0][0]
with io.open(os.path.join(TEMPLATES_PATH, 'catalog_readme.txt'), 'r',
encoding='utf-8') as template_file:
readme_template = template_file.read()
not_federated_datasets_list = '\n'.join(['- [{}]({})'.format(
dataset[0], dataset[1]) for dataset in indicators[
'datasets_no_federados']])
federated_removed_datasets_list = '\n'.join(['- [{}]({})'.format(
dataset[0], dataset[1]) for dataset in indicators[
'datasets_federados_eliminados']])
federated_datasets_list = '\n'.join(['- [{}]({})'.format(dataset[0],
dataset[1]) for dataset in indicators['datasets_federados']])
non_federated_pct = 1.0 - indicators['datasets_federados_pct'
] if indicators['datasets_federados_pct'
] is not None else indicators['datasets_federados_pct']
content = {'title': catalog.get('title'), 'publisher_name':
traverse_dict(catalog, ['publisher', 'name']), 'publisher_mbox':
traverse_dict(catalog, ['publisher', 'mbox']),
'catalog_path_or_url': catalog_path_or_url, 'description':
catalog.get('description'), 'global_status': validation[
'status'], 'catalog_status': validation['error']['catalog'][
'status'], 'no_of_datasets': len(catalog['dataset']),
'no_of_distributions': sum([len(dataset['distribution']) for
dataset in catalog['dataset']]), 'federated_datasets':
indicators['datasets_federados_cant'], 'not_federated_datasets':
indicators['datasets_no_federados_cant'],
'not_federated_datasets_pct': non_federated_pct,
'not_federated_datasets_list': not_federated_datasets_list,
'federated_removed_datasets_list':
federated_removed_datasets_list, 'federated_datasets_list':
federated_datasets_list}
catalog_readme = readme_template.format(**content)
if export_path:
with io.open(export_path, 'w+', encoding='utf-8') as target:
target.write(catalog_readme)
else:
return catalog_readme
|
generate_readme
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog, export_path=None):\n if isinstance(catalog, string_types):\n catalog_path_or_url = catalog\n else:\n catalog_path_or_url = None\n catalog = read_catalog(catalog)\n validation = validate_catalog(catalog)\n indicators = generate_catalogs_indicators(catalog, CENTRAL_CATALOG)[0][0]\n with io.open(os.path.join(TEMPLATES_PATH, 'catalog_readme.txt'), 'r',\n encoding='utf-8') as template_file:\n readme_template = template_file.read()\n not_federated_datasets_list = '\\n'.join(['- [{}]({})'.format(\n dataset[0], dataset[1]) for dataset in indicators[\n 'datasets_no_federados']])\n federated_removed_datasets_list = '\\n'.join(['- [{}]({})'.format(\n dataset[0], dataset[1]) for dataset in indicators[\n 'datasets_federados_eliminados']])\n federated_datasets_list = '\\n'.join(['- [{}]({})'.format(dataset[0],\n dataset[1]) for dataset in indicators['datasets_federados']])\n non_federated_pct = 1.0 - indicators['datasets_federados_pct'\n ] if indicators['datasets_federados_pct'\n ] is not None else indicators['datasets_federados_pct']\n content = {'title': catalog.get('title'), 'publisher_name':\n traverse_dict(catalog, ['publisher', 'name']), 'publisher_mbox':\n traverse_dict(catalog, ['publisher', 'mbox']),\n 'catalog_path_or_url': catalog_path_or_url, 'description':\n catalog.get('description'), 'global_status': validation[\n 'status'], 'catalog_status': validation['error']['catalog'][\n 'status'], 'no_of_datasets': len(catalog['dataset']),\n 'no_of_distributions': sum([len(dataset['distribution']) for\n dataset in catalog['dataset']]), 'federated_datasets':\n indicators['datasets_federados_cant'], 'not_federated_datasets':\n indicators['datasets_no_federados_cant'],\n 'not_federated_datasets_pct': non_federated_pct,\n 'not_federated_datasets_list': not_federated_datasets_list,\n 'federated_removed_datasets_list':\n federated_removed_datasets_list, 'federated_datasets_list':\n federated_datasets_list}\n catalog_readme = readme_template.format(**content)\n if export_path:\n with io.open(export_path, 'w+', encoding='utf-8') as target:\n target.write(catalog_readme)\n else:\n return catalog_readme\n",
"role": "user"
},
{
"content": "generate_readme",
"role": "assistant"
}
] |
def x(self, builder: Builder):
self.config = builder.configuration
self.with_common_random_numbers = bool(self.config.randomness.key_columns)
self.register = builder.randomness.register_simulants
if self.with_common_random_numbers and not ['entrance_time', 'age'
] == self.config.randomness.key_columns:
raise ValueError(
"If running with CRN, you must specify ['entrance_time', 'age'] asthe randomness key columns."
)
self.age_randomness = builder.randomness.get_stream('age_initialization',
for_initialization=self.with_common_random_numbers)
self.sex_randomness = builder.randomness.get_stream('sex_initialization')
columns_created = ['age', 'sex', 'alive', 'entrance_time']
builder.population.initializes_simulants(self.on_initialize_simulants,
creates_columns=columns_created)
self.population_view = builder.population.get_view(columns_created)
builder.event.register_listener('time_step', self.age_simulants)
|
setup
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, builder: Builder):\n self.config = builder.configuration\n self.with_common_random_numbers = bool(self.config.randomness.key_columns)\n self.register = builder.randomness.register_simulants\n if self.with_common_random_numbers and not ['entrance_time', 'age'\n ] == self.config.randomness.key_columns:\n raise ValueError(\n \"If running with CRN, you must specify ['entrance_time', 'age'] asthe randomness key columns.\"\n )\n self.age_randomness = builder.randomness.get_stream('age_initialization',\n for_initialization=self.with_common_random_numbers)\n self.sex_randomness = builder.randomness.get_stream('sex_initialization')\n columns_created = ['age', 'sex', 'alive', 'entrance_time']\n builder.population.initializes_simulants(self.on_initialize_simulants,\n creates_columns=columns_created)\n self.population_view = builder.population.get_view(columns_created)\n builder.event.register_listener('time_step', self.age_simulants)\n",
"role": "user"
},
{
"content": "setup",
"role": "assistant"
}
] |
def x(self, pop_data: SimulantData):
age_start = self.config.population.age_start
age_end = self.config.population.age_end
if age_start == age_end:
age_window = pop_data.creation_window / pd.Timedelta(days=365)
else:
age_window = age_end - age_start
age_draw = self.age_randomness.get_draw(pop_data.index)
age = age_start + age_draw * age_window
if self.with_common_random_numbers:
population = pd.DataFrame({'entrance_time': pop_data.creation_time,
'age': age.values}, index=pop_data.index)
self.register(population)
population['sex'] = self.sex_randomness.choice(pop_data.index, [
'Male', 'Female'])
population['alive'] = 'alive'
else:
population = pd.DataFrame({'age': age.values, 'sex': self.
sex_randomness.choice(pop_data.index, ['Male', 'Female']),
'alive': pd.Series('alive', index=pop_data.index),
'entrance_time': pop_data.creation_time}, index=pop_data.index)
self.population_view.update(population)
|
on_initialize_simulants
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, pop_data: SimulantData):\n age_start = self.config.population.age_start\n age_end = self.config.population.age_end\n if age_start == age_end:\n age_window = pop_data.creation_window / pd.Timedelta(days=365)\n else:\n age_window = age_end - age_start\n age_draw = self.age_randomness.get_draw(pop_data.index)\n age = age_start + age_draw * age_window\n if self.with_common_random_numbers:\n population = pd.DataFrame({'entrance_time': pop_data.creation_time,\n 'age': age.values}, index=pop_data.index)\n self.register(population)\n population['sex'] = self.sex_randomness.choice(pop_data.index, [\n 'Male', 'Female'])\n population['alive'] = 'alive'\n else:\n population = pd.DataFrame({'age': age.values, 'sex': self.\n sex_randomness.choice(pop_data.index, ['Male', 'Female']),\n 'alive': pd.Series('alive', index=pop_data.index),\n 'entrance_time': pop_data.creation_time}, index=pop_data.index)\n self.population_view.update(population)\n",
"role": "user"
},
{
"content": "on_initialize_simulants",
"role": "assistant"
}
] |
def x(self, event: Event):
population = self.population_view.get(event.index, query="alive == 'alive'"
)
population['age'] += event.step_size / pd.Timedelta(days=365)
self.population_view.update(population)
|
age_simulants
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, event: Event):\n population = self.population_view.get(event.index, query=\"alive == 'alive'\"\n )\n population['age'] += event.step_size / pd.Timedelta(days=365)\n self.population_view.update(population)\n",
"role": "user"
},
{
"content": "age_simulants",
"role": "assistant"
}
] |
def x(catalogs, central_catalog=None, identifier_search=False, validator=None):
assert isinstance(catalogs, string_types + (dict, list))
if isinstance(catalogs, string_types + (dict,)):
catalogs = [catalogs]
indicators_list = []
fields = {}
catalogs_cant = 0
for catalog in catalogs:
try:
catalog = readers.read_catalog(catalog)
catalogs_cant += 1
except Exception as e:
msg = u'Error leyendo catálogo de la lista: {}'.format(str(e))
logger.error(msg)
continue
fields_count, result = _generate_indicators(catalog, validator=
validator)
if central_catalog:
result.update(_federation_indicators(catalog, central_catalog,
identifier_search=identifier_search))
if not indicators_list:
network_indicators = result.copy()
else:
network_indicators = helpers.add_dicts(network_indicators, result)
fields = helpers.add_dicts(fields_count, fields)
result['title'] = catalog.get('title', 'no-title')
result['identifier'] = catalog.get('identifier', 'no-id')
indicators_list.append(result)
if not indicators_list:
return [], {}
network_indicators['catalogos_cant'] = catalogs_cant
_network_indicator_percentages(fields, network_indicators)
return indicators_list, network_indicators
|
generate_catalogs_indicators
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalogs, central_catalog=None, identifier_search=False, validator=None):\n assert isinstance(catalogs, string_types + (dict, list))\n if isinstance(catalogs, string_types + (dict,)):\n catalogs = [catalogs]\n indicators_list = []\n fields = {}\n catalogs_cant = 0\n for catalog in catalogs:\n try:\n catalog = readers.read_catalog(catalog)\n catalogs_cant += 1\n except Exception as e:\n msg = u'Error leyendo catálogo de la lista: {}'.format(str(e))\n logger.error(msg)\n continue\n fields_count, result = _generate_indicators(catalog, validator=\n validator)\n if central_catalog:\n result.update(_federation_indicators(catalog, central_catalog,\n identifier_search=identifier_search))\n if not indicators_list:\n network_indicators = result.copy()\n else:\n network_indicators = helpers.add_dicts(network_indicators, result)\n fields = helpers.add_dicts(fields_count, fields)\n result['title'] = catalog.get('title', 'no-title')\n result['identifier'] = catalog.get('identifier', 'no-id')\n indicators_list.append(result)\n if not indicators_list:\n return [], {}\n network_indicators['catalogos_cant'] = catalogs_cant\n _network_indicator_percentages(fields, network_indicators)\n return indicators_list, network_indicators\n",
"role": "user"
},
{
"content": "generate_catalogs_indicators",
"role": "assistant"
}
] |
def x(catalog, validator=None, only_numeric=False):
result = {}
result.update(_generate_status_indicators(catalog, validator=validator))
result.update(_generate_date_indicators(catalog, only_numeric=only_numeric)
)
if not only_numeric:
if 'dataset' in catalog:
format_count = count_fields(get_distributions(catalog), 'format')
type_count = count_fields(get_distributions(catalog), 'type')
license_count = count_fields(get_datasets(catalog), 'license')
else:
format_count = type_count = license_count = {}
result.update({'distribuciones_formatos_cant': format_count,
'distribuciones_tipos_cant': type_count,
'datasets_licencias_cant': license_count})
fields_count = _count_required_and_optional_fields(catalog)
recomendados_pct = float(fields_count['recomendado']) / fields_count[
'total_recomendado']
optativos_pct = float(fields_count['optativo']) / fields_count[
'total_optativo']
result.update({'campos_recomendados_pct': round(recomendados_pct, 4),
'campos_optativos_pct': round(optativos_pct, 4)})
return fields_count, result
|
generate_indicators
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog, validator=None, only_numeric=False):\n result = {}\n result.update(_generate_status_indicators(catalog, validator=validator))\n result.update(_generate_date_indicators(catalog, only_numeric=only_numeric)\n )\n if not only_numeric:\n if 'dataset' in catalog:\n format_count = count_fields(get_distributions(catalog), 'format')\n type_count = count_fields(get_distributions(catalog), 'type')\n license_count = count_fields(get_datasets(catalog), 'license')\n else:\n format_count = type_count = license_count = {}\n result.update({'distribuciones_formatos_cant': format_count,\n 'distribuciones_tipos_cant': type_count,\n 'datasets_licencias_cant': license_count})\n fields_count = _count_required_and_optional_fields(catalog)\n recomendados_pct = float(fields_count['recomendado']) / fields_count[\n 'total_recomendado']\n optativos_pct = float(fields_count['optativo']) / fields_count[\n 'total_optativo']\n result.update({'campos_recomendados_pct': round(recomendados_pct, 4),\n 'campos_optativos_pct': round(optativos_pct, 4)})\n return fields_count, result\n",
"role": "user"
},
{
"content": "generate_indicators",
"role": "assistant"
}
] |
def x(catalog, central_catalog, identifier_search=False):
result = {'datasets_federados_cant': None, 'datasets_federados_pct':
None, 'datasets_no_federados_cant': None,
'datasets_federados_eliminados_cant': None,
'distribuciones_federadas_cant': None,
'datasets_federados_eliminados': [], 'datasets_no_federados': [],
'datasets_federados': []}
try:
central_catalog = readers.read_catalog(central_catalog)
except Exception as e:
msg = u'Error leyendo el catálogo central: {}'.format(str(e))
logger.error(msg)
return result
generator = FederationIndicatorsGenerator(central_catalog, catalog,
id_based=identifier_search)
result.update({'datasets_federados_cant': generator.
datasets_federados_cant(), 'datasets_no_federados_cant': generator.
datasets_no_federados_cant(), 'datasets_federados_eliminados_cant':
generator.datasets_federados_eliminados_cant(),
'datasets_federados_eliminados': generator.
datasets_federados_eliminados(), 'datasets_no_federados': generator
.datasets_no_federados(), 'datasets_federados': generator.
datasets_federados(), 'datasets_federados_pct': generator.
datasets_federados_pct(), 'distribuciones_federadas_cant':
generator.distribuciones_federadas_cant()})
return result
|
federation_indicators
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog, central_catalog, identifier_search=False):\n result = {'datasets_federados_cant': None, 'datasets_federados_pct':\n None, 'datasets_no_federados_cant': None,\n 'datasets_federados_eliminados_cant': None,\n 'distribuciones_federadas_cant': None,\n 'datasets_federados_eliminados': [], 'datasets_no_federados': [],\n 'datasets_federados': []}\n try:\n central_catalog = readers.read_catalog(central_catalog)\n except Exception as e:\n msg = u'Error leyendo el catálogo central: {}'.format(str(e))\n logger.error(msg)\n return result\n generator = FederationIndicatorsGenerator(central_catalog, catalog,\n id_based=identifier_search)\n result.update({'datasets_federados_cant': generator.\n datasets_federados_cant(), 'datasets_no_federados_cant': generator.\n datasets_no_federados_cant(), 'datasets_federados_eliminados_cant':\n generator.datasets_federados_eliminados_cant(),\n 'datasets_federados_eliminados': generator.\n datasets_federados_eliminados(), 'datasets_no_federados': generator\n .datasets_no_federados(), 'datasets_federados': generator.\n datasets_federados(), 'datasets_federados_pct': generator.\n datasets_federados_pct(), 'distribuciones_federadas_cant':\n generator.distribuciones_federadas_cant()})\n return result\n",
"role": "user"
},
{
"content": "federation_indicators",
"role": "assistant"
}
] |
def x(fields, network_indicators):
percentages = {'datasets_meta_ok_pct': (network_indicators.get(
'datasets_meta_ok_cant'), network_indicators.get(
'datasets_meta_error_cant')), 'datasets_actualizados_pct': (
network_indicators.get('datasets_actualizados_cant'),
network_indicators.get('datasets_desactualizados_cant')),
'datasets_federados_pct': (network_indicators.get(
'datasets_federados_cant'), network_indicators.get(
'datasets_no_federados_cant')), 'datasets_con_datos_pct': (
network_indicators.get('datasets_con_datos_cant'),
network_indicators.get('datasets_sin_datos_cant'))}
for indicator in percentages:
pct = 0.0
partial = percentages[indicator][0] or 0
total = partial + (percentages[indicator][1] or 0)
if total:
pct = float(partial) / total
network_indicators[indicator] = round(pct, 4)
if fields:
rec_pct = float(fields['recomendado']) / fields['total_recomendado']
opt_pct = float(fields['optativo']) / fields['total_optativo']
network_indicators.update({'campos_recomendados_pct': round(rec_pct,
4), 'campos_optativos_pct': round(opt_pct, 4)})
|
network_indicator_percentages
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fields, network_indicators):\n percentages = {'datasets_meta_ok_pct': (network_indicators.get(\n 'datasets_meta_ok_cant'), network_indicators.get(\n 'datasets_meta_error_cant')), 'datasets_actualizados_pct': (\n network_indicators.get('datasets_actualizados_cant'),\n network_indicators.get('datasets_desactualizados_cant')),\n 'datasets_federados_pct': (network_indicators.get(\n 'datasets_federados_cant'), network_indicators.get(\n 'datasets_no_federados_cant')), 'datasets_con_datos_pct': (\n network_indicators.get('datasets_con_datos_cant'),\n network_indicators.get('datasets_sin_datos_cant'))}\n for indicator in percentages:\n pct = 0.0\n partial = percentages[indicator][0] or 0\n total = partial + (percentages[indicator][1] or 0)\n if total:\n pct = float(partial) / total\n network_indicators[indicator] = round(pct, 4)\n if fields:\n rec_pct = float(fields['recomendado']) / fields['total_recomendado']\n opt_pct = float(fields['optativo']) / fields['total_optativo']\n network_indicators.update({'campos_recomendados_pct': round(rec_pct,\n 4), 'campos_optativos_pct': round(opt_pct, 4)})\n",
"role": "user"
},
{
"content": "network_indicator_percentages",
"role": "assistant"
}
] |
def x(catalog, validator=None):
result = {'datasets_cant': None, 'distribuciones_cant': None,
'datasets_meta_ok_cant': None, 'datasets_meta_error_cant': None,
'datasets_meta_ok_pct': None, 'datasets_con_datos_cant': None,
'datasets_sin_datos_cant': None, 'datasets_con_datos_pct': None}
try:
summary = generate_datasets_summary(catalog, validator=validator)
except Exception as e:
msg = u'Error generando resumen del catálogo {}: {}'.format(catalog
['title'], str(e))
logger.error(msg)
return result
cant_ok = 0
cant_error = 0
cant_data = 0
cant_without_data = 0
cant_distribuciones = 0
datasets_total = len(summary)
for dataset in summary:
cant_distribuciones += dataset['cant_distribuciones']
if dataset['tiene_datos'] == 'SI':
cant_data += 1
else:
cant_without_data += 1
if dataset['estado_metadatos'] == 'OK':
cant_ok += 1
else:
cant_error += 1
datasets_ok_pct = 0
datasets_with_data_pct = 0
if datasets_total:
datasets_ok_pct = round(float(cant_ok) / datasets_total, 4)
datasets_with_data_pct = round(float(cant_data) / datasets_total, 4)
result.update({'datasets_cant': datasets_total, 'distribuciones_cant':
cant_distribuciones, 'datasets_meta_ok_cant': cant_ok,
'datasets_meta_error_cant': cant_error, 'datasets_meta_ok_pct':
datasets_ok_pct, 'datasets_con_datos_cant': cant_data,
'datasets_sin_datos_cant': cant_without_data,
'datasets_con_datos_pct': datasets_with_data_pct})
return result
|
generate_status_indicators
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog, validator=None):\n result = {'datasets_cant': None, 'distribuciones_cant': None,\n 'datasets_meta_ok_cant': None, 'datasets_meta_error_cant': None,\n 'datasets_meta_ok_pct': None, 'datasets_con_datos_cant': None,\n 'datasets_sin_datos_cant': None, 'datasets_con_datos_pct': None}\n try:\n summary = generate_datasets_summary(catalog, validator=validator)\n except Exception as e:\n msg = u'Error generando resumen del catálogo {}: {}'.format(catalog\n ['title'], str(e))\n logger.error(msg)\n return result\n cant_ok = 0\n cant_error = 0\n cant_data = 0\n cant_without_data = 0\n cant_distribuciones = 0\n datasets_total = len(summary)\n for dataset in summary:\n cant_distribuciones += dataset['cant_distribuciones']\n if dataset['tiene_datos'] == 'SI':\n cant_data += 1\n else:\n cant_without_data += 1\n if dataset['estado_metadatos'] == 'OK':\n cant_ok += 1\n else:\n cant_error += 1\n datasets_ok_pct = 0\n datasets_with_data_pct = 0\n if datasets_total:\n datasets_ok_pct = round(float(cant_ok) / datasets_total, 4)\n datasets_with_data_pct = round(float(cant_data) / datasets_total, 4)\n result.update({'datasets_cant': datasets_total, 'distribuciones_cant':\n cant_distribuciones, 'datasets_meta_ok_cant': cant_ok,\n 'datasets_meta_error_cant': cant_error, 'datasets_meta_ok_pct':\n datasets_ok_pct, 'datasets_con_datos_cant': cant_data,\n 'datasets_sin_datos_cant': cant_without_data,\n 'datasets_con_datos_pct': datasets_with_data_pct})\n return result\n",
"role": "user"
},
{
"content": "generate_status_indicators",
"role": "assistant"
}
] |
def x(catalog, tolerance=0.2, only_numeric=False):
result = {'datasets_desactualizados_cant': None,
'datasets_actualizados_cant': None, 'datasets_actualizados_pct':
None, 'catalogo_ultima_actualizacion_dias': None}
if not only_numeric:
result.update({'datasets_frecuencia_cant': {}})
try:
dias_ultima_actualizacion = _days_from_last_update(catalog, 'modified')
if not dias_ultima_actualizacion:
dias_ultima_actualizacion = _days_from_last_update(catalog,
'issued')
result['catalogo_ultima_actualizacion_dias'
] = dias_ultima_actualizacion
except Exception as e:
msg = (u'Error generando indicadores de fecha del catálogo {}: {}'.
format(catalog['title'], str(e)))
logger.error(msg)
return result
actualizados = 0
desactualizados = 0
periodicity_amount = {}
for dataset in catalog.get('dataset', []):
periodicity = dataset.get('accrualPeriodicity')
if not periodicity:
continue
if periodicity == 'eventual':
actualizados += 1
prev_periodicity = periodicity_amount.get(periodicity, 0)
periodicity_amount[periodicity] = prev_periodicity + 1
continue
if 'modified' not in dataset:
desactualizados += 1
else:
try:
date = helpers.parse_date_string(dataset['modified'])
days_diff = float((datetime.now() - date).days)
interval = helpers.parse_repeating_time_interval(periodicity
) * (1 + tolerance)
except Exception as e:
msg = (
u'Error generando indicadoresde fecha del dataset {} en {}: {}'
)
msg.format(dataset['identifier'], catalog['title'], str(e))
logger.error(msg)
desactualizados += 1
continue
if days_diff < interval:
actualizados += 1
else:
desactualizados += 1
prev_periodicity = periodicity_amount.get(periodicity, 0)
periodicity_amount[periodicity] = prev_periodicity + 1
datasets_total = len(catalog.get('dataset', []))
actualizados_pct = 0
if datasets_total:
actualizados_pct = float(actualizados) / datasets_total
result.update({'datasets_desactualizados_cant': desactualizados,
'datasets_actualizados_cant': actualizados,
'datasets_actualizados_pct': round(actualizados_pct, 4)})
if not only_numeric:
result.update({'datasets_frecuencia_cant': periodicity_amount})
return result
|
generate_date_indicators
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog, tolerance=0.2, only_numeric=False):\n result = {'datasets_desactualizados_cant': None,\n 'datasets_actualizados_cant': None, 'datasets_actualizados_pct':\n None, 'catalogo_ultima_actualizacion_dias': None}\n if not only_numeric:\n result.update({'datasets_frecuencia_cant': {}})\n try:\n dias_ultima_actualizacion = _days_from_last_update(catalog, 'modified')\n if not dias_ultima_actualizacion:\n dias_ultima_actualizacion = _days_from_last_update(catalog,\n 'issued')\n result['catalogo_ultima_actualizacion_dias'\n ] = dias_ultima_actualizacion\n except Exception as e:\n msg = (u'Error generando indicadores de fecha del catálogo {}: {}'.\n format(catalog['title'], str(e)))\n logger.error(msg)\n return result\n actualizados = 0\n desactualizados = 0\n periodicity_amount = {}\n for dataset in catalog.get('dataset', []):\n periodicity = dataset.get('accrualPeriodicity')\n if not periodicity:\n continue\n if periodicity == 'eventual':\n actualizados += 1\n prev_periodicity = periodicity_amount.get(periodicity, 0)\n periodicity_amount[periodicity] = prev_periodicity + 1\n continue\n if 'modified' not in dataset:\n desactualizados += 1\n else:\n try:\n date = helpers.parse_date_string(dataset['modified'])\n days_diff = float((datetime.now() - date).days)\n interval = helpers.parse_repeating_time_interval(periodicity\n ) * (1 + tolerance)\n except Exception as e:\n msg = (\n u'Error generando indicadoresde fecha del dataset {} en {}: {}'\n )\n msg.format(dataset['identifier'], catalog['title'], str(e))\n logger.error(msg)\n desactualizados += 1\n continue\n if days_diff < interval:\n actualizados += 1\n else:\n desactualizados += 1\n prev_periodicity = periodicity_amount.get(periodicity, 0)\n periodicity_amount[periodicity] = prev_periodicity + 1\n datasets_total = len(catalog.get('dataset', []))\n actualizados_pct = 0\n if datasets_total:\n actualizados_pct = float(actualizados) / datasets_total\n result.update({'datasets_desactualizados_cant': desactualizados,\n 'datasets_actualizados_cant': actualizados,\n 'datasets_actualizados_pct': round(actualizados_pct, 4)})\n if not only_numeric:\n result.update({'datasets_frecuencia_cant': periodicity_amount})\n return result\n",
"role": "user"
},
{
"content": "generate_date_indicators",
"role": "assistant"
}
] |
def x(catalog, date_field='modified'):
date_modified = catalog.get(date_field, None)
dias_ultima_actualizacion = None
if isinstance(date_modified, string_types):
date = helpers.parse_date_string(date_modified)
dias_ultima_actualizacion = (datetime.now() - date
).days if date else None
for dataset in catalog.get('dataset', []):
date = helpers.parse_date_string(dataset.get(date_field, ''))
days_diff = float((datetime.now() - date).days) if date else None
if (not dias_ultima_actualizacion or days_diff and days_diff <
dias_ultima_actualizacion):
dias_ultima_actualizacion = days_diff
if dias_ultima_actualizacion:
return int(dias_ultima_actualizacion)
else:
return None
|
days_from_last_update
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog, date_field='modified'):\n date_modified = catalog.get(date_field, None)\n dias_ultima_actualizacion = None\n if isinstance(date_modified, string_types):\n date = helpers.parse_date_string(date_modified)\n dias_ultima_actualizacion = (datetime.now() - date\n ).days if date else None\n for dataset in catalog.get('dataset', []):\n date = helpers.parse_date_string(dataset.get(date_field, ''))\n days_diff = float((datetime.now() - date).days) if date else None\n if (not dias_ultima_actualizacion or days_diff and days_diff <\n dias_ultima_actualizacion):\n dias_ultima_actualizacion = days_diff\n if dias_ultima_actualizacion:\n return int(dias_ultima_actualizacion)\n else:\n return None\n",
"role": "user"
},
{
"content": "days_from_last_update",
"role": "assistant"
}
] |
def x(catalog):
catalog = readers.read_catalog(catalog)
catalog_fields_path = os.path.join(CATALOG_FIELDS_PATH, 'fields.json')
with open(catalog_fields_path) as f:
catalog_fields = json.load(f)
return _count_fields_recursive(catalog, catalog_fields)
|
count_required_and_optional_fields
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(catalog):\n catalog = readers.read_catalog(catalog)\n catalog_fields_path = os.path.join(CATALOG_FIELDS_PATH, 'fields.json')\n with open(catalog_fields_path) as f:\n catalog_fields = json.load(f)\n return _count_fields_recursive(catalog, catalog_fields)\n",
"role": "user"
},
{
"content": "count_required_and_optional_fields",
"role": "assistant"
}
] |
def x(dataset, fields):
key_count = {'recomendado': 0, 'optativo': 0, 'requerido': 0,
'total_optativo': 0, 'total_recomendado': 0, 'total_requerido': 0}
for k, v in fields.items():
if isinstance(v, dict):
elements = dataset.get(k)
if not isinstance(elements, (list, dict)):
elements = [{}]
if isinstance(elements, dict):
elements = [dataset[k].copy()]
for element in elements:
result = _count_fields_recursive(element, v)
for key in result:
key_count[key] += result[key]
else:
key_count['total_' + v] += 1
if k in dataset:
key_count[v] += 1
return key_count
|
count_fields_recursive
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(dataset, fields):\n key_count = {'recomendado': 0, 'optativo': 0, 'requerido': 0,\n 'total_optativo': 0, 'total_recomendado': 0, 'total_requerido': 0}\n for k, v in fields.items():\n if isinstance(v, dict):\n elements = dataset.get(k)\n if not isinstance(elements, (list, dict)):\n elements = [{}]\n if isinstance(elements, dict):\n elements = [dataset[k].copy()]\n for element in elements:\n result = _count_fields_recursive(element, v)\n for key in result:\n key_count[key] += result[key]\n else:\n key_count['total_' + v] += 1\n if k in dataset:\n key_count[v] += 1\n return key_count\n",
"role": "user"
},
{
"content": "count_fields_recursive",
"role": "assistant"
}
] |
def x(key: str, index: Index, index_map: IndexMap=None) ->pd.Series:
if len(index) > 0:
random_state = np.random.RandomState(seed=get_hash(key))
sample_size = (index_map.map_size if index_map is not None else
index.max() + 1)
try:
draw_index = index_map[index]
except (IndexError, TypeError):
draw_index = index
raw_draws = random_state.random_sample(sample_size)
return pd.Series(raw_draws[draw_index], index=index)
return pd.Series(index=index)
|
random
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(key: str, index: Index, index_map: IndexMap=None) ->pd.Series:\n if len(index) > 0:\n random_state = np.random.RandomState(seed=get_hash(key))\n sample_size = (index_map.map_size if index_map is not None else \n index.max() + 1)\n try:\n draw_index = index_map[index]\n except (IndexError, TypeError):\n draw_index = index\n raw_draws = random_state.random_sample(sample_size)\n return pd.Series(raw_draws[draw_index], index=index)\n return pd.Series(index=index)\n",
"role": "user"
},
{
"content": "random",
"role": "assistant"
}
] |
def x(key: str) ->int:
return int(hashlib.sha1(key.encode('utf8')).hexdigest(), 16) % 4294967295
|
get_hash
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(key: str) ->int:\n return int(hashlib.sha1(key.encode('utf8')).hexdigest(), 16) % 4294967295\n",
"role": "user"
},
{
"content": "get_hash",
"role": "assistant"
}
] |
def x(key: str, index: Index, choices: Array, p: Array=None, index_map:
IndexMap=None) ->pd.Series:
p = _set_residual_probability(_normalize_shape(p, index)
) if p is not None else np.ones((len(index), len(choices)))
p = p / p.sum(axis=1, keepdims=True)
draw = random(key, index, index_map)
p_bins = np.cumsum(p, axis=1)
choice_index = (draw.values[np.newaxis].T > p_bins).sum(axis=1)
return pd.Series(np.array(choices)[choice_index], index=index)
|
choice
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(key: str, index: Index, choices: Array, p: Array=None, index_map:\n IndexMap=None) ->pd.Series:\n p = _set_residual_probability(_normalize_shape(p, index)\n ) if p is not None else np.ones((len(index), len(choices)))\n p = p / p.sum(axis=1, keepdims=True)\n draw = random(key, index, index_map)\n p_bins = np.cumsum(p, axis=1)\n choice_index = (draw.values[np.newaxis].T > p_bins).sum(axis=1)\n return pd.Series(np.array(choices)[choice_index], index=index)\n",
"role": "user"
},
{
"content": "choice",
"role": "assistant"
}
] |
def x(p: np.ndarray) ->np.ndarray:
residual_mask = p == RESIDUAL_CHOICE
if residual_mask.any():
if np.any(np.sum(residual_mask, axis=1) - 1):
raise RandomnessError(
'More than one residual choice supplied for a single set of weights. Weights: {}.'
.format(p))
p[residual_mask] = 0
residual_p = 1 - np.sum(p, axis=1)
if np.any(residual_p < 0):
raise RandomnessError(
'Residual choice supplied with weights that summed to more than 1. Weights: {}.'
.format(p))
p[residual_mask] = residual_p
return p
|
set_residual_probability
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(p: np.ndarray) ->np.ndarray:\n residual_mask = p == RESIDUAL_CHOICE\n if residual_mask.any():\n if np.any(np.sum(residual_mask, axis=1) - 1):\n raise RandomnessError(\n 'More than one residual choice supplied for a single set of weights. Weights: {}.'\n .format(p))\n p[residual_mask] = 0\n residual_p = 1 - np.sum(p, axis=1)\n if np.any(residual_p < 0):\n raise RandomnessError(\n 'Residual choice supplied with weights that summed to more than 1. Weights: {}.'\n .format(p))\n p[residual_mask] = residual_p\n return p\n",
"role": "user"
},
{
"content": "set_residual_probability",
"role": "assistant"
}
] |
def x(key: str, population: Union[pd.DataFrame, pd.Series, Index],
probability: Array, index_map: IndexMap=None) ->Union[pd.DataFrame, pd.
Series, Index]:
if population.empty:
return population
index = population if isinstance(population, pd.Index
) else population.index
draw = random(key, index, index_map)
mask = np.array(draw < probability)
return population[mask]
|
filter_for_probability
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(key: str, population: Union[pd.DataFrame, pd.Series, Index],\n probability: Array, index_map: IndexMap=None) ->Union[pd.DataFrame, pd.\n Series, Index]:\n if population.empty:\n return population\n index = population if isinstance(population, pd.Index\n ) else population.index\n draw = random(key, index, index_map)\n mask = np.array(draw < probability)\n return population[mask]\n",
"role": "user"
},
{
"content": "filter_for_probability",
"role": "assistant"
}
] |
def x(self, new_keys: Index):
if not self._map.index.intersection(new_keys).empty:
raise KeyError('Non-unique keys in index.')
mapping_update = self.hash_(new_keys)
if self._map.empty:
self._map = mapping_update.drop_duplicates()
else:
self._map = self._map.append(mapping_update).drop_duplicates()
collisions = mapping_update.index.difference(self._map.index)
salt = 1
while not collisions.empty:
mapping_update = self.hash_(collisions, salt)
self._map = self._map.append(mapping_update).drop_duplicates()
collisions = mapping_update.index.difference(self._map.index)
salt += 1
|
update
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, new_keys: Index):\n if not self._map.index.intersection(new_keys).empty:\n raise KeyError('Non-unique keys in index.')\n mapping_update = self.hash_(new_keys)\n if self._map.empty:\n self._map = mapping_update.drop_duplicates()\n else:\n self._map = self._map.append(mapping_update).drop_duplicates()\n collisions = mapping_update.index.difference(self._map.index)\n salt = 1\n while not collisions.empty:\n mapping_update = self.hash_(collisions, salt)\n self._map = self._map.append(mapping_update).drop_duplicates()\n collisions = mapping_update.index.difference(self._map.index)\n salt += 1\n",
"role": "user"
},
{
"content": "update",
"role": "assistant"
}
] |
def x(self, keys: Index, salt: int=0) ->pd.Series:
key_frame = keys.to_frame()
new_map = pd.Series(0, index=keys)
salt = self.convert_to_ten_digit_int(pd.Series(salt, index=keys))
for i, column_name in enumerate(key_frame.columns):
column = self.convert_to_ten_digit_int(key_frame[column_name])
primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 27]
out = pd.Series(1, index=column.index)
for idx, p in enumerate(primes):
out *= np.power(p, self.digit(column, idx))
new_map += out + salt
return new_map % self.map_size
|
hash_
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, keys: Index, salt: int=0) ->pd.Series:\n key_frame = keys.to_frame()\n new_map = pd.Series(0, index=keys)\n salt = self.convert_to_ten_digit_int(pd.Series(salt, index=keys))\n for i, column_name in enumerate(key_frame.columns):\n column = self.convert_to_ten_digit_int(key_frame[column_name])\n primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 27]\n out = pd.Series(1, index=column.index)\n for idx, p in enumerate(primes):\n out *= np.power(p, self.digit(column, idx))\n new_map += out + salt\n return new_map % self.map_size\n",
"role": "user"
},
{
"content": "hash_",
"role": "assistant"
}
] |
def x(self, column: pd.Series) ->pd.Series:
if isinstance(column.iloc[0], datetime.datetime):
column = self.clip_to_seconds(column.astype(int))
elif np.issubdtype(column.iloc[0], np.integer):
if not len(column >= 0) == len(column):
raise RandomnessError(
'Values in integer columns must be greater than or equal to zero.'
)
column = self.spread(column)
elif np.issubdtype(column.iloc[0], np.floating):
column = self.shift(column)
else:
raise RandomnessError(
f'Unhashable column type {type(column.iloc[0])}. IndexMap accepts datetime like columns and numeric columns.'
)
return column
|
convert_to_ten_digit_int
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, column: pd.Series) ->pd.Series:\n if isinstance(column.iloc[0], datetime.datetime):\n column = self.clip_to_seconds(column.astype(int))\n elif np.issubdtype(column.iloc[0], np.integer):\n if not len(column >= 0) == len(column):\n raise RandomnessError(\n 'Values in integer columns must be greater than or equal to zero.'\n )\n column = self.spread(column)\n elif np.issubdtype(column.iloc[0], np.floating):\n column = self.shift(column)\n else:\n raise RandomnessError(\n f'Unhashable column type {type(column.iloc[0])}. IndexMap accepts datetime like columns and numeric columns.'\n )\n return column\n",
"role": "user"
},
{
"content": "convert_to_ten_digit_int",
"role": "assistant"
}
] |
def x(m: Union[int, pd.Series], n: int) ->Union[int, pd.Series]:
return m // 10 ** n % 10
|
digit
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(m: Union[int, pd.Series], n: int) ->Union[int, pd.Series]:\n return m // 10 ** n % 10\n",
"role": "user"
},
{
"content": "digit",
"role": "assistant"
}
] |
def x(m: Union[int, pd.Series]) ->Union[int, pd.Series]:
return m // pd.Timedelta(1, unit='s').value
|
clip_to_seconds
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(m: Union[int, pd.Series]) ->Union[int, pd.Series]:\n return m // pd.Timedelta(1, unit='s').value\n",
"role": "user"
},
{
"content": "clip_to_seconds",
"role": "assistant"
}
] |
def x(self, m: Union[int, pd.Series]) ->Union[int, pd.Series]:
return m * 111111 % self.TEN_DIGIT_MODULUS
|
spread
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, m: Union[int, pd.Series]) ->Union[int, pd.Series]:\n return m * 111111 % self.TEN_DIGIT_MODULUS\n",
"role": "user"
},
{
"content": "spread",
"role": "assistant"
}
] |
def x(self, m: Union[float, pd.Series]) ->Union[int, pd.Series]:
out = m % 1 * self.TEN_DIGIT_MODULUS // 1
if isinstance(out, pd.Series):
return out.astype(int)
return int(out)
|
shift
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, m: Union[float, pd.Series]) ->Union[int, pd.Series]:\n out = m % 1 * self.TEN_DIGIT_MODULUS // 1\n if isinstance(out, pd.Series):\n return out.astype(int)\n return int(out)\n",
"role": "user"
},
{
"content": "shift",
"role": "assistant"
}
] |
def x(self, key: Any) ->'RandomnessStream':
if self._for_initialization:
raise RandomnessError('Initialization streams cannot be copied.')
elif self._manager:
return self._manager.get_randomness_stream('_'.join([self.key, key]))
else:
return RandomnessStream(self.key, self.clock, self.seed, self.index_map
)
|
copy_with_additional_key
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, key: Any) ->'RandomnessStream':\n if self._for_initialization:\n raise RandomnessError('Initialization streams cannot be copied.')\n elif self._manager:\n return self._manager.get_randomness_stream('_'.join([self.key, key]))\n else:\n return RandomnessStream(self.key, self.clock, self.seed, self.index_map\n )\n",
"role": "user"
},
{
"content": "copy_with_additional_key",
"role": "assistant"
}
] |
def x(self, additional_key: Any=None) ->str:
return '_'.join([self.key, str(self.clock()), str(additional_key), str(
self.seed)])
|
key
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, additional_key: Any=None) ->str:\n return '_'.join([self.key, str(self.clock()), str(additional_key), str(\n self.seed)])\n",
"role": "user"
},
{
"content": "key",
"role": "assistant"
}
] |
def x(self, index: Index, additional_key: Any=None) ->pd.Series:
if self._for_initialization:
draw = random(self._key(additional_key), pd.Index(range(len(index))
), self.index_map)
draw.index = index
else:
draw = random(self._key(additional_key), index, self.index_map)
return draw
|
get_draw
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, index: Index, additional_key: Any=None) ->pd.Series:\n if self._for_initialization:\n draw = random(self._key(additional_key), pd.Index(range(len(index))\n ), self.index_map)\n draw.index = index\n else:\n draw = random(self._key(additional_key), index, self.index_map)\n return draw\n",
"role": "user"
},
{
"content": "get_draw",
"role": "assistant"
}
] |
def x(self, population: Union[pd.DataFrame, pd.Series, Index], rate: Array,
additional_key: Any=None) ->Index:
return self.filter_for_probability(population, rate_to_probability(rate
), additional_key)
|
filter_for_rate
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, population: Union[pd.DataFrame, pd.Series, Index], rate: Array,\n additional_key: Any=None) ->Index:\n return self.filter_for_probability(population, rate_to_probability(rate\n ), additional_key)\n",
"role": "user"
},
{
"content": "filter_for_rate",
"role": "assistant"
}
] |
def x(self, population: Union[pd.DataFrame, pd.Series, Index], probability:
Array, additional_key: Any=None) ->Index:
return filter_for_probability(self._key(additional_key), population,
probability, self.index_map)
|
filter_for_probability
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, population: Union[pd.DataFrame, pd.Series, Index], probability:\n Array, additional_key: Any=None) ->Index:\n return filter_for_probability(self._key(additional_key), population,\n probability, self.index_map)\n",
"role": "user"
},
{
"content": "filter_for_probability",
"role": "assistant"
}
] |
def x(self, index: Index, choices: Array, p: Array=None, additional_key:
Any=None) ->pd.Series:
return choice(self._key(additional_key), index, choices, p, self.index_map)
|
choice
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, index: Index, choices: Array, p: Array=None, additional_key:\n Any=None) ->pd.Series:\n return choice(self._key(additional_key), index, choices, p, self.index_map)\n",
"role": "user"
},
{
"content": "choice",
"role": "assistant"
}
] |
def x(self, decision_point: str, for_initialization: bool=False
) ->RandomnessStream:
if decision_point in self._decision_points:
raise RandomnessError(
f'Two separate places are attempting to create the same randomness stream for {decision_point}'
)
stream = RandomnessStream(key=decision_point, clock=self._clock, seed=
self._seed, index_map=self._key_mapping, manager=self,
for_initialization=for_initialization)
self._decision_points[decision_point] = stream
return stream
|
get_randomness_stream
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, decision_point: str, for_initialization: bool=False\n ) ->RandomnessStream:\n if decision_point in self._decision_points:\n raise RandomnessError(\n f'Two separate places are attempting to create the same randomness stream for {decision_point}'\n )\n stream = RandomnessStream(key=decision_point, clock=self._clock, seed=\n self._seed, index_map=self._key_mapping, manager=self,\n for_initialization=for_initialization)\n self._decision_points[decision_point] = stream\n return stream\n",
"role": "user"
},
{
"content": "get_randomness_stream",
"role": "assistant"
}
] |
def x(self, simulants: pd.DataFrame):
if not all(k in simulants.columns for k in self._key_columns):
raise RandomnessError(
'The simulants dataframe does not have all specified key_columns.')
self._key_mapping.update(simulants.set_index(self._key_columns).index)
|
register_simulants
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, simulants: pd.DataFrame):\n if not all(k in simulants.columns for k in self._key_columns):\n raise RandomnessError(\n 'The simulants dataframe does not have all specified key_columns.')\n self._key_mapping.update(simulants.set_index(self._key_columns).index)\n",
"role": "user"
},
{
"content": "register_simulants",
"role": "assistant"
}
] |
def x(self, decision_point: str, for_initialization: bool=False
) ->RandomnessStream:
return self._randomness_manager.get_randomness_stream(decision_point,
for_initialization)
|
get_stream
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, decision_point: str, for_initialization: bool=False\n ) ->RandomnessStream:\n return self._randomness_manager.get_randomness_stream(decision_point,\n for_initialization)\n",
"role": "user"
},
{
"content": "get_stream",
"role": "assistant"
}
] |
def x(self):
group_ids = set()
for group in Group.objects.all():
if group.properties.student_visible:
group_ids.add(group.id)
return Group.objects.filter(id__in=group_ids)
|
student_visible
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n group_ids = set()\n for group in Group.objects.all():\n if group.properties.student_visible:\n group_ids.add(group.id)\n return Group.objects.filter(id__in=group_ids)\n",
"role": "user"
},
{
"content": "student_visible",
"role": "assistant"
}
] |
def x(self, value):
if value in self.empty_values:
return ''
value = force_text(value).strip()
return value
|
to_python
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, value):\n if value in self.empty_values:\n return ''\n value = force_text(value).strip()\n return value\n",
"role": "user"
},
{
"content": "to_python",
"role": "assistant"
}
] |
def x(value):
if value is None or isinstance(value, (int, long)):
return value
if NUMBER_TIME.match(value):
return long(value)
simple = SIMPLE_TIME.match(value)
if SIMPLE_TIME.match(value):
multiplier = long(simple.groups()[0])
constant = SIMPLE_TIMES[simple.groups()[1]]
return multiplier * constant
if value in GREGORIAN_TIMES:
return value
raise ValueError('Unsupported time format %s' % value)
|
resolve_time
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(value):\n if value is None or isinstance(value, (int, long)):\n return value\n if NUMBER_TIME.match(value):\n return long(value)\n simple = SIMPLE_TIME.match(value)\n if SIMPLE_TIME.match(value):\n multiplier = long(simple.groups()[0])\n constant = SIMPLE_TIMES[simple.groups()[1]]\n return multiplier * constant\n if value in GREGORIAN_TIMES:\n return value\n raise ValueError('Unsupported time format %s' % value)\n",
"role": "user"
},
{
"content": "resolve_time",
"role": "assistant"
}
] |
def x(self, t0=None, t1=None):
if t0 != None and t1 != None:
tb0 = self.to_bucket(t0)
tb1 = self.to_bucket(t1, steps=1)
if tb0 == tb1:
return self._step
return self.from_bucket(tb1) - self.from_bucket(tb0)
return self._step
|
step_size
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, t0=None, t1=None):\n if t0 != None and t1 != None:\n tb0 = self.to_bucket(t0)\n tb1 = self.to_bucket(t1, steps=1)\n if tb0 == tb1:\n return self._step\n return self.from_bucket(tb1) - self.from_bucket(tb0)\n return self._step\n",
"role": "user"
},
{
"content": "step_size",
"role": "assistant"
}
] |
def x(self, start, end):
start_bucket = self.to_bucket(start)
end_bucket = self.to_bucket(end)
return range(start_bucket, end_bucket + 1)
|
buckets
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, start, end):\n start_bucket = self.to_bucket(start)\n end_bucket = self.to_bucket(end)\n return range(start_bucket, end_bucket + 1)\n",
"role": "user"
},
{
"content": "buckets",
"role": "assistant"
}
] |
def x(self, steps, relative_time=None):
if steps:
if relative_time:
rtime = self.to_bucket(relative_time)
ntime = self.to_bucket(time.time())
if ntime - rtime > steps:
return 0
else:
return (steps + rtime - ntime) * self._step
return steps * self._step
return None
|
ttl
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, steps, relative_time=None):\n if steps:\n if relative_time:\n rtime = self.to_bucket(relative_time)\n ntime = self.to_bucket(time.time())\n if ntime - rtime > steps:\n return 0\n else:\n return (steps + rtime - ntime) * self._step\n return steps * self._step\n return None\n",
"role": "user"
},
{
"content": "ttl",
"role": "assistant"
}
] |
def x(self, t0, t1=None):
tb0 = self.to_bucket(t0)
if t1:
tb1 = self.to_bucket(t1, steps=1)
else:
tb1 = self.to_bucket(t0, steps=1)
days = (self.from_bucket(tb1, native=True) - self.from_bucket(tb0,
native=True)).days
return days * SIMPLE_TIMES['d']
|
step_size
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, t0, t1=None):\n tb0 = self.to_bucket(t0)\n if t1:\n tb1 = self.to_bucket(t1, steps=1)\n else:\n tb1 = self.to_bucket(t0, steps=1)\n days = (self.from_bucket(tb1, native=True) - self.from_bucket(tb0,\n native=True)).days\n return days * SIMPLE_TIMES['d']\n",
"role": "user"
},
{
"content": "step_size",
"role": "assistant"
}
] |
def x(self, timestamp, steps=0):
dt = datetime.utcfromtimestamp(timestamp)
if steps != 0:
if self._step == 'daily':
dt = dt + timedelta(days=steps)
elif self._step == 'weekly':
dt = dt + timedelta(weeks=steps)
elif self._step == 'monthly':
dt = dt + MonthDelta(steps)
elif self._step == 'yearly':
year = int(dt.strftime(self.FORMATS[self._step]))
year += steps
dt = datetime(year=year, month=1, day=1)
return int(dt.strftime(self.FORMATS[self._step]))
|
to_bucket
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, timestamp, steps=0):\n dt = datetime.utcfromtimestamp(timestamp)\n if steps != 0:\n if self._step == 'daily':\n dt = dt + timedelta(days=steps)\n elif self._step == 'weekly':\n dt = dt + timedelta(weeks=steps)\n elif self._step == 'monthly':\n dt = dt + MonthDelta(steps)\n elif self._step == 'yearly':\n year = int(dt.strftime(self.FORMATS[self._step]))\n year += steps\n dt = datetime(year=year, month=1, day=1)\n return int(dt.strftime(self.FORMATS[self._step]))\n",
"role": "user"
},
{
"content": "to_bucket",
"role": "assistant"
}
] |
def x(self, bucket, native=False):
bucket = str(bucket)
if self._step == 'weekly':
year, week = bucket[:4], bucket[4:]
normal = datetime(year=int(year), month=1, day=1) + timedelta(weeks
=int(week))
else:
normal = datetime.strptime(bucket, self.FORMATS[self._step])
if native:
return normal
return long(time.mktime(normal.timetuple()))
|
from_bucket
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, bucket, native=False):\n bucket = str(bucket)\n if self._step == 'weekly':\n year, week = bucket[:4], bucket[4:]\n normal = datetime(year=int(year), month=1, day=1) + timedelta(weeks\n =int(week))\n else:\n normal = datetime.strptime(bucket, self.FORMATS[self._step])\n if native:\n return normal\n return long(time.mktime(normal.timetuple()))\n",
"role": "user"
},
{
"content": "from_bucket",
"role": "assistant"
}
] |
def x(self, start, end):
rval = [self.to_bucket(start)]
step = 1
while True:
bucket = self.to_bucket(start, step)
bucket_time = self.from_bucket(bucket)
if bucket_time >= end:
if bucket_time == end:
rval.append(bucket)
break
rval.append(bucket)
step += 1
return rval
|
buckets
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, start, end):\n rval = [self.to_bucket(start)]\n step = 1\n while True:\n bucket = self.to_bucket(start, step)\n bucket_time = self.from_bucket(bucket)\n if bucket_time >= end:\n if bucket_time == end:\n rval.append(bucket)\n break\n rval.append(bucket)\n step += 1\n return rval\n",
"role": "user"
},
{
"content": "buckets",
"role": "assistant"
}
] |
def x(self, timestamp, steps=0):
return self.from_bucket(self.to_bucket(timestamp, steps))
|
normalize
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, timestamp, steps=0):\n return self.from_bucket(self.to_bucket(timestamp, steps))\n",
"role": "user"
},
{
"content": "normalize",
"role": "assistant"
}
] |
def x(self, steps, relative_time=None):
if steps:
if relative_time:
rtime = self.to_bucket(relative_time)
ntime = self.to_bucket(time.time())
day_diff = (self.from_bucket(ntime, native=True) - self.
from_bucket(rtime, native=True)).days
step_diff = steps * SIMPLE_TIMES[self._step[0]] / SIMPLE_TIMES['d']
if day_diff > step_diff:
return 0
else:
return (step_diff - day_diff) * SIMPLE_TIMES['d']
return steps * SIMPLE_TIMES[self._step[0]]
return None
|
ttl
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, steps, relative_time=None):\n if steps:\n if relative_time:\n rtime = self.to_bucket(relative_time)\n ntime = self.to_bucket(time.time())\n day_diff = (self.from_bucket(ntime, native=True) - self.\n from_bucket(rtime, native=True)).days\n step_diff = steps * SIMPLE_TIMES[self._step[0]] / SIMPLE_TIMES['d']\n if day_diff > step_diff:\n return 0\n else:\n return (step_diff - day_diff) * SIMPLE_TIMES['d']\n return steps * SIMPLE_TIMES[self._step[0]]\n return None\n",
"role": "user"
},
{
"content": "ttl",
"role": "assistant"
}
] |
def x(self, inserts, intervals=0, **kwargs):
if None in inserts:
inserts[time.time()] = inserts.pop(None)
if self._write_func:
for timestamp, names in inserts.iteritems():
for name, values in names.iteritems():
names[name] = [self._write_func(v) for v in values]
self._batch_insert(inserts, intervals, **kwargs)
|
bulk_insert
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, inserts, intervals=0, **kwargs):\n if None in inserts:\n inserts[time.time()] = inserts.pop(None)\n if self._write_func:\n for timestamp, names in inserts.iteritems():\n for name, values in names.iteritems():\n names[name] = [self._write_func(v) for v in values]\n self._batch_insert(inserts, intervals, **kwargs)\n",
"role": "user"
},
{
"content": "bulk_insert",
"role": "assistant"
}
] |
def x(self, name, value, timestamp=None, intervals=0, **kwargs):
if not timestamp:
timestamp = time.time()
if isinstance(value, (list, tuple, set)):
if self._write_func:
value = [self._write_func(v) for v in value]
return self._batch_insert({timestamp: {name: value}}, intervals, **
kwargs)
if self._write_func:
value = self._write_func(value)
self._insert(name, value, timestamp, intervals, **kwargs)
|
insert
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, value, timestamp=None, intervals=0, **kwargs):\n if not timestamp:\n timestamp = time.time()\n if isinstance(value, (list, tuple, set)):\n if self._write_func:\n value = [self._write_func(v) for v in value]\n return self._batch_insert({timestamp: {name: value}}, intervals, **\n kwargs)\n if self._write_func:\n value = self._write_func(value)\n self._insert(name, value, timestamp, intervals, **kwargs)\n",
"role": "user"
},
{
"content": "insert",
"role": "assistant"
}
] |
def x(self, inserts, intervals, **kwargs):
for timestamp, names in inserts.iteritems():
for name, values in names.iteritems():
for value in values:
self._insert(name, value, timestamp, intervals, **kwargs)
|
batch_insert
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, inserts, intervals, **kwargs):\n for timestamp, names in inserts.iteritems():\n for name, values in names.iteritems():\n for value in values:\n self._insert(name, value, timestamp, intervals, **kwargs)\n",
"role": "user"
},
{
"content": "batch_insert",
"role": "assistant"
}
] |
def x(self, timestamp, intervals, config):
rval = [timestamp]
if intervals < 0:
while intervals < 0:
rval.append(config['i_calc'].normalize(timestamp, intervals))
intervals += 1
elif intervals > 0:
while intervals > 0:
rval.append(config['i_calc'].normalize(timestamp, intervals))
intervals -= 1
return rval
|
normalize_timestamps
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, timestamp, intervals, config):\n rval = [timestamp]\n if intervals < 0:\n while intervals < 0:\n rval.append(config['i_calc'].normalize(timestamp, intervals))\n intervals += 1\n elif intervals > 0:\n while intervals > 0:\n rval.append(config['i_calc'].normalize(timestamp, intervals))\n intervals -= 1\n return rval\n",
"role": "user"
},
{
"content": "normalize_timestamps",
"role": "assistant"
}
] |
def x(self, name, interval, **kwargs):
config = self._intervals.get(interval)
if not config:
raise UnknownInterval(interval)
properties = self.properties(name)[interval]
i_buckets = config['i_calc'].buckets(properties['first'], properties[
'last'])
for i_bucket in i_buckets:
data = self.get(name, interval, timestamp=config['i_calc'].
from_bucket(i_bucket), **kwargs)
for timestamp, row in data.items():
yield timestamp, row
|
iterate
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, interval, **kwargs):\n config = self._intervals.get(interval)\n if not config:\n raise UnknownInterval(interval)\n properties = self.properties(name)[interval]\n i_buckets = config['i_calc'].buckets(properties['first'], properties[\n 'last'])\n for i_bucket in i_buckets:\n data = self.get(name, interval, timestamp=config['i_calc'].\n from_bucket(i_bucket), **kwargs)\n for timestamp, row in data.items():\n yield timestamp, row\n",
"role": "user"
},
{
"content": "iterate",
"role": "assistant"
}
] |
def x(self, name, interval, **kwargs):
config = self._intervals.get(interval)
if not config:
raise UnknownInterval(interval)
timestamp = kwargs.get('timestamp', time.time())
fetch = kwargs.get('fetch')
process_row = kwargs.get('process_row') or self._process_row
condense = kwargs.get('condense', False)
join_rows = kwargs.get('join_rows') or self._join
transform = kwargs.get('transform')
condense = kwargs.get('condensed', condense)
if isinstance(name, (list, tuple, set)):
results = [self._get(x, interval, config, timestamp, fetch=fetch,
process_row=process_row) for x in name]
rval = self._join_results(results, True, join_rows)
else:
rval = self._get(name, interval, config, timestamp, fetch=fetch,
process_row=process_row)
if config['coarse']:
step_size = config['i_calc'].step_size(timestamp)
else:
step_size = config['r_calc'].step_size(timestamp)
if condense and not config['coarse']:
condense = condense if callable(condense) else self._condense
rval = {config['i_calc'].normalize(timestamp): condense(rval)}
step_size = config['i_calc'].step_size(timestamp)
if transform:
for k, v in rval.items():
rval[k] = self._process_transform(v, transform, step_size)
return rval
|
get
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, interval, **kwargs):\n config = self._intervals.get(interval)\n if not config:\n raise UnknownInterval(interval)\n timestamp = kwargs.get('timestamp', time.time())\n fetch = kwargs.get('fetch')\n process_row = kwargs.get('process_row') or self._process_row\n condense = kwargs.get('condense', False)\n join_rows = kwargs.get('join_rows') or self._join\n transform = kwargs.get('transform')\n condense = kwargs.get('condensed', condense)\n if isinstance(name, (list, tuple, set)):\n results = [self._get(x, interval, config, timestamp, fetch=fetch,\n process_row=process_row) for x in name]\n rval = self._join_results(results, True, join_rows)\n else:\n rval = self._get(name, interval, config, timestamp, fetch=fetch,\n process_row=process_row)\n if config['coarse']:\n step_size = config['i_calc'].step_size(timestamp)\n else:\n step_size = config['r_calc'].step_size(timestamp)\n if condense and not config['coarse']:\n condense = condense if callable(condense) else self._condense\n rval = {config['i_calc'].normalize(timestamp): condense(rval)}\n step_size = config['i_calc'].step_size(timestamp)\n if transform:\n for k, v in rval.items():\n rval[k] = self._process_transform(v, transform, step_size)\n return rval\n",
"role": "user"
},
{
"content": "get",
"role": "assistant"
}
] |
def x(self, name, interval, **kwargs):
config = self._intervals.get(interval)
if not config:
raise UnknownInterval(interval)
start = kwargs.get('start')
end = kwargs.get('end')
steps = kwargs.get('steps') or config.get('steps', 1)
fetch = kwargs.get('fetch')
process_row = kwargs.get('process_row') or self._process_row
condense = kwargs.get('condense', False)
join_rows = kwargs.get('join_rows') or self._join
collapse = kwargs.get('collapse', False)
transform = kwargs.get('transform')
condense = kwargs.get('condensed', condense)
if collapse:
condense = condense or True
if end is None:
if start is None:
end = time.time()
end_bucket = config['i_calc'].to_bucket(end)
start_bucket = config['i_calc'].to_bucket(end, -steps + 1)
else:
start_bucket = config['i_calc'].to_bucket(start)
end_bucket = config['i_calc'].to_bucket(start, steps - 1)
else:
end_bucket = config['i_calc'].to_bucket(end)
if start is None:
start_bucket = config['i_calc'].to_bucket(end, -steps + 1)
else:
start_bucket = config['i_calc'].to_bucket(start)
start = config['i_calc'].from_bucket(start_bucket)
end = config['i_calc'].from_bucket(end_bucket)
if start > end:
end = start
interval_buckets = config['i_calc'].buckets(start, end)
if isinstance(name, (list, tuple, set)):
results = [self._series(x, interval, config, interval_buckets,
fetch=fetch, process_row=process_row) for x in name]
rval = self._join_results(results, config['coarse'], join_rows)
else:
rval = self._series(name, interval, config, interval_buckets, fetch
=fetch, process_row=process_row)
if not config['coarse']:
if condense:
condense = condense if callable(condense) else self._condense
for key in rval.iterkeys():
data = condense(rval[key])
if transform and not collapse:
data = self._process_transform(data, transform, config[
'i_calc'].step_size(key))
rval[key] = data
elif transform:
for interval, resolutions in rval.items():
for key in resolutions.iterkeys():
resolutions[key] = self._process_transform(resolutions[
key], transform, config['r_calc'].step_size(key))
if config['coarse'] or collapse:
if collapse:
collapse = collapse if callable(collapse
) else condense if callable(condense) else self._condense
data = collapse(rval)
if transform:
rval = {rval.keys()[0]: self._process_transform(data,
transform, config['i_calc'].step_size(rval.keys()[0],
rval.keys()[-1]))}
else:
rval = {rval.keys()[0]: data}
elif transform:
for key, data in rval.items():
rval[key] = self._process_transform(data, transform, config
['i_calc'].step_size(key))
return rval
|
series
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, interval, **kwargs):\n config = self._intervals.get(interval)\n if not config:\n raise UnknownInterval(interval)\n start = kwargs.get('start')\n end = kwargs.get('end')\n steps = kwargs.get('steps') or config.get('steps', 1)\n fetch = kwargs.get('fetch')\n process_row = kwargs.get('process_row') or self._process_row\n condense = kwargs.get('condense', False)\n join_rows = kwargs.get('join_rows') or self._join\n collapse = kwargs.get('collapse', False)\n transform = kwargs.get('transform')\n condense = kwargs.get('condensed', condense)\n if collapse:\n condense = condense or True\n if end is None:\n if start is None:\n end = time.time()\n end_bucket = config['i_calc'].to_bucket(end)\n start_bucket = config['i_calc'].to_bucket(end, -steps + 1)\n else:\n start_bucket = config['i_calc'].to_bucket(start)\n end_bucket = config['i_calc'].to_bucket(start, steps - 1)\n else:\n end_bucket = config['i_calc'].to_bucket(end)\n if start is None:\n start_bucket = config['i_calc'].to_bucket(end, -steps + 1)\n else:\n start_bucket = config['i_calc'].to_bucket(start)\n start = config['i_calc'].from_bucket(start_bucket)\n end = config['i_calc'].from_bucket(end_bucket)\n if start > end:\n end = start\n interval_buckets = config['i_calc'].buckets(start, end)\n if isinstance(name, (list, tuple, set)):\n results = [self._series(x, interval, config, interval_buckets,\n fetch=fetch, process_row=process_row) for x in name]\n rval = self._join_results(results, config['coarse'], join_rows)\n else:\n rval = self._series(name, interval, config, interval_buckets, fetch\n =fetch, process_row=process_row)\n if not config['coarse']:\n if condense:\n condense = condense if callable(condense) else self._condense\n for key in rval.iterkeys():\n data = condense(rval[key])\n if transform and not collapse:\n data = self._process_transform(data, transform, config[\n 'i_calc'].step_size(key))\n rval[key] = data\n elif transform:\n for interval, resolutions in rval.items():\n for key in resolutions.iterkeys():\n resolutions[key] = self._process_transform(resolutions[\n key], transform, config['r_calc'].step_size(key))\n if config['coarse'] or collapse:\n if collapse:\n collapse = collapse if callable(collapse\n ) else condense if callable(condense) else self._condense\n data = collapse(rval)\n if transform:\n rval = {rval.keys()[0]: self._process_transform(data,\n transform, config['i_calc'].step_size(rval.keys()[0],\n rval.keys()[-1]))}\n else:\n rval = {rval.keys()[0]: data}\n elif transform:\n for key, data in rval.items():\n rval[key] = self._process_transform(data, transform, config\n ['i_calc'].step_size(key))\n return rval\n",
"role": "user"
},
{
"content": "series",
"role": "assistant"
}
] |
def x(self, results, coarse, join):
rval = OrderedDict()
i_keys = set()
for res in results:
i_keys.update(res.keys())
for i_key in sorted(i_keys):
if coarse:
rval[i_key] = join([res.get(i_key) for res in results])
else:
rval[i_key] = OrderedDict()
r_keys = set()
for res in results:
r_keys.update(res.get(i_key, {}).keys())
for r_key in sorted(r_keys):
rval[i_key][r_key] = join([res.get(i_key, {}).get(r_key) for
res in results])
return rval
|
join_results
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, results, coarse, join):\n rval = OrderedDict()\n i_keys = set()\n for res in results:\n i_keys.update(res.keys())\n for i_key in sorted(i_keys):\n if coarse:\n rval[i_key] = join([res.get(i_key) for res in results])\n else:\n rval[i_key] = OrderedDict()\n r_keys = set()\n for res in results:\n r_keys.update(res.get(i_key, {}).keys())\n for r_key in sorted(r_keys):\n rval[i_key][r_key] = join([res.get(i_key, {}).get(r_key) for\n res in results])\n return rval\n",
"role": "user"
},
{
"content": "join_results",
"role": "assistant"
}
] |
def x(self, data, transform, step_size):
if isinstance(transform, (list, tuple, set)):
return {t: self._transform(data, t, step_size) for t in transform}
elif isinstance(transform, dict):
return {tn: self._transform(data, tf, step_size) for tn, tf in
transform.items()}
return self._transform(data, transform, step_size)
|
process_transform
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data, transform, step_size):\n if isinstance(transform, (list, tuple, set)):\n return {t: self._transform(data, t, step_size) for t in transform}\n elif isinstance(transform, dict):\n return {tn: self._transform(data, tf, step_size) for tn, tf in\n transform.items()}\n return self._transform(data, transform, step_size)\n",
"role": "user"
},
{
"content": "process_transform",
"role": "assistant"
}
] |
def x(self, data, transform, step_size):
if transform == 'mean':
total = sum(k * v for k, v in data.items())
count = sum(data.values())
data = float(total) / float(count) if count > 0 else 0
elif transform == 'count':
data = sum(data.values())
elif transform == 'min':
data = min(data.keys() or [0])
elif transform == 'max':
data = max(data.keys() or [0])
elif transform == 'sum':
data = sum(k * v for k, v in data.items())
elif transform == 'rate':
data = {k: (v / float(step_size)) for k, v in data.items()}
elif callable(transform):
data = transform(data, step_size)
return data
|
transform
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data, transform, step_size):\n if transform == 'mean':\n total = sum(k * v for k, v in data.items())\n count = sum(data.values())\n data = float(total) / float(count) if count > 0 else 0\n elif transform == 'count':\n data = sum(data.values())\n elif transform == 'min':\n data = min(data.keys() or [0])\n elif transform == 'max':\n data = max(data.keys() or [0])\n elif transform == 'sum':\n data = sum(k * v for k, v in data.items())\n elif transform == 'rate':\n data = {k: (v / float(step_size)) for k, v in data.items()}\n elif callable(transform):\n data = transform(data, step_size)\n return data\n",
"role": "user"
},
{
"content": "transform",
"role": "assistant"
}
] |
def x(self, data):
rval = {}
for resolution, histogram in data.items():
for value, count in histogram.items():
rval[value] = count + rval.get(value, 0)
return rval
|
condense
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data):\n rval = {}\n for resolution, histogram in data.items():\n for value, count in histogram.items():\n rval[value] = count + rval.get(value, 0)\n return rval\n",
"role": "user"
},
{
"content": "condense",
"role": "assistant"
}
] |
def x(self, rows):
rval = {}
for row in rows:
if row:
for value, count in row.items():
rval[value] = count + rval.get(value, 0)
return rval
|
join
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, rows):\n rval = {}\n for row in rows:\n if row:\n for value, count in row.items():\n rval[value] = count + rval.get(value, 0)\n return rval\n",
"role": "user"
},
{
"content": "join",
"role": "assistant"
}
] |
def x(self, rows):
rval = 0
for row in rows:
if row:
rval += row
return rval
|
join
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, rows):\n rval = 0\n for row in rows:\n if row:\n rval += row\n return rval\n",
"role": "user"
},
{
"content": "join",
"role": "assistant"
}
] |
def x(self, data, transform, step_size):
if callable(transform):
data = transform(data, step_size)
return data
|
transform
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data, transform, step_size):\n if callable(transform):\n data = transform(data, step_size)\n return data\n",
"role": "user"
},
{
"content": "transform",
"role": "assistant"
}
] |
def x(self, data):
if data:
data = filter(None, data.values())
if data:
return data[-1]
return None
|
condense
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data):\n if data:\n data = filter(None, data.values())\n if data:\n return data[-1]\n return None\n",
"role": "user"
},
{
"content": "condense",
"role": "assistant"
}
] |
def x(self, data, transform, step_size):
if transform == 'mean':
total = sum(data)
count = len(data)
data = float(total) / float(count) if count > 0 else 0
elif transform == 'count':
data = len(data)
elif transform == 'min':
data = min(data or [0])
elif transform == 'max':
data = max(data or [0])
elif transform == 'sum':
data = sum(data)
elif transform == 'rate':
data = len(data) / float(step_size)
elif callable(transform):
data = transform(data)
return data
|
transform
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data, transform, step_size):\n if transform == 'mean':\n total = sum(data)\n count = len(data)\n data = float(total) / float(count) if count > 0 else 0\n elif transform == 'count':\n data = len(data)\n elif transform == 'min':\n data = min(data or [0])\n elif transform == 'max':\n data = max(data or [0])\n elif transform == 'sum':\n data = sum(data)\n elif transform == 'rate':\n data = len(data) / float(step_size)\n elif callable(transform):\n data = transform(data)\n return data\n",
"role": "user"
},
{
"content": "transform",
"role": "assistant"
}
] |
def x(self, data):
if data:
return reduce(operator.ior, data.values())
return set()
|
condense
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data):\n if data:\n return reduce(operator.ior, data.values())\n return set()\n",
"role": "user"
},
{
"content": "condense",
"role": "assistant"
}
] |
def x(x):
from math import sqrt
n = len(x)
mean = sum(x) / float(n)
std = sqrt(sum((a - mean) ** 2 for a in x) / float(n - 1))
return std
|
stdev
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(x):\n from math import sqrt\n n = len(x)\n mean = sum(x) / float(n)\n std = sqrt(sum((a - mean) ** 2 for a in x) / float(n - 1))\n return std\n",
"role": "user"
},
{
"content": "stdev",
"role": "assistant"
}
] |
def x(self, **options):
students_grp, _ = Group.objects.get_or_create(name='Students')
sg_prop = students_grp.properties
sg_prop.student_visible = True
sg_prop.save()
students_grp.save()
for gr in [(settings.SENIOR_GRADUATION_YEAR + y) for y in range(0, 4)]:
users = User.objects.users_in_year(gr)
grp, _ = Group.objects.get_or_create(name='Class of {}'.format(gr))
grp_prop = grp.properties
grp_prop.student_visible = True
grp_prop.save()
grp.save()
self.stdout.write('{}: {} users'.format(gr, len(users)))
for u in users:
u.groups.add(grp)
u.groups.add(students_grp)
u.save()
self.stdout.write('{}: Processed'.format(gr))
self.stdout.write('Done.')
|
handle
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, **options):\n students_grp, _ = Group.objects.get_or_create(name='Students')\n sg_prop = students_grp.properties\n sg_prop.student_visible = True\n sg_prop.save()\n students_grp.save()\n for gr in [(settings.SENIOR_GRADUATION_YEAR + y) for y in range(0, 4)]:\n users = User.objects.users_in_year(gr)\n grp, _ = Group.objects.get_or_create(name='Class of {}'.format(gr))\n grp_prop = grp.properties\n grp_prop.student_visible = True\n grp_prop.save()\n grp.save()\n self.stdout.write('{}: {} users'.format(gr, len(users)))\n for u in users:\n u.groups.add(grp)\n u.groups.add(students_grp)\n u.save()\n self.stdout.write('{}: Processed'.format(gr))\n self.stdout.write('Done.')\n",
"role": "user"
},
{
"content": "handle",
"role": "assistant"
}
] |
def x(self, *args, **options):
do_run = options['run']
if do_run:
if not options['confirm']:
self.ask(
"""===== WARNING! =====
This script will DESTROY data! Ensure that you have a properly backed-up copy of your database before proceeding.
===== WARNING! =====
Continue?"""
)
else:
self.stdout.write('In pretend mode.')
current_year = timezone.now().year
new_senior_year = current_year + 1
turnover_date = datetime.datetime(current_year, 7, 1)
self.stdout.write('Turnover date set to: {}'.format(turnover_date.
strftime('%c')))
if not self.chk('SENIOR_GRADUATION_YEAR = {} in settings/__init__.py'.
format(new_senior_year), settings.SENIOR_GRADUATION_YEAR ==
new_senior_year):
return
"""
EIGHTH:
EighthBlock: filtered
EighthSignup: absences removed
EighthActivity: keep
ANNOUNCEMENTS:
AnnouncementRequest: filtered
USERS:
User: graduated students deleted
"""
self.stdout.write('Resolving absences')
if do_run:
self.clear_absences()
self.stdout.write('Updating welcome state')
if do_run:
self.update_welcome()
self.stdout.write('Deleting graduated users')
if do_run:
self.handle_delete()
self.stdout.write('Archiving admin comments')
if do_run:
self.archive_admin_comments()
|
handle
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, *args, **options):\n do_run = options['run']\n if do_run:\n if not options['confirm']:\n self.ask(\n \"\"\"===== WARNING! =====\n\nThis script will DESTROY data! Ensure that you have a properly backed-up copy of your database before proceeding.\n\n===== WARNING! =====\n\nContinue?\"\"\"\n )\n else:\n self.stdout.write('In pretend mode.')\n current_year = timezone.now().year\n new_senior_year = current_year + 1\n turnover_date = datetime.datetime(current_year, 7, 1)\n self.stdout.write('Turnover date set to: {}'.format(turnover_date.\n strftime('%c')))\n if not self.chk('SENIOR_GRADUATION_YEAR = {} in settings/__init__.py'.\n format(new_senior_year), settings.SENIOR_GRADUATION_YEAR ==\n new_senior_year):\n return\n \"\"\"\n EIGHTH:\n EighthBlock: filtered\n EighthSignup: absences removed\n EighthActivity: keep\n\n ANNOUNCEMENTS:\n AnnouncementRequest: filtered\n\n USERS:\n User: graduated students deleted\n \"\"\"\n self.stdout.write('Resolving absences')\n if do_run:\n self.clear_absences()\n self.stdout.write('Updating welcome state')\n if do_run:\n self.update_welcome()\n self.stdout.write('Deleting graduated users')\n if do_run:\n self.handle_delete()\n self.stdout.write('Archiving admin comments')\n if do_run:\n self.archive_admin_comments()\n",
"role": "user"
},
{
"content": "handle",
"role": "assistant"
}
] |
def x(self, userid, password, request=None):
user = auth.authenticate(username=userid, password=password)
if user is None or user and not user.is_active:
raise exceptions.AuthenticationFailed('Invalid username/password.')
return user, None
|
authenticate_credentials
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, userid, password, request=None):\n user = auth.authenticate(username=userid, password=password)\n if user is None or user and not user.is_active:\n raise exceptions.AuthenticationFailed('Invalid username/password.')\n return user, None\n",
"role": "user"
},
{
"content": "authenticate_credentials",
"role": "assistant"
}
] |
def x(request):
remote_addr = request.META['HTTP_X_FORWARDED_FOR'
] if 'HTTP_X_FORWARDED_FOR' in request.META else request.META.get(
'REMOTE_ADDR', '')
return remote_addr in settings.INTERNAL_IPS
|
check_internal_ip
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request):\n remote_addr = request.META['HTTP_X_FORWARDED_FOR'\n ] if 'HTTP_X_FORWARDED_FOR' in request.META else request.META.get(\n 'REMOTE_ADDR', '')\n return remote_addr in settings.INTERNAL_IPS\n",
"role": "user"
},
{
"content": "check_internal_ip",
"role": "assistant"
}
] |
def x(q):
question_votes = votes = Answer.objects.filter(question=q)
users = q.get_users_voted()
num_users_votes = {u.id: votes.filter(user=u).count() for u in users}
user_scale = {u.id: (1 / num_users_votes[u.id]) for u in users}
choices = []
for c in q.choice_set.all().order_by('num'):
votes = question_votes.filter(choice=c)
vote_users = set([v.user for v in votes])
choice = {'choice': c, 'votes': {'total': {'all': len(vote_users),
'all_percent': perc(len(vote_users), users.count()), 'male':
fmt(sum([(v.user.is_male * user_scale[v.user.id]) for v in
votes])), 'female': fmt(sum([(v.user.is_female * user_scale[v.
user.id]) for v in votes]))}}, 'users': [v.user for v in votes]}
for yr in range(9, 14):
yr_votes = [(v.user if v.user.grade and v.user.grade.number ==
yr else None) for v in votes]
yr_votes = list(filter(None, yr_votes))
choice['votes'][yr] = {'all': len(set(yr_votes)), 'male': fmt(
sum([(u.is_male * user_scale[u.id]) for u in yr_votes])),
'female': fmt(sum([(u.is_female * user_scale[u.id]) for u in
yr_votes]))}
choices.append(choice)
""" Clear vote """
votes = question_votes.filter(clear_vote=True)
clr_users = set([v.user for v in votes])
choice = {'choice': 'Clear vote', 'votes': {'total': {'all': len(
clr_users), 'all_percent': perc(len(clr_users), users.count()),
'male': fmt(sum([(v.user.is_male * user_scale[v.user.id]) for v in
votes])), 'female': fmt(sum([(v.user.is_female * user_scale[v.user.
id]) for v in votes]))}}, 'users': clr_users}
for yr in range(9, 14):
yr_votes = [(v.user if v.user.grade and v.user.grade.number == yr else
None) for v in votes]
yr_votes = list(filter(None, yr_votes))
choice['votes'][yr] = {'all': len(yr_votes), 'male': fmt(sum([(u.
is_male * user_scale[u.id]) for u in yr_votes])), 'female': fmt
(sum([(u.is_female * user_scale[u.id]) for u in yr_votes]))}
choices.append(choice)
choice = {'choice': 'Total', 'votes': {'total': {'all': users.count(),
'votes_all': question_votes.count(), 'all_percent': perc(users.
count(), users.count()), 'male': users.filter(gender=True).count(),
'female': users.filter(gender__isnull=False, gender=False).count()}}}
for yr in range(9, 14):
yr_votes = [(u if u.grade and u.grade.number == yr else None) for u in
users]
yr_votes = list(filter(None, yr_votes))
choice['votes'][yr] = {'all': len(set(yr_votes)), 'male': fmt(sum([
(u.is_male * user_scale[u.id]) for u in yr_votes])), 'female':
fmt(sum([(u.is_female * user_scale[u.id]) for u in yr_votes]))}
choices.append(choice)
return {'question': q, 'choices': choices, 'user_scale': user_scale}
|
handle_sap
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(q):\n question_votes = votes = Answer.objects.filter(question=q)\n users = q.get_users_voted()\n num_users_votes = {u.id: votes.filter(user=u).count() for u in users}\n user_scale = {u.id: (1 / num_users_votes[u.id]) for u in users}\n choices = []\n for c in q.choice_set.all().order_by('num'):\n votes = question_votes.filter(choice=c)\n vote_users = set([v.user for v in votes])\n choice = {'choice': c, 'votes': {'total': {'all': len(vote_users),\n 'all_percent': perc(len(vote_users), users.count()), 'male':\n fmt(sum([(v.user.is_male * user_scale[v.user.id]) for v in\n votes])), 'female': fmt(sum([(v.user.is_female * user_scale[v.\n user.id]) for v in votes]))}}, 'users': [v.user for v in votes]}\n for yr in range(9, 14):\n yr_votes = [(v.user if v.user.grade and v.user.grade.number ==\n yr else None) for v in votes]\n yr_votes = list(filter(None, yr_votes))\n choice['votes'][yr] = {'all': len(set(yr_votes)), 'male': fmt(\n sum([(u.is_male * user_scale[u.id]) for u in yr_votes])),\n 'female': fmt(sum([(u.is_female * user_scale[u.id]) for u in\n yr_votes]))}\n choices.append(choice)\n \"\"\" Clear vote \"\"\"\n votes = question_votes.filter(clear_vote=True)\n clr_users = set([v.user for v in votes])\n choice = {'choice': 'Clear vote', 'votes': {'total': {'all': len(\n clr_users), 'all_percent': perc(len(clr_users), users.count()),\n 'male': fmt(sum([(v.user.is_male * user_scale[v.user.id]) for v in\n votes])), 'female': fmt(sum([(v.user.is_female * user_scale[v.user.\n id]) for v in votes]))}}, 'users': clr_users}\n for yr in range(9, 14):\n yr_votes = [(v.user if v.user.grade and v.user.grade.number == yr else\n None) for v in votes]\n yr_votes = list(filter(None, yr_votes))\n choice['votes'][yr] = {'all': len(yr_votes), 'male': fmt(sum([(u.\n is_male * user_scale[u.id]) for u in yr_votes])), 'female': fmt\n (sum([(u.is_female * user_scale[u.id]) for u in yr_votes]))}\n choices.append(choice)\n choice = {'choice': 'Total', 'votes': {'total': {'all': users.count(),\n 'votes_all': question_votes.count(), 'all_percent': perc(users.\n count(), users.count()), 'male': users.filter(gender=True).count(),\n 'female': users.filter(gender__isnull=False, gender=False).count()}}}\n for yr in range(9, 14):\n yr_votes = [(u if u.grade and u.grade.number == yr else None) for u in\n users]\n yr_votes = list(filter(None, yr_votes))\n choice['votes'][yr] = {'all': len(set(yr_votes)), 'male': fmt(sum([\n (u.is_male * user_scale[u.id]) for u in yr_votes])), 'female':\n fmt(sum([(u.is_female * user_scale[u.id]) for u in yr_votes]))}\n choices.append(choice)\n return {'question': q, 'choices': choices, 'user_scale': user_scale}\n",
"role": "user"
},
{
"content": "handle_sap",
"role": "assistant"
}
] |
def x(request):
is_events_admin = request.user.has_admin_permission('events')
if request.method == 'POST':
if 'approve' in request.POST and is_events_admin:
event_id = request.POST.get('approve')
event = get_object_or_404(Event, id=event_id)
event.rejected = False
event.approved = True
event.approved_by = request.user
event.save()
messages.success(request, 'Approved event {}'.format(event))
if 'reject' in request.POST and is_events_admin:
event_id = request.POST.get('reject')
event = get_object_or_404(Event, id=event_id)
event.approved = False
event.rejected = True
event.rejected_by = request.user
event.save()
messages.success(request, 'Rejected event {}'.format(event))
if is_events_admin and 'show_all' in request.GET:
viewable_events = Event.objects.all().this_year().prefetch_related(
'groups')
else:
viewable_events = Event.objects.visible_to_user(request.user
).this_year().prefetch_related('groups')
today = datetime.date.today()
delta = today - datetime.timedelta(days=today.weekday())
this_week = delta, delta + datetime.timedelta(days=7)
this_month = this_week[1], this_week[1] + datetime.timedelta(days=31)
events_categories = [{'title': 'This week', 'events': viewable_events.
filter(time__gte=this_week[0], time__lt=this_week[1])}, {'title':
'This month', 'events': viewable_events.filter(time__gte=this_month
[0], time__lt=this_month[1])}, {'title': 'Future', 'events':
viewable_events.filter(time__gte=this_month[1])}]
if is_events_admin:
unapproved_events = Event.objects.filter(approved=False, rejected=False
).prefetch_related('groups')
events_categories = [{'title': 'Awaiting Approval', 'events':
unapproved_events}] + events_categories
if is_events_admin and 'show_all' in request.GET:
events_categories.append({'title': 'Past', 'events':
viewable_events.filter(time__lt=this_week[0])})
context = {'events': events_categories, 'num_events': sum([x['events'].
count() for x in events_categories]), 'is_events_admin':
is_events_admin, 'events_admin': is_events_admin, 'show_attend':
True, 'show_icon': True}
return render(request, 'events/home.html', context)
|
events_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request):\n is_events_admin = request.user.has_admin_permission('events')\n if request.method == 'POST':\n if 'approve' in request.POST and is_events_admin:\n event_id = request.POST.get('approve')\n event = get_object_or_404(Event, id=event_id)\n event.rejected = False\n event.approved = True\n event.approved_by = request.user\n event.save()\n messages.success(request, 'Approved event {}'.format(event))\n if 'reject' in request.POST and is_events_admin:\n event_id = request.POST.get('reject')\n event = get_object_or_404(Event, id=event_id)\n event.approved = False\n event.rejected = True\n event.rejected_by = request.user\n event.save()\n messages.success(request, 'Rejected event {}'.format(event))\n if is_events_admin and 'show_all' in request.GET:\n viewable_events = Event.objects.all().this_year().prefetch_related(\n 'groups')\n else:\n viewable_events = Event.objects.visible_to_user(request.user\n ).this_year().prefetch_related('groups')\n today = datetime.date.today()\n delta = today - datetime.timedelta(days=today.weekday())\n this_week = delta, delta + datetime.timedelta(days=7)\n this_month = this_week[1], this_week[1] + datetime.timedelta(days=31)\n events_categories = [{'title': 'This week', 'events': viewable_events.\n filter(time__gte=this_week[0], time__lt=this_week[1])}, {'title':\n 'This month', 'events': viewable_events.filter(time__gte=this_month\n [0], time__lt=this_month[1])}, {'title': 'Future', 'events':\n viewable_events.filter(time__gte=this_month[1])}]\n if is_events_admin:\n unapproved_events = Event.objects.filter(approved=False, rejected=False\n ).prefetch_related('groups')\n events_categories = [{'title': 'Awaiting Approval', 'events':\n unapproved_events}] + events_categories\n if is_events_admin and 'show_all' in request.GET:\n events_categories.append({'title': 'Past', 'events':\n viewable_events.filter(time__lt=this_week[0])})\n context = {'events': events_categories, 'num_events': sum([x['events'].\n count() for x in events_categories]), 'is_events_admin':\n is_events_admin, 'events_admin': is_events_admin, 'show_attend': \n True, 'show_icon': True}\n return render(request, 'events/home.html', context)\n",
"role": "user"
},
{
"content": "events_view",
"role": "assistant"
}
] |
def x(request, id):
event = get_object_or_404(Event, id=id)
if request.method == 'POST':
if not event.show_attending:
return redirect('events')
if 'attending' in request.POST:
attending = request.POST.get('attending')
attending = attending == 'true'
if attending:
event.attending.add(request.user)
else:
event.attending.remove(request.user)
return redirect('events')
context = {'event': event, 'is_events_admin': request.user.
has_admin_permission('events')}
return render(request, 'events/join_event.html', context)
|
join_event_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request, id):\n event = get_object_or_404(Event, id=id)\n if request.method == 'POST':\n if not event.show_attending:\n return redirect('events')\n if 'attending' in request.POST:\n attending = request.POST.get('attending')\n attending = attending == 'true'\n if attending:\n event.attending.add(request.user)\n else:\n event.attending.remove(request.user)\n return redirect('events')\n context = {'event': event, 'is_events_admin': request.user.\n has_admin_permission('events')}\n return render(request, 'events/join_event.html', context)\n",
"role": "user"
},
{
"content": "join_event_view",
"role": "assistant"
}
] |
def x(request, id):
event = get_object_or_404(Event, id=id)
full_roster = list(event.attending.all())
viewable_roster = []
num_hidden_members = 0
for p in full_roster:
if p.can_view_eighth:
viewable_roster.append(p)
else:
num_hidden_members += 1
context = {'event': event, 'viewable_roster': viewable_roster,
'full_roster': full_roster, 'num_hidden_members':
num_hidden_members, 'is_events_admin': request.user.
has_admin_permission('events')}
return render(request, 'events/roster.html', context)
|
event_roster_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request, id):\n event = get_object_or_404(Event, id=id)\n full_roster = list(event.attending.all())\n viewable_roster = []\n num_hidden_members = 0\n for p in full_roster:\n if p.can_view_eighth:\n viewable_roster.append(p)\n else:\n num_hidden_members += 1\n context = {'event': event, 'viewable_roster': viewable_roster,\n 'full_roster': full_roster, 'num_hidden_members':\n num_hidden_members, 'is_events_admin': request.user.\n has_admin_permission('events')}\n return render(request, 'events/roster.html', context)\n",
"role": "user"
},
{
"content": "event_roster_view",
"role": "assistant"
}
] |
def x(request):
is_events_admin = request.user.has_admin_permission('events')
if not is_events_admin:
return redirect('request_event')
if request.method == 'POST':
form = EventForm(data=request.POST, all_groups=request.user.
has_admin_permission('groups'))
logger.debug(form)
if form.is_valid():
obj = form.save()
obj.user = request.user
obj.description = safe_html(obj.description)
obj.approved = True
obj.approved_by = request.user
messages.success(request,
'Because you are an administrator, this event was auto-approved.'
)
obj.created_hook(request)
obj.save()
return redirect('events')
else:
messages.error(request, 'Error adding event.')
else:
form = EventForm(all_groups=request.user.has_admin_permission('groups')
)
context = {'form': form, 'action': 'add', 'action_title': 'Add' if
is_events_admin else 'Submit', 'is_events_admin': is_events_admin}
return render(request, 'events/add_modify.html', context)
|
add_event_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request):\n is_events_admin = request.user.has_admin_permission('events')\n if not is_events_admin:\n return redirect('request_event')\n if request.method == 'POST':\n form = EventForm(data=request.POST, all_groups=request.user.\n has_admin_permission('groups'))\n logger.debug(form)\n if form.is_valid():\n obj = form.save()\n obj.user = request.user\n obj.description = safe_html(obj.description)\n obj.approved = True\n obj.approved_by = request.user\n messages.success(request,\n 'Because you are an administrator, this event was auto-approved.'\n )\n obj.created_hook(request)\n obj.save()\n return redirect('events')\n else:\n messages.error(request, 'Error adding event.')\n else:\n form = EventForm(all_groups=request.user.has_admin_permission('groups')\n )\n context = {'form': form, 'action': 'add', 'action_title': 'Add' if\n is_events_admin else 'Submit', 'is_events_admin': is_events_admin}\n return render(request, 'events/add_modify.html', context)\n",
"role": "user"
},
{
"content": "add_event_view",
"role": "assistant"
}
] |
def x(request, id=None):
event = get_object_or_404(Event, id=id)
is_events_admin = request.user.has_admin_permission('events')
if not is_events_admin:
raise exceptions.PermissionDenied
if request.method == 'POST':
if is_events_admin:
form = AdminEventForm(data=request.POST, instance=event,
all_groups=request.user.has_admin_permission('groups'))
else:
form = EventForm(data=request.POST, instance=event, all_groups=
request.user.has_admin_permission('groups'))
logger.debug(form)
if form.is_valid():
obj = form.save()
obj.description = safe_html(obj.description)
obj.save()
messages.success(request, 'Successfully modified event.')
else:
messages.error(request, 'Error modifying event.')
elif is_events_admin:
form = AdminEventForm(instance=event, all_groups=request.user.
has_admin_permission('groups'))
else:
form = EventForm(instance=event, all_groups=request.user.
has_admin_permission('groups'))
context = {'form': form, 'action': 'modify', 'action_title': 'Modify',
'id': id, 'is_events_admin': is_events_admin}
return render(request, 'events/add_modify.html', context)
|
modify_event_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request, id=None):\n event = get_object_or_404(Event, id=id)\n is_events_admin = request.user.has_admin_permission('events')\n if not is_events_admin:\n raise exceptions.PermissionDenied\n if request.method == 'POST':\n if is_events_admin:\n form = AdminEventForm(data=request.POST, instance=event,\n all_groups=request.user.has_admin_permission('groups'))\n else:\n form = EventForm(data=request.POST, instance=event, all_groups=\n request.user.has_admin_permission('groups'))\n logger.debug(form)\n if form.is_valid():\n obj = form.save()\n obj.description = safe_html(obj.description)\n obj.save()\n messages.success(request, 'Successfully modified event.')\n else:\n messages.error(request, 'Error modifying event.')\n elif is_events_admin:\n form = AdminEventForm(instance=event, all_groups=request.user.\n has_admin_permission('groups'))\n else:\n form = EventForm(instance=event, all_groups=request.user.\n has_admin_permission('groups'))\n context = {'form': form, 'action': 'modify', 'action_title': 'Modify',\n 'id': id, 'is_events_admin': is_events_admin}\n return render(request, 'events/add_modify.html', context)\n",
"role": "user"
},
{
"content": "modify_event_view",
"role": "assistant"
}
] |
def x(request, id):
event = get_object_or_404(Event, id=id)
if not request.user.has_admin_permission('events'):
raise exceptions.PermissionDenied
if request.method == 'POST':
try:
event.delete()
messages.success(request, 'Successfully deleted event.')
except Event.DoesNotExist:
pass
return redirect('events')
else:
return render(request, 'events/delete.html', {'event': event})
|
delete_event_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request, id):\n event = get_object_or_404(Event, id=id)\n if not request.user.has_admin_permission('events'):\n raise exceptions.PermissionDenied\n if request.method == 'POST':\n try:\n event.delete()\n messages.success(request, 'Successfully deleted event.')\n except Event.DoesNotExist:\n pass\n return redirect('events')\n else:\n return render(request, 'events/delete.html', {'event': event})\n",
"role": "user"
},
{
"content": "delete_event_view",
"role": "assistant"
}
] |
def x(request):
if request.method == 'POST':
event_id = request.POST.get('event_id')
if event_id:
event = Event.objects.get(id=event_id)
event.user_map.users_hidden.remove(request.user)
event.user_map.save()
return http.HttpResponse('Unhidden')
raise http.Http404
else:
return http.HttpResponseNotAllowed(['POST'],
'HTTP 405: METHOD NOT ALLOWED')
|
show_event_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request):\n if request.method == 'POST':\n event_id = request.POST.get('event_id')\n if event_id:\n event = Event.objects.get(id=event_id)\n event.user_map.users_hidden.remove(request.user)\n event.user_map.save()\n return http.HttpResponse('Unhidden')\n raise http.Http404\n else:\n return http.HttpResponseNotAllowed(['POST'],\n 'HTTP 405: METHOD NOT ALLOWED')\n",
"role": "user"
},
{
"content": "show_event_view",
"role": "assistant"
}
] |
def x(request):
logger.debug(request.POST)
if request.method == 'POST':
if 'user_token' in request.POST and 'gcm_token' in request.POST:
user_token = request.POST.get('user_token')
gcm_token = request.POST.get('gcm_token')
logger.debug(user_token)
logger.debug(gcm_token)
try:
ncfg = NotificationConfig.objects.get(android_gcm_rand=
user_token)
except NotificationConfig.DoesNotExist:
logger.debug('No pair')
return HttpResponse('{"error":"Invalid data."}',
content_type='text/json')
ncfg.gcm_token = gcm_token
ncfg.android_gcm_rand = None
ncfg.android_gcm_date = None
ncfg.save()
return HttpResponse('{"success":"Now registered."}',
content_type='text/json')
return HttpResponse('{"error":"Invalid arguments."}', content_type=
'text/json')
|
android_setup_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request):\n logger.debug(request.POST)\n if request.method == 'POST':\n if 'user_token' in request.POST and 'gcm_token' in request.POST:\n user_token = request.POST.get('user_token')\n gcm_token = request.POST.get('gcm_token')\n logger.debug(user_token)\n logger.debug(gcm_token)\n try:\n ncfg = NotificationConfig.objects.get(android_gcm_rand=\n user_token)\n except NotificationConfig.DoesNotExist:\n logger.debug('No pair')\n return HttpResponse('{\"error\":\"Invalid data.\"}',\n content_type='text/json')\n ncfg.gcm_token = gcm_token\n ncfg.android_gcm_rand = None\n ncfg.android_gcm_date = None\n ncfg.save()\n return HttpResponse('{\"success\":\"Now registered.\"}',\n content_type='text/json')\n return HttpResponse('{\"error\":\"Invalid arguments.\"}', content_type=\n 'text/json')\n",
"role": "user"
},
{
"content": "android_setup_view",
"role": "assistant"
}
] |
def x(request):
data = {}
if request.user.is_authenticated:
notifs = GCMNotification.objects.filter(sent_to__user=request.user
).order_by('-time')
if notifs.count() > 0:
notif = notifs.first()
ndata = notif.data
if 'title' in ndata and 'text' in ndata:
data = {'title': ndata['title'] if 'title' in ndata else '',
'text': ndata['text'] if 'text' in ndata else '', 'url':
ndata['url'] if 'url' in ndata else ''}
else:
schedule_chk = chrome_getdata_check(request)
if schedule_chk:
data = schedule_chk
else:
schedule_chk = chrome_getdata_check(request)
if schedule_chk:
data = schedule_chk
else:
return HttpResponse('null', content_type='text/json')
else:
schedule_chk = chrome_getdata_check(request)
if schedule_chk:
data = schedule_chk
else:
data = {'title': 'Check Intranet', 'text':
"You have a new notification that couldn't be loaded right now."
}
j = json.dumps(data)
return HttpResponse(j, content_type='text/json')
|
chrome_getdata_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request):\n data = {}\n if request.user.is_authenticated:\n notifs = GCMNotification.objects.filter(sent_to__user=request.user\n ).order_by('-time')\n if notifs.count() > 0:\n notif = notifs.first()\n ndata = notif.data\n if 'title' in ndata and 'text' in ndata:\n data = {'title': ndata['title'] if 'title' in ndata else '',\n 'text': ndata['text'] if 'text' in ndata else '', 'url':\n ndata['url'] if 'url' in ndata else ''}\n else:\n schedule_chk = chrome_getdata_check(request)\n if schedule_chk:\n data = schedule_chk\n else:\n schedule_chk = chrome_getdata_check(request)\n if schedule_chk:\n data = schedule_chk\n else:\n return HttpResponse('null', content_type='text/json')\n else:\n schedule_chk = chrome_getdata_check(request)\n if schedule_chk:\n data = schedule_chk\n else:\n data = {'title': 'Check Intranet', 'text':\n \"You have a new notification that couldn't be loaded right now.\"\n }\n j = json.dumps(data)\n return HttpResponse(j, content_type='text/json')\n",
"role": "user"
},
{
"content": "chrome_getdata_view",
"role": "assistant"
}
] |
def x(request):
logger.debug(request.POST)
token = None
if request.method == 'POST':
if 'token' in request.POST:
token = request.POST.get('token')
if not token:
return HttpResponse('{"error":"Invalid data."}', content_type=
'text/json')
ncfg, _ = NotificationConfig.objects.get_or_create(user=request.user)
ncfg.gcm_token = token
ncfg.save()
return HttpResponse('{"success":"Now registered."}', content_type=
'text/json')
|
chrome_setup_view
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(request):\n logger.debug(request.POST)\n token = None\n if request.method == 'POST':\n if 'token' in request.POST:\n token = request.POST.get('token')\n if not token:\n return HttpResponse('{\"error\":\"Invalid data.\"}', content_type=\n 'text/json')\n ncfg, _ = NotificationConfig.objects.get_or_create(user=request.user)\n ncfg.gcm_token = token\n ncfg.save()\n return HttpResponse('{\"success\":\"Now registered.\"}', content_type=\n 'text/json')\n",
"role": "user"
},
{
"content": "chrome_setup_view",
"role": "assistant"
}
] |