Columns: source_code (string, lengths 45–7.24k) · function_name (string, lengths 1–111) · messages (list)
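Each row repeats the same three fields: the Python source of a function whose name has been replaced with the placeholder x (shown here flattened onto one line), the original function name, and a messages list that wraps the first two fields in a system/user/assistant chat exchange. As a minimal sketch of that relationship (the helper name build_row is hypothetical, not part of any dataset tooling; the system prompt is copied verbatim from the rows):

SYSTEM_PROMPT = ('Given the source code of a python function, '
                 'suggest a fitting name for the function.')

def build_row(source_code: str, function_name: str) -> dict:
    # Assemble one row; in every row below, `messages` appears to be
    # derived deterministically from the other two columns.
    return {
        'source_code': source_code,
        'function_name': function_name,
        'messages': [
            {'role': 'system', 'content': SYSTEM_PROMPT},
            {'role': 'user', 'content': source_code},
            {'role': 'assistant', 'content': function_name},
        ],
    }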
def x(self, value: str, force: bool=False) ->datetime: value = value.lower() if not force: current_state = self.current_state if current_state == 'unknown': allowed_transitions = self._allowed_states else: allowed_transitions = self._allowed_transitions[current_state] allowed_transitions.append(current_state) LOG.debug('Updating current state of %s to %s', self._id, value) if value not in allowed_transitions: raise ValueError( "Invalid current state update: '{}'. '{}' can be transitioned to states: {}" .format(value, current_state, allowed_transitions)) return self._update_state('current', value)
update_current_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value: str, force: bool=False) ->datetime:\n value = value.lower()\n if not force:\n current_state = self.current_state\n if current_state == 'unknown':\n allowed_transitions = self._allowed_states\n else:\n allowed_transitions = self._allowed_transitions[current_state]\n allowed_transitions.append(current_state)\n LOG.debug('Updating current state of %s to %s', self._id, value)\n if value not in allowed_transitions:\n raise ValueError(\n \"Invalid current state update: '{}'. '{}' can be transitioned to states: {}\"\n .format(value, current_state, allowed_transitions))\n return self._update_state('current', value)\n", "role": "user" }, { "content": "update_current_state", "role": "assistant" } ]
def x(self, initial_state: str='unknown') ->dict: initial_state = initial_state.lower() if (initial_state != 'unknown' and initial_state not in self. _allowed_states): raise ValueError('Invalid initial state: {}'.format(initial_state)) _initial_state = dict(current_state=initial_state, target_state= initial_state, current_timestamp=datetime.utcnow().isoformat(), target_timestamp=datetime.utcnow().isoformat()) return _initial_state
initialise
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, initial_state: str='unknown') ->dict:\n initial_state = initial_state.lower()\n if (initial_state != 'unknown' and initial_state not in self.\n _allowed_states):\n raise ValueError('Invalid initial state: {}'.format(initial_state))\n _initial_state = dict(current_state=initial_state, target_state=\n initial_state, current_timestamp=datetime.utcnow().isoformat(),\n target_timestamp=datetime.utcnow().isoformat())\n return _initial_state\n", "role": "user" }, { "content": "initialise", "role": "assistant" } ]
def x(self, state_type: str, value: str) ->datetime: timestamp = datetime.utcnow() field = '{}_state'.format(state_type) old_state = DB.get_hash_value(self._key, field) DB.set_hash_value(self._key, field, value, pipeline=True) DB.set_hash_value(self._key, '{}_timestamp'.format(state_type), timestamp.isoformat(), pipeline=True) DB.execute() self.publish('{}_state_updated'.format(state_type), event_data=dict( state=value, old_state=old_state)) return timestamp
update_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, state_type: str, value: str) ->datetime:\n timestamp = datetime.utcnow()\n field = '{}_state'.format(state_type)\n old_state = DB.get_hash_value(self._key, field)\n DB.set_hash_value(self._key, field, value, pipeline=True)\n DB.set_hash_value(self._key, '{}_timestamp'.format(state_type),\n timestamp.isoformat(), pipeline=True)\n DB.execute()\n self.publish('{}_state_updated'.format(state_type), event_data=dict(\n state=value, old_state=old_state))\n return timestamp\n", "role": "user" }, { "content": "update_state", "role": "assistant" } ]
def x(dictionary: dict): return {key.lower(): [value.lower() for value in value] for key, value in dictionary.items()}
dict_lower
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(dictionary: dict):\n return {key.lower(): [value.lower() for value in value] for key, value in\n dictionary.items()}\n", "role": "user" }, { "content": "dict_lower", "role": "assistant" } ]
def x(self, config_dict): schema = self._get_schema() LOG.debug('Adding SBI with config: %s', config_dict) validate(config_dict, schema) updated_block = self._add_status(config_dict) scheduling_block_data, processing_block_data = (self. _split_sched_block_instance(updated_block)) name = 'scheduling_block:' + updated_block['id'] self._db.set_specified_values(name, scheduling_block_data) self._db.push_event(self.scheduling_event_name, updated_block['status'], updated_block['id']) for value in processing_block_data: name = 'scheduling_block:' + updated_block['id' ] + ':processing_block:' + value['id'] self._db.set_specified_values(name, value) self._db.push_event(self.processing_event_name, value['status'], value['id'])
add_sched_block_instance
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, config_dict):\n schema = self._get_schema()\n LOG.debug('Adding SBI with config: %s', config_dict)\n validate(config_dict, schema)\n updated_block = self._add_status(config_dict)\n scheduling_block_data, processing_block_data = (self.\n _split_sched_block_instance(updated_block))\n name = 'scheduling_block:' + updated_block['id']\n self._db.set_specified_values(name, scheduling_block_data)\n self._db.push_event(self.scheduling_event_name, updated_block['status'],\n updated_block['id'])\n for value in processing_block_data:\n name = 'scheduling_block:' + updated_block['id'\n ] + ':processing_block:' + value['id']\n self._db.set_specified_values(name, value)\n self._db.push_event(self.processing_event_name, value['status'],\n value['id'])\n", "role": "user" }, { "content": "add_sched_block_instance", "role": "assistant" } ]
def x(self): scheduling_block_ids = [] pattern = 'scheduling_block:*' block_ids = self._db.get_ids(pattern) for block_id in block_ids: if 'processing_block' not in block_id: id_split = block_id.split(':')[-1] scheduling_block_ids.append(id_split) return sorted(scheduling_block_ids)
get_sched_block_instance_ids
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n scheduling_block_ids = []\n pattern = 'scheduling_block:*'\n block_ids = self._db.get_ids(pattern)\n for block_id in block_ids:\n if 'processing_block' not in block_id:\n id_split = block_id.split(':')[-1]\n scheduling_block_ids.append(id_split)\n return sorted(scheduling_block_ids)\n", "role": "user" }, { "content": "get_sched_block_instance_ids", "role": "assistant" } ]
def x(self): _processing_block_ids = [] pattern = '*:processing_block:*' block_ids = self._db.get_ids(pattern) for block_id in block_ids: id_split = block_id.split(':')[-1] _processing_block_ids.append(id_split) return sorted(_processing_block_ids)
get_processing_block_ids
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n _processing_block_ids = []\n pattern = '*:processing_block:*'\n block_ids = self._db.get_ids(pattern)\n for block_id in block_ids:\n id_split = block_id.split(':')[-1]\n _processing_block_ids.append(id_split)\n return sorted(_processing_block_ids)\n", "role": "user" }, { "content": "get_processing_block_ids", "role": "assistant" } ]
def x(self): _scheduling_block_ids = [] _sub_array_ids = [] for blocks_id in self.get_sched_block_instance_ids(): _scheduling_block_ids.append(blocks_id) block_details = self.get_block_details(_scheduling_block_ids) for details in block_details: _sub_array_ids.append(details['sub_array_id']) _sub_array_ids = sorted(list(set(_sub_array_ids))) return _sub_array_ids
get_sub_array_ids
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n _scheduling_block_ids = []\n _sub_array_ids = []\n for blocks_id in self.get_sched_block_instance_ids():\n _scheduling_block_ids.append(blocks_id)\n block_details = self.get_block_details(_scheduling_block_ids)\n for details in block_details:\n _sub_array_ids.append(details['sub_array_id'])\n _sub_array_ids = sorted(list(set(_sub_array_ids)))\n return _sub_array_ids\n", "role": "user" }, { "content": "get_sub_array_ids", "role": "assistant" } ]
def x(self, sub_array_id): _ids = [] sbi_ids = self.get_sched_block_instance_ids() for details in self.get_block_details(sbi_ids): if details['sub_array_id'] == sub_array_id: _ids.append(details['id']) return sorted(_ids)
get_sub_array_sbi_ids
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, sub_array_id):\n _ids = []\n sbi_ids = self.get_sched_block_instance_ids()\n for details in self.get_block_details(sbi_ids):\n if details['sub_array_id'] == sub_array_id:\n _ids.append(details['id'])\n return sorted(_ids)\n", "role": "user" }, { "content": "get_sub_array_sbi_ids", "role": "assistant" } ]
def x(self, block_ids): if not hasattr(block_ids, '__iter__'): block_ids = [block_ids] for _id in block_ids: block_key = self._db.get_block(_id)[0] block_data = self._db.get_all_field_value(block_key) for key in block_data: for char in ['[', '{']: if char in block_data[key]: block_data[key] = ast.literal_eval(str(block_data[key])) yield block_data
get_block_details
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, block_ids):\n if not hasattr(block_ids, '__iter__'):\n block_ids = [block_ids]\n for _id in block_ids:\n block_key = self._db.get_block(_id)[0]\n block_data = self._db.get_all_field_value(block_key)\n for key in block_data:\n for char in ['[', '{']:\n if char in block_data[key]:\n block_data[key] = ast.literal_eval(str(block_data[key]))\n yield block_data\n", "role": "user" }, { "content": "get_block_details", "role": "assistant" } ]
def x(self, block_id, field, value): block_name = self._db.get_block(block_id) for name in block_name: self._db.set_value(name, field, value)
update_value
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, block_id, field, value):\n block_name = self._db.get_block(block_id)\n for name in block_name:\n self._db.set_value(name, field, value)\n", "role": "user" }, { "content": "update_value", "role": "assistant" } ]
def x(self, block_id): LOG.debug('Deleting SBI %s', block_id) scheduling_blocks = self._db.get_all_blocks(block_id) if not scheduling_blocks: raise RuntimeError('Scheduling block not found: {}'.format(block_id)) if scheduling_blocks: for blocks in scheduling_blocks: if 'processing_block' not in blocks: self._db.delete_block(blocks) else: split_key = blocks.split(':') self._db.delete_block(blocks) self._db.push_event(self.processing_event_name, 'deleted', split_key[3]) self._db.push_event(self.scheduling_event_name, 'deleted', block_id)
delete_sched_block_instance
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, block_id):\n LOG.debug('Deleting SBI %s', block_id)\n scheduling_blocks = self._db.get_all_blocks(block_id)\n if not scheduling_blocks:\n raise RuntimeError('Scheduling block not found: {}'.format(block_id))\n if scheduling_blocks:\n for blocks in scheduling_blocks:\n if 'processing_block' not in blocks:\n self._db.delete_block(blocks)\n else:\n split_key = blocks.split(':')\n self._db.delete_block(blocks)\n self._db.push_event(self.processing_event_name, 'deleted',\n split_key[3])\n self._db.push_event(self.scheduling_event_name, 'deleted', block_id)\n", "role": "user" }, { "content": "delete_sched_block_instance", "role": "assistant" } ]
def x(self, processing_block_id): LOG.debug('Deleting Processing Block %s ...', processing_block_id) processing_block = self._db.get_block(processing_block_id) if not processing_block: raise RuntimeError('Invalid Processing Block ID: {}'.format( processing_block_id)) for block in processing_block: if 'processing_block' in block: self._db.delete_block(block) scheduling_block_id = block.split(':') scheduling_block_details = self.get_block_details([ scheduling_block_id[1]]) for block_details in scheduling_block_details: block_list = block_details['processing_block_ids'] if processing_block_id in block_list: block_list.remove(processing_block_id) self.update_value(scheduling_block_id[1], 'processing_block_ids', block_list) self._db.push_event(self.processing_event_name, 'deleted', id)
delete_processing_block
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, processing_block_id):\n LOG.debug('Deleting Processing Block %s ...', processing_block_id)\n processing_block = self._db.get_block(processing_block_id)\n if not processing_block:\n raise RuntimeError('Invalid Processing Block ID: {}'.format(\n processing_block_id))\n for block in processing_block:\n if 'processing_block' in block:\n self._db.delete_block(block)\n scheduling_block_id = block.split(':')\n scheduling_block_details = self.get_block_details([\n scheduling_block_id[1]])\n for block_details in scheduling_block_details:\n block_list = block_details['processing_block_ids']\n if processing_block_id in block_list:\n block_list.remove(processing_block_id)\n self.update_value(scheduling_block_id[1],\n 'processing_block_ids', block_list)\n self._db.push_event(self.processing_event_name, 'deleted', id)\n", "role": "user" }, { "content": "delete_processing_block", "role": "assistant" } ]
def x(): schema_path = os.path.join(os.path.dirname(__file__), 'schema', 'scheduling_block_schema.json') with open(schema_path, 'r') as file: schema_data = file.read() schema = json.loads(schema_data) return schema
get_schema
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n schema_path = os.path.join(os.path.dirname(__file__), 'schema',\n 'scheduling_block_schema.json')\n with open(schema_path, 'r') as file:\n schema_data = file.read()\n schema = json.loads(schema_data)\n return schema\n", "role": "user" }, { "content": "get_schema", "role": "assistant" } ]
def x(scheduling_block): scheduling_block['status'] = 'created' for block in scheduling_block: if isinstance(scheduling_block[block], list): for field in scheduling_block[block]: field['status'] = 'created' return scheduling_block
add_status
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(scheduling_block):\n scheduling_block['status'] = 'created'\n for block in scheduling_block:\n if isinstance(scheduling_block[block], list):\n for field in scheduling_block[block]:\n field['status'] = 'created'\n return scheduling_block\n", "role": "user" }, { "content": "add_status", "role": "assistant" } ]
def x(self, scheduling_block): _scheduling_block_data = {} _processing_block_data = {} _processing_block_id = [] for block in scheduling_block: values = scheduling_block[block] if block != 'processing_blocks': _scheduling_block_data[block] = values else: processing_block_id = self.get_processing_block_ids() for value in values: if value['id'] not in processing_block_id: _processing_block_data = values else: raise Exception('Processing block already exits', value ['id']) for block_id in _processing_block_data: _processing_block_id.append(block_id['id']) _scheduling_block_data['processing_block_ids'] = _processing_block_id return _scheduling_block_data, _processing_block_data
split_sched_block_instance
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, scheduling_block):\n _scheduling_block_data = {}\n _processing_block_data = {}\n _processing_block_id = []\n for block in scheduling_block:\n values = scheduling_block[block]\n if block != 'processing_blocks':\n _scheduling_block_data[block] = values\n else:\n processing_block_id = self.get_processing_block_ids()\n for value in values:\n if value['id'] not in processing_block_id:\n _processing_block_data = values\n else:\n raise Exception('Processing block already exits', value\n ['id'])\n for block_id in _processing_block_data:\n _processing_block_id.append(block_id['id'])\n _scheduling_block_data['processing_block_ids'] = _processing_block_id\n return _scheduling_block_data, _processing_block_data\n", "role": "user" }, { "content": "split_sched_block_instance", "role": "assistant" } ]
def x(): init_logging() arlexecute.set_client(get_dask_Client()) arlexecute.run(init_logging) LOG.info('Results dir = %s', RESULTS_DIR) LOG.info('Starting imaging-modeling') PARFILE = 'parameters.json' if len(sys.argv) > 1: PARFILE = sys.argv[1] LOG.info('JSON parameter file = %s', PARFILE) try: with open(PARFILE, 'r') as par_file: jspar = json.load(par_file) except AssertionError as error: LOG.critical('ERROR %s', error) return configuration = jspar['modeling']['configuration']['name'] num_freq_win = jspar['modeling']['configuration']['num_freq_win'] num_times = jspar['modeling']['configuration']['num_times'] r_max = jspar['modeling']['configuration']['r_max'] fstart = jspar['modeling']['configuration']['fstart'] fend = jspar['modeling']['configuration']['fend'] timestart_pi = jspar['modeling']['configuration']['timestart_pi'] timeend_pi = jspar['modeling']['configuration']['timeend_pi'] polframe = jspar['modeling']['configuration']['PolarisationFrame'] frequency = numpy.linspace(fstart, fend, num_freq_win) channel_bw = numpy.array(num_freq_win * [frequency[1] - frequency[0]]) times = numpy.linspace(numpy.pi * timestart_pi, numpy.pi * timeend_pi, num_times) phase_centre = SkyCoord(ra=jspar['modeling']['phasecentre']['RA'] * u. deg, dec=jspar['modeling']['phasecentre']['Dec'] * u.deg, frame= jspar['modeling']['phasecentre']['frame'], equinox=jspar['modeling' ]['phasecentre']['equinox']) vis_list = simulate_arlexecute(configuration, frequency=frequency, channel_bandwidth=channel_bw, times=times, phasecentre=phase_centre, order=jspar['modeling']['simulate']['order'], rmax=r_max) LOG.info('%d elements in vis_list', len(vis_list)) LOG.info('About to make visibility') vis_list = arlexecute.compute(vis_list, sync=True) LOG.debug('vis_list type: %s', type(vis_list)) LOG.debug('vis_list element type: %s', type(vis_list[0])) try: export_blockvisibility_to_hdf5(vis_list, '%s/%s' % (RESULTS_DIR, jspar['files']['vis_list'])) except AssertionError as error: LOG.critical('ERROR %s', error) return wprojection_planes = jspar['advice']['wprojection_planes'] guard_band_image = jspar['advice']['guard_band_image'] delA = jspar['advice']['delA'] advice_low = advise_wide_field(vis_list[0], guard_band_image= guard_band_image, delA=delA, wprojection_planes=wprojection_planes) advice_high = advise_wide_field(vis_list[-1], guard_band_image= guard_band_image, delA=delA, wprojection_planes=wprojection_planes) vis_slices = advice_low['vis_slices'] num_pixels = advice_high['npixels2'] cellsize = min(advice_low['cellsize'], advice_high['cellsize']) gleam_model = [arlexecute.execute(create_low_test_image_from_gleam)( npixel=num_pixels, frequency=[frequency[f]], channel_bandwidth=[ channel_bw[f]], cellsize=cellsize, phasecentre=phase_centre, polarisation_frame=PolarisationFrame(polframe), flux_limit=jspar[ 'modeling']['gleam_model']['flux_limit'], applybeam=jspar[ 'modeling']['gleam_model']['applybeam']) for f, freq in enumerate( frequency)] LOG.info('About to make GLEAM model') gleam_model = arlexecute.compute(gleam_model, sync=True) LOG.info('About to run predict to get predicted visibility') future_vis_graph = arlexecute.scatter(vis_list) predicted_vis_list = predict_arlexecute(future_vis_graph, gleam_model, context=jspar['modeling']['predict']['context'], vis_slices=vis_slices) predicted_vis_list = arlexecute.compute(predicted_vis_list, sync=True) corrupted_vis_list = corrupt_arlexecute(predicted_vis_list, phase_error =jspar['modeling']['corrupt']['phase_error']) LOG.info('About to run corrupt to get corrupted visibility') corrupted_vis_list = arlexecute.compute(corrupted_vis_list, sync=True) LOG.info('About to output predicted_vislist.hdf') export_blockvisibility_to_hdf5(predicted_vis_list, '%s/%s' % ( RESULTS_DIR, jspar['files']['predicted_vis_list'])) LOG.info('About to output corrupted_vislist.hdf') export_blockvisibility_to_hdf5(corrupted_vis_list, '%s/%s' % ( RESULTS_DIR, jspar['files']['corrupted_vis_list'])) arlexecute.close()
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n init_logging()\n arlexecute.set_client(get_dask_Client())\n arlexecute.run(init_logging)\n LOG.info('Results dir = %s', RESULTS_DIR)\n LOG.info('Starting imaging-modeling')\n PARFILE = 'parameters.json'\n if len(sys.argv) > 1:\n PARFILE = sys.argv[1]\n LOG.info('JSON parameter file = %s', PARFILE)\n try:\n with open(PARFILE, 'r') as par_file:\n jspar = json.load(par_file)\n except AssertionError as error:\n LOG.critical('ERROR %s', error)\n return\n configuration = jspar['modeling']['configuration']['name']\n num_freq_win = jspar['modeling']['configuration']['num_freq_win']\n num_times = jspar['modeling']['configuration']['num_times']\n r_max = jspar['modeling']['configuration']['r_max']\n fstart = jspar['modeling']['configuration']['fstart']\n fend = jspar['modeling']['configuration']['fend']\n timestart_pi = jspar['modeling']['configuration']['timestart_pi']\n timeend_pi = jspar['modeling']['configuration']['timeend_pi']\n polframe = jspar['modeling']['configuration']['PolarisationFrame']\n frequency = numpy.linspace(fstart, fend, num_freq_win)\n channel_bw = numpy.array(num_freq_win * [frequency[1] - frequency[0]])\n times = numpy.linspace(numpy.pi * timestart_pi, numpy.pi * timeend_pi,\n num_times)\n phase_centre = SkyCoord(ra=jspar['modeling']['phasecentre']['RA'] * u.\n deg, dec=jspar['modeling']['phasecentre']['Dec'] * u.deg, frame=\n jspar['modeling']['phasecentre']['frame'], equinox=jspar['modeling'\n ]['phasecentre']['equinox'])\n vis_list = simulate_arlexecute(configuration, frequency=frequency,\n channel_bandwidth=channel_bw, times=times, phasecentre=phase_centre,\n order=jspar['modeling']['simulate']['order'], rmax=r_max)\n LOG.info('%d elements in vis_list', len(vis_list))\n LOG.info('About to make visibility')\n vis_list = arlexecute.compute(vis_list, sync=True)\n LOG.debug('vis_list type: %s', type(vis_list))\n LOG.debug('vis_list element type: %s', type(vis_list[0]))\n try:\n export_blockvisibility_to_hdf5(vis_list, '%s/%s' % (RESULTS_DIR,\n jspar['files']['vis_list']))\n except AssertionError as error:\n LOG.critical('ERROR %s', error)\n return\n wprojection_planes = jspar['advice']['wprojection_planes']\n guard_band_image = jspar['advice']['guard_band_image']\n delA = jspar['advice']['delA']\n advice_low = advise_wide_field(vis_list[0], guard_band_image=\n guard_band_image, delA=delA, wprojection_planes=wprojection_planes)\n advice_high = advise_wide_field(vis_list[-1], guard_band_image=\n guard_band_image, delA=delA, wprojection_planes=wprojection_planes)\n vis_slices = advice_low['vis_slices']\n num_pixels = advice_high['npixels2']\n cellsize = min(advice_low['cellsize'], advice_high['cellsize'])\n gleam_model = [arlexecute.execute(create_low_test_image_from_gleam)(\n npixel=num_pixels, frequency=[frequency[f]], channel_bandwidth=[\n channel_bw[f]], cellsize=cellsize, phasecentre=phase_centre,\n polarisation_frame=PolarisationFrame(polframe), flux_limit=jspar[\n 'modeling']['gleam_model']['flux_limit'], applybeam=jspar[\n 'modeling']['gleam_model']['applybeam']) for f, freq in enumerate(\n frequency)]\n LOG.info('About to make GLEAM model')\n gleam_model = arlexecute.compute(gleam_model, sync=True)\n LOG.info('About to run predict to get predicted visibility')\n future_vis_graph = arlexecute.scatter(vis_list)\n predicted_vis_list = predict_arlexecute(future_vis_graph, gleam_model,\n 
context=jspar['modeling']['predict']['context'], vis_slices=vis_slices)\n predicted_vis_list = arlexecute.compute(predicted_vis_list, sync=True)\n corrupted_vis_list = corrupt_arlexecute(predicted_vis_list, phase_error\n =jspar['modeling']['corrupt']['phase_error'])\n LOG.info('About to run corrupt to get corrupted visibility')\n corrupted_vis_list = arlexecute.compute(corrupted_vis_list, sync=True)\n LOG.info('About to output predicted_vislist.hdf')\n export_blockvisibility_to_hdf5(predicted_vis_list, '%s/%s' % (\n RESULTS_DIR, jspar['files']['predicted_vis_list']))\n LOG.info('About to output corrupted_vislist.hdf')\n export_blockvisibility_to_hdf5(corrupted_vis_list, '%s/%s' % (\n RESULTS_DIR, jspar['files']['corrupted_vis_list']))\n arlexecute.close()\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(logger_name='sip', log_level=None, p3_mode: bool=True, show_thread: bool=False, propagate: bool=False, show_log_origin=False): log = logging.getLogger(logger_name) log.propagate = propagate for handler in log.handlers: log.removeHandler(handler) _debug = '%(filename)s:%(lineno)d | ' if show_log_origin else '' if p3_mode: _prefix = '%(asctime)s - %(name)s - %(levelname)s' if show_thread: _format = '{} - %(threadName)s - {}%(message)s'.format(_prefix, _debug) else: _format = '{} - {}%(message)s'.format(_prefix, _debug) formatter = logging.Formatter(_format) formatter.converter = time.gmtime else: _prefix = '%(asctime)s | %(name)s | %(levelname)s' if show_thread: _format = '{} | %(threadName)s | {}%(message)s'.format(_prefix, _debug) else: _format = '{} | {}%(message)s'.format(_prefix, _debug) formatter = SIPFormatter(_format, datefmt='%Y-%m-%dT%H:%M:%S.%fZ') handler = logging.StreamHandler(stream=sys.stdout) handler.setFormatter(formatter) log.addHandler(handler) if log_level: log.setLevel(log_level) else: log.setLevel(os.getenv('SIP_LOG_LEVEL', 'DEBUG'))
init_logger
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger_name='sip', log_level=None, p3_mode: bool=True, show_thread:\n bool=False, propagate: bool=False, show_log_origin=False):\n log = logging.getLogger(logger_name)\n log.propagate = propagate\n for handler in log.handlers:\n log.removeHandler(handler)\n _debug = '%(filename)s:%(lineno)d | ' if show_log_origin else ''\n if p3_mode:\n _prefix = '%(asctime)s - %(name)s - %(levelname)s'\n if show_thread:\n _format = '{} - %(threadName)s - {}%(message)s'.format(_prefix,\n _debug)\n else:\n _format = '{} - {}%(message)s'.format(_prefix, _debug)\n formatter = logging.Formatter(_format)\n formatter.converter = time.gmtime\n else:\n _prefix = '%(asctime)s | %(name)s | %(levelname)s'\n if show_thread:\n _format = '{} | %(threadName)s | {}%(message)s'.format(_prefix,\n _debug)\n else:\n _format = '{} | {}%(message)s'.format(_prefix, _debug)\n formatter = SIPFormatter(_format, datefmt='%Y-%m-%dT%H:%M:%S.%fZ')\n handler = logging.StreamHandler(stream=sys.stdout)\n handler.setFormatter(formatter)\n log.addHandler(handler)\n if log_level:\n log.setLevel(log_level)\n else:\n log.setLevel(os.getenv('SIP_LOG_LEVEL', 'DEBUG'))\n", "role": "user" }, { "content": "init_logger", "role": "assistant" } ]
def x(logger_name: str, propagate: bool=False): log = logging.getLogger(logger_name) log.propagate = propagate for handler in log.handlers: log.removeHandler(handler)
disable_logger
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger_name: str, propagate: bool=False):\n log = logging.getLogger(logger_name)\n log.propagate = propagate\n for handler in log.handlers:\n log.removeHandler(handler)\n", "role": "user" }, { "content": "disable_logger", "role": "assistant" } ]
def x(logger_name: str, log_level: str, propagate: bool=False): log = logging.getLogger(logger_name) log.propagate = propagate log.setLevel(log_level)
set_log_level
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(logger_name: str, log_level: str, propagate: bool=False):\n log = logging.getLogger(logger_name)\n log.propagate = propagate\n log.setLevel(log_level)\n", "role": "user" }, { "content": "set_log_level", "role": "assistant" } ]
def x(self, record, datefmt=None): _seconds_fraction = record.created - int(record.created) _datetime_utc = time.mktime(time.gmtime(record.created)) _datetime_utc += _seconds_fraction _created = self.converter(_datetime_utc) if datefmt: time_string = _created.strftime(datefmt) else: time_string = _created.strftime('%Y-%m-%dT%H:%M:%S.%fZ') time_string = '%s,%03d' % (time_string, record.msecs) return time_string
formatTime
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, record, datefmt=None):\n _seconds_fraction = record.created - int(record.created)\n _datetime_utc = time.mktime(time.gmtime(record.created))\n _datetime_utc += _seconds_fraction\n _created = self.converter(_datetime_utc)\n if datefmt:\n time_string = _created.strftime(datefmt)\n else:\n time_string = _created.strftime('%Y-%m-%dT%H:%M:%S.%fZ')\n time_string = '%s,%03d' % (time_string, record.msecs)\n return time_string\n", "role": "user" }, { "content": "formatTime", "role": "assistant" } ]
def x(index: int=None): date = datetime.datetime.utcnow().strftime('%Y%m%d') if index is None: index = randint(0, 999) sbi_id = 'SBI-{}-sip-demo-{:03d}'.format(date, index) sb_id = 'SBI-{}-sip-demo-{:03d}'.format(date, index) pb_id = 'PB-{}-sip-demo-{:03d}'.format(date, index) print('* Generating SBI: %s, PB: %s' % (sb_id, pb_id)) sbi = dict(id=sbi_id, version='1.0.0', scheduling_block=dict(id=sb_id, project='sip', programme_block='sip_demos'), processing_blocks=[ dict(id=pb_id, version='1.0.0', type='offline', priority=1, dependencies=[], resources_required=[], workflow=dict(id= 'mock_workflow', version='1.0.0', parameters=dict(stage1=dict( duration=30), stage2=dict(duration=30), stage3=dict(duration=30))))]) return sbi
generate_sbi
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(index: int=None):\n date = datetime.datetime.utcnow().strftime('%Y%m%d')\n if index is None:\n index = randint(0, 999)\n sbi_id = 'SBI-{}-sip-demo-{:03d}'.format(date, index)\n sb_id = 'SBI-{}-sip-demo-{:03d}'.format(date, index)\n pb_id = 'PB-{}-sip-demo-{:03d}'.format(date, index)\n print('* Generating SBI: %s, PB: %s' % (sb_id, pb_id))\n sbi = dict(id=sbi_id, version='1.0.0', scheduling_block=dict(id=sb_id,\n project='sip', programme_block='sip_demos'), processing_blocks=[\n dict(id=pb_id, version='1.0.0', type='offline', priority=1,\n dependencies=[], resources_required=[], workflow=dict(id=\n 'mock_workflow', version='1.0.0', parameters=dict(stage1=dict(\n duration=30), stage2=dict(duration=30), stage3=dict(duration=30))))])\n return sbi\n", "role": "user" }, { "content": "generate_sbi", "role": "assistant" } ]
def x(self, config, log, obs_id, beam_id): log.info('Starting Pulsar Data Transfer...') socket = self._ftp.transfercmd('STOR {0}_{1}'.format(obs_id, beam_id)) socket.send(json.dumps(config).encode()) socket.send(bytearray(1000 * 1000)) config['metadata']['name'] = 'candidate_two' socket.send(json.dumps(config).encode()) socket.send(bytearray(1000 * 1000)) socket.close() log.info('Pulsar Data Transfer Completed...')
send
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, config, log, obs_id, beam_id):\n log.info('Starting Pulsar Data Transfer...')\n socket = self._ftp.transfercmd('STOR {0}_{1}'.format(obs_id, beam_id))\n socket.send(json.dumps(config).encode())\n socket.send(bytearray(1000 * 1000))\n config['metadata']['name'] = 'candidate_two'\n socket.send(json.dumps(config).encode())\n socket.send(bytearray(1000 * 1000))\n socket.close()\n log.info('Pulsar Data Transfer Completed...')\n", "role": "user" }, { "content": "send", "role": "assistant" } ]
def x(block_id): _url = get_root_url() try: block = DB.get_block_details([block_id]).__next__() response = block response['links'] = {'self': '{}'.format(request.url), 'list': '{}/processing-blocks'.format(_url), 'home': '{}'.format(_url)} return block except IndexError as error: response = dict(message='Unable to GET Processing Block', id='{}'. format(block_id), error=error.__str__()) response['links'] = {'list': '{}/processing-blocks'.format(_url), 'home': '{}'.format(_url)} return response, HTTPStatus.NOT_FOUND
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(block_id):\n _url = get_root_url()\n try:\n block = DB.get_block_details([block_id]).__next__()\n response = block\n response['links'] = {'self': '{}'.format(request.url), 'list':\n '{}/processing-blocks'.format(_url), 'home': '{}'.format(_url)}\n return block\n except IndexError as error:\n response = dict(message='Unable to GET Processing Block', id='{}'.\n format(block_id), error=error.__str__())\n response['links'] = {'list': '{}/processing-blocks'.format(_url),\n 'home': '{}'.format(_url)}\n return response, HTTPStatus.NOT_FOUND\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(block_id): _url = get_root_url() try: DB.delete_processing_block(block_id) response = dict(message='Deleted block', id='{}'.format(block_id), links=dict(list='{}/processing-blocks'.format(_url), home='{}'. format(_url))) return response, HTTPStatus.OK except RuntimeError as error: response = dict(error='Failed to delete Processing Block: {}'. format(block_id), reason=str(error), links=dict(list= '{}/processing-blocks'.format(_url), home='{}'.format(_url))) return response, HTTPStatus.OK
delete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(block_id):\n _url = get_root_url()\n try:\n DB.delete_processing_block(block_id)\n response = dict(message='Deleted block', id='{}'.format(block_id),\n links=dict(list='{}/processing-blocks'.format(_url), home='{}'.\n format(_url)))\n return response, HTTPStatus.OK\n except RuntimeError as error:\n response = dict(error='Failed to delete Processing Block: {}'.\n format(block_id), reason=str(error), links=dict(list=\n '{}/processing-blocks'.format(_url), home='{}'.format(_url)))\n return response, HTTPStatus.OK\n", "role": "user" }, { "content": "delete", "role": "assistant" } ]
def x(self): Device.init_device(self) time.sleep(0.1) self.set_state(DevState.STANDBY)
init_device
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n Device.init_device(self)\n time.sleep(0.1)\n self.set_state(DevState.STANDBY)\n", "role": "user" }, { "content": "init_device", "role": "assistant" } ]
def x(self, sbi_config: str): config_dict = json.loads(sbi_config) self.debug_stream('SBI configuration:\n%s', json.dumps(config_dict, indent=2)) try: sbi = Subarray(self.get_name()).configure_sbi(config_dict) except jsonschema.exceptions.ValidationError as error: return json.dumps(dict(path=error.absolute_path.__str__(), schema_path=error.schema_path.__str__(), message=error.message), indent=2) except RuntimeError as error: return json.dumps(dict(error=str(error)), indent=2) return 'Accepted SBI: {}'.format(sbi.id)
configure
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, sbi_config: str):\n config_dict = json.loads(sbi_config)\n self.debug_stream('SBI configuration:\\n%s', json.dumps(config_dict,\n indent=2))\n try:\n sbi = Subarray(self.get_name()).configure_sbi(config_dict)\n except jsonschema.exceptions.ValidationError as error:\n return json.dumps(dict(path=error.absolute_path.__str__(),\n schema_path=error.schema_path.__str__(), message=error.message),\n indent=2)\n except RuntimeError as error:\n return json.dumps(dict(error=str(error)), indent=2)\n return 'Accepted SBI: {}'.format(sbi.id)\n", "role": "user" }, { "content": "configure", "role": "assistant" } ]
def x(self): sbi_ids = Subarray(self.get_name()).sbi_ids pbs = [] for sbi_id in sbi_ids: sbi = SchedulingBlockInstance(sbi_id) pbs.append(sbi.processing_block_ids) return 'PB', pbs
processing_blocks
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n sbi_ids = Subarray(self.get_name()).sbi_ids\n pbs = []\n for sbi_id in sbi_ids:\n sbi = SchedulingBlockInstance(sbi_id)\n pbs.append(sbi.processing_block_ids)\n return 'PB', pbs\n", "role": "user" }, { "content": "processing_blocks", "role": "assistant" } ]
def x(value: str): if sys.version_info >= (3, 7): return datetime.fromisoformat(value) return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
datetime_from_isoformat
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(value: str):\n if sys.version_info >= (3, 7):\n return datetime.fromisoformat(value)\n return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')\n", "role": "user" }, { "content": "datetime_from_isoformat", "role": "assistant" } ]
def x(self): Device.init_device(self) self._set_master_state('on') self._devProxy = DeviceProxy(self.get_name())
init_device
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n Device.init_device(self)\n self._set_master_state('on')\n self._devProxy = DeviceProxy(self.get_name())\n", "role": "user" }, { "content": "init_device", "role": "assistant" } ]
def x(self): _logT = self._devProxy.get_logging_target() if 'device::sip_sdp_logger' not in _logT: try: self._devProxy.add_logging_target('device::sip_sdp/elt/logger') self.info_stream("Test of Tango logging from 'tc_tango_master'") except Exception as e: LOG.debug('Failed to setup Tango logging %s', e)
always_executed_hook
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n _logT = self._devProxy.get_logging_target()\n if 'device::sip_sdp_logger' not in _logT:\n try:\n self._devProxy.add_logging_target('device::sip_sdp/elt/logger')\n self.info_stream(\"Test of Tango logging from 'tc_tango_master'\")\n except Exception as e:\n LOG.debug('Failed to setup Tango logging %s', e)\n", "role": "user" }, { "content": "always_executed_hook", "role": "assistant" } ]
def x(self, value): if self._sdp_state.current_state != 'on': raise RuntimeWarning("Unable to configure SBIs unless SDP is 'on'.") sbi_config_dict = json.loads(value) sbi_list = SchedulingBlockInstanceList() LOG.info('SBIs before: %s', sbi_list.active) if sbi_config_dict.get('id') in sbi_list.active: raise RuntimeWarning( 'Unable to add SBI with ID {}, an SBI with this ID is already registered with SDP!' .format(sbi_config_dict.get('id'))) LOG.info('Scheduling offline SBI! Config:\n%s', value) sbi = SchedulingBlockInstance.from_config(sbi_config_dict) LOG.info('SBIs after: %s', sbi_list.active) sbi_pb_ids = sbi.processing_block_ids LOG.info('SBI "%s" contains PBs: %s', sbi.id, sbi_pb_ids) tango_db = Database() pb_device_class = 'ProcessingBlockDevice' pb_device_server_instance = 'processing_block_ds/1' pb_devices = tango_db.get_device_name(pb_device_server_instance, pb_device_class) LOG.info('Number of PB devices in the pool = %d', len(pb_devices)) for pb_id in sbi_pb_ids: for pb_device_name in pb_devices: device = DeviceProxy(pb_device_name) if not device.pb_id: LOG.info('Assigning PB device = %s to PB id = %s', pb_device_name, pb_id) device.pb_id = pb_id break
configure
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, value):\n if self._sdp_state.current_state != 'on':\n raise RuntimeWarning(\"Unable to configure SBIs unless SDP is 'on'.\")\n sbi_config_dict = json.loads(value)\n sbi_list = SchedulingBlockInstanceList()\n LOG.info('SBIs before: %s', sbi_list.active)\n if sbi_config_dict.get('id') in sbi_list.active:\n raise RuntimeWarning(\n 'Unable to add SBI with ID {}, an SBI with this ID is already registered with SDP!'\n .format(sbi_config_dict.get('id')))\n LOG.info('Scheduling offline SBI! Config:\\n%s', value)\n sbi = SchedulingBlockInstance.from_config(sbi_config_dict)\n LOG.info('SBIs after: %s', sbi_list.active)\n sbi_pb_ids = sbi.processing_block_ids\n LOG.info('SBI \"%s\" contains PBs: %s', sbi.id, sbi_pb_ids)\n tango_db = Database()\n pb_device_class = 'ProcessingBlockDevice'\n pb_device_server_instance = 'processing_block_ds/1'\n pb_devices = tango_db.get_device_name(pb_device_server_instance,\n pb_device_class)\n LOG.info('Number of PB devices in the pool = %d', len(pb_devices))\n for pb_id in sbi_pb_ids:\n for pb_device_name in pb_devices:\n device = DeviceProxy(pb_device_name)\n if not device.pb_id:\n LOG.info('Assigning PB device = %s to PB id = %s',\n pb_device_name, pb_id)\n device.pb_id = pb_id\n break\n", "role": "user" }, { "content": "configure", "role": "assistant" } ]
def x(service_id: str): LOG.debug('Getting state of service %s', service_id) services = get_service_id_list() service_ids = [s for s in services if service_id in s] if len(service_ids) != 1: return 'Service not found! services = {}'.format(str(services)) subsystem, name, version = service_ids[0].split(':') return ServiceState(subsystem, name, version)
get_service_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(service_id: str):\n LOG.debug('Getting state of service %s', service_id)\n services = get_service_id_list()\n service_ids = [s for s in services if service_id in s]\n if len(service_ids) != 1:\n return 'Service not found! services = {}'.format(str(services))\n subsystem, name, version = service_ids[0].split(':')\n return ServiceState(subsystem, name, version)\n", "role": "user" }, { "content": "get_service_state", "role": "assistant" } ]
def x(self, service_id: str): state = self._get_service_state(service_id) return state.current_state
get_current_service_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, service_id: str):\n state = self._get_service_state(service_id)\n return state.current_state\n", "role": "user" }, { "content": "get_current_service_state", "role": "assistant" } ]
def x(self, service_id: str): state = self._get_service_state(service_id) return state.target_state
get_target_service_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, service_id: str):\n state = self._get_service_state(service_id)\n return state.target_state\n", "role": "user" }, { "content": "get_target_service_state", "role": "assistant" } ]
def x(self): _current_state = self._sdp_state.current_state _allowed_target_states = self._sdp_state.allowed_target_states[ _current_state] return json.dumps(dict(allowed_target_sdp_states=_allowed_target_states))
allowed_target_sdp_states
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n _current_state = self._sdp_state.current_state\n _allowed_target_states = self._sdp_state.allowed_target_states[\n _current_state]\n return json.dumps(dict(allowed_target_sdp_states=_allowed_target_states))\n", "role": "user" }, { "content": "allowed_target_sdp_states", "role": "assistant" } ]
def x(self, state): LOG.info('Setting SDP target state to %s', state) if self._sdp_state.current_state == state: LOG.info('Target state ignored, SDP is already "%s"!', state) if state == 'on': self.set_state(DevState.ON) if state == 'off': self.set_state(DevState.OFF) if state == 'standby': self.set_state(DevState.STANDBY) if state == 'disable': self.set_state(DevState.DISABLE) self._sdp_state.update_target_state(state)
target_sdp_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, state):\n LOG.info('Setting SDP target state to %s', state)\n if self._sdp_state.current_state == state:\n LOG.info('Target state ignored, SDP is already \"%s\"!', state)\n if state == 'on':\n self.set_state(DevState.ON)\n if state == 'off':\n self.set_state(DevState.OFF)\n if state == 'standby':\n self.set_state(DevState.STANDBY)\n if state == 'disable':\n self.set_state(DevState.DISABLE)\n self._sdp_state.update_target_state(state)\n", "role": "user" }, { "content": "target_sdp_state", "role": "assistant" } ]
def x(self): return json.dumps(dict(uptime='{:.3f}s'.format(time.time() - self. _start_time)))
health
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n return json.dumps(dict(uptime='{:.3f}s'.format(time.time() - self.\n _start_time)))\n", "role": "user" }, { "content": "health", "role": "assistant" } ]
def x(self): sbi_list = SchedulingBlockInstanceList() return json.dumps(dict(active=sbi_list.active, completed=sbi_list. completed, aborted=sbi_list.aborted))
scheduling_block_instances
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n sbi_list = SchedulingBlockInstanceList()\n return json.dumps(dict(active=sbi_list.active, completed=sbi_list.\n completed, aborted=sbi_list.aborted))\n", "role": "user" }, { "content": "scheduling_block_instances", "role": "assistant" } ]
def x(self): pb_list = ProcessingBlockList() return json.dumps(dict(active=pb_list.active, completed=pb_list. completed, aborted=pb_list.aborted))
processing_blocks
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n pb_list = ProcessingBlockList()\n return json.dumps(dict(active=pb_list.active, completed=pb_list.\n completed, aborted=pb_list.aborted))\n", "role": "user" }, { "content": "processing_blocks", "role": "assistant" } ]
def x(self): tango_db = Database() pb_device_class = 'ProcessingBlockDevice' pb_device_server_instance = 'processing_block_ds/1' pb_devices = tango_db.get_device_name(pb_device_server_instance, pb_device_class) LOG.info('Number of PB devices in the pool = %d', len(pb_devices)) pb_device_map = [] for pb_device_name in pb_devices: device = DeviceProxy(pb_device_name) if device.pb_id: LOG.info('%s %s', pb_device_name, device.pb_id) pb_device_map.append((pb_device_name, device.pb_id)) return str(pb_device_map)
processing_block_devices
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n tango_db = Database()\n pb_device_class = 'ProcessingBlockDevice'\n pb_device_server_instance = 'processing_block_ds/1'\n pb_devices = tango_db.get_device_name(pb_device_server_instance,\n pb_device_class)\n LOG.info('Number of PB devices in the pool = %d', len(pb_devices))\n pb_device_map = []\n for pb_device_name in pb_devices:\n device = DeviceProxy(pb_device_name)\n if device.pb_id:\n LOG.info('%s %s', pb_device_name, device.pb_id)\n pb_device_map.append((pb_device_name, device.pb_id))\n return str(pb_device_map)\n", "role": "user" }, { "content": "processing_block_devices", "role": "assistant" } ]
def x(self, state): if state == 'init': self._service_state.update_current_state('init', force=True) self.set_state(DevState.INIT) elif state == 'on': self.set_state(DevState.ON) self._service_state.update_current_state('on')
set_master_state
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, state):\n if state == 'init':\n self._service_state.update_current_state('init', force=True)\n self.set_state(DevState.INIT)\n elif state == 'on':\n self.set_state(DevState.ON)\n self._service_state.update_current_state('on')\n", "role": "user" }, { "content": "set_master_state", "role": "assistant" } ]
def x(): tango_db = Database() LOG.info('Registering Subarray devices:') device_info = DbDevInfo() device_info._class = 'SubarrayDevice' device_info.server = 'subarray_ds/1' for index in range(16): device_info.name = 'sip_sdp/elt/subarray_{:02d}'.format(index) LOG.info('\t%s', device_info.name) tango_db.add_device(device_info) tango_db.put_class_property(device_info._class, dict(version='1.0.0'))
register_subarray_devices
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n tango_db = Database()\n LOG.info('Registering Subarray devices:')\n device_info = DbDevInfo()\n device_info._class = 'SubarrayDevice'\n device_info.server = 'subarray_ds/1'\n for index in range(16):\n device_info.name = 'sip_sdp/elt/subarray_{:02d}'.format(index)\n LOG.info('\\t%s', device_info.name)\n tango_db.add_device(device_info)\n tango_db.put_class_property(device_info._class, dict(version='1.0.0'))\n", "role": "user" }, { "content": "register_subarray_devices", "role": "assistant" } ]
def x(sbi_config: dict): registered_workflows = [] for i in range(len(sbi_config['processing_blocks'])): workflow_config = sbi_config['processing_blocks'][i]['workflow'] workflow_name = '{}:{}'.format(workflow_config['id'], workflow_config['version']) if workflow_name in registered_workflows: continue workflow_definition = dict(id=workflow_config['id'], version= workflow_config['version'], stages=[]) key = 'workflow_definitions:{}:{}'.format(workflow_config['id'], workflow_config['version']) DB.save_dict(key, workflow_definition, hierarchical=False) registered_workflows.append(workflow_name)
add_workflow_definitions
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(sbi_config: dict):\n registered_workflows = []\n for i in range(len(sbi_config['processing_blocks'])):\n workflow_config = sbi_config['processing_blocks'][i]['workflow']\n workflow_name = '{}:{}'.format(workflow_config['id'],\n workflow_config['version'])\n if workflow_name in registered_workflows:\n continue\n workflow_definition = dict(id=workflow_config['id'], version=\n workflow_config['version'], stages=[])\n key = 'workflow_definitions:{}:{}'.format(workflow_config['id'],\n workflow_config['version'])\n DB.save_dict(key, workflow_definition, hierarchical=False)\n registered_workflows.append(workflow_name)\n", "role": "user" }, { "content": "add_workflow_definitions", "role": "assistant" } ]
def x(max_major: int=1, max_minor: int=7, max_patch: int=15) ->str: major = randint(0, max_major) minor = randint(0, max_minor) patch = randint(0, max_patch) return '{:d}.{:d}.{:d}'.format(major, minor, patch)
generate_version
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(max_major: int=1, max_minor: int=7, max_patch: int=15) ->str:\n major = randint(0, max_major)\n minor = randint(0, max_minor)\n patch = randint(0, max_patch)\n return '{:d}.{:d}.{:d}'.format(major, minor, patch)\n", "role": "user" }, { "content": "generate_version", "role": "assistant" } ]
def x(date: datetime.datetime, project: str, programme_block: str) ->dict: date = date.strftime('%Y%m%d') instance_id = randint(0, 9999) sb_id = 'SB-{}-{}-{:04d}'.format(date, project, instance_id) return dict(id=sb_id, project=project, programme_block=programme_block)
generate_sb
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(date: datetime.datetime, project: str, programme_block: str) ->dict:\n date = date.strftime('%Y%m%d')\n instance_id = randint(0, 9999)\n sb_id = 'SB-{}-{}-{:04d}'.format(date, project, instance_id)\n return dict(id=sb_id, project=project, programme_block=programme_block)\n", "role": "user" }, { "content": "generate_sb", "role": "assistant" } ]
def x(pb_id: str, pb_config: dict=None, workflow_config: dict=None) ->dict: if workflow_config is None: workflow_config = dict() if pb_config is None: pb_config = dict() pb_type = pb_config.get('type', choice(PB_TYPES)) workflow_id = workflow_config.get('id') if workflow_id is None: if pb_type == 'offline': workflow_id = choice(OFFLINE_WORKFLOWS) else: workflow_id = choice(REALTIME_WORKFLOWS) workflow_version = workflow_config.get('version', generate_version()) workflow_parameters = workflow_config.get('parameters', dict()) pb_data = dict(id=pb_id, version=__pb_version__, type=pb_type, priority =pb_config.get('priority', randint(0, 10)), dependencies=pb_config. get('dependencies', []), resources_required=pb_config.get( 'resources_required', []), workflow=dict(id=workflow_id, version= workflow_version, parameters=workflow_parameters)) return pb_data
generate_pb_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(pb_id: str, pb_config: dict=None, workflow_config: dict=None) ->dict:\n if workflow_config is None:\n workflow_config = dict()\n if pb_config is None:\n pb_config = dict()\n pb_type = pb_config.get('type', choice(PB_TYPES))\n workflow_id = workflow_config.get('id')\n if workflow_id is None:\n if pb_type == 'offline':\n workflow_id = choice(OFFLINE_WORKFLOWS)\n else:\n workflow_id = choice(REALTIME_WORKFLOWS)\n workflow_version = workflow_config.get('version', generate_version())\n workflow_parameters = workflow_config.get('parameters', dict())\n pb_data = dict(id=pb_id, version=__pb_version__, type=pb_type, priority\n =pb_config.get('priority', randint(0, 10)), dependencies=pb_config.\n get('dependencies', []), resources_required=pb_config.get(\n 'resources_required', []), workflow=dict(id=workflow_id, version=\n workflow_version, parameters=workflow_parameters))\n return pb_data\n", "role": "user" }, { "content": "generate_pb_config", "role": "assistant" } ]
def x(num_pbs: int=3, project: str='sip', programme_block: str='sip_demos', pb_config: Union[dict, List[dict]]=None, workflow_config: Union[dict, List[dict]]=None, register_workflows=False) ->dict: if isinstance(workflow_config, dict): workflow_config = [workflow_config] if isinstance(pb_config, dict): pb_config = [pb_config] utc_now = datetime.datetime.utcnow() pb_list = [] for i in range(num_pbs): pb_id = ProcessingBlock.get_id(utc_now) if workflow_config is not None: _workflow_config = workflow_config[i] else: _workflow_config = None if pb_config is not None: _pb_config = pb_config[i] else: _pb_config = None pb_dict = generate_pb_config(pb_id, _pb_config, _workflow_config) pb_list.append(pb_dict) sbi_config = dict(id=SchedulingBlockInstance.get_id(utc_now, project), version=__sbi_version__, scheduling_block=generate_sb(utc_now, project, programme_block), processing_blocks=pb_list) if register_workflows: add_workflow_definitions(sbi_config) return sbi_config
generate_sbi_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(num_pbs: int=3, project: str='sip', programme_block: str='sip_demos',\n pb_config: Union[dict, List[dict]]=None, workflow_config: Union[dict,\n List[dict]]=None, register_workflows=False) ->dict:\n if isinstance(workflow_config, dict):\n workflow_config = [workflow_config]\n if isinstance(pb_config, dict):\n pb_config = [pb_config]\n utc_now = datetime.datetime.utcnow()\n pb_list = []\n for i in range(num_pbs):\n pb_id = ProcessingBlock.get_id(utc_now)\n if workflow_config is not None:\n _workflow_config = workflow_config[i]\n else:\n _workflow_config = None\n if pb_config is not None:\n _pb_config = pb_config[i]\n else:\n _pb_config = None\n pb_dict = generate_pb_config(pb_id, _pb_config, _workflow_config)\n pb_list.append(pb_dict)\n sbi_config = dict(id=SchedulingBlockInstance.get_id(utc_now, project),\n version=__sbi_version__, scheduling_block=generate_sb(utc_now,\n project, programme_block), processing_blocks=pb_list)\n if register_workflows:\n add_workflow_definitions(sbi_config)\n return sbi_config\n", "role": "user" }, { "content": "generate_sbi_config", "role": "assistant" } ]
def x(num_pbs: int=3, project: str='sip', programme_block: str='sip_demos', pb_config: Union[dict, List[dict]]=None, workflow_config: Union[dict, List[dict]]=None, register_workflows=True) ->str: return json.dumps(generate_sbi_config(num_pbs, project, programme_block, pb_config, workflow_config, register_workflows))
generate_sbi_json
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(num_pbs: int=3, project: str='sip', programme_block: str='sip_demos',\n pb_config: Union[dict, List[dict]]=None, workflow_config: Union[dict,\n List[dict]]=None, register_workflows=True) ->str:\n return json.dumps(generate_sbi_config(num_pbs, project, programme_block,\n pb_config, workflow_config, register_workflows))\n", "role": "user" }, { "content": "generate_sbi_json", "role": "assistant" } ]
def x(self, argin): tm = datetime.datetime.fromtimestamp(float(argin[0]) / 1000.0) fmt = '%Y-%m-%d %H:%M:%S' message = 'TANGO Log message - {} - {} {} {}'.format(tm.strftime(fmt), argin[1], argin[2], argin[3]) LOG.info(message)
log
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, argin):\n tm = datetime.datetime.fromtimestamp(float(argin[0]) / 1000.0)\n fmt = '%Y-%m-%d %H:%M:%S'\n message = 'TANGO Log message - {} - {} {} {}'.format(tm.strftime(fmt),\n argin[1], argin[2], argin[3])\n LOG.info(message)\n", "role": "user" }, { "content": "log", "role": "assistant" } ]
def x(block_id):
    try:
        sbi_details = next(DB.get_block_details([block_id]))
        sbi_details['processing_blocks'] = []
        pb_ids = sbi_details['processing_block_ids']
        for pb_details in DB.get_block_details(pb_ids):
            sbi_details['processing_blocks'].append(pb_details)
        del sbi_details['processing_block_ids']
        _url = get_root_url()
        sbi_details['links'] = {'scheduling-blocks': '{}/scheduling-blocks'.
            format(_url), 'home': '{}'.format(_url)}
        return sbi_details, HTTPStatus.OK
    except IndexError:
        return {'error': 'specified block id not found {}'.format(block_id)
            }, HTTPStatus.NOT_FOUND
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(block_id):\n try:\n sbi_details = DB.get_block_details([block_id]).__next__()\n sbi_details['processing_blocks'] = []\n pb_ids = sbi_details['processing_block_ids']\n for pb_details in DB.get_block_details(pb_ids):\n sbi_details['processing_blocks'].append(pb_details)\n del sbi_details['processing_block_ids']\n response = sbi_details\n _url = get_root_url()\n response['links'] = {'scheduling-blocks': '{}/scheduling-blocks'.\n format(_url), 'home': '{}'.format(_url)}\n return sbi_details, HTTPStatus.OK\n except IndexError:\n return {'error': 'specified block id not found {}'.format(block_id)\n }, HTTPStatus.NOT_FOUND\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(block_id): _url = get_root_url() LOG.debug('Requested delete of SBI %s', block_id) try: DB.delete_sched_block_instance(block_id) response = dict(message='Deleted block: _id = {}'.format(block_id)) response['_links'] = {'list': '{}/scheduling-blocks'.format(_url)} return response, HTTPStatus.OK except RuntimeError as error: return dict(error=str(error)), HTTPStatus.BAD_REQUEST
delete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(block_id):\n _url = get_root_url()\n LOG.debug('Requested delete of SBI %s', block_id)\n try:\n DB.delete_sched_block_instance(block_id)\n response = dict(message='Deleted block: _id = {}'.format(block_id))\n response['_links'] = {'list': '{}/scheduling-blocks'.format(_url)}\n return response, HTTPStatus.OK\n except RuntimeError as error:\n return dict(error=str(error)), HTTPStatus.BAD_REQUEST\n", "role": "user" }, { "content": "delete", "role": "assistant" } ]
def x(num_blocks, start_id, project): for i in range(num_blocks): _root = '{}-{}'.format(strftime('%Y%m%d', gmtime()), project) yield '{}-sb{:03d}'.format(_root, i + start_id), '{}-sbi{:03d}'.format( _root, i + start_id)
scheduling_block_ids
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(num_blocks, start_id, project):\n for i in range(num_blocks):\n _root = '{}-{}'.format(strftime('%Y%m%d', gmtime()), project)\n yield '{}-sb{:03d}'.format(_root, i + start_id), '{}-sbi{:03d}'.format(\n _root, i + start_id)\n", "role": "user" }, { "content": "scheduling_block_ids", "role": "assistant" } ]
def x(start_id, min_blocks=0, max_blocks=4): processing_blocks = [] num_blocks = random.randint(min_blocks, max_blocks) for i in range(start_id, start_id + num_blocks): _id = 'sip-pb{:03d}'.format(i) block = dict(id=_id, resources_requirement={}, workflow={}) processing_blocks.append(block) return processing_blocks
generate_processing_blocks
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(start_id, min_blocks=0, max_blocks=4):\n processing_blocks = []\n num_blocks = random.randint(min_blocks, max_blocks)\n for i in range(start_id, start_id + num_blocks):\n _id = 'sip-pb{:03d}'.format(i)\n block = dict(id=_id, resources_requirement={}, workflow={})\n processing_blocks.append(block)\n return processing_blocks\n", "role": "user" }, { "content": "generate_processing_blocks", "role": "assistant" } ]
def x(num_blocks=5, start_sbi_id=0, start_pb_id=0, project='sip'): pb_id = start_pb_id for sb_id, sbi_id in _scheduling_block_ids(num_blocks, start_sbi_id, project): sub_array_id = 'subarray-{:02d}'.format(random.choice(range(5))) config = dict(id=sbi_id, sched_block_id=sb_id, sub_array_id= sub_array_id, processing_blocks=_generate_processing_blocks(pb_id)) pb_id += len(config['processing_blocks']) yield config
scheduling_block_config
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(num_blocks=5, start_sbi_id=0, start_pb_id=0, project='sip'):\n pb_id = start_pb_id\n for sb_id, sbi_id in _scheduling_block_ids(num_blocks, start_sbi_id,\n project):\n sub_array_id = 'subarray-{:02d}'.format(random.choice(range(5)))\n config = dict(id=sbi_id, sched_block_id=sb_id, sub_array_id=\n sub_array_id, processing_blocks=_generate_processing_blocks(pb_id))\n pb_id += len(config['processing_blocks'])\n yield config\n", "role": "user" }, { "content": "scheduling_block_config", "role": "assistant" } ]
def x(num_blocks, clear=True): db_client = ConfigDb() if clear: LOG.info('Resetting database ...') db_client.clear() start_sbi_id = 0 start_pb_id = 0 else: start_sbi_id = len(db_client.get_sched_block_instance_ids()) start_pb_id = len(db_client.get_processing_block_ids()) LOG.info('Adding %i SBIs to the db', num_blocks) for config in _scheduling_block_config(num_blocks, start_sbi_id, start_pb_id): LOG.info('Creating SBI %s with %i PBs.', config['id'], len(config[ 'processing_blocks'])) db_client.add_sched_block_instance(config)
add_scheduling_blocks
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(num_blocks, clear=True):\n db_client = ConfigDb()\n if clear:\n LOG.info('Resetting database ...')\n db_client.clear()\n start_sbi_id = 0\n start_pb_id = 0\n else:\n start_sbi_id = len(db_client.get_sched_block_instance_ids())\n start_pb_id = len(db_client.get_processing_block_ids())\n LOG.info('Adding %i SBIs to the db', num_blocks)\n for config in _scheduling_block_config(num_blocks, start_sbi_id,\n start_pb_id):\n LOG.info('Creating SBI %s with %i PBs.', config['id'], len(config[\n 'processing_blocks']))\n db_client.add_sched_block_instance(config)\n", "role": "user" }, { "content": "add_scheduling_blocks", "role": "assistant" } ]
def x(): log = logging.getLogger('SIP.EC.PCI.DB') handler = logging.StreamHandler() handler.setFormatter(logging.Formatter( '%(name)-20s | %(filename)-15s | %(levelname)-5s | %(message)s')) log.addHandler(handler) log.setLevel(os.getenv('SIP_PCI_LOG_LEVEL', 'INFO')) num_blocks = int(sys.argv[1]) if len(sys.argv) == 2 else 3 add_scheduling_blocks(num_blocks)
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n log = logging.getLogger('SIP.EC.PCI.DB')\n handler = logging.StreamHandler()\n handler.setFormatter(logging.Formatter(\n '%(name)-20s | %(filename)-15s | %(levelname)-5s | %(message)s'))\n log.addHandler(handler)\n log.setLevel(os.getenv('SIP_PCI_LOG_LEVEL', 'INFO'))\n num_blocks = int(sys.argv[1]) if len(sys.argv) == 2 else 3\n add_scheduling_blocks(num_blocks)\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(args=None, **kwargs): LOG.info('Starting SDP PB devices.') return run([ProcessingBlockDevice], verbose=True, msg_stream=sys.stdout, args=args, **kwargs)
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(args=None, **kwargs):\n LOG.info('Starting SDP PB devices.')\n return run([ProcessingBlockDevice], verbose=True, msg_stream=sys.stdout,\n args=args, **kwargs)\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(date: datetime.datetime) ->str: date = date.strftime('%Y%m%d') return 'PB-{}-{}-{:03d}'.format(date, 'sip', randint(0, 100))
get_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(date: datetime.datetime) ->str:\n date = date.strftime('%Y%m%d')\n return 'PB-{}-{}-{:03d}'.format(date, 'sip', randint(0, 100))\n", "role": "user" }, { "content": "get_id", "role": "assistant" } ]
def x(self) ->List[Dependency]: dependencies_str = DB.get_hash_value(self.key, 'dependencies') dependencies = [] for dependency in ast.literal_eval(dependencies_str): dependencies.append(Dependency(dependency)) return dependencies
dependencies
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->List[Dependency]:\n dependencies_str = DB.get_hash_value(self.key, 'dependencies')\n dependencies = []\n for dependency in ast.literal_eval(dependencies_str):\n dependencies.append(Dependency(dependency))\n return dependencies\n", "role": "user" }, { "content": "dependencies", "role": "assistant" } ]
def x(self) ->List[Resource]: resources_str = DB.get_hash_value(self.key, 'resources_assigned') resources_assigned = [] for resource in ast.literal_eval(resources_str): resources_assigned.append(Resource(resource)) return resources_assigned
resources_assigned
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->List[Resource]:\n resources_str = DB.get_hash_value(self.key, 'resources_assigned')\n resources_assigned = []\n for resource in ast.literal_eval(resources_str):\n resources_assigned.append(Resource(resource))\n return resources_assigned\n", "role": "user" }, { "content": "resources_assigned", "role": "assistant" } ]
def x(self) ->List[WorkflowStage]: workflow_stages = [] stages = DB.get_hash_value(self.key, 'workflow_stages') for index in range(len(ast.literal_eval(stages))): workflow_stages.append(WorkflowStage(self.id, index)) return workflow_stages
workflow_stages
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->List[WorkflowStage]:\n workflow_stages = []\n stages = DB.get_hash_value(self.key, 'workflow_stages')\n for index in range(len(ast.literal_eval(stages))):\n workflow_stages.append(WorkflowStage(self.id, index))\n return workflow_stages\n", "role": "user" }, { "content": "workflow_stages", "role": "assistant" } ]
def x(self, resource_type: str, value: Union[str, int, float, bool], parameters: dict=None): if parameters is None: parameters = dict() resources = DB.get_hash_value(self.key, 'resources_assigned') resources = ast.literal_eval(resources) resources.append(dict(type=resource_type, value=value, parameters= parameters)) DB.set_hash_value(self.key, 'resources_assigned', resources)
add_assigned_resource
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, resource_type: str, value: Union[str, int, float, bool],\n parameters: dict=None):\n if parameters is None:\n parameters = dict()\n resources = DB.get_hash_value(self.key, 'resources_assigned')\n resources = ast.literal_eval(resources)\n resources.append(dict(type=resource_type, value=value, parameters=\n parameters))\n DB.set_hash_value(self.key, 'resources_assigned', resources)\n", "role": "user" }, { "content": "add_assigned_resource", "role": "assistant" } ]
def x(self, resource_type: str, value: Union[str, int, float, bool]=None, parameters: dict=None): resources = DB.get_hash_value(self.key, 'resources_assigned') resources = ast.literal_eval(resources) new_resources = [] for resource in resources: if resource['type'] != resource_type: new_resources.append(resource) elif value is not None and resource['value'] != value: new_resources.append(resource) elif parameters is not None and resource['parameters'] != parameters: new_resources.append(resource) DB.set_hash_value(self.key, 'resources_assigned', new_resources)
remove_assigned_resource
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, resource_type: str, value: Union[str, int, float, bool]=None,\n parameters: dict=None):\n resources = DB.get_hash_value(self.key, 'resources_assigned')\n resources = ast.literal_eval(resources)\n new_resources = []\n for resource in resources:\n if resource['type'] != resource_type:\n new_resources.append(resource)\n elif value is not None and resource['value'] != value:\n new_resources.append(resource)\n elif parameters is not None and resource['parameters'] != parameters:\n new_resources.append(resource)\n DB.set_hash_value(self.key, 'resources_assigned', new_resources)\n", "role": "user" }, { "content": "remove_assigned_resource", "role": "assistant" } ]
def x(self): LOG.debug('Aborting PB %s', self._id) self.set_status('aborted') pb_type = DB.get_hash_value(self.key, 'type') key = '{}:active'.format(self._type) DB.remove_from_list(key, self._id) key = '{}:active:{}'.format(self._type, pb_type) DB.remove_from_list(key, self._id) key = '{}:aborted'.format(self._type) DB.append_to_list(key, self._id) key = '{}:aborted:{}'.format(self._type, pb_type) DB.append_to_list(key, self._id) self._mark_updated()
abort
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n LOG.debug('Aborting PB %s', self._id)\n self.set_status('aborted')\n pb_type = DB.get_hash_value(self.key, 'type')\n key = '{}:active'.format(self._type)\n DB.remove_from_list(key, self._id)\n key = '{}:active:{}'.format(self._type, pb_type)\n DB.remove_from_list(key, self._id)\n key = '{}:aborted'.format(self._type)\n DB.append_to_list(key, self._id)\n key = '{}:aborted:{}'.format(self._type, pb_type)\n DB.append_to_list(key, self._id)\n self._mark_updated()\n", "role": "user" }, { "content": "abort", "role": "assistant" } ]
def x(self): timestamp = datetime.datetime.utcnow().isoformat() DB.set_hash_value(self.key, 'updated', timestamp)
mark_updated
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n timestamp = datetime.datetime.utcnow().isoformat()\n DB.set_hash_value(self.key, 'updated', timestamp)\n", "role": "user" }, { "content": "mark_updated", "role": "assistant" } ]
def x():
    _url = get_root_url()
    LOG.debug('GET Sub array list')
    sub_array_ids = sorted(DB.get_sub_array_ids())
    response = dict(sub_arrays=[])
    for array_id in sub_array_ids:
        array_summary = dict(sub_array_id=array_id)
        block_ids = DB.get_sub_array_sbi_ids(array_id)
        LOG.debug('Subarray IDs: %s', array_id)
        LOG.debug('SBI IDs: %s', block_ids)
        array_summary['num_scheduling_blocks'] = len(block_ids)
        array_summary['links'] = {'detail': '{}/sub-array/{}'.format(_url,
            array_id)}
        response['sub_arrays'].append(array_summary)
    response['links'] = dict(self=request.url, home=_url)
    return response, status.HTTP_200_OK
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n _url = get_root_url()\n LOG.debug('GET Sub array list')\n sub_array_ids = sorted(DB.get_sub_array_ids())\n response = dict(sub_arrays=[])\n for array_id in sub_array_ids:\n array_summary = dict(sub_arrary_id=array_id)\n block_ids = DB.get_sub_array_sbi_ids(array_id)\n LOG.debug('Subarray IDs: %s', array_id)\n LOG.debug('SBI IDs: %s', block_ids)\n array_summary['num_scheduling_blocks'] = len(block_ids)\n array_summary['links'] = {'detail': '{}/sub-array/{}'.format(_url,\n array_id)}\n response['sub_arrays'].append(array_summary)\n response['links'] = dict(self=request.url, home=_url)\n return response, status.HTTP_200_OK\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(): _url = get_root_url() LOG.debug('POST subarray SBI.') sbi_config = {} DB.add_sbi(sbi_config) response = dict() return response, status.HTTP_200_OK
post
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n _url = get_root_url()\n LOG.debug('POST subarray SBI.')\n sbi_config = {}\n DB.add_sbi(sbi_config)\n response = dict()\n return response, status.HTTP_200_OK\n", "role": "user" }, { "content": "post", "role": "assistant" } ]
def x(self, parameters_dict): DB.set_hash_value(self._key, 'parameters', parameters_dict) self.publish('parameters_updated')
set_parameters
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, parameters_dict):\n DB.set_hash_value(self._key, 'parameters', parameters_dict)\n self.publish('parameters_updated')\n", "role": "user" }, { "content": "set_parameters", "role": "assistant" } ]
def x(self) ->List[str]: return ast.literal_eval(DB.get_hash_value(self._key, 'sbi_ids'))
sbi_ids
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->List[str]:\n return ast.literal_eval(DB.get_hash_value(self._key, 'sbi_ids'))\n", "role": "user" }, { "content": "sbi_ids", "role": "assistant" } ]
def x(self, sbi_config: dict, schema_path: str=None): if not self.active: raise RuntimeError('Unable to add SBIs to inactive subarray!') sbi_config['subarray_id'] = self._id sbi = SchedulingBlockInstance.from_config(sbi_config, schema_path) self._add_sbi_id(sbi_config['id']) return sbi
configure_sbi
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, sbi_config: dict, schema_path: str=None):\n if not self.active:\n raise RuntimeError('Unable to add SBIs to inactive subarray!')\n sbi_config['subarray_id'] = self._id\n sbi = SchedulingBlockInstance.from_config(sbi_config, schema_path)\n self._add_sbi_id(sbi_config['id'])\n return sbi\n", "role": "user" }, { "content": "configure_sbi", "role": "assistant" } ]
def x(self): for sbi_id in self.sbi_ids: sbi = SchedulingBlockInstance(sbi_id) sbi.abort() self.set_state('ABORTED')
abort
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n for sbi_id in self.sbi_ids:\n sbi = SchedulingBlockInstance(sbi_id)\n sbi.abort()\n self.set_state('ABORTED')\n", "role": "user" }, { "content": "abort", "role": "assistant" } ]
def x(self): DB.set_hash_value(self._key, 'active', 'False') for sbi_id in self.sbi_ids: SchedulingBlockInstance(sbi_id).clear_subarray() DB.set_hash_value(self._key, 'sbi_ids', []) self.publish('subarray_deactivated')
deactivate
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n DB.set_hash_value(self._key, 'active', 'False')\n for sbi_id in self.sbi_ids:\n SchedulingBlockInstance(sbi_id).clear_subarray()\n DB.set_hash_value(self._key, 'sbi_ids', [])\n self.publish('subarray_deactivated')\n", "role": "user" }, { "content": "deactivate", "role": "assistant" } ]
def x(self, sbi_id): sbi_ids = self.sbi_ids sbi_ids.remove(sbi_id) DB.set_hash_value(self._key, 'sbi_ids', sbi_ids)
remove_sbi_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, sbi_id):\n sbi_ids = self.sbi_ids\n sbi_ids.remove(sbi_id)\n DB.set_hash_value(self._key, 'sbi_ids', sbi_ids)\n", "role": "user" }, { "content": "remove_sbi_id", "role": "assistant" } ]
def x(self, sbi_id): sbi_ids = self.sbi_ids sbi_ids.append(sbi_id) DB.set_hash_value(self._key, 'sbi_ids', sbi_ids)
add_sbi_id
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, sbi_id):\n sbi_ids = self.sbi_ids\n sbi_ids.append(sbi_id)\n DB.set_hash_value(self._key, 'sbi_ids', sbi_ids)\n", "role": "user" }, { "content": "add_sbi_id", "role": "assistant" } ]
def x(self) ->Union[Event, None]: message = self._queue.get_message() if message and message['type'] == 'message': event_id = DB.get_event(self._pub_key, self._processed_key) event_data_str = DB.get_hash_value(self._data_key, event_id) event_dict = ast.literal_eval(event_data_str) event_dict['id'] = event_id event_dict['subscriber'] = self._subscriber return Event.from_config(event_dict) return None
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->Union[Event, None]:\n message = self._queue.get_message()\n if message and message['type'] == 'message':\n event_id = DB.get_event(self._pub_key, self._processed_key)\n event_data_str = DB.get_hash_value(self._data_key, event_id)\n event_dict = ast.literal_eval(event_data_str)\n event_dict['id'] = event_id\n event_dict['subscriber'] = self._subscriber\n return Event.from_config(event_dict)\n return None\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, process=True) ->List[Event]: LOG.debug('Getting published events (%s)', self._pub_key) if process: LOG.debug('Marking returned published events as processed.') DB.watch(self._pub_key, pipeline=True) event_ids = DB.get_list(self._pub_key, pipeline=True) if event_ids: DB.delete(self._pub_key, pipeline=True) DB.append_to_list(self._processed_key, *event_ids, pipeline=True) DB.execute() else: event_ids = DB.get_list(self._pub_key) events = [] for event_id in event_ids[::-1]: event_str = DB.get_hash_value(self._data_key, event_id) event_dict = ast.literal_eval(event_str) event_dict['id'] = event_id event = Event.from_config(event_dict) LOG.debug('Loaded event: %s (%s)', event.id, event.type) events.append(event) return events
get_published_events
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, process=True) ->List[Event]:\n LOG.debug('Getting published events (%s)', self._pub_key)\n if process:\n LOG.debug('Marking returned published events as processed.')\n DB.watch(self._pub_key, pipeline=True)\n event_ids = DB.get_list(self._pub_key, pipeline=True)\n if event_ids:\n DB.delete(self._pub_key, pipeline=True)\n DB.append_to_list(self._processed_key, *event_ids, pipeline=True)\n DB.execute()\n else:\n event_ids = DB.get_list(self._pub_key)\n events = []\n for event_id in event_ids[::-1]:\n event_str = DB.get_hash_value(self._data_key, event_id)\n event_dict = ast.literal_eval(event_str)\n event_dict['id'] = event_id\n event = Event.from_config(event_dict)\n LOG.debug('Loaded event: %s (%s)', event.id, event.type)\n events.append(event)\n return events\n", "role": "user" }, { "content": "get_published_events", "role": "assistant" } ]
def x(self) ->List[Event]: event_ids = DB.get_list(self._processed_key) events = [] for event_id in event_ids: event_str = DB.get_hash_value(self._data_key, event_id) event_dict = ast.literal_eval(event_str) event_dict['id'] = event_id event_dict['subscriber'] = self._subscriber events.append(Event.from_config(event_dict)) return events
get_processed_events
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self) ->List[Event]:\n event_ids = DB.get_list(self._processed_key)\n events = []\n for event_id in event_ids:\n event_str = DB.get_hash_value(self._data_key, event_id)\n event_dict = ast.literal_eval(event_str)\n event_dict['id'] = event_id\n event_dict['subscriber'] = self._subscriber\n events.append(Event.from_config(event_dict))\n return events\n", "role": "user" }, { "content": "get_processed_events", "role": "assistant" } ]
def x(self, event_id: str):
    event_ids = DB.get_list(self._processed_key)
    if event_id not in event_ids:
        raise KeyError(
            'Unable to complete event. Event {} has not been processed (i.e. it is not in the processed list).'
            .format(event_id))
    DB.remove_from_list(self._processed_key, event_id, pipeline=True)
    key = _keys.completed_events(self._object_type, self._subscriber)
    DB.append_to_list(key, event_id, pipeline=True)
    DB.execute()
complete_event
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, event_id: str):\n event_ids = DB.get_list(self._processed_key)\n if event_id not in event_ids:\n raise KeyError(\n 'Unable to complete event. Event {} has not been processed (ie. it is not in the processed list).'\n .format(event_id))\n DB.remove_from_list(self._processed_key, event_id, pipeline=True)\n key = _keys.completed_events(self._object_type, self._subscriber)\n DB.append_to_list(key, event_id, pipeline=True)\n DB.execute()\n", "role": "user" }, { "content": "complete_event", "role": "assistant" } ]
def x(workflow_definition: dict, templates_root: str): schema_path = join(dirname(__file__), 'schema', 'workflow_definition.json') with open(schema_path, 'r') as file: schema = json.loads(file.read()) jsonschema.validate(workflow_definition, schema) _id = workflow_definition['id'] _version = workflow_definition['version'] _load_templates(workflow_definition, templates_root) workflow_id = workflow_definition['id'] version = workflow_definition['version'] name = 'workflow_definitions:{}:{}'.format(workflow_id, version) if DB.get_keys(name): raise KeyError('Workflow definition already exists: {}'.format(name)) DB.save_dict(name, workflow_definition, hierarchical=False)
add
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(workflow_definition: dict, templates_root: str):\n schema_path = join(dirname(__file__), 'schema', 'workflow_definition.json')\n with open(schema_path, 'r') as file:\n schema = json.loads(file.read())\n jsonschema.validate(workflow_definition, schema)\n _id = workflow_definition['id']\n _version = workflow_definition['version']\n _load_templates(workflow_definition, templates_root)\n workflow_id = workflow_definition['id']\n version = workflow_definition['version']\n name = 'workflow_definitions:{}:{}'.format(workflow_id, version)\n if DB.get_keys(name):\n raise KeyError('Workflow definition already exists: {}'.format(name))\n DB.save_dict(name, workflow_definition, hierarchical=False)\n", "role": "user" }, { "content": "add", "role": "assistant" } ]
def x(workflow_id, workflow_version): name = 'workflow_definitions:{}:{}'.format(workflow_id, workflow_version) workflow_definition = dict(id=workflow_id, version=workflow_version, stages=[]) DB.save_dict(name, workflow_definition, hierarchical=False)
register
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(workflow_id, workflow_version):\n name = 'workflow_definitions:{}:{}'.format(workflow_id, workflow_version)\n workflow_definition = dict(id=workflow_id, version=workflow_version,\n stages=[])\n DB.save_dict(name, workflow_definition, hierarchical=False)\n", "role": "user" }, { "content": "register", "role": "assistant" } ]
def x(workflow_id: str=None, workflow_version: str=None): if workflow_id is None and workflow_version is None: keys = DB.get_keys('workflow_definitions:*') DB.delete(*keys) elif workflow_id is not None and workflow_version is None: keys = DB.get_keys('workflow_definitions:{}:*'.format(workflow_id)) DB.delete(*keys) elif workflow_id is None and workflow_version is not None: keys = DB.get_keys('workflow_definitions:*:{}'.format(workflow_version) ) DB.delete(*keys) else: name = 'workflow_definitions:{}:{}'.format(workflow_id, workflow_version) DB.delete(name)
delete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(workflow_id: str=None, workflow_version: str=None):\n if workflow_id is None and workflow_version is None:\n keys = DB.get_keys('workflow_definitions:*')\n DB.delete(*keys)\n elif workflow_id is not None and workflow_version is None:\n keys = DB.get_keys('workflow_definitions:{}:*'.format(workflow_id))\n DB.delete(*keys)\n elif workflow_id is None and workflow_version is not None:\n keys = DB.get_keys('workflow_definitions:*:{}'.format(workflow_version)\n )\n DB.delete(*keys)\n else:\n name = 'workflow_definitions:{}:{}'.format(workflow_id,\n workflow_version)\n DB.delete(name)\n", "role": "user" }, { "content": "delete", "role": "assistant" } ]
def x(workflow_id: str, workflow_version: str) ->dict: name = 'workflow_definitions:{}:{}'.format(workflow_id, workflow_version) workflow = DB.get_hash_dict(name) workflow['stages'] = ast.literal_eval(workflow['stages']) return workflow
get_workflow
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(workflow_id: str, workflow_version: str) ->dict:\n name = 'workflow_definitions:{}:{}'.format(workflow_id, workflow_version)\n workflow = DB.get_hash_dict(name)\n workflow['stages'] = ast.literal_eval(workflow['stages'])\n return workflow\n", "role": "user" }, { "content": "get_workflow", "role": "assistant" } ]
def x() ->dict: keys = DB.get_keys('workflow_definitions:*') known_workflows = dict() for key in keys: values = key.split(':') if values[1] not in known_workflows: known_workflows[values[1]] = list() known_workflows[values[1]].append(values[2]) return known_workflows
get_workflows
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x() ->dict:\n keys = DB.get_keys('workflow_definitions:*')\n known_workflows = dict()\n for key in keys:\n values = key.split(':')\n if values[1] not in known_workflows:\n known_workflows[values[1]] = list()\n known_workflows[values[1]].append(values[2])\n return known_workflows\n", "role": "user" }, { "content": "get_workflows", "role": "assistant" } ]
def x(workflow: dict, templates_root: str): workflow_template_path = join(templates_root, workflow['id'], workflow[ 'version']) for i, stage_config in enumerate(workflow['stages']): stage_template_path = join(workflow_template_path, stage_config[ 'id'], stage_config['version']) for config_type in ['ee_config', 'app_config']: for key, value in stage_config[config_type].items(): if 'template' in key: template_file = join(stage_template_path, value) with open(template_file, 'r') as file: template_str = file.read() workflow['stages'][i][config_type][key] = template_str
load_templates
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(workflow: dict, templates_root: str):\n workflow_template_path = join(templates_root, workflow['id'], workflow[\n 'version'])\n for i, stage_config in enumerate(workflow['stages']):\n stage_template_path = join(workflow_template_path, stage_config[\n 'id'], stage_config['version'])\n for config_type in ['ee_config', 'app_config']:\n for key, value in stage_config[config_type].items():\n if 'template' in key:\n template_file = join(stage_template_path, value)\n with open(template_file, 'r') as file:\n template_str = file.read()\n workflow['stages'][i][config_type][key] = template_str\n", "role": "user" }, { "content": "load_templates", "role": "assistant" } ]
def x():
    parser = argparse.ArgumentParser(description='Delete PB devices.')
    parser.add_argument('num_pb', type=int, help=
        'Number of PB devices to delete.')
    args = parser.parse_args()
    log = logging.getLogger('sip.tango_control.subarray')
    tango_db = Database()
    log.info('Deleting PB devices:')
    for index in range(args.num_pb):
        name = 'sip_sdp/pb/{:05d}'.format(index)
        log.info('\t%s', name)
        tango_db.delete_device(name)
delete_pb_devices
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n parser = argparse.ArgumentParser(description='Register PB devices.')\n parser.add_argument('num_pb', type=int, help=\n 'Number of PBs devices to register.')\n args = parser.parse_args()\n log = logging.getLogger('sip.tango_control.subarray')\n tango_db = Database()\n log.info('Deleting PB devices:')\n for index in range(args.num_pb):\n name = 'sip_sdp/pb/{:05d}'.format(index)\n log.info('\\t%s', name)\n tango_db.delete_device(name)\n", "role": "user" }, { "content": "delete_pb_devices", "role": "assistant" } ]
def x(): if len(sys.argv) != 2: raise RuntimeError('Usage: python3 async_send.py <json config>') sip_logging.init_logger(show_thread=False) spead_config = json.loads(sys.argv[1]) try: _path = os.path.dirname(os.path.abspath(__file__)) schema_path = os.path.join(_path, 'config_schema.json') with open(schema_path) as schema_file: schema = json.load(schema_file) validate(spead_config, schema) except ValidationError as error: print(error.cause) raise sender = SpeadSender(spead_config) sender.run()
main
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x():\n if len(sys.argv) != 2:\n raise RuntimeError('Usage: python3 async_send.py <json config>')\n sip_logging.init_logger(show_thread=False)\n spead_config = json.loads(sys.argv[1])\n try:\n _path = os.path.dirname(os.path.abspath(__file__))\n schema_path = os.path.join(_path, 'config_schema.json')\n with open(schema_path) as schema_file:\n schema = json.load(schema_file)\n validate(spead_config, schema)\n except ValidationError as error:\n print(error.cause)\n raise\n sender = SpeadSender(spead_config)\n sender.run()\n", "role": "user" }, { "content": "main", "role": "assistant" } ]
def x(heap_data, i_chan):
    now = datetime.datetime.utcnow()
    time_full = now.timestamp()
    time_count = int(time_full)
    time_fraction = int((time_full - time_count) * (2 ** 32 - 1))
    diff = now - now.replace(hour=0, minute=0, second=0, microsecond=0)
    time_data = diff.seconds + 1e-06 * diff.microseconds
    heap_data['visibility_timestamp_count'] = time_count
    heap_data['visibility_timestamp_fraction'] = time_fraction
    heap_data['correlator_output_data']['VIS'][:] = time_data + i_chan * 1.0j
fill_buffer
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(heap_data, i_chan):\n now = datetime.datetime.utcnow()\n time_full = now.timestamp()\n time_count = int(time_full)\n time_fraction = int((time_full - time_count) * (2 ** 32 - 1))\n diff = now - now.replace(hour=0, minute=0, second=0, microsecond=0)\n time_data = diff.seconds + 1e-06 * diff.microseconds\n heap_data['visibility_timestamp_count'] = time_count\n heap_data['visibility_timestamp_fraction'] = time_fraction\n heap_data['correlator_output_data']['VIS'][:][:\n ] = time_data + i_chan * 1.0j\n", "role": "user" }, { "content": "fill_buffer", "role": "assistant" } ]
async def _run_loop(self, executor): """Main loop.""" max_duration_sec = self._config['max_duration_sec'] num_stations = self._config['heap']['num_stations'] num_pols = self._config['heap']['num_pols'] num_baselines = num_stations * (num_stations + 1) // 2 reporting_interval_sec = self._config['reporting_interval_sec'] start_chan = self._config['start_channel'] dtype = [('TCI', 'i1'), ('FD', 'u1'), ('VIS', '<c8', num_pols)] descriptor = {'visibility_timestamp_count': {'id': 24576, 'dtype': '<u4'}, 'visibility_timestamp_fraction': {'id': 24577, 'dtype': '<u4'}, 'visibility_channel_id': {'id': 24578, 'dtype': '<u4'}, 'visibility_channel_count': {'id': 24579, 'dtype': '<u4'}, 'visibility_baseline_polarisation_id': {'id': 24580, 'dtype': '<u4' }, 'visibility_baseline_count': {'id': 24581, 'dtype': '<u4'}, 'phase_bin_id': {'id': 24582, 'dtype': '<u2'}, 'phase_bin_count': { 'id': 24583, 'dtype': '<u2'}, 'scan_id': {'id': 24584, 'dtype': '<u8'}, 'visibility_hardware_id': {'id': 24585, 'dtype': '<u4'}, 'correlator_output_data': {'id': 24586, 'dtype': dtype, 'shape': ( num_baselines,)}} for i_buffer in range(2): self._buffer.append([]) for _ in range(len(self._streams)): heap_data = {'visibility_timestamp_count': 0, 'visibility_timestamp_fraction': 0, 'correlator_output_data': numpy.zeros((num_baselines,), dtype=dtype)} self._buffer[i_buffer].append(heap_data) tasks = [] for i_stream, (stream, item_group) in enumerate(self._streams): for key, item in descriptor.items(): item_shape = item['shape'] if 'shape' in item else tuple() item_group.add_item(id=item['id'], name=key, description='', shape=item_shape, dtype=item['dtype']) item_group['visibility_channel_id'].value = i_stream + start_chan item_group['visibility_channel_count'].value = 1 item_group['visibility_baseline_count'].value = num_baselines tasks.append(stream.async_send_heap(item_group.get_start())) await asyncio.gather(*tasks) num_streams = self._config['num_streams'] data_size = num_baselines * (num_pols * 8 + 2) / 1000000.0 self._log.info('Heap size is %.3f MB', data_size) done = False loop = asyncio.get_event_loop() heaps_sent = 0 i_time = 0 start_time = time.time() timer1 = time.time() while not done: tasks = [] if i_time > 0: i_buffer = (i_time - 1) % 2 for i_stream, (stream, item_group) in enumerate(self._streams): for key, value in self._buffer[i_buffer][i_stream].items(): item_group[key].value = value tasks.append(stream.async_send_heap(item_group.get_heap( descriptors='all', data='all'))) heaps_sent += 1 i_buffer = i_time % 2 for i_stream in range(num_streams): tasks.append(loop.run_in_executor(executor, self.fill_buffer, self._buffer[i_buffer][i_stream], i_stream)) await asyncio.gather(*tasks) i_time += 1 now = time.time() if now - timer1 >= reporting_interval_sec: self._log.info('Sent %i heaps in %.3f sec (%.1f MB/s)', heaps_sent, now - timer1, data_size * heaps_sent / (now - timer1)) heaps_sent = 0 timer1 = now if now - start_time > max_duration_sec > 0: done = True tasks = [] for stream, item_group in self._streams: tasks.append(stream.async_send_heap(item_group.get_end())) await asyncio.gather(*tasks)
run_loop
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "async def _run_loop(self, executor):\n \"\"\"Main loop.\"\"\"\n max_duration_sec = self._config['max_duration_sec']\n num_stations = self._config['heap']['num_stations']\n num_pols = self._config['heap']['num_pols']\n num_baselines = num_stations * (num_stations + 1) // 2\n reporting_interval_sec = self._config['reporting_interval_sec']\n start_chan = self._config['start_channel']\n dtype = [('TCI', 'i1'), ('FD', 'u1'), ('VIS', '<c8', num_pols)]\n descriptor = {'visibility_timestamp_count': {'id': 24576, 'dtype':\n '<u4'}, 'visibility_timestamp_fraction': {'id': 24577, 'dtype':\n '<u4'}, 'visibility_channel_id': {'id': 24578, 'dtype': '<u4'},\n 'visibility_channel_count': {'id': 24579, 'dtype': '<u4'},\n 'visibility_baseline_polarisation_id': {'id': 24580, 'dtype': '<u4'\n }, 'visibility_baseline_count': {'id': 24581, 'dtype': '<u4'},\n 'phase_bin_id': {'id': 24582, 'dtype': '<u2'}, 'phase_bin_count': {\n 'id': 24583, 'dtype': '<u2'}, 'scan_id': {'id': 24584, 'dtype':\n '<u8'}, 'visibility_hardware_id': {'id': 24585, 'dtype': '<u4'},\n 'correlator_output_data': {'id': 24586, 'dtype': dtype, 'shape': (\n num_baselines,)}}\n for i_buffer in range(2):\n self._buffer.append([])\n for _ in range(len(self._streams)):\n heap_data = {'visibility_timestamp_count': 0,\n 'visibility_timestamp_fraction': 0,\n 'correlator_output_data': numpy.zeros((num_baselines,),\n dtype=dtype)}\n self._buffer[i_buffer].append(heap_data)\n tasks = []\n for i_stream, (stream, item_group) in enumerate(self._streams):\n for key, item in descriptor.items():\n item_shape = item['shape'] if 'shape' in item else tuple()\n item_group.add_item(id=item['id'], name=key, description='',\n shape=item_shape, dtype=item['dtype'])\n item_group['visibility_channel_id'].value = i_stream + start_chan\n item_group['visibility_channel_count'].value = 1\n item_group['visibility_baseline_count'].value = num_baselines\n tasks.append(stream.async_send_heap(item_group.get_start()))\n await asyncio.gather(*tasks)\n num_streams = self._config['num_streams']\n data_size = num_baselines * (num_pols * 8 + 2) / 1000000.0\n self._log.info('Heap size is %.3f MB', data_size)\n done = False\n loop = asyncio.get_event_loop()\n heaps_sent = 0\n i_time = 0\n start_time = time.time()\n timer1 = time.time()\n while not done:\n tasks = []\n if i_time > 0:\n i_buffer = (i_time - 1) % 2\n for i_stream, (stream, item_group) in enumerate(self._streams):\n for key, value in self._buffer[i_buffer][i_stream].items():\n item_group[key].value = value\n tasks.append(stream.async_send_heap(item_group.get_heap(\n descriptors='all', data='all')))\n heaps_sent += 1\n i_buffer = i_time % 2\n for i_stream in range(num_streams):\n tasks.append(loop.run_in_executor(executor, self.fill_buffer,\n self._buffer[i_buffer][i_stream], i_stream))\n await asyncio.gather(*tasks)\n i_time += 1\n now = time.time()\n if now - timer1 >= reporting_interval_sec:\n self._log.info('Sent %i heaps in %.3f sec (%.1f MB/s)',\n heaps_sent, now - timer1, data_size * heaps_sent / (now -\n timer1))\n heaps_sent = 0\n timer1 = now\n if now - start_time > max_duration_sec > 0:\n done = True\n tasks = []\n for stream, item_group in self._streams:\n tasks.append(stream.async_send_heap(item_group.get_end()))\n await asyncio.gather(*tasks)\n", "role": "user" }, { "content": "run_loop", "role": "assistant" } ]
def x(self): executor = concurrent.futures.ThreadPoolExecutor(max_workers=self. _config['num_workers']) now = int(datetime.datetime.utcnow().timestamp()) start_time = (now + 29) // 30 * 30 self._log.info('Waiting until {}'.format(datetime.datetime. fromtimestamp(start_time))) while int(datetime.datetime.utcnow().timestamp()) < start_time: time.sleep(0.1) loop = asyncio.get_event_loop() try: loop.run_until_complete(self._run_loop(executor)) except KeyboardInterrupt: pass finally: self._log.info('Shutting down, closing streams...') tasks = [] for stream, item_group in self._streams: tasks.append(stream.async_send_heap(item_group.get_end())) loop.run_until_complete(asyncio.gather(*tasks)) self._log.info('... finished.') executor.shutdown()
run
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.\n _config['num_workers'])\n now = int(datetime.datetime.utcnow().timestamp())\n start_time = (now + 29) // 30 * 30\n self._log.info('Waiting until {}'.format(datetime.datetime.\n fromtimestamp(start_time)))\n while int(datetime.datetime.utcnow().timestamp()) < start_time:\n time.sleep(0.1)\n loop = asyncio.get_event_loop()\n try:\n loop.run_until_complete(self._run_loop(executor))\n except KeyboardInterrupt:\n pass\n finally:\n self._log.info('Shutting down, closing streams...')\n tasks = []\n for stream, item_group in self._streams:\n tasks.append(stream.async_send_heap(item_group.get_end()))\n loop.run_until_complete(asyncio.gather(*tasks))\n self._log.info('... finished.')\n executor.shutdown()\n", "role": "user" }, { "content": "run", "role": "assistant" } ]
def x(self, object_id: str): if object_id in self.active: DB.remove_from_list('{}:active'.format(self.type), object_id) DB.append_to_list('{}:completed'.format(self.type), object_id)
set_complete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, object_id: str):\n if object_id in self.active:\n DB.remove_from_list('{}:active'.format(self.type), object_id)\n DB.append_to_list('{}:completed'.format(self.type), object_id)\n", "role": "user" }, { "content": "set_complete", "role": "assistant" } ]
def x(self, object_id: str, event_type: str, event_data: dict=None): object_key = SchedulingObject.get_key(self.type, object_id) publish(event_type=event_type, event_data=event_data, object_type=self. type, object_id=object_id, object_key=object_key, origin=None)
publish
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, object_id: str, event_type: str, event_data: dict=None):\n object_key = SchedulingObject.get_key(self.type, object_id)\n publish(event_type=event_type, event_data=event_data, object_type=self.\n type, object_id=object_id, object_key=object_key, origin=None)\n", "role": "user" }, { "content": "publish", "role": "assistant" } ]
def x(pb: ProcessingBlock, pb_id: str, workflow_stage_dict: dict,
    workflow_stage: WorkflowStage, docker: DockerSwarmClient):
    stage_data = workflow_stage_dict[workflow_stage.id]
    stage_data['start'] = False
    if stage_data['status'] == 'none':
        if not workflow_stage.dependencies:
            stage_data['start'] = True
        else:
            dependency_status = []
            for dependency in workflow_stage.dependencies:
                dependency_status.append(workflow_stage_dict[dependency[
                    'value']]['status'] == 'complete')
            stage_data['start'] = all(dependency_status)
    if stage_data['start']:
        LOG.info('-- Starting workflow stage: %s --', workflow_stage.id)
        LOG.info('Configuring EE templates.')
        args_template = jinja2.Template(workflow_stage.args_template)
        stage_params = pb.workflow_parameters[workflow_stage.id]
        template_params = {**workflow_stage.config, **stage_params}
        args = args_template.render(stage=template_params)
        LOG.info('Resolving workflow script arguments.')
        args = json.dumps(json.loads(args))
        compose_template = jinja2.Template(workflow_stage.compose_template)
        compose_str = compose_template.render(stage=dict(args=args))
        compose_dict = yaml.safe_load(compose_str)
        service_names = list(compose_dict['services'].keys())
        new_service_names = ['{}_{}_{}'.format(pb_id, pb.workflow_id, name) for
            name in service_names]
        for new, old in zip(new_service_names, service_names):
            compose_dict['services'][new] = compose_dict['services'].pop(old)
        compose_str = yaml.dump(compose_dict)
        service_ids = docker.create_services(compose_str)
        LOG.info('Starting workflow containers:')
        for service_id in service_ids:
            service_name = docker.get_service_name(service_id)
            LOG.info('  %s, %s ', service_name, service_id)
            stage_data['services'][service_id] = dict(name=service_name,
                status='running', complete=False)
        LOG.info('Created Services: %s', service_ids)
        stage_data['status'] = 'running'
start_workflow_stages
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(pb: ProcessingBlock, pb_id: str, workflow_stage_dict: dict,\n workflow_stage: WorkflowStage, docker: DockerSwarmClient):\n stage_data = workflow_stage_dict[workflow_stage.id]\n stage_data['start'] = False\n if stage_data['status'] == 'none':\n if not workflow_stage.dependencies:\n stage_data['start'] = True\n else:\n dependency_status = []\n for dependency in workflow_stage.dependencies:\n dependency_status.append(workflow_stage_dict[dependency[\n 'value']]['status'] == 'complete')\n stage_data['start'] = all(dependency_status)\n if stage_data['start']:\n LOG.info('-- Starting workflow stage: %s --', workflow_stage.id)\n LOG.info('Configuring EE templates.')\n args_template = jinja2.Template(workflow_stage.args_template)\n stage_params = pb.workflow_parameters[workflow_stage.id]\n template_params = {**workflow_stage.config, **stage_params}\n args = args_template.render(stage=template_params)\n LOG.info('Resolving workflow script arguments.')\n args = json.dumps(json.loads(args))\n compose_template = jinja2.Template(workflow_stage.compose_template)\n compose_str = compose_template.render(stage=dict(args=args))\n compose_dict = yaml.load(compose_str)\n service_names = compose_dict['services'].keys()\n new_service_names = ['{}_{}_{}'.format(pb_id, pb.workflow_id, name) for\n name in service_names]\n for new, old in zip(new_service_names, service_names):\n compose_dict['services'][new] = compose_dict['services'].pop(old)\n compose_str = yaml.dump(compose_dict)\n service_ids = docker.create_services(compose_str)\n LOG.info('Staring workflow containers:')\n for service_id in service_ids:\n service_name = docker.get_service_name(service_id)\n LOG.info(' %s, %s ', service_name, service_id)\n stage_data['services'][service_id] = {}\n LOG.info('Created Services: %s', service_ids)\n stage_data['services'][service_id] = dict(name=docker.\n get_service_name(service_id), status='running', complete=False)\n stage_data['status'] = 'running'\n", "role": "user" }, { "content": "start_workflow_stages", "role": "assistant" } ]
def x(stage_data: dict, workflow_stage: WorkflowStage, docker: DockerSwarmClient): service_status_complete = [] if stage_data['status'] != 'complete': for service_id, service_dict in stage_data['services'].items(): service_state = docker.get_service_state(service_id) if service_state == 'shutdown': docker.delete_service(service_id) service_dict['status'] = service_state service_dict['complete'] = service_state == 'shutdown' service_status_complete.append(service_dict['complete']) if all(service_status_complete): LOG.info('Workflow stage service %s complete!', workflow_stage.id) stage_data['status'] = 'complete'
update_workflow_stages
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(stage_data: dict, workflow_stage: WorkflowStage, docker:\n DockerSwarmClient):\n service_status_complete = []\n if stage_data['status'] != 'complete':\n for service_id, service_dict in stage_data['services'].items():\n service_state = docker.get_service_state(service_id)\n if service_state == 'shutdown':\n docker.delete_service(service_id)\n service_dict['status'] = service_state\n service_dict['complete'] = service_state == 'shutdown'\n service_status_complete.append(service_dict['complete'])\n if all(service_status_complete):\n LOG.info('Workflow stage service %s complete!',\n workflow_stage.id)\n stage_data['status'] = 'complete'\n", "role": "user" }, { "content": "update_workflow_stages", "role": "assistant" } ]
def x(pb: ProcessingBlock, workflow_stage_dict: dict, docker: DockerSwarmClient
    ):
    _abort_flag = False  # Placeholder: never set, so the abort path below is currently unreachable.
    if _abort_flag:
        for workflow_stage in pb.workflow_stages:
            for service_id, _ in workflow_stage_dict[workflow_stage.id][
                    'services'].items():
                docker.delete_service(service_id)
                LOG.info('Deleted Service Id %s', service_id)
        return True
    return False
abort_workflow
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(pb: ProcessingBlock, workflow_stage_dict: dict, docker: DockerSwarmClient\n ):\n _abort_flag = False\n if _abort_flag:\n for workflow_stage in pb.workflow_stages:\n for service_id, _ in workflow_stage_dict[workflow_stage.id][\n 'services'].items():\n docker.delete_service(service_id)\n LOG.info('Deleted Service Id %s', service_id)\n return True\n return False\n", "role": "user" }, { "content": "abort_workflow", "role": "assistant" } ]
def x(workflow_stage_dict: dict): complete_stages = [] for _, stage_config in workflow_stage_dict.items(): complete_stages.append(stage_config['status'] == 'complete') if all(complete_stages): LOG.info('PB workflow complete!') return True return False
workflow_complete
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(workflow_stage_dict: dict):\n complete_stages = []\n for _, stage_config in workflow_stage_dict.items():\n complete_stages.append(stage_config['status'] == 'complete')\n if all(complete_stages):\n LOG.info('PB workflow complete!')\n return True\n return False\n", "role": "user" }, { "content": "workflow_complete", "role": "assistant" } ]
def x(pb_id: str, log_level='DEBUG'): init_logger('sip', show_log_origin=True, propagate=False, log_level= log_level) LOG.info('+' * 40) LOG.info('+ Executing Processing block: %s!', pb_id) LOG.info('+' * 40) LOG.info('Processing Block Controller version: %s', __version__) LOG.info('Docker Swarm API version: %s', sip_swarm_api_version) LOG.info('Configuration database API version: %s', config_db_version) pb = ProcessingBlock(pb_id) LOG.info('Starting workflow %s %s', pb.workflow_id, pb.workflow_version) pb.set_status('running') docker = DockerSwarmClient() workflow_stage_dict = {} for stage in pb.workflow_stages: workflow_stage_dict[stage.id] = deepcopy(stage.config) workflow_stage_dict[stage.id]['services'] = dict() while True: time.sleep(0.1) for workflow_stage in pb.workflow_stages: _start_workflow_stages(pb, pb_id, workflow_stage_dict, workflow_stage, docker) _update_workflow_stages(workflow_stage_dict[workflow_stage.id], workflow_stage, docker) if _abort_workflow(pb, workflow_stage_dict, docker): break if _workflow_complete(workflow_stage_dict): break pb_list = ProcessingBlockList() pb_list.set_complete(pb_id) pb.set_status('completed') LOG.info('-' * 40) LOG.info('- Destroying PBC for %s', pb_id) LOG.info('-' * 40) return pb.status
execute_processing_block
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(pb_id: str, log_level='DEBUG'):\n init_logger('sip', show_log_origin=True, propagate=False, log_level=\n log_level)\n LOG.info('+' * 40)\n LOG.info('+ Executing Processing block: %s!', pb_id)\n LOG.info('+' * 40)\n LOG.info('Processing Block Controller version: %s', __version__)\n LOG.info('Docker Swarm API version: %s', sip_swarm_api_version)\n LOG.info('Configuration database API version: %s', config_db_version)\n pb = ProcessingBlock(pb_id)\n LOG.info('Starting workflow %s %s', pb.workflow_id, pb.workflow_version)\n pb.set_status('running')\n docker = DockerSwarmClient()\n workflow_stage_dict = {}\n for stage in pb.workflow_stages:\n workflow_stage_dict[stage.id] = deepcopy(stage.config)\n workflow_stage_dict[stage.id]['services'] = dict()\n while True:\n time.sleep(0.1)\n for workflow_stage in pb.workflow_stages:\n _start_workflow_stages(pb, pb_id, workflow_stage_dict,\n workflow_stage, docker)\n _update_workflow_stages(workflow_stage_dict[workflow_stage.id],\n workflow_stage, docker)\n if _abort_workflow(pb, workflow_stage_dict, docker):\n break\n if _workflow_complete(workflow_stage_dict):\n break\n pb_list = ProcessingBlockList()\n pb_list.set_complete(pb_id)\n pb.set_status('completed')\n LOG.info('-' * 40)\n LOG.info('- Destroying PBC for %s', pb_id)\n LOG.info('-' * 40)\n return pb.status\n", "role": "user" }, { "content": "execute_processing_block", "role": "assistant" } ]
def x(self, block_id, priority, pb_type='offline'): if pb_type not in ('offline', 'realtime'): raise ValueError('Invalid PB type.') with self._mutex: added_time = datetime.datetime.utcnow().isoformat() entry = (priority, sys.maxsize - self._index, block_id, pb_type, added_time) self._index += 1 if self._block_map.get(block_id) is not None: raise KeyError( 'ERROR: Block id "{}" already exists in PC PB queue!'. format(block_id)) self._block_map[block_id] = entry LOG.debug('Adding PB %s to queue', block_id) self._queue.append(entry) self._queue.sort() self._queue.reverse()
put
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, block_id, priority, pb_type='offline'):\n if pb_type not in ('offline', 'realtime'):\n raise ValueError('Invalid PB type.')\n with self._mutex:\n added_time = datetime.datetime.utcnow().isoformat()\n entry = (priority, sys.maxsize - self._index, block_id, pb_type,\n added_time)\n self._index += 1\n if self._block_map.get(block_id) is not None:\n raise KeyError(\n 'ERROR: Block id \"{}\" already exists in PC PB queue!'.\n format(block_id))\n self._block_map[block_id] = entry\n LOG.debug('Adding PB %s to queue', block_id)\n self._queue.append(entry)\n self._queue.sort()\n self._queue.reverse()\n", "role": "user" }, { "content": "put", "role": "assistant" } ]
def x(self): with self._mutex: entry = self._queue.pop() del self._block_map[entry[2]] return entry[2]
get
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self):\n with self._mutex:\n entry = self._queue.pop()\n del self._block_map[entry[2]]\n return entry[2]\n", "role": "user" }, { "content": "get", "role": "assistant" } ]
def x(self, block_id): with self._mutex: entry = self._block_map[block_id] self._queue.remove(entry)
remove
[ { "content": "Given the source code of a python function, suggest a fitting name for the function.", "role": "system" }, { "content": "def x(self, block_id):\n with self._mutex:\n entry = self._block_map[block_id]\n self._queue.remove(entry)\n", "role": "user" }, { "content": "remove", "role": "assistant" } ]