<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __audiorender_thread(self): """ Thread that takes care of the audio rendering. Do not call directly, but only as the target of a thread. """
new_audioframe = None
logger.debug("Started audio rendering thread.")

while self.status in [PLAYING, PAUSED]:
    # Retrieve audiochunk
    if self.status == PLAYING:
        if new_audioframe is None:
            # Get a new frame from the audiostream, skip to the next one
            # if the current one gives a problem
            try:
                start = self.audio_times.pop(0)
                stop = self.audio_times[0]
            except IndexError:
                logger.debug("Audio times could not be obtained")
                time.sleep(0.02)
                continue

            # Get the frame numbers to extract from the audio stream.
            chunk = (1.0 / self.audioformat['fps']) * np.arange(start, stop)

            try:
                # Extract the frames from the audio stream. Does not always
                # succeed (e.g. with bad streams missing frames), so make
                # sure this doesn't crash the whole program.
                new_audioframe = self.clip.audio.to_soundarray(
                    tt=chunk,
                    buffersize=self.frame_interval * self.clip.audio.fps,
                    quantize=True
                )
            except OSError as e:
                logger.warning("Sound decoding error: {}".format(e))
                new_audioframe = None

        # Put audioframe in buffer/queue for soundrenderer to pick up. If
        # the queue is full, try again after a timeout (this allows to check
        # if the status is still PLAYING after a pause.)
        if new_audioframe is not None:
            try:
                self.audioqueue.put(new_audioframe, timeout=.05)
                new_audioframe = None
            except Full:
                pass

    time.sleep(0.005)

logger.debug("Stopped audio rendering thread.")
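For context, a minimal sketch of the matching consumer loop — a sound renderer draining self.audioqueue — might look as follows; the stream object and its write method are assumptions, not part of the original module.

from queue import Empty

def _soundrenderer_loop(self):
    # Keep pulling decoded audio frames while playback is active.
    while self.status in [PLAYING, PAUSED]:
        try:
            frame = self.audioqueue.get(timeout=0.05)
        except Empty:
            continue
        self.stream.write(frame.tobytes())   # hypothetical audio output stream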
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def search(self, keyword):
    '''Get search results for a specific keyword'''
    # Ensure the keyword is unicode before re-encoding it as GBK for the query.
    if type(keyword) != unicode:
        q = keyword.decode('utf-8')
    else:
        q = keyword
    req = self.fetch(self.search_url, {'q': q.encode('gbk')})
    if not req:
        return None
    html = req.content.decode('gbk').encode('utf-8')
    soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
                         markupMassage=hexentityMassage)
    cats = self.cats_parser(soup)
    keywords = self.keywords_parser(soup)
    mall_items = self.mall_items_parser(soup)
    total = int(soup.find('div', attrs={'class': 'user-easy'}).find('a').string)
    lists = self.lists_parser(soup)
    ads = self.ads_parser(soup)
    return SearchResults(keyword, total, lists, mall_items, ads, cats, keywords)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_top_keywords(self, up=True):
    '''Get top keywords for all the categories'''
    engine = models.postgres_engine()
    session = models.create_session(engine)
    #threadPool = ThreadPool(5)
    for cat in session.query(models.Category):
        if not cat.level:
            continue
        self.cat_top_keywords(session, cat, up)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def cat_top_keywords(self, session, cat, up=True, offset=0, offsets=[]): '''Get top keywords in a specific category''' print 'CAT:%s, level:%s'%(str(cat), str(cat.level)) print 'OFFSET: %d'%offset response = [] if not offsets or offset==0: url = 'http://top.taobao.com/level3.php?cat=%s&level3=%s&show=focus&up=%s&offset=%d'%(cat.parent.cid, '' if cat.level==2 else str(cat.cid), 'true' if up else '', offset) print url rs = self.fetch(url) if not rs: return response soup = BeautifulSoup(rs.content, convertEntities=BeautifulSoup.HTML_ENTITIES, markupMassage=hexentityMassage) response = self.parse_cat_top_keywords(soup, offset) if offset==0: offsets = self.get_cat_top_keywords_pages(soup, offset) print 'OFFSETS: %s'%offsets if offsets: rs = [] threadPool = ThreadPool(len(offsets) if len(offsets)<=5 else 5) for idx, page_offset in enumerate(offsets): page_url = 'http://top.taobao.com/level3.php?cat=%s&level3=%s&show=focus&up=%s&offset=%d'%(cat.parent.cid, '' if cat.level==2 else str(cat.cid), 'true' if up else '', page_offset) next_page = 'True' if idx == (len(offsets)-1) else 'False' threadPool.run(self.fetch, callback=None, url=page_url, config=dict(get_next=next_page, offset=page_offset)) pages = threadPool.killAllWorkers(None) #print 'RESPONSES: %s'%pages for p in pages: if not p: continue soup2 = BeautifulSoup(p.content, convertEntities=BeautifulSoup.HTML_ENTITIES, markupMassage=hexentityMassage) offset2 = int(p.config['offset']) response += self.parse_cat_top_keywords(soup2, offset2) print 'GOT: %d'%offset2 if p.config['get_next'] != 'True': continue offsets = self.get_cat_top_keywords_pages(soup2, offset2) print offsets if not offsets: continue response += self.cat_top_keywords(session, cat, up, offset2, offsets) #return sorted(response, key=itemgetter('pos')) if response else [] #print "RETURN:%d"%offset for k in response: new_keyword = models.Keyword(k['name'].decode('utf-8')) new_keyword.categories.append(cat) session.add(new_keyword) try: session.commit() except IntegrityError: session.rollback() new_keyword = session.query(models.Keyword).filter(models.Keyword.name == k['name']).first() new_keyword.categories.append(cat) session.commit() print 'Duplicate %s'%new_keyword return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_cats(self):
    '''Get top keywords categories'''
    start_url = 'http://top.taobao.com/index.php?from=tbsy'
    rs = self.fetch(start_url)
    if not rs:
        return None
    soup = BeautifulSoup(rs.content, convertEntities=BeautifulSoup.HTML_ENTITIES,
                         markupMassage=hexentityMassage)
    cats = [{'id': 'TR_%s' % li['id'].encode('utf-8').upper(),
             'title': li.a.text.encode('utf-8').strip()}
            for li in soup.find('div', id='nav').findAll('li') if li['id'] != 'index']
    threadPool = ThreadPool(len(cats) if len(cats) <= 5 else 5)
    for cat in cats:
        threadPool.run(self.get_cats_thread, callback=None, cat=cat)
    cats = threadPool.killAllWorkers(None)
    return cats
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def render_to_cairo_context(cairo_context, papersize_tuple, layout): """Renders the given layout manager on a page of the given context. Assumes the given context has not yet been reversed in the y-direction (i.e. it is still the default for Cairo, where y increases up from the bottom of the page). This method performs the reversal and resets it before it returns. """
try:
    cairo_context.save()
    cairo_context.translate(0, papersize_tuple[1])
    cairo_context.scale(1, -1)
    layout.render(
        Rectangle(0, 0, *papersize_tuple),
        dict(output=CairoOutput(cairo_context))
    )
finally:
    cairo_context.restore()
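A minimal, hypothetical driver for this function using pycairo is sketched below; the A4 page size in points and the commented-out layout object are illustrative assumptions, not part of the original project.

# Hedged sketch: rendering onto a PDF surface with pycairo.
import cairo

A4_POINTS = (595.0, 842.0)          # width, height in points (assumed page size)
surface = cairo.PDFSurface("out.pdf", *A4_POINTS)
context = cairo.Context(surface)
# layout = SomeLayoutManager(...)   # hypothetical layout object exposing .render()
# render_to_cairo_context(context, A4_POINTS, layout)
surface.finish()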
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prompt(self): """Returns the UTF-8 encoded prompt string."""
if self.prompt_string is not None:
    return self.prompt_string
if not self.color_enabled:
    return (' '.join(self.command) + '>> ').encode('utf8')
color = '34'
sub_color = '37'
prompt_cmd = [
    self.colorize('1;' + color, part)
    if part != self.cmd_sub_str
    else self.colorize('0;' + sub_color, u'\u2026')
    for part in self.command
]
return ' '.join(prompt_cmd) + self.colorize('0;' + color, '>> ')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def title(self): """Returns the UTF-8 encoded title"""
return (u'[{}] {}>>'.format( os.path.split(os.path.abspath('.'))[-1], u' '.join(self.command))).encode('utf8')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def query_relative(self, query, event_time=None, relative_duration_before=None, relative_duration_after=None): """Perform the query and calculate the time range based on the relative values."""
assert event_time is None or isinstance(event_time, datetime.datetime)
assert relative_duration_before is None or isinstance(relative_duration_before, str)
assert relative_duration_after is None or isinstance(relative_duration_after, str)

if event_time is None:
    # use now as the default
    event_time = datetime.datetime.now()

# use preconfigured defaults
if relative_duration_before is None:
    relative_duration_before = self.relative_duration_before
if relative_duration_after is None:
    relative_duration_after = self.relative_duration_after

time_start = event_time - create_timedelta(relative_duration_before)
time_end = event_time + create_timedelta(relative_duration_after)

return self.query_with_time(query, time_start, time_end)
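The helper create_timedelta is not shown in this snippet; a plausible stand-in that parses 'HH:MM:SS' duration strings might look like this (the string format is an assumption, only for illustration).

import datetime

def create_timedelta(spec):
    """Parse an assumed 'HH:MM:SS' duration string into a timedelta."""
    hours, minutes, seconds = (int(x) for x in spec.split(':'))
    return datetime.timedelta(hours=hours, minutes=minutes, seconds=seconds)

# e.g. an event at 12:00 with before='01:00:00' and after='00:15:00'
event = datetime.datetime(2024, 1, 1, 12, 0, 0)
window = (event - create_timedelta('01:00:00'), event + create_timedelta('00:15:00'))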
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def json(self): """Returns the search results as a list of JSON objects."""
if self.search_results is None:
    return None

result = []
for row in self.search_results['rows']:
    obj = {}
    for index in range(0, len(self.search_results['fields'])):
        obj[self.search_results['fields'][index]] = row[index]
    result.append(obj)

return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def draw_text(self, text:str, x:float, y:float, *, font_name:str, font_size:float, fill:Color) -> None: """Draws the given text at x,y."""
pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def draw_line( self, x0:float, y0:float, x1:float, y1:float, *, stroke:Color, stroke_width:float=1, stroke_dash:typing.Sequence=None ) -> None: """Draws the given line."""
pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def draw_rect( self, x:float, y:float, w:float, h:float, *, stroke:Color=None, stroke_width:float=1, stroke_dash:typing.Sequence=None, fill:Color=None ) -> None: """Draws the given rectangle."""
pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def draw_image( self, img_filename:str, x:float, y:float, w:float, h:float ) -> None: """Draws the given image."""
pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def draw_polygon( self, *pts, close_path:bool=True, stroke:Color=None, stroke_width:float=1, stroke_dash:typing.Sequence=None, fill:Color=None ) -> None: """Draws the given linear path."""
pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clip_rect(self, x:float, y:float, w:float, h:float) -> None: """Clip further output to this rect."""
pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_manifest(template_lines): """List of file names included by the MANIFEST.in template lines."""
manifest_files = distutils.filelist.FileList()
for line in template_lines:
    if line.strip():
        manifest_files.process_template_line(line)
return manifest_files.files
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _wx_two_step_creation_on_classic(cls): """ Patch the wxPython Classic class to behave like a wxPython Phoenix class on a 2-step creation process. On wxPython Phoenix, the first step is the parameterless ``__init__``, and the second step is the ``Create`` method with the construction parameters, e.g.:: class CustomFrame(wx.Frame): def __init__(self, parent): super(CustomFrame, self).__init__() # 1st step self.Create(parent) # 2nd step On wxPython Classic, the same would be written as:: class CustomFrame(wx.Frame): def __init__(self, parent): pre = wx.PreFrame() # 1st step pre.Create(parent) # 2nd step self.PostCreate(pre) # "3rd step" """
cls_init = cls.__init__
cls_create = cls.Create

@functools.wraps(cls_init)
def __init__(self, *args, **kwargs):
    if args or kwargs:
        cls_init(self, *args, **kwargs)
    else:  # 2-step creation
        new_self = getattr(wx, "Pre" + cls.__name__)()
        for pair in vars(new_self).items():
            setattr(self, *pair)

if sys.platform == "win32":
    # On Windows, the wx.Pre*.Create constructors calls the
    # EVT_WINDOW_CREATE handler before returning (i.e, it processes
    # the event instead of just adding a message to the queue), and
    # that shouldn't happen before the PostCreate call in this thread
    @functools.wraps(cls_create)
    def create(self, *args, **kwargs):
        self.SetEvtHandlerEnabled(False)
        result = cls_create(self, *args, **kwargs)
        self.SetEvtHandlerEnabled(True)
        if result:
            self.PostCreate(self)
            wx.PostEvent(self, wx.WindowCreateEvent(self))
        return result
else:
    @functools.wraps(cls_create)
    def create(self, *args, **kwargs):
        result = cls_create(self, *args, **kwargs)
        if result:
            self.PostCreate(self)
        return result

cls.__init__ = __init__
cls.Create = create
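As a rough illustration, once this patch has been applied (for example to wx.Frame on wxPython Classic), Phoenix-style two-step creation is expected to work unchanged; the window title keyword below is purely illustrative.

import wx

class CustomFrame(wx.Frame):
    def __init__(self, parent):
        super(CustomFrame, self).__init__()                      # 1st step: parameterless
        self.Create(parent, title="Patched two-step creation")   # 2nd step

if __name__ == "__main__":
    app = wx.App(False)
    CustomFrame(None).Show()
    app.MainLoop()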
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def start_pipeline(url, pipeline_id, auth, verify_ssl, runtime_parameters={}): """Start a running pipeline. The API waits for the pipeline to be fully started. Args: url (str): the host url in the form 'http://host:port/'. pipeline_id (str): the ID of of the exported pipeline. auth (tuple): a tuple of username, and password. runtime_parameters (dict): the desired runtime parameters for the pipeline. verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json """
start_result = requests.post(url + '/' + pipeline_id + '/start',
                             headers=X_REQ_BY, auth=auth,
                             verify=verify_ssl, json=runtime_parameters)
start_result.raise_for_status()
logging.info('Pipeline start requested.')
poll_pipeline_status(STATUS_RUNNING, url, pipeline_id, auth, verify_ssl)
logging.info("Pipeline started.")
return start_result.json()
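A hedged usage sketch of this wrapper follows; the URL is a placeholder for whatever the project's URL-builder would produce, the credentials, pipeline id and runtime parameters are made up, and X_REQ_BY / STATUS_RUNNING are assumed to be module-level constants as used above.

auth = ('admin', 'admin')                                   # placeholder credentials
url = 'http://localhost:18630/rest/v1/pipeline'             # placeholder pipeline base URL
response = start_pipeline(url, 'my_pipeline_id', auth, verify_ssl=False,
                          runtime_parameters={'BATCH_SIZE': 1000})
print(response.get('status'))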
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def export_pipeline(url, pipeline_id, auth, verify_ssl): """Export the config and rules for a pipeline. Args: url (str): the host url in the form 'http://host:port/'. pipeline_id (str): the ID of of the exported pipeline. auth (tuple): a tuple of username, and password. verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json """
export_result = requests.get(url + '/' + pipeline_id + '/export',
                             headers=X_REQ_BY, auth=auth, verify=verify_ssl)
if export_result.status_code == 404:
    logging.error('Pipeline not found: ' + pipeline_id)
export_result.raise_for_status()
return export_result.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pipeline_status(url, pipeline_id, auth, verify_ssl): """Retrieve the current status for a pipeline. Args: url (str): the host url in the form 'http://host:port/'. pipeline_id (str): the ID of of the exported pipeline. auth (tuple): a tuple of username, and password. verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json """
status_result = requests.get(url + '/' + pipeline_id + '/status',
                             headers=X_REQ_BY, auth=auth, verify=verify_ssl)
status_result.raise_for_status()
logging.debug('Status request: ' + url + '/status')
logging.debug(status_result.json())
return status_result.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def preview_status(url, pipeline_id, previewer_id, auth, verify_ssl): """Retrieve the current status for a preview. Args: url (str): the host url in the form 'http://host:port/'. pipeline_id (str): the ID of of the exported pipeline. previewer_id (str): the previewer id created by starting a preview or validation auth (tuple): a tuple of username, and password. verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json """
preview_status = requests.get(url + '/' + pipeline_id + '/preview/' + previewer_id + "/status",
                              headers=X_REQ_BY, auth=auth, verify=verify_ssl)
preview_status.raise_for_status()
logging.debug(preview_status.json())
return preview_status.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stop_pipeline(url, pipeline_id, auth, verify_ssl): """Stop a running pipeline. The API waits for the pipeline to be 'STOPPED' before returning. Args: url (str): the host url in the form 'http://host:port/'. pipeline_id (str): the ID of of the exported pipeline. auth (tuple): a tuple of username, and password. verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json """
stop_result = requests.post(url + '/' + pipeline_id + '/stop',
                            headers=X_REQ_BY, auth=auth, verify=verify_ssl)
stop_result.raise_for_status()
logging.info("Pipeline stop requested.")
poll_pipeline_status(STATUS_STOPPED, url, pipeline_id, auth, verify_ssl)
logging.info('Pipeline stopped.')
return stop_result.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_pipeline(url, pipeline_id, auth, verify_ssl): """Validate a pipeline and show issues. Args: url (str): the host url in the form 'http://host:port/'. pipeline_id (str): the ID of of the exported pipeline. auth (tuple): a tuple of username, and password. verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json """
validate_result = requests.get(url + '/' + pipeline_id + '/validate',
                               headers=X_REQ_BY, auth=auth, verify=verify_ssl)
validate_result.raise_for_status()
previewer_id = validate_result.json()['previewerId']
poll_validation_status(url, pipeline_id, previewer_id, auth, verify_ssl)
preview_result = requests.get(url + '/' + pipeline_id + '/preview/' + validate_result.json()['previewerId'],
                              headers=X_REQ_BY, auth=auth, verify=verify_ssl)
logging.debug('result content: {}'.format(preview_result.content))
return preview_result.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def import_pipeline(url, pipeline_id, auth, json_payload, verify_ssl, overwrite = False): """Import a pipeline. This will completely overwrite the existing pipeline. Args: url (str): the host url in the form 'http://host:port/'. pipeline_id (str): the ID of of the exported pipeline. auth (tuple): a tuple of username, and password. json_payload (dict): the exported json payload as a dictionary. overwrite (bool): overwrite existing pipeline verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json """
parameters = {'overwrite': overwrite}
import_result = requests.post(url + '/' + pipeline_id + '/import',
                              params=parameters, headers=X_REQ_BY, auth=auth,
                              verify=verify_ssl, json=json_payload)
if import_result.status_code != 200:
    logging.error('Import error response: ' + import_result.text)
import_result.raise_for_status()
logging.info('Pipeline import successful.')
return import_result.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_pipeline(url, auth, json_payload, verify_ssl): """Create a new pipeline. Args: url (str): the host url in the form 'http://host:port/'. auth (tuple): a tuple of username, and password. json_payload (dict): the exported json payload as a dictionary. verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json """
title = json_payload['pipelineConfig']['title']
description = json_payload['pipelineConfig']['description']
params = {'description': description, 'autoGeneratePipelineId': True}
logging.info('No destination pipeline ID provided. Creating a new pipeline: ' + title)
put_result = requests.put(url + '/' + title, params=params, headers=X_REQ_BY,
                          auth=auth, verify=verify_ssl)
put_result.raise_for_status()
create_json = put_result.json()
logging.debug(create_json)
logging.info('Pipeline creation successful.')
return create_json
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def system_info(url, auth, verify_ssl): """Retrieve SDC system information. Args: url (str): the host url. auth (tuple): a tuple of username, and password. """
sysinfo_response = requests.get(url + '/info', headers=X_REQ_BY, auth=auth, verify=verify_ssl)
sysinfo_response.raise_for_status()
return sysinfo_response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def export_pipeline(conf, args): """Export a pipeline to json."""
# Export the source pipeline and save it to file
src = conf.config['instances'][args.src_instance]
src_url = api.build_pipeline_url(build_instance_url(src))
src_auth = tuple([conf.creds['instances'][args.src_instance]['user'],
                  conf.creds['instances'][args.src_instance]['pass']])
verify_ssl = src.get('verify_ssl', True)
export_json = api.export_pipeline(src_url, args.src_pipeline_id, src_auth, verify_ssl)
with open(args.out, 'w') as outFile:
    outFile.write(json.dumps(export_json, indent=4, sort_keys=False))
return (0, '')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def import_pipeline(conf, args): """Import a pipeline from json."""
with open(args.pipeline_json) as pipeline_json:
    dst = conf.config['instances'][args.dst_instance]
    dst_url = api.build_pipeline_url(build_instance_url(dst))
    dst_auth = tuple([conf.creds['instances'][args.dst_instance]['user'],
                      conf.creds['instances'][args.dst_instance]['pass']])
    parsed_json = json.load(pipeline_json)
    verify_ssl = dst.get('verify_ssl', True)
    return api.import_pipeline(dst_url, args.pipeline_id, dst_auth, parsed_json,
                               verify_ssl, overwrite=args.overwrite)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def promote_pipeline(conf, args): """Export a pipeline from a lower environment and import into higher environment."""
src = conf.config['instances'][args.src_instance]
src_url = api.build_pipeline_url(build_instance_url(src))
src_auth = tuple([conf.creds['instances'][args.src_instance]['user'],
                  conf.creds['instances'][args.src_instance]['pass']])
verify_ssl = src.get('verify_ssl', True)
export_json = api.export_pipeline(src_url, args.src_pipeline_id, src_auth, verify_ssl)

# Import the pipeline to the destination
dest = conf.config['instances'][args.dest_instance]
dest_url = api.build_pipeline_url(build_instance_url(dest))
dest_auth = tuple([conf.creds['instances'][args.dest_instance]['user'],
                   conf.creds['instances'][args.dest_instance]['pass']])
dest_pipeline_id = args.dest_pipeline_id
if dest_pipeline_id and api.pipeline_status(dest_url, dest_pipeline_id, dest_auth,
                                            verify_ssl)['status'] != api.STATUS_STOPPED:
    api.stop_pipeline(dest_url, dest_pipeline_id, dest_auth, verify_ssl)
else:
    # No destination pipeline id was provided, must be a new pipeline.
    create_json = api.create_pipeline(dest_url, dest_auth, export_json, verify_ssl)
    dest_pipeline_id = create_json['info']['pipelineId']

result = api.import_pipeline(dest_url, dest_pipeline_id, dest_auth, export_json,
                             verify_ssl, overwrite=True)

# Start the imported pipeline
if args.start_dest:
    api.start_pipeline(dest_url, dest_pipeline_id, dest_auth, verify_ssl)
return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def start_pipeline(conf, args): """Start a pipeline"""
host = conf.config['instances'][args.host_instance]
url = api.build_pipeline_url(build_instance_url(host))
auth = tuple([conf.creds['instances'][args.host_instance]['user'],
              conf.creds['instances'][args.host_instance]['pass']])
runtime_parameters = {}
verify_ssl = host.get('verify_ssl', True)
if args.runtime_parameters:
    runtime_parameters = json.loads(args.runtime_parameters)
start_result = api.start_pipeline(url, args.pipeline_id, auth, verify_ssl, runtime_parameters)
return start_result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def system_info(conf, args): """Retrieve SDC system information."""
src = conf.config['instances'][args.src]
src_url = api.build_system_url(build_instance_url(src))
src_auth = tuple([conf.creds['instances'][args.src]['user'],
                  conf.creds['instances'][args.src]['pass']])
verify_ssl = src.get('verify_ssl', True)
sysinfo_json = api.system_info(src_url, src_auth, verify_ssl)
return sysinfo_json
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_pipeline(conf, args): """Validate a pipeline configuration."""
host = conf.config['instances'][args.host_instance]
host_url = api.build_pipeline_url(build_instance_url(host))
host_auth = tuple([conf.creds['instances'][args.host_instance]['user'],
                   conf.creds['instances'][args.host_instance]['pass']])
verify_ssl = host.get('verify_ssl', True)
validate_result = api.validate_pipeline(host_url, args.pipeline_id, host_auth, verify_ssl)
return validate_result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extractGlobalParameters(self, dna, bp, frames=None, paxis='Z', masked=False): """Extract the parameters for calculations .. currentmodule:: dnaMD Parameters dna : :class:`dnaMD.DNA` Input :class:`dnaMD.DNA` instance. bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. frames : list List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. paxis : str Axis parallel to global helical-axis(``'X'``, or ``'Y'`` or ``'Z'``). Only require when bending motions are included in the calculation. masked : bool ``Default=False``. To skip specific frames/snapshots. ``DNA.mask`` array should be set to use this functionality. This array contains boolean (either ``True`` or ``False``) value for each frame to mask the frames. Presently, mask array is automatically generated during :meth:`dnaMD.DNA.generate_smooth_axis` to skip those frames where 3D fitting curve was not successful within the given criteria. Returns ------- time : numpy.ndarray 1D numpy array of shape (nframes) containing time array : numpy.ndarray 2D numpy array of shape (parameters count, nframes) containing extracted parameters. """
frames = self._validateFrames(frames)
if frames[1] == -1:
    frames[1] = None

if (len(bp) != 2):
    raise ValueError("bp should be a list containing first and last bp of a segment. See, documentation!!!")

if bp[0] > bp[1]:
    raise ValueError("bp should be a list containing first and last bp of a segment. See, documentation!!!")

time, clen = dna.time_vs_parameter('h-rise', bp=bp, merge=True, merge_method='sum', masked=masked)
clen = np.asarray(clen) * 0.1  # conversion to nm

time, htwist = dna.time_vs_parameter('h-twist', bp=bp, merge=True, merge_method='sum', masked=masked)
htwist = np.deg2rad(htwist)  # Conversion to radian

angleOne, angleTwo = None, None
if self.esType == 'BST':
    angleOne, angleTwo = dna.calculate_2D_angles_bw_tangents(paxis, bp, masked=masked)

    # Rarely there are nan during angle calculation, remove those nan
    nanInOne = np.isnan(angleOne[frames[0]:frames[1]])
    nanInTwo = np.isnan(angleTwo[frames[0]:frames[1]])
    notNan = ~(nanInOne + nanInTwo)
    notNanIdx = np.nonzero(notNan)

    array = np.array([angleOne[frames[0]:frames[1]][notNanIdx],
                      angleTwo[frames[0]:frames[1]][notNanIdx],
                      clen[frames[0]:frames[1]][notNanIdx],
                      htwist[frames[0]:frames[1]][notNanIdx]])

    time = (time[frames[0]:frames[1]])[notNanIdx]
else:
    array = np.array([clen[frames[0]:frames[1]],
                      htwist[frames[0]:frames[1]]])
    time = time[frames[0]:frames[1]]

return time, array
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getStretchTwistBendModulus(self, bp, frames=None, paxis='Z', masked=True, matrix=False): r"""Calculate Bending-Stretching-Twisting matrix It calculate elastic matrix and modulus matrix. .. math:: \text{modulus matrix} = 4.1419464 \times \begin{bmatrix} K_{Bx} & K_{Bx,By} & K_{Bx,S} & K_{Bx,T} \\ K_{Bx,By} & K_{By} & K_{By,S} & K_{By,T} \\ K_{Bx,S} & K_{By,S} & K_{S} & K_{S,T} \\ K_{Bx,T} & K_{Bx,T} & K_{S,T} & K_{T} \end{bmatrix} \times L_0 .. currentmodule:: dnaMD Parameters bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. frames : list List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. paxis : str Axis parallel to global helical-axis(``'X'``, or ``'Y'`` or ``'Z'``). Only require when bending motions are included in the calculation. masked : bool ``Default=False``. To skip specific frames/snapshots. ``DNA.mask`` array should be set to use this functionality. This array contains boolean (either ``True`` or ``False``) value for each frame to mask the frames. Presently, mask array is automatically generated during :meth:`dnaMD.DNA.generate_smooth_axis` to skip those frames where 3D fitting curve was not successful within the given criteria. matrix : bool If it is ``True``, elastic constant matrix will be returned. Otherwise, by default modulus matrix will be returned. Return ------ mean : numpy.ndarray Value of bending angles, contour length and twist angle (as 1D array) at which energy is zero. Minimum point on free energy landscape. .. math:: \begin{bmatrix} \theta^{x}_0 & \theta^{y}_0 & L_0 & \phi_0 \end{bmatrix} result : numpy.ndarray Either elastic matrix or modulus matrix depending on ``matrix`` value. """
if self.esType == 'ST':
    raise KeyError(' Use dnaEY.getStretchTwistModulus for Stretching-Twisting modulus.')

frames = self._validateFrames(frames)

name = '{0}-{1}-{2}-{3}'.format(bp[0], bp[1], frames[0], frames[1])

if name not in self.esMatrix:
    time, array = self.extractGlobalParameters(self.dna, bp, frames=frames, paxis=paxis, masked=masked)
    mean = np.mean(array, axis=1)
    esMatrix = np.asarray(self.getElasticMatrix(array))
    self.esMatrix[name] = esMatrix
    self.minimumPoint[name] = mean
else:
    esMatrix = self.esMatrix[name]
    mean = self.minimumPoint[name]

if not matrix:
    result = 4.1419464 * np.array(esMatrix) * mean[2]  # Calculate modulus
else:
    result = esMatrix

return mean, result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getModulusByTime(self, bp, frameGap, masked=False, paxis='Z', outFile=None): r"""Calculate moduli as a function of time for convergence check It can be used to obtained elastic moduli as a function of time to check their convergence. .. note:: Elastic properties cannot be calculated using a single frame because fluctuations are required. Therefore, here time means trajectory between zero time to given time. When ``esType='BST'``, following is obtained: 1) bend-1 2) bend-2 3) stretch 4) twist 5) bend-1-bend-2 6) bend-2-stretch 7) stretch-twist 8) bend-1-stretch 9) bend-2-twist 10) bend-1-twist When ``esType='ST'``, following is obtained: 1) stretch 2) twist 3) stretch-twist .. currentmodule:: dnaMD Parameters bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. frameGap : int How many frames to skip for next calculation. this option will determine the time-gap between each calculation. Lower the number, slower will be the calculation. masked : bool ``Default=False``. To skip specific frames/snapshots. ``DNA.mask`` array should be set to use this functionality. This array contains boolean (either ``True`` or ``False``) value for each frame to mask the frames. Presently, mask array is automatically generated during :meth:`dnaMD.DNA.generate_smooth_axis` to skip those frames where 3D fitting curve was not successful within the given criteria. paxis : str Axis parallel to global helical-axis(``'X'``, or ``'Y'`` or ``'Z'``). Only require when bending motions are included in the calculation. outFile : str Output file in csv format. Returns ------- time : numpy.ndarray 1D array containing time values of shape (nframes). Elasticities : OrderedDict A ordered dictionary of 1D arrays of shape (nframes). The keys in dictionary are name of the elasticity in the same order as listed above. e.g. ``Elasticities['stretch']`` will give elasticity along stretching as a function of time. """
if self.esType == 'BST':
    props_name = ['bend-1', 'bend-2', 'stretch', 'twist',
                  'bend-1-bend-2', 'bend-2-stretch', 'stretch-twist',
                  'bend-1-stretch', 'bend-2-twist', 'bend-1-twist']
else:
    props_name = ['stretch', 'twist', 'stretch-twist']

time, elasticity = [], OrderedDict()
for name in props_name:
    elasticity[name] = []

length = len(self.dna.time[:])
for i in range(frameGap, length, frameGap):

    props = None
    if self.esType == 'BST':
        mean, modulus_t = self.getStretchTwistBendModulus(bp, frames=[0, i], paxis=paxis, masked=True)
    else:
        mean, modulus_t = self.getStretchTwistModulus(bp, frames=[0, i], masked=masked)

    modulus_t = matrixToVector(modulus_t)
    for p in range(len(props_name)):
        elasticity[props_name[p]].append(modulus_t[p])
    time.append(self.dna.time[i])

# Write output file
if outFile is not None:
    with open(outFile, 'w') as fout:
        fout.write('#Time')
        for name in props_name:
            fout.write(', {0}'.format(name))
        fout.write('\n')
        for t in range(len(time)):
            fout.write('{0:.3f}'.format(time[t]))
            for name in props_name:
                fout.write(', {0:.5f}'.format(elasticity[name][t]))
            fout.write('\n')

return time, elasticity
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getGlobalDeformationEnergy(self, bp, complexDna, freeDnaFrames=None, boundDnaFrames=None, paxis='Z', which='all', masked=False, outFile=None): r"""Deformation energy of the input DNA using Global elastic properties It can be used to calculated deformation energy of a input DNA with reference to the DNA present in the current object. The deformation free energy is calculated using elastic matrix as follows .. math:: G = \frac{1}{2L_0}\mathbf{xKx^T} .. math:: \mathbf{x} = \begin{bmatrix} (\theta^{x} - \theta^{x}_0) & (\theta^{y} - \theta^{y}_0) & (L - L_0) & (\phi - \phi_0) \end{bmatrix} .. currentmodule:: dnaMD Parameters bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. complexDna : :class:`dnaMD.DNA` Input :class:`dnaMD.DNA` instance for which deformation energy will be calculated. freeDnaFrames : list To select a trajectory segment of current (free) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. boundDnaFrames : list To select a trajectory segment of input (bound) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. paxis : str Axis parallel to global helical-axis(``'X'``, or ``'Y'`` or ``'Z'``). Only require when bending motions are included in the calculation. which : str or list For which motions, energy should be calculated. It should be either a list containing terms listed below or "all" for all energy terms. Following keywords are available: * ``'full'`` : Use entire elastic matrix -- all motions with their coupling * ``'diag'`` : Use diagonal of elastic matrix -- all motions but no coupling * ``'b1'`` : Only bending-1 motion * ``'b2'`` : Only bending-2 motion * ``'stretch'`` : Only stretching motion * ``'twist'`` : Only Twisting motions * ``'st_coupling'`` : Only stretch-twist coupling motion * ``'bs_coupling'`` : Only Bending and stretching coupling * ``'bt_coupling'`` : Only Bending and Twisting coupling * ``'bb_coupling'`` : Only bending-1 and bending-2 coupling * ``'bend'`` : Both bending motions with their coupling * ``'st'`` : Stretching and twisting motions with their coupling * ``'bs'`` : Bending (b1, b2) and stretching motions with their coupling * ``'bt'`` : Bending (b1, b2) and twisting motions with their coupling masked : bool ``Default=False``. To skip specific frames/snapshots. ``DNA.mask`` array should be set to use this functionality. This array contains boolean (either ``True`` or ``False``) value for each frame to mask the frames. Presently, mask array is automatically generated during :meth:`dnaMD.DNA.generate_smooth_axis` to skip those frames where 3D fitting curve was not successful within the given criteria. outFile : str Output file in csv format. Returns ------- time : numpy.ndarray 1D array containing time values. energy : OrderedDict of numpy.ndarray Dictionary of 1D array of shape (nframes) containing energy terms requested for DNA. """
if self.esType == 'BST': energyTerms = self.enGlobalTypes else: energyTerms = self.enGlobalTypes[:5] if isinstance(which, str): if which != 'all': raise ValueError('Either use "all" or use list of terms from this {0} list \n.'.format(energyTerms)) else: which = energyTerms elif isinstance(which, list): for key in which: if key not in energyTerms: raise ValueError('{0} is not a supported keyword.\n Use from the following list: \n{1}'.format( which, energyTerms)) else: raise ValueError('Either use "all" or use list of terms from this {0} list \n.'.format( energyTerms)) if self.esType == 'BST': means, esMatrix = self.getStretchTwistBendModulus(bp, frames=freeDnaFrames, masked=masked, matrix=True, paxis=paxis) else: means, esMatrix = self.getStretchTwistModulus(bp, frames=freeDnaFrames, masked=masked, matrix=True) esMatrix = 2.5 * esMatrix # Convert kT to kJ/mol time, array = self.extractGlobalParameters(complexDna, bp, frames=boundDnaFrames, paxis=paxis, masked=masked) # Initialize energy dictionary energyOut = OrderedDict() for key in which: energyOut[key] = [] for i in range(array[0].shape[0]): vec = array[:, i] diff = vec - means for key in which: if self.esType == 'BST': t_energy = self._calcEnergyBendStretchTwist(diff, esMatrix, key) else: t_energy = self._calcEnergyStretchTwist(diff, esMatrix, key) energyOut[key].append(t_energy) for key in which: energyOut[key] = np.asarray(energyOut[key]) # Write output file if outFile is not None: with open(outFile, 'w') as fout: fout.write('#Time') for name in which: fout.write(', {0}'.format(name)) fout.write('\n') for t in range(len(time)): fout.write('{0:.3f}'.format(time[t])) for name in which: fout.write(', {0:.5f}'.format(energyOut[name][t])) fout.write('\n') return time, energyOut
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _calcEnergyStretchTwist(self, diff, es, which): r"""Calculate energy for ``estype='ST'`` using a difference vector. It is called in :meth:`dnaEY.getGlobalDeformationEnergy` for energy calculation of each frame. Parameters diff : numpy.ndarray Array of difference between minimum and current parameter values. .. math:: \mathbf{x} = \begin{bmatrix} (L_i - L_0) & (\phi_i - \phi_0) \end{bmatrix} es : numpy.ndarray Elastic matrix. See in :meth:`dnaEY.getStretchTwistModulus` about elastic matrix. which : str For which type of motions, energy will be calculated. See ``which`` parameter in :meth:`dnaEY.getGlobalDeformationEnergy` for keywords. Return ------ energy : float Deformation free energy value """
if which not in self.enGlobalTypes[:5]:
    raise ValueError('{0} is not a supported energy keywords.\n Use any of the following: \n {1}'.format(
        which, self.enGlobalTypes[:5]))

energy = None

if which == 'full':
    temp = np.matrix(diff)
    energy = 0.5 * ((temp * es) * temp.T)
    energy = energy[0, 0]

if which == 'diag':
    energy = 0.5 * ((diff[0] ** 2 * es[0][0])
                    + (diff[1] ** 2 * es[1][1]))

if which == 'stretch':
    energy = 0.5 * (diff[0] ** 2 * es[0][0])

if which == 'twist':
    energy = 0.5 * (diff[1] ** 2 * es[1][1])

if which == 'st_coupling':
    energy = 0.5 * (diff[0] * diff[1] * es[0][1])

return energy
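A tiny, self-contained numeric check of the quadratic form 0.5 * x K x^T used in the 'full' and 'diag' branches above; the matrix and deviation values are made up purely for illustration.

import numpy as np

es = np.array([[2.0, 0.3],
               [0.3, 1.5]])            # illustrative 2x2 elastic matrix (kT units)
diff = np.array([0.1, -0.2])           # deviation from the minimum (L - L0, phi - phi0)

energy_full = 0.5 * diff @ es @ diff   # same as the 'full' branch
energy_diag = 0.5 * (diff[0]**2 * es[0, 0] + diff[1]**2 * es[1, 1])
print(energy_full, energy_diag)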
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def calculateLocalElasticity(self, bp, frames=None, helical=False, unit='kT'): r"""Calculate local elastic matrix or stiffness matrix for local DNA segment .. note:: Here local DNA segment referred to less than 5 base-pair long. In case of :ref:`base-step-image`: Shift (:math:`Dx`), Slide (:math:`Dy`), Rise (:math:`Dz`), Tilt (:math:`\tau`), Roll (:math:`\rho`) and Twist (:math:`\omega`), following elastic matrix is calculated. .. math:: \mathbf{K}_{base-step} = \begin{bmatrix} K_{Dx} & K_{Dx,Dy} & K_{Dx,Dz} & K_{Dx,\tau} & K_{Dx,\rho} & K_{Dx,\omega} \\ K_{Dx,Dy} & K_{Dy} & K_{Dy,Dz} & K_{Dy,\tau} & K_{Dy,\rho} & K_{Dy,\omega} \\ K_{Dx,Dz} & K_{Dy,Dz} & K_{Dz} & K_{Dz,\tau} & K_{Dz,\rho} & K_{Dz,\omega} \\ K_{Dx,\tau} & K_{Dy,\tau} & K_{Dz,\tau} & K_{\tau} & K_{\tau, \rho} & K_{\tau,\omega} \\ K_{Dx,\rho} & K_{Dy,\rho} & K_{Dz,\rho} & K_{\tau, \rho} & K_{\rho} & K_{\rho,\omega} \\ K_{Dx,\omega} & K_{Dy,\omega} & K_{Dz,\omega} & K_{\tau, \omega} & K_{\rho, \omega} & K_{\omega} \\ \end{bmatrix} In case of :ref:`helical-base-step-image`: x-displacement (:math:`dx`), y-displacement (:math:`dy`), h-rise (:math:`h`), inclination (:math:`\eta`), tip (:math:`\theta`) and twist (:math:`\Omega`), following elastic matrix is calculated. .. math:: \mathbf{K}_{helical-base-step} = \begin{bmatrix} K_{dx} & K_{dx,dy} & K_{dx,h} & K_{dx,\eta} & K_{dx,\theta} & K_{dx,\Omega} \\ K_{dx,dy} & K_{dy} & K_{dy,h} & K_{dy,\eta} & K_{dy,\theta} & K_{dy,\Omega} \\ K_{dx,h} & K_{dy,h} & K_{h} & K_{h,\eta} & K_{h,\theta} & K_{h,\Omega} \\ K_{dx,\eta} & K_{dy,\eta} & K_{h,\eta} & K_{\eta} & K_{\eta, \theta} & K_{\eta,\Omega} \\ K_{dx,\theta} & K_{dy,\theta} & K_{h,\theta} & K_{\eta, \theta} & K_{\theta} & K_{\theta,\Omega} \\ K_{dx,\Omega} & K_{dy,\Omega} & K_{h,\Omega} & K_{\eta, \Omega} & K_{\theta, \Omega} & K_{\Omega} \\ \end{bmatrix} Parameters bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. frames : list List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. helical : bool If ``helical=True``, elastic matrix for **helical base-step** parameters are calculated. Otherwise, by default, elastic matrix for **base-step** parameters are calculated. unit : str Unit of energy. Allowed units are: ``'kT', 'kJ/mol' and 'kcal/mol'``. Return ------ mean : numpy.ndarray Value of parameters at which energy is zero. Minimum point on energy landscape. if ``helical=False`` .. math:: \begin{bmatrix} Dx_0 & Dy_0 & Dz_0 & \tau_0 & \rho_0 & \omega_0 \end{bmatrix} if ``helical=True`` .. math:: \begin{bmatrix} dx_0 & dy_0 & h_0 & \eta_0 & \theta_0 & \Omega_0 \end{bmatrix} result : numpy.ndarray Elastic matrix. """
acceptedUnit = ['kT', 'kJ/mol', 'kcal/mol']
if unit not in acceptedUnit:
    raise ValueError(" {0} not accepted. Use any of the following: {1} ".format(unit, acceptedUnit))

frames = self._validateFrames(frames)

name = '{0}-{1}-{2}-{3}-local-{4}'.format(bp[0], bp[1], frames[0], frames[1], int(helical))

if bp[1] - bp[0] + 1 > 4:
    raise ValueError("Selected span {0} is larger than 4, and therefore, not recommended for local elasticity".format(bp[1] - bp[0] + 1))

if name not in self.esMatrix:
    time, array = self.extractLocalParameters(self.dna, bp, helical=helical, frames=frames)
    mean = np.mean(array, axis=1)
    esMatrix = self.getElasticMatrix(array)
    self.esMatrix[name] = esMatrix
    self.minimumPoint[name] = mean
else:
    esMatrix = self.esMatrix[name]
    mean = self.minimumPoint[name]

if unit == 'kJ/mol':
    result = 2.4946938107879997 * esMatrix  # (1.38064852e-23 * 300 * 6.023e23 / 1000) kT.NA/1000
elif unit == 'kcal/mol':
    result = 0.5962461306854684 * esMatrix  # (1.38064852e-23 * 300 * 6.023e23 / 1000 / 4.184) kT.NA/1000
else:
    result = esMatrix

return mean, result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getLocalElasticityByTime(self, bp, frameGap, helical=False, unit='kT', outFile=None): r"""Calculate local elastic properties as a function of time for convergence check It can be used to obtained elastic properties as a function of time. .. note:: Elastic properties cannot be calculated using a single frame because fluctuations are required. Therefore, here time means trajectory between zero time to given time. When ``helical='False'``, following is obtained: 1) Shift (:math:`K_{Dx}`) 2) Slide (:math:`K_{Dy}`) 3) Rise (:math:`K_{Dz}`) 4) Tilt (:math:`K_{\tau}`) 5) Roll (:math:`K_{\rho}`) 6) Twist (:math:`K_{\omega}`) 7) :math:`K_{Dx,Dy}` 8) :math:`K_{Dy,Dz}` 9) :math:`K_{Dz,\tau}` 10) :math:`K_{\tau, \rho}` 11) :math:`K_{\rho,\omega}` 12) :math:`K_{Dx,Dz}` 13) :math:`K_{Dy,\tau}` 14) :math:`K_{Dz,\rho}` 15) :math:`K_{\tau,\omega}` 16) :math:`K_{Dx,\tau}` 17) :math:`K_{Dy,\rho}` 18) :math:`K_{Dz,\omega}` 19) :math:`K_{Dx,\rho}` 20) :math:`K_{Dy,\omega}` 21) :math:`K_{Dx,\omega}` When ``helical='True'``, following is obtained: 1) Shift (:math:`K_{Dx}`) 2) Slide (:math:`K_{Dy}`) 3) Rise (:math:`K_{h}`) 4) Tilt (:math:`K_{\eta}`) 5) Roll (:math:`K_{\theta}`) 6) Twist (:math:`K_{\Omega}`) 7) :math:`K_{dx,dy}` 8) :math:`K_{dy,h}` 9) :math:`K_{h,\eta}` 10) :math:`K_{\eta, \theta}` 11) :math:`K_{\theta,\Omega}` 12) :math:`K_{dx,h}` 13) :math:`K_{dy,\eta}` 14) :math:`K_{h,\theta}` 15) :math:`K_{\tau,\Omega}` 16) :math:`K_{dx,\eta}` 17) :math:`K_{dy,\theta}` 18) :math:`K_{h,\Omega}` 19) :math:`K_{dx,\theta}` 20) :math:`K_{dy,\Omega}` 21) :math:`K_{dx,\Omega}` .. currentmodule:: dnaMD Parameters bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. frameGap : int How many frames to skip for next time-frame. Lower the number, slower will be the calculation. helical : bool If ``helical=True``, elastic matrix for **helical base-step** parameters are calculated. Otherwise, by default, elastic matrix for **base-step** parameters are calculated. unit : str Unit of energy. Allowed units are: ``'kT', 'kJ/mol' and 'kcal/mol'``. outFile : str Output file in csv format. Returns ------- time : numpy.ndarray 1D array containing time values of shape (nframes). Elasticities : OrderedDict A ordered dictionary of 1D arrays of shape (nframes). The keys in dictionary are name of the elasticity in the same order as listed above. e.g. ``Elasticities['shift']`` will give elasticity along shift parameters as a function of time. """
if helical:
    props_name = helical_local_props_vector
else:
    props_name = local_props_vector

time, elasticity = [], OrderedDict()
for name in props_name:
    elasticity[name] = []

length = len(self.dna.time[:])
for i in range(frameGap, length, frameGap):
    mean, esy_t = self.calculateLocalElasticity(bp, frames=[0, i], helical=helical, unit=unit)
    esy_t = matrixToVector(esy_t)
    for p in range(len(props_name)):
        elasticity[props_name[p]].append(esy_t[p])
    time.append(self.dna.time[i])

# Write output file
if outFile is not None:
    with open(outFile, 'w') as fout:
        fout.write('#Time')
        for name in props_name:
            fout.write(', {0}'.format(name))
        fout.write('\n')
        for t in range(len(time)):
            fout.write('{0:.3f}'.format(time[t]))
            for name in props_name:
                fout.write(', {0:.5f}'.format(elasticity[name][t]))
            fout.write('\n')

return time, elasticity
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def calculateLocalElasticitySegments(self, bp, span=2, frameGap=None, helical=False, unit='kT', err_type='block', tool='gmx analyze', outFile=None): """Calculate local elastic properties of consecutive overlapped DNA segments Calculate local elastic properties of consecutive overlapped DNA segments of length given by `span`. Parameters bp : list List of two base-steps forming the global DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. span : int Length of overlapping (local) DNA segments. It should be less than four. frameGap : int How many frames to skip for next time-frame. Lower the number, slower will be the calculation. helical : bool If ``helical=True``, elastic matrix for **helical base-step** parameters are calculated. Otherwise, by default, elastic matrix for **base-step** parameters are calculated. unit : str Unit of energy. Allowed units are: ``'kT', 'kJ/mol' and 'kcal/mol'``. err_type : str Error estimation by autocorrelation method ``err_type='acf'`` or block averaging method ``err_type='block'`` tool : str GROMACS tool to calculate error. In older versions it is `g_analyze` while in newer versions (above 2016) it is `gmx analyze`. outFile : str Output file in csv format. Returns ------- segments : list list of DNA segments for which local elastic properties was calculated. elasticities : OrderedDict A ordered dictionary of 1D arrays of shape (segments). The keys in dictionary are name of the elasticity in the same order as listed above. error : OrderedDict A ordered dictionary of 1D arrays of shape (segments). The keys in dictionary are name of the elasticity in the same order as listed above.. """
if helical: props_name = helical_local_props_vector else: props_name = local_props_vector segments, errors, elasticities = [], OrderedDict(), OrderedDict() for name in props_name: elasticities[name] = [] errors[name] = [] for s in range(bp[0], bp[1]): if s+span-1 > bp[1]: break time, elasticity_t = self.getLocalElasticityByTime([s, s+span-1], frameGap=frameGap, helical=helical, unit=unit) error_t = dnaMD.get_error(time, list(elasticity_t.values()), len(props_name), err_type=err_type, tool=tool) for i in range(len(props_name)): esy_t = elasticity_t[props_name[i]][-1] # only take last entry elasticities[props_name[i]].append(esy_t) errors[props_name[i]].append(error_t[i]) segments.append('{0}-{1}'.format(s, s+span-1)) # Write output file if outFile is not None: with open(outFile, 'w') as fout: fout.write('#bps') for name in props_name: fout.write(', {0}, {0}-error'.format(name)) fout.write('\n') for s in range(len(segments)): fout.write('{0}'.format(segments[s])) for name in props_name: fout.write(', {0:.5f}, {1:.5f}'.format(elasticities[name][s], errors[name][s])) fout.write('\n') return segments, elasticities, errors
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getLocalDeformationEnergy(self, bp, complexDna, freeDnaFrames=None, boundDnaFrames=None, helical=False, unit='kT', which='all', outFile=None): r"""Deformation energy of the input DNA using local elastic properties The deformation energy of a base-step/s for probe DNA object with reference to the same base-step/s DNA present in the current DNA object. The deformation free energy is calculated using elastic matrix as follows .. math:: G = \frac{1}{2}\mathbf{xKx^T} When ``helical='False'`` .. math:: \mathbf{K} = \mathbf{K}_{base-step} .. math:: \mathbf{x} = \begin{bmatrix} (Dx_{i}-Dx_0) & (Dy_i - Dy_0) & (Dz_i - Dz_0) & (\tau_i - \tau_0) & (\rho_i - \rho_0) & (\omega_i - \omega_0) \end{bmatrix} When ``helical='True'`` .. math:: \mathbf{K} = \mathbf{K}_{helical-base-step} .. math:: \mathbf{x} = \begin{bmatrix} (dx_{i}-dx_0) & (dy_i - dy_0) & (h_i - h_0) & (\eta_i - \eta_0) & (\theta_i - \theta_0) & (\Omega_i - \Omega_0) \end{bmatrix} .. currentmodule:: dnaMD Parameters bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. complexDna : :class:`dnaMD.DNA` Input :class:`dnaMD.DNA` instance for which deformation energy will be calculated. freeDnaFrames : list To select a trajectory segment of current (free) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. boundDnaFrames : list To select a trajectory segment of input (bound) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. helical : bool If ``helical=True``, elastic matrix for **helical base-step** parameters are calculated. Otherwise, by default, elastic matrix for **base-step** parameters are calculated. unit : str Unit of energy. Allowed units are: ``'kT', 'kJ/mol' and 'kcal/mol'``. which : str or list For which motions (degrees of freedom), energy should be calculated. It should be either a list containing terms listed below or"all" for all energy terms. Following keywords are available: * ``'full'`` : Use entire elastic matrix -- all parameters with their coupling * ``'diag'`` : Use diagonal of elastic matrix -- all motions but no coupling * ``'shift'`` or ``'x-disp'`` * ``'slide'`` or ``'y-idsp'`` * ``'rise'`` or ``'h-rise'`` * ``'tilt'`` or ``'inclination'`` * ``'roll'`` or ``'tip'`` * ``'twist'`` or ``'h-twist'`` outFile : str Output file in csv format. Returns ------- time : numpy.ndarray 1D array containing time values. energy : dict of numpy.ndarray Dictionary of 1D array of shape (nframes) containing energy terms requested for DNA. """
if helical: energyTerms = ['full', 'diag', 'x-disp', 'y-disp', 'h-rise', 'inclination', 'tip', 'h-twist'] else: energyTerms = ['full', 'diag', 'shift', 'slide', 'rise', 'tilt', 'roll', 'twist'] if isinstance(which, str): if which != 'all': raise ValueError('Either use "all" or use list of terms from this {0} list \n.'.format(energyTerms)) else: which = energyTerms elif isinstance(which, list): for key in which: if key not in energyTerms: raise ValueError('{0} is not a supported keyword.\n Use from the following list: \n{1}'.format( which, energyTerms)) else: raise ValueError('Either use "all" or use list of terms from this {0} list \n.'.format(energyTerms)) means, esMatrix = self.calculateLocalElasticity(bp, frames=freeDnaFrames, helical=helical, unit=unit) time, array = self.extractLocalParameters(complexDna, bp, frames=boundDnaFrames, helical=helical) # Initialize energy dictionary energyOut = OrderedDict() for key in which: energyOut[key] = [] for i in range(array[0].shape[0]): vec = array[:, i] diff = vec - means for key in which: t_energy = self._calcLocalEnergy(diff, esMatrix, key) energyOut[key].append(t_energy) for key in which: energyOut[key] = np.asarray(energyOut[key]) # Write output file if outFile is not None: with open(outFile, 'w') as fout: fout.write('#Time') for name in which: fout.write(', {0}'.format(name)) fout.write('\n') for t in range(len(time)): fout.write('{0:.3f}'.format(time[t])) for name in which: fout.write(', {0:.5f}'.format(energyOut[name][t])) fout.write('\n') return time, energyOut
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _calcLocalEnergy(self, diff, es, which): r"""Calculate local deformation energy using a difference vector. It is called in :meth:`dnaEY.getLocalDeformationEnergy` for energy calculation of each frame. Parameters diff : numpy.ndarray Array of difference between minimum and current parameter values. es : numpy.ndarray Elastic matrix. See in :meth:`dnaEY.calculateLocalElasticity` about elastic matrix. which : str For which type of motions, energy will be calculated. see ``which`` parameter in :meth:`dnaEY.getLocalDeformationEnergy` for keywords. Return ------ energy : float Deformation free energy value """
if which not in self.enLocalTypes: raise ValueError('{0} is not a supported energy keywords.\n Use any of the following: \n {1}'.format( which, self.enLocalTypes)) energy = None if which == 'full': temp = np.matrix(diff) energy = 0.5 * ((temp * es) * temp.T) energy = energy[0,0] if which == 'diag': energy = 0.5 * ((diff[0] ** 2 * es[0][0]) + (diff[1] ** 2 * es[1][1]) + (diff[2] ** 2 * es[2][2]) + (diff[3] ** 2 * es[3][3]) + (diff[4] ** 2 * es[4][4]) + (diff[5] ** 2 * es[5][5])) if which == 'shift' or which == 'x-disp': energy = 0.5 * (diff[0] ** 2 * es[0][0]) if which == 'slide' or which == 'y-disp': energy = 0.5 * (diff[1] ** 2 * es[1][1]) if which == 'rise' or which == 'h-rise': energy = 0.5 * (diff[2] ** 2 * es[2][2]) if which == 'tilt' or which == 'inclination': energy = 0.5 * (diff[3] ** 2 * es[3][3]) if which == 'roll' or which == 'tip': energy = 0.5 * (diff[4] ** 2 * es[4][4]) if which == 'twist' or which == 'h-twist': energy = 0.5 * (diff[5] ** 2 * es[5][5]) return energy
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_variables(href): """Return a list of variable names used in a URI template."""
patterns = [re.sub(r'\*|:\d+', '', pattern) for pattern in re.findall(r'{[\+#\./;\?&]?([^}]+)*}', href)] variables = [] for pattern in patterns: for part in pattern.split(","): if not part in variables: variables.append(part) return variables
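For reference, a brief usage sketch of the extractor above; the template strings are invented, and it assumes `extract_variables` (and its `re` import) is in scope.

# Illustrative RFC 6570-style templates; results follow from the regex-based parsing above.
print(extract_variables("/orders{?page,per_page}"))            # ['page', 'per_page']
print(extract_variables("/users/{user_id}/posts{/post_id*}"))  # ['user_id', 'post_id']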
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def url(self, **kwargs): """Returns a URL for the link with optional template expansion. If the link is marked as templated, the href will be expanded according to RFC6570, using template variables provided in the keyword arguments. If the href is a valid URI Template, but the link is not marked as templated, the href will not be expanded even if template variables are provided. """
if self.is_templated: return uritemplate.expand(self.template, kwargs) else: return self.template
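A minimal sketch of the expansion step that `url()` delegates to, using the uritemplate package directly; the template and variables here are invented.

import uritemplate

# RFC 6570 expansion, which is what url() performs when the link is templated.
print(uritemplate.expand("/orders{?page,size}", {"page": "3", "size": "50"}))
# -> /orders?page=3&size=50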
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_object(cls, o, base_uri): """Returns a new ``Link`` based on a JSON object or array. Arguments: - ``o``: a dictionary holding the deserialized JSON for the new ``Link``, or a ``list`` of such documents. - ``base_uri``: optional URL used as the basis when expanding relative URLs in the link. """
if isinstance(o, list): if len(o) == 1: return cls.from_object(o[0], base_uri) return [cls.from_object(x, base_uri) for x in o] return cls(o, base_uri)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def install_mediaroom_protocol(responses_callback, box_ip=None): """Install an asyncio protocol to process NOTIFY messages."""
from . import version _LOGGER.debug(version) loop = asyncio.get_event_loop() mediaroom_protocol = MediaroomProtocol(responses_callback, box_ip) sock = create_socket() await loop.create_datagram_endpoint(lambda: mediaroom_protocol, sock=sock) return mediaroom_protocol
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tune(self): """XML node representing tune."""
if self._node.get('activities'): tune = self._node['activities'].get('tune') if type(tune) is collections.OrderedDict: return tune elif type(tune) is list: return tune[0] return tune return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stopped(self): """Return if the stream is stopped."""
if self.tune and self.tune.get('@stopped'): return True if self.tune.get('@stopped') == 'true' else False else: raise PyMediaroomError("No information in <node> about @stopped")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def timeshift(self): """Return if the stream is a timeshift."""
if self.tune and self.tune.get('@src'): return True if self.tune.get('@src').startswith('timeshift') else False else: raise PyMediaroomError("No information in <node> about @src")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def recorded(self): """Return if the stream is a recording."""
if self.tune and self.tune.get('@src'): return True if self.tune.get('@src').startswith('mbr') else False else: raise PyMediaroomError("No information in <node> about @src")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def device_uuid(self): """Return device UUID."""
if self._device: return self._device return GEN_ID_FORMAT.format(self.src_ip)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def datagram_received(self, data, addr): """Datagram received callback."""
#_LOGGER.debug(data) if not self.box_ip or self.box_ip == addr[0]: self.responses(MediaroomNotify(addr, data))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def error_received(self, exception): """Datagram error callback."""
if exception is None: pass else: import pprint pprint.pprint(exception) _LOGGER.error('Error received: %s', exception)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def refresh_session(self, sessionkey, refresh_token=None):
    ''' Refresh Session Token '''
    if not refresh_token:
        refresh_token = sessionkey
    params = {
        'appkey' : self.API_KEY,
        'sessionkey' : sessionkey,
        'refresh_token': refresh_token
    }
    src = ''.join(["%s%s" % (k, v) for k, v in sorted(params.iteritems())]) + self.APP_SECRET
    params['sign'] = md5(src).hexdigest().upper()
    form_data = urllib.urlencode(params)
    rsp = requests.get('%s?%s'%(self.REFRESH_TOKEN_URL, form_data))
    rsp = json.loads(rsp.content)
    if 'error' in rsp:
        raise TOPException(rsp['error'], rsp['error_description'])
    rsp['re_expires_in'] = int(rsp['re_expires_in'])
    rsp['expires_in'] = int(rsp['expires_in'])
    rsp['session'] = rsp['top_session']
    del rsp['top_session']
    return rsp
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pause(self): """ Pauses the clock to continue running later. Saves the duration of the current interval in the previous_intervals list."""
if self.status == RUNNING: self.status = PAUSED self.previous_intervals.append(time.time() - self.interval_start) self.current_interval_duration = 0.0 elif self.status == PAUSED: self.interval_start = time.time() self.status = RUNNING
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def start(self): """ Starts the clock from 0. Uses a separate thread to handle the timing functionalities. """
if not hasattr(self,"thread") or not self.thread.isAlive(): self.thread = threading.Thread(target=self.__run) self.status = RUNNING self.reset() self.thread.start() else: print("Clock already running!")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __run(self): """ Internal function that is run in a separate thread. Do not call directly. """
self.interval_start = time.time()

while self.status != STOPPED:
    if self.status == RUNNING:
        self.current_interval_duration = time.time() - self.interval_start

    # If max_duration is set, stop the clock if it is reached
    if self.max_duration and self.time > self.max_duration:
        self.status = STOPPED

    # One refresh per millisecond seems enough
    time.sleep(0.001)
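The `time` property used in the loop above is not part of this excerpt; a hypothetical sketch consistent with the bookkeeping (finished intervals plus the interval currently running) could look like this.

@property
def time(self):
    # Assumed shape, not the original implementation: total elapsed time is the
    # sum of all completed intervals plus the duration of the running interval.
    return sum(self.previous_intervals) + self.current_interval_duration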
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def current_frame(self): """ The current frame number that should be displayed."""
if not self.__fps: raise RuntimeError("fps not set so current frame number cannot be" " calculated") else: return int(self.__fps * self.time)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fps(self,value): """ Sets the frames per second of the current movie the clock is used for. Parameters value : float The fps value. """
if not value is None: if not type(value) == float: raise ValueError("fps needs to be specified as a float") if value<1.0: raise ValueError("fps needs to be greater than 1.0") self.__fps = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def max_duration(self,value): """ Sets the value of max duration Parameters value : float The value for max_duration Raises ------ TypeError If max_duration is not a number. ValueError If max_duration is smaller than 0. """
if not value is None: if not type(value) in [float, int]: raise TypeError("max_duration needs to be specified as a number") if value<1.0: raise ValueError("max_duration needs to be greater than 1.0") value = float(value) self.__max_duration = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_shared(fname, encoding="utf-8"): """ Loads the string data from a text file that was packaged as a data file in the distribution. Uses the setuptools ``pkg_resources.resource_string`` function as a fallback, as installing Dose with it directly instead of using wheel/pip would store the setup.py ``data_files`` otherwhere. For more information, see this: https://github.com/pypa/setuptools/issues/130 """
relative_path = "share/dose/v{0}/{1}".format(__version__, fname) prefixed_path = os.path.join(sys.prefix, *relative_path.split("/")) # Look for the file directly on sys.prefix try: return "\n".join(read_plain_text(prefixed_path, encoding=encoding)) except IOError: pass # Homebrew (Mac OS X) stores the data in Cellar, a directory in # the system prefix. Calling "brew --prefix" returns that prefix, # and pip installs the shared resources there cellar_index = sys.prefix.find("/Cellar/") if cellar_index != -1: # Found! outside_cellar_path = os.path.join(sys.prefix[:cellar_index], *relative_path.split("/")) try: return "\n".join(read_plain_text(outside_cellar_path, encoding=encoding)) except IOError: pass # Fallback: look for the file using setuptools (perhaps it's still # compressed inside an egg file or stored otherwhere) from pkg_resources import Requirement, resource_string return resource_string(Requirement.parse("dose"), relative_path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_agents(self, state_id=None, limit_neighbors=False): """Returns list of agents based on their state and connectedness Parameters state_id : int, str, or array-like, optional Used to select agents that have the same specified "state". If state = None, returns all agents regardless of its current state limit_neighbors : bool, optional Returns agents based on whether they are connected to this agent or not. If limit_neighbors = False, returns all agents whether or not it is directly connected to this agent """
if limit_neighbors: agents = self.global_topology.neighbors(self.id) else: agents = self.get_all_nodes() if state_id is None: return [self.global_topology.node[_]['agent'] for _ in agents] # return all regardless of state else: return [self.global_topology.node[_]['agent'] for _ in agents if self.global_topology.node[_]['agent'].state['id'] == state_id]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_node(self, agent_type=None, state=None, name='network_process', **state_params): """Add a new node to the current network Parameters agent_type : NetworkAgent subclass Agent in the new node will be instantiated using this agent class state : object State of the Agent, this may be an integer or string or any other object name : str, optional Descriptive name of the agent state_params : keyword arguments, optional Key-value pairs of other state parameters for the agent Return ------ int Agent ID of the new node """
agent_id = int(len(self.global_topology.nodes())) agent = agent_type(self.env, agent_id=agent_id, state=state, name=name, **state_params) self.global_topology.add_node(agent_id, {'agent': agent}) return agent_id
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_edge(self, agent_id1, agent_id2, edge_attr_dict=None, *edge_attrs): """ Add an edge between agent_id1 and agent_id2. agent_id1 and agent_id2 correspond to Networkx node IDs. This is a wrapper for the Networkx.Graph method `.add_edge`. Both agent_id1 and agent_id2 must already be present in the graph; a ValueError is raised otherwise. Edge attributes can be specified using keywords or by passing a dictionary with key-value pairs. Parameters agent_id1, agent_id2 : nodes Nodes (as defined by Networkx) can be any hashable type except NoneType edge_attr_dict : dictionary, optional (default = no attributes) Dictionary of edge attributes. Assigns values to specified keyword attributes and overwrites them if already present. edge_attrs : keyword arguments, optional Edge attributes such as labels can be assigned directly using keyword arguments """
if agent_id1 in self.global_topology.nodes(data=False): if agent_id2 in self.global_topology.nodes(data=False): self.global_topology.add_edge(agent_id1, agent_id2, edge_attr_dict=edge_attr_dict, *edge_attrs) else: raise ValueError('\'agent_id2\'[{}] not in list of existing agents in the network'.format(agent_id2)) else: raise ValueError('\'agent_id1\'[{}] not in list of existing agents in the network'.format(agent_id1))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_request(self, conn, method, url, timeout=_Default, **httplib_request_kw): """ Perform a request on a given httplib connection object taken from our pool. """
self.num_requests += 1 if timeout is _Default: timeout = self.timeout conn.request(method, url, **httplib_request_kw) conn.sock.settimeout(timeout) httplib_response = conn.getresponse() log.debug("\"%s %s %s\" %s %s" % (method, url, conn._http_vsn_str, # pylint: disable-msg=W0212 httplib_response.status, httplib_response.length)) return httplib_response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_same_host(self, url): """ Check if the given ``url`` is a member of the same host as this connection pool. """
# TODO: Add optional support for socket.gethostbyname checking. return (url.startswith('/') or get_host(url) == (self.scheme, self.host, self.port))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def can_document_member(cls, member, membername, isattr, parent): """Called to see if a member can be documented by this documenter."""
if not super().can_document_member(member, membername, isattr, parent): return False return iscoroutinefunction(member)
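The extra test this documenter adds is the standard coroutine-function check; a small standalone illustration (the function names are made up).

from asyncio import iscoroutinefunction

async def fetch_data():
    return 42

def plain():
    return 42

print(iscoroutinefunction(fetch_data))  # True  -> handled by this documenter
print(iscoroutinefunction(plain))       # False -> left to other documenters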
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(context, request, key=None): """Return all registry items if key is None, otherwise try to fetch the registry key """
registry_records = api.get_registry_records_by_keyword(key) # Prepare batch size = req.get_batch_size() start = req.get_batch_start() batch = api.make_batch(registry_records, size, start) return { "pagesize": batch.get_pagesize(), "next": batch.make_next_url(), "previous": batch.make_prev_url(), "page": batch.get_pagenumber(), "pages": batch.get_numpages(), "count": batch.get_sequence_length(), "items": [registry_records], "url": api.url_for("senaite.jsonapi.v1.registry", key=key), }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def merge_kwargs(local_kwarg, default_kwarg): """Merges kwarg dictionaries. If a local key in the dictionary is set to None, it will be removed. """
if default_kwarg is None: return local_kwarg if isinstance(local_kwarg, basestring): return local_kwarg if local_kwarg is None: return default_kwarg # Bypass if not a dictionary (e.g. timeout) if not hasattr(default_kwarg, 'items'): return local_kwarg # Update new values. kwargs = default_kwarg.copy() kwargs.update(local_kwarg) # Remove keys that are set to None. for (k,v) in local_kwarg.items(): if v is None: del kwargs[k] return kwargs
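A quick usage sketch of the merge semantics with invented values; note the ``basestring`` check means the function targets Python 2.

defaults = {'timeout': 5, 'headers': {'Accept': 'application/json'}}
local = {'timeout': None, 'verify': False}

# 'verify' is added, 'headers' is kept from the defaults, and 'timeout' is
# removed because its local value is None.
print(merge_kwargs(local, defaults))
# -> {'verify': False, 'headers': {'Accept': 'application/json'}}  (key order may vary)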
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_simulation(self): """Runs the complete simulation"""
print('Starting simulations...') for i in range(self.num_trials): print('---Trial {}---'.format(i)) self.run_trial(i) print('Simulation completed.')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_network_agents(self): """Initializes agents on nodes of graph and registers them to the SimPy environment"""
for i in self.env.G.nodes(): self.env.G.node[i]['agent'] = self.agent_type(environment=self.env, agent_id=i, state=deepcopy(self.initial_states[i]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def maxchord(A, ve = None): """ Maximal chordal subgraph of sparsity graph. Returns a lower triangular sparse matrix which is the projection of :math:`A` on a maximal chordal subgraph and a perfect elimination order :math:`p`. Only the lower triangular part of :math:`A` is accessed. The optional argument `ve` is the index of the last vertex to be eliminated (the default value is `n-1`). If :math:`A` is chordal, then the matrix returned is equal to :math:`A`. :param A: :py:class:`spmatrix` :param ve: integer between 0 and `A.size[0]`-1 (optional) .. seealso:: P. M. Dearing, D. R. Shier, D. D. Warner, `Maximal chordal subgraphs <http://dx.doi.org/10.1016/0166-218X(88)90075-3>`_, Discrete Applied Mathematics, 20:3, 1988, pp. 181-190. """
n = A.size[0] assert A.size[1] == n, "A must be a square matrix" assert type(A) is spmatrix, "A must be a sparse matrix" if ve is None: ve = n-1 else: assert type(ve) is int and 0<=ve<n,\ "ve must be an integer between 0 and A.size[0]-1" As = symmetrize(A) cp,ri,val = As.CCS # permutation vector p = matrix(0,(n,1)) # weight array w = matrix(0,(n,1)) max_w = 0 S = [list(range(ve))+list(range(ve+1,n))+[ve]] + [[] for i in range(n-1)] C = [set() for i in range(n)] E = [[] for i in range(n)] # edge list V = [[] for i in range(n)] # num. values for i in range(n-1,-1,-1): # select next node to number while True: if len(S[max_w]) > 0: v = S[max_w].pop() if w[v] >= 0: break else: max_w -= 1 p[i] = v w[v] = -1 # set w[v] = -1 to mark that node v has been numbered # loop over unnumbered neighbors of node v for ii in range(cp[v],cp[v+1]): u = ri[ii] d = val[ii] if w[u] >= 0: if C[u].issubset(C[v]): C[u].update([v]) w[u] += 1 S[w[u]].append(u) # bump up u to S[w[u]] max_w = max(max_w,w[u]) # update max deg. E[min(u,v)].append(max(u,v)) V[min(u,v)].append(d) elif u == v: E[u].append(u) V[u].append(d) # build adjacency matrix of reordered max. chordal subgraph Am = spmatrix([d for d in chain.from_iterable(V)],[i for i in chain.from_iterable(E)],\ [i for i in chain.from_iterable([len(Ej)*[j] for j,Ej in enumerate(E)])],(n,n)) return Am,p
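A small usage sketch with an invented 4x4 pattern whose sparsity graph is a chordless 4-cycle, so one edge has to be dropped to obtain a chordal subgraph; it assumes cvxopt's ``spmatrix`` (which the function expects) and the module's other imports are available.

from cvxopt import spmatrix

# Lower triangle: diagonal plus edges (1,0), (2,0), (3,1), (3,2) -- a 4-cycle.
I = [0, 1, 2, 3, 1, 2, 3, 3]
J = [0, 1, 2, 3, 0, 0, 1, 2]
V = [4.0, 4.0, 4.0, 4.0, 1.0, 1.0, 1.0, 1.0]
A = spmatrix(V, I, J, (4, 4))

Am, p = maxchord(A)
# Am contains the entries of A restricted to a maximal chordal subgraph
# (one cycle edge is dropped) and p is a perfect elimination order for it.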
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cli(config, in_file, out_file, verbose): """Main interface to generate XML documents from custom dictionaries using legal XSD files complying with legal documents in all countries around the world. """
config.in_file = in_file
config.out_file = out_file
config.verbose = verbose
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cfdv32mx(config): """Format cfdi v3.2 for Mexico. \b File where the files will be written document.xml. cfdicli --in_file /path/to/yout/json/documnt.json cfdv32mx \b File where the files will be written from document.json. cfdicli --out_file ./document.xml cfdv32mx """
# TODO: look for a secure option for eval. # Or simply the CLI only should manage json? # TODO: Implement json option also. dict_input = eval(config.in_file.read()) invoice = cfdv32.get_invoice(dict_input) if invoice.valid: config.out_file.write(invoice.document) config.out_file.flush() click.echo('Document %s has been created.' % config.out_file.name) else: click.echo(invoice.ups.message)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_cache_key(key_prefix): """Make cache key from prefix Borrowed from Flask-Cache extension """
if callable(key_prefix): cache_key = key_prefix() elif '%s' in key_prefix: cache_key = key_prefix % request.path else: cache_key = key_prefix cache_key = cache_key.encode('utf-8') return cache_key
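Two of the three branches can be exercised without a Flask request context; a small sketch with invented prefixes (shown for Python 3, where ``encode`` returns bytes).

# Plain prefix without '%s': used as-is, then UTF-8 encoded.
print(_make_cache_key("view/registry"))         # b'view/registry'

# Callable prefix: called first, then encoded.
print(_make_cache_key(lambda: "view/dynamic"))  # b'view/dynamic'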
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_frame(self, in_data, frame_count, time_info, status): """ Callback function for the pyaudio stream. Don't use directly. """
while self.keep_listening: try: frame = self.queue.get(False, timeout=queue_timeout) return (frame, pyaudio.paContinue) except Empty: pass return (None, pyaudio.paComplete)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def sort_dict(self, data, key): '''Sort a list of dictionaries by dictionary key''' return sorted(data, key=itemgetter(key)) if data else []
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _push_entry(self, key): "Push entry onto our access log, invalidate the old entry if exists." self._invalidate_entry(key) new_entry = AccessEntry(key) self.access_lookup[key] = new_entry self.access_log_lock.acquire() self.access_log.appendleft(new_entry) self.access_log_lock.release()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _prune_entries(self, num): "Pop entries from our access log until we popped ``num`` valid ones." while num > 0: self.access_log_lock.acquire() p = self.access_log.pop() self.access_log_lock.release() if not p.is_valid: continue # Invalidated entry, skip dict.pop(self, p.key, None) self.access_lookup.pop(p.key, None) num -= 1
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _prune_invalidated_entries(self): "Rebuild our access_log without the invalidated entries." self.access_log_lock.acquire() self.access_log = deque(e for e in self.access_log if e.is_valid) self.access_log_lock.release()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _get_ordered_access_keys(self): "Return ordered access keys for inspection. Used for testing." self.access_log_lock.acquire() r = [e.key for e in self.access_log if e.is_valid] self.access_log_lock.release() return r
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def calc_scaled_res(self, screen_res, image_res): """Calculate appropriate texture size. Calculate size of the required texture so that it fills the window, but retains the movie's original aspect ratio. Parameters screen_res : tuple Display window size/Resolution image_res : tuple Image width and height Returns ------- tuple width and height of image scaled to window/screen """
rs = screen_res[0]/float(screen_res[1]) ri = image_res[0]/float(image_res[1]) if rs > ri: return (int(image_res[0] * screen_res[1]/image_res[1]), screen_res[1]) else: return (screen_res[0], int(image_res[1]*screen_res[0]/image_res[0]))
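A worked example of the scaling decision with illustrative resolutions.

# Window 1024x768 -> rs = 1024/768 ~ 1.33; clip 1280x720 -> ri = 1280/720 ~ 1.78.
# Since rs < ri the else-branch is taken: the width is pinned to the window and
# the height scales proportionally: (1024, int(720 * 1024/1280)) == (1024, 576).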
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_media(self, vidSource): """ Loads a video. Parameters vidSource : str The path to the video file """
if not os.path.exists(vidSource): print("File not found: " + vidSource) pygame.display.quit() pygame.quit() sys.exit(1) self.decoder.load_media(vidSource) self.decoder.loop = self.loop pygame.display.set_caption(os.path.split(vidSource)[1]) self.vidsize = self.decoder.clip.size self.destsize = self.calc_scaled_res(self.windowSize, self.vidsize) self.vidPos = ((self.windowSize[0] - self.destsize[0]) / 2, (self.windowSize[1] - self.destsize[1]) / 2) self.__textureSetup() if(self.decoder.audioformat): if self.soundrenderer == "pygame": from mediadecoder.soundrenderers import SoundrendererPygame self.audio = SoundrendererPygame(self.decoder.audioformat) elif self.soundrenderer == "pyaudio": from mediadecoder.soundrenderers.pyaudiorenderer import SoundrendererPyAudio self.audio = SoundrendererPyAudio(self.decoder.audioformat) elif self.soundrenderer == "sounddevice": from mediadecoder.soundrenderers.sounddevicerenderer import SoundrendererSounddevice self.audio = SoundrendererSounddevice(self.decoder.audioformat) self.decoder.set_audiorenderer(self.audio)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __texUpdate(self, frame): """ Update the texture with the newly supplied frame. """
# Retrieve buffer from videosink if self.texture_locked: return self.buffer = frame self.texUpdated = True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def play(self): """ Starts playback. """
# Signal player to start video playback self.paused = False # Start listening for incoming audio frames if self.decoder.audioformat: self.audio.start() self.decoder.play() # While video is playing, render frames while self.decoder.status in [mediadecoder.PLAYING, mediadecoder.PAUSED]: texture_update_time = 0 if self.texUpdated: t1 = time.time() # Update texture self.texture_locked = True glTexSubImage2D( GL_TEXTURE_2D, 0, 0, 0, self.vidsize[0], self.vidsize[1], GL_RGB, GL_UNSIGNED_BYTE, self.buffer) self.texture_locked = False texture_update_time = int((time.time()-t1)*1000) self.texUpdated = False # Draw the texture to the back buffer t1 = time.time() self.__drawFrame() draw_time = (time.time()-t1)*1000 # Flip the buffer to show frame to screen t1 = time.time() pygame.display.flip() flip_time = (time.time()-t1)*1000 logger.debug("================== Frame {} ========================" "".format(self.decoder.current_frame_no)) if texture_update_time: logger.debug("Texture updated in {0} ms".format(texture_update_time)) logger.debug("Texture drawn in {0} ms".format(draw_time)) logger.debug("Screen flipped in {0} ms".format(flip_time)) logger.debug("-----------------------------------------------------") logger.debug("Total: {} ms".format(texture_update_time+draw_time+flip_time)) for e in pygame.event.get(): if e.type == pygame.QUIT: self.stop() if e.type == pygame.KEYDOWN: # Quitting if e.key == pygame.K_ESCAPE: self.stop() # Pausing elif e.key == pygame.K_SPACE: self.pause() # Seeking elif e.key == pygame.K_RIGHT: new_time = min( self.decoder.current_playtime + 10, self.decoder.duration) self.decoder.seek(new_time) elif e.key == pygame.K_LEFT: new_time = max( self.decoder.current_playtime - 10, 0) self.decoder.seek(new_time) pygame.event.pump() # Prevent freezing of screen while dragging # Without this sleep, the video rendering threard goes haywire... time.sleep(0.005) if self.decoder.audioformat: self.audio.close_stream() pygame.quit()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pause(self): """ Pauses playback. """
if self.decoder.status == mediadecoder.PAUSED: self.decoder.pause() self.paused = False elif self.decoder.status == mediadecoder.PLAYING: self.decoder.pause() self.paused = True else: print("Player not in pausable state")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fg(color): """ Foreground color formatter function factory. Each function casts from a unicode string to a colored bytestring with the respective foreground color and foreground reset ANSI escape codes. You can also use the ``fg.color`` or ``fg[color]`` directly as attributes/items. The colors are the names of the ``colorama.Fore`` attributes (case insensitive). For more information, see: https://pypi.python.org/pypi/colorama https://en.wikipedia.org/wiki/ANSI_escape_code#Colors """
ansi_code = [getattr(colorama.Fore, color.upper()), colorama.Fore.RESET] return lambda msg: msg.join(ansi_code)
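A short usage sketch (the message text is invented); ``colorama.init()`` is only strictly needed on Windows consoles.

import colorama
colorama.init()

red = fg("red")               # formatter bound to colorama.Fore.RED
print(red(u"FAIL: 3 tests"))  # message wrapped in the RED ... RESET escape codes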
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clog(color): """Same as ``log``, but this one centralizes the message first."""
logger = log(color) return lambda msg: logger(centralize(msg).rstrip())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def retrieve_width(self, signum=None, frame=None): """ Stores the terminal width into ``self.width``, if possible. This function is also the SIGWINCH event handler. """
for method_name, args in self.strategies: method = getattr(self, "from_" + method_name) width = method(*args) if width and width > 0: self.width = width break # Found! os.environ["COLUMNS"] = str(self.width)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dump_part(part, total_segments=None): """ 'part' may be the hash_key if we are dumping just a few hash_keys - else it will be the segment number """
try: connection = Connection(host=config['host'], region=config['region']) filename = ".".join([config['table_name'], str(part), "dump"]) if config['compress']: opener = gzip.GzipFile filename += ".gz" else: opener = open dumper = BatchDumper(connection, config['table_name'], config['capacity'], part, total_segments) with opener(filename, 'w') as output: while dumper.has_items: items = dumper.get_items() for item in items: output.write(json.dumps(item)) output.write("\n") output.flush() config['queue'].put(len(items)) config['queue'].put('complete') except Exception as e: print('Unhandled exception: {0}'.format(e))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def hessian(L, Y, U, adj = False, inv = False, factored_updates = False): """ Supernodal multifrontal Hessian mapping. The mapping

.. math::
     \mathcal H_X(U) = P(X^{-1}UX^{-1})

is the Hessian of the log-det barrier at a positive definite chordal matrix :math:`X`, applied to a symmetric chordal matrix :math:`U`. The Hessian operator can be factored as

.. math::
     \mathcal H_X(U) = \mathcal G_X^{\mathrm adj}( \mathcal G_X(U) )

where the mappings on the right-hand side are adjoint mappings that map chordal symmetric matrices to chordal symmetric matrices. This routine evaluates the mapping :math:`G_X` and its adjoint :math:`G_X^{\mathrm adj}` as well as the corresponding inverse mappings. The inputs `adj` and `inv` control the action as follows:

+--------------------------------------------------+--------+-------+
| Action                                           | `adj`  | `inv` |
+==================================================+========+=======+
| :math:`U = \mathcal G_X(U)`                      | False  | False |
+--------------------------------------------------+--------+-------+
| :math:`U = \mathcal G_X^{\mathrm adj}(U)`        | True   | False |
+--------------------------------------------------+--------+-------+
| :math:`U = \mathcal G_X^{-1}(U)`                 | False  | True  |
+--------------------------------------------------+--------+-------+
| :math:`U = (\mathcal G_X^{\mathrm adj})^{-1}(U)` | True   | True  |
+--------------------------------------------------+--------+-------+

If `adj` is None, the full Hessian :math:`\mathcal H_X` (or, when `inv` is True, its inverse) is applied. The input argument :math:`L` is the Cholesky factor of :math:`X`. The input argument :math:`Y` is the projected inverse of :math:`X`. The input argument :math:`U` is either a chordal matrix (a :py:class:`cspmatrix`) or a list of chordal matrices with the same sparsity pattern as :math:`L` and :math:`Y`. The optional argument `factored_updates` can be used to enable (if True) or disable (if False) updating of intermediate factorizations. :param L: :py:class:`cspmatrix` (factor) :param Y: :py:class:`cspmatrix` :param U: :py:class:`cspmatrix` or list of :py:class:`cspmatrix` objects :param adj: boolean :param inv: boolean :param factored_updates: boolean """
assert L.symb == Y.symb, "Symbolic factorization mismatch" assert isinstance(L, cspmatrix) and L.is_factor is True, "L must be a cspmatrix factor" assert isinstance(Y, cspmatrix) and Y.is_factor is False, "Y must be a cspmatrix" if isinstance(U, cspmatrix): assert U.is_factor is False,\ "U must be a cspmatrix or a list of cbsmatrices" U = [U] else: for Ut in U: assert Ut.symb == L.symb, "Symbolic factorization mismatch" assert isinstance(Ut, cspmatrix) and Ut.is_factor is False,\ "U must be a cspmatrix or a list of cbsmatrices" if adj is False and inv is False: __Y2K(L, U, inv = inv) __scale(L, Y, U, inv = inv, adj = adj, factored_updates = factored_updates) elif adj is True and inv is False: __scale(L, Y, U, inv = inv, adj = adj, factored_updates = factored_updates) __M2T(L, U, inv = inv) elif adj is True and inv is True: __M2T(L, U, inv = inv) __scale(L, Y, U, inv = inv, adj = adj, factored_updates = factored_updates) elif adj is False and inv is True: __scale(L, Y, U, inv = inv, adj = adj, factored_updates = factored_updates) __Y2K(L, U, inv = inv) elif adj is None and inv is False: __Y2K(L, U, inv = inv) __scale(L, Y, U, inv = inv, adj = False, factored_updates = factored_updates) __scale(L, Y, U, inv = inv, adj = True, factored_updates = factored_updates) __M2T(L, U, inv = inv) elif adj is None and inv is True: __M2T(L, U, inv = inv) __scale(L, Y, U, inv = inv, adj = True, factored_updates = factored_updates) __scale(L, Y, U, inv = inv, adj = False, factored_updates = factored_updates) __Y2K(L, U, inv = inv) return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def frame_size(self): """The byte size of a single frame of this format."""
if self.sample_type == SampleType.S16NativeEndian: # Sample size is 2 bytes return self.sample_size * self.channels else: raise ValueError('Unknown sample type: %d', self.sample_type)
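A worked example of the arithmetic for the common stereo case (the concrete values are illustrative).

# With SampleType.S16NativeEndian each sample is 2 bytes, so for a format with
# sample_size == 2 and channels == 2 a single frame occupies 2 * 2 = 4 bytes.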
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_fields(store_name, field_names): """ A class-decorator that creates layout managers with a set of named fields. """
def decorate(cls): def _add(index, name): def _set_dir(self, value): getattr(self, store_name)[index] = value def _get_dir(self): return getattr(self, store_name)[index] setattr(cls, name, property(_get_dir, _set_dir)) for index, field_name in enumerate(field_names): _add(index, field_name) return cls return decorate
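A minimal, self-contained sketch of the decorator in action; the class and field names are invented.

@add_fields("_sides", ["top", "right", "bottom", "left"])
class Margins(object):
    def __init__(self):
        self._sides = [0, 0, 0, 0]

m = Margins()
m.top = 5                # writes _sides[0] through the generated property
print(m.left, m._sides)  # 0 [5, 0, 0, 0]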
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_smallest_dimensions(self, data): """A utility method to return the minimum size needed to fit all the elements in."""
min_width = 0 min_height = 0 for element in self.elements: if not element: continue size = element.get_minimum_size(data) min_width = max(min_width, size.x) min_height = max(min_height, size.y) return datatypes.Point(min_width, min_height)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set(self, instance, value, **kw): """Converts the value into a DateTime object before setting. """
if value: try: value = DateTime(value) except SyntaxError: logger.warn("Value '{}' is not a valid DateTime string" .format(value)) return False self._set(instance, value, **kw)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_filename(self, instance): """Get the filename """
filename = self.field.getFilename(instance) if filename: return filename fieldname = self.get_field_name() content_type = self.get_content_type(instance) extension = mimetypes.guess_extension(content_type) return fieldname + extension
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set(self, instance, value, **kw): """Decodes the base64 value and sets the file object """
value = str(value).decode("base64") # handle the filename if "filename" not in kw: logger.debug("FileFieldManager::set: No Filename detected " "-> using title or id") kw["filename"] = kw.get("id") or kw.get("title") self._set(instance, value, **kw)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set(self, instance, value, **kw): # noqa """Set the value of the reference field """
ref = [] # The value is an UID if api.is_uid(value): ref.append(api.get_object_by_uid(value)) # The value is already an object if api.is_at_content(value): ref.append(value) # The value is a dictionary # -> handle it like a catalog query if u.is_dict(value): results = api.search(portal_type=self.allowed_types, **value) ref = map(api.get_object, results) # The value is a list if u.is_list(value): for item in value: # uid if api.is_uid(item): ref.append(api.get_object_by_uid(item)) continue # object if api.is_at_content(item): ref.append(api.get_object(item)) continue # path if api.is_path(item): ref.append(api.get_object_by_path(item)) continue # dict (catalog query) if u.is_dict(item): # If there is UID of objects, just use it. uid = item.get('uid', None) if uid: obj = api.get_object_by_uid(uid) ref.append(obj) else: results = api.search(portal_type=self.allowed_types, **item) objs = map(api.get_object, results) ref.extend(objs) continue # Plain string # -> do a catalog query for title if isinstance(item, basestring): results = api.search(portal_type=self.allowed_types, title=item) objs = map(api.get_object, results) ref.extend(objs) continue # The value is a physical path if api.is_path(value): ref.append(api.get_object_by_path(value)) # Handle non multi valued fields if not self.multi_valued: if len(ref) > 1: raise ValueError("Multiple values given for single valued " "field {}".format(repr(self.field))) else: ref = ref[0] return self._set(instance, ref, **kw)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_proxy_field(self, instance): """Get the proxied field of this field """
proxy_object = self.get_proxy_object(instance) if not proxy_object: return None return proxy_object.getField(self.name)