Dataset columns:
  text_prompt — string, lengths 100 – 17.7k
  code_prompt — string, lengths 7 – 9.86k
<SYSTEM_TASK:> Send pulse to kick the cash drawer <END_TASK> <USER_TASK:> Description: def cashdraw(self, pin): """ Send pulse to kick the cash drawer """
if pin == 2:
    self._raw(CD_KICK_2)
elif pin == 5:
    self._raw(CD_KICK_5)
else:
    raise CashDrawerError()
<SYSTEM_TASK:> Search device on USB tree and set it as escpos device <END_TASK> <USER_TASK:> Description: def open(self): """ Search device on USB tree and set it as escpos device """
self.device = usb.core.find(idVendor=self.idVendor, idProduct=self.idProduct)
if self.device is None:
    raise NoDeviceError()
try:
    if self.device.is_kernel_driver_active(self.interface):
        self.device.detach_kernel_driver(self.interface)
    self.device.set_configuration()
    usb.util.claim_interface(self.device, self.interface)
except usb.core.USBError as e:
    raise HandleDeviceError(e)
<SYSTEM_TASK:> Open TCP socket and set it as escpos device <END_TASK> <USER_TASK:> Description: def open(self): """ Open TCP socket and set it as escpos device """
self.device = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.device.connect((self.host, self.port))
if self.device is None:
    print("Could not open socket for %s" % self.host)
<SYSTEM_TASK:> Generate pinyin for chars; if a char is not a Chinese character, <END_TASK> <USER_TASK:> Description: def _pinyin_generator(chars, format): """Generate pinyin for chars; if a char is not a Chinese character, it is returned as-is. Chars must be a unicode list. """
for char in chars:
    key = "%X" % ord(char)
    pinyin = pinyin_dict.get(key, char)
    tone = pinyin_tone.get(key, 0)
    if tone == 0 or format == "strip":
        pass
    elif format == "numerical":
        pinyin += str(tone)
    elif format == "diacritical":
        # Find first vowel -- where we should put the diacritical mark
        vowels = itertools.chain((c for c in pinyin if c in "aeo"),
                                 (c for c in pinyin if c in "iuv"))
        vowel = pinyin.index(next(vowels)) + 1
        pinyin = pinyin[:vowel] + tonemarks[tone] + pinyin[vowel:]
    else:
        error = "Format must be one of: numerical/diacritical/strip"
        raise ValueError(error)
    yield unicodedata.normalize('NFC', pinyin)
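A minimal, self-contained sketch of the vowel-selection rule above; the tonemarks table here is a hypothetical stand-in for the module's own lookup (tone number to Unicode combining mark):

import itertools
import unicodedata

# Hypothetical stand-in for the module's tonemarks table
tonemarks = {1: u'\u0304', 2: u'\u0301', 3: u'\u030c', 4: u'\u0300'}

def mark(pinyin, tone):
    # Prefer a/e/o; otherwise fall back to i/u/v, mirroring the generator above
    vowels = itertools.chain((c for c in pinyin if c in "aeo"),
                             (c for c in pinyin if c in "iuv"))
    pos = pinyin.index(next(vowels)) + 1
    return unicodedata.normalize('NFC', pinyin[:pos] + tonemarks[tone] + pinyin[pos:])

print(mark(u"zhong", 1))  # zhōng
print(mark(u"hao", 3))    # hǎo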
<SYSTEM_TASK:> Return pinyin of string, the string must be unicode <END_TASK> <USER_TASK:> Description: def get(s, delimiter='', format="diacritical"): """Return pinyin of string, the string must be unicode """
return delimiter.join(_pinyin_generator(u(s), format=format))
<SYSTEM_TASK:> Return the 1st char of pinyin of string, the string must be unicode <END_TASK> <USER_TASK:> Description: def get_initial(s, delimiter=' '): """Return the 1st char of pinyin of string, the string must be unicode """
initials = (p[0] for p in _pinyin_generator(u(s), format="strip"))
return delimiter.join(initials)
<SYSTEM_TASK:> Generates a key used to set a status on a field <END_TASK> <USER_TASK:> Description: def get_status_key(self, instance): """Generates a key used to set a status on a field"""
key_id = "inst_%s" % id(instance) if instance.pk is None else instance.pk
return "%s.%s-%s-%s" % (instance._meta.app_label, get_model_name(instance),
                        key_id, self.field.name)
<SYSTEM_TASK:> Retrieves a status of a field from cache. Fields in state 'error' and <END_TASK> <USER_TASK:> Description: def get_status(self, instance): """Retrieves a status of a field from cache. Fields in state 'error' and 'complete' will not retain the status after the call. """
status_key, status = self._get_status(instance)
if status['state'] in ['complete', 'error']:
    cache.delete(status_key)
return status
<SYSTEM_TASK:> Sets the field status for up to 5 minutes. <END_TASK> <USER_TASK:> Description: def set_status(self, instance, status): """Sets the field status for up to 5 minutes."""
status_key = self.get_status_key(instance)
cache.set(status_key, status, timeout=300)
<SYSTEM_TASK:> Returns output mode. If `mode` is not set it will try to guess the best <END_TASK> <USER_TASK:> Description: def get_mode(self, old_mode=None): """Returns output mode. If `mode` is not set it will try to guess the best mode, or the next best mode compared to the old mode """
if self.mode is not None:
    return self.mode
assert self.can_write, "This format does not have a supported output mode."
if old_mode is None:
    return self.output_modes[0]
if old_mode in self.output_modes:
    return old_mode
# now let's get the best mode available from the supported ones
try:
    idx = PILLOW_MODES.index(old_mode)
except ValueError:
    # maybe some unknown or uncommon mode
    return self.output_modes[0]
for mode in PILLOW_MODES[idx + 1:]:
    if mode in self.output_modes:
        return mode
# since there is no better one, let's look for the closest one in the opposite direction
opposite = PILLOW_MODES[:idx]
opposite.reverse()
for mode in opposite:
    if mode in self.output_modes:
        return mode
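The nearest-mode search can be illustrated in isolation; the mode ladder and supported list below are illustrative stand-ins, not the library's actual tables:

# Illustrative mode ladder and a format that only supports a subset
PILLOW_MODES = ['1', 'L', 'P', 'RGB', 'RGBA', 'CMYK']
OUTPUT_MODES = ['L', 'RGB']

def nearest_mode(old_mode):
    if old_mode in OUTPUT_MODES:
        return old_mode
    try:
        idx = PILLOW_MODES.index(old_mode)
    except ValueError:
        return OUTPUT_MODES[0]
    # Walk "up" first, then back "down", exactly like get_mode above
    for mode in PILLOW_MODES[idx + 1:]:
        if mode in OUTPUT_MODES:
            return mode
    for mode in reversed(PILLOW_MODES[:idx]):
        if mode in OUTPUT_MODES:
            return mode

print(nearest_mode('P'))     # RGB (first supported mode above 'P')
print(nearest_mode('CMYK'))  # RGB (nothing above, so search downward)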
<SYSTEM_TASK:> Send a response to the frontend and return an execute message <END_TASK> <USER_TASK:> Description: def _send(self, data, msg_type='ok', silent=False): """ Send a response to the frontend and return an execute message @param data: response to send @param msg_type (str): message type: 'ok', 'raw', 'error', 'multi' @param silent (bool): suppress output @return (dict): the return value for the kernel """
# Data to send back
if data is not None:
    # log the message
    try:
        self._klog.debug(u"msg to frontend (%d): %.160s...", silent, data)
    except Exception as e:
        self._klog.warn(u"can't log response: %s", e)
    # send it to the frontend
    if not silent:
        if msg_type != 'raw':
            data = data_msg(data, mtype=msg_type)
        self.send_response(self.iopub_socket, 'display_data', data)
# Result message
return {'status': 'error' if msg_type == 'error' else 'ok',
        # The base class will increment the execution count
        'execution_count': self.execution_count,
        'payload': [],
        'user_expressions': {}}
<SYSTEM_TASK:> Method called to execute a cell <END_TASK> <USER_TASK:> Description: def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False): """ Method called to execute a cell """
self._klog.info("[%.30s] [%d] [%s]", code, silent, user_expressions)
# Split lines and remove empty lines & comments
code_noc = [line.strip() for line in code.split('\n')
            if line and line[0] != '#']
if not code_noc:
    return self._send(None)
# Process
try:
    # Detect if we've got magics
    magic_lines = []
    for line in code_noc:
        if line[0] != '%':
            break
        magic_lines.append(line)
    # Process magics. Once done, remove them from the query buffer
    if magic_lines:
        out = [self._k.magic(line) for line in magic_lines]
        self._send(out, 'multi', silent=silent)
        code = '\n'.join(code_noc[len(magic_lines):])
    # If we have a regular SPARQL query, process it now
    result = self._k.query(code, num=self.execution_count) if code else None
    # Return the result
    return self._send(result, 'raw', silent=silent)
except Exception as e:
    return self._send(e, 'error', silent=silent)
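The magic-line handling can be seen on a toy cell; this is just the prefix scan from do_execute above, with a made-up cell body, not the kernel itself:

code = "%format json\n%lang en\nSELECT ?s WHERE { ?s ?p ?o }"
code_noc = [line.strip() for line in code.split('\n')
            if line and line[0] != '#']

magic_lines = []
for line in code_noc:
    if line[0] != '%':
        break
    magic_lines.append(line)

query = '\n'.join(code_noc[len(magic_lines):])
print(magic_lines)  # ['%format json', '%lang en']
print(query)        # SELECT ?s WHERE { ?s ?p ?o }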
<SYSTEM_TASK:> Method called on help requests <END_TASK> <USER_TASK:> Description: def do_inspect(self, code, cursor_pos, detail_level=0): """ Method called on help requests """
self._klog.info("{%s}", code[cursor_pos:cursor_pos + 10])
# Find the token for which help is requested
token, start = token_at_cursor(code, cursor_pos)
self._klog.debug("token={%s} {%d}", token, detail_level)
# Find the help for this token
if not is_magic(token, start, code):
    info = sparql_help.get(token.upper(), None)
elif token == '%':
    info = magic_help
else:
    info = magics.get(token, None)
    if info:
        info = '{} {}\n\n{}'.format(token, *info)
return {'status': 'ok',
        'data': {'text/plain': info},
        'metadata': {},
        'found': info is not None}
<SYSTEM_TASK:> Method called on autocompletion requests <END_TASK> <USER_TASK:> Description: def do_complete(self, code, cursor_pos): """ Method called on autocompletion requests """
self._klog.info("{%s}", code[cursor_pos:cursor_pos + 10])
token, start = token_at_cursor(code, cursor_pos)
tkn_low = token.lower()
if is_magic(token, start, code):
    matches = [k for k in magics.keys() if k.startswith(tkn_low)]
else:
    matches = [sparql_names[k] for k in sparql_names
               if k.startswith(tkn_low)]
self._klog.debug("token={%s} matches={%r}", token, matches)
if matches:
    return {'status': 'ok',
            'cursor_start': start,
            'cursor_end': start + len(token),
            'matches': matches}
<SYSTEM_TASK:> Return a Jupyter display_data message, in both HTML & text formats, by <END_TASK> <USER_TASK:> Description: def data_msglist( msglist ): """ Return a Jupyter display_data message, in both HTML & text formats, by joining together all passed messages. @param msglist (iterable): an iterable containing a list of tuples (message, css_style) Each message is either a text string, or a list. In the latter case it is assumed to be a format string + parameters. """
txt = html = u''
for msg, css in msglist:
    if is_collection(msg):
        msg = msg[0].format(*msg[1:])
    html += div(escape(msg).replace('\n', '<br/>'), css=css or 'msg')
    txt += msg + "\n"
return {'data': {'text/html': div(html),
                 'text/plain': txt},
        'metadata': {}}
<SYSTEM_TASK:> Format a result element as an HTML table cell. <END_TASK> <USER_TASK:> Description: def html_elem(e, ct, withtype=False): """ Format a result element as an HTML table cell. @param e (list): a pair \c (value,type) @param ct (str): cell type (th or td) @param withtype (bool): add an additional cell with the element type """
# Header cell
if ct == 'th':
    return '<th>{0}</th><th>{1}</th>'.format(*e) if withtype else '<th>{}</th>'.format(e)
# Content cell
if e[1] in ('uri', 'URIRef'):
    html = u'<{0} class=val><a href="{1}" target="_other">{2}</a></{0}>'.format(
        ct, e[0], escape(e[0]))
else:
    html = u'<{0} class=val>{1}</{0}>'.format(ct, escape(e[0]))
# Create the optional cell for the type
if withtype:
    html += u'<{0} class=typ>{1}</{0}>'.format(ct, e[1])
return html
<SYSTEM_TASK:> Return a double iterable as an HTML table <END_TASK> <USER_TASK:> Description: def html_table(data, header=True, limit=None, withtype=False): """ Return a double iterable as an HTML table @param data (iterable): the data to format @param header (bool): if the first row is a header row @param limit (int): maximum number of rows to render (excluding header) @param withtype (bool): if columns are to have an alternating CSS class (even/odd) or not. @return (int,string): a pair <number-of-rendered-rows>, <html-table> """
if header and limit:
    limit += 1
ct = 'th' if header else 'td'
rc = 'hdr' if header else 'odd'

# import codecs
# import datetime
# with codecs.open('/tmp/dump', 'w', encoding='utf-8') as f:
#     print('************', datetime.datetime.now(), file=f)
#     for n, row in enumerate(data):
#         print('-------', n, file=f)
#         for n, c in enumerate(row):
#             print(type(c), repr(c), file=f)

html = u'<table>'
rn = -1
for rn, row in enumerate(data):
    html += u'<tr class={}>'.format(rc)
    html += '\n'.join((html_elem(c, ct, withtype) for c in row))
    html += u'</tr>'
    rc = 'even' if rc == 'odd' else 'odd'
    ct = 'td'
    if limit:
        limit -= 1
        if not limit:
            break
return (0, '') if rn < 0 else (rn + 1 - header, html + u'</table>')
<SYSTEM_TASK:> Return a string with the data type of a value, for JSON data <END_TASK> <USER_TASK:> Description: def jtype(c): """ Return a string with the data type of a value, for JSON data """
ct = c['type']
return ct if ct != 'literal' else '{}, {}'.format(ct, c.get('xml:lang'))
<SYSTEM_TASK:> Return a string with the data type of a value, for Graph data <END_TASK> <USER_TASK:> Description: def gtype(n): """ Return a string with the data type of a value, for Graph data """
t = type(n).__name__
return str(t) if t != 'Literal' else 'Literal, {}'.format(n.language)
<SYSTEM_TASK:> Render to output a result in JSON format <END_TASK> <USER_TASK:> Description: def render_json(result, cfg, **kwargs): """ Render to output a result in JSON format """
result = json.loads(result.decode('utf-8'))
head = result['head']
if 'results' not in result:
    if 'boolean' in result:
        r = u'Result: {}'.format(result['boolean'])
    else:
        r = u'Unsupported result: \n' + unicode(result)
    return {'data': {'text/plain': r}, 'metadata': {}}
vars = head['vars']
nrow = len(result['results']['bindings'])
if cfg.dis == 'table':
    j = json_iterator(vars, result['results']['bindings'], set(cfg.lan),
                      add_vtype=cfg.typ)
    n, data = html_table(j, limit=cfg.lmt, withtype=cfg.typ)
    data += div('Total: {}, Shown: {}', nrow, n, css="tinfo")
    data = {'text/html': div(data)}
else:
    result = json.dumps(result, ensure_ascii=False, indent=2, sort_keys=True)
    data = {'text/plain': unicode(result)}
return {'data': data, 'metadata': {}}
<SYSTEM_TASK:> Render to output a result in XML format <END_TASK> <USER_TASK:> Description: def render_xml(result, cfg, **kwargs): """ Render to output a result in XML format """
# Raw mode
if cfg.dis == 'raw':
    return {'data': {'text/plain': result.decode('utf-8')},
            'metadata': {}}
# Table
try:
    import xml.etree.cElementTree as ET
except ImportError:
    import xml.etree.ElementTree as ET
root = ET.fromstring(result)
try:
    ns = {'ns': re.match(r'\{([^}]+)\}', root.tag).group(1)}
except Exception:
    raise KrnlException('Invalid XML data: cannot get namespace')
columns = [c.attrib['name'] for c in root.find('ns:head', ns)]
results = root.find('ns:results', ns)
nrow = len(results)
j = xml_iterator(columns, results, set(cfg.lan), add_vtype=cfg.typ)
n, data = html_table(j, limit=cfg.lmt, withtype=cfg.typ)
data += div('Total: {}, Shown: {}', nrow, n, css="tinfo")
return {'data': {'text/html': div(data)}, 'metadata': {}}
<SYSTEM_TASK:> Render to output a result that can be parsed as an RDF graph <END_TASK> <USER_TASK:> Description: def render_graph(result, cfg, **kwargs): """ Render to output a result that can be parsed as an RDF graph """
# Mapping from MIME types to formats accepted by RDFlib
# (duplicate keys in the original dict literal have been removed)
rdflib_formats = {'text/rdf+n3': 'n3',
                  'text/turtle': 'turtle',
                  'application/x-turtle': 'turtle',
                  'application/rdf+xml': 'xml',
                  'text/rdf': 'xml'}
try:
    got = kwargs.get('format', 'text/rdf+n3')
    fmt = rdflib_formats[got]
except KeyError:
    raise KrnlException('Unsupported format for graph processing: {!s}', got)

g = ConjunctiveGraph()
g.load(StringInputSource(result), format=fmt)

display = cfg.dis[0] if is_collection(cfg.dis) else cfg.dis
if display in ('png', 'svg'):
    try:
        literal = len(cfg.dis) > 1 and cfg.dis[1].startswith('withlit')
        opt = {'lang': cfg.lan, 'literal': literal, 'graphviz': []}
        data, metadata = draw_graph(g, fmt=display, options=opt)
        return {'data': data, 'metadata': metadata}
    except Exception as e:
        raise KrnlException('Exception while drawing graph: {!r}', e)
elif display == 'table':
    it = rdf_iterator(g, set(cfg.lan), add_vtype=cfg.typ)
    n, data = html_table(it, limit=cfg.lmt, withtype=cfg.typ)
    data += div('Shown: {}, Total rows: {}', n if cfg.lmt else 'all',
                len(g), css="tinfo")
    data = {'text/html': div(data)}
elif len(g) == 0:
    data = {'text/html': div(div('empty graph', css='krn-warn'))}
else:
    data = {'text/plain': g.serialize(format='nt').decode('utf-8')}
return {'data': data, 'metadata': {}}
<SYSTEM_TASK:> Set a logging configuration, with a rolling file appender. <END_TASK> <USER_TASK:> Description: def set_logging( logfilename=None, level=None ): """ Set a logging configuration, with a rolling file appender. If passed a filename, use it as the logfile, else use a default name. The default logfile is \c sparqlkernel.log, placed in the directory given by (in that order) the \c LOGDIR environment variable, the logdir specified upon kernel installation or the default temporary directory. """
if logfilename is None:
    # Find the logging directory
    logdir = os.environ.get('LOGDIR')
    if logdir is None:
        logdir = os.environ.get('LOGDIR_DEFAULT', tempfile.gettempdir())
    # Define the log filename
    basename = __name__.split('.')[-2]
    logfilename = os.path.join(logdir, basename + '.log')
LOGCONFIG['handlers']['default']['filename'] = logfilename
if level is not None:
    LOGCONFIG['loggers']['sparqlkernel']['level'] = level
dictConfig(LOGCONFIG)
<SYSTEM_TASK:> A way to find out the status of a field. <END_TASK> <USER_TASK:> Description: def smartfields_get_field_status(self, field_name): """A way to find out the status of a field."""
manager = self._smartfields_managers.get(field_name, None)
if manager is not None:
    return manager.get_status(self)
return {'state': 'ready'}
<SYSTEM_TASK:> Creates a temporary file. With regular `FileSystemStorage` it does not <END_TASK> <USER_TASK:> Description: def get_output_file(self, in_file, instance, field, **kwargs): """Creates a temporary file. With regular `FileSystemStorage` it does not need to be deleted; instead the file is safely moved over. With other cloud-based storage it is a good idea to set `delete=True`."""
return NamedTemporaryFile(mode='rb', suffix='_%s_%s%s' % ( get_model_name(instance), field.name, self.get_ext()), delete=False)
<SYSTEM_TASK:> Compute the scattering matrices for the given PSD and geometries. <END_TASK> <USER_TASK:> Description: def get_SZ(self, psd, geometry): """ Compute the scattering matrices for the given PSD and geometries. Returns: The new amplitude (S) and phase (Z) matrices. """
if (self._S_table is None) or (self._Z_table is None):
    raise AttributeError(
        "Initialize or load the scattering table first.")

if (not isinstance(psd, PSD)) or self._previous_psd != psd:
    self._S_dict = {}
    self._Z_dict = {}
    psd_w = psd(self._psd_D)
    for geom in self.geometries:
        self._S_dict[geom] = trapz(self._S_table[geom] * psd_w, self._psd_D)
        self._Z_dict[geom] = trapz(self._Z_table[geom] * psd_w, self._psd_D)
    self._previous_psd = psd

return (self._S_dict[geometry], self._Z_dict[geometry])
<SYSTEM_TASK:> Save the scattering lookup tables. <END_TASK> <USER_TASK:> Description: def save_scatter_table(self, fn, description=""): """Save the scattering lookup tables. Save the state of the scattering lookup tables to a file. This can be loaded later with load_scatter_table. Other variables will not be saved, but this does not matter because the results of the computations are based only on the contents of the table. Args: fn: The name of the scattering table file. description (optional): A description of the table. """
data = {
    "description": description,
    "time": datetime.now(),
    "psd_scatter": (self.num_points, self.D_max, self._psd_D,
                    self._S_table, self._Z_table, self._angular_table,
                    self._m_table, self.geometries),
    "version": tmatrix_aux.VERSION
}
# open in binary mode: pickle output is not text
with open(fn, 'wb') as f:
    pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
<SYSTEM_TASK:> Load the scattering lookup tables. <END_TASK> <USER_TASK:> Description: def load_scatter_table(self, fn): """Load the scattering lookup tables. Load the scattering lookup tables saved with save_scatter_table. Args: fn: The name of the scattering table file. """
# open in binary mode: pickle input is not text
with open(fn, 'rb') as f:
    data = pickle.load(f)
if ("version" not in data) or (data["version"] != tmatrix_aux.VERSION):
    warnings.warn("Loading data saved with another version.", Warning)
(self.num_points, self.D_max, self._psd_D, self._S_table, self._Z_table,
 self._angular_table, self._m_table, self.geometries) = data["psd_scatter"]
return (data["time"], data["description"])
<SYSTEM_TASK:> Compute the T-matrix using variable orientation scatterers. <END_TASK> <USER_TASK:> Description: def orient_averaged_adaptive(tm): """Compute the T-matrix using variable orientation scatterers. This method uses a very slow adaptive routine and should mainly be used for reference purposes. Uses the set particle orientation PDF, ignoring the alpha and beta attributes. Args: tm: TMatrix (or descendant) instance Returns: The amplitude (S) and phase (Z) matrices. """
S = np.zeros((2, 2), dtype=complex)
Z = np.zeros((4, 4))

def Sfunc(beta, alpha, i, j, real):
    (S_ang, Z_ang) = tm.get_SZ_single(alpha=alpha, beta=beta)
    s = S_ang[i, j].real if real else S_ang[i, j].imag
    return s * tm.or_pdf(beta)

ind = range(2)
for i in ind:
    for j in ind:
        S.real[i, j] = dblquad(Sfunc, 0.0, 360.0, lambda x: 0.0,
                               lambda x: 180.0, (i, j, True))[0] / 360.0
        S.imag[i, j] = dblquad(Sfunc, 0.0, 360.0, lambda x: 0.0,
                               lambda x: 180.0, (i, j, False))[0] / 360.0

def Zfunc(beta, alpha, i, j):
    (S_ang, Z_ang) = tm.get_SZ_single(alpha=alpha, beta=beta)
    return Z_ang[i, j] * tm.or_pdf(beta)

ind = range(4)
for i in ind:
    for j in ind:
        Z[i, j] = dblquad(Zfunc, 0.0, 360.0, lambda x: 0.0,
                          lambda x: 180.0, (i, j))[0] / 360.0

return (S, Z)
<SYSTEM_TASK:> Compute the T-matrix using variable orientation scatterers. <END_TASK> <USER_TASK:> Description: def orient_averaged_fixed(tm): """Compute the T-matrix using variable orientation scatterers. This method uses a fast Gaussian quadrature and is suitable for most use. Uses the set particle orientation PDF, ignoring the alpha and beta attributes. Args: tm: TMatrix (or descendant) instance. Returns: The amplitude (S) and phase (Z) matrices. """
S = np.zeros((2, 2), dtype=complex)
Z = np.zeros((4, 4))
ap = np.linspace(0, 360, tm.n_alpha + 1)[:-1]
aw = 1.0 / tm.n_alpha
for alpha in ap:
    for (beta, w) in zip(tm.beta_p, tm.beta_w):
        (S_ang, Z_ang) = tm.get_SZ_single(alpha=alpha, beta=beta)
        S += w * S_ang
        Z += w * Z_ang
sw = tm.beta_w.sum()
# normalize to get a proper average
S *= aw / sw
Z *= aw / sw
return (S, Z)
<SYSTEM_TASK:> A convenience function to set the geometry variables. <END_TASK> <USER_TASK:> Description: def set_geometry(self, geom): """A convenience function to set the geometry variables. Args: geom: A tuple containing (thet0, thet, phi0, phi, alpha, beta). See the Scatterer class documentation for a description of these angles. """
(self.thet0, self.thet, self.phi0, self.phi, self.alpha, self.beta) = geom
<SYSTEM_TASK:> A convenience function to get the geometry variables. <END_TASK> <USER_TASK:> Description: def get_geometry(self): """A convenience function to get the geometry variables. Returns: A tuple containing (thet0, thet, phi0, phi, alpha, beta). See the Scatterer class documentation for a description of these angles. """
return (self.thet0, self.thet, self.phi0, self.phi, self.alpha, self.beta)
<SYSTEM_TASK:> Initialize the T-matrix. <END_TASK> <USER_TASK:> Description: def _init_tmatrix(self): """Initialize the T-matrix. """
if self.radius_type == Scatterer.RADIUS_MAXIMUM:
    # Maximum radius is not directly supported in the original
    # so we convert it to equal volume radius
    radius_type = Scatterer.RADIUS_EQUAL_VOLUME
    radius = self.equal_volume_from_maximum()
else:
    radius_type = self.radius_type
    radius = self.radius

self.nmax = pytmatrix.calctmat(radius, radius_type, self.wavelength,
                               self.m.real, self.m.imag, self.axis_ratio,
                               self.shape, self.ddelt, self.ndgs)
self._tm_signature = (self.radius, self.radius_type, self.wavelength,
                      self.m, self.axis_ratio, self.shape, self.ddelt,
                      self.ndgs)
<SYSTEM_TASK:> Retrieve the quadrature points and weights if needed. <END_TASK> <USER_TASK:> Description: def _init_orient(self): """Retrieve the quadrature points and weights if needed. """
if self.orient == orientation.orient_averaged_fixed:
    (self.beta_p, self.beta_w) = quadrature.get_points_and_weights(
        self.or_pdf, 0, 180, self.n_beta)
self._set_orient_signature()
<SYSTEM_TASK:> Mark the amplitude and scattering matrices as up to date. <END_TASK> <USER_TASK:> Description: def _set_scatter_signature(self): """Mark the amplitude and scattering matrices as up to date. """
self._scatter_signature = (self.thet0, self.thet, self.phi0, self.phi, self.alpha, self.beta, self.orient)
<SYSTEM_TASK:> Get the S and Z matrices for a single orientation. <END_TASK> <USER_TASK:> Description: def get_SZ_single(self, alpha=None, beta=None): """Get the S and Z matrices for a single orientation. """
if alpha is None:
    alpha = self.alpha
if beta is None:
    beta = self.beta

tm_outdated = self._tm_signature != (self.radius, self.radius_type,
                                     self.wavelength, self.m,
                                     self.axis_ratio, self.shape,
                                     self.ddelt, self.ndgs)
if tm_outdated:
    self._init_tmatrix()

scatter_outdated = self._scatter_signature != (self.thet0, self.thet,
                                               self.phi0, self.phi,
                                               alpha, beta, self.orient)
outdated = tm_outdated or scatter_outdated
if outdated:
    (self._S_single, self._Z_single) = pytmatrix.calcampl(
        self.nmax, self.wavelength, self.thet0, self.thet,
        self.phi0, self.phi, alpha, beta)
    self._set_scatter_signature()
return (self._S_single, self._Z_single)
<SYSTEM_TASK:> Get the S and Z matrices using the specified orientation averaging. <END_TASK> <USER_TASK:> Description: def get_SZ_orient(self): """Get the S and Z matrices using the specified orientation averaging. """
tm_outdated = self._tm_signature != (self.radius, self.radius_type,
                                     self.wavelength, self.m,
                                     self.axis_ratio, self.shape,
                                     self.ddelt, self.ndgs)
scatter_outdated = self._scatter_signature != (self.thet0, self.thet,
                                               self.phi0, self.phi,
                                               self.alpha, self.beta,
                                               self.orient)
orient_outdated = self._orient_signature != \
    (self.orient, self.or_pdf, self.n_alpha, self.n_beta)
if orient_outdated:
    self._init_orient()
outdated = tm_outdated or scatter_outdated or orient_outdated
if outdated:
    (self._S_orient, self._Z_orient) = self.orient(self)
    self._set_scatter_signature()
return (self._S_orient, self._Z_orient)
<SYSTEM_TASK:> Get the S and Z matrices using the current parameters. <END_TASK> <USER_TASK:> Description: def get_SZ(self): """Get the S and Z matrices using the current parameters. """
if self.psd_integrator is None:
    (self._S, self._Z) = self.get_SZ_orient()
else:
    scatter_outdated = self._scatter_signature != (self.thet0, self.thet,
                                                   self.phi0, self.phi,
                                                   self.alpha, self.beta,
                                                   self.orient)
    psd_outdated = self._psd_signature != (self.psd,)
    outdated = scatter_outdated or psd_outdated
    if outdated:
        (self._S, self._Z) = self.psd_integrator(self.psd,
                                                 self.get_geometry())
        self._set_scatter_signature()
        self._set_psd_signature()
return (self._S, self._Z)
<SYSTEM_TASK:> Quadrature points and weights for a weighting function. <END_TASK> <USER_TASK:> Description: def get_points_and_weights(w_func=lambda x : np.ones(x.shape), left=-1.0, right=1.0, num_points=5, n=4096): """Quadrature points and weights for a weighting function. Points and weights for approximating the integral I = \int_left^right f(x) w(x) dx given the weighting function w(x) using the approximation I ~ sum_i w_i f(x_i) Args: w_func: The weighting function w(x). Must be a function that takes one argument and is valid over the open interval (left, right). left: The left boundary of the interval right: The right boundary of the interval num_points: number of integration points to return n: the number of points to evaluate w_func at. Returns: A tuple (points, weights) where points is a sorted array of the points x_i and weights gives the corresponding weights w_i. """
dx = (float(right) - left) / n
z = np.hstack(np.linspace(left + 0.5 * dx, right - 0.5 * dx, n))
w = dx * w_func(z)

(a, b) = discrete_gautschi(z, w, num_points)
alpha = a
beta = np.sqrt(b)

J = np.diag(alpha)
J += np.diag(beta, k=-1)
J += np.diag(beta, k=1)

(points, v) = np.linalg.eigh(J)
ind = points.argsort()
points = points[ind]

weights = v[0, :]**2 * w.sum()
weights = weights[ind]

return (points, weights)
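The Jacobi-matrix construction above is the Golub–Welsch algorithm: nodes are eigenvalues, weights come from the first eigenvector components. As a sanity check, the same eigenvalue trick with the known analytic Legendre recurrence coefficients reproduces Gauss–Legendre quadrature (discrete_gautschi is the module's own routine, so this sketch substitutes the analytic coefficients):

import numpy as np

n = 5
k = np.arange(1, n)
beta = np.sqrt(k**2 / (4.0 * k**2 - 1.0))   # Legendre recurrence, alpha_k = 0
J = np.diag(beta, k=-1) + np.diag(beta, k=1)
points, v = np.linalg.eigh(J)
weights = 2.0 * v[0, :]**2                  # total weight = integral of 1 over [-1, 1]

print(np.sum(weights * points**2))  # ~0.6667 = integral of x^2 over [-1, 1]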
<SYSTEM_TASK:> Scattering cross section for the current setup, with polarization. <END_TASK> <USER_TASK:> Description: def sca_xsect(scatterer, h_pol=True): """Scattering cross section for the current setup, with polarization. Args: scatterer: a Scatterer instance. h_pol: If True (default), use horizontal polarization. If False, use vertical polarization. Returns: The scattering cross section. """
if scatterer.psd_integrator is not None:
    return scatterer.psd_integrator.get_angular_integrated(
        scatterer.psd, scatterer.get_geometry(), "sca_xsect")

old_geom = scatterer.get_geometry()

def d_xsect(thet, phi):
    (scatterer.phi, scatterer.thet) = (phi * rad_to_deg, thet * rad_to_deg)
    Z = scatterer.get_Z()
    I = sca_intensity(scatterer, h_pol)
    return I * np.sin(thet)

try:
    xsect = dblquad(d_xsect, 0.0, 2 * np.pi, lambda x: 0.0,
                    lambda x: np.pi)[0]
finally:
    scatterer.set_geometry(old_geom)

return xsect
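The dblquad convention used here (the inner variable comes first in the integrand's signature, the outer limits are the scalars) is easy to sanity-check: integrating a unit intensity over the sphere should give the full solid angle 4π. A minimal check:

import numpy as np
from scipy.integrate import dblquad

# Integrand signature is f(inner, outer): theta is inner, phi is outer
solid_angle = dblquad(lambda thet, phi: np.sin(thet),
                      0.0, 2 * np.pi,                    # phi limits
                      lambda x: 0.0, lambda x: np.pi)[0] # theta limits
print(solid_angle, 4 * np.pi)  # both ~12.566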
<SYSTEM_TASK:> Extinction cross section for the current setup, with polarization. <END_TASK> <USER_TASK:> Description: def ext_xsect(scatterer, h_pol=True): """Extinction cross section for the current setup, with polarization. Args: scatterer: a Scatterer instance. h_pol: If True (default), use horizontal polarization. If False, use vertical polarization. Returns: The extinction cross section. """
if scatterer.psd_integrator is not None:
    try:
        return scatterer.psd_integrator.get_angular_integrated(
            scatterer.psd, scatterer.get_geometry(), "ext_xsect")
    except AttributeError:
        # Fall back to the usual method of computing this from S
        pass

old_geom = scatterer.get_geometry()
(thet0, thet, phi0, phi, alpha, beta) = old_geom
try:
    scatterer.set_geometry((thet0, thet0, phi0, phi0, alpha, beta))
    S = scatterer.get_S()
finally:
    scatterer.set_geometry(old_geom)

if h_pol:
    return 2 * scatterer.wavelength * S[1, 1].imag
else:
    return 2 * scatterer.wavelength * S[0, 0].imag
<SYSTEM_TASK:> Single-scattering albedo for the current setup, with polarization. <END_TASK> <USER_TASK:> Description: def ssa(scatterer, h_pol=True): """Single-scattering albedo for the current setup, with polarization. Args: scatterer: a Scatterer instance. h_pol: If True (default), use horizontal polarization. If False, use vertical polarization. Returns: The single-scattering albedo. """
ext_xs = ext_xsect(scatterer, h_pol=h_pol)
return sca_xsect(scatterer, h_pol=h_pol) / ext_xs if ext_xs > 0.0 else 0.0
<SYSTEM_TASK:> Asymmetry parameter for the current setup, with polarization. <END_TASK> <USER_TASK:> Description: def asym(scatterer, h_pol=True): """Asymmetry parameter for the current setup, with polarization. Args: scatterer: a Scatterer instance. h_pol: If True (default), use horizontal polarization. If False, use vertical polarization. Returns: The asymmetry parameter. """
if scatterer.psd_integrator is not None:
    return scatterer.psd_integrator.get_angular_integrated(
        scatterer.psd, scatterer.get_geometry(), "asym")

old_geom = scatterer.get_geometry()

cos_t0 = np.cos(scatterer.thet0 * deg_to_rad)
sin_t0 = np.sin(scatterer.thet0 * deg_to_rad)
p0 = scatterer.phi0 * deg_to_rad

def integrand(thet, phi):
    (scatterer.phi, scatterer.thet) = (phi * rad_to_deg, thet * rad_to_deg)
    cos_T_sin_t = 0.5 * (np.sin(2 * thet) * cos_t0 +
                         (1 - np.cos(2 * thet)) * sin_t0 * np.cos(p0 - phi))
    I = sca_intensity(scatterer, h_pol)
    return I * cos_T_sin_t

try:
    cos_int = dblquad(integrand, 0.0, 2 * np.pi, lambda x: 0.0,
                      lambda x: np.pi)[0]
finally:
    scatterer.set_geometry(old_geom)

return cos_int / sca_xsect(scatterer, h_pol)
<SYSTEM_TASK:> Radar cross section for the current setup. <END_TASK> <USER_TASK:> Description: def radar_xsect(scatterer, h_pol=True): """Radar cross section for the current setup. Args: scatterer: a Scatterer instance. h_pol: If True (default), use horizontal polarization. If False, use vertical polarization. Returns: The radar cross section. """
Z = scatterer.get_Z()
if h_pol:
    return 2 * np.pi * (Z[0, 0] - Z[0, 1] - Z[1, 0] + Z[1, 1])
else:
    return 2 * np.pi * (Z[0, 0] + Z[0, 1] + Z[1, 0] + Z[1, 1])
<SYSTEM_TASK:> Delta_hv for the current setup. <END_TASK> <USER_TASK:> Description: def delta_hv(scatterer): """ Delta_hv for the current setup. Args: scatterer: a Scatterer instance. Returns: Delta_hv [rad]. """
Z = scatterer.get_Z()
return np.arctan2(Z[2, 3] - Z[3, 2], -Z[2, 2] - Z[3, 3])
<SYSTEM_TASK:> Maxwell-Garnett EMA for the refractive index. <END_TASK> <USER_TASK:> Description: def mg_refractive(m, mix): """Maxwell-Garnett EMA for the refractive index. Args: m: Tuple of the complex refractive indices of the media. mix: Tuple of the volume fractions of the media, len(mix)==len(m) (if sum(mix)!=1, these are taken relative to sum(mix)) Returns: The Maxwell-Garnett approximation for the complex refractive index of the effective medium If len(m)==2, the first element is taken as the matrix and the second as the inclusion. If len(m)>2, the media are mixed recursively so that the last element is used as the inclusion and the second to last as the matrix, then this mixture is used as the last element on the next iteration, and so on. """
if len(m) == 2:
    cF = float(mix[1]) / (mix[0] + mix[1]) * \
        (m[1]**2 - m[0]**2) / (m[1]**2 + 2 * m[0]**2)
    er = m[0]**2 * (1.0 + 2.0 * cF) / (1.0 - cF)
    m = np.sqrt(er)
else:
    m_last = mg_refractive(m[-2:], mix[-2:])
    mix_last = mix[-2] + mix[-1]
    m = mg_refractive(m[:-2] + (m_last,), mix[:-2] + (mix_last,))
return m
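A usage sketch, assuming mg_refractive above and numpy (as np) are in scope; the index values are illustrative (roughly ice-like and air-like), not taken from the source:

m_ice = complex(1.78, 0.003)   # illustrative matrix index
m_air = complex(1.0, 0.0)      # illustrative inclusion index

# 70% ice matrix with 30% air inclusions
m_eff = mg_refractive((m_ice, m_air), (0.7, 0.3))
print(m_eff)  # effective index lies between the two inputs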
<SYSTEM_TASK:> Bruggeman EMA for the refractive index. <END_TASK> <USER_TASK:> Description: def bruggeman_refractive(m, mix): """Bruggeman EMA for the refractive index. For instructions, see mg_refractive in this module, except this routine only works for two components. """
f1 = mix[0] / sum(mix)
f2 = mix[1] / sum(mix)
e1 = m[0]**2
e2 = m[1]**2
a = -2 * (f1 + f2)
b = (2 * f1 * e1 - f1 * e2 + 2 * f2 * e2 - f2 * e1)
c = (f1 + f2) * e1 * e2
e_eff = (-b - np.sqrt(b**2 - 4 * a * c)) / (2 * a)
return np.sqrt(e_eff)
<SYSTEM_TASK:> Interpolator for the refractive indices of ice. <END_TASK> <USER_TASK:> Description: def ice_refractive(file): """ Interpolator for the refractive indices of ice. Inputs: File to read the refractive index lookup table from. This is supplied as "ice_refr.dat", retrieved from http://www.atmos.washington.edu/ice_optical_constants/ Returns: A callable object that takes as parameters the wavelength [mm] and the snow density [g/cm^3]. """
D = np.loadtxt(file)
log_wl = np.log10(D[:, 0] / 1000)
re = D[:, 1]
log_im = np.log10(D[:, 2])

iobj_re = interpolate.interp1d(log_wl, re)
iobj_log_im = interpolate.interp1d(log_wl, log_im)

def ref(wl, snow_density):
    lwl = np.log10(wl)
    try:
        len(lwl)
    except TypeError:
        mi_sqr = complex(iobj_re(lwl), 10**iobj_log_im(lwl))**2
    else:
        mi_sqr = np.array([complex(a, b) for (a, b) in
                           zip(iobj_re(lwl), 10**iobj_log_im(lwl))])**2
    c = (mi_sqr - 1) / (mi_sqr + 2) * snow_density / ice_density
    return np.sqrt((1 + 2 * c) / (1 - c))

return ref
<SYSTEM_TASK:> Group objects in data using a function or a key <END_TASK> <USER_TASK:> Description: def _group_by(data, criteria): """ Group objects in data using a function or a key """
if isinstance(criteria, str):
    criteria_str = criteria

    def criteria(x):
        return x[criteria_str]

res = defaultdict(list)
for element in data:
    key = criteria(element)
    res[key].append(element)
return res
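A usage sketch of the grouping helper; the records are made up and the snippet assumes it runs in the same module as the definition above:

rows = [{'model': 'svm', 'score': 0.9},
        {'model': 'svm', 'score': 0.8},
        {'model': 'rf', 'score': 0.7}]

# Group by a key name...
by_name = _group_by(rows, 'model')
# ...or by an arbitrary function
by_bucket = _group_by(rows, lambda r: r['score'] >= 0.8)

print(sorted(by_name))  # ['rf', 'svm']
print(by_bucket[True])  # the two rows scoring >= 0.8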
<SYSTEM_TASK:> Perform the product between two objects <END_TASK> <USER_TASK:> Description: def _product(k, v): """ Perform the product between two objects even if they don't support iteration """
if not _can_iterate(k):
    k = [k]
if not _can_iterate(v):
    v = [v]
return list(product(k, v))
<SYSTEM_TASK:> Plot a learning curve <END_TASK> <USER_TASK:> Description: def learning_curve(train_scores, test_scores, train_sizes, ax=None): """Plot a learning curve Plot a metric vs number of examples for the training and test set Parameters ---------- train_scores : array-like Scores for the training set test_scores : array-like Scores for the test set train_sizes : array-like Relative or absolute numbers of training examples used to generate the learning curve ax : matplotlib Axes Axes object to draw the plot onto, otherwise uses current Axes Returns ------- ax: matplotlib Axes Axes containing the plot Examples -------- .. plot:: ../../examples/learning_curve.py """
if ax is None:
    ax = plt.gca()

ax.grid()
ax.set_title("Learning Curve")
ax.set_xlabel("Training examples")
ax.set_ylabel("Score mean")

train_scores_mean = np.mean(train_scores, axis=1)
train_scores_std = np.std(train_scores, axis=1)
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_std = np.std(test_scores, axis=1)

ax.fill_between(train_sizes, train_scores_mean - train_scores_std,
                train_scores_mean + train_scores_std, alpha=0.1, color="r")
ax.fill_between(train_sizes, test_scores_mean - test_scores_std,
                test_scores_mean + test_scores_std, alpha=0.1, color="g")
ax.plot(train_sizes, train_scores_mean, 'o-', color="r",
        label="Training score")
ax.plot(train_sizes, test_scores_mean, 'o-', color="g",
        label="Cross-validation score")

ax.legend(loc="best")
ax.margins(0.05)
return ax
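A hedged usage sketch: the score arrays would typically come from scikit-learn's learning_curve utility, and the plotting function above is assumed to be importable in scope (its real import path depends on the package):

import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import learning_curve as sk_learning_curve

X, y = load_iris(return_X_y=True)
train_sizes, train_scores, test_scores = sk_learning_curve(
    LogisticRegression(max_iter=1000), X, y, cv=5,
    train_sizes=[0.3, 0.6, 1.0])

# `learning_curve` here is the plotting function defined above
learning_curve(train_scores, test_scores, train_sizes)
plt.show()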
<SYSTEM_TASK:> Plot a validation curve <END_TASK> <USER_TASK:> Description: def validation_curve(train_scores, test_scores, param_range, param_name=None, semilogx=False, ax=None): """Plot a validation curve Plot a metric vs hyperparameter values for the training and test set Parameters ---------- train_scores : array-like Scores for the training set test_scores : array-like Scores for the test set param_range : array-like Hyperparameter values used to generate the curve param_name : str Hyperparameter name semilogx : bool Sets a log scale on the x axis ax : matplotlib Axes Axes object to draw the plot onto, otherwise uses current Axes Returns ------- ax: matplotlib Axes Axes containing the plot Examples -------- .. plot:: ../../examples/validation_curve.py """
if ax is None:
    ax = plt.gca()

if semilogx:
    ax.set_xscale('log')

train_scores_mean = np.mean(train_scores, axis=1)
train_scores_std = np.std(train_scores, axis=1)
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_std = np.std(test_scores, axis=1)

ax.set_title("Validation Curve")
ax.set_ylabel("Score mean")
if param_name:
    ax.set_xlabel(param_name)

ax.plot(param_range, train_scores_mean, label="Training score", color="r")
ax.plot(param_range, test_scores_mean, label="Cross-validation score",
        color="g")
ax.fill_between(param_range, train_scores_mean - train_scores_std,
                train_scores_mean + train_scores_std, alpha=0.2, color="r")
ax.fill_between(param_range, test_scores_mean - test_scores_std,
                test_scores_mean + test_scores_std, alpha=0.2, color="g")

ax.legend(loc="best")
ax.margins(0.05)
return ax
<SYSTEM_TASK:> Plot confusion matrix. <END_TASK> <USER_TASK:> Description: def confusion_matrix(y_true, y_pred, target_names=None, normalize=False, cmap=None, ax=None): """ Plot confusion matrix. Parameters ---------- y_true : array-like, shape = [n_samples] Correct target values (ground truth). y_pred : array-like, shape = [n_samples] Target predicted classes (estimator predictions). target_names : list List containing the names of the target classes. List must be in order e.g. ``['Label for class 0', 'Label for class 1']``. If ``None``, generic labels will be generated e.g. ``['Class 0', 'Class 1']`` ax: matplotlib Axes Axes object to draw the plot onto, otherwise uses current Axes normalize : bool Normalize the confusion matrix cmap : matplotlib Colormap If ``None`` uses a modified version of matplotlib's OrRd colormap. Returns ------- ax: matplotlib Axes Axes containing the plot Examples -------- .. plot:: ../../examples/confusion_matrix.py """
if any((val is None for val in (y_true, y_pred))):
    raise ValueError("y_true and y_pred are needed to plot confusion "
                     "matrix")

# calculate how many names you expect
values = set(y_true).union(set(y_pred))
expected_len = len(values)

if target_names and (expected_len != len(target_names)):
    raise ValueError(('Data contains {} different values, but target'
                      ' names contains {} values.'.format(expected_len,
                                                          len(target_names))))

# if the user didn't pass target_names, create generic ones
if not target_names:
    values = list(values)
    values.sort()
    target_names = ['Class {}'.format(v) for v in values]

cm = sk_confusion_matrix(y_true, y_pred)

if normalize:
    cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
    np.set_printoptions(precision=2)

if ax is None:
    ax = plt.gca()

# this (y, x) may sound counterintuitive. The reason is that
# in a matrix cell (i, j) is in row=i and col=j, translating that
# to an x, y plane (which matplotlib uses to plot), we need to use
# i as the y coordinate (how many steps down) and j as the x coordinate
# (how many steps to the right)
for (y, x), v in np.ndenumerate(cm):
    try:
        label = '{:.2}'.format(v)
    except (ValueError, TypeError):
        # integer cells do not accept a float precision specifier
        label = v
    ax.text(x, y, label, horizontalalignment='center',
            verticalalignment='center')

if cmap is None:
    cmap = default_heatmap()

im = ax.imshow(cm, interpolation='nearest', cmap=cmap)
plt.colorbar(im, ax=ax)

tick_marks = np.arange(len(target_names))
ax.set_xticks(tick_marks)
ax.set_xticklabels(target_names)
ax.set_yticks(tick_marks)
ax.set_yticklabels(target_names)

title = 'Confusion matrix'
if normalize:
    title += ' (normalized)'
ax.set_title(title)

ax.set_ylabel('True label')
ax.set_xlabel('Predicted label')
return ax
<SYSTEM_TASK:> Plot precision values at different proportions. <END_TASK> <USER_TASK:> Description: def precision_at_proportions(y_true, y_score, ax=None): """ Plot precision values at different proportions. Parameters ---------- y_true : array-like Correct target values (ground truth). y_score : array-like Target scores (estimator predictions). ax : matplotlib Axes Axes object to draw the plot onto, otherwise uses current Axes Returns ------- ax: matplotlib Axes Axes containing the plot """
if any((val is None for val in (y_true, y_score))):
    raise ValueError('y_true and y_score are needed to plot precision at '
                     'proportions')

if ax is None:
    ax = plt.gca()

y_score_is_vector = is_column_vector(y_score) or is_row_vector(y_score)
if not y_score_is_vector:
    y_score = y_score[:, 1]

# Calculate points
proportions = [0.01 * i for i in range(1, 101)]
precs_and_cutoffs = [precision_at(y_true, y_score, p) for p in proportions]
precs, cutoffs = zip(*precs_and_cutoffs)

# Plot and set nice defaults for title and axis labels
ax.plot(proportions, precs)
ax.set_title('Precision at various proportions')
ax.set_ylabel('Precision')
ax.set_xlabel('Proportion')
ticks = [0.1 * i for i in range(1, 11)]
ax.set_xticks(ticks)
ax.set_xticklabels(ticks)
ax.set_yticks(ticks)
ax.set_yticklabels(ticks)
ax.set_ylim([0, 1.0])
ax.set_xlim([0, 1.0])
return ax
<SYSTEM_TASK:> Plot results from a sklearn grid search by changing two parameters at most. <END_TASK> <USER_TASK:> Description: def grid_search(grid_scores, change, subset=None, kind='line', cmap=None, ax=None): """ Plot results from a sklearn grid search by changing two parameters at most. Parameters ---------- grid_scores : list of named tuples Results from a sklearn grid search (get them using the `grid_scores_` attribute) change : str or iterable with len<=2 Parameter to change subset : dictionary-like parameter-value(s) pairs to subset from grid_scores. (e.g. ``{'n_estimators': [1, 10]}``), if None all combinations will be used. kind : ['line', 'bar'] This only applies when change is a single parameter. Changes the type of plot cmap : matplotlib Colormap This only applies when change lists two parameters. Colormap used for the matrix. If None uses a modified version of matplotlib's OrRd colormap. ax: matplotlib Axes Axes object to draw the plot onto, otherwise uses current Axes Returns ------- ax: matplotlib Axes Axes containing the plot Examples -------- .. plot:: ../../examples/grid_search.py """
if change is None:
    raise ValueError(('change can\'t be None, you need to select at least'
                      ' one value to make the plot.'))

if ax is None:
    ax = plt.gca()

if cmap is None:
    cmap = default_heatmap()

if isinstance(change, string_types) or len(change) == 1:
    return _grid_search_single(grid_scores, change, subset, kind, ax)
elif len(change) == 2:
    return _grid_search_double(grid_scores, change, subset, cmap, ax)
else:
    raise ValueError('change must have length 1 or 2 or be a string')
<SYSTEM_TASK:> Precision at proportions plot <END_TASK> <USER_TASK:> Description: def precision_at_proportions(self): """Precision at proportions plot """
return plot.precision_at_proportions(self.y_true, self.y_score, ax=_gen_ax())
<SYSTEM_TASK:> Generate HTML report <END_TASK> <USER_TASK:> Description: def generate_report(self, template, path=None, style=None): """ Generate HTML report Parameters ---------- template : markdown-formatted string or path to the template file used for rendering the report. Any attribute of this object can be included in the report using the {tag} format. e.g.'# Report{estimator_name}{roc}{precision_recall}'. Apart from every attribute, you can also use {date} and {date_utc} tags to include the date for the report generation using local and UTC timezones respectively. path : str Path to save the HTML report. If None, the function will return the HTML code. style: str Path to a css file to apply style to the report. If None, no style will be applied Returns ------- report: str Returns the contents of the report if path is None. """
from .report import generate
return generate(self, template, path, style)
<SYSTEM_TASK:> Plot ROC curve. <END_TASK> <USER_TASK:> Description: def roc(y_true, y_score, ax=None): """ Plot ROC curve. Parameters ---------- y_true : array-like, shape = [n_samples] Correct target values (ground truth). y_score : array-like, shape = [n_samples] or [n_samples, 2] for binary classification or [n_samples, n_classes] for multiclass Target scores (estimator predictions). ax: matplotlib Axes Axes object to draw the plot onto, otherwise uses current Axes Notes ----- It is assumed that the y_score parameter columns are in order. For example, if ``y_true = [2, 2, 1, 0, 0, 1, 2]``, then the first column in y_score must contain the scores for class 0, second column for class 1 and so on. Returns ------- ax: matplotlib Axes Axes containing the plot Examples -------- .. plot:: ../../examples/roc.py """
if any((val is None for val in (y_true, y_score))):
    raise ValueError("y_true and y_score are needed to plot ROC")

if ax is None:
    ax = plt.gca()

# get the number of classes based on the shape of y_score
y_score_is_vector = is_column_vector(y_score) or is_row_vector(y_score)
if y_score_is_vector:
    n_classes = 2
else:
    _, n_classes = y_score.shape

# check data shape?
if n_classes > 2:
    # convert y_true to binary format
    y_true_bin = label_binarize(y_true, classes=np.unique(y_true))
    _roc_multi(y_true_bin, y_score, ax=ax)
    for i in range(n_classes):
        _roc(y_true_bin[:, i], y_score[:, i], ax=ax)
else:
    if y_score_is_vector:
        _roc(y_true, y_score, ax)
    else:
        _roc(y_true, y_score[:, 1], ax)

# raise error if n_classes = 1?
return ax
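The binarization step above can be seen in isolation; label_binarize turns an n-sample label vector into an indicator matrix with one column per class, which is what the per-class ROC loop iterates over:

import numpy as np
from sklearn.preprocessing import label_binarize

y_true = np.array([2, 2, 1, 0, 0, 1, 2])
y_true_bin = label_binarize(y_true, classes=np.unique(y_true))
print(y_true_bin)
# [[0 0 1]
#  [0 0 1]
#  [0 1 0]
#  [1 0 0]
#  [1 0 0]
#  [0 1 0]
#  [0 0 1]]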
<SYSTEM_TASK:> Plot ROC curve for binary classification. <END_TASK> <USER_TASK:> Description: def _roc(y_true, y_score, ax=None): """ Plot ROC curve for binary classification. Parameters ---------- y_true : array-like, shape = [n_samples] Correct target values (ground truth). y_score : array-like, shape = [n_samples] Target scores (estimator predictions). ax: matplotlib Axes Axes object to draw the plot onto, otherwise uses current Axes Returns ------- ax: matplotlib Axes Axes containing the plot """
# check dimensions
fpr, tpr, _ = roc_curve(y_true, y_score)
roc_auc = auc(fpr, tpr)
ax.plot(fpr, tpr, label=('ROC curve (area = {0:0.2f})'.format(roc_auc)))
_set_ax_settings(ax)
return ax
<SYSTEM_TASK:> Plot ROC curve for multi classification. <END_TASK> <USER_TASK:> Description: def _roc_multi(y_true, y_score, ax=None): """ Plot ROC curve for multi classification. Parameters ---------- y_true : array-like, shape = [n_samples, n_classes] Correct target values (ground truth). y_score : array-like, shape = [n_samples, n_classes] Target scores (estimator predictions). ax: matplotlib Axes Axes object to draw the plot onto, otherwise uses current Axes Returns ------- ax: matplotlib Axes Axes containing the plot """
# Compute micro-average ROC curve and ROC area
fpr, tpr, _ = roc_curve(y_true.ravel(), y_score.ravel())
roc_auc = auc(fpr, tpr)

if ax is None:
    ax = plt.gca()

ax.plot(fpr, tpr,
        label=('micro-average ROC curve (area = {0:0.2f})'.format(roc_auc)))
_set_ax_settings(ax)
return ax
<SYSTEM_TASK:> Set DHCP option with human friendly value <END_TASK> <USER_TASK:> Description: def create_option_from_value(tag, value): """ Set DHCP option with human friendly value """
dhcp_option.parser()
fake_opt = dhcp_option(tag=tag)
for c in dhcp_option.subclasses:
    if c.criteria(fake_opt):
        if hasattr(c, '_parse_from_value'):
            return c(tag=tag, value=c._parse_from_value(value))
        else:
            raise ValueError('Invalid DHCP option ' + str(tag) +
                             ": " + repr(value))
else:
    # no subclass matched: store the raw value
    fake_opt._setextra(_tobytes(value))
    return fake_opt
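The criteria-based dispatch can be sketched generically; the class names here are hypothetical, not VLCP's:

class Option(object):
    subclasses = []

class FakeOpt(object):
    def __init__(self, tag):
        self.tag = tag

class AddressOption(Option):
    @staticmethod
    def criteria(opt):
        return opt.tag == 1
    @staticmethod
    def _parse_from_value(value):
        return value.split('.')

Option.subclasses.append(AddressOption)

def dispatch(tag, value):
    fake = FakeOpt(tag)
    for c in Option.subclasses:
        if c.criteria(fake):
            return c._parse_from_value(value)
    return value  # no specialised subclass: keep the raw value

print(dispatch(1, '10.0.0.1'))  # ['10', '0', '0', '1']
print(dispatch(99, 'raw'))      # 'raw'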
<SYSTEM_TASK:> Try best to create dhcp_options from human friendly values, ignoring <END_TASK> <USER_TASK:> Description: def create_dhcp_options(input_dict, ignoreError = False, generateNone = False): """ Try best to create dhcp_options from human friendly values, ignoring invalid values """
retdict = {}
for k, v in dict(input_dict).items():
    try:
        if generateNone and v is None:
            retdict[k] = None
        else:
            try:
                retdict[k] = create_option_from_value(k, v)
            except _EmptyOptionException:
                if generateNone:
                    retdict[k] = None
    except Exception:
        if ignoreError:
            continue
        else:
            raise
return retdict
<SYSTEM_TASK:> Create the subqueue to change the default behavior of Lock to semaphore. <END_TASK> <USER_TASK:> Description: def create(self): """ Create the subqueue to change the default behavior of Lock to semaphore. """
self.queue = self.scheduler.queue.addSubQueue(
    self.priority,
    LockEvent.createMatcher(self.context, self.key),
    maxdefault=self.size,
    defaultQueueClass=CBQueue.AutoClassQueue.initHelper(
        'locker', subqueuelimit=1))
<SYSTEM_TASK:> Destroy the created subqueue to change the behavior back to Lock <END_TASK> <USER_TASK:> Description: async def destroy(self, container = None): """ Destroy the created subqueue to change the behavior back to Lock """
if container is None:
    container = RoutineContainer(self.scheduler)
if self.queue is not None:
    await container.syscall_noreturn(
        syscall_removequeue(self.scheduler.queue, self.queue))
    self.queue = None
<SYSTEM_TASK:> Use `sys.argv` for starting parameters. This is the entry-point of `vlcp-start` <END_TASK> <USER_TASK:> Description: def default_start(): """ Use `sys.argv` for starting parameters. This is the entry-point of `vlcp-start` """
(config, daemon, pidfile, startup, fork) = parsearg()
if config is None:
    if os.path.isfile('/etc/vlcp.conf'):
        config = '/etc/vlcp.conf'
    else:
        print('/etc/vlcp.conf is not found; start without configurations.')
elif not config:
    config = None
main(config, startup, daemon, pidfile, fork)
<SYSTEM_TASK:> Force a update notification on specified objects, even if they are not actually updated <END_TASK> <USER_TASK:> Description: def updateobjects(self, updatedvalues): """ Force a update notification on specified objects, even if they are not actually updated in ObjectDB """
if not self._updatedset:
    self.scheduler.emergesend(
        FlowUpdaterNotification(self, FlowUpdaterNotification.DATAUPDATED))
self._updatedset.update(set(updatedvalues).intersection(self._savedresult))
<SYSTEM_TASK:> Called from main module to update port information <END_TASK> <USER_TASK:> Description: async def update_ports(self, ports, ovsdb_ports): """ Called from main module to update port information """
new_port_names = dict((p['name'], _to32bitport(p['ofport']))
                      for p in ovsdb_ports)
new_port_ids = dict((p['id'], _to32bitport(p['ofport']))
                    for p in ovsdb_ports if p['id'])
if new_port_names == self._portnames and new_port_ids == self._portids:
    return
self._portnames.clear()
self._portnames.update(new_port_names)
self._portids.clear()
self._portids.update(new_port_ids)

logicalportkeys = [LogicalPort.default_key(id) for id in self._portids]

self._original_initialkeys = logicalportkeys + [PhysicalPortSet.default_key()]
self._initialkeys = tuple(itertools.chain(self._original_initialkeys,
                                          self._append_initialkeys))
phy_walker = partial(self._physicalport_walker, _portnames=new_port_names)
log_walker = partial(self._logicalport_walker, _portids=new_port_ids)
self._walkerdict = dict(itertools.chain(
    ((PhysicalPortSet.default_key(), phy_walker),),
    ((lgportkey, log_walker) for lgportkey in logicalportkeys)))
self._portnames = new_port_names
self._portids = new_port_ids
await self.restart_walk()
<SYSTEM_TASK:> Buffer the whole output until write EOF or flushed. <END_TASK> <USER_TASK:> Description: def bufferoutput(self): """ Buffer the whole output until write EOF or flushed. """
new_stream = Stream(writebufferlimit=None)
if self._sendHeaders:
    # An extra copy
    self.container.subroutine(
        new_stream.copy_to(self.outputstream, self.container,
                           buffering=False))
self.outputstream = Stream(writebufferlimit=None)
<SYSTEM_TASK:> Redirect this request with 3xx status <END_TASK> <USER_TASK:> Description: async def redirect(self, path, status = 302): """ Redirect this request with 3xx status """
location = urljoin(urlunsplit((b'https' if self.https else b'http',
                               self.host,
                               quote_from_bytes(self.path).encode('ascii'),
                               '',
                               '')),
                   path)
self.start_response(status, [(b'Location', location)])
await self.write(b'<a href="' + self.escape(location, True) + b'">' +
                 self.escape(location) + b'</a>')
await self.flush(True)
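The Location header is built by resolving the redirect target against the current request URL; a quick demonstration of the same resolution, shown with the Python 3 stdlib for clarity:

from urllib.parse import urljoin, urlunsplit

base = urlunsplit(('http', 'example.com', '/app/page', '', ''))
print(urljoin(base, 'login'))                # http://example.com/app/login
print(urljoin(base, '/login'))               # http://example.com/login
print(urljoin(base, '//cdn.example.com/x'))  # http://cdn.example.com/x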
<SYSTEM_TASK:> Escape special characters in HTML <END_TASK> <USER_TASK:> Description: def escape(self, text, quote = True): """ Escape special characters in HTML """
if isinstance(text, bytes):
    return escape_b(text, quote)
else:
    return escape(text, quote)
<SYSTEM_TASK:> Show default error response <END_TASK> <USER_TASK:> Description: async def error(self, status=500, allowredirect = True, close = True, showerror = None, headers = []): """ Show default error response """
if showerror is None:
    showerror = self.showerrorinfo
if self._sendHeaders:
    if showerror:
        typ, exc, tb = sys.exc_info()
        if exc:
            await self.write('<span style="white-space:pre-wrap">\n',
                             buffering=False)
            await self.writelines(
                (self.nl2br(self.escape(v))
                 for v in traceback.format_exception(typ, exc, tb)),
                buffering=False)
            await self.write('</span>\n', close, False)
elif allowredirect and status in self.protocol.errorrewrite:
    await self.rewrite(self.protocol.errorrewrite[status], b'GET')
elif allowredirect and status in self.protocol.errorredirect:
    await self.redirect(self.protocol.errorredirect[status])
else:
    self.start_response(status, headers)
    typ, exc, tb = sys.exc_info()
    if showerror and exc:
        await self.write('<span style="white-space:pre-wrap">\n',
                         buffering=False)
        await self.writelines(
            (self.nl2br(self.escape(v))
             for v in traceback.format_exception(typ, exc, tb)),
            buffering=False)
        await self.write('</span>\n', close, False)
    else:
        await self.write(b'<h1>' + _createstatus(status) + b'</h1>',
                         close, False)
<SYSTEM_TASK:> Write output to current output stream <END_TASK> <USER_TASK:> Description: async def write(self, data, eof = False, buffering = True): """ Write output to current output stream """
if not self.outputstream:
    self.outputstream = Stream()
    self._startResponse()
elif (not buffering or eof) and not self._sendHeaders:
    self._startResponse()
if not isinstance(data, bytes):
    data = data.encode(self.encoding)
await self.outputstream.write(data, self.connection, eof, False, buffering)
<SYSTEM_TASK:> Write lines to current output stream <END_TASK> <USER_TASK:> Description: async def writelines(self, lines, eof = False, buffering = True): """ Write lines to current output stream """
for l in lines:
    await self.write(l, False, buffering)
if eof:
    await self.write(b'', eof, buffering)
<SYSTEM_TASK:> Set output stream and send response immediately <END_TASK> <USER_TASK:> Description: def output(self, stream, disabletransferencoding = None): """ Set output stream and send response immediately """
if self._sendHeaders:
    raise HttpProtocolException('Cannot modify response, headers already sent')
self.outputstream = stream
try:
    content_length = len(stream)
except Exception:
    pass
else:
    self.header(b'Content-Length', str(content_length).encode('ascii'))
if disabletransferencoding is not None:
    self.disabledeflate = disabletransferencoding
self._startResponse()
<SYSTEM_TASK:> Send output with fixed length data <END_TASK> <USER_TASK:> Description: def outputdata(self, data): """ Send output with fixed length data """
if not isinstance(data, bytes):
    data = str(data).encode(self.encoding)
self.output(MemoryStream(data))
<SYSTEM_TASK:> Close this request, send all data. You can still run other operations in the handler. <END_TASK> <USER_TASK:> Description: async def close(self): """ Close this request, send all data. You can still run other operations in the handler. """
if not self._sendHeaders:
    self._startResponse()
if self.inputstream is not None:
    self.inputstream.close(self.connection.scheduler)
if self.outputstream is not None:
    await self.flush(True)
if hasattr(self, 'session') and self.session:
    self.session.unlock()
<SYSTEM_TASK:> Destroy current session. The session object is discarded and can no longer be used in other requests. <END_TASK> <USER_TASK:> Description: async def sessiondestroy(self): """ Destroy current session. The session object is discarded and can no longer be used in other requests. """
if hasattr(self, 'session') and self.session:
    setcookies = await call_api(self.container, 'session', 'destroy',
                                {'sessionid': self.session.id})
    self.session.unlock()
    del self.session
    for nc in setcookies:
        self.sent_cookies = [c for c in self.sent_cookies
                             if c.key != nc.key]
        self.sent_cookies.append(nc)
<SYSTEM_TASK:> Return 401 for authentication failure. This will end the handler. <END_TASK> <USER_TASK:> Description: def basicauthfail(self, realm = b'all'): """ Return 401 for authentication failure. This will end the handler. """
if not isinstance(realm, bytes):
    realm = realm.encode('ascii')
self.start_response(401, [(b'WWW-Authenticate',
                           b'Basic realm="' + realm + b'"')])
self.exit(b'<h1>' + _createstatus(401) + b'</h1>')
<SYSTEM_TASK:>
Create an anti-CSRF token in the session
<END_TASK>
<USER_TASK:>
Description:
async def createcsrf(self, csrfarg = '_csrf'):
        """ Create an anti-CSRF token in the session
        """
await self.sessionstart()
if csrfarg not in self.session.vars:
    self.session.vars[csrfarg] = uuid.uuid4().hex
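A hypothetical validation counterpart (not in the source) would compare the stored token against the one submitted with a form, e.g.:

    import hmac

    def check_csrf(session_vars, form, csrfarg='_csrf'):
        token = session_vars.get(csrfarg, '')
        submitted = form.get(csrfarg, '')
        # compare_digest avoids leaking the match position through timing
        return bool(token) and hmac.compare_digest(token, submitted)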
<SYSTEM_TASK:>
Serialize `obj` with JSON and output to the client
<END_TASK>
<USER_TASK:>
Description:
def outputjson(self, obj):
        """ Serialize `obj` with JSON and output to the client
        """
self.header('Content-Type', 'application/json')
self.outputdata(json.dumps(obj).encode('ascii'))
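A brief usage sketch (hedged: `env` as the handler environment object is an assumption):

    env.outputjson({'status': 'ok', 'items': [1, 2, 3]})
    # shorthand for setting Content-Type: application/json and
    # sending the serialized body with a fixed Content-Length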
<SYSTEM_TASK:>
If `expand` is used directly, the url-decoded content will be decoded again, which creates a security issue
<END_TASK>
<USER_TASK:>
Description:
def expand(cls, match, expand):
        """ If `expand` is used directly, the url-decoded content will be decoded again,
        which creates a security issue. Hack `expand` to quote the text before expanding.
        """
return re._expand(match.re, cls._EncodedMatch(match), expand)
<SYSTEM_TASK:>
PyPy 3 generators have a bug where calling `close` causes a memory leak
<END_TASK>
<USER_TASK:>
Description:
def _close_generator(g):
        """ PyPy 3 generators have a bug where calling `close` causes a memory leak.
        Until it is fixed, use `throw` instead.
        """
if isinstance(g, generatorwrapper):
    g.close()
elif _get_frame(g) is not None:
    try:
        g.throw(GeneratorExit_)
    except (StopIteration, GeneratorExit_):
        return
    else:
        raise RuntimeError("coroutine ignored GeneratorExit")
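A standalone illustration of the close/throw equivalence this helper relies on (plain Python, no VLCP types):

    def gen():
        try:
            yield 1
        except GeneratorExit:
            raise            # a well-behaved generator re-raises and finishes

    g = gen()
    next(g)
    try:
        g.throw(GeneratorExit)            # same effect as g.close() here
    except (StopIteration, GeneratorExit):
        pass                              # either way the generator has exited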
<SYSTEM_TASK:>
This wraps a normal coroutine to become a VLCP routine. Usually you do not need to call this yourself;
<END_TASK>
<USER_TASK:>
Description:
def Routine(coroutine, scheduler, asyncStart = True, container = None, manualStart = False, daemon = False):
        """ This wraps a normal coroutine to become a VLCP routine. Usually you do not need to
        call this yourself; `container.start` and `container.subroutine` call this automatically.
        """
def run():
    iterator = _await(coroutine)
    iterself = yield
    if manualStart:
        yield
    try:
        if asyncStart:
            scheduler.yield_(iterself)
            yield
        if container is not None:
            container.currentroutine = iterself
        if daemon:
            scheduler.setDaemon(iterself, True)
        try:
            matchers = next(iterator)
        except StopIteration:
            return
        while matchers is None:
            scheduler.yield_(iterself)
            yield
            try:
                matchers = next(iterator)
            except StopIteration:
                return
        try:
            scheduler.register(matchers, iterself)
        except Exception:
            try:
                iterator.throw(IllegalMatchersException(matchers))
            except StopIteration:
                pass
            raise
        while True:
            try:
                etup = yield
            except GeneratorExit_:
                raise
            except:
                #scheduler.unregister(matchers, iterself)
                lmatchers = matchers
                t, v, tr = sys.exc_info()  # @UnusedVariable
                if container is not None:
                    container.currentroutine = iterself
                try:
                    matchers = iterator.throw(t, v)
                except StopIteration:
                    return
            else:
                #scheduler.unregister(matchers, iterself)
                lmatchers = matchers
                if container is not None:
                    container.currentroutine = iterself
                try:
                    matchers = iterator.send(etup)
                except StopIteration:
                    return
            while matchers is None:
                scheduler.yield_(iterself)
                yield
                try:
                    matchers = next(iterator)
                except StopIteration:
                    return
            try:
                if hasattr(matchers, 'two_way_difference'):
                    reg, unreg = matchers.two_way_difference(lmatchers)
                else:
                    reg = set(matchers).difference(lmatchers)
                    unreg = set(lmatchers).difference(matchers)
                scheduler.register(reg, iterself)
                scheduler.unregister(unreg, iterself)
            except Exception:
                try:
                    iterator.throw(IllegalMatchersException(matchers))
                except StopIteration:
                    pass
                raise
    finally:
        # iterator.close() can be called in other routines, we should restore the currentroutine variable
        if container is not None:
            lastcurrentroutine = getattr(container, 'currentroutine', None)
            container.currentroutine = iterself
        else:
            lastcurrentroutine = None
        _close_generator(coroutine)
        if container is not None:
            container.currentroutine = lastcurrentroutine
        scheduler.unregisterall(iterself)
r = generatorwrapper(run())
next(r)
r.send(r)
return r
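A plain-set illustration of the registration diff computed on each turn (real matcher containers may provide `two_way_difference` to produce both sides at once):

    lmatchers = {'m1', 'm2'}                  # matchers registered last turn
    matchers = {'m2', 'm3'}                   # matchers requested this turn
    reg = set(matchers) - set(lmatchers)      # {'m3'}: only new matchers are registered
    unreg = set(lmatchers) - set(matchers)    # {'m1'}: only stale matchers are removed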
<SYSTEM_TASK:>
Monitor event matchers while executing a subprocess. If events are matched before the subprocess ends,
<END_TASK>
<USER_TASK:>
Description:
async def with_exception(self, subprocess, *matchers):
        """ Monitor event matchers while executing a subprocess. If events are matched before
        the subprocess ends, the subprocess is terminated and a RoutineException is raised.
        """
def _callback(event, matcher):
    raise RoutineException(matcher, event)
return await self.with_callback(subprocess, _callback, *matchers)
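A hedged usage sketch (the matcher and method names here are hypothetical):

    try:
        result = await self.with_exception(self.do_work(), connection_down_matcher)
    except RoutineException:
        # an interrupting event arrived first; the subprocess
        # has already been terminated at this point
        result = None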
<SYSTEM_TASK:>
Similar to `waitForAll`, but set `canignore=True` for these events. This ensures
<END_TASK>
<USER_TASK:>
Description:
async def wait_for_all_to_process(self, *matchers, eventlist = None, eventdict = None, callback = None):
        """ Similar to `waitForAll`, but set `canignore=True` for these events. This ensures
        blocking events are processed correctly.
        """
def _callback(event, matcher):
    event.canignore = True
    if callback:
        callback(event, matcher)
return await self.wait_for_all(*matchers, eventlist=eventlist, eventdict=eventdict, callback=_callback)
<SYSTEM_TASK:>
Wait for multiple queues to be empty at the same time.
<END_TASK>
<USER_TASK:>
Description:
async def wait_for_all_empty(self, *queues):
        """ Wait for multiple queues to be empty at the same time.
        Requires a delegate when called from coroutines running in other containers.
        """
matchers = [m for m in (q.waitForEmpty() for q in queues) if m is not None]
while matchers:
    await self.wait_for_all(*matchers)
    matchers = [m for m in (q.waitForEmpty() for q in queues) if m is not None]
<SYSTEM_TASK:>
Call a syscall method and retrieve its return value
<END_TASK>
<USER_TASK:>
Description:
async def syscall(self, func, ignoreException = False):
        """ Call a syscall method and retrieve its return value
        """
ev = await self.syscall_noreturn(func)
if hasattr(ev, 'exception'):
    if ignoreException:
        return
    else:
        raise ev.exception[1]
else:
    return ev.retvalue
<SYSTEM_TASK:>
Retrieve a begin_delegate result. Must be called immediately after begin_delegate
<END_TASK>
<USER_TASK:>
Description:
async def end_delegate(self, delegate_matcher, routine = None, forceclose = False):
        """ Retrieve a begin_delegate result. Must be called immediately after begin_delegate
        before any other `await`, or the result might be lost. Do not use this method without
        thinking. Always use `RoutineFuture` when possible.
        """
try:
    ev = await delegate_matcher
    if hasattr(ev, 'exception'):
        raise ev.exception
    else:
        return ev.result
finally:
    if forceclose and routine:
        routine.close()
<SYSTEM_TASK:>
Create a temporary instance for helper functions
<END_TASK>
<USER_TASK:>
Description:
def get_container(cls, scheduler):
        """ Create a temporary instance for helper functions
        """
if scheduler in cls._container_cache:
    return cls._container_cache[scheduler]
else:
    c = cls(scheduler)
    cls._container_cache[scheduler] = c
    return c
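The caching pattern in isolation, with a plain dict keyed by scheduler (illustration only):

    _cache = {}

    def get_or_create(scheduler, factory):
        if scheduler not in _cache:
            _cache[scheduler] = factory(scheduler)   # one container per scheduler
        return _cache[scheduler]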
<SYSTEM_TASK:>
Create a proxy module. A proxy module has a default implementation, but can be redirected to other
<END_TASK>
<USER_TASK:>
Description:
def proxy(name, default = None):
        """ Create a proxy module. A proxy module has a default implementation, but can be
        redirected to other implementations with configurations. Other modules can depend
        on proxy modules.
        """
proxymodule = _ProxyMetaClass(name, (_ProxyModule,), {'_default': default})
proxymodule.__module__ = sys._getframe(1).f_globals.get('__name__')
return proxymodule
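A hedged sketch of defining a proxy (the module names are hypothetical): the proxy behaves like a normal loadable module and uses the default unless the configuration redirects it to another implementation.

    FeatureProxy = proxy('FeatureProxy', default = DefaultFeature)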
<SYSTEM_TASK:>
Send API and discard the result
<END_TASK>
<USER_TASK:>
Description:
async def send_api(container, targetname, name, params = {}):
        """ Send API and discard the result
        """
handle = object()
apiEvent = ModuleAPICall(handle, targetname, name, params = params)
await container.wait_for_send(apiEvent)
<SYSTEM_TASK:>
Remove an API from this handler
<END_TASK>
<USER_TASK:>
Description:
def unregisterAPI(self, name):
        """ Remove an API from this handler
        """
if name.startswith('public/'):
    target = 'public'
    name = name[len('public/'):]
else:
    target = self.servicename
removes = [m for m in self.handler.handlers.keys() if m.target == target and m.name == name]
for m in removes:
    self.handler.unregisterHandler(m)
<SYSTEM_TASK:>
Load a module by full path. If there are dependencies, they are also loaded.
<END_TASK>
<USER_TASK:>
Description:
async def load_by_path(self, path):
        """ Load a module by full path. If there are dependencies, they are also loaded.
        """
try:
    p, module = findModule(path, True)
except KeyError as exc:
    raise ModuleLoadException('Cannot load module ' + repr(path) + ': ' + str(exc) + ' is not defined in the package')
except Exception as exc:
    raise ModuleLoadException('Cannot load module ' + repr(path) + ': ' + str(exc))
if module is None:
    raise ModuleLoadException('Cannot find module: ' + repr(path))
return await self.loadmodule(module)
<SYSTEM_TASK:>
Unload a module by full path. Dependencies are automatically unloaded if they are marked to be
<END_TASK>
<USER_TASK:>
Description:
async def unload_by_path(self, path):
        """ Unload a module by full path. Dependencies are automatically unloaded if they
        are marked to be services.
        """
p, module = findModule(path, False)
if module is None:
    raise ModuleLoadException('Cannot find module: ' + repr(path))
return await self.unloadmodule(module)
<SYSTEM_TASK:>
Return the module instance for a target name.
<END_TASK>
<USER_TASK:>
Description:
def get_module_by_name(self, targetname):
        """ Return the module instance for a target name.
        """
if targetname == 'public':
    target = None
elif targetname not in self.activeModules:
    raise KeyError('Module %r does not exist or is not loaded' % (targetname,))
else:
    target = self.activeModules[targetname]
return target
<SYSTEM_TASK:>
Return all configuration keys in this node, including configurations on children nodes.
<END_TASK>
<USER_TASK:>
Description:
def config_keys(self, sortkey = False):
        """ Return all configuration keys in this node, including configurations on children nodes.
        """
if sortkey:
    items = sorted(self.items())
else:
    items = self.items()
for k, v in items:
    if isinstance(v, ConfigTree):
        for k2 in v.config_keys(sortkey):
            yield k + '.' + k2
    else:
        yield k
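A standalone illustration of the dotted-key walk, using plain dicts in place of ConfigTree nodes:

    def walk(tree, prefix=''):
        for k, v in sorted(tree.items()):
            if isinstance(v, dict):                    # stand-in for ConfigTree
                yield from walk(v, prefix + k + '.')
            else:
                yield prefix + k

    print(list(walk({'server': {'port': 80, 'debug': True}, 'name': 'demo'})))
    # ['name', 'server.debug', 'server.port']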
<SYSTEM_TASK:>
Return configuration keys directly stored in this node. Configurations in child nodes are not included.
<END_TASK>
<USER_TASK:>
Description:
def config_value_keys(self, sortkey = False):
        """ Return configuration keys directly stored in this node. Configurations in
        child nodes are not included.
        """
if sortkey:
    items = sorted(self.items())
else:
    items = self.items()
return (k for k, v in items if not isinstance(v, ConfigTree))
<SYSTEM_TASK:>
Copy all configurations from this node into obj
<END_TASK>
<USER_TASK:>
Description:
def loadconfig(self, keysuffix, obj):
        """ Copy all configurations from this node into obj
        """
subtree = self.get(keysuffix)
if subtree is not None and isinstance(subtree, ConfigTree):
    for k, v in subtree.items():
        if isinstance(v, ConfigTree):
            if hasattr(obj, k) and not isinstance(getattr(obj, k), ConfigTree):
                v.loadconfig(getattr(obj, k))
            else:
                setattr(obj, k, v)
        elif not hasattr(obj, k):
            setattr(obj, k, v)
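A hedged usage sketch (the key and class names are hypothetical). Note that leaf values already present on the target are kept, per the hasattr checks above:

    class HttpServerConfig(object):
        port = 8080                                    # kept even if configured

    configtree.loadconfig('httpserver', HttpServerConfig)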
<SYSTEM_TASK:>
Load configurations with this decorator
<END_TASK>
<USER_TASK:>
Description:
def withconfig(self, keysuffix):
        """ Load configurations with this decorator
        """
def decorator(cls):
    return self.loadconfig(keysuffix, cls)
return decorator
<SYSTEM_TASK:>
Convert this node to a dictionary tree.
<END_TASK>
<USER_TASK:>
Description:
def todict(self):
        """ Convert this node to a dictionary tree.
        """
dict_entry = []
for k, v in self.items():
    if isinstance(v, ConfigTree):
        dict_entry.append((k, v.todict()))
    else:
        dict_entry.append((k, v))
return dict(dict_entry)