def dodirot_V(di_block, Dbar, Ibar):
    """
    Rotate an array of dec/inc pairs to a coordinate system with Dec,Inc as 0,90

    Parameters
    ----------
    di_block : array of [[Dec1,Inc1],[Dec2,Inc2],....]
    Dbar : declination of desired center
    Ibar : inclination of desired center

    Returns
    -------
    array of rotated decs and incs: [[rot_Dec1,rot_Inc1],[rot_Dec2,rot_Inc2],....]
    """
    N = di_block.shape[0]
    # np.float was removed from numpy; the builtin float is equivalent here
    DipDir = np.ones(N, dtype=float) * (Dbar - 180.)
    Dip = np.ones(N, dtype=float) * (90. - Ibar)
    di_block = di_block.transpose()
    data = np.array([di_block[0], di_block[1], DipDir, Dip]).transpose()
    drot, irot = dotilt_V(data)
    drot = (drot - 180.) % 360.
    return np.column_stack((drot, irot))

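A minimal usage sketch, assuming numpy is imported as np and the companion dotilt_V routine referenced in the body is available; the input values are made up:

import numpy as np

# three hypothetical (declination, inclination) pairs in degrees
di_block = np.array([[350., 50.], [10., 60.], [5., 55.]])

# rotate the set so the direction (Dbar=5., Ibar=55.) maps to vertical
rotated = dodirot_V(di_block, Dbar=5., Ibar=55.)
print(rotated.shape)  # (3, 2): columns are rotated dec and inc
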
[ "def", "dodirot_V", "(", "di_block", ",", "Dbar", ",", "Ibar", ")", ":", "N", "=", "di_block", ".", "shape", "[", "0", "]", "DipDir", ",", "Dip", "=", "np", ".", "ones", "(", "N", ",", "dtype", "=", "np", ".", "float", ")", ".", "transpose", "(", ")", "*", "(", "Dbar", "-", "180.", ")", ",", "np", ".", "ones", "(", "N", ",", "dtype", "=", "np", ".", "float", ")", ".", "transpose", "(", ")", "*", "(", "90.", "-", "Ibar", ")", "di_block", "=", "di_block", ".", "transpose", "(", ")", "data", "=", "np", ".", "array", "(", "[", "di_block", "[", "0", "]", ",", "di_block", "[", "1", "]", ",", "DipDir", ",", "Dip", "]", ")", ".", "transpose", "(", ")", "drot", ",", "irot", "=", "dotilt_V", "(", "data", ")", "drot", "=", "(", "drot", "-", "180.", ")", "%", "360.", "#", "return", "np", ".", "column_stack", "(", "(", "drot", ",", "irot", ")", ")" ]
34.727273
17.636364
def report_target_info(self, scope, target, keys, val):
    """Add target information to run_info under target_data.

    Recursively constructs a nested dict with the keys provided.

    Primitive values can be overwritten with other primitive values, but a
    primitive value cannot be overwritten with a dictionary. For example:
    where the dictionary being updated is {'a': {'b': 16}}, reporting the
    value 15 with the key list ['a', 'b'] will result in {'a': {'b': 15}};
    but reporting the value 20 with the key list ['a', 'b', 'c'] will throw
    an error.

    :param string scope: The scope for which we are reporting the information.
    :param target: The target for which we want to store information.
    :type target: :class:`pants.build_graph.target.Target`
    :param list of string keys: The keys that will be recursively nested and
        pointing to the information being stored.
    :param primitive val: The value of the information being stored.

    :API: public
    """
    new_key_list = [target.address.spec, scope]
    new_key_list += keys
    self._merge_list_of_keys_into_dict(self._target_to_data, new_key_list, val, 0)

[ "def", "report_target_info", "(", "self", ",", "scope", ",", "target", ",", "keys", ",", "val", ")", ":", "new_key_list", "=", "[", "target", ".", "address", ".", "spec", ",", "scope", "]", "new_key_list", "+=", "keys", "self", ".", "_merge_list_of_keys_into_dict", "(", "self", ".", "_target_to_data", ",", "new_key_list", ",", "val", ",", "0", ")" ]
44
26.884615
def eval_stdin():
    'evaluate expressions read from stdin'
    cmd = ['plash', 'eval']
    p = subprocess.Popen(cmd, stdin=sys.stdin, stdout=sys.stdout)
    exit = p.wait()
    if exit:
        raise subprocess.CalledProcessError(exit, cmd)

[ "def", "eval_stdin", "(", ")", ":", "cmd", "=", "[", "'plash'", ",", "'eval'", "]", "p", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdin", "=", "sys", ".", "stdin", ",", "stdout", "=", "sys", ".", "stdout", ")", "exit", "=", "p", ".", "wait", "(", ")", "if", "exit", ":", "raise", "subprocess", ".", "CalledProcessError", "(", "exit", ",", "cmd", ")" ]
33.714286
18
def get_attname_column(self):
    """
    Get the database column name automatically in most cases.
    """
    # See "A guide to Field parameters": django/db/models/fields/__init__.py
    # * attname: The attribute to use on the model object. This is the same as
    #            "name", except in the case of ForeignKeys, where "_id" is
    #            appended.
    # * column:  The database column for this field. This is the same as
    #            "attname", except if db_column is specified.
    attname = self.get_attname()
    if self.db_column is not None:
        # explicit name
        column = self.db_column
    else:
        if not self.name.islower():
            # a Salesforce style name e.g. 'LastName' or 'MyCustomField'
            column = self.name
        else:
            # a Django style name like 'last_name' or 'my_custom_field'
            column = self.name.title().replace('_', '')
        # Fix custom fields
        if self.sf_custom:
            column = self.sf_namespace + column + '__c'
    return attname, column

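The naming rule is easy to check in isolation; the helper below is a hypothetical standalone restatement of the branch logic, not part of the original class:

def sf_column_for(name, db_column=None, sf_custom=False, sf_namespace=''):
    # mirrors the branches of get_attname_column above
    if db_column is not None:
        return db_column
    column = name if not name.islower() else name.title().replace('_', '')
    if sf_custom:
        column = sf_namespace + column + '__c'
    return column

print(sf_column_for('last_name'))                        # LastName
print(sf_column_for('LastName'))                         # LastName
print(sf_column_for('my_custom_field', sf_custom=True))  # MyCustomField__c
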
[ "def", "get_attname_column", "(", "self", ")", ":", "# See \"A guide to Field parameters\": django/db/models/fields/__init__.py", "# * attname: The attribute to use on the model object. This is the same as", "# \"name\", except in the case of ForeignKeys, where \"_id\" is", "# appended.", "# * column: The database column for this field. This is the same as", "# \"attname\", except if db_column is specified.", "attname", "=", "self", ".", "get_attname", "(", ")", "if", "self", ".", "db_column", "is", "not", "None", ":", "# explicit name", "column", "=", "self", ".", "db_column", "else", ":", "if", "not", "self", ".", "name", ".", "islower", "(", ")", ":", "# a Salesforce style name e.g. 'LastName' or 'MyCustomField'", "column", "=", "self", ".", "name", "else", ":", "# a Django style name like 'last_name' or 'my_custom_field'", "column", "=", "self", ".", "name", ".", "title", "(", ")", ".", "replace", "(", "'_'", ",", "''", ")", "# Fix custom fields", "if", "self", ".", "sf_custom", ":", "column", "=", "self", ".", "sf_namespace", "+", "column", "+", "'__c'", "return", "attname", ",", "column" ]
45.84
18.32
def get_current_price(crypto, fiat, services=None, convert_to=None, helper_prices=None, **modes):
    """
    High level function for getting the current exchange rate for a cryptocurrency.
    If the fiat value is not explicitly defined, it will try the wildcard service.
    If that does not work, it tries converting to an intermediate cryptocurrency
    if available.
    """
    fiat = fiat.lower()
    args = {'crypto': crypto, 'fiat': fiat, 'convert_to': convert_to}
    if not services:
        services = get_optimal_services(crypto, 'current_price')

    if fiat in services:
        # first, try services with explicit fiat support
        try_services = services[fiat]
        result = _try_price_fetch(try_services, args, modes)
        if not isinstance(result, Exception):
            return result

    if '*' in services:
        # then try wildcard services
        try_services = services['*']
        result = _try_price_fetch(try_services, args, modes)
        if not isinstance(result, Exception):
            return result

    def _do_composite_price_fetch(crypto, convert_crypto, fiat, helpers, modes):
        before = modes.get('report_services', False)
        modes['report_services'] = True
        services1, converted_price = get_current_price(crypto, convert_crypto, **modes)
        if not helpers or convert_crypto not in helpers[fiat]:
            services2, fiat_price = get_current_price(convert_crypto, fiat, **modes)
        else:
            services2, fiat_price = helpers[fiat][convert_crypto]
        modes['report_services'] = before

        if modes.get('report_services', False):
            # print("composite service:", crypto, fiat, services1, services2)
            serv = CompositeService(services1, services2, convert_crypto)
            return [serv], converted_price * fiat_price
        else:
            return converted_price * fiat_price

    all_composite_cryptos = ['btc', 'ltc', 'doge', 'uno']
    if crypto in all_composite_cryptos:
        all_composite_cryptos.remove(crypto)

    for composite_attempt in all_composite_cryptos:
        if composite_attempt in services and services[composite_attempt]:
            result = _do_composite_price_fetch(
                crypto, composite_attempt, fiat, helper_prices, modes
            )
            if not isinstance(result, Exception):
                return result

    raise result

[ "def", "get_current_price", "(", "crypto", ",", "fiat", ",", "services", "=", "None", ",", "convert_to", "=", "None", ",", "helper_prices", "=", "None", ",", "*", "*", "modes", ")", ":", "fiat", "=", "fiat", ".", "lower", "(", ")", "args", "=", "{", "'crypto'", ":", "crypto", ",", "'fiat'", ":", "fiat", ",", "'convert_to'", ":", "convert_to", "}", "if", "not", "services", ":", "services", "=", "get_optimal_services", "(", "crypto", ",", "'current_price'", ")", "if", "fiat", "in", "services", ":", "# first, try service with explicit fiat support", "try_services", "=", "services", "[", "fiat", "]", "result", "=", "_try_price_fetch", "(", "try_services", ",", "args", ",", "modes", ")", "if", "not", "isinstance", "(", "result", ",", "Exception", ")", ":", "return", "result", "if", "'*'", "in", "services", ":", "# then try wildcard service", "try_services", "=", "services", "[", "'*'", "]", "result", "=", "_try_price_fetch", "(", "try_services", ",", "args", ",", "modes", ")", "if", "not", "isinstance", "(", "result", ",", "Exception", ")", ":", "return", "result", "def", "_do_composite_price_fetch", "(", "crypto", ",", "convert_crypto", ",", "fiat", ",", "helpers", ",", "modes", ")", ":", "before", "=", "modes", ".", "get", "(", "'report_services'", ",", "False", ")", "modes", "[", "'report_services'", "]", "=", "True", "services1", ",", "converted_price", "=", "get_current_price", "(", "crypto", ",", "convert_crypto", ",", "*", "*", "modes", ")", "if", "not", "helpers", "or", "convert_crypto", "not", "in", "helpers", "[", "fiat", "]", ":", "services2", ",", "fiat_price", "=", "get_current_price", "(", "convert_crypto", ",", "fiat", ",", "*", "*", "modes", ")", "else", ":", "services2", ",", "fiat_price", "=", "helpers", "[", "fiat", "]", "[", "convert_crypto", "]", "modes", "[", "'report_services'", "]", "=", "before", "if", "modes", ".", "get", "(", "'report_services'", ",", "False", ")", ":", "#print(\"composit service:\", crypto, fiat, services1, services2)", "serv", "=", "CompositeService", "(", "services1", ",", "services2", ",", "convert_crypto", ")", "return", "[", "serv", "]", ",", "converted_price", "*", "fiat_price", "else", ":", "return", "converted_price", "*", "fiat_price", "all_composite_cryptos", "=", "[", "'btc'", ",", "'ltc'", ",", "'doge'", ",", "'uno'", "]", "if", "crypto", "in", "all_composite_cryptos", ":", "all_composite_cryptos", ".", "remove", "(", "crypto", ")", "for", "composite_attempt", "in", "all_composite_cryptos", ":", "if", "composite_attempt", "in", "services", "and", "services", "[", "composite_attempt", "]", ":", "result", "=", "_do_composite_price_fetch", "(", "crypto", ",", "composite_attempt", ",", "fiat", ",", "helper_prices", ",", "modes", ")", "if", "not", "isinstance", "(", "result", ",", "Exception", ")", ":", "return", "result", "raise", "result" ]
41.571429
23.178571
def process_result(self, context, result_body, exc, content_type):
    """
    Given a result body and an exception object, return the appropriate
    result object, or raise an exception.
    """
    return process_result(self, context, result_body, exc, content_type)

[ "def", "process_result", "(", "self", ",", "context", ",", "result_body", ",", "exc", ",", "content_type", ")", ":", "return", "process_result", "(", "self", ",", "context", ",", "result_body", ",", "exc", ",", "content_type", ")" ]
41.714286
12.857143
def output_json(data, code, headers=None):
    '''Use Flask JSON to serialize'''
    resp = make_response(json.dumps(data), code)
    resp.headers.extend(headers or {})
    return resp

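A quick sketch of calling this representation directly; make_response needs an active Flask request context, hence the test context, and the header name is made up:

from flask import Flask, make_response
import json

app = Flask(__name__)
with app.test_request_context():
    resp = output_json({'status': 'ok'}, 200, headers={'X-Example': '1'})
    print(resp.status_code)           # 200
    print(resp.headers['X-Example'])  # 1
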
[ "def", "output_json", "(", "data", ",", "code", ",", "headers", "=", "None", ")", ":", "resp", "=", "make_response", "(", "json", ".", "dumps", "(", "data", ")", ",", "code", ")", "resp", ".", "headers", ".", "extend", "(", "headers", "or", "{", "}", ")", "return", "resp" ]
36
8
def set(self, value, mode=None):
    """Sets metric value.

    :param int|long value: New value.

    :param str|unicode mode: Update mode.

        * None - Unconditional update.
        * max - Sets metric value if it is greater than the current one.
        * min - Sets metric value if it is less than the current one.

    :rtype: bool
    """
    if mode == 'max':
        func = uwsgi.metric_set_max
    elif mode == 'min':
        func = uwsgi.metric_set_min
    else:
        func = uwsgi.metric_set
    return func(self.name, value)

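A usage sketch; the uwsgi module only exists inside a running uWSGI process, so this is illustrative rather than runnable standalone, and Metric is a hypothetical wrapper class exposing the method above:

# inside a uWSGI-hosted application:
metric = Metric('worker.queue_depth')  # hypothetical: stores self.name
metric.set(10)              # unconditional update
metric.set(15, mode='max')  # applied only because 15 > 10
metric.set(5, mode='min')   # applied only because 5 < 15
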
[ "def", "set", "(", "self", ",", "value", ",", "mode", "=", "None", ")", ":", "if", "mode", "==", "'max'", ":", "func", "=", "uwsgi", ".", "metric_set_max", "elif", "mode", "==", "'min'", ":", "func", "=", "uwsgi", ".", "metric_set_min", "else", ":", "func", "=", "uwsgi", ".", "metric_set", "return", "func", "(", "self", ".", "name", ",", "value", ")" ]
24.333333
20.416667
def raw_datastream(request, pid, dsid, repo=None, headers=None, as_of_date=None):
    '''
    Access raw datastream content from a Fedora object.

    Returns :class:`~django.http.HttpResponse` for HEAD requests,
    :class:`~django.http.StreamingHttpResponse` for GET requests. The headers
    and status code from the Fedora response are set on the django response;
    any headers specified in the parameters will override Fedora headers. If
    an HTTP_RANGE header is present on the request, it is passed through to
    Fedora.

    This view method is wrapped with ETag and last modified conditionals.

    :param request: HttpRequest
    :param pid: Fedora object PID
    :param dsid: datastream ID
    :param repo: :class:`~eulcore.django.fedora.server.Repository` instance to use,
        in case your application requires custom repository initialization (optional)
    :param headers: dictionary of additional headers to include in the response
    :param as_of_date: access a historical version of the datastream
    '''
    return _raw_datastream(request, pid, dsid, repo=repo, headers=headers,
                           as_of_date=as_of_date)

[ "def", "raw_datastream", "(", "request", ",", "pid", ",", "dsid", ",", "repo", "=", "None", ",", "headers", "=", "None", ",", "as_of_date", "=", "None", ")", ":", "return", "_raw_datastream", "(", "request", ",", "pid", ",", "dsid", ",", "repo", "=", "repo", ",", "headers", "=", "headers", ",", "as_of_date", "=", "as_of_date", ")" ]
48.652174
25.956522
def get_reversed_statuses(context):
    """Return a mapping of exit codes to status strings.

    Args:
        context (scriptworker.context.Context): the scriptworker context

    Returns:
        dict: the mapping of exit codes to status strings.
    """
    _rev = {v: k for k, v in STATUSES.items()}
    _rev.update(dict(context.config['reversed_statuses']))
    return _rev

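The inversion itself is worth seeing with concrete values; these STATUSES codes are illustrative, not scriptworker's real table:

STATUSES = {'success': 0, 'failure': 1, 'internal-error': 5}
_rev = {v: k for k, v in STATUSES.items()}
print(_rev[5])  # internal-error
# entries from context.config['reversed_statuses'] would then override these defaults
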
[ "def", "get_reversed_statuses", "(", "context", ")", ":", "_rev", "=", "{", "v", ":", "k", "for", "k", ",", "v", "in", "STATUSES", ".", "items", "(", ")", "}", "_rev", ".", "update", "(", "dict", "(", "context", ".", "config", "[", "'reversed_statuses'", "]", ")", ")", "return", "_rev" ]
28.307692
21.769231
def loggabor(self, x_pos, y_pos, sf_0, B_sf, theta, B_theta, preprocess=True):
    """
    Returns the envelope of a LogGabor.

    Note that the convention for coordinates follows that of matrices:
    the origin is at the top left of the image, and coordinates are first
    the rows (vertical axis, going down) then the columns (horizontal axis,
    going right).
    """
    env = np.multiply(self.band(sf_0, B_sf), self.orientation(theta, B_theta))
    if not(x_pos==0.) and not(y_pos==0.):  # bypass translation whenever none is needed
        env = env.astype(np.complex128) * self.trans(x_pos*1., y_pos*1.)
    if preprocess:
        env *= self.f_mask  # retina processing
    # normalizing energy:
    env /= np.sqrt((np.abs(env)**2).mean())
    # in the case of a single bump (see ``orientation``), we should compensate
    # for the fact that the distribution gets complex:
    env *= np.sqrt(2.)
    return env

[ "def", "loggabor", "(", "self", ",", "x_pos", ",", "y_pos", ",", "sf_0", ",", "B_sf", ",", "theta", ",", "B_theta", ",", "preprocess", "=", "True", ")", ":", "env", "=", "np", ".", "multiply", "(", "self", ".", "band", "(", "sf_0", ",", "B_sf", ")", ",", "self", ".", "orientation", "(", "theta", ",", "B_theta", ")", ")", "if", "not", "(", "x_pos", "==", "0.", ")", "and", "not", "(", "y_pos", "==", "0.", ")", ":", "# bypass translation whenever none is needed", "env", "=", "env", ".", "astype", "(", "np", ".", "complex128", ")", "*", "self", ".", "trans", "(", "x_pos", "*", "1.", ",", "y_pos", "*", "1.", ")", "if", "preprocess", ":", "env", "*=", "self", ".", "f_mask", "# retina processing", "# normalizing energy:", "env", "/=", "np", ".", "sqrt", "(", "(", "np", ".", "abs", "(", "env", ")", "**", "2", ")", ".", "mean", "(", ")", ")", "# in the case a a single bump (see ``orientation``), we should compensate the fact that the distribution gets complex:", "env", "*=", "np", ".", "sqrt", "(", "2.", ")", "return", "env" ]
47.55
29.05
def push(self, obj):
    """Pushes a new item to the stack"""
    rv = getattr(self._local, "stack", None)
    if rv is None:
        self._local.stack = rv = []
    rv.append(obj)
    return rv

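A self-contained sketch showing why the stack lives on a threading.local (each thread sees its own list); the LocalStack class name is an assumption:

import threading

class LocalStack:
    def __init__(self):
        self._local = threading.local()
    push = push  # reuse the method defined above

stack = LocalStack()
print(stack.push('a'))  # ['a']
print(stack.push('b'))  # ['a', 'b']; a second thread would start with its own empty list
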
[ "def", "push", "(", "self", ",", "obj", ")", ":", "rv", "=", "getattr", "(", "self", ".", "_local", ",", "\"stack\"", ",", "None", ")", "if", "rv", "is", "None", ":", "self", ".", "_local", ".", "stack", "=", "rv", "=", "[", "]", "rv", ".", "append", "(", "obj", ")", "return", "rv" ]
30.285714
12.571429
def load_related(self, meta, fname, data, fields, encoding):
    '''Parse data for related objects.'''
    field = meta.dfields[fname]
    if field in meta.multifields:
        fmeta = field.structure_class()._meta
        if fmeta.name in ('hashtable', 'zset'):
            return ((native_str(id, encoding),
                     pairs_to_dict(fdata, encoding)) for id, fdata in data)
        else:
            return ((native_str(id, encoding), fdata) for id, fdata in data)
    else:
        # this is data for stdmodel instances
        return self.build(data, meta, fields, fields, encoding)

[ "def", "load_related", "(", "self", ",", "meta", ",", "fname", ",", "data", ",", "fields", ",", "encoding", ")", ":", "field", "=", "meta", ".", "dfields", "[", "fname", "]", "if", "field", "in", "meta", ".", "multifields", ":", "fmeta", "=", "field", ".", "structure_class", "(", ")", ".", "_meta", "if", "fmeta", ".", "name", "in", "(", "'hashtable'", ",", "'zset'", ")", ":", "return", "(", "(", "native_str", "(", "id", ",", "encoding", ")", ",", "pairs_to_dict", "(", "fdata", ",", "encoding", ")", ")", "for", "id", ",", "fdata", "in", "data", ")", "else", ":", "return", "(", "(", "native_str", "(", "id", ",", "encoding", ")", ",", "fdata", ")", "for", "id", ",", "fdata", "in", "data", ")", "else", ":", "# this is data for stdmodel instances\r", "return", "self", ".", "build", "(", "data", ",", "meta", ",", "fields", ",", "fields", ",", "encoding", ")" ]
46.133333
13.333333
def confd_state_netconf_listen_tcp_port(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    confd_state = ET.SubElement(config, "confd-state",
                                xmlns="http://tail-f.com/yang/confd-monitoring")
    netconf = ET.SubElement(confd_state, "netconf")
    listen = ET.SubElement(netconf, "listen")
    tcp = ET.SubElement(listen, "tcp")
    port = ET.SubElement(tcp, "port")
    port.text = kwargs.pop('port')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)

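Since every line is a plain ElementTree call, the emitted document can be previewed without the NETCONF machinery; port 830 is just an example value:

import xml.etree.ElementTree as ET

config = ET.Element("config")
confd_state = ET.SubElement(config, "confd-state",
                            xmlns="http://tail-f.com/yang/confd-monitoring")
netconf = ET.SubElement(confd_state, "netconf")
listen = ET.SubElement(netconf, "listen")
tcp = ET.SubElement(listen, "tcp")
port = ET.SubElement(tcp, "port")
port.text = "830"
print(ET.tostring(config).decode())
# (wrapped for readability)
# <config><confd-state xmlns="http://tail-f.com/yang/confd-monitoring">
#   <netconf><listen><tcp><port>830</port></tcp></listen></netconf></confd-state>
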
[ "def", "confd_state_netconf_listen_tcp_port", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "confd_state", "=", "ET", ".", "SubElement", "(", "config", ",", "\"confd-state\"", ",", "xmlns", "=", "\"http://tail-f.com/yang/confd-monitoring\"", ")", "netconf", "=", "ET", ".", "SubElement", "(", "confd_state", ",", "\"netconf\"", ")", "listen", "=", "ET", ".", "SubElement", "(", "netconf", ",", "\"listen\"", ")", "tcp", "=", "ET", ".", "SubElement", "(", "listen", ",", "\"tcp\"", ")", "port", "=", "ET", ".", "SubElement", "(", "tcp", ",", "\"port\"", ")", "port", ".", "text", "=", "kwargs", ".", "pop", "(", "'port'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
42.615385
13.923077
def get_product_set(
    self, location, product_set_id, project_id=None, retry=None, timeout=None, metadata=None
):
    """
    For the documentation see:
    :class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator`
    """
    client = self.get_conn()
    name = ProductSearchClient.product_set_path(project_id, location, product_set_id)
    self.log.info('Retrieving ProductSet: %s', name)
    response = client.get_product_set(name=name, retry=retry, timeout=timeout, metadata=metadata)
    self.log.info('ProductSet retrieved.')
    self.log.debug('ProductSet retrieved:\n%s', response)
    return MessageToDict(response)

[ "def", "get_product_set", "(", "self", ",", "location", ",", "product_set_id", ",", "project_id", "=", "None", ",", "retry", "=", "None", ",", "timeout", "=", "None", ",", "metadata", "=", "None", ")", ":", "client", "=", "self", ".", "get_conn", "(", ")", "name", "=", "ProductSearchClient", ".", "product_set_path", "(", "project_id", ",", "location", ",", "product_set_id", ")", "self", ".", "log", ".", "info", "(", "'Retrieving ProductSet: %s'", ",", "name", ")", "response", "=", "client", ".", "get_product_set", "(", "name", "=", "name", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "self", ".", "log", ".", "info", "(", "'ProductSet retrieved.'", ")", "self", ".", "log", ".", "debug", "(", "'ProductSet retrieved:\\n%s'", ",", "response", ")", "return", "MessageToDict", "(", "response", ")" ]
49.785714
23.928571
def register_all(self, callback, user_data=None):
    """Register a callback for all sensors."""
    self._callback = callback
    self._callback_data = user_data

[ "def", "register_all", "(", "self", ",", "callback", ",", "user_data", "=", "None", ")", ":", "self", ".", "_callback", "=", "callback", "self", ".", "_callback_data", "=", "user_data" ]
42.75
4.25
def parse_request(cls, request_string):
    """JSONRPC allows for **batch** requests to be communicated as an array
    of dicts. This method parses out each individual element in the batch
    and returns a list of tuples, each tuple a result of parsing of each
    item in the batch.

    :Returns: | tuple of (results, is_batch_mode_flag)
              | where:
              |  - results is a tuple describing the request
              |  - is_batch_mode_flag is a Bool indicating if the
              |    request came in in batch mode (as an array of requests) or not.
    :Raises: RPCParseError, RPCInvalidRequest
    """
    try:
        batch = cls.json_loads(request_string)
    except ValueError as err:
        raise errors.RPCParseError("No valid JSON. (%s)" % str(err))
    if isinstance(batch, (list, tuple)) and batch:
        # batch is a true batch:
        # list of parsed request objects, is_batch_mode_flag
        return [cls._parse_single_request_trap_errors(request) for request in batch], True
    elif isinstance(batch, dict):
        # `batch` is actually a single request object
        return [cls._parse_single_request_trap_errors(batch)], False
    raise errors.RPCInvalidRequest("Neither a batch array nor a single request object found in the request.")

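A sketch of the two shapes the parser distinguishes; the owning class and its json_loads/_parse_single_request_trap_errors hooks are referenced but not defined here, so the calls are shown as comments:

import json

single = json.dumps({"jsonrpc": "2.0", "method": "ping", "id": 1})
batch = json.dumps([
    {"jsonrpc": "2.0", "method": "ping", "id": 1},
    {"jsonrpc": "2.0", "method": "echo", "params": ["hi"], "id": 2},
])

# results, is_batch = SomeRequestClass.parse_request(single)  # -> ([...], False)
# results, is_batch = SomeRequestClass.parse_request(batch)   # -> ([..., ...], True)
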
[ "def", "parse_request", "(", "cls", ",", "request_string", ")", ":", "try", ":", "batch", "=", "cls", ".", "json_loads", "(", "request_string", ")", "except", "ValueError", "as", "err", ":", "raise", "errors", ".", "RPCParseError", "(", "\"No valid JSON. (%s)\"", "%", "str", "(", "err", ")", ")", "if", "isinstance", "(", "batch", ",", "(", "list", ",", "tuple", ")", ")", "and", "batch", ":", "# batch is true batch.", "# list of parsed request objects, is_batch_mode_flag", "return", "[", "cls", ".", "_parse_single_request_trap_errors", "(", "request", ")", "for", "request", "in", "batch", "]", ",", "True", "elif", "isinstance", "(", "batch", ",", "dict", ")", ":", "# `batch` is actually single request object", "return", "[", "cls", ".", "_parse_single_request_trap_errors", "(", "batch", ")", "]", ",", "False", "raise", "errors", ".", "RPCInvalidRequest", "(", "\"Neither a batch array nor a single request object found in the request.\"", ")" ]
48.357143
23.928571
def get_elt_projected_plots(self, zero_to_efermi=True, ylim=None,
                            vbm_cbm_marker=False):
    """
    Method returning a plot composed of subplots along different elements.

    Returns:
        a pylab object with different subfigures for each projection.
        The blue and red colors are for spin up and spin down.
        The bigger the red or blue dot in the band structure, the higher
        the character for the corresponding element and orbital.
    """
    band_linewidth = 1.0
    proj = self._get_projections_by_branches(
        {e.symbol: ['s', 'p', 'd']
         for e in self._bs.structure.composition.elements})
    data = self.bs_plot_data(zero_to_efermi)
    plt = pretty_plot(12, 8)
    e_min = -4
    e_max = 4
    if self._bs.is_metal():
        e_min = -10
        e_max = 10
    count = 1
    for el in self._bs.structure.composition.elements:
        plt.subplot(220 + count)
        self._maketicks(plt)
        for b in range(len(data['distances'])):
            for i in range(self._nb_bands):
                plt.plot(data['distances'][b],
                         [data['energy'][b][str(Spin.up)][i][j]
                          for j in range(len(data['distances'][b]))],
                         '-', color=[192 / 255, 192 / 255, 192 / 255],
                         linewidth=band_linewidth)
                if self._bs.is_spin_polarized:
                    plt.plot(data['distances'][b],
                             [data['energy'][b][str(Spin.down)][i][j]
                              for j in range(len(data['distances'][b]))],
                             '--', color=[128 / 255, 128 / 255, 128 / 255],
                             linewidth=band_linewidth)
                    for j in range(len(data['energy'][b][str(Spin.up)][i])):
                        markerscale = sum(
                            [proj[b][str(Spin.down)][i][j][str(el)][o]
                             for o in proj[b][str(Spin.down)][i][j][str(el)]])
                        plt.plot(data['distances'][b][j],
                                 data['energy'][b][str(Spin.down)][i][j],
                                 'bo', markersize=markerscale * 15.0,
                                 color=[markerscale, 0.3 * markerscale,
                                        0.4 * markerscale])
                for j in range(len(data['energy'][b][str(Spin.up)][i])):
                    markerscale = sum(
                        [proj[b][str(Spin.up)][i][j][str(el)][o]
                         for o in proj[b][str(Spin.up)][i][j][str(el)]])
                    plt.plot(data['distances'][b][j],
                             data['energy'][b][str(Spin.up)][i][j],
                             'o', markersize=markerscale * 15.0,
                             color=[markerscale, 0.3 * markerscale,
                                    0.4 * markerscale])
        if ylim is None:
            if self._bs.is_metal():
                if zero_to_efermi:
                    plt.ylim(e_min, e_max)
                else:
                    plt.ylim(self._bs.efermi + e_min,
                             self._bs.efermi + e_max)
            else:
                if vbm_cbm_marker:
                    for cbm in data['cbm']:
                        plt.scatter(cbm[0], cbm[1], color='r', marker='o', s=100)
                    for vbm in data['vbm']:
                        plt.scatter(vbm[0], vbm[1], color='g', marker='o', s=100)
                plt.ylim(data['vbm'][0][1] + e_min,
                         data['cbm'][0][1] + e_max)
        else:
            plt.ylim(ylim)
        plt.title(str(el))
        count += 1
    return plt

[ "def", "get_elt_projected_plots", "(", "self", ",", "zero_to_efermi", "=", "True", ",", "ylim", "=", "None", ",", "vbm_cbm_marker", "=", "False", ")", ":", "band_linewidth", "=", "1.0", "proj", "=", "self", ".", "_get_projections_by_branches", "(", "{", "e", ".", "symbol", ":", "[", "'s'", ",", "'p'", ",", "'d'", "]", "for", "e", "in", "self", ".", "_bs", ".", "structure", ".", "composition", ".", "elements", "}", ")", "data", "=", "self", ".", "bs_plot_data", "(", "zero_to_efermi", ")", "plt", "=", "pretty_plot", "(", "12", ",", "8", ")", "e_min", "=", "-", "4", "e_max", "=", "4", "if", "self", ".", "_bs", ".", "is_metal", "(", ")", ":", "e_min", "=", "-", "10", "e_max", "=", "10", "count", "=", "1", "for", "el", "in", "self", ".", "_bs", ".", "structure", ".", "composition", ".", "elements", ":", "plt", ".", "subplot", "(", "220", "+", "count", ")", "self", ".", "_maketicks", "(", "plt", ")", "for", "b", "in", "range", "(", "len", "(", "data", "[", "'distances'", "]", ")", ")", ":", "for", "i", "in", "range", "(", "self", ".", "_nb_bands", ")", ":", "plt", ".", "plot", "(", "data", "[", "'distances'", "]", "[", "b", "]", ",", "[", "data", "[", "'energy'", "]", "[", "b", "]", "[", "str", "(", "Spin", ".", "up", ")", "]", "[", "i", "]", "[", "j", "]", "for", "j", "in", "range", "(", "len", "(", "data", "[", "'distances'", "]", "[", "b", "]", ")", ")", "]", ",", "'-'", ",", "color", "=", "[", "192", "/", "255", ",", "192", "/", "255", ",", "192", "/", "255", "]", ",", "linewidth", "=", "band_linewidth", ")", "if", "self", ".", "_bs", ".", "is_spin_polarized", ":", "plt", ".", "plot", "(", "data", "[", "'distances'", "]", "[", "b", "]", ",", "[", "data", "[", "'energy'", "]", "[", "b", "]", "[", "str", "(", "Spin", ".", "down", ")", "]", "[", "i", "]", "[", "j", "]", "for", "j", "in", "range", "(", "len", "(", "data", "[", "'distances'", "]", "[", "b", "]", ")", ")", "]", ",", "'--'", ",", "color", "=", "[", "128", "/", "255", ",", "128", "/", "255", ",", "128", "/", "255", "]", ",", "linewidth", "=", "band_linewidth", ")", "for", "j", "in", "range", "(", "len", "(", "data", "[", "'energy'", "]", "[", "b", "]", "[", "str", "(", "Spin", ".", "up", ")", "]", "[", "i", "]", ")", ")", ":", "markerscale", "=", "sum", "(", "[", "proj", "[", "b", "]", "[", "str", "(", "Spin", ".", "down", ")", "]", "[", "i", "]", "[", "j", "]", "[", "str", "(", "el", ")", "]", "[", "o", "]", "for", "o", "in", "proj", "[", "b", "]", "[", "str", "(", "Spin", ".", "down", ")", "]", "[", "i", "]", "[", "j", "]", "[", "str", "(", "el", ")", "]", "]", ")", "plt", ".", "plot", "(", "data", "[", "'distances'", "]", "[", "b", "]", "[", "j", "]", ",", "data", "[", "'energy'", "]", "[", "b", "]", "[", "str", "(", "Spin", ".", "down", ")", "]", "[", "i", "]", "[", "j", "]", ",", "'bo'", ",", "markersize", "=", "markerscale", "*", "15.0", ",", "color", "=", "[", "markerscale", ",", "0.3", "*", "markerscale", ",", "0.4", "*", "markerscale", "]", ")", "for", "j", "in", "range", "(", "len", "(", "data", "[", "'energy'", "]", "[", "b", "]", "[", "str", "(", "Spin", ".", "up", ")", "]", "[", "i", "]", ")", ")", ":", "markerscale", "=", "sum", "(", "[", "proj", "[", "b", "]", "[", "str", "(", "Spin", ".", "up", ")", "]", "[", "i", "]", "[", "j", "]", "[", "str", "(", "el", ")", "]", "[", "o", "]", "for", "o", "in", "proj", "[", "b", "]", "[", "str", "(", "Spin", ".", "up", ")", "]", "[", "i", "]", "[", "j", "]", "[", "str", "(", "el", ")", "]", "]", ")", "plt", ".", "plot", "(", "data", "[", "'distances'", "]", "[", "b", "]", "[", 
"j", "]", ",", "data", "[", "'energy'", "]", "[", "b", "]", "[", "str", "(", "Spin", ".", "up", ")", "]", "[", "i", "]", "[", "j", "]", ",", "'o'", ",", "markersize", "=", "markerscale", "*", "15.0", ",", "color", "=", "[", "markerscale", ",", "0.3", "*", "markerscale", ",", "0.4", "*", "markerscale", "]", ")", "if", "ylim", "is", "None", ":", "if", "self", ".", "_bs", ".", "is_metal", "(", ")", ":", "if", "zero_to_efermi", ":", "plt", ".", "ylim", "(", "e_min", ",", "e_max", ")", "else", ":", "plt", ".", "ylim", "(", "self", ".", "_bs", ".", "efermi", "+", "e_min", ",", "self", ".", "_bs", ".", "efermi", "+", "e_max", ")", "else", ":", "if", "vbm_cbm_marker", ":", "for", "cbm", "in", "data", "[", "'cbm'", "]", ":", "plt", ".", "scatter", "(", "cbm", "[", "0", "]", ",", "cbm", "[", "1", "]", ",", "color", "=", "'r'", ",", "marker", "=", "'o'", ",", "s", "=", "100", ")", "for", "vbm", "in", "data", "[", "'vbm'", "]", ":", "plt", ".", "scatter", "(", "vbm", "[", "0", "]", ",", "vbm", "[", "1", "]", ",", "color", "=", "'g'", ",", "marker", "=", "'o'", ",", "s", "=", "100", ")", "plt", ".", "ylim", "(", "data", "[", "'vbm'", "]", "[", "0", "]", "[", "1", "]", "+", "e_min", ",", "data", "[", "'cbm'", "]", "[", "0", "]", "[", "1", "]", "+", "e_max", ")", "else", ":", "plt", ".", "ylim", "(", "ylim", ")", "plt", ".", "title", "(", "str", "(", "el", ")", ")", "count", "+=", "1", "return", "plt" ]
49.244186
20.732558
def _expand_variable_match(positional_vars, named_vars, match):
    """Expand a matched variable with its value.

    Args:
        positional_vars (list): A list of positional variables. This list
            will be modified.
        named_vars (dict): A dictionary of named variables.
        match (re.Match): A regular expression match.

    Returns:
        str: The expanded variable to replace the match.

    Raises:
        ValueError: If a positional or named variable is required by the
            template but not specified, or if an unexpected template
            expression is encountered.
    """
    positional = match.group("positional")
    name = match.group("name")
    if name is not None:
        try:
            return six.text_type(named_vars[name])
        except KeyError:
            raise ValueError(
                "Named variable '{}' not specified and needed by template "
                "`{}` at position {}".format(name, match.string, match.start())
            )
    elif positional is not None:
        try:
            return six.text_type(positional_vars.pop(0))
        except IndexError:
            raise ValueError(
                "Positional variable not specified and needed by template "
                "`{}` at position {}".format(match.string, match.start())
            )
    else:
        raise ValueError("Unknown template expression {}".format(match.group(0)))

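A sketch with a toy variable grammar; the real module defines its own pattern, so this regex (providing the 'positional' and 'name' groups the function reads) is an assumption, and six must be importable where the function lives:

import re
import functools

# toy grammar: '*' is a positional variable, '{name}' a named one
_VAR = re.compile(r"(?P<positional>\*)|\{(?P<name>\w+)\}")

positional_vars = ["shelves"]
named_vars = {"book": "moby-dick"}
expand = functools.partial(_expand_variable_match, positional_vars, named_vars)
print(_VAR.sub(expand, "v1/*/books/{book}"))  # v1/shelves/books/moby-dick
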
[ "def", "_expand_variable_match", "(", "positional_vars", ",", "named_vars", ",", "match", ")", ":", "positional", "=", "match", ".", "group", "(", "\"positional\"", ")", "name", "=", "match", ".", "group", "(", "\"name\"", ")", "if", "name", "is", "not", "None", ":", "try", ":", "return", "six", ".", "text_type", "(", "named_vars", "[", "name", "]", ")", "except", "KeyError", ":", "raise", "ValueError", "(", "\"Named variable '{}' not specified and needed by template \"", "\"`{}` at position {}\"", ".", "format", "(", "name", ",", "match", ".", "string", ",", "match", ".", "start", "(", ")", ")", ")", "elif", "positional", "is", "not", "None", ":", "try", ":", "return", "six", ".", "text_type", "(", "positional_vars", ".", "pop", "(", "0", ")", ")", "except", "IndexError", ":", "raise", "ValueError", "(", "\"Positional variable not specified and needed by template \"", "\"`{}` at position {}\"", ".", "format", "(", "match", ".", "string", ",", "match", ".", "start", "(", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "\"Unknown template expression {}\"", ".", "format", "(", "match", ".", "group", "(", "0", ")", ")", ")" ]
37.297297
23.054054
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions

    For simplicity, this function simply converts the input quaternion(s) to
    a matrix, and rotates the input vector(s) by the usual matrix
    multiplication. However, it should be noted that if each input quaternion
    is only used to rotate a single vector, it is more efficient (in terms of
    operation counts) to use the formula

        v' = v + 2 * r x (s * v + r x v) / m

    where x represents the cross product, s and r are the scalar and vector
    parts of the quaternion, respectively, and m is the sum of the squares of
    the components of the quaternion. If you are looping over a very large
    number of quaternions, and just rotating a single vector each time, you
    might want to implement that alternative algorithm using numba (or
    something that doesn't use python).

    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension. This axis of
        `v` must have length 3.

    Returns
    =======
    vprime: float array
        The rotated vectors. This array has shape R.shape+v.shape.
    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    m = as_rotation_matrix(R)
    m_axes = list(range(m.ndim))
    v_axes = list(range(m.ndim, m.ndim + v.ndim))
    mv_axes = list(v_axes)
    mv_axes[axis] = m_axes[-2]
    mv_axes = m_axes[:-2] + mv_axes
    v_axes[axis] = m_axes[-1]
    return np.einsum(m, m_axes, v, v_axes, mv_axes)

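A usage sketch, assuming the numpy-quaternion package (which registers the np.quaternion dtype and provides as_rotation_matrix and this function):

import numpy as np
import quaternion  # numpy-quaternion package

# quaternion for a 90-degree rotation about the z-axis
R = np.quaternion(np.cos(np.pi / 4), 0, 0, np.sin(np.pi / 4))
v = np.array([1.0, 0.0, 0.0])
print(np.round(quaternion.rotate_vectors(R, v), 6))  # [0. 1. 0.]: x-axis onto y-axis
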
[ "def", "rotate_vectors", "(", "R", ",", "v", ",", "axis", "=", "-", "1", ")", ":", "R", "=", "np", ".", "asarray", "(", "R", ",", "dtype", "=", "np", ".", "quaternion", ")", "v", "=", "np", ".", "asarray", "(", "v", ",", "dtype", "=", "float", ")", "if", "v", ".", "ndim", "<", "1", "or", "3", "not", "in", "v", ".", "shape", ":", "raise", "ValueError", "(", "\"Input `v` does not have at least one dimension of length 3\"", ")", "if", "v", ".", "shape", "[", "axis", "]", "!=", "3", ":", "raise", "ValueError", "(", "\"Input `v` axis {0} has length {1}, not 3.\"", ".", "format", "(", "axis", ",", "v", ".", "shape", "[", "axis", "]", ")", ")", "m", "=", "as_rotation_matrix", "(", "R", ")", "m_axes", "=", "list", "(", "range", "(", "m", ".", "ndim", ")", ")", "v_axes", "=", "list", "(", "range", "(", "m", ".", "ndim", ",", "m", ".", "ndim", "+", "v", ".", "ndim", ")", ")", "mv_axes", "=", "list", "(", "v_axes", ")", "mv_axes", "[", "axis", "]", "=", "m_axes", "[", "-", "2", "]", "mv_axes", "=", "m_axes", "[", ":", "-", "2", "]", "+", "mv_axes", "v_axes", "[", "axis", "]", "=", "m_axes", "[", "-", "1", "]", "return", "np", ".", "einsum", "(", "m", ",", "m_axes", ",", "v", ",", "v_axes", ",", "mv_axes", ")" ]
36.705882
21.705882
def from_py_func(cls, func):
    """ Create a ``CustomJS`` instance from a Python function. The
    function is translated to JavaScript using PScript.
    """
    from bokeh.util.deprecation import deprecated
    deprecated("'from_py_func' is deprecated and will be removed in an eventual "
               "2.0 release. Use CustomJS directly instead.")
    if not isinstance(func, FunctionType):
        raise ValueError('CustomJS.from_py_func needs function object.')
    pscript = import_required(
        'pscript',
        'To use Python functions for CustomJS, you need PScript ' +
        '("conda install -c conda-forge pscript" or "pip install pscript")')
    # Collect default values
    default_values = func.__defaults__  # Python 2.6+
    default_names = func.__code__.co_varnames[:len(default_values)]
    args = dict(zip(default_names, default_values))
    args.pop('window', None)  # Clear window, so we use the global window object
    # Get JS code, we could rip out the function def, or just
    # call the function. We do the latter.
    code = pscript.py2js(func, 'cb') + 'cb(%s);\n' % ', '.join(default_names)
    return cls(code=code, args=args)

[ "def", "from_py_func", "(", "cls", ",", "func", ")", ":", "from", "bokeh", ".", "util", ".", "deprecation", "import", "deprecated", "deprecated", "(", "\"'from_py_func' is deprecated and will be removed in an eventual 2.0 release. \"", "\"Use CustomJS directly instead.\"", ")", "if", "not", "isinstance", "(", "func", ",", "FunctionType", ")", ":", "raise", "ValueError", "(", "'CustomJS.from_py_func needs function object.'", ")", "pscript", "=", "import_required", "(", "'pscript'", ",", "'To use Python functions for CustomJS, you need PScript '", "+", "'(\"conda install -c conda-forge pscript\" or \"pip install pscript\")'", ")", "# Collect default values", "default_values", "=", "func", ".", "__defaults__", "# Python 2.6+", "default_names", "=", "func", ".", "__code__", ".", "co_varnames", "[", ":", "len", "(", "default_values", ")", "]", "args", "=", "dict", "(", "zip", "(", "default_names", ",", "default_values", ")", ")", "args", ".", "pop", "(", "'window'", ",", "None", ")", "# Clear window, so we use the global window object", "# Get JS code, we could rip out the function def, or just", "# call the function. We do the latter.", "code", "=", "pscript", ".", "py2js", "(", "func", ",", "'cb'", ")", "+", "'cb(%s);\\n'", "%", "', '", ".", "join", "(", "default_names", ")", "return", "cls", "(", "code", "=", "code", ",", "args", "=", "args", ")" ]
57.409091
22.818182
def _main_cli(self):
    """Main function of SMAC for CLI interface

    Returns
    -------
    instance
        optimizer
    """
    self.logger.info("SMAC call: %s" % (" ".join(sys.argv)))

    cmd_reader = CMDReader()
    args, _ = cmd_reader.read_cmd()

    root_logger = logging.getLogger()
    root_logger.setLevel(args.verbose_level)
    logger_handler = logging.StreamHandler(stream=sys.stdout)
    if root_logger.level >= logging.INFO:
        formatter = logging.Formatter("%(levelname)s:\t%(message)s")
    else:
        formatter = logging.Formatter(
            "%(asctime)s:%(levelname)s:%(name)s:%(message)s",
            "%Y-%m-%d %H:%M:%S")
    logger_handler.setFormatter(formatter)
    root_logger.addHandler(logger_handler)
    # remove default handler
    root_logger.removeHandler(root_logger.handlers[0])

    # Create defaults
    rh = None
    initial_configs = None
    stats = None
    incumbent = None

    # Create scenario-object
    scen = Scenario(args.scenario_file, [])

    if args.mode == "SMAC":
        optimizer = SMAC(
            scenario=scen,
            rng=np.random.RandomState(args.seed),
            runhistory=rh,
            initial_configurations=initial_configs,
            stats=stats,
            restore_incumbent=incumbent,
            run_id=args.seed)
    elif args.mode == "ROAR":
        optimizer = ROAR(
            scenario=scen,
            rng=np.random.RandomState(args.seed),
            runhistory=rh,
            initial_configurations=initial_configs,
            run_id=args.seed)
    elif args.mode == "EPILS":
        optimizer = EPILS(
            scenario=scen,
            rng=np.random.RandomState(args.seed),
            runhistory=rh,
            initial_configurations=initial_configs,
            run_id=args.seed)
    else:
        optimizer = None
    return optimizer

[ "def", "_main_cli", "(", "self", ")", ":", "self", ".", "logger", ".", "info", "(", "\"SMAC call: %s\"", "%", "(", "\" \"", ".", "join", "(", "sys", ".", "argv", ")", ")", ")", "cmd_reader", "=", "CMDReader", "(", ")", "args", ",", "_", "=", "cmd_reader", ".", "read_cmd", "(", ")", "root_logger", "=", "logging", ".", "getLogger", "(", ")", "root_logger", ".", "setLevel", "(", "args", ".", "verbose_level", ")", "logger_handler", "=", "logging", ".", "StreamHandler", "(", "stream", "=", "sys", ".", "stdout", ")", "if", "root_logger", ".", "level", ">=", "logging", ".", "INFO", ":", "formatter", "=", "logging", ".", "Formatter", "(", "\"%(levelname)s:\\t%(message)s\"", ")", "else", ":", "formatter", "=", "logging", ".", "Formatter", "(", "\"%(asctime)s:%(levelname)s:%(name)s:%(message)s\"", ",", "\"%Y-%m-%d %H:%M:%S\"", ")", "logger_handler", ".", "setFormatter", "(", "formatter", ")", "root_logger", ".", "addHandler", "(", "logger_handler", ")", "# remove default handler", "root_logger", ".", "removeHandler", "(", "root_logger", ".", "handlers", "[", "0", "]", ")", "# Create defaults", "rh", "=", "None", "initial_configs", "=", "None", "stats", "=", "None", "incumbent", "=", "None", "# Create scenario-object", "scen", "=", "Scenario", "(", "args", ".", "scenario_file", ",", "[", "]", ")", "if", "args", ".", "mode", "==", "\"SMAC\"", ":", "optimizer", "=", "SMAC", "(", "scenario", "=", "scen", ",", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "args", ".", "seed", ")", ",", "runhistory", "=", "rh", ",", "initial_configurations", "=", "initial_configs", ",", "stats", "=", "stats", ",", "restore_incumbent", "=", "incumbent", ",", "run_id", "=", "args", ".", "seed", ")", "elif", "args", ".", "mode", "==", "\"ROAR\"", ":", "optimizer", "=", "ROAR", "(", "scenario", "=", "scen", ",", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "args", ".", "seed", ")", ",", "runhistory", "=", "rh", ",", "initial_configurations", "=", "initial_configs", ",", "run_id", "=", "args", ".", "seed", ")", "elif", "args", ".", "mode", "==", "\"EPILS\"", ":", "optimizer", "=", "EPILS", "(", "scenario", "=", "scen", ",", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "args", ".", "seed", ")", ",", "runhistory", "=", "rh", ",", "initial_configurations", "=", "initial_configs", ",", "run_id", "=", "args", ".", "seed", ")", "else", ":", "optimizer", "=", "None", "return", "optimizer" ]
31.584615
14.430769
def constant(name, value):
    """Creates a constant that can be referenced from gin config files.

    After calling this function in Python, the constant can be referenced
    from within a Gin config file using the macro syntax. For example, in
    Python:

        gin.constant('THE_ANSWER', 42)

    Then, in a Gin config file:

        meaning.of_life = %THE_ANSWER

    Note that any Python object can be used as the value of a constant
    (including objects not representable as Gin literals). Values will be
    stored until program termination in a Gin-internal dictionary, so avoid
    creating constants with values that should have a limited lifetime.

    Optionally, a disambiguating module may be prefixed onto the constant
    name. For instance:

        gin.constant('some.modules.PI', 3.14159)

    Args:
      name: The name of the constant, possibly prepended by one or more
        disambiguating module components separated by periods. A macro with
        this name (including the modules) will be created.
      value: The value of the constant. This can be anything (including
        objects not representable as Gin literals). The value will be stored
        and returned whenever the constant is referenced.

    Raises:
      ValueError: If the constant's selector is invalid, or a constant with
        the given selector already exists.
    """
    if not config_parser.MODULE_RE.match(name):
        raise ValueError("Invalid constant selector '{}'.".format(name))
    if _CONSTANTS.matching_selectors(name):
        err_str = "Constants matching selector '{}' already exist ({})."
        raise ValueError(err_str.format(name, _CONSTANTS.matching_selectors(name)))
    _CONSTANTS[name] = value

[ "def", "constant", "(", "name", ",", "value", ")", ":", "if", "not", "config_parser", ".", "MODULE_RE", ".", "match", "(", "name", ")", ":", "raise", "ValueError", "(", "\"Invalid constant selector '{}'.\"", ".", "format", "(", "name", ")", ")", "if", "_CONSTANTS", ".", "matching_selectors", "(", "name", ")", ":", "err_str", "=", "\"Constants matching selector '{}' already exist ({}).\"", "raise", "ValueError", "(", "err_str", ".", "format", "(", "name", ",", "_CONSTANTS", ".", "matching_selectors", "(", "name", ")", ")", ")", "_CONSTANTS", "[", "name", "]", "=", "value" ]
38.619048
26.142857
def getKw(self, kw):
    """ Extract the configuration snippet for a named element.

    :param kw: element name
    :return: instance itself

    1. call getKwAsDict() to return config as a dict
    2. call getKwAsJson() to return config as a json string
    3. call getKwAsString() to return config as a raw string

    USAGE: getKw('Q10')
    """
    ikw = kw.lower()
    line_continue_flag = ''
    appendflag = False
    try:
        for line in self.file_lines:
            if line.strip() == '':
                continue
            line = ' '.join(line.strip().split()).strip('\n; ')
            if line.startswith('!'):
                continue
            if line.lower().startswith(ikw + ' :') or line.lower().startswith(ikw + ':'):
                conflist = []  # list to put into element configuration
                conflist.append(line)
                appendflag = True
            elif appendflag and line_continue_flag == '&':
                conflist.append(line)
            line_continue_flag = line[-1]
            if line_continue_flag != '&':
                appendflag = False
        conf_str = ''.join(conflist).replace('&', ',')
        if 'line' in conf_str.lower().split('=')[0]:
            # if bl defines lattice
            conf_str = conf_str.lower().replace(',', ' ')[::-1].replace(
                'enil', 'beamline,lattice'[::-1], 1)[::-1]
            # avoid the case where the bl keyword contains 'line'
    except:
        conf_str = ''
    # print conf_str

    # split('!epics'): second part is epics control conf
    splitedparts = conf_str.split('!epics')
    self.confstr = splitedparts[0]
    try:
        self.confstr_epics = splitedparts[1].strip()
    except IndexError:
        self.confstr_epics = ''
    return self

[ "def", "getKw", "(", "self", ",", "kw", ")", ":", "ikw", "=", "kw", ".", "lower", "(", ")", "line_continue_flag", "=", "''", "appendflag", "=", "False", "try", ":", "for", "line", "in", "self", ".", "file_lines", ":", "if", "line", ".", "strip", "(", ")", "==", "''", ":", "continue", "line", "=", "' '", ".", "join", "(", "line", ".", "strip", "(", ")", ".", "split", "(", ")", ")", ".", "strip", "(", "'\\n; '", ")", "if", "line", ".", "startswith", "(", "'!'", ")", ":", "continue", "if", "line", ".", "lower", "(", ")", ".", "startswith", "(", "ikw", "+", "' :'", ")", "or", "line", ".", "lower", "(", ")", ".", "startswith", "(", "ikw", "+", "':'", ")", ":", "conflist", "=", "[", "]", "# list to put into element configuration", "conflist", ".", "append", "(", "line", ")", "appendflag", "=", "True", "elif", "appendflag", "and", "line_continue_flag", "==", "'&'", ":", "conflist", ".", "append", "(", "line", ")", "line_continue_flag", "=", "line", "[", "-", "1", "]", "if", "line_continue_flag", "!=", "'&'", ":", "appendflag", "=", "False", "conf_str", "=", "''", ".", "join", "(", "conflist", ")", ".", "replace", "(", "'&'", ",", "','", ")", "if", "'line'", "in", "conf_str", ".", "lower", "(", ")", ".", "split", "(", "'='", ")", "[", "0", "]", ":", "# if bl defines lattice", "conf_str", "=", "conf_str", ".", "lower", "(", ")", ".", "replace", "(", "','", ",", "' '", ")", "[", ":", ":", "-", "1", "]", ".", "replace", "(", "'enil'", ",", "'beamline,lattice'", "[", ":", ":", "-", "1", "]", ",", "1", ")", "[", ":", ":", "-", "1", "]", "# avoid the case with bl keyword has 'line'", "except", ":", "conf_str", "=", "''", "# print conf_str", "# split('!epics'): second part is epics control conf", "splitedparts", "=", "conf_str", ".", "split", "(", "'!epics'", ")", "self", ".", "confstr", "=", "splitedparts", "[", "0", "]", "try", ":", "self", ".", "confstr_epics", "=", "splitedparts", "[", "1", "]", ".", "strip", "(", ")", "except", "IndexError", ":", "self", ".", "confstr_epics", "=", "''", "return", "self" ]
40.12766
18.489362
def random_density(qubits: Union[int, Qubits]) -> Density:
    """
    Returns:
        A randomly sampled Density from the Hilbert-Schmidt ensemble of
        quantum states

    Ref:
        "Induced measures in the space of mixed quantum states"
        Karol Zyczkowski, Hans-Juergen Sommers, J. Phys. A34, 7111-7125 (2001)
        https://arxiv.org/abs/quant-ph/0012101
    """
    N, qubits = qubits_count_tuple(qubits)
    size = (2**N, 2**N)
    ginibre_ensemble = (np.random.normal(size=size)
                        + 1j * np.random.normal(size=size)) / np.sqrt(2.0)
    matrix = ginibre_ensemble @ np.transpose(np.conjugate(ginibre_ensemble))
    matrix /= np.trace(matrix)
    return Density(matrix, qubits=qubits)

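The defining properties of the sampled state are easy to verify by reproducing the numerical core with plain numpy (a 4x4 matrix corresponds to two qubits):

import numpy as np

size = (4, 4)
G = (np.random.normal(size=size) + 1j * np.random.normal(size=size)) / np.sqrt(2.0)
rho = G @ G.conj().T   # Hermitian and positive semidefinite by construction
rho /= np.trace(rho)
print(np.isclose(np.trace(rho), 1.0))             # True: unit trace
print(np.all(np.linalg.eigvalsh(rho) >= -1e-12))  # True: non-negative eigenvalues
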
[ "def", "random_density", "(", "qubits", ":", "Union", "[", "int", ",", "Qubits", "]", ")", "->", "Density", ":", "N", ",", "qubits", "=", "qubits_count_tuple", "(", "qubits", ")", "size", "=", "(", "2", "**", "N", ",", "2", "**", "N", ")", "ginibre_ensemble", "=", "(", "np", ".", "random", ".", "normal", "(", "size", "=", "size", ")", "+", "1j", "*", "np", ".", "random", ".", "normal", "(", "size", "=", "size", ")", ")", "/", "np", ".", "sqrt", "(", "2.0", ")", "matrix", "=", "ginibre_ensemble", "@", "np", ".", "transpose", "(", "np", ".", "conjugate", "(", "ginibre_ensemble", ")", ")", "matrix", "/=", "np", ".", "trace", "(", "matrix", ")", "return", "Density", "(", "matrix", ",", "qubits", "=", "qubits", ")" ]
41.352941
17.823529
def underlying_typedef_type(self):
    """Return the underlying type of a typedef declaration.

    Returns a Type for the typedef this cursor is a declaration for. If
    the current cursor is not a typedef, this raises.
    """
    if not hasattr(self, '_underlying_type'):
        assert self.kind.is_declaration()
        self._underlying_type = \
            conf.lib.clang_getTypedefDeclUnderlyingType(self)
    return self._underlying_type

[ "def", "underlying_typedef_type", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_underlying_type'", ")", ":", "assert", "self", ".", "kind", ".", "is_declaration", "(", ")", "self", ".", "_underlying_type", "=", "conf", ".", "lib", ".", "clang_getTypedefDeclUnderlyingType", "(", "self", ")", "return", "self", ".", "_underlying_type" ]
39.166667
15.166667
def can_ignore_error(self, reqhnd=None):
    """Tests if the error is worth reporting.
    """
    value = sys.exc_info()[1]
    try:
        if isinstance(value, BrokenPipeError) or \
           isinstance(value, ConnectionResetError):
            return True
    except NameError:
        pass
    if not self.done:
        return False
    if not isinstance(value, socket.error):
        return False
    need_close = value.errno == 9
    if need_close and reqhnd is not None:
        reqhnd.close_connection = 1
    return need_close

[ "def", "can_ignore_error", "(", "self", ",", "reqhnd", "=", "None", ")", ":", "value", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "try", ":", "if", "isinstance", "(", "value", ",", "BrokenPipeError", ")", "or", "isinstance", "(", "value", ",", "ConnectionResetError", ")", ":", "return", "True", "except", "NameError", ":", "pass", "if", "not", "self", ".", "done", ":", "return", "False", "if", "not", "isinstance", "(", "value", ",", "socket", ".", "error", ")", ":", "return", "False", "need_close", "=", "value", ".", "errno", "==", "9", "if", "need_close", "and", "reqhnd", "is", "not", "None", ":", "reqhnd", ".", "close_connection", "=", "1", "return", "need_close" ]
32.944444
11.055556
def mitochondrial_genes(host, org) -> pd.Index:
    """Mitochondrial gene symbols for specific organism through BioMart.

    Parameters
    ----------
    host : {{'www.ensembl.org', ...}}
        A valid BioMart host URL.
    org : {{'hsapiens', 'mmusculus', 'drerio'}}
        Organism to query. Currently available are human ('hsapiens'),
        mouse ('mmusculus') and zebrafish ('drerio').

    Returns
    -------
    A :class:`pandas.Index` containing mitochondrial gene symbols.
    """
    try:
        from bioservices import biomart
    except ImportError:
        raise ImportError(
            'You need to install the `bioservices` module.')
    from io import StringIO
    s = biomart.BioMart(host=host)

    # building query
    s.new_query()
    if org == 'hsapiens':
        s.add_dataset_to_xml('hsapiens_gene_ensembl')
        s.add_attribute_to_xml('hgnc_symbol')
    elif org == 'mmusculus':
        s.add_dataset_to_xml('mmusculus_gene_ensembl')
        s.add_attribute_to_xml('mgi_symbol')
    elif org == 'drerio':
        s.add_dataset_to_xml('drerio_gene_ensembl')
        s.add_attribute_to_xml('zfin_id_symbol')
    else:
        logg.msg('organism ', str(org), ' is unavailable', v=4, no_indent=True)
        return None
    s.add_attribute_to_xml('chromosome_name')
    xml = s.get_xml()

    # parsing mitochondrial gene symbols
    res = pd.read_csv(StringIO(s.query(xml)), sep='\t', header=None)
    res.columns = ['symbol', 'chromosome_name']
    res = res.dropna()
    res = res[res['chromosome_name'] == 'MT']
    res = res.set_index('symbol')
    res = res[~res.index.duplicated(keep='first')]
    return res.index

[ "def", "mitochondrial_genes", "(", "host", ",", "org", ")", "->", "pd", ".", "Index", ":", "try", ":", "from", "bioservices", "import", "biomart", "except", "ImportError", ":", "raise", "ImportError", "(", "'You need to install the `bioservices` module.'", ")", "from", "io", "import", "StringIO", "s", "=", "biomart", ".", "BioMart", "(", "host", "=", "host", ")", "# building query", "s", ".", "new_query", "(", ")", "if", "org", "==", "'hsapiens'", ":", "s", ".", "add_dataset_to_xml", "(", "'hsapiens_gene_ensembl'", ")", "s", ".", "add_attribute_to_xml", "(", "'hgnc_symbol'", ")", "elif", "org", "==", "'mmusculus'", ":", "s", ".", "add_dataset_to_xml", "(", "'mmusculus_gene_ensembl'", ")", "s", ".", "add_attribute_to_xml", "(", "'mgi_symbol'", ")", "elif", "org", "==", "'drerio'", ":", "s", ".", "add_dataset_to_xml", "(", "'drerio_gene_ensembl'", ")", "s", ".", "add_attribute_to_xml", "(", "'zfin_id_symbol'", ")", "else", ":", "logg", ".", "msg", "(", "'organism '", ",", "str", "(", "org", ")", ",", "' is unavailable'", ",", "v", "=", "4", ",", "no_indent", "=", "True", ")", "return", "None", "s", ".", "add_attribute_to_xml", "(", "'chromosome_name'", ")", "xml", "=", "s", ".", "get_xml", "(", ")", "# parsing mitochondrial gene symbols", "res", "=", "pd", ".", "read_csv", "(", "StringIO", "(", "s", ".", "query", "(", "xml", ")", ")", ",", "sep", "=", "'\\t'", ",", "header", "=", "None", ")", "res", ".", "columns", "=", "[", "'symbol'", ",", "'chromosome_name'", "]", "res", "=", "res", ".", "dropna", "(", ")", "res", "=", "res", "[", "res", "[", "'chromosome_name'", "]", "==", "'MT'", "]", "res", "=", "res", ".", "set_index", "(", "'symbol'", ")", "res", "=", "res", "[", "~", "res", ".", "index", ".", "duplicated", "(", "keep", "=", "'first'", ")", "]", "return", "res", ".", "index" ]
32.795918
17.469388
def serialize(self) -> dict:
    """
    Dump current object to a JSON-compatible dictionary.

    :return: dict representation of current DIDDoc
    """
    return {
        '@context': DIDDoc.CONTEXT,
        'id': canon_ref(self.did, self.did),
        'publicKey': [pubkey.to_dict() for pubkey in self.pubkey.values()],
        'authentication': [{
            'type': pubkey.type.authn_type,
            'publicKey': canon_ref(self.did, pubkey.id)
        } for pubkey in self.pubkey.values() if pubkey.authn],
        'service': [service.to_dict() for service in self.service.values()]
    }

[ "def", "serialize", "(", "self", ")", "->", "str", ":", "return", "{", "'@context'", ":", "DIDDoc", ".", "CONTEXT", ",", "'id'", ":", "canon_ref", "(", "self", ".", "did", ",", "self", ".", "did", ")", ",", "'publicKey'", ":", "[", "pubkey", ".", "to_dict", "(", ")", "for", "pubkey", "in", "self", ".", "pubkey", ".", "values", "(", ")", "]", ",", "'authentication'", ":", "[", "{", "'type'", ":", "pubkey", ".", "type", ".", "authn_type", ",", "'publicKey'", ":", "canon_ref", "(", "self", ".", "did", ",", "pubkey", ".", "id", ")", "}", "for", "pubkey", "in", "self", ".", "pubkey", ".", "values", "(", ")", "if", "pubkey", ".", "authn", "]", ",", "'service'", ":", "[", "service", ".", "to_dict", "(", ")", "for", "service", "in", "self", ".", "service", ".", "values", "(", ")", "]", "}" ]
37.470588
19.352941
def strftimegen(start_dt, end_dt):
    """
    Return a generator function for datetime format strings.
    The generator produces a day-by-day sequence starting from the first
    datetime to the second datetime argument.
    """
    if start_dt > end_dt:
        raise ValueError("the start datetime is after the end datetime: (%r,%r)"
                         % (start_dt, end_dt))

    def iterftime(string):
        date_subs = [i for i in DATE_FORMATS if i[1].search(string) is not None]
        if not date_subs:
            yield string
        else:
            dt = start_dt
            while end_dt >= dt:
                # substitute into a fresh copy of the template each day;
                # otherwise the format codes are consumed on the first pass
                # and every later day would yield the first day's path
                date_path = string
                for item in date_subs:
                    date_path = item[1].sub(dt.strftime(item[0]), date_path)
                yield date_path
                dt = dt + datetime.timedelta(days=1)

    return iterftime

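A usage sketch; DATE_FORMATS is defined elsewhere in the module, so the minimal stand-in below (each strftime code paired with the regex that finds it) is an assumption:

import re
import datetime

DATE_FORMATS = [('%Y', re.compile(r'%Y')),
                ('%m', re.compile(r'%m')),
                ('%d', re.compile(r'%d'))]

gen = strftimegen(datetime.datetime(2024, 1, 30), datetime.datetime(2024, 2, 1))
for path in gen('/logs/%Y-%m-%d/app.log'):
    print(path)
# /logs/2024-01-30/app.log
# /logs/2024-01-31/app.log
# /logs/2024-02-01/app.log
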
[ "def", "strftimegen", "(", "start_dt", ",", "end_dt", ")", ":", "if", "start_dt", ">", "end_dt", ":", "raise", "ValueError", "(", "\"the start datetime is after the end datetime: (%r,%r)\"", "%", "(", "start_dt", ",", "end_dt", ")", ")", "def", "iterftime", "(", "string", ")", ":", "date_subs", "=", "[", "i", "for", "i", "in", "DATE_FORMATS", "if", "i", "[", "1", "]", ".", "search", "(", "string", ")", "is", "not", "None", "]", "if", "not", "date_subs", ":", "yield", "string", "else", ":", "dt", "=", "start_dt", "date_path", "=", "string", "while", "end_dt", ">=", "dt", ":", "for", "item", "in", "date_subs", ":", "date_path", "=", "item", "[", "1", "]", ".", "sub", "(", "dt", ".", "strftime", "(", "item", "[", "0", "]", ")", ",", "date_path", ")", "yield", "date_path", "dt", "=", "dt", "+", "datetime", ".", "timedelta", "(", "days", "=", "1", ")", "return", "iterftime" ]
35.73913
19.652174
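A hedged usage sketch for strftimegen, assuming DATE_FORMATS pairs strftime codes with compiled regexes, e.g. [('%Y', re.compile(r'%Y')), ('%m', re.compile(r'%m')), ('%d', re.compile(r'%d'))]:

    import datetime

    gen = strftimegen(datetime.datetime(2024, 1, 1), datetime.datetime(2024, 1, 3))
    for path in gen('/logs/%Y/%m/%d/app.log'):
        print(path)
    # /logs/2024/01/01/app.log
    # /logs/2024/01/02/app.log
    # /logs/2024/01/03/app.log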
def create_function(self, vpc_config):
        """Create the Lambda function and configure its parameters.

        We need to upload a non-zero ZIP when creating the function, so we
        upload a hello_world Python lambda function, since AWS doesn't care
        which executable is in the ZIP.

        Args:
            vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using
                               a VPC in lambda

        """
        zip_file = 'lambda-holder.zip'
        with zipfile.ZipFile(zip_file, mode='w') as zipped:
            zipped.writestr('index.py', 'print "Hello world"')

        contents = ''
        with open('lambda-holder.zip', 'rb') as openfile:
            contents = openfile.read()

        LOG.info('Creating lambda function: %s', self.app_name)

        try:
            self.lambda_client.create_function(
                Environment=self.lambda_environment,
                FunctionName=self.app_name,
                Runtime=self.runtime,
                Role=self.role_arn,
                Handler=self.handler,
                Code={'ZipFile': contents},
                Description=self.description,
                Timeout=int(self.timeout),
                MemorySize=int(self.memory),
                Publish=False,
                VpcConfig=vpc_config,
                Tags={'app_group': self.group,
                      'app_name': self.app_name})
        except boto3.exceptions.botocore.exceptions.ClientError as error:
            if 'CreateNetworkInterface' in error.response['Error']['Message']:
                message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn)
                LOG.critical(message)
                raise SystemExit(message)

            raise

        LOG.info("Successfully created Lambda function and alias")
[ "def", "create_function", "(", "self", ",", "vpc_config", ")", ":", "zip_file", "=", "'lambda-holder.zip'", "with", "zipfile", ".", "ZipFile", "(", "zip_file", ",", "mode", "=", "'w'", ")", "as", "zipped", ":", "zipped", ".", "writestr", "(", "'index.py'", ",", "'print \"Hello world\"'", ")", "contents", "=", "''", "with", "open", "(", "'lambda-holder.zip'", ",", "'rb'", ")", "as", "openfile", ":", "contents", "=", "openfile", ".", "read", "(", ")", "LOG", ".", "info", "(", "'Creating lambda function: %s'", ",", "self", ".", "app_name", ")", "try", ":", "self", ".", "lambda_client", ".", "create_function", "(", "Environment", "=", "self", ".", "lambda_environment", ",", "FunctionName", "=", "self", ".", "app_name", ",", "Runtime", "=", "self", ".", "runtime", ",", "Role", "=", "self", ".", "role_arn", ",", "Handler", "=", "self", ".", "handler", ",", "Code", "=", "{", "'ZipFile'", ":", "contents", "}", ",", "Description", "=", "self", ".", "description", ",", "Timeout", "=", "int", "(", "self", ".", "timeout", ")", ",", "MemorySize", "=", "int", "(", "self", ".", "memory", ")", ",", "Publish", "=", "False", ",", "VpcConfig", "=", "vpc_config", ",", "Tags", "=", "{", "'app_group'", ":", "self", ".", "group", ",", "'app_name'", ":", "self", ".", "app_name", "}", ")", "except", "boto3", ".", "exceptions", ".", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "if", "'CreateNetworkInterface'", "in", "error", ".", "response", "[", "'Error'", "]", "[", "'Message'", "]", ":", "message", "=", "'{0} is missing \"ec2:CreateNetworkInterface\"'", ".", "format", "(", "self", ".", "role_arn", ")", "LOG", ".", "critical", "(", "message", ")", "raise", "SystemExit", "(", "message", ")", "raise", "LOG", ".", "info", "(", "\"Successfully created Lambda function and alias\"", ")" ]
39.377778
18.2
def precip(self, start, end, **kwargs):
        r""" Returns precipitation observations at a user specified location for a specified time. Users must specify at
            least one geographic search parameter ('stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa',
            'nwsfirezone', 'gacc', or 'subgacc') to obtain observation data. Other parameters may also be included.
            See below for mandatory and optional parameters. Also see the metadata() function for station IDs.

            Arguments:
            ----------
            start: string, mandatory
                Start date in form of YYYYMMDDhhmm. MUST BE USED WITH THE END PARAMETER. Default time is UTC
                e.g., start='201306011800'
            end: string, mandatory
                End date in form of YYYYMMDDhhmm. MUST BE USED WITH THE START PARAMETER. Default time is UTC
                e.g., end='201306011800'
            obtimezone: string, optional
                Set to either UTC or local. Sets timezone of obs. Default is UTC. e.g. obtimezone='local'
            showemptystations: string, optional
                Set to '1' to show stations even if no obs exist that match the time period. Stations without obs are
                omitted by default.
            stid: string, optional
                Single or comma separated list of MesoWest station IDs. e.g. stid='kden,kslc,wbb'
            county: string, optional
                County/parish/borough (US/Canada only), full name e.g. county='Larimer'
            state: string, optional
                US state, 2-letter ID e.g. state='CO'
            country: string, optional
                Single or comma separated list of abbreviated 2 or 3 character countries e.g. country='us,ca,mx'
            radius: list, optional
                Distance from a lat/lon pt or stid as [lat,lon,radius (mi)] or [stid, radius (mi)]. e.g. radius="-120,40,20"
            bbox: list, optional
                Stations within a [lon/lat] box in the order [lonmin,latmin,lonmax,latmax] e.g. bbox="-120,40,-119,41"
            cwa: string, optional
                NWS county warning area. See http://www.nws.noaa.gov/organization.php for CWA list. e.g. cwa='LOX'
            nwsfirezone: string, optional
                NWS fire zones. See http://www.nws.noaa.gov/geodata/catalog/wsom/html/firezone.htm for a shapefile
                containing the full list of zones. e.g. nwsfirezone='LOX241'
            gacc: string, optional
                Name of Geographic Area Coordination Center e.g. gacc='EBCC' See http://gacc.nifc.gov/ for a list of GACCs.
            subgacc: string, optional
                Name of Sub GACC e.g. subgacc='EB07'
            vars: string, optional
                Single or comma separated list of sensor variables. Will return all stations that match one of provided
                variables. Useful for filtering all stations that sense only certain vars. Do not request vars twice in
                the query. e.g. vars='wind_speed,pressure' Use the variables function to see a list of sensor vars.
            status: string, optional
                A value of either active or inactive returns stations currently set as active or inactive in the archive.
                Omitting this param returns all stations. e.g. status='active'
            units: string, optional
                String or set of strings and pipes separated by commas. Default is metric units. Set units='ENGLISH' for
                FREEDOM UNITS ;) Valid other combinations are as follows: temp|C, temp|F, temp|K; speed|mps, speed|mph,
                speed|kph, speed|kts; pres|pa, pres|mb; height|m, height|ft; precip|mm, precip|cm, precip|in; alti|pa,
                alti|inhg. e.g. units='temp|F,speed|kph,metric'
            groupby: string, optional
                Results can be grouped by key words: state, county, country, cwa, nwszone, mwsfirezone, gacc, subgacc
                e.g. groupby='state'
            timeformat: string, optional
                A python format string for returning customized date-time groups for observation times. Can include
                characters. e.g. timeformat='%m/%d/%Y at %H:%M'

            Returns:
            --------
                Dictionary of precipitation observations.

            Raises:
            -------
                None.

        """

        self._check_geo_param(kwargs)
        kwargs['start'] = start
        kwargs['end'] = end
        kwargs['token'] = self.token

        return self._get_response('stations/precipitation', kwargs)
[ "def", "precip", "(", "self", ",", "start", ",", "end", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_check_geo_param", "(", "kwargs", ")", "kwargs", "[", "'start'", "]", "=", "start", "kwargs", "[", "'end'", "]", "=", "end", "kwargs", "[", "'token'", "]", "=", "self", ".", "token", "return", "self", ".", "_get_response", "(", "'stations/precipitation'", ",", "kwargs", ")" ]
57.053333
32.506667
def _l2rgb(self, mode): """Convert from L (black and white) to RGB. """ self._check_modes(("L", "LA")) self.channels.append(self.channels[0].copy()) self.channels.append(self.channels[0].copy()) if self.fill_value is not None: self.fill_value = self.fill_value[:1] * 3 + self.fill_value[1:] if self.mode == "LA": self.channels[1], self.channels[3] = \ self.channels[3], self.channels[1] self.mode = mode
[ "def", "_l2rgb", "(", "self", ",", "mode", ")", ":", "self", ".", "_check_modes", "(", "(", "\"L\"", ",", "\"LA\"", ")", ")", "self", ".", "channels", ".", "append", "(", "self", ".", "channels", "[", "0", "]", ".", "copy", "(", ")", ")", "self", ".", "channels", ".", "append", "(", "self", ".", "channels", "[", "0", "]", ".", "copy", "(", ")", ")", "if", "self", ".", "fill_value", "is", "not", "None", ":", "self", ".", "fill_value", "=", "self", ".", "fill_value", "[", ":", "1", "]", "*", "3", "+", "self", ".", "fill_value", "[", "1", ":", "]", "if", "self", ".", "mode", "==", "\"LA\"", ":", "self", ".", "channels", "[", "1", "]", ",", "self", ".", "channels", "[", "3", "]", "=", "self", ".", "channels", "[", "3", "]", ",", "self", ".", "channels", "[", "1", "]", "self", ".", "mode", "=", "mode" ]
41.333333
10.666667
def _getitem_via_pathlist(external_dict,path_list,**kwargs): ''' y = {'c': {'b': 200}} _getitem_via_pathlist(y,['c','b']) ''' if('s2n' in kwargs): s2n = kwargs['s2n'] else: s2n = 0 if('n2s' in kwargs): n2s = kwargs['n2s'] else: n2s = 0 this = external_dict for i in range(0,path_list.__len__()): key = path_list[i] if(n2s ==1): key = str(key) if(s2n==1): try: int(key) except: pass else: key = int(key) this = this.__getitem__(key) return(this)
[ "def", "_getitem_via_pathlist", "(", "external_dict", ",", "path_list", ",", "*", "*", "kwargs", ")", ":", "if", "(", "'s2n'", "in", "kwargs", ")", ":", "s2n", "=", "kwargs", "[", "'s2n'", "]", "else", ":", "s2n", "=", "0", "if", "(", "'n2s'", "in", "kwargs", ")", ":", "n2s", "=", "kwargs", "[", "'n2s'", "]", "else", ":", "n2s", "=", "0", "this", "=", "external_dict", "for", "i", "in", "range", "(", "0", ",", "path_list", ".", "__len__", "(", ")", ")", ":", "key", "=", "path_list", "[", "i", "]", "if", "(", "n2s", "==", "1", ")", ":", "key", "=", "str", "(", "key", ")", "if", "(", "s2n", "==", "1", ")", ":", "try", ":", "int", "(", "key", ")", "except", ":", "pass", "else", ":", "key", "=", "int", "(", "key", ")", "this", "=", "this", ".", "__getitem__", "(", "key", ")", "return", "(", "this", ")" ]
23.333333
18.444444
def continues(method): '''Method decorator signifying that the visitor should not visit the current node's children once this method has been invoked. ''' @functools.wraps(method) def wrapped(self, *args, **kwargs): yield method(self, *args, **kwargs) raise self.Continue() return wrapped
[ "def", "continues", "(", "method", ")", ":", "@", "functools", ".", "wraps", "(", "method", ")", "def", "wrapped", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "yield", "method", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "raise", "self", ".", "Continue", "(", ")", "return", "wrapped" ]
39.111111
16.888889
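A minimal sketch of how a @continues-decorated method behaves, assuming the visitor class defines the Continue exception the wrapper raises (implied by self.Continue()):

    class Visitor:
        class Continue(Exception):
            pass  # hypothetical signal: skip the current node's children

        @continues
        def visit_node(self, node):
            return node  # wrapped: yields the result once, then raises Continue

        def walk(self, node):
            try:
                for result in self.visit_node(node):
                    print('visited', result)
            except self.Continue:
                pass  # do not descend into children

    Visitor().walk('root')  # prints: visited root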
def context(name): '''A decorator for theme context processors''' def wrapper(func): g.theme.context_processors[name] = func return func return wrapper
[ "def", "context", "(", "name", ")", ":", "def", "wrapper", "(", "func", ")", ":", "g", ".", "theme", ".", "context_processors", "[", "name", "]", "=", "func", "return", "func", "return", "wrapper" ]
29
16.666667
def execute_script(script_blocks, script_vars, gallery_conf): """Execute and capture output from python script already in block structure Parameters ---------- script_blocks : list (label, content, line_number) List where each element is a tuple with the label ('text' or 'code'), the corresponding content string of block and the leading line number script_vars : dict Configuration and run time variables gallery_conf : dict Contains the configuration of Sphinx-Gallery Returns ------- output_blocks : list List of strings where each element is the restructured text representation of the output of each block time_elapsed : float Time elapsed during execution """ example_globals = { # A lot of examples contains 'print(__doc__)' for example in # scikit-learn so that running the example prints some useful # information. Because the docstring has been separated from # the code blocks in sphinx-gallery, __doc__ is actually # __builtin__.__doc__ in the execution context and we do not # want to print it '__doc__': '', # Examples may contain if __name__ == '__main__' guards # for in example scikit-learn if the example uses multiprocessing '__name__': '__main__', # Don't ever support __file__: Issues #166 #212 } argv_orig = sys.argv[:] if script_vars['execute_script']: # We want to run the example without arguments. See # https://github.com/sphinx-gallery/sphinx-gallery/pull/252 # for more details. sys.argv[0] = script_vars['src_file'] sys.argv[1:] = [] t_start = time() gc.collect() _, memory_start = _memory_usage(lambda: None, gallery_conf) compiler = codeop.Compile() # include at least one entry to avoid max() ever failing script_vars['memory_delta'] = [memory_start] output_blocks = [execute_code_block(compiler, block, example_globals, script_vars, gallery_conf) for block in script_blocks] time_elapsed = time() - t_start script_vars['memory_delta'] = ( # actually turn it into a delta now max(script_vars['memory_delta']) - memory_start) sys.argv = argv_orig # Write md5 checksum if the example was meant to run (no-plot # shall not cache md5sum) and has built correctly if script_vars['execute_script']: with open(script_vars['target_file'] + '.md5', 'w') as file_checksum: file_checksum.write(get_md5sum(script_vars['target_file'])) gallery_conf['passing_examples'].append(script_vars['src_file']) return output_blocks, time_elapsed
[ "def", "execute_script", "(", "script_blocks", ",", "script_vars", ",", "gallery_conf", ")", ":", "example_globals", "=", "{", "# A lot of examples contains 'print(__doc__)' for example in", "# scikit-learn so that running the example prints some useful", "# information. Because the docstring has been separated from", "# the code blocks in sphinx-gallery, __doc__ is actually", "# __builtin__.__doc__ in the execution context and we do not", "# want to print it", "'__doc__'", ":", "''", ",", "# Examples may contain if __name__ == '__main__' guards", "# for in example scikit-learn if the example uses multiprocessing", "'__name__'", ":", "'__main__'", ",", "# Don't ever support __file__: Issues #166 #212", "}", "argv_orig", "=", "sys", ".", "argv", "[", ":", "]", "if", "script_vars", "[", "'execute_script'", "]", ":", "# We want to run the example without arguments. See", "# https://github.com/sphinx-gallery/sphinx-gallery/pull/252", "# for more details.", "sys", ".", "argv", "[", "0", "]", "=", "script_vars", "[", "'src_file'", "]", "sys", ".", "argv", "[", "1", ":", "]", "=", "[", "]", "t_start", "=", "time", "(", ")", "gc", ".", "collect", "(", ")", "_", ",", "memory_start", "=", "_memory_usage", "(", "lambda", ":", "None", ",", "gallery_conf", ")", "compiler", "=", "codeop", ".", "Compile", "(", ")", "# include at least one entry to avoid max() ever failing", "script_vars", "[", "'memory_delta'", "]", "=", "[", "memory_start", "]", "output_blocks", "=", "[", "execute_code_block", "(", "compiler", ",", "block", ",", "example_globals", ",", "script_vars", ",", "gallery_conf", ")", "for", "block", "in", "script_blocks", "]", "time_elapsed", "=", "time", "(", ")", "-", "t_start", "script_vars", "[", "'memory_delta'", "]", "=", "(", "# actually turn it into a delta now", "max", "(", "script_vars", "[", "'memory_delta'", "]", ")", "-", "memory_start", ")", "sys", ".", "argv", "=", "argv_orig", "# Write md5 checksum if the example was meant to run (no-plot", "# shall not cache md5sum) and has built correctly", "if", "script_vars", "[", "'execute_script'", "]", ":", "with", "open", "(", "script_vars", "[", "'target_file'", "]", "+", "'.md5'", ",", "'w'", ")", "as", "file_checksum", ":", "file_checksum", ".", "write", "(", "get_md5sum", "(", "script_vars", "[", "'target_file'", "]", ")", ")", "gallery_conf", "[", "'passing_examples'", "]", ".", "append", "(", "script_vars", "[", "'src_file'", "]", ")", "return", "output_blocks", ",", "time_elapsed" ]
39.724638
20.942029
def _matching_string(matched, string): """Return the string as byte or unicode depending on the type of matched, assuming string is an ASCII string. """ if string is None: return string if IS_PY2: # pylint: disable=undefined-variable if isinstance(matched, text_type): return text_type(string) else: if isinstance(matched, bytes) and isinstance(string, str): return string.encode(locale.getpreferredencoding(False)) return string
[ "def", "_matching_string", "(", "matched", ",", "string", ")", ":", "if", "string", "is", "None", ":", "return", "string", "if", "IS_PY2", ":", "# pylint: disable=undefined-variable", "if", "isinstance", "(", "matched", ",", "text_type", ")", ":", "return", "text_type", "(", "string", ")", "else", ":", "if", "isinstance", "(", "matched", ",", "bytes", ")", "and", "isinstance", "(", "string", ",", "str", ")", ":", "return", "string", ".", "encode", "(", "locale", ".", "getpreferredencoding", "(", "False", ")", ")", "return", "string" ]
39.428571
14.428571
def list_namespaced_replica_set(self, namespace, **kwargs):
        """
        list or watch objects of kind ReplicaSet
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_namespaced_replica_set(namespace, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str namespace: object name and auth scope, such as for teams and projects (required)
        :param str pretty: If 'true', then the output is pretty printed.
        :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
        :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
        :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
        :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
        :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
        :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
        :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
        :return: V1ReplicaSetList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.list_namespaced_replica_set_with_http_info(namespace, **kwargs)
        else:
            (data) = self.list_namespaced_replica_set_with_http_info(namespace, **kwargs)
            return data
[ "def", "list_namespaced_replica_set", "(", "self", ",", "namespace", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "list_namespaced_replica_set_with_http_info", "(", "namespace", ",", "*", "*", "kwargs", ")", "else", ":", "(", "data", ")", "=", "self", ".", "list_namespaced_replica_set_with_http_info", "(", "namespace", ",", "*", "*", "kwargs", ")", "return", "data" ]
166.928571
137.142857
def clone (self, new_id, new_toolset_properties):
        """ Returns another generator which differs from $(self) in
              - id
              - value to <toolset> feature in properties
        """
        assert isinstance(new_id, basestring)
        assert is_iterable_typed(new_toolset_properties, basestring)
        return self.__class__ (new_id,
                               self.composing_,
                               self.source_types_,
                               self.target_types_and_names_,
                               # Note: this does not remove any subfeatures of <toolset>
                               # which might cause problems
                               property.change (self.requirements_, '<toolset>') + new_toolset_properties)
[ "def", "clone", "(", "self", ",", "new_id", ",", "new_toolset_properties", ")", ":", "assert", "isinstance", "(", "new_id", ",", "basestring", ")", "assert", "is_iterable_typed", "(", "new_toolset_properties", ",", "basestring", ")", "return", "self", ".", "__class__", "(", "new_id", ",", "self", ".", "composing_", ",", "self", ".", "source_types_", ",", "self", ".", "target_types_and_names_", ",", "# Note: this does not remove any subfeatures of <toolset>", "# which might cause problems", "property", ".", "change", "(", "self", ".", "requirements_", ",", "'<toolset>'", ")", "+", "new_toolset_properties", ")" ]
54.571429
18
def WaitUntilComplete(self, poll_freq=2, timeout=None):
        """Poll until all request objects have completed.

        If status is 'notStarted' or 'executing' continue polling.
        If status is 'succeeded' then success.
        Else log as error.

        poll_freq option is in seconds.

        Returns an int: the number of unsuccessful requests.  This behavior is subject to change.

        >>> clc.v2.Server(alias='BTDI',id='WA1BTDIKRT02').PowerOn().WaitUntilComplete()
        0

        """
        start_time = time.time()
        while len(self.requests):
            cur_requests = []
            for request in self.requests:
                status = request.Status()
                if status in ('notStarted', 'executing', 'resumed', 'queued', 'running'):
                    cur_requests.append(request)
                elif status == 'succeeded':
                    self.success_requests.append(request)
                elif status in ("failed", "unknown"):
                    self.error_requests.append(request)

            self.requests = cur_requests

            if len(self.requests) > 0 and clc.v2.time_utils.TimeoutExpired(start_time, timeout):
                raise clc.RequestTimeoutException('Timeout waiting for Requests: {0}'.format(self.requests[0].id),
                                                  self.requests[0].Status())

            time.sleep(poll_freq)  # alternately - sleep for the delta between start time and 2s

        # Is this the best approach? Non-zero indicates some error. Exception seems the wrong approach for
        # a partial failure
        return(len(self.error_requests))
[ "def", "WaitUntilComplete", "(", "self", ",", "poll_freq", "=", "2", ",", "timeout", "=", "None", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "while", "len", "(", "self", ".", "requests", ")", ":", "cur_requests", "=", "[", "]", "for", "request", "in", "self", ".", "requests", ":", "status", "=", "request", ".", "Status", "(", ")", "if", "status", "in", "(", "'notStarted'", ",", "'executing'", ",", "'resumed'", ",", "'queued'", ",", "'running'", ")", ":", "cur_requests", ".", "append", "(", "request", ")", "elif", "status", "==", "'succeeded'", ":", "self", ".", "success_requests", ".", "append", "(", "request", ")", "elif", "status", "in", "(", "\"failed\"", ",", "\"unknown\"", ")", ":", "self", ".", "error_requests", ".", "append", "(", "request", ")", "self", ".", "requests", "=", "cur_requests", "if", "self", ".", "requests", ">", "0", "and", "clc", ".", "v2", ".", "time_utils", ".", "TimeoutExpired", "(", "start_time", ",", "timeout", ")", ":", "raise", "clc", ".", "RequestTimeoutException", "(", "'Timeout waiting for Requests: {0}'", ".", "format", "(", "self", ".", "requests", "[", "0", "]", ".", "id", ")", ",", "self", ".", "requests", "[", "0", "]", ".", "Status", "(", ")", ")", "time", ".", "sleep", "(", "poll_freq", ")", "# alternately - sleep for the delta between start time and 2s", "# Is this the best approach? Non-zero indicates some error. Exception seems the wrong approach for", "# a partial failure", "return", "(", "len", "(", "self", ".", "error_requests", ")", ")" ]
38.2
29
def query_orders(self, accounts, status='filled'):
        """Query orders.

        Arguments:
            accounts {[type]} -- [description]

        Keyword Arguments:
            status {str} -- 'open' pending, 'filled' filled (default: {'filled'})

        Returns:
            [type] -- [description]
        """
        try:
            data = self.call("orders", {'client': accounts, 'status': status})
            if data is not None:
                orders = data.get('dataTable', False)
                order_headers = orders['columns']
                if ('成交状态' in order_headers or '状态说明' in order_headers) and ('备注' in order_headers):
                    order_headers[order_headers.index('备注')] = '废弃'
                order_headers = [cn_en_compare[item] for item in order_headers]
                order_all = pd.DataFrame(
                    orders['rows'],
                    columns=order_headers
                ).assign(account_cookie=accounts)
                order_all.towards = order_all.towards.apply(
                    lambda x: trade_towards_cn_en[x]
                )
                if 'order_time' in order_headers:
                    # this is order_status
                    order_all['status'] = order_all.status.apply(
                        lambda x: order_status_cn_en[x]
                    )
                    if 'order_date' not in order_headers:
                        order_all.order_time = order_all.order_time.apply(
                            lambda x: QA_util_get_order_datetime(
                                dt='{} {}'.format(datetime.date.today(), x)
                            )
                        )
                    else:
                        order_all = order_all.assign(
                            order_time=order_all.order_date
                            .apply(QA_util_date_int2str) + ' ' +
                            order_all.order_time
                        )
                if 'trade_time' in order_headers:
                    order_all.trade_time = order_all.trade_time.apply(
                        lambda x: '{} {}'.format(datetime.date.today(), x)
                    )
                if status == 'filled':
                    return order_all.loc[:, self.dealstatus_headers].set_index(
                        ['account_cookie', 'realorder_id']
                    ).sort_index()
                else:
                    return order_all.loc[:, self.orderstatus_headers].set_index(
                        ['account_cookie', 'realorder_id']
                    ).sort_index()
            else:
                print('response is None')
                return False
        except Exception as e:
            print(e)
            return False
[ "def", "query_orders", "(", "self", ",", "accounts", ",", "status", "=", "'filled'", ")", ":", "try", ":", "data", "=", "self", ".", "call", "(", "\"orders\"", ",", "{", "'client'", ":", "accounts", ",", "'status'", ":", "status", "}", ")", "if", "data", "is", "not", "None", ":", "orders", "=", "data", ".", "get", "(", "'dataTable'", ",", "False", ")", "order_headers", "=", "orders", "[", "'columns'", "]", "if", "(", "'成交状态' in orde", "_h", "aders", "or", "'状态说明' in orde", "_h", "aders) and ('", "备", "' i", " ", "order_he", "de", "s):", "", "", "order_headers", "[", "order_headers", ".", "index", "(", "'备注')] =", " ", "'", "弃", "", "order_headers", "=", "[", "cn_en_compare", "[", "item", "]", "for", "item", "in", "order_headers", "]", "order_all", "=", "pd", ".", "DataFrame", "(", "orders", "[", "'rows'", "]", ",", "columns", "=", "order_headers", ")", ".", "assign", "(", "account_cookie", "=", "accounts", ")", "order_all", ".", "towards", "=", "order_all", ".", "towards", ".", "apply", "(", "lambda", "x", ":", "trade_towards_cn_en", "[", "x", "]", ")", "if", "'order_time'", "in", "order_headers", ":", "# 这是order_status", "order_all", "[", "'status'", "]", "=", "order_all", ".", "status", ".", "apply", "(", "lambda", "x", ":", "order_status_cn_en", "[", "x", "]", ")", "if", "'order_date'", "not", "in", "order_headers", ":", "order_all", ".", "order_time", "=", "order_all", ".", "order_time", ".", "apply", "(", "lambda", "x", ":", "QA_util_get_order_datetime", "(", "dt", "=", "'{} {}'", ".", "format", "(", "datetime", ".", "date", ".", "today", "(", ")", ",", "x", ")", ")", ")", "else", ":", "order_all", "=", "order_all", ".", "assign", "(", "order_time", "=", "order_all", ".", "order_date", ".", "apply", "(", "QA_util_date_int2str", ")", "+", "' '", "+", "order_all", ".", "order_time", ")", "if", "'trade_time'", "in", "order_headers", ":", "order_all", ".", "trade_time", "=", "order_all", ".", "trade_time", ".", "apply", "(", "lambda", "x", ":", "'{} {}'", ".", "format", "(", "datetime", ".", "date", ".", "today", "(", ")", ",", "x", ")", ")", "if", "status", "is", "'filled'", ":", "return", "order_all", ".", "loc", "[", ":", ",", "self", ".", "dealstatus_headers", "]", ".", "set_index", "(", "[", "'account_cookie'", ",", "'realorder_id'", "]", ")", ".", "sort_index", "(", ")", "else", ":", "return", "order_all", ".", "loc", "[", ":", ",", "self", ".", "orderstatus_headers", "]", ".", "set_index", "(", "[", "'account_cookie'", ",", "'realorder_id'", "]", ")", ".", "sort_index", "(", ")", "else", ":", "print", "(", "'response is None'", ")", "return", "False", "except", "Exception", "as", "e", ":", "print", "(", "e", ")", "return", "False" ]
39.302632
19.513158
def utilization(prev, curr, counters): """ calculate the utilization delta_busy = curr.busy - prev.busy delta_idle = curr.idle - prev.idle utilization = delta_busy / (delta_busy + delta_idle) :param prev: previous resource :param curr: current resource :param counters: list of two, busy ticks and idle ticks :return: value, NaN if invalid. """ busy_prop, idle_prop = counters pb = getattr(prev, busy_prop) pi = getattr(prev, idle_prop) cb = getattr(curr, busy_prop) ci = getattr(curr, idle_prop) db = minus(cb, pb) di = minus(ci, pi) return mul(div(db, add(db, di)), 100)
[ "def", "utilization", "(", "prev", ",", "curr", ",", "counters", ")", ":", "busy_prop", ",", "idle_prop", "=", "counters", "pb", "=", "getattr", "(", "prev", ",", "busy_prop", ")", "pi", "=", "getattr", "(", "prev", ",", "idle_prop", ")", "cb", "=", "getattr", "(", "curr", ",", "busy_prop", ")", "ci", "=", "getattr", "(", "curr", ",", "idle_prop", ")", "db", "=", "minus", "(", "cb", ",", "pb", ")", "di", "=", "minus", "(", "ci", ",", "pi", ")", "return", "mul", "(", "div", "(", "db", ",", "add", "(", "db", ",", "di", ")", ")", ",", "100", ")" ]
25.958333
15.375
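A worked example for utilization, assuming minus/div/add/mul are the module's NaN-propagating arithmetic helpers referenced in the body:

    from collections import namedtuple

    Sample = namedtuple('Sample', ['busy_ticks', 'idle_ticks'])
    prev = Sample(busy_ticks=1000, idle_ticks=9000)
    curr = Sample(busy_ticks=1600, idle_ticks=9400)
    # delta_busy = 600, delta_idle = 400 -> 600 / (600 + 400) * 100
    print(utilization(prev, curr, ('busy_ticks', 'idle_ticks')))  # 60.0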
def AddBlock(self, block): """ Add the given block to the model and also its input/output variables """ if isinstance(block, Block): self.blocks.append(block) self.max_order = max(self.max_order, block.max_input_order-1) self.max_order = max(self.max_order, block.max_output_order) for variable in block.inputs+block.outputs: self._AddVariable(variable) else: print(block) raise TypeError self._utd_graph = False
[ "def", "AddBlock", "(", "self", ",", "block", ")", ":", "if", "isinstance", "(", "block", ",", "Block", ")", ":", "self", ".", "blocks", ".", "append", "(", "block", ")", "self", ".", "max_order", "=", "max", "(", "self", ".", "max_order", ",", "block", ".", "max_input_order", "-", "1", ")", "self", ".", "max_order", "=", "max", "(", "self", ".", "max_order", ",", "block", ".", "max_output_order", ")", "for", "variable", "in", "block", ".", "inputs", "+", "block", ".", "outputs", ":", "self", ".", "_AddVariable", "(", "variable", ")", "else", ":", "print", "(", "block", ")", "raise", "TypeError", "self", ".", "_utd_graph", "=", "False" ]
38.214286
14.642857
def get_alternatives(self):
        """ Get the spelling alternatives for search terms.

        Returns:
            A dict of the form:
            {<search term>: {'count': <number of times the search term occurs in the Storage>,
                             'words': {<an alternative>: {'count': <number of times the alternative occurs in the Storage>,
                                                          'cr': <cr value of the alternative>,
                                                          'idif': <idif value of the alternative>,
                                                          'h': <h value of the alternative>}
                                       }  // Repeated for every alternative.
                             }
            }  // Repeated for every search term
        """
        return dict([(alternatives.find('to').text,
                      {'count': int(alternatives.find('count').text),
                       'words': dict([(word.text, word.attrib) for word in alternatives.findall('word')])})
                     for alternatives in self._content.find('alternatives_list').findall('alternatives')])
[ "def", "get_alternatives", "(", "self", ")", ":", "return", "dict", "(", "[", "(", "alternatives", ".", "find", "(", "'to'", ")", ".", "text", ",", "{", "'count'", ":", "int", "(", "alternatives", ".", "find", "(", "'count'", ")", ".", "text", ")", ",", "'words'", ":", "dict", "(", "[", "(", "word", ".", "text", ",", "word", ".", "attrib", ")", "for", "word", "in", "alternatives", ".", "findall", "(", "'word'", ")", "]", ")", "}", ")", "for", "alternatives", "in", "self", ".", "_content", ".", "find", "(", "'alternatives_list'", ")", ".", "findall", "(", "'alternatives'", ")", "]", ")" ]
61
30.4
def get_author_by_name(self, name: str) -> Optional[Author]: """Get an author by name, if it exists in the database.""" return self.session.query(Author).filter(Author.has_name(name)).one_or_none()
[ "def", "get_author_by_name", "(", "self", ",", "name", ":", "str", ")", "->", "Optional", "[", "Author", "]", ":", "return", "self", ".", "session", ".", "query", "(", "Author", ")", ".", "filter", "(", "Author", ".", "has_name", "(", "name", ")", ")", ".", "one_or_none", "(", ")" ]
70.333333
21.666667
def getsetitem(self, key, klass, args=None, kwdargs=None): """This is similar to setdefault(), except that the new value is created by instantiating _klass_. This prevents you from having to create an object and initialize it and then throw it away if there is already a dictionary item of that type. """ with self.lock: if key in self: return self.getitem(key) # Instantiate value. if not args: args = [] if not kwdargs: kwdargs = {} value = klass(*args, **kwdargs) self.setitem(key, value) return value
[ "def", "getsetitem", "(", "self", ",", "key", ",", "klass", ",", "args", "=", "None", ",", "kwdargs", "=", "None", ")", ":", "with", "self", ".", "lock", ":", "if", "key", "in", "self", ":", "return", "self", ".", "getitem", "(", "key", ")", "# Instantiate value.", "if", "not", "args", ":", "args", "=", "[", "]", "if", "not", "kwdargs", ":", "kwdargs", "=", "{", "}", "value", "=", "klass", "(", "*", "args", ",", "*", "*", "kwdargs", ")", "self", ".", "setitem", "(", "key", ",", "value", ")", "return", "value" ]
33.45
16.7
def optionIsSet(self, name):
        """
        Check whether an option with a given name exists and has been set.

        :param name: the name of the option to check; can be short or long name.
        :return: true if an option matching the given name exists and it has had
                 its value set by the user
        """
        name = name.strip()
        if not self.hasOption(name):
            return False
        return self.getOption(name).isSet()
[ "def", "optionIsSet", "(", "self", ",", "name", ")", ":", "name", "=", "name", ".", "strip", "(", ")", "if", "not", "self", ".", "hasOption", "(", "name", ")", ":", "return", "False", "return", "self", ".", "getOption", "(", "name", ")", ".", "isSet", "(", ")" ]
34.583333
16.916667
def remove_section(self, section): """Remove a file section.""" existed = section in self._sections if existed: del self._sections[section] del self._proxies[section] return existed
[ "def", "remove_section", "(", "self", ",", "section", ")", ":", "existed", "=", "section", "in", "self", ".", "_sections", "if", "existed", ":", "del", "self", ".", "_sections", "[", "section", "]", "del", "self", ".", "_proxies", "[", "section", "]", "return", "existed" ]
33
7.285714
def parse_external_id(output, type=EXTERNAL_ID_TYPE_ANY):
    """
    Attempt to parse the output of job submission commands for an external id.

    >>> parse_external_id("12345.pbsmanager")
    '12345.pbsmanager'
    >>> parse_external_id('Submitted batch job 185')
    '185'
    >>> parse_external_id('Submitted batch job 185', type='torque')
    'Submitted batch job 185'
    >>> parse_external_id('submitted to cluster 125.')
    '125'
    >>> parse_external_id('submitted to cluster 125.', type='slurm')
    >>>
    """
    external_id = None
    for pattern_type, pattern in EXTERNAL_ID_PATTERNS:
        if type != EXTERNAL_ID_TYPE_ANY and type != pattern_type:
            continue

        match = search(pattern, output)
        if match:
            external_id = match.group(1)
            break

    return external_id
[ "def", "parse_external_id", "(", "output", ",", "type", "=", "EXTERNAL_ID_TYPE_ANY", ")", ":", "external_id", "=", "None", "for", "pattern_type", ",", "pattern", "in", "EXTERNAL_ID_PATTERNS", ":", "if", "type", "!=", "EXTERNAL_ID_TYPE_ANY", "and", "type", "!=", "pattern_type", ":", "continue", "match", "=", "search", "(", "pattern", ",", "output", ")", "if", "match", ":", "external_id", "=", "match", ".", "group", "(", "1", ")", "break", "return", "external_id" ]
31.307692
20.538462
def exec_workflow(self, model, record_id, signal): """Execute the workflow `signal` on the instance having the ID `record_id` of `model`. *Python 2:* :raise: :class:`odoorpc.error.RPCError` :raise: :class:`odoorpc.error.InternalError` (if not logged) :raise: `urllib2.URLError` (connection error) *Python 3:* :raise: :class:`odoorpc.error.RPCError` :raise: :class:`odoorpc.error.InternalError` (if not logged) :raise: `urllib.error.URLError` (connection error) """ if tools.v(self.version)[0] >= 11: raise DeprecationWarning( u"Workflows have been removed in Odoo >= 11.0") self._check_logged_user() # Execute the workflow query args_to_send = [self.env.db, self.env.uid, self._password, model, signal, record_id] data = self.json( '/jsonrpc', {'service': 'object', 'method': 'exec_workflow', 'args': args_to_send}) return data.get('result')
[ "def", "exec_workflow", "(", "self", ",", "model", ",", "record_id", ",", "signal", ")", ":", "if", "tools", ".", "v", "(", "self", ".", "version", ")", "[", "0", "]", ">=", "11", ":", "raise", "DeprecationWarning", "(", "u\"Workflows have been removed in Odoo >= 11.0\"", ")", "self", ".", "_check_logged_user", "(", ")", "# Execute the workflow query", "args_to_send", "=", "[", "self", ".", "env", ".", "db", ",", "self", ".", "env", ".", "uid", ",", "self", ".", "_password", ",", "model", ",", "signal", ",", "record_id", "]", "data", "=", "self", ".", "json", "(", "'/jsonrpc'", ",", "{", "'service'", ":", "'object'", ",", "'method'", ":", "'exec_workflow'", ",", "'args'", ":", "args_to_send", "}", ")", "return", "data", ".", "get", "(", "'result'", ")" ]
36.37931
15.758621
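A hedged usage sketch for exec_workflow (valid only on Odoo < 11.0, per the deprecation guard); the model name, record id, and signal are illustrative:

    # assuming an authenticated client instance exposing this method
    odoo.exec_workflow('sale.order', 42, 'order_confirm')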
def query():
    """Query hot movies information from douban."""
    r = requests_get(QUERY_URL)
    try:
        rows = r.json()['subject_collection_items']
    except (IndexError, TypeError):
        rows = []
    return MoviesCollection(rows)
[ "def", "query", "(", ")", ":", "r", "=", "requests_get", "(", "QUERY_URL", ")", "try", ":", "rows", "=", "r", ".", "json", "(", ")", "[", "'subject_collection_items'", "]", "except", "(", "IndexError", ",", "TypeError", ")", ":", "rows", "=", "[", "]", "return", "MoviesCollection", "(", "rows", ")" ]
21.545455
21.363636
def add_aliases_formatting(self, aliases): """Adds the formatting information on a command's aliases. The formatting should be added to the :attr:`paginator`. The default implementation is the :attr:`aliases_heading` bolded followed by a comma separated list of aliases. This is not called if there are no aliases to format. Parameters ----------- aliases: Sequence[:class:`str`] A list of aliases to format. """ self.paginator.add_line('**%s** %s' % (self.aliases_heading, ', '.join(aliases)), empty=True)
[ "def", "add_aliases_formatting", "(", "self", ",", "aliases", ")", ":", "self", ".", "paginator", ".", "add_line", "(", "'**%s** %s'", "%", "(", "self", ".", "aliases_heading", ",", "', '", ".", "join", "(", "aliases", ")", ")", ",", "empty", "=", "True", ")" ]
36.6875
22.375
def decrypt(key, ciphertext):
    """Decrypt Vigenere encrypted ``ciphertext`` using ``key``.

    Example:
        >>> decrypt("KEY", "RIJVS")
        'HELLO'

    Args:
        key (iterable): The key to use
        ciphertext (str): The text to decrypt

    Returns:
        Decrypted ciphertext
    """
    index = 0
    decrypted = ""
    for char in ciphertext:
        if char in string.punctuation + string.whitespace + string.digits:
            decrypted += char
            continue  # Not part of the decryption

        # Rotate character by the alphabet position of the letter in the key
        alphabet = string.ascii_uppercase if key[index].isupper() else string.ascii_lowercase
        decrypted += ''.join(shift.decrypt(int(alphabet.index(key[index])), char))
        index = (index + 1) % len(key)

    return decrypted
[ "def", "decrypt", "(", "key", ",", "ciphertext", ")", ":", "index", "=", "0", "decrypted", "=", "\"\"", "for", "char", "in", "ciphertext", ":", "if", "char", "in", "string", ".", "punctuation", "+", "string", ".", "whitespace", "+", "string", ".", "digits", ":", "decrypted", "+=", "char", "continue", "# Not part of the decryption", "# Rotate character by the alphabet position of the letter in the key", "alphabet", "=", "string", ".", "ascii_uppercase", "if", "key", "[", "index", "]", ".", "isupper", "(", ")", "else", "string", ".", "ascii_lowercase", "decrypted", "+=", "''", ".", "join", "(", "shift", ".", "decrypt", "(", "int", "(", "alphabet", ".", "index", "(", "key", "[", "index", "]", ")", ")", ",", "char", ")", ")", "index", "=", "(", "index", "+", "1", ")", "%", "len", "(", "key", ")", "return", "decrypted" ]
30.037037
22.925926
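A usage sketch for decrypt, assuming shift.decrypt is the module's Caesar-shift helper; punctuation, digits, and whitespace pass through unchanged and do not advance the key index:

    print(decrypt("KEY", "RIJVS"))          # HELLO
    print(decrypt("key", "rijvs, uyvjn!"))  # hello, world!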
def video_top(body_output, targets, model_hparams, vocab_size): """Top transformation for video.""" del targets # unused arg num_channels = model_hparams.problem.num_channels shape = common_layers.shape_list(body_output) reshape_shape = shape[:-1] + [num_channels, vocab_size] res = tf.reshape(body_output, reshape_shape) # Calculate argmax so as to have a summary with the produced images. x = tf.argmax(tf.reshape(res, [-1, vocab_size]), axis=-1) x = tf.reshape(x, shape[:-1] + [num_channels]) common_video.gif_summary("results", x, max_outputs=1) return res
[ "def", "video_top", "(", "body_output", ",", "targets", ",", "model_hparams", ",", "vocab_size", ")", ":", "del", "targets", "# unused arg", "num_channels", "=", "model_hparams", ".", "problem", ".", "num_channels", "shape", "=", "common_layers", ".", "shape_list", "(", "body_output", ")", "reshape_shape", "=", "shape", "[", ":", "-", "1", "]", "+", "[", "num_channels", ",", "vocab_size", "]", "res", "=", "tf", ".", "reshape", "(", "body_output", ",", "reshape_shape", ")", "# Calculate argmax so as to have a summary with the produced images.", "x", "=", "tf", ".", "argmax", "(", "tf", ".", "reshape", "(", "res", ",", "[", "-", "1", ",", "vocab_size", "]", ")", ",", "axis", "=", "-", "1", ")", "x", "=", "tf", ".", "reshape", "(", "x", ",", "shape", "[", ":", "-", "1", "]", "+", "[", "num_channels", "]", ")", "common_video", ".", "gif_summary", "(", "\"results\"", ",", "x", ",", "max_outputs", "=", "1", ")", "return", "res" ]
47.666667
14.75
def format_output(data, headers, format_name, **kwargs): """Format output using *format_name*. This is a wrapper around the :class:`TabularOutputFormatter` class. :param iterable data: An :term:`iterable` (e.g. list) of rows. :param iterable headers: The column headers. :param str format_name: The display format to use. :param \*\*kwargs: Optional arguments for the formatter. :return: The formatted data. :rtype: str """ formatter = TabularOutputFormatter(format_name=format_name) return formatter.format_output(data, headers, **kwargs)
[ "def", "format_output", "(", "data", ",", "headers", ",", "format_name", ",", "*", "*", "kwargs", ")", ":", "formatter", "=", "TabularOutputFormatter", "(", "format_name", "=", "format_name", ")", "return", "formatter", ".", "format_output", "(", "data", ",", "headers", ",", "*", "*", "kwargs", ")" ]
38.133333
20.666667
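A hedged usage sketch for format_output, assuming 'ascii' is one of the display formats registered with TabularOutputFormatter:

    data = [['alice', 30], ['bob', 25]]
    headers = ['name', 'age']
    print(format_output(data, headers, format_name='ascii'))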
def save(self, overwrite=True):
        """
        Saves PopulationSet and TransitSignal.

        Shouldn't need to use this if you're using
        :func:`FPPCalculation.from_ini`.

        Saves :class:`PopulationSet` to ``[folder]/popset.h5``
        and :class:`TransitSignal` to ``[folder]/trsig.pkl``.

        :param overwrite: (optional)
            Whether to overwrite existing files.
        """
        self.save_popset(overwrite=overwrite)
        self.save_signal()
[ "def", "save", "(", "self", ",", "overwrite", "=", "True", ")", ":", "self", ".", "save_popset", "(", "overwrite", "=", "overwrite", ")", "self", ".", "save_signal", "(", ")" ]
29.1875
16.1875
def _spacingx(node, max_dims, xoffset, xspace):
    '''Determine the spacing of the current node depending on the number
    of leaves of the tree
    '''
    x_spacing = _n_terminations(node) * xspace

    if x_spacing > max_dims[0]:
        max_dims[0] = x_spacing

    return xoffset - x_spacing / 2.
[ "def", "_spacingx", "(", "node", ",", "max_dims", ",", "xoffset", ",", "xspace", ")", ":", "x_spacing", "=", "_n_terminations", "(", "node", ")", "*", "xspace", "if", "x_spacing", ">", "max_dims", "[", "0", "]", ":", "max_dims", "[", "0", "]", "=", "x_spacing", "return", "xoffset", "-", "x_spacing", "/", "2." ]
30.1
18.9
def set_variable(self, name, type_, size): """ Register variable of name and type_, with a (multidimensional) size. :param name: variable name as it appears in code :param type_: may be any key from Kernel.datatypes_size (typically float or double) :param size: either None for scalars or an n-tuple of ints for an n-dimensional array """ assert type_ in self.datatypes_size, 'only float and double variables are supported' if self.datatype is None: self.datatype = type_ else: assert type_ == self.datatype, 'mixing of datatypes within a kernel is not supported.' assert type(size) in [tuple, type(None)], 'size has to be defined as tuple or None' self.variables[name] = (type_, size)
[ "def", "set_variable", "(", "self", ",", "name", ",", "type_", ",", "size", ")", ":", "assert", "type_", "in", "self", ".", "datatypes_size", ",", "'only float and double variables are supported'", "if", "self", ".", "datatype", "is", "None", ":", "self", ".", "datatype", "=", "type_", "else", ":", "assert", "type_", "==", "self", ".", "datatype", ",", "'mixing of datatypes within a kernel is not supported.'", "assert", "type", "(", "size", ")", "in", "[", "tuple", ",", "type", "(", "None", ")", "]", ",", "'size has to be defined as tuple or None'", "self", ".", "variables", "[", "name", "]", "=", "(", "type_", ",", "size", ")" ]
52.266667
26.933333
def create(cls, name, protocol_number, protocol_agent=None, comment=None): """ Create the IP Service :param str name: name of ip-service :param int protocol_number: ip proto number for this service :param str,ProtocolAgent protocol_agent: optional protocol agent for this service :param str comment: optional comment :raises CreateElementFailed: failure creating element with reason :return: instance with meta :rtype: IPService """ json = {'name': name, 'protocol_number': protocol_number, 'protocol_agent_ref': element_resolver(protocol_agent) or None, 'comment': comment} return ElementCreator(cls, json)
[ "def", "create", "(", "cls", ",", "name", ",", "protocol_number", ",", "protocol_agent", "=", "None", ",", "comment", "=", "None", ")", ":", "json", "=", "{", "'name'", ":", "name", ",", "'protocol_number'", ":", "protocol_number", ",", "'protocol_agent_ref'", ":", "element_resolver", "(", "protocol_agent", ")", "or", "None", ",", "'comment'", ":", "comment", "}", "return", "ElementCreator", "(", "cls", ",", "json", ")" ]
39.315789
17.421053
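A hedged usage sketch for the create classmethod above; the element name and comment are illustrative (89 is the IANA protocol number for OSPF):

    service = IPService.create(name='ospf-service',
                               protocol_number=89,
                               comment='OSPF over IP')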
def write_file(file_path, content): """ Write file at the specified path with content. If file exists, it will be overwritten. """ handler = open(file_path, 'w+') handler.write(content) handler.close()
[ "def", "write_file", "(", "file_path", ",", "content", ")", ":", "handler", "=", "open", "(", "file_path", ",", "'w+'", ")", "handler", ".", "write", "(", "content", ")", "handler", ".", "close", "(", ")" ]
27.75
7.25
def slice_naive(self, key): """ Naively (on index) slice the field data and values. Args: key: Int, slice, or iterable to select data and values Returns: field: Sliced field object """ cls = self.__class__ key = check_key(self, key) enum = pd.Series(range(len(self))) enum.index = self.index values = self.field_values[enum[key].values] data = self.loc[key] return cls(data, field_values=values)
[ "def", "slice_naive", "(", "self", ",", "key", ")", ":", "cls", "=", "self", ".", "__class__", "key", "=", "check_key", "(", "self", ",", "key", ")", "enum", "=", "pd", ".", "Series", "(", "range", "(", "len", "(", "self", ")", ")", ")", "enum", ".", "index", "=", "self", ".", "index", "values", "=", "self", ".", "field_values", "[", "enum", "[", "key", "]", ".", "values", "]", "data", "=", "self", ".", "loc", "[", "key", "]", "return", "cls", "(", "data", ",", "field_values", "=", "values", ")" ]
29.470588
14.647059
def _iter_coords(nsls): """Iterate through all matching coordinates in a sequence of slices.""" # First convert all slices to ranges ranges = list() for nsl in nsls: if isinstance(nsl, int): ranges.append(range(nsl, nsl+1)) else: ranges.append(range(nsl.start, nsl.stop)) # Iterate through all matching coordinates yield from itertools.product(*ranges)
[ "def", "_iter_coords", "(", "nsls", ")", ":", "# First convert all slices to ranges", "ranges", "=", "list", "(", ")", "for", "nsl", "in", "nsls", ":", "if", "isinstance", "(", "nsl", ",", "int", ")", ":", "ranges", ".", "append", "(", "range", "(", "nsl", ",", "nsl", "+", "1", ")", ")", "else", ":", "ranges", ".", "append", "(", "range", "(", "nsl", ".", "start", ",", "nsl", ".", "stop", ")", ")", "# Iterate through all matching coordinates", "yield", "from", "itertools", ".", "product", "(", "*", "ranges", ")" ]
36.909091
10.636364
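A worked example for _iter_coords: ints select a single coordinate, slice-like objects (anything with .start and .stop, including a plain slice) expand over start..stop-1, and the function yields the Cartesian product:

    for coord in _iter_coords([2, slice(0, 2)]):
        print(coord)
    # (2, 0)
    # (2, 1)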
def withdraw(self, amount): """ Withdraws specified neopoints from the user's account, returns result Parameters: amount (int) -- Amount of neopoints to withdraw Returns bool - True if successful, False otherwise Raises notEnoughBalance """ pg = self.usr.getPage("http://www.neopets.com/bank.phtml") try: results = pg.find(text = "Account Type:").parent.parent.parent.find_all("td", align="center") self.balance = results[1].text.replace(" NP", "") except Exception: logging.getLogger("neolib.user").exception("Could not parse user's bank balance.", {'pg': pg}) if int(amount) > int(self.balance.replace(",", "")): raise notEnoughBalance form = pg.form(action="process_bank.phtml") form.update({'type': 'withdraw', 'amount': str(amount)}) form.usePin = True pg = form.submit() # Success redirects to bank page if "It's great to see you again" in pg.content: self.__loadDetails(pg) return True else: logging.getLogger("neolib.user").info("Failed to withdraw NPs for unknown reason. User NPs: " + str(self.usr.nps) + ". Amount: " + str(amount), {'pg': pg}) return False
[ "def", "withdraw", "(", "self", ",", "amount", ")", ":", "pg", "=", "self", ".", "usr", ".", "getPage", "(", "\"http://www.neopets.com/bank.phtml\"", ")", "try", ":", "results", "=", "pg", ".", "find", "(", "text", "=", "\"Account Type:\"", ")", ".", "parent", ".", "parent", ".", "parent", ".", "find_all", "(", "\"td\"", ",", "align", "=", "\"center\"", ")", "self", ".", "balance", "=", "results", "[", "1", "]", ".", "text", ".", "replace", "(", "\" NP\"", ",", "\"\"", ")", "except", "Exception", ":", "logging", ".", "getLogger", "(", "\"neolib.user\"", ")", ".", "exception", "(", "\"Could not parse user's bank balance.\"", ",", "{", "'pg'", ":", "pg", "}", ")", "if", "int", "(", "amount", ")", ">", "int", "(", "self", ".", "balance", ".", "replace", "(", "\",\"", ",", "\"\"", ")", ")", ":", "raise", "notEnoughBalance", "form", "=", "pg", ".", "form", "(", "action", "=", "\"process_bank.phtml\"", ")", "form", ".", "update", "(", "{", "'type'", ":", "'withdraw'", ",", "'amount'", ":", "str", "(", "amount", ")", "}", ")", "form", ".", "usePin", "=", "True", "pg", "=", "form", ".", "submit", "(", ")", "# Success redirects to bank page", "if", "\"It's great to see you again\"", "in", "pg", ".", "content", ":", "self", ".", "__loadDetails", "(", "pg", ")", "return", "True", "else", ":", "logging", ".", "getLogger", "(", "\"neolib.user\"", ")", ".", "info", "(", "\"Failed to withdraw NPs for unknown reason. User NPs: \"", "+", "str", "(", "self", ".", "usr", ".", "nps", ")", "+", "\". Amount: \"", "+", "str", "(", "amount", ")", ",", "{", "'pg'", ":", "pg", "}", ")", "return", "False" ]
39.028571
24.514286
def main(args): """Remove lines after marker.""" filename = args[0] marker = args[1] for line in fileinput.input(filename, inplace=1): print(line.rstrip()) if line.startswith(marker): break
[ "def", "main", "(", "args", ")", ":", "filename", "=", "args", "[", "0", "]", "marker", "=", "args", "[", "1", "]", "for", "line", "in", "fileinput", ".", "input", "(", "filename", ",", "inplace", "=", "1", ")", ":", "print", "(", "line", ".", "rstrip", "(", ")", ")", "if", "line", ".", "startswith", "(", "marker", ")", ":", "break" ]
28.25
14.5
def set_content(self, content): """Set textual content for the object/node. Verifies the node is allowed to contain content, and throws an exception if not. """ if self.allows_content: self.content = content.strip() else: raise UNTLStructureException( 'Element "%s" does not allow textual content' % (self.tag,) )
[ "def", "set_content", "(", "self", ",", "content", ")", ":", "if", "self", ".", "allows_content", ":", "self", ".", "content", "=", "content", ".", "strip", "(", ")", "else", ":", "raise", "UNTLStructureException", "(", "'Element \"%s\" does not allow textual content'", "%", "(", "self", ".", "tag", ",", ")", ")" ]
33.583333
16.25
def role_get(role_id=None, name=None, profile=None, **connection_args):
    '''
    Return a specific role (keystone role-get)

    CLI Examples:

    .. code-block:: bash

        salt '*' keystone.role_get c965f79c4f864eaaa9c3b41904e67082
        salt '*' keystone.role_get role_id=c965f79c4f864eaaa9c3b41904e67082
        salt '*' keystone.role_get name=nova
    '''
    kstone = auth(profile, **connection_args)
    ret = {}
    if name:
        for role in kstone.roles.list():
            if role.name == name:
                role_id = role.id
                break
    if not role_id:
        return {'Error': 'Unable to resolve role id'}
    role = kstone.roles.get(role_id)

    ret[role.name] = {'id': role.id,
                      'name': role.name}
    return ret
[ "def", "role_get", "(", "role_id", "=", "None", ",", "name", "=", "None", ",", "profile", "=", "None", ",", "*", "*", "connection_args", ")", ":", "kstone", "=", "auth", "(", "profile", ",", "*", "*", "connection_args", ")", "ret", "=", "{", "}", "if", "name", ":", "for", "role", "in", "kstone", ".", "roles", ".", "list", "(", ")", ":", "if", "role", ".", "name", "==", "name", ":", "role_id", "=", "role", ".", "id", "break", "if", "not", "role_id", ":", "return", "{", "'Error'", ":", "'Unable to resolve role id'", "}", "role", "=", "kstone", ".", "roles", ".", "get", "(", "role_id", ")", "ret", "[", "role", ".", "name", "]", "=", "{", "'id'", ":", "role", ".", "id", ",", "'name'", ":", "role", ".", "name", "}", "return", "ret" ]
28.961538
20.423077
def blast(args):
    """
    %prog blast <deltafile|coordsfile>

    Convert delta or coords file to BLAST tabular output.
    """
    p = OptionParser(blast.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    deltafile, = args
    blastfile = deltafile.rsplit(".", 1)[0] + ".blast"

    if need_update(deltafile, blastfile):
        coords = Coords(deltafile)
        fw = open(blastfile, "w")
        for c in coords:
            print(c.blastline, file=fw)
[ "def", "blast", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "blast", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "1", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "deltafile", ",", "=", "args", "blastfile", "=", "deltafile", ".", "rsplit", "(", "\".\"", ",", "1", ")", "[", "0", "]", "+", "\".blast\"", "if", "need_update", "(", "deltafile", ",", "blastfile", ")", ":", "coords", "=", "Coords", "(", "deltafile", ")", "fw", "=", "open", "(", "blastfile", ",", "\"w\"", ")", "for", "c", "in", "coords", ":", "print", "(", "c", ".", "blastline", ",", "file", "=", "fw", ")" ]
24.85
14.85
def factory(cfg, src_db, dest_db): """ Instantiate Transformation :param cfg: transformation configuration "class": "na3x.transformation.transformer.Col2XTransformation", <Transformation class> "cfg": { "src.db.load": { "src": "sprint.backlog_links" <Collection(s) to be loaded> }, "transform": { "func": "ext.transformers.gantt_links" <transformer function> }, "dest.db.cleanup": { "target": "baseline.gantt_links" <Collection to be cleaned during transformation (usually the same as destination)> }, "dest.db.save": { "dest": "baseline.gantt_links" <Destination collection> } } :param src_db: source db for transformation :param dest_db: destination db for transformation :return: Transformation instance """ return obj_for_name(cfg[Transformation.__CFG_KEY_TRANSFORMATION_CLASS])( cfg[Transformation.CFG_KEY_TRANSFORMATION_CFG], src_db, dest_db)
[ "def", "factory", "(", "cfg", ",", "src_db", ",", "dest_db", ")", ":", "return", "obj_for_name", "(", "cfg", "[", "Transformation", ".", "__CFG_KEY_TRANSFORMATION_CLASS", "]", ")", "(", "cfg", "[", "Transformation", ".", "CFG_KEY_TRANSFORMATION_CFG", "]", ",", "src_db", ",", "dest_db", ")" ]
39
23.8
def add_job(self, job, merged=False, widened=False):
    """
    Append a new job to this JobInfo node.

    :param job: The new job to append.
    :param bool merged: Whether it is a merged job or not.
    :param bool widened: Whether it is a widened job or not.
    """
    job_type = ''
    if merged:
        job_type = 'merged'
    elif widened:
        job_type = 'widened'
    self.jobs.append((job, job_type))
[ "def", "add_job", "(", "self", ",", "job", ",", "merged", "=", "False", ",", "widened", "=", "False", ")", ":", "job_type", "=", "''", "if", "merged", ":", "job_type", "=", "'merged'", "elif", "widened", ":", "job_type", "=", "'widened'", "self", ".", "jobs", ".", "append", "(", "(", "job", ",", "job_type", ")", ")" ]
32.428571
13.285714
def set_process(self, process = None): """ Manually set the parent process. Use with care! @type process: L{Process} @param process: (Optional) Process object. Use C{None} for no process. """ if process is None: self.__process = None else: global Process # delayed import if Process is None: from winappdbg.process import Process if not isinstance(process, Process): msg = "Parent process must be a Process instance, " msg += "got %s instead" % type(process) raise TypeError(msg) self.__process = process
[ "def", "set_process", "(", "self", ",", "process", "=", "None", ")", ":", "if", "process", "is", "None", ":", "self", ".", "__process", "=", "None", "else", ":", "global", "Process", "# delayed import", "if", "Process", "is", "None", ":", "from", "winappdbg", ".", "process", "import", "Process", "if", "not", "isinstance", "(", "process", ",", "Process", ")", ":", "msg", "=", "\"Parent process must be a Process instance, \"", "msg", "+=", "\"got %s instead\"", "%", "type", "(", "process", ")", "raise", "TypeError", "(", "msg", ")", "self", ".", "__process", "=", "process" ]
37.5
13.166667
def get(self, requestId):
    """
    Gets details of a device management request.
    It accepts requestId (string) as a parameter.
    In case of failure it throws APIException.
    """
    url = MgmtRequests.mgmtSingleRequest % (requestId)
    r = self._apiClient.get(url)
    if r.status_code == 200:
        return r.json()
    else:
        raise ApiException(r)
[ "def", "get", "(", "self", ",", "requestId", ")", ":", "url", "=", "MgmtRequests", ".", "mgmtSingleRequest", "%", "(", "requestId", ")", "r", "=", "self", ".", "_apiClient", ".", "get", "(", "url", ")", "if", "r", ".", "status_code", "==", "200", ":", "return", "r", ".", "json", "(", ")", "else", ":", "raise", "ApiException", "(", "r", ")" ]
30.615385
12.615385
def delete_password(self, service, username): """Delete the password for the username of the service. """ try: key_name = self._key_for_service(service) hkey = winreg.OpenKey( winreg.HKEY_CURRENT_USER, key_name, 0, winreg.KEY_ALL_ACCESS) winreg.DeleteValue(hkey, username) winreg.CloseKey(hkey) except WindowsError: e = sys.exc_info()[1] raise PasswordDeleteError(e) self._delete_key_if_empty(service)
[ "def", "delete_password", "(", "self", ",", "service", ",", "username", ")", ":", "try", ":", "key_name", "=", "self", ".", "_key_for_service", "(", "service", ")", "hkey", "=", "winreg", ".", "OpenKey", "(", "winreg", ".", "HKEY_CURRENT_USER", ",", "key_name", ",", "0", ",", "winreg", ".", "KEY_ALL_ACCESS", ")", "winreg", ".", "DeleteValue", "(", "hkey", ",", "username", ")", "winreg", ".", "CloseKey", "(", "hkey", ")", "except", "WindowsError", ":", "e", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "raise", "PasswordDeleteError", "(", "e", ")", "self", ".", "_delete_key_if_empty", "(", "service", ")" ]
38
7.285714
def composite_multiscale_entropy(time_series, sample_length, scale, tolerance=None):
    """Calculate the Composite Multiscale Entropy of the given time series.

    Args:
        time_series: Time series for analysis
        sample_length: Number of sequential points of the time series
        scale: Scale factor
        tolerance: Tolerance (default = 0.1...0.2 * std(time_series))

    Returns:
        Vector containing Composite Multiscale Entropy

    Reference:
        [1] Wu, Shuen-De, et al. "Time series analysis using
        composite multiscale entropy." Entropy 15.3 (2013): 1069-1084.
    """
    cmse = np.zeros(scale)

    for i in range(scale):
        for j in range(i + 1):
            tmp = util_granulate_time_series(time_series[j:], i + 1)
            cmse[i] += sample_entropy(tmp, sample_length, tolerance) / (i + 1)
    return cmse
[ "def", "composite_multiscale_entropy", "(", "time_series", ",", "sample_length", ",", "scale", ",", "tolerance", "=", "None", ")", ":", "cmse", "=", "np", ".", "zeros", "(", "(", "1", ",", "scale", ")", ")", "for", "i", "in", "range", "(", "scale", ")", ":", "for", "j", "in", "range", "(", "i", ")", ":", "tmp", "=", "util_granulate_time_series", "(", "time_series", "[", "j", ":", "]", ",", "i", "+", "1", ")", "cmse", "[", "i", "]", "+=", "sample_entropy", "(", "tmp", ",", "sample_length", ",", "tolerance", ")", "/", "(", "i", "+", "1", ")", "return", "cmse" ]
36.652174
24.391304
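The coarse-graining step that `util_granulate_time_series` is assumed to perform (its source is not shown here) can be sketched with plain NumPy; this is an illustration, not the package's actual implementation.

import numpy as np

def coarse_grain(x, scale):
    # Average consecutive, non-overlapping windows of length `scale`,
    # as in multiscale entropy coarse-graining.
    n = len(x) // scale
    return x[:n * scale].reshape(n, scale).mean(axis=1)

x = np.sin(np.linspace(0, 20, 500))
print([len(coarse_grain(x, s)) for s in (1, 2, 5)])  # [500, 250, 100]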
def _init_code(self, code: int) -> None: """ Initialize from an int terminal code. """ if -1 < code < 256: self.code = '{:02}'.format(code) self.hexval = term2hex(code) self.rgb = hex2rgb(self.hexval) else: raise ValueError(' '.join(( 'Code must be in the range 0-255, inclusive.', 'Got: {} ({})' )).format(code, getattr(code, '__name__', type(code).__name__)))
[ "def", "_init_code", "(", "self", ",", "code", ":", "int", ")", "->", "None", ":", "if", "-", "1", "<", "code", "<", "256", ":", "self", ".", "code", "=", "'{:02}'", ".", "format", "(", "code", ")", "self", ".", "hexval", "=", "term2hex", "(", "code", ")", "self", ".", "rgb", "=", "hex2rgb", "(", "self", ".", "hexval", ")", "else", ":", "raise", "ValueError", "(", "' '", ".", "join", "(", "(", "'Code must be in the range 0-255, inclusive.'", ",", "'Got: {} ({})'", ")", ")", ".", "format", "(", "code", ",", "getattr", "(", "code", ",", "'__name__'", ",", "type", "(", "code", ")", ".", "__name__", ")", ")", ")" ]
42.454545
10.545455
def statistics(self): """ Access the statistics :returns: twilio.rest.taskrouter.v1.workspace.worker.worker_statistics.WorkerStatisticsList :rtype: twilio.rest.taskrouter.v1.workspace.worker.worker_statistics.WorkerStatisticsList """ if self._statistics is None: self._statistics = WorkerStatisticsList( self._version, workspace_sid=self._solution['workspace_sid'], worker_sid=self._solution['sid'], ) return self._statistics
[ "def", "statistics", "(", "self", ")", ":", "if", "self", ".", "_statistics", "is", "None", ":", "self", ".", "_statistics", "=", "WorkerStatisticsList", "(", "self", ".", "_version", ",", "workspace_sid", "=", "self", ".", "_solution", "[", "'workspace_sid'", "]", ",", "worker_sid", "=", "self", ".", "_solution", "[", "'sid'", "]", ",", ")", "return", "self", ".", "_statistics" ]
38.642857
19.928571
def get_extended_metadata_text(self, item_id, metadata_type): """Get extended metadata text for a media item. Args: item_id (str): The item for which metadata is required metadata_type (str): The type of text to return, eg ``'ARTIST_BIO'``, or ``'ALBUM_NOTES'``. Calling `get_extended_metadata` for the item will show which extended metadata_types are available (under relatedBrowse and relatedText). Returns: str: The item's extended metadata text or None See also: The Sonos `getExtendedMetadataText API <http://musicpartners.sonos.com/node/127>`_ """ response = self.soap_client.call( 'getExtendedMetadataText', [('id', item_id), ('type', metadata_type)]) return response.get('getExtendedMetadataTextResult', None)
[ "def", "get_extended_metadata_text", "(", "self", ",", "item_id", ",", "metadata_type", ")", ":", "response", "=", "self", ".", "soap_client", ".", "call", "(", "'getExtendedMetadataText'", ",", "[", "(", "'id'", ",", "item_id", ")", ",", "(", "'type'", ",", "metadata_type", ")", "]", ")", "return", "response", ".", "get", "(", "'getExtendedMetadataTextResult'", ",", "None", ")" ]
41.714286
21.047619
def spkobj(spk, outCell=None): """ Find the set of ID codes of all objects in a specified SPK file. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkobj_c.html :param spk: Name of SPK file. :type spk: str :param outCell: Optional Spice Int Cell. :type outCell: spiceypy.utils.support_types.SpiceCell """ spk = stypes.stringToCharP(spk) if not outCell: outCell = stypes.SPICEINT_CELL(1000) assert isinstance(outCell, stypes.SpiceCell) assert outCell.dtype == 2 libspice.spkobj_c(spk, ctypes.byref(outCell)) return outCell
[ "def", "spkobj", "(", "spk", ",", "outCell", "=", "None", ")", ":", "spk", "=", "stypes", ".", "stringToCharP", "(", "spk", ")", "if", "not", "outCell", ":", "outCell", "=", "stypes", ".", "SPICEINT_CELL", "(", "1000", ")", "assert", "isinstance", "(", "outCell", ",", "stypes", ".", "SpiceCell", ")", "assert", "outCell", ".", "dtype", "==", "2", "libspice", ".", "spkobj_c", "(", "spk", ",", "ctypes", ".", "byref", "(", "outCell", ")", ")", "return", "outCell" ]
32.166667
15.611111
def _upload_in_splits( self, destination_folder_id, source_path, preflight_check, verbose = True, chunked_upload_threads = 5 ): ''' Since Box has a maximum file size limit (15 GB at time of writing), we need to split files larger than this into smaller parts, and chunk upload each part ''' file_size = os.stat(source_path).st_size split_size = BOX_MAX_FILE_SIZE # Make sure that the last split piece is still big enough for a chunked upload while file_size % split_size < BOX_MIN_CHUNK_UPLOAD_SIZE: split_size -= 1000 if split_size < BOX_MIN_CHUNK_UPLOAD_SIZE: raise Exception('Lazy programming error') split_start_byte = 0 part_count = 0 uploaded_file_ids = [] while split_start_byte < file_size: dest_file_name = '{0}.part{1}'.format( os.path.basename(source_path), part_count) prev_uploaded_file_ids = self.find_file( destination_folder_id, dest_file_name ) if len( prev_uploaded_file_ids ) == 1: if verbose: print ( '\nSkipping upload of split {0} of {1}; already exists'.format( part_count + 1, math.ceil(file_size / split_size) ) ) uploaded_file_ids.extend( prev_uploaded_file_ids ) else: if verbose: print ( '\nUploading split {0} of {1}'.format( part_count + 1, math.ceil(file_size / split_size) ) ) uploaded_file_ids.append( self._chunked_upload( destination_folder_id, source_path, dest_file_name = dest_file_name, split_start_byte = split_start_byte, file_size = min(split_size, file_size - split_start_byte), # Take the min of file_size - split_start_byte so that the last part of a split doesn't read into the next split preflight_check = preflight_check, verbose = verbose, upload_threads = chunked_upload_threads, ) ) part_count += 1 split_start_byte += split_size return uploaded_file_ids
[ "def", "_upload_in_splits", "(", "self", ",", "destination_folder_id", ",", "source_path", ",", "preflight_check", ",", "verbose", "=", "True", ",", "chunked_upload_threads", "=", "5", ")", ":", "file_size", "=", "os", ".", "stat", "(", "source_path", ")", ".", "st_size", "split_size", "=", "BOX_MAX_FILE_SIZE", "# Make sure that the last split piece is still big enough for a chunked upload", "while", "file_size", "%", "split_size", "<", "BOX_MIN_CHUNK_UPLOAD_SIZE", ":", "split_size", "-=", "1000", "if", "split_size", "<", "BOX_MIN_CHUNK_UPLOAD_SIZE", ":", "raise", "Exception", "(", "'Lazy programming error'", ")", "split_start_byte", "=", "0", "part_count", "=", "0", "uploaded_file_ids", "=", "[", "]", "while", "split_start_byte", "<", "file_size", ":", "dest_file_name", "=", "'{0}.part{1}'", ".", "format", "(", "os", ".", "path", ".", "basename", "(", "source_path", ")", ",", "part_count", ")", "prev_uploaded_file_ids", "=", "self", ".", "find_file", "(", "destination_folder_id", ",", "dest_file_name", ")", "if", "len", "(", "prev_uploaded_file_ids", ")", "==", "1", ":", "if", "verbose", ":", "print", "(", "'\\nSkipping upload of split {0} of {1}; already exists'", ".", "format", "(", "part_count", "+", "1", ",", "math", ".", "ceil", "(", "file_size", "/", "split_size", ")", ")", ")", "uploaded_file_ids", ".", "extend", "(", "prev_uploaded_file_ids", ")", "else", ":", "if", "verbose", ":", "print", "(", "'\\nUploading split {0} of {1}'", ".", "format", "(", "part_count", "+", "1", ",", "math", ".", "ceil", "(", "file_size", "/", "split_size", ")", ")", ")", "uploaded_file_ids", ".", "append", "(", "self", ".", "_chunked_upload", "(", "destination_folder_id", ",", "source_path", ",", "dest_file_name", "=", "dest_file_name", ",", "split_start_byte", "=", "split_start_byte", ",", "file_size", "=", "min", "(", "split_size", ",", "file_size", "-", "split_start_byte", ")", ",", "# Take the min of file_size - split_start_byte so that the last part of a split doesn't read into the next split", "preflight_check", "=", "preflight_check", ",", "verbose", "=", "verbose", ",", "upload_threads", "=", "chunked_upload_threads", ",", ")", ")", "part_count", "+=", "1", "split_start_byte", "+=", "split_size", "return", "uploaded_file_ids" ]
53.625
29.775
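The split-size adjustment above can be exercised in isolation; the constants here are made-up stand-ins for BOX_MAX_FILE_SIZE and BOX_MIN_CHUNK_UPLOAD_SIZE.

def plan_splits(file_size, max_split, min_chunk):
    # Shrink the split size until the final remainder is large enough
    # for a chunked upload, then list the start byte of each part.
    split = max_split
    while file_size % split < min_chunk:
        split -= 1000
    return split, list(range(0, file_size, split))

print(plan_splits(10000000, 4000000, 500000))
# (4000000, [0, 4000000, 8000000])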
def delete_all(config=None): """ Deletes all hosts from ssh config. """ storm_ = get_storm_instance(config) try: storm_.delete_all_entries() print(get_formatted_message('all entries deleted.', 'success')) except Exception as error: print(get_formatted_message(str(error), 'error'), file=sys.stderr) sys.exit(1)
[ "def", "delete_all", "(", "config", "=", "None", ")", ":", "storm_", "=", "get_storm_instance", "(", "config", ")", "try", ":", "storm_", ".", "delete_all_entries", "(", ")", "print", "(", "get_formatted_message", "(", "'all entries deleted.'", ",", "'success'", ")", ")", "except", "Exception", "as", "error", ":", "print", "(", "get_formatted_message", "(", "str", "(", "error", ")", ",", "'error'", ")", ",", "file", "=", "sys", ".", "stderr", ")", "sys", ".", "exit", "(", "1", ")" ]
29.666667
15.666667
def find_path(name, path=None, exact=False):
    """
    Search for a file or directory on your local filesystem by name
    (the file must be in a directory specified in the PATH environment
    variable)

    Args:
        name (PathLike or str): file name to match.
            If exact is False this may be a glob pattern

        path (str or Iterable[PathLike]): list of directories to search either
            specified as an os.pathsep separated string or a list of
            directories. Defaults to environment PATH.

        exact (bool): if True, only returns exact matches. Default False.

    Notes:
        For recursive behavior set `path=(d for d, _, _ in os.walk('.'))`,
        where '.' might be replaced by the root directory of interest.

    Example:
        >>> list(find_path('ping', exact=True))
        >>> list(find_path('bin'))
        >>> list(find_path('*cc*'))
        >>> list(find_path('cmake*'))

    Example:
        >>> import ubelt as ub
        >>> from os.path import dirname
        >>> path = dirname(dirname(ub.util_platform.__file__))
        >>> res = sorted(find_path('ubelt/util_*.py', path=path))
        >>> assert len(res) >= 10
        >>> res = sorted(find_path('ubelt/util_platform.py', path=path, exact=True))
        >>> print(res)
        >>> assert len(res) == 1
    """
    path = os.environ.get('PATH', os.defpath) if path is None else path
    dpaths = path.split(os.pathsep) if isinstance(path, six.string_types) else path
    candidates = (join(dpath, name) for dpath in dpaths)
    if exact:
        if WIN32:  # nocover
            pathext = [''] + os.environ.get('PATHEXT', '').split(os.pathsep)
            candidates = (p + ext for p in candidates for ext in pathext)
        candidates = filter(exists, candidates)
    else:
        import glob
        candidates = it.chain.from_iterable(
            glob.glob(pattern) for pattern in candidates)

    return candidates
[ "def", "find_path", "(", "name", ",", "path", "=", "None", ",", "exact", "=", "False", ")", ":", "path", "=", "os", ".", "environ", ".", "get", "(", "'PATH'", ",", "os", ".", "defpath", ")", "if", "path", "is", "None", "else", "path", "dpaths", "=", "path", ".", "split", "(", "os", ".", "pathsep", ")", "if", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", "else", "path", "candidates", "=", "(", "join", "(", "dpath", ",", "name", ")", "for", "dpath", "in", "dpaths", ")", "if", "exact", ":", "if", "WIN32", ":", "# nocover", "pathext", "=", "[", "''", "]", "+", "os", ".", "environ", ".", "get", "(", "'PATHEXT'", ",", "''", ")", ".", "split", "(", "os", ".", "pathsep", ")", "candidates", "=", "(", "p", "+", "ext", "for", "p", "in", "candidates", "for", "ext", "in", "pathext", ")", "candidates", "=", "filter", "(", "exists", ",", "candidates", ")", "else", ":", "import", "glob", "candidates", "=", "it", ".", "chain", ".", "from_iterable", "(", "glob", ".", "glob", "(", "pattern", ")", "for", "pattern", "in", "candidates", ")", "return", "candidates" ]
39.163265
22.183673
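A self-contained, exact-match variant for quick experimentation (POSIX paths assumed; the real function above additionally supports glob patterns and Windows PATHEXT):

import os
from os.path import join, exists

def find_exact(name, path=None):
    # Yield every existing PATH entry joined with `name`.
    path = os.environ.get('PATH', os.defpath) if path is None else path
    for dpath in path.split(os.pathsep):
        candidate = join(dpath, name)
        if exists(candidate):
            yield candidate

print(list(find_exact('sh')))  # e.g. ['/bin/sh', '/usr/bin/sh']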
def repeat(sequence):
    ''' Return a driver function that can advance a repeated sequence of values.

    .. code-block:: none

        seq = [0, 1, 2, 3]

        # repeat(seq) => [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, ...]

    Args:
        sequence (seq) : a sequence of values for the driver to repeat

    '''
    N = len(sequence)
    def f(i):
        return sequence[i%N]
    return partial(force, sequence=_advance(f))
[ "def", "repeat", "(", "sequence", ")", ":", "N", "=", "len", "(", "sequence", ")", "def", "f", "(", "i", ")", ":", "return", "sequence", "[", "i", "%", "N", "]", "return", "partial", "(", "force", ",", "sequence", "=", "_advance", "(", "f", ")", ")" ]
23.352941
27
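The cycling behavior reduces to index-modulo-length; here is a dependency-free sketch (the `force`/`_advance` plumbing from the driver framework above is omitted, so this only illustrates the indexing):

def repeat_fn(sequence):
    # Map a monotonically increasing step index onto a cycled sequence.
    n = len(sequence)
    return lambda i: sequence[i % n]

f = repeat_fn([0, 1, 2, 3])
print([f(i) for i in range(10)])  # [0, 1, 2, 3, 0, 1, 2, 3, 0, 1]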
def print_summary(self, stream=sys.stdout, indent="", recurse_level=2): """Print a summary of the activity done by this `Link`. Parameters ---------- stream : `file` Stream to print to indent : str Indentation at start of line recurse_level : int Number of recursion levels to print """ Link.print_summary(self, stream, indent, recurse_level) if recurse_level > 0: recurse_level -= 1 indent += " " stream.write("\n") self._scatter_link.print_summary(stream, indent, recurse_level)
[ "def", "print_summary", "(", "self", ",", "stream", "=", "sys", ".", "stdout", ",", "indent", "=", "\"\"", ",", "recurse_level", "=", "2", ")", ":", "Link", ".", "print_summary", "(", "self", ",", "stream", ",", "indent", ",", "recurse_level", ")", "if", "recurse_level", ">", "0", ":", "recurse_level", "-=", "1", "indent", "+=", "\" \"", "stream", ".", "write", "(", "\"\\n\"", ")", "self", ".", "_scatter_link", ".", "print_summary", "(", "stream", ",", "indent", ",", "recurse_level", ")" ]
29.571429
19.285714
def list_platforms(server_url): ''' To list all ASAM platforms present on the Novell Fan-Out Driver CLI Example: .. code-block:: bash salt-run asam.list_platforms prov1.domain.com ''' config = _get_asam_configuration(server_url) if not config: return False url = config['platform_config_url'] data = { 'manual': 'false', } auth = ( config['username'], config['password'] ) try: html_content = _make_post_request(url, data, auth, verify=False) except Exception as exc: err_msg = "Failed to look up existing platforms" log.error('%s:\n%s', err_msg, exc) return {server_url: err_msg} parser = _parse_html_content(html_content) platform_list = _get_platforms(parser.data) if platform_list: return {server_url: platform_list} else: return {server_url: "No existing platforms found"}
[ "def", "list_platforms", "(", "server_url", ")", ":", "config", "=", "_get_asam_configuration", "(", "server_url", ")", "if", "not", "config", ":", "return", "False", "url", "=", "config", "[", "'platform_config_url'", "]", "data", "=", "{", "'manual'", ":", "'false'", ",", "}", "auth", "=", "(", "config", "[", "'username'", "]", ",", "config", "[", "'password'", "]", ")", "try", ":", "html_content", "=", "_make_post_request", "(", "url", ",", "data", ",", "auth", ",", "verify", "=", "False", ")", "except", "Exception", "as", "exc", ":", "err_msg", "=", "\"Failed to look up existing platforms\"", "log", ".", "error", "(", "'%s:\\n%s'", ",", "err_msg", ",", "exc", ")", "return", "{", "server_url", ":", "err_msg", "}", "parser", "=", "_parse_html_content", "(", "html_content", ")", "platform_list", "=", "_get_platforms", "(", "parser", ".", "data", ")", "if", "platform_list", ":", "return", "{", "server_url", ":", "platform_list", "}", "else", ":", "return", "{", "server_url", ":", "\"No existing platforms found\"", "}" ]
23.282051
23.435897
def _add_listeners ( self ): """ Adds the event listeners for a specified object. """ object = self.value canvas = self.factory.canvas if canvas is not None: for name in canvas.node_children: object.on_trait_change(self._nodes_replaced, name) object.on_trait_change(self._nodes_changed, name + "_items") for name in canvas.edge_children: object.on_trait_change(self._edges_replaced, name) object.on_trait_change(self._edges_changed, name + "_items") else: raise ValueError("Graph canvas not set for graph editor.")
[ "def", "_add_listeners", "(", "self", ")", ":", "object", "=", "self", ".", "value", "canvas", "=", "self", ".", "factory", ".", "canvas", "if", "canvas", "is", "not", "None", ":", "for", "name", "in", "canvas", ".", "node_children", ":", "object", ".", "on_trait_change", "(", "self", ".", "_nodes_replaced", ",", "name", ")", "object", ".", "on_trait_change", "(", "self", ".", "_nodes_changed", ",", "name", "+", "\"_items\"", ")", "for", "name", "in", "canvas", ".", "edge_children", ":", "object", ".", "on_trait_change", "(", "self", ".", "_edges_replaced", ",", "name", ")", "object", ".", "on_trait_change", "(", "self", ".", "_edges_changed", ",", "name", "+", "\"_items\"", ")", "else", ":", "raise", "ValueError", "(", "\"Graph canvas not set for graph editor.\"", ")" ]
43.266667
18
def timedelta2millisecond(td): """Get milliseconds from a timedelta.""" milliseconds = td.days * 24 * 60 * 60 * 1000 milliseconds += td.seconds * 1000 milliseconds += td.microseconds / 1000 return milliseconds
[ "def", "timedelta2millisecond", "(", "td", ")", ":", "milliseconds", "=", "td", ".", "days", "*", "24", "*", "60", "*", "60", "*", "1000", "milliseconds", "+=", "td", ".", "seconds", "*", "1000", "milliseconds", "+=", "td", ".", "microseconds", "/", "1000", "return", "milliseconds" ]
37.333333
6.666667
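A quick sanity check of the conversion above, restated compactly (note that with true division the result is a float):

from datetime import timedelta

def timedelta2millisecond(td):
    # days -> ms, seconds -> ms, microseconds -> ms
    return td.days * 24 * 60 * 60 * 1000 + td.seconds * 1000 + td.microseconds / 1000

print(timedelta2millisecond(timedelta(days=1, seconds=2, microseconds=3000)))
# 86402003.0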
def _pull_status(data, item):
    '''
    Process a status update from a docker pull, updating the data structure.

    For containers created with older versions of Docker, there is no
    distinction in the status updates between layers that were already present
    (and thus not necessary to download), and those which were actually
    downloaded. Because of this, any function that needs to invoke this
    function needs to pre-fetch the image IDs by running _prep_pull() in any
    function that calls _pull_status(). It is important to grab this
    information before anything is pulled so we aren't looking at the state of
    the images post-pull.

    We can't rely on the way that __context__ is utilized by the images()
    function, because by design we clear the relevant context variables once
    we've made changes to allow the next call to images() to pick up any
    changes that were made.
    '''
    def _already_exists(id_):
        '''
        Layer already exists
        '''
        already_pulled = data.setdefault('Layers', {}).setdefault(
            'Already_Pulled', [])
        if id_ not in already_pulled:
            already_pulled.append(id_)

    def _new_layer(id_):
        '''
        Pulled a new layer
        '''
        pulled = data.setdefault('Layers', {}).setdefault(
            'Pulled', [])
        if id_ not in pulled:
            pulled.append(id_)

    if 'docker._pull_status' not in __context__:
        log.warning(
            '_pull_status context variable was not populated, information on '
            'downloaded layers may be inaccurate. Please report this to the '
            'SaltStack development team, and if possible include the image '
            '(and tag) that was being pulled.'
        )
        __context__['docker._pull_status'] = NOTSET
    status = item['status']
    if status == 'Already exists':
        _already_exists(item['id'])
    elif status == 'Pull complete':
        _new_layer(item['id'])
    elif status.startswith('Status: '):
        data['Status'] = status[8:]
    elif status == 'Download complete':
        if __context__['docker._pull_status'] is not NOTSET:
            id_ = item['id']
            if id_ in __context__['docker._pull_status']:
                _already_exists(id_)
            else:
                _new_layer(id_)
[ "def", "_pull_status", "(", "data", ",", "item", ")", ":", "def", "_already_exists", "(", "id_", ")", ":", "'''\n Layer already exists\n '''", "already_pulled", "=", "data", ".", "setdefault", "(", "'Layers'", ",", "{", "}", ")", ".", "setdefault", "(", "'Already_Pulled'", ",", "[", "]", ")", "if", "id_", "not", "in", "already_pulled", ":", "already_pulled", ".", "append", "(", "id_", ")", "def", "_new_layer", "(", "id_", ")", ":", "'''\n Pulled a new layer\n '''", "pulled", "=", "data", ".", "setdefault", "(", "'Layers'", ",", "{", "}", ")", ".", "setdefault", "(", "'Pulled'", ",", "[", "]", ")", "if", "id_", "not", "in", "pulled", ":", "pulled", ".", "append", "(", "id_", ")", "if", "'docker._pull_status'", "not", "in", "__context__", ":", "log", ".", "warning", "(", "'_pull_status context variable was not populated, information on '", "'downloaded layers may be inaccurate. Please report this to the '", "'SaltStack development team, and if possible include the image '", "'(and tag) that was being pulled.'", ")", "__context__", "[", "'docker._pull_status'", "]", "=", "NOTSET", "status", "=", "item", "[", "'status'", "]", "if", "status", "==", "'Already exists'", ":", "_already_exists", "(", "item", "[", "'id'", "]", ")", "elif", "status", "in", "'Pull complete'", ":", "_new_layer", "(", "item", "[", "'id'", "]", ")", "elif", "status", ".", "startswith", "(", "'Status: '", ")", ":", "data", "[", "'Status'", "]", "=", "status", "[", "8", ":", "]", "elif", "status", "==", "'Download complete'", ":", "if", "__context__", "[", "'docker._pull_status'", "]", "is", "not", "NOTSET", ":", "id_", "=", "item", "[", "'id'", "]", "if", "id_", "in", "__context__", "[", "'docker._pull_status'", "]", ":", "_already_exists", "(", "id_", ")", "else", ":", "_new_layer", "(", "id_", ")" ]
39.275862
20.896552
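The bookkeeping can be demonstrated without Docker or Salt; this toy driver feeds synthetic status events through the same branch structure (the `__context__` machinery is deliberately omitted):

def classify_events(events):
    # Split layer ids into already-present vs newly pulled,
    # mirroring _pull_status on a local dict.
    data = {}
    for item in events:
        layers = data.setdefault('Layers', {'Already_Pulled': [], 'Pulled': []})
        if item['status'] == 'Already exists':
            layers['Already_Pulled'].append(item['id'])
        elif item['status'] == 'Pull complete':
            layers['Pulled'].append(item['id'])
        elif item['status'].startswith('Status: '):
            data['Status'] = item['status'][8:]
    return data

events = [{'id': 'aaa', 'status': 'Already exists'},
          {'id': 'bbb', 'status': 'Pull complete'},
          {'id': '', 'status': 'Status: Downloaded newer image'}]
print(classify_events(events))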
def make_auto_deployable(self, stage, swagger=None):
    """
    Sets up the resource such that it will trigger a re-deployment when Swagger changes

    :param swagger: Dictionary containing the Swagger definition of the API
    """
    if not swagger:
        return

    # CloudFormation does NOT redeploy the API unless it has a new deployment resource
    # that points to latest RestApi resource. Append a hash of Swagger Body location to
    # redeploy only when the API data changes. First 10 characters of hash is good enough
    # to prevent redeployment when API has not changed

    # NOTE: `str(swagger)` is for backwards compatibility. Changing it to a JSON or something will break compat
    generator = logical_id_generator.LogicalIdGenerator(self.logical_id, str(swagger))
    self.logical_id = generator.gen()
    hash = generator.get_hash(length=40)  # Get the full hash
    self.Description = "RestApi deployment id: {}".format(hash)
    stage.update_deployment_ref(self.logical_id)
[ "def", "make_auto_deployable", "(", "self", ",", "stage", ",", "swagger", "=", "None", ")", ":", "if", "not", "swagger", ":", "return", "# CloudFormation does NOT redeploy the API unless it has a new deployment resource", "# that points to latest RestApi resource. Append a hash of Swagger Body location to", "# redeploy only when the API data changes. First 10 characters of hash is good enough", "# to prevent redeployment when API has not changed", "# NOTE: `str(swagger)` is for backwards compatibility. Changing it to a JSON or something will break compat", "generator", "=", "logical_id_generator", ".", "LogicalIdGenerator", "(", "self", ".", "logical_id", ",", "str", "(", "swagger", ")", ")", "self", ".", "logical_id", "=", "generator", ".", "gen", "(", ")", "hash", "=", "generator", ".", "get_hash", "(", "length", "=", "40", ")", "# Get the full hash", "self", ".", "Description", "=", "\"RestApi deployment id: {}\"", ".", "format", "(", "hash", ")", "stage", ".", "update_deployment_ref", "(", "self", ".", "logical_id", ")" ]
52.4
31.2
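The underlying trick is just a content hash folded into a resource name; a rough, framework-free sketch (LogicalIdGenerator's real hashing scheme may differ):

import hashlib

def deployment_logical_id(base_id, swagger):
    # Tie the deployment's logical id to a digest of the stringified
    # Swagger body, so any body change produces a new resource id.
    digest = hashlib.sha1(str(swagger).encode('utf-8')).hexdigest()
    return '{}{}'.format(base_id, digest[:10].upper())

print(deployment_logical_id('Deployment', {'paths': {'/': {}}}))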
def time_termination(population, num_generations, num_evaluations, args): """Return True if the elapsed time meets or exceeds a duration of time. This function compares the elapsed time with a specified maximum. It returns True if the maximum is met or exceeded. If the `start_time` keyword argument is omitted, it defaults to `None` and will be set to the current system time (in seconds). If the `max_time` keyword argument is omitted, it will default to `None` and will immediately terminate. The `max_time` argument can be specified in seconds as a floating-point number, as minutes/seconds as a two-element tuple of floating-point numbers, or as hours/minutes/seconds as a three-element tuple of floating-point numbers. .. Arguments: population -- the population of Individuals num_generations -- the number of elapsed generations num_evaluations -- the number of candidate solution evaluations args -- a dictionary of keyword arguments Optional keyword arguments in args: - *start_time* -- the time from which to start measuring (default None) - *max_time* -- the maximum time that should elapse (default None) """ start_time = args.setdefault('start_time', None) max_time = args.setdefault('max_time', None) logging = args.get('_ec').logger if start_time is None: start_time = time.time() args['start_time'] = start_time logging.debug('time_termination terminator added without setting the start_time argument; setting start_time to current time') if max_time is None: logging.debug('time_termination terminator added without setting the max_time argument; terminator will immediately terminate') else: try: max_time = max_time[0] * 3600.0 + max_time[1] * 60.00 + max_time[2] args['max_time'] = max_time except TypeError: pass except IndexError: max_time = max_time[0] * 60 + max_time[1] args['max_time'] = max_time time_elapsed = time.time() - start_time return max_time is None or time_elapsed >= max_time
[ "def", "time_termination", "(", "population", ",", "num_generations", ",", "num_evaluations", ",", "args", ")", ":", "start_time", "=", "args", ".", "setdefault", "(", "'start_time'", ",", "None", ")", "max_time", "=", "args", ".", "setdefault", "(", "'max_time'", ",", "None", ")", "logging", "=", "args", ".", "get", "(", "'_ec'", ")", ".", "logger", "if", "start_time", "is", "None", ":", "start_time", "=", "time", ".", "time", "(", ")", "args", "[", "'start_time'", "]", "=", "start_time", "logging", ".", "debug", "(", "'time_termination terminator added without setting the start_time argument; setting start_time to current time'", ")", "if", "max_time", "is", "None", ":", "logging", ".", "debug", "(", "'time_termination terminator added without setting the max_time argument; terminator will immediately terminate'", ")", "else", ":", "try", ":", "max_time", "=", "max_time", "[", "0", "]", "*", "3600.0", "+", "max_time", "[", "1", "]", "*", "60.00", "+", "max_time", "[", "2", "]", "args", "[", "'max_time'", "]", "=", "max_time", "except", "TypeError", ":", "pass", "except", "IndexError", ":", "max_time", "=", "max_time", "[", "0", "]", "*", "60", "+", "max_time", "[", "1", "]", "args", "[", "'max_time'", "]", "=", "max_time", "time_elapsed", "=", "time", ".", "time", "(", ")", "-", "start_time", "return", "max_time", "is", "None", "or", "time_elapsed", ">=", "max_time" ]
46.369565
24.326087
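Normalizing the three accepted max_time forms is the fiddly part; a standalone sketch of just that conversion, using unpacking instead of the indexing in the original:

def to_seconds(max_time):
    # Accept float seconds, a (minutes, seconds) pair, or an
    # (hours, minutes, seconds) triple, as time_termination does.
    try:
        hours, minutes, seconds = max_time
        return hours * 3600.0 + minutes * 60.0 + seconds
    except TypeError:
        return float(max_time)
    except ValueError:
        minutes, seconds = max_time
        return minutes * 60.0 + seconds

print(to_seconds(90), to_seconds((1, 30)), to_seconds((0, 1, 30)))
# 90.0 90.0 90.0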
def preprovision_rbridge_id_rbridge_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") preprovision = ET.SubElement(config, "preprovision", xmlns="urn:brocade.com:mgmt:brocade-preprovision") rbridge_id = ET.SubElement(preprovision, "rbridge-id") wwn_key = ET.SubElement(rbridge_id, "wwn") wwn_key.text = kwargs.pop('wwn') rbridge_id = ET.SubElement(rbridge_id, "rbridge-id") rbridge_id.text = kwargs.pop('rbridge_id') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "preprovision_rbridge_id_rbridge_id", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "preprovision", "=", "ET", ".", "SubElement", "(", "config", ",", "\"preprovision\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-preprovision\"", ")", "rbridge_id", "=", "ET", ".", "SubElement", "(", "preprovision", ",", "\"rbridge-id\"", ")", "wwn_key", "=", "ET", ".", "SubElement", "(", "rbridge_id", ",", "\"wwn\"", ")", "wwn_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'wwn'", ")", "rbridge_id", "=", "ET", ".", "SubElement", "(", "rbridge_id", ",", "\"rbridge-id\"", ")", "rbridge_id", ".", "text", "=", "kwargs", ".", "pop", "(", "'rbridge_id'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
45.692308
16.692308
def wash_for_js(text): """ DEPRECATED: use htmlutils.escape_javascript_string() instead, and take note that returned value is no longer enclosed into quotes. """ from invenio_utils.html import escape_javascript_string if isinstance(text, six.string_types): return '"%s"' % escape_javascript_string( text, escape_for_html=False, escape_CDATA=False, escape_script_tag_with_quote=None) else: return text
[ "def", "wash_for_js", "(", "text", ")", ":", "from", "invenio_utils", ".", "html", "import", "escape_javascript_string", "if", "isinstance", "(", "text", ",", "six", ".", "string_types", ")", ":", "return", "'\"%s\"'", "%", "escape_javascript_string", "(", "text", ",", "escape_for_html", "=", "False", ",", "escape_CDATA", "=", "False", ",", "escape_script_tag_with_quote", "=", "None", ")", "else", ":", "return", "text" ]
32.133333
14.8
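A minimal stand-in for escape_javascript_string, just to show the quoting behavior (the real invenio helper also handles HTML, CDATA, and script-tag escaping):

def js_quote(text):
    # Backslash-escape backslashes and double quotes, then wrap
    # the result in double quotes, as wash_for_js does.
    return '"%s"' % text.replace('\\', '\\\\').replace('"', '\\"')

print(js_quote('say "hi"'))  # "say \"hi\""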
def set_settings(self, releases=None, default_release=None): """set path to storage""" super(ShardedClusters, self).set_settings(releases, default_release) ReplicaSets().set_settings(releases, default_release)
[ "def", "set_settings", "(", "self", ",", "releases", "=", "None", ",", "default_release", "=", "None", ")", ":", "super", "(", "ShardedClusters", ",", "self", ")", ".", "set_settings", "(", "releases", ",", "default_release", ")", "ReplicaSets", "(", ")", ".", "set_settings", "(", "releases", ",", "default_release", ")" ]
57.5
19.25
def _linux_nqn(): ''' Return NVMe NQN from a Linux host. ''' ret = [] initiator = '/etc/nvme/hostnqn' try: with salt.utils.files.fopen(initiator, 'r') as _nvme: for line in _nvme: line = line.strip() if line.startswith('nqn.'): ret.append(line) except IOError as ex: if ex.errno != errno.ENOENT: log.debug("Error while accessing '%s': %s", initiator, ex) return ret
[ "def", "_linux_nqn", "(", ")", ":", "ret", "=", "[", "]", "initiator", "=", "'/etc/nvme/hostnqn'", "try", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "initiator", ",", "'r'", ")", "as", "_nvme", ":", "for", "line", "in", "_nvme", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "line", ".", "startswith", "(", "'nqn.'", ")", ":", "ret", ".", "append", "(", "line", ")", "except", "IOError", "as", "ex", ":", "if", "ex", ".", "errno", "!=", "errno", ".", "ENOENT", ":", "log", ".", "debug", "(", "\"Error while accessing '%s': %s\"", ",", "initiator", ",", "ex", ")", "return", "ret" ]
26.333333
19.666667
def message(subject, message, access_token, all_members=False,
            project_member_ids=None, base_url=OH_BASE_URL):
    """
    Send an email to individual users or in bulk. To learn more about Open
    Humans OAuth2 projects, go to:
    https://www.openhumans.org/direct-sharing/oauth2-features/

    :param subject: This field is the subject of the email.
    :param message: This field is the body of the email.
    :param access_token: This is the user-specific access token/master token.
    :param all_members: This is a boolean field to send email to all members
        of the project.
    :param project_member_ids: This field is the list of project member IDs.
    :param base_url: It defaults to `https://www.openhumans.org`.
    """
    url = urlparse.urljoin(
        base_url, '/api/direct-sharing/project/message/?{}'.format(
            urlparse.urlencode({'access_token': access_token})))
    if not(all_members) and not(project_member_ids):
        response = requests.post(url, data={'subject': subject,
                                            'message': message})
        handle_error(response, 200)
        return response
    elif all_members and project_member_ids:
        raise ValueError(
            "One (and only one) of the following must be specified: "
            "project_members_id or all_members is set to True.")
    else:
        r = requests.post(url, data={'all_members': all_members,
                                     'project_member_ids': project_member_ids,
                                     'subject': subject,
                                     'message': message})
        handle_error(r, 200)
        return r
[ "def", "message", "(", "subject", ",", "message", ",", "access_token", ",", "all_members", "=", "False", ",", "project_member_ids", "=", "None", ",", "base_url", "=", "OH_BASE_URL", ")", ":", "url", "=", "urlparse", ".", "urljoin", "(", "base_url", ",", "'/api/direct-sharing/project/message/?{}'", ".", "format", "(", "urlparse", ".", "urlencode", "(", "{", "'access_token'", ":", "access_token", "}", ")", ")", ")", "if", "not", "(", "all_members", ")", "and", "not", "(", "project_member_ids", ")", ":", "response", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "{", "'subject'", ":", "subject", ",", "'message'", ":", "message", "}", ")", "handle_error", "(", "response", ",", "200", ")", "return", "response", "elif", "all_members", "and", "project_member_ids", ":", "raise", "ValueError", "(", "\"One (and only one) of the following must be specified: \"", "\"project_members_id or all_members is set to True.\"", ")", "else", ":", "r", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "{", "'all_members'", ":", "all_members", ",", "'project_member_ids'", ":", "project_member_ids", ",", "'subject'", ":", "subject", ",", "'message'", ":", "message", "}", ")", "handle_error", "(", "r", ",", "200", ")", "return", "r" ]
48.147059
20.852941
def default(self, obj):
    """This is slightly different than json.JSONEncoder.default(obj) in that
    it should return the serialized representation of the passed object, not
    a serializable representation.
    """
    if isinstance(obj, (datetime.date, datetime.time, datetime.datetime)):
        return '"%s"' % obj.isoformat()
    elif isinstance(obj, unicode):
        return '"%s"' % unicodedata.normalize('NFD', obj).encode('utf-8')
    elif isinstance(obj, decimal.Decimal):
        return str(obj)
    return super(Encoder, self).default(obj)
[ "def", "default", "(", "self", ",", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "datetime", ".", "date", ",", "datetime", ".", "time", ",", "datetime", ".", "datetime", ")", ")", ":", "return", "'\"%s\"'", "%", "obj", ".", "isoformat", "(", ")", "elif", "isinstance", "(", "obj", ",", "unicode", ")", ":", "return", "'\"%s\"'", "%", "unicodedata", ".", "normalize", "(", "'NFD'", ",", "obj", ")", ".", "encode", "(", "'utf-8'", ")", "elif", "isinstance", "(", "obj", ",", "decimal", ".", "Decimal", ")", ":", "return", "str", "(", "obj", ")", "return", "super", "(", "Encoder", ",", "self", ")", ".", "default", "(", "obj", ")" ]
49.25
14.333333
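For comparison, the stock Python 3 pattern returns serializable values and lets json do the quoting; a runnable sketch (the record above is Python 2 code with its own pre-quoted convention):

import datetime
import decimal
import json

class ISOEncoder(json.JSONEncoder):
    # Return *serializable* values; json.dumps handles the quoting.
    def default(self, obj):
        if isinstance(obj, (datetime.date, datetime.time, datetime.datetime)):
            return obj.isoformat()
        if isinstance(obj, decimal.Decimal):
            return str(obj)
        return super().default(obj)

print(json.dumps({'when': datetime.date(2020, 1, 2),
                  'x': decimal.Decimal('1.5')}, cls=ISOEncoder))
# {"when": "2020-01-02", "x": "1.5"}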
def is_valid(self):
    # type: () -> Union[bool, None]
    """Determine if the value in the property is valid.

    If the value of the property is validated as 'valid', this returns True,
    otherwise False. When no validators are configured, returns None. It
    checks against all configured validators and returns a single boolean
    outcome.

    :returns: True when the :ivar:`value` is valid
    :rtype: bool or None
    """
    if not hasattr(self, '_validators'):
        return None
    else:
        self.validate(reason=False)
        if all([vr is None for vr in self._validation_results]):
            return None
        else:
            return all(self._validation_results)
[ "def", "is_valid", "(", "self", ")", ":", "# type: () -> Union[bool, None]", "if", "not", "hasattr", "(", "self", ",", "'_validators'", ")", ":", "return", "None", "else", ":", "self", ".", "validate", "(", "reason", "=", "False", ")", "if", "all", "(", "[", "vr", "is", "None", "for", "vr", "in", "self", ".", "_validation_results", "]", ")", ":", "return", "None", "else", ":", "return", "all", "(", "self", ".", "_validation_results", ")" ]
39
20.052632
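The three-valued outcome (True / False / None) can be isolated for testing; a small stand-alone analogue of the decision logic:

def tri_valid(results):
    # None when no validator produced a verdict; otherwise the
    # conjunction of all validation results.
    if all(r is None for r in results):
        return None
    return all(results)

print(tri_valid([None, None]), tri_valid([True, True]), tri_valid([True, False]))
# None True False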
def _find_zero(cpu, constrs, ptr):
    """
    Helper for finding the closest NULL (or effectively NULL) byte from a
    starting address.

    :param Cpu cpu:
    :param ConstraintSet constrs: Constraints for current `State`
    :param int ptr: Address to start searching for a zero from
    :return: Offset from `ptr` to first byte that is 0 or an `Expression` that must be zero
    """
    offset = 0
    while True:
        byt = cpu.read_int(ptr + offset, 8)

        if issymbolic(byt):
            if not solver.can_be_true(constrs, byt != 0):
                break
        else:
            if byt == 0:
                break

        offset += 1

    return offset
[ "def", "_find_zero", "(", "cpu", ",", "constrs", ",", "ptr", ")", ":", "offset", "=", "0", "while", "True", ":", "byt", "=", "cpu", ".", "read_int", "(", "ptr", "+", "offset", ",", "8", ")", "if", "issymbolic", "(", "byt", ")", ":", "if", "not", "solver", ".", "can_be_true", "(", "constrs", ",", "byt", "!=", "0", ")", ":", "break", "else", ":", "if", "byt", "==", "0", ":", "break", "offset", "+=", "1", "return", "offset" ]
26.916667
24.333333
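Stripped of the symbolic-execution machinery, the scan is a plain strlen-style loop; a concrete-only illustration:

def find_zero_concrete(mem, ptr):
    # Walk forward from `ptr` until the first zero byte.
    offset = 0
    while mem[ptr + offset] != 0:
        offset += 1
    return offset

buf = b"hello\x00world"
print(find_zero_concrete(buf, 0))  # 5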
def AgregarUbicacionTambo(self, latitud, longitud, domicilio,
                          cod_localidad, cod_provincia, codigo_postal,
                          nombre_partido_depto, **kwargs):
    "Add the producer's data to the settlement (liq.)."
    ubic_tambo = {'latitud': latitud,
                  'longitud': longitud,
                  'domicilio': domicilio,
                  'codLocalidad': cod_localidad,
                  'codProvincia': cod_provincia,
                  'nombrePartidoDepto': nombre_partido_depto,
                  'codigoPostal': codigo_postal}
    self.solicitud['tambo']['ubicacionTambo'] = ubic_tambo
    return True
[ "def", "AgregarUbicacionTambo", "(", "self", ",", "latitud", ",", "longitud", ",", "domicilio", ",", "cod_localidad", ",", "cod_provincia", ",", "codigo_postal", ",", "nombre_partido_depto", ",", "*", "*", "kwargs", ")", ":", "ubic_tambo", "=", "{", "'latitud'", ":", "latitud", ",", "'longitud'", ":", "longitud", ",", "'domicilio'", ":", "domicilio", ",", "'codLocalidad'", ":", "cod_localidad", ",", "'codProvincia'", ":", "cod_provincia", ",", "'nombrePartidoDepto'", ":", "nombre_partido_depto", ",", "'codigoPostal'", ":", "codigo_postal", "}", "self", ".", "solicitud", "[", "'tambo'", "]", "[", "'ubicacionTambo'", "]", "=", "ubic_tambo", "return", "True" ]
52.230769
15.461538
def clear_port_stats(self): """ Clear only port stats (leave stream and packet group stats). Do not use - still working with Ixia to resolve. """ stat = IxeStat(self) stat.ix_set_default() stat.enableValidStats = True stat.ix_set() stat.write()
[ "def", "clear_port_stats", "(", "self", ")", ":", "stat", "=", "IxeStat", "(", "self", ")", "stat", ".", "ix_set_default", "(", ")", "stat", ".", "enableValidStats", "=", "True", "stat", ".", "ix_set", "(", ")", "stat", ".", "write", "(", ")" ]
30
13.5
def to_dict(obj): """ Convert an instance of an object into a dict. """ d = _to_json_type(obj) if isinstance(d, dict): return scrub_dict(d) else: raise ValueError("The value provided must be an object.")
[ "def", "to_dict", "(", "obj", ")", ":", "d", "=", "_to_json_type", "(", "obj", ")", "if", "isinstance", "(", "d", ",", "dict", ")", ":", "return", "scrub_dict", "(", "d", ")", "else", ":", "raise", "ValueError", "(", "\"The value provided must be an object.\"", ")" ]
29
14.75
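A runnable illustration with hypothetical stand-ins for the unshown helpers `_to_json_type` and `scrub_dict` (their real implementations are assumed, not shown above):

def _to_json_type(obj):
    # Assume plain objects expose their data via __dict__.
    return dict(getattr(obj, '__dict__', obj))

def scrub_dict(d):
    # Drop keys whose values are None.
    return {k: v for k, v in d.items() if v is not None}

def to_dict(obj):
    d = _to_json_type(obj)
    if isinstance(d, dict):
        return scrub_dict(d)
    raise ValueError("The value provided must be an object.")

class Point:
    def __init__(self):
        self.x = 1
        self.y = None

print(to_dict(Point()))  # {'x': 1}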