Dataset schema (each row below: text, code_tokens, avg_line_len, score):

    text           string    lengths 89 to 104k
    code_tokens    list      token sequence of text
    avg_line_len   float64   7.91 to 980
    score          float64   0 to 630
def addFailure(self, result):
    """Add a failure to the result."""
    result.addFailure(self, (Exception, Exception(), None))
    # Since TAP will not provide assertion data, clean up the assertion
    # section so it is not so spaced out.
    test, err = result.failures[-1]
    result.failures[-1] = (test, "")
avg_line_len: 47.571429 | score: 10.714286
def run_iqtree(phy, model, threads, cluster, node):
    """
    run IQ-Tree
    """
    # set ppn based on threads
    if threads > 24:
        ppn = 24
    else:
        ppn = threads
    tree = '%s.treefile' % (phy)
    if check(tree) is False:
        if model is False:
            model = 'TEST'
        dir = os.getcwd()
        command = 'iqtree-omp -s %s -m %s -nt %s -quiet' % \
            (phy, model, threads)
        if cluster is False:
            p = Popen(command, shell=True)
        else:
            if node is False:
                node = '1'
            qsub = 'qsub -l nodes=%s:ppn=%s -m e -N iqtree' % (node, ppn)
            command = 'cd /tmp; mkdir iqtree; cd iqtree; cp %s/%s .; %s; mv * %s/; rm -r ../iqtree' \
                % (dir, phy, command, dir)
            re_call = 'cd %s; %s --no-fast --iq' % (dir.rsplit('/', 1)[0], ' '.join(sys.argv))
            p = Popen('echo "%s;%s" | %s' % (command, re_call, qsub), shell=True)
        p.communicate()
    return tree
avg_line_len: 35.285714 | score: 19
def get_error_details(self):
    # type: () -> Optional[Dict[str, Any]]
    """ Get more information about the latest X server error. """
    details = {}  # type: Dict[str, Any]
    if ERROR.details:
        details = {"xerror_details": ERROR.details}
        ERROR.details = None
        xserver_error = ctypes.create_string_buffer(1024)
        self.xlib.XGetErrorText(
            MSS.display,
            details.get("xerror_details", {}).get("error_code", 0),
            xserver_error,
            len(xserver_error),
        )
        xerror = xserver_error.value.decode("utf-8")
        if xerror != "0":
            details["xerror"] = xerror
    return details
avg_line_len: 34.380952 | score: 16.047619
def file_flags(self):
    """Return the file flags attribute of the BFD file being processed."""
    if not self._ptr:
        raise BfdException("BFD not initialized")
    return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FILE_FLAGS)
avg_line_len: 41.833333 | score: 20.166667
def zoning_defined_configuration_alias_alias_name(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    zoning = ET.SubElement(config, "zoning", xmlns="urn:brocade.com:mgmt:brocade-zone")
    defined_configuration = ET.SubElement(zoning, "defined-configuration")
    alias = ET.SubElement(defined_configuration, "alias")
    alias_name = ET.SubElement(alias, "alias-name")
    alias_name.text = kwargs.pop('alias_name')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
avg_line_len: 47.25 | score: 19.166667
def snake(s):
    """Convert from title or camelCase to snake_case."""
    if len(s) < 2:
        return s.lower()
    out = s[0].lower()
    for c in s[1:]:
        if c.isupper():
            out += "_"
            c = c.lower()
        out += c
    return out
avg_line_len: 22.909091 | score: 18.545455
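A quick usage sketch for the snake() row above; the inputs are illustrative and not part of the dataset:

# illustrative calls (hypothetical inputs)
assert snake("camelCase") == "camel_case"
assert snake("TitleCase") == "title_case"
assert snake("x") == "x"  # strings shorter than 2 chars are just lowercased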
def linkify_templates(self):
    """ Link all templates, and create the template graph too

    :return: None
    """
    # First we create a list of all templates
    for i in itertools.chain(iter(list(self.items.values())),
                             iter(list(self.templates.values()))):
        self.linkify_item_templates(i)
    for i in self:
        i.tags = self.get_all_tags(i)
avg_line_len: 35.083333 | score: 14.75
def main(argv=None):
    """ben-elastic entry point"""
    arguments = cli_common(__doc__, argv=argv)
    es_export = ESExporter(arguments['CAMPAIGN-DIR'], arguments['--es'])
    es_export.export()
    if argv is not None:
        return es_export
avg_line_len: 34.428571 | score: 15.428571
def _run_model(iterator, args, tf_args):
    """mapPartitions function to run single-node inferencing from a checkpoint/saved_model, using the model's input/output mappings.

    Args:
      :iterator: input RDD partition iterator.
      :args: arguments for TFModel, in argparse format
      :tf_args: arguments for TensorFlow inferencing code, in argparse or ARGV format.

    Returns:
      An iterator of result data.
    """
    single_node_env(tf_args)

    logging.info("===== input_mapping: {}".format(args.input_mapping))
    logging.info("===== output_mapping: {}".format(args.output_mapping))
    input_tensor_names = [tensor for col, tensor in sorted(args.input_mapping.items())]
    output_tensor_names = [tensor for tensor, col in sorted(args.output_mapping.items())]

    # if using a signature_def_key, get input/output tensor info from the requested signature
    if args.signature_def_key:
        assert args.export_dir, "Inferencing with signature_def_key requires --export_dir argument"
        logging.info("===== loading meta_graph_def for tag_set ({0}) from saved_model: {1}".format(args.tag_set, args.export_dir))
        meta_graph_def = get_meta_graph_def(args.export_dir, args.tag_set)
        signature = meta_graph_def.signature_def[args.signature_def_key]
        logging.debug("signature: {}".format(signature))
        inputs_tensor_info = signature.inputs
        logging.debug("inputs_tensor_info: {0}".format(inputs_tensor_info))
        outputs_tensor_info = signature.outputs
        logging.debug("outputs_tensor_info: {0}".format(outputs_tensor_info))

    result = []

    global global_sess, global_args
    if global_sess and global_args == args:
        # if graph/session already loaded/started (and using same args), just reuse it
        sess = global_sess
    else:
        # otherwise, create new session and load graph from disk
        tf.reset_default_graph()
        sess = tf.Session(graph=tf.get_default_graph())
        if args.export_dir:
            assert args.tag_set, "Inferencing from a saved_model requires --tag_set"
            # load graph from a saved_model
            logging.info("===== restoring from saved_model: {}".format(args.export_dir))
            loader.load(sess, args.tag_set.split(','), args.export_dir)
        elif args.model_dir:
            # load graph from a checkpoint
            ckpt = tf.train.latest_checkpoint(args.model_dir)
            assert ckpt, "Invalid model checkpoint path: {}".format(args.model_dir)
            logging.info("===== restoring from checkpoint: {}".format(ckpt + ".meta"))
            saver = tf.train.import_meta_graph(ckpt + ".meta", clear_devices=True)
            saver.restore(sess, ckpt)
        else:
            raise Exception("Inferencing requires either --model_dir or --export_dir argument")
        global_sess = sess
        global_args = args

    # get list of input/output tensors (by name)
    if args.signature_def_key:
        input_tensors = [inputs_tensor_info[t].name for t in input_tensor_names]
        output_tensors = [outputs_tensor_info[output_tensor_names[0]].name]
    else:
        input_tensors = [t + ':0' for t in input_tensor_names]
        output_tensors = [t + ':0' for t in output_tensor_names]

    logging.info("input_tensors: {0}".format(input_tensors))
    logging.info("output_tensors: {0}".format(output_tensors))

    # feed data in batches and return output tensors
    for tensors in yield_batch(iterator, args.batch_size, len(input_tensor_names)):
        inputs_feed_dict = {}
        for i in range(len(input_tensors)):
            inputs_feed_dict[input_tensors[i]] = tensors[i]
        outputs = sess.run(output_tensors, feed_dict=inputs_feed_dict)
        lengths = [len(output) for output in outputs]
        input_size = len(tensors[0])
        assert all([length == input_size for length in lengths]), "Output array sizes {} must match input size: {}".format(lengths, input_size)
        python_outputs = [output.tolist() for output in outputs]  # convert from numpy to standard python types
        result.extend(zip(*python_outputs))  # convert to an array of tuples of "output columns"

    return result
avg_line_len: 47.329268 | score: 27.268293
def remove_callback_for_action(self, action, callback):
    """ Remove a callback for a specific action

    This is mainly for cleanup purposes or a plugin that replaces a GUI widget.

    :param str action: the action whose callback is going to be removed
    :param callback: the callback to be removed
    """
    if action in self.__action_to_callbacks:
        if callback in self.__action_to_callbacks[action]:
            self.__action_to_callbacks[action].remove(callback)
avg_line_len: 46.272727 | score: 22.454545
def info(self):
    """Gets info endpoint. Used to perform login auth."""
    url = self.api_url + self.info_url
    resp = self.session.get(url)
    if resp.status_code != 200:
        error = {'description': "Info HTTP response not valid"}
        raise CFException(error, resp.status_code)
    try:
        info = resp.json()
    except ValueError as e:
        error = {'description': "Info HTTP response not valid, %s" % str(e)}
        raise CFException(error, resp.status_code)
    return info
avg_line_len: 41.230769 | score: 15.307692
def delete_os_dummy_rtr_nwk(self, rtr_id, net_id, subnet_id):
    """Delete the dummy interface to the router. """
    subnet_lst = set()
    subnet_lst.add(subnet_id)
    ret = self.os_helper.delete_intf_router(None, None, rtr_id, subnet_lst)
    if not ret:
        return ret
    return self.os_helper.delete_network_all_subnets(net_id)
avg_line_len: 45.125 | score: 18
def _CallFlowLegacy(self,
                    flow_name=None,
                    next_state=None,
                    request_data=None,
                    client_id=None,
                    base_session_id=None,
                    **kwargs):
    """Creates a new flow and sends its responses to a state.

    This creates a new flow. The flow may send back many responses which will
    be queued by the framework until the flow terminates. The final status
    message will cause the entire transaction to be committed to the specified
    state.

    Args:
      flow_name: The name of the flow to invoke.
      next_state: The state in this flow, that responses to this message
        should go to.
      request_data: Any dict provided here will be available in the
        RequestState protobuf. The Responses object maintains a reference to
        this protobuf for use in the execution of the state method. (so you
        can access this data by responses.request). There is no format
        mandated on this data but it may be a serialized protobuf.
      client_id: If given, the flow is started for this client.
      base_session_id: A URN which will be used to build a URN.
      **kwargs: Arguments for the child flow.

    Returns:
      The URN of the child flow which was created.

    Raises:
      RuntimeError: In case of no cpu quota left to start more clients.
    """
    client_id = client_id or self.runner_args.client_id

    # We prepare a request state, and add it to our queue - any
    # responses from the child flow will return to the request state
    # and the stated next_state. Note however, that there is no
    # client_id or actual request message here because we directly
    # invoke the child flow rather than queue anything for it.
    state = rdf_flow_runner.RequestState(
        id=self.GetNextOutboundId(),
        session_id=utils.SmartUnicode(self.session_id),
        client_id=client_id,
        next_state=next_state,
        response_count=0)

    if request_data:
        state.data = rdf_protodict.Dict().FromDict(request_data)

    # Pass our logs collection urn to the flow object.
    logs_urn = self.hunt_obj.logs_collection_urn

    # If we were called with write_intermediate_results, propagate down to
    # child flows. This allows write_intermediate_results to be set to True
    # either at the top level parent, or somewhere in the middle of
    # the call chain.
    write_intermediate = kwargs.pop("write_intermediate_results", False)

    # Create the new child flow but do not notify the user about it.
    child_urn = self.hunt_obj.StartAFF4Flow(
        base_session_id=base_session_id or self.session_id,
        client_id=client_id,
        cpu_limit=self._GetSubFlowCPULimit(),
        flow_name=flow_name,
        logs_collection_urn=logs_urn,
        network_bytes_limit=self._GetSubFlowNetworkLimit(),
        notify_to_user=False,
        parent_flow=self.hunt_obj,
        queue=self.runner_args.queue,
        request_state=state,
        sync=False,
        token=self.token,
        write_intermediate_results=write_intermediate,
        **kwargs)

    self.QueueRequest(state)
    return child_urn
avg_line_len: 40.153846 | score: 21.192308
def set_attribute(self, name, value):
    """Sets the attribute of the element to a specified value

    @type name: str
    @param name: the name of the attribute
    @type value: str
    @param value: the value to set the attribute to
    """
    js_executor = self.driver_wrapper.js_executor

    def set_attribute_element():
        """
        Wrapper to set attribute
        """
        js_executor.execute_template('setAttributeTemplate', {
            'attribute_name': str(name),
            'attribute_value': str(value)}, self.element)
        return True

    self.execute_and_handle_webelement_exceptions(
        set_attribute_element,
        'set attribute "' + str(name) + '" to "' + str(value) + '"')
    return self
avg_line_len: 41.2 | score: 15.4
def range(start, finish, step):
    """Like built-in :func:`~builtins.range`, but with float support"""
    value = start
    while value <= finish:
        yield value
        value += step
avg_line_len: 34.166667 | score: 11.666667
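A usage sketch for the float range() generator above; values are illustrative, and note that unlike the built-in, the upper bound is inclusive:

# step 0.25 is exactly representable in binary floating point, so no drift here;
# steps like 0.1 would accumulate rounding error across iterations.
assert list(range(0, 1, 0.25)) == [0, 0.25, 0.5, 0.75, 1.0]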
def _move(self, from_state=None, to_state=None, when=None, mode=None):
    """
    Internal helper to move a task from one state to another (e.g. from
    QUEUED to DELAYED). The "when" argument indicates the timestamp of the
    task in the new state. If no to_state is specified, the task will be
    simply removed from the original state.

    The "mode" param can be specified to define how the timestamp in the
    new state should be updated and is passed to the ZADD Redis script (see
    its documentation for details).

    Raises TaskNotFound if the task is not in the expected state or not in
    the expected queue.
    """
    pipeline = self.tiger.connection.pipeline()
    scripts = self.tiger.scripts
    _key = self.tiger._key
    from_state = from_state or self.state
    queue = self.queue

    assert from_state
    assert queue

    scripts.fail_if_not_in_zset(_key(from_state, queue), self.id, client=pipeline)
    if to_state:
        if not when:
            when = time.time()
        if mode:
            scripts.zadd(_key(to_state, queue), when, self.id, mode, client=pipeline)
        else:
            pipeline.zadd(_key(to_state, queue), self.id, when)
        pipeline.sadd(_key(to_state), queue)
    pipeline.zrem(_key(from_state, queue), self.id)

    if not to_state:  # Remove the task if necessary
        if self.unique:
            # Only delete if it's not in any other queue
            check_states = set([ACTIVE, QUEUED, ERROR, SCHEDULED])
            check_states.remove(from_state)
            # TODO: Do the following two in one call.
            scripts.delete_if_not_in_zsets(_key('task', self.id, 'executions'), self.id, [
                _key(state, queue) for state in check_states
            ], client=pipeline)
            scripts.delete_if_not_in_zsets(_key('task', self.id), self.id, [
                _key(state, queue) for state in check_states
            ], client=pipeline)
        else:
            # Safe to remove
            pipeline.delete(_key('task', self.id, 'executions'))
            pipeline.delete(_key('task', self.id))

    scripts.srem_if_not_exists(_key(from_state), queue, _key(from_state, queue), client=pipeline)

    if to_state == QUEUED:
        pipeline.publish(_key('activity'), queue)

    try:
        scripts.execute_pipeline(pipeline)
    except redis.ResponseError as e:
        if '<FAIL_IF_NOT_IN_ZSET>' in e.args[0]:
            raise TaskNotFound('Task {} not found in queue "{}" in state "{}".'.format(
                self.id, queue, from_state
            ))
        raise
    else:
        self._state = to_state
avg_line_len: 40.506849 | score: 21.356164
def _is_dirty(dir_path):
    """Check whether a git repository has uncommitted changes."""
    try:
        subprocess.check_call(["git", "diff", "--quiet"], cwd=dir_path)
        return False
    except subprocess.CalledProcessError:
        return True
avg_line_len: 35.428571 | score: 17.285714
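A usage sketch for _is_dirty() above; the repository path is hypothetical. Note that `git diff --quiet` exits non-zero only for unstaged changes to tracked files; staged-only changes (`git diff --cached`) and untracked files are not detected:

# hypothetical path for illustration
if _is_dirty("/path/to/repo"):
    print("repo has uncommitted changes")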
def solver(A, config):
    """Generate an SA solver given matrix A and a configuration.

    Parameters
    ----------
    A : array, matrix, csr_matrix, bsr_matrix
        Matrix to invert, CSR or BSR format preferred for efficiency
    config : dict
        A dictionary of solver configuration parameters that is used to
        generate a smoothed aggregation solver

    Returns
    -------
    ml : smoothed_aggregation_solver
        smoothed aggregation hierarchy

    Notes
    -----
    config must contain the following parameter entries for
    smoothed_aggregation_solver:
        symmetry, smooth, presmoother, postsmoother, B, strength,
        max_levels, max_coarse, coarse_solver, aggregate, keep

    Examples
    --------
    >>> from pyamg.gallery import poisson
    >>> from pyamg import solver_configuration,solver
    >>> A = poisson((40,40),format='csr')
    >>> config = solver_configuration(A,verb=False)
    >>> ml = solver(A,config)

    """
    # Convert A to acceptable format
    A = make_csr(A)

    # Generate smoothed aggregation solver
    try:
        return \
            smoothed_aggregation_solver(A,
                                        B=config['B'],
                                        BH=config['BH'],
                                        smooth=config['smooth'],
                                        strength=config['strength'],
                                        max_levels=config['max_levels'],
                                        max_coarse=config['max_coarse'],
                                        coarse_solver=config['coarse_solver'],
                                        symmetry=config['symmetry'],
                                        aggregate=config['aggregate'],
                                        presmoother=config['presmoother'],
                                        postsmoother=config['postsmoother'],
                                        keep=config['keep'])
    except BaseException:
        raise TypeError('Failed generating smoothed_aggregation_solver')
avg_line_len: 38.461538 | score: 22.557692
def normalize_value(self, value, transform=True):
    """Prepare the given value to be stored in the index

    For the parameters, see BaseIndex.normalize_value

    Raises
    ------
    ValueError
        If ``raise_if_not_float`` is True and the value cannot be cast
        to a float.
    """
    if transform:
        value = self.transform_value(value)
    try:
        return float(value)
    except (ValueError, TypeError):
        if self.raise_if_not_float:
            raise ValueError('Invalid value %s for field %s.%s' % (
                value, self.model.__name__, self.field.name
            ))
        return 0
avg_line_len: 31.045455 | score: 18.818182
def cast_in(self, element):
    """encode the element into the internal tag list."""
    if _debug: SequenceOfAny._debug("cast_in %r", element)

    # make sure it is a list
    if not isinstance(element, List):
        raise EncodingError("%r is not a list" % (element,))

    t = TagList()
    element.encode(t)

    self.tagList.extend(t.tagList)
avg_line_len: 30.833333 | score: 18.666667
def do_parse(infilename: str, jsonfilename: Optional[str], rdffilename: Optional[str], rdffmt: str,
             context: Optional[str] = None) -> bool:
    """
    Parse the jsg in infilename and save the results in jsonfilename and/or rdffilename

    :param infilename: name of the file containing the ShExC
    :param jsonfilename: target ShExJ equivalent
    :param rdffilename: target ShExR equivalent
    :param rdffmt: target RDF format
    :param context: @context to use for rdf generation. If None use what is in the file
    :return: true if success
    """
    shexj = parse(FileStream(infilename, encoding="utf-8"))
    if shexj is not None:
        shexj['@context'] = context if context else "http://www.w3.org/ns/shex.jsonld"
        if jsonfilename:
            with open(jsonfilename, 'w') as outfile:
                outfile.write(shexj._as_json_dumps())
        if rdffilename:
            g = Graph().parse(data=shexj._as_json, format="json-ld")
            g.serialize(open(rdffilename, "wb"), format=rdffmt)
        return True
    return False
avg_line_len: 46.636364 | score: 19.545455
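A usage sketch for do_parse() above; the file names are hypothetical:

# parse ShExC from schema.shex, emitting ShExJ JSON and a Turtle ShExR rendering
ok = do_parse("schema.shex", "schema.json", "schema.ttl", "turtle")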
def encrypt_item(table_name, aws_cmk_id):
    """Demonstrate use of EncryptedTable to transparently encrypt an item."""
    index_key = {"partition_attribute": "is this", "sort_attribute": 55}
    plaintext_item = {
        "example": "data",
        "some numbers": 99,
        "and some binary": Binary(b"\x00\x01\x02"),
        "leave me": "alone",  # We want to ignore this attribute
    }
    # Collect all of the attributes that will be encrypted (used later).
    encrypted_attributes = set(plaintext_item.keys())
    encrypted_attributes.remove("leave me")
    # Collect all of the attributes that will not be encrypted (used later).
    unencrypted_attributes = set(index_key.keys())
    unencrypted_attributes.add("leave me")
    # Add the index pairs to the item.
    plaintext_item.update(index_key)

    # Create a normal table resource.
    table = boto3.resource("dynamodb").Table(table_name)
    # Use the TableInfo helper to collect information about the indexes.
    table_info = TableInfo(name=table_name)
    table_info.refresh_indexed_attributes(table.meta.client)

    # Create a crypto materials provider using the specified AWS KMS key.
    aws_kms_cmp = AwsKmsCryptographicMaterialsProvider(key_id=aws_cmk_id)

    encryption_context = EncryptionContext(
        table_name=table_name,
        partition_key_name=table_info.primary_index.partition,
        sort_key_name=table_info.primary_index.sort,
        # The only attributes that are used by the AWS KMS cryptographic materials providers
        # are the primary index attributes.
        # These attributes need to be in the form of a DynamoDB JSON structure, so first
        # convert the standard dictionary.
        attributes=dict_to_ddb(index_key),
    )

    # Create attribute actions that tell the encrypted table to encrypt all attributes,
    # only sign the primary index attributes, and ignore the one identified attribute.
    actions = AttributeActions(
        default_action=CryptoAction.ENCRYPT_AND_SIGN, attribute_actions={"leave me": CryptoAction.DO_NOTHING}
    )
    actions.set_index_keys(*table_info.protected_index_keys())

    # Build the crypto config to use for this item.
    # When using the higher-level helpers, this is handled for you.
    crypto_config = CryptoConfig(
        materials_provider=aws_kms_cmp, encryption_context=encryption_context, attribute_actions=actions
    )

    # Encrypt the plaintext item directly
    encrypted_item = encrypt_python_item(plaintext_item, crypto_config)

    # You could now put the encrypted item to DynamoDB just as you would any other item.
    # table.put_item(Item=encrypted_item)
    # We will skip this for the purposes of this example.

    # Decrypt the encrypted item directly
    decrypted_item = decrypt_python_item(encrypted_item, crypto_config)

    # Verify that all of the attributes are different in the encrypted item
    for name in encrypted_attributes:
        assert encrypted_item[name] != plaintext_item[name]
        assert decrypted_item[name] == plaintext_item[name]

    # Verify that all of the attributes that should not be encrypted were not.
    for name in unencrypted_attributes:
        assert decrypted_item[name] == encrypted_item[name] == plaintext_item[name]
avg_line_len: 45.126761 | score: 24.070423
def load_config(json_path):
    """Load config info from a .json file and return it."""
    with open(json_path, 'r') as json_file:
        config = json.loads(json_file.read())
    # sanity-test the config:
    assert(config['tree'][0]['page'] == 'index')
    return config
avg_line_len: 36.285714 | score: 8.428571
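A usage sketch for load_config() above; config.json is a hypothetical file whose 'tree' list must start with the 'index' page, or the assert fires:

config = load_config("config.json")
print(config['tree'][0]['page'])  # -> 'index'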
def loads(cls, data, store_password, try_decrypt_keys=True):
    """
    See :meth:`jks.jks.KeyStore.loads`.

    :param bytes data: Byte string representation of the keystore to be loaded.
    :param str password: Keystore password string
    :param bool try_decrypt_keys: Whether to automatically try to decrypt any encountered key entries
                                  using the same password as the keystore password.

    :returns: A loaded :class:`BksKeyStore` instance, if the keystore could be successfully parsed and
              the supplied store password is correct.

              If the ``try_decrypt_keys`` parameter was set to ``True``, any keys that could be
              successfully decrypted using the store password have already been decrypted; otherwise,
              no attempt to decrypt any key entries is made.

    :raises BadKeystoreFormatException: If the keystore is malformed in some way
    :raises UnsupportedKeystoreVersionException: If the keystore contains an unknown format version number
    :raises KeystoreSignatureException: If the keystore signature could not be verified using the
                                        supplied store password
    :raises DuplicateAliasException: If the keystore contains duplicate aliases
    """
    try:
        pos = 0
        version = b4.unpack_from(data, pos)[0]; pos += 4
        if version not in [1, 2]:
            raise UnsupportedKeystoreVersionException("Unsupported BKS keystore version; only V1 and V2 supported, found v" + repr(version))

        salt, pos = cls._read_data(data, pos)
        iteration_count = b4.unpack_from(data, pos)[0]; pos += 4

        store_type = "bks"
        entries, size = cls._load_bks_entries(data[pos:], store_type, store_password, try_decrypt_keys=try_decrypt_keys)

        hmac_fn = hashlib.sha1
        hmac_digest_size = hmac_fn().digest_size
        hmac_key_size = hmac_digest_size * 8 if version != 1 else hmac_digest_size
        hmac_key = rfc7292.derive_key(hmac_fn, rfc7292.PURPOSE_MAC_MATERIAL, store_password, salt, iteration_count, hmac_key_size // 8)

        store_data = data[pos:pos + size]
        store_hmac = data[pos + size:pos + size + hmac_digest_size]
        if len(store_hmac) != hmac_digest_size:
            raise BadKeystoreFormatException("Bad HMAC size; found %d bytes, expected %d bytes" % (len(store_hmac), hmac_digest_size))

        hmac = HMAC.new(hmac_key, digestmod=SHA)
        hmac.update(store_data)

        computed_hmac = hmac.digest()
        if store_hmac != computed_hmac:
            raise KeystoreSignatureException("Hash mismatch; incorrect keystore password?")
        return cls(store_type, entries, version=version)
    except struct.error as e:
        raise BadKeystoreFormatException(e)
avg_line_len: 54.980392 | score: 36.54902
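A usage sketch for BksKeyStore.loads() above; the file name and password are hypothetical:

# read a BouncyCastle keystore from disk and verify/parse it
with open("keystore.bks", "rb") as f:
    store = BksKeyStore.loads(f.read(), "s3cr3t")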
def verify_logout_request(cls, logout_request, ticket):
    """verifies the single logout request came from the CAS server

    returns True if the logout_request is valid, False otherwise
    """
    try:
        session_index = cls.get_saml_slos(logout_request)
        session_index = session_index[0].text
        if session_index == ticket:
            return True
        else:
            return False
    except (AttributeError, IndexError):
        return False
avg_line_len: 38.846154 | score: 13.076923
def search_cloud_integration_for_facet(self, facet, **kwargs):  # noqa: E501
    """Lists the values of a specific facet over the customer's non-deleted cloud integrations  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.search_cloud_integration_for_facet(facet, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str facet: (required)
    :param FacetSearchRequestContainer body:
    :return: ResponseContainerFacetResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.search_cloud_integration_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
    else:
        (data) = self.search_cloud_integration_for_facet_with_http_info(facet, **kwargs)  # noqa: E501
        return data
avg_line_len: 48.181818 | score: 21.590909
def get_members_of_group(self, gname):
    """Get all members of a group whose name is given in parameter

    :param gname: name of the group
    :type gname: str
    :return: list of contacts in the group
    :rtype: list[alignak.objects.contact.Contact]
    """
    contactgroup = self.find_by_name(gname)
    if contactgroup:
        return contactgroup.get_contacts()
    return []
avg_line_len: 34.583333 | score: 10.833333
def from_array(array):
    """
    Deserialize a new Message from a given dictionary.

    :return: new Message instance.
    :rtype: Message
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")
    from ..receivable.peer import User, Chat
    from ..receivable.media import Animation, Audio, Contact, Document, Game, Location, MessageEntity, PhotoSize
    from ..receivable.media import Sticker, Venue, Video, VideoNote, Voice
    from ..receivable.payments import Invoice, SuccessfulPayment
    from ..receivable.passport import PassportData

    data = {}
    data['message_id'] = int(array.get('message_id'))
    data['date'] = int(array.get('date'))
    data['chat'] = Chat.from_array(array.get('chat'))
    data['from_peer'] = User.from_array(array.get('from')) if array.get('from') is not None else None
    data['forward_from'] = User.from_array(array.get('forward_from')) if array.get('forward_from') is not None else None
    data['forward_from_chat'] = Chat.from_array(array.get('forward_from_chat')) if array.get('forward_from_chat') is not None else None
    data['forward_from_message_id'] = int(array.get('forward_from_message_id')) if array.get('forward_from_message_id') is not None else None
    data['forward_signature'] = u(array.get('forward_signature')) if array.get('forward_signature') is not None else None
    data['forward_date'] = int(array.get('forward_date')) if array.get('forward_date') is not None else None
    data['reply_to_message'] = Message.from_array(array.get('reply_to_message')) if array.get('reply_to_message') is not None else None
    data['edit_date'] = int(array.get('edit_date')) if array.get('edit_date') is not None else None
    data['media_group_id'] = u(array.get('media_group_id')) if array.get('media_group_id') is not None else None
    data['author_signature'] = u(array.get('author_signature')) if array.get('author_signature') is not None else None
    data['text'] = u(array.get('text')) if array.get('text') is not None else None
    data['entities'] = MessageEntity.from_array_list(array.get('entities'), list_level=1) if array.get('entities') is not None else None
    data['caption_entities'] = MessageEntity.from_array_list(array.get('caption_entities'), list_level=1) if array.get('caption_entities') is not None else None
    data['audio'] = Audio.from_array(array.get('audio')) if array.get('audio') is not None else None
    data['document'] = Document.from_array(array.get('document')) if array.get('document') is not None else None
    data['animation'] = Animation.from_array(array.get('animation')) if array.get('animation') is not None else None
    data['game'] = Game.from_array(array.get('game')) if array.get('game') is not None else None
    data['photo'] = PhotoSize.from_array_list(array.get('photo'), list_level=1) if array.get('photo') is not None else None
    data['sticker'] = Sticker.from_array(array.get('sticker')) if array.get('sticker') is not None else None
    data['video'] = Video.from_array(array.get('video')) if array.get('video') is not None else None
    data['voice'] = Voice.from_array(array.get('voice')) if array.get('voice') is not None else None
    data['video_note'] = VideoNote.from_array(array.get('video_note')) if array.get('video_note') is not None else None
    data['caption'] = u(array.get('caption')) if array.get('caption') is not None else None
    data['contact'] = Contact.from_array(array.get('contact')) if array.get('contact') is not None else None
    data['location'] = Location.from_array(array.get('location')) if array.get('location') is not None else None
    data['venue'] = Venue.from_array(array.get('venue')) if array.get('venue') is not None else None
    data['new_chat_members'] = User.from_array_list(array.get('new_chat_members'), list_level=1) if array.get('new_chat_members') is not None else None
    data['left_chat_member'] = User.from_array(array.get('left_chat_member')) if array.get('left_chat_member') is not None else None
    data['new_chat_title'] = u(array.get('new_chat_title')) if array.get('new_chat_title') is not None else None
    data['new_chat_photo'] = PhotoSize.from_array_list(array.get('new_chat_photo'), list_level=1) if array.get('new_chat_photo') is not None else None
    data['delete_chat_photo'] = bool(array.get('delete_chat_photo')) if array.get('delete_chat_photo') is not None else None
    data['group_chat_created'] = bool(array.get('group_chat_created')) if array.get('group_chat_created') is not None else None
    data['supergroup_chat_created'] = bool(array.get('supergroup_chat_created')) if array.get('supergroup_chat_created') is not None else None
    data['channel_chat_created'] = bool(array.get('channel_chat_created')) if array.get('channel_chat_created') is not None else None
    data['migrate_to_chat_id'] = int(array.get('migrate_to_chat_id')) if array.get('migrate_to_chat_id') is not None else None
    data['migrate_from_chat_id'] = int(array.get('migrate_from_chat_id')) if array.get('migrate_from_chat_id') is not None else None
    data['pinned_message'] = Message.from_array(array.get('pinned_message')) if array.get('pinned_message') is not None else None
    data['invoice'] = Invoice.from_array(array.get('invoice')) if array.get('invoice') is not None else None
    data['successful_payment'] = SuccessfulPayment.from_array(array.get('successful_payment')) if array.get('successful_payment') is not None else None
    data['connected_website'] = u(array.get('connected_website')) if array.get('connected_website') is not None else None
    data['passport_data'] = PassportData.from_array(array.get('passport_data')) if array.get('passport_data') is not None else None
    data['_raw'] = array
    return Message(**data)
[ "def", "from_array", "(", "array", ")", ":", "if", "array", "is", "None", "or", "not", "array", ":", "return", "None", "# end if", "assert_type_or_raise", "(", "array", ",", "dict", ",", "parameter_name", "=", "\"array\"", ")", "from", ".", ".", "receivable", ".", "peer", "import", "User", ",", "Chat", "from", ".", ".", "receivable", ".", "media", "import", "Animation", ",", "Audio", ",", "Contact", ",", "Document", ",", "Game", ",", "Location", ",", "MessageEntity", ",", "PhotoSize", "from", ".", ".", "receivable", ".", "media", "import", "Sticker", ",", "Venue", ",", "Video", ",", "VideoNote", ",", "Voice", "from", ".", ".", "receivable", ".", "payments", "import", "Invoice", ",", "SuccessfulPayment", "from", ".", ".", "receivable", ".", "passport", "import", "PassportData", "data", "=", "{", "}", "data", "[", "'message_id'", "]", "=", "int", "(", "array", ".", "get", "(", "'message_id'", ")", ")", "data", "[", "'date'", "]", "=", "int", "(", "array", ".", "get", "(", "'date'", ")", ")", "data", "[", "'chat'", "]", "=", "Chat", ".", "from_array", "(", "array", ".", "get", "(", "'chat'", ")", ")", "data", "[", "'from_peer'", "]", "=", "User", ".", "from_array", "(", "array", ".", "get", "(", "'from'", ")", ")", "if", "array", ".", "get", "(", "'from'", ")", "is", "not", "None", "else", "None", "data", "[", "'forward_from'", "]", "=", "User", ".", "from_array", "(", "array", ".", "get", "(", "'forward_from'", ")", ")", "if", "array", ".", "get", "(", "'forward_from'", ")", "is", "not", "None", "else", "None", "data", "[", "'forward_from_chat'", "]", "=", "Chat", ".", "from_array", "(", "array", ".", "get", "(", "'forward_from_chat'", ")", ")", "if", "array", ".", "get", "(", "'forward_from_chat'", ")", "is", "not", "None", "else", "None", "data", "[", "'forward_from_message_id'", "]", "=", "int", "(", "array", ".", "get", "(", "'forward_from_message_id'", ")", ")", "if", "array", ".", "get", "(", "'forward_from_message_id'", ")", "is", "not", "None", "else", "None", "data", "[", "'forward_signature'", "]", "=", "u", "(", "array", ".", "get", "(", "'forward_signature'", ")", ")", "if", "array", ".", "get", "(", "'forward_signature'", ")", "is", "not", "None", "else", "None", "data", "[", "'forward_date'", "]", "=", "int", "(", "array", ".", "get", "(", "'forward_date'", ")", ")", "if", "array", ".", "get", "(", "'forward_date'", ")", "is", "not", "None", "else", "None", "data", "[", "'reply_to_message'", "]", "=", "Message", ".", "from_array", "(", "array", ".", "get", "(", "'reply_to_message'", ")", ")", "if", "array", ".", "get", "(", "'reply_to_message'", ")", "is", "not", "None", "else", "None", "data", "[", "'edit_date'", "]", "=", "int", "(", "array", ".", "get", "(", "'edit_date'", ")", ")", "if", "array", ".", "get", "(", "'edit_date'", ")", "is", "not", "None", "else", "None", "data", "[", "'media_group_id'", "]", "=", "u", "(", "array", ".", "get", "(", "'media_group_id'", ")", ")", "if", "array", ".", "get", "(", "'media_group_id'", ")", "is", "not", "None", "else", "None", "data", "[", "'author_signature'", "]", "=", "u", "(", "array", ".", "get", "(", "'author_signature'", ")", ")", "if", "array", ".", "get", "(", "'author_signature'", ")", "is", "not", "None", "else", "None", "data", "[", "'text'", "]", "=", "u", "(", "array", ".", "get", "(", "'text'", ")", ")", "if", "array", ".", "get", "(", "'text'", ")", "is", "not", "None", "else", "None", "data", "[", "'entities'", "]", "=", "MessageEntity", ".", "from_array_list", "(", "array", ".", "get", "(", "'entities'", ")", 
",", "list_level", "=", "1", ")", "if", "array", ".", "get", "(", "'entities'", ")", "is", "not", "None", "else", "None", "data", "[", "'caption_entities'", "]", "=", "MessageEntity", ".", "from_array_list", "(", "array", ".", "get", "(", "'caption_entities'", ")", ",", "list_level", "=", "1", ")", "if", "array", ".", "get", "(", "'caption_entities'", ")", "is", "not", "None", "else", "None", "data", "[", "'audio'", "]", "=", "Audio", ".", "from_array", "(", "array", ".", "get", "(", "'audio'", ")", ")", "if", "array", ".", "get", "(", "'audio'", ")", "is", "not", "None", "else", "None", "data", "[", "'document'", "]", "=", "Document", ".", "from_array", "(", "array", ".", "get", "(", "'document'", ")", ")", "if", "array", ".", "get", "(", "'document'", ")", "is", "not", "None", "else", "None", "data", "[", "'animation'", "]", "=", "Animation", ".", "from_array", "(", "array", ".", "get", "(", "'animation'", ")", ")", "if", "array", ".", "get", "(", "'animation'", ")", "is", "not", "None", "else", "None", "data", "[", "'game'", "]", "=", "Game", ".", "from_array", "(", "array", ".", "get", "(", "'game'", ")", ")", "if", "array", ".", "get", "(", "'game'", ")", "is", "not", "None", "else", "None", "data", "[", "'photo'", "]", "=", "PhotoSize", ".", "from_array_list", "(", "array", ".", "get", "(", "'photo'", ")", ",", "list_level", "=", "1", ")", "if", "array", ".", "get", "(", "'photo'", ")", "is", "not", "None", "else", "None", "data", "[", "'sticker'", "]", "=", "Sticker", ".", "from_array", "(", "array", ".", "get", "(", "'sticker'", ")", ")", "if", "array", ".", "get", "(", "'sticker'", ")", "is", "not", "None", "else", "None", "data", "[", "'video'", "]", "=", "Video", ".", "from_array", "(", "array", ".", "get", "(", "'video'", ")", ")", "if", "array", ".", "get", "(", "'video'", ")", "is", "not", "None", "else", "None", "data", "[", "'voice'", "]", "=", "Voice", ".", "from_array", "(", "array", ".", "get", "(", "'voice'", ")", ")", "if", "array", ".", "get", "(", "'voice'", ")", "is", "not", "None", "else", "None", "data", "[", "'video_note'", "]", "=", "VideoNote", ".", "from_array", "(", "array", ".", "get", "(", "'video_note'", ")", ")", "if", "array", ".", "get", "(", "'video_note'", ")", "is", "not", "None", "else", "None", "data", "[", "'caption'", "]", "=", "u", "(", "array", ".", "get", "(", "'caption'", ")", ")", "if", "array", ".", "get", "(", "'caption'", ")", "is", "not", "None", "else", "None", "data", "[", "'contact'", "]", "=", "Contact", ".", "from_array", "(", "array", ".", "get", "(", "'contact'", ")", ")", "if", "array", ".", "get", "(", "'contact'", ")", "is", "not", "None", "else", "None", "data", "[", "'location'", "]", "=", "Location", ".", "from_array", "(", "array", ".", "get", "(", "'location'", ")", ")", "if", "array", ".", "get", "(", "'location'", ")", "is", "not", "None", "else", "None", "data", "[", "'venue'", "]", "=", "Venue", ".", "from_array", "(", "array", ".", "get", "(", "'venue'", ")", ")", "if", "array", ".", "get", "(", "'venue'", ")", "is", "not", "None", "else", "None", "data", "[", "'new_chat_members'", "]", "=", "User", ".", "from_array_list", "(", "array", ".", "get", "(", "'new_chat_members'", ")", ",", "list_level", "=", "1", ")", "if", "array", ".", "get", "(", "'new_chat_members'", ")", "is", "not", "None", "else", "None", "data", "[", "'left_chat_member'", "]", "=", "User", ".", "from_array", "(", "array", ".", "get", "(", "'left_chat_member'", ")", ")", "if", "array", ".", "get", "(", "'left_chat_member'", ")", "is", "not", "None", 
"else", "None", "data", "[", "'new_chat_title'", "]", "=", "u", "(", "array", ".", "get", "(", "'new_chat_title'", ")", ")", "if", "array", ".", "get", "(", "'new_chat_title'", ")", "is", "not", "None", "else", "None", "data", "[", "'new_chat_photo'", "]", "=", "PhotoSize", ".", "from_array_list", "(", "array", ".", "get", "(", "'new_chat_photo'", ")", ",", "list_level", "=", "1", ")", "if", "array", ".", "get", "(", "'new_chat_photo'", ")", "is", "not", "None", "else", "None", "data", "[", "'delete_chat_photo'", "]", "=", "bool", "(", "array", ".", "get", "(", "'delete_chat_photo'", ")", ")", "if", "array", ".", "get", "(", "'delete_chat_photo'", ")", "is", "not", "None", "else", "None", "data", "[", "'group_chat_created'", "]", "=", "bool", "(", "array", ".", "get", "(", "'group_chat_created'", ")", ")", "if", "array", ".", "get", "(", "'group_chat_created'", ")", "is", "not", "None", "else", "None", "data", "[", "'supergroup_chat_created'", "]", "=", "bool", "(", "array", ".", "get", "(", "'supergroup_chat_created'", ")", ")", "if", "array", ".", "get", "(", "'supergroup_chat_created'", ")", "is", "not", "None", "else", "None", "data", "[", "'channel_chat_created'", "]", "=", "bool", "(", "array", ".", "get", "(", "'channel_chat_created'", ")", ")", "if", "array", ".", "get", "(", "'channel_chat_created'", ")", "is", "not", "None", "else", "None", "data", "[", "'migrate_to_chat_id'", "]", "=", "int", "(", "array", ".", "get", "(", "'migrate_to_chat_id'", ")", ")", "if", "array", ".", "get", "(", "'migrate_to_chat_id'", ")", "is", "not", "None", "else", "None", "data", "[", "'migrate_from_chat_id'", "]", "=", "int", "(", "array", ".", "get", "(", "'migrate_from_chat_id'", ")", ")", "if", "array", ".", "get", "(", "'migrate_from_chat_id'", ")", "is", "not", "None", "else", "None", "data", "[", "'pinned_message'", "]", "=", "Message", ".", "from_array", "(", "array", ".", "get", "(", "'pinned_message'", ")", ")", "if", "array", ".", "get", "(", "'pinned_message'", ")", "is", "not", "None", "else", "None", "data", "[", "'invoice'", "]", "=", "Invoice", ".", "from_array", "(", "array", ".", "get", "(", "'invoice'", ")", ")", "if", "array", ".", "get", "(", "'invoice'", ")", "is", "not", "None", "else", "None", "data", "[", "'successful_payment'", "]", "=", "SuccessfulPayment", ".", "from_array", "(", "array", ".", "get", "(", "'successful_payment'", ")", ")", "if", "array", ".", "get", "(", "'successful_payment'", ")", "is", "not", "None", "else", "None", "data", "[", "'connected_website'", "]", "=", "u", "(", "array", ".", "get", "(", "'connected_website'", ")", ")", "if", "array", ".", "get", "(", "'connected_website'", ")", "is", "not", "None", "else", "None", "data", "[", "'passport_data'", "]", "=", "PassportData", ".", "from_array", "(", "array", ".", "get", "(", "'passport_data'", ")", ")", "if", "array", ".", "get", "(", "'passport_data'", ")", "is", "not", "None", "else", "None", "data", "[", "'_raw'", "]", "=", "array", "return", "Message", "(", "*", "*", "data", ")" ]
92.138462
60.569231
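A minimal usage sketch for the deserializer above; the payload is hypothetical and carries only a few of the optional fields:

payload = {
    'message_id': 1,
    'date': 1546300800,
    'chat': {'id': 42, 'type': 'private'},  # minimal Chat dict (assumed sufficient)
    'text': 'hello',
}
msg = Message.from_array(payload)
# Fields absent from the payload come back as None, e.g. msg.from_peer.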
def signal_handler(signum, stackframe):
    """Helper function to handle caught signals."""
    global g_runner
    global g_handling_signal

    if g_handling_signal:
        # Don't do this recursively.
        return
    g_handling_signal = True

    print("")
    print("----------------------------------------------------------------------")
    print("")
    print("SIGNAL CAUGHT (" + str(signum) + "). TEARING DOWN CLOUDS.")
    print("")
    print("----------------------------------------------------------------------")
    g_runner.terminate()
[ "def", "signal_handler", "(", "signum", ",", "stackframe", ")", ":", "global", "g_runner", "global", "g_handling_signal", "if", "g_handling_signal", ":", "# Don't do this recursively.", "return", "g_handling_signal", "=", "True", "print", "(", "\"\"", ")", "print", "(", "\"----------------------------------------------------------------------\"", ")", "print", "(", "\"\"", ")", "print", "(", "\"SIGNAL CAUGHT (\"", "+", "str", "(", "signum", ")", "+", "\"). TEARING DOWN CLOUDS.\"", ")", "print", "(", "\"\"", ")", "print", "(", "\"----------------------------------------------------------------------\"", ")", "g_runner", ".", "terminate", "(", ")" ]
31.764706
22.647059
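A hedged usage sketch: the handler above would presumably be registered through the standard signal module, e.g.:

import signal

# Tear down clouds on Ctrl-C and on a polite kill.
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)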
def trocar_codigo_de_ativacao(self, novo_codigo_ativacao,
        opcao=constantes.CODIGO_ATIVACAO_REGULAR,
        codigo_emergencia=None):
    """Overrides :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.

    :return: A standard SAT response.
    :rtype: satcfe.resposta.padrao.RespostaSAT
    """
    resp = self._http_post('trocarcodigodeativacao',
            novo_codigo_ativacao=novo_codigo_ativacao,
            opcao=opcao,
            codigo_emergencia=codigo_emergencia)
    conteudo = resp.json()
    return RespostaSAT.trocar_codigo_de_ativacao(conteudo.get('retorno'))
[ "def", "trocar_codigo_de_ativacao", "(", "self", ",", "novo_codigo_ativacao", ",", "opcao", "=", "constantes", ".", "CODIGO_ATIVACAO_REGULAR", ",", "codigo_emergencia", "=", "None", ")", ":", "resp", "=", "self", ".", "_http_post", "(", "'trocarcodigodeativacao'", ",", "novo_codigo_ativacao", "=", "novo_codigo_ativacao", ",", "opcao", "=", "opcao", ",", "codigo_emergencia", "=", "codigo_emergencia", ")", "conteudo", "=", "resp", ".", "json", "(", ")", "return", "RespostaSAT", ".", "trocar_codigo_de_ativacao", "(", "conteudo", ".", "get", "(", "'retorno'", ")", ")" ]
44.785714
13.571429
def on_dummies(self, *args):
    """Give the dummies numbers such that, when appended to their names,
    they give a unique name for the resulting new
    :class:`board.Pawn` or :class:`board.Spot`.
    """
    def renum_dummy(dummy, *args):
        dummy.num = dummynum(self.app.character, dummy.prefix) + 1

    for dummy in self.dummies:
        if dummy is None or hasattr(dummy, '_numbered'):
            continue
        if dummy == self.dummything:
            self.app.pawncfg.bind(imgpaths=self._propagate_thing_paths)
        if dummy == self.dummyplace:
            self.app.spotcfg.bind(imgpaths=self._propagate_place_paths)
        dummy.num = dummynum(self.app.character, dummy.prefix) + 1
        Logger.debug("MainScreen: dummy #{}".format(dummy.num))
        dummy.bind(prefix=partial(renum_dummy, dummy))
        dummy._numbered = True
[ "def", "on_dummies", "(", "self", ",", "*", "args", ")", ":", "def", "renum_dummy", "(", "dummy", ",", "*", "args", ")", ":", "dummy", ".", "num", "=", "dummynum", "(", "self", ".", "app", ".", "character", ",", "dummy", ".", "prefix", ")", "+", "1", "for", "dummy", "in", "self", ".", "dummies", ":", "if", "dummy", "is", "None", "or", "hasattr", "(", "dummy", ",", "'_numbered'", ")", ":", "continue", "if", "dummy", "==", "self", ".", "dummything", ":", "self", ".", "app", ".", "pawncfg", ".", "bind", "(", "imgpaths", "=", "self", ".", "_propagate_thing_paths", ")", "if", "dummy", "==", "self", ".", "dummyplace", ":", "self", ".", "app", ".", "spotcfg", ".", "bind", "(", "imgpaths", "=", "self", ".", "_propagate_place_paths", ")", "dummy", ".", "num", "=", "dummynum", "(", "self", ".", "app", ".", "character", ",", "dummy", ".", "prefix", ")", "+", "1", "Logger", ".", "debug", "(", "\"MainScreen: dummy #{}\"", ".", "format", "(", "dummy", ".", "num", ")", ")", "dummy", ".", "bind", "(", "prefix", "=", "partial", "(", "renum_dummy", ",", "dummy", ")", ")", "dummy", ".", "_numbered", "=", "True" ]
45.2
17.05
def delete_view(self, request, object_id, extra_context=None):
    """
    Overrides the default to enable redirecting to the directory view after
    deletion of an image.

    We need to fetch the object and find out who the parent is
    before super, because super will delete the object and make it
    impossible to find out the parent folder to redirect to.
    """
    try:
        obj = self.get_queryset(request).get(pk=unquote(object_id))
        parent_folder = obj.folder
    except self.model.DoesNotExist:
        parent_folder = None

    if request.POST:
        # Return to folder listing, since there is no usable file listing.
        super(FileAdmin, self).delete_view(
            request=request, object_id=object_id,
            extra_context=extra_context)

        if parent_folder:
            url = reverse('admin:filer-directory_listing',
                          kwargs={'folder_id': parent_folder.id})
        else:
            url = reverse('admin:filer-directory_listing-unfiled_images')
        url = "{0}{1}".format(
            url,
            admin_url_params_encoded(request)
        )
        return HttpResponseRedirect(url)

    return super(FileAdmin, self).delete_view(
        request=request, object_id=object_id,
        extra_context=extra_context)
[ "def", "delete_view", "(", "self", ",", "request", ",", "object_id", ",", "extra_context", "=", "None", ")", ":", "try", ":", "obj", "=", "self", ".", "get_queryset", "(", "request", ")", ".", "get", "(", "pk", "=", "unquote", "(", "object_id", ")", ")", "parent_folder", "=", "obj", ".", "folder", "except", "self", ".", "model", ".", "DoesNotExist", ":", "parent_folder", "=", "None", "if", "request", ".", "POST", ":", "# Return to folder listing, since there is no usable file listing.", "super", "(", "FileAdmin", ",", "self", ")", ".", "delete_view", "(", "request", "=", "request", ",", "object_id", "=", "object_id", ",", "extra_context", "=", "extra_context", ")", "if", "parent_folder", ":", "url", "=", "reverse", "(", "'admin:filer-directory_listing'", ",", "kwargs", "=", "{", "'folder_id'", ":", "parent_folder", ".", "id", "}", ")", "else", ":", "url", "=", "reverse", "(", "'admin:filer-directory_listing-unfiled_images'", ")", "url", "=", "\"{0}{1}\"", ".", "format", "(", "url", ",", "admin_url_params_encoded", "(", "request", ")", ")", "return", "HttpResponseRedirect", "(", "url", ")", "return", "super", "(", "FileAdmin", ",", "self", ")", ".", "delete_view", "(", "request", "=", "request", ",", "object_id", "=", "object_id", ",", "extra_context", "=", "extra_context", ")" ]
40.647059
18.470588
def closed(self, error=None):
    """
    Notify the application that the connection has been closed.

    :param error: The exception which has caused the connection to
                  be closed. If the connection has been closed due
                  to an EOF, pass ``None``.
    """
    if self._application:
        try:
            self._application.closed(error)
        except Exception:
            # Ignore exceptions from the notification
            pass
[ "def", "closed", "(", "self", ",", "error", "=", "None", ")", ":", "if", "self", ".", "_application", ":", "try", ":", "self", ".", "_application", ".", "closed", "(", "error", ")", "except", "Exception", ":", "# Ignore exceptions from the notification", "pass" ]
33.6
18.4
def parse(readDataInstance):
    """
    Returns a new L{NtHeaders} object.

    @type readDataInstance: L{ReadData}
    @param readDataInstance: A L{ReadData} object with data to be parsed
    as a L{NtHeaders} object.

    @rtype: L{NtHeaders}
    @return: A new L{NtHeaders} object.
    """
    nt = NtHeaders()
    nt.signature.value = readDataInstance.readDword()
    nt.fileHeader = FileHeader.parse(readDataInstance)
    nt.optionalHeader = OptionalHeader.parse(readDataInstance)
    return nt
[ "def", "parse", "(", "readDataInstance", ")", ":", "nt", "=", "NtHeaders", "(", ")", "nt", ".", "signature", ".", "value", "=", "readDataInstance", ".", "readDword", "(", ")", "nt", ".", "fileHeader", "=", "FileHeader", ".", "parse", "(", "readDataInstance", ")", "nt", ".", "optionalHeader", "=", "OptionalHeader", ".", "parse", "(", "readDataInstance", ")", "return", "nt" ]
36.4
17.2
def masktorgb(mask, color='lightgreen', alpha=1.0):
    """Convert boolean mask to RGB image object for canvas overlay.

    Parameters
    ----------
    mask : ndarray
        Boolean mask to overlay. 2D image only.

    color : str
        Color name accepted by Ginga.

    alpha : float
        Opacity. Unmasked data are always transparent.

    Returns
    -------
    rgbobj : RGBImage
        RGB image for canvas Image object.

    Raises
    ------
    ValueError
        Invalid mask dimension.

    """
    mask = np.asarray(mask)

    if mask.ndim != 2:
        raise ValueError('ndim={0} is not supported'.format(mask.ndim))

    ht, wd = mask.shape
    r, g, b = colors.lookup_color(color)
    rgbobj = RGBImage(data_np=np.zeros((ht, wd, 4), dtype=np.uint8))

    rc = rgbobj.get_slice('R')
    gc = rgbobj.get_slice('G')
    bc = rgbobj.get_slice('B')
    ac = rgbobj.get_slice('A')
    ac[:] = 0  # Transparent background

    rc[mask] = int(r * 255)
    gc[mask] = int(g * 255)
    bc[mask] = int(b * 255)
    ac[mask] = int(alpha * 255)

    # For debugging
    #rgbobj.save_as_file('ztmp_rgbobj.png')

    return rgbobj
[ "def", "masktorgb", "(", "mask", ",", "color", "=", "'lightgreen'", ",", "alpha", "=", "1.0", ")", ":", "mask", "=", "np", ".", "asarray", "(", "mask", ")", "if", "mask", ".", "ndim", "!=", "2", ":", "raise", "ValueError", "(", "'ndim={0} is not supported'", ".", "format", "(", "mask", ".", "ndim", ")", ")", "ht", ",", "wd", "=", "mask", ".", "shape", "r", ",", "g", ",", "b", "=", "colors", ".", "lookup_color", "(", "color", ")", "rgbobj", "=", "RGBImage", "(", "data_np", "=", "np", ".", "zeros", "(", "(", "ht", ",", "wd", ",", "4", ")", ",", "dtype", "=", "np", ".", "uint8", ")", ")", "rc", "=", "rgbobj", ".", "get_slice", "(", "'R'", ")", "gc", "=", "rgbobj", ".", "get_slice", "(", "'G'", ")", "bc", "=", "rgbobj", ".", "get_slice", "(", "'B'", ")", "ac", "=", "rgbobj", ".", "get_slice", "(", "'A'", ")", "ac", "[", ":", "]", "=", "0", "# Transparent background", "rc", "[", "mask", "]", "=", "int", "(", "r", "*", "255", ")", "gc", "[", "mask", "]", "=", "int", "(", "g", "*", "255", ")", "bc", "[", "mask", "]", "=", "int", "(", "b", "*", "255", ")", "ac", "[", "mask", "]", "=", "int", "(", "alpha", "*", "255", ")", "# For debugging", "#rgbobj.save_as_file('ztmp_rgbobj.png')", "return", "rgbobj" ]
22.285714
21.510204
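A minimal sketch of building an overlay for the function above (image and threshold are hypothetical):

import numpy as np

img = np.random.rand(100, 100)   # stand-in 2D image
mask = img > 0.8                 # boolean mask of bright pixels
rgb = masktorgb(mask, color='red', alpha=0.3)
# `rgb` can now back a canvas Image object drawn over the viewer.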
def process_samples_array(samples, **kwargs):
    """Convert an array of nested sampling dead and live points of the type
    produced by PolyChord and MultiNest into a nestcheck nested sampling run
    dictionary.

    Parameters
    ----------
    samples: 2d numpy array
        Array of dead points and any remaining live points at termination.
        Has #parameters + 2 columns:
        param_1, param_2, ... , logl, birth_logl

    kwargs: dict, optional
        Options passed to birth_inds_given_contours

    Returns
    -------
    ns_run: dict
        Nested sampling run dict (see the module docstring for more details).
        Only contains information in samples (not additional optional output
        key).
    """
    samples = samples[np.argsort(samples[:, -2])]
    ns_run = {}
    ns_run['logl'] = samples[:, -2]
    ns_run['theta'] = samples[:, :-2]
    birth_contours = samples[:, -1]
    # birth_contours, ns_run['theta'] = check_logls_unique(
    #     samples[:, -2], samples[:, -1], samples[:, :-2])
    birth_inds = birth_inds_given_contours(
        birth_contours, ns_run['logl'], **kwargs)
    ns_run['thread_labels'] = threads_given_birth_inds(birth_inds)
    unique_threads = np.unique(ns_run['thread_labels'])
    assert np.array_equal(
        unique_threads,
        np.asarray(range(unique_threads.shape[0])))
    # Work out nlive_array and thread_min_max logls from thread labels and
    # birth contours
    thread_min_max = np.zeros((unique_threads.shape[0], 2))
    # NB delta_nlive indexes are offset from points' indexes by 1 as we need an
    # element to represent the initial sampling of live points before any dead
    # points are created.
    # I.E. birth on step 1 corresponds to replacing dead point zero
    delta_nlive = np.zeros(samples.shape[0] + 1)
    for label in unique_threads:
        thread_inds = np.where(ns_run['thread_labels'] == label)[0]
        # Max is final logl in thread
        thread_min_max[label, 1] = ns_run['logl'][thread_inds[-1]]
        thread_start_birth_ind = birth_inds[thread_inds[0]]
        # delta nlive indexes are +1 from logl indexes to allow for initial
        # nlive (before first dead point)
        delta_nlive[thread_inds[-1] + 1] -= 1
        if thread_start_birth_ind == birth_inds[0]:
            # thread minimum is -inf as it starts by sampling from whole prior
            thread_min_max[label, 0] = -np.inf
            delta_nlive[0] += 1
        else:
            assert thread_start_birth_ind >= 0
            thread_min_max[label, 0] = ns_run['logl'][thread_start_birth_ind]
            delta_nlive[thread_start_birth_ind + 1] += 1
    ns_run['thread_min_max'] = thread_min_max
    ns_run['nlive_array'] = np.cumsum(delta_nlive)[:-1]
    return ns_run
[ "def", "process_samples_array", "(", "samples", ",", "*", "*", "kwargs", ")", ":", "samples", "=", "samples", "[", "np", ".", "argsort", "(", "samples", "[", ":", ",", "-", "2", "]", ")", "]", "ns_run", "=", "{", "}", "ns_run", "[", "'logl'", "]", "=", "samples", "[", ":", ",", "-", "2", "]", "ns_run", "[", "'theta'", "]", "=", "samples", "[", ":", ",", ":", "-", "2", "]", "birth_contours", "=", "samples", "[", ":", ",", "-", "1", "]", "# birth_contours, ns_run['theta'] = check_logls_unique(", "# samples[:, -2], samples[:, -1], samples[:, :-2])", "birth_inds", "=", "birth_inds_given_contours", "(", "birth_contours", ",", "ns_run", "[", "'logl'", "]", ",", "*", "*", "kwargs", ")", "ns_run", "[", "'thread_labels'", "]", "=", "threads_given_birth_inds", "(", "birth_inds", ")", "unique_threads", "=", "np", ".", "unique", "(", "ns_run", "[", "'thread_labels'", "]", ")", "assert", "np", ".", "array_equal", "(", "unique_threads", ",", "np", ".", "asarray", "(", "range", "(", "unique_threads", ".", "shape", "[", "0", "]", ")", ")", ")", "# Work out nlive_array and thread_min_max logls from thread labels and", "# birth contours", "thread_min_max", "=", "np", ".", "zeros", "(", "(", "unique_threads", ".", "shape", "[", "0", "]", ",", "2", ")", ")", "# NB delta_nlive indexes are offset from points' indexes by 1 as we need an", "# element to represent the initial sampling of live points before any dead", "# points are created.", "# I.E. birth on step 1 corresponds to replacing dead point zero", "delta_nlive", "=", "np", ".", "zeros", "(", "samples", ".", "shape", "[", "0", "]", "+", "1", ")", "for", "label", "in", "unique_threads", ":", "thread_inds", "=", "np", ".", "where", "(", "ns_run", "[", "'thread_labels'", "]", "==", "label", ")", "[", "0", "]", "# Max is final logl in thread", "thread_min_max", "[", "label", ",", "1", "]", "=", "ns_run", "[", "'logl'", "]", "[", "thread_inds", "[", "-", "1", "]", "]", "thread_start_birth_ind", "=", "birth_inds", "[", "thread_inds", "[", "0", "]", "]", "# delta nlive indexes are +1 from logl indexes to allow for initial", "# nlive (before first dead point)", "delta_nlive", "[", "thread_inds", "[", "-", "1", "]", "+", "1", "]", "-=", "1", "if", "thread_start_birth_ind", "==", "birth_inds", "[", "0", "]", ":", "# thread minimum is -inf as it starts by sampling from whole prior", "thread_min_max", "[", "label", ",", "0", "]", "=", "-", "np", ".", "inf", "delta_nlive", "[", "0", "]", "+=", "1", "else", ":", "assert", "thread_start_birth_ind", ">=", "0", "thread_min_max", "[", "label", ",", "0", "]", "=", "ns_run", "[", "'logl'", "]", "[", "thread_start_birth_ind", "]", "delta_nlive", "[", "thread_start_birth_ind", "+", "1", "]", "+=", "1", "ns_run", "[", "'thread_min_max'", "]", "=", "thread_min_max", "ns_run", "[", "'nlive_array'", "]", "=", "np", ".", "cumsum", "(", "delta_nlive", ")", "[", ":", "-", "1", "]", "return", "ns_run" ]
44.42623
18.295082
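A minimal sketch, assuming `samples` comes from a PolyChord dead-birth file (the path is hypothetical):

import numpy as np

samples = np.loadtxt('chains/run_dead-birth.txt')  # columns: params..., logl, birth_logl
run = process_samples_array(samples)
print(run['nlive_array'].max())  # peak number of simultaneous live points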
def is_excluded_for_sdesc(self, sdesc, is_tpl=False):
    """Check whether this host should have the passed service *description*
    be "excluded" or "not included".

    :param sdesc: service description
    :type sdesc: str
    :param is_tpl: True if service is template, otherwise False
    :type is_tpl: bool
    :return: True if service description excluded, otherwise False
    :rtype: bool
    """
    if not is_tpl and self.service_includes:
        return sdesc not in self.service_includes
    if self.service_excludes:
        return sdesc in self.service_excludes
    return False
[ "def", "is_excluded_for_sdesc", "(", "self", ",", "sdesc", ",", "is_tpl", "=", "False", ")", ":", "if", "not", "is_tpl", "and", "self", ".", "service_includes", ":", "return", "sdesc", "not", "in", "self", ".", "service_includes", "if", "self", ".", "service_excludes", ":", "return", "sdesc", "in", "self", ".", "service_excludes", "return", "False" ]
39.6875
14.125
def add_package(self, name):
    """
    Registers a single package

    :param name: (str) The effect package to add
    """
    name, cls_name = parse_package_string(name)

    if name in self.package_map:
        return

    package = EffectPackage(name)
    package.load()

    self.packages.append(package)
    self.package_map[package.name] = package

    # Load effect package dependencies
    self.polulate(package.effect_packages)
[ "def", "add_package", "(", "self", ",", "name", ")", ":", "name", ",", "cls_name", "=", "parse_package_string", "(", "name", ")", "if", "name", "in", "self", ".", "package_map", ":", "return", "package", "=", "EffectPackage", "(", "name", ")", "package", ".", "load", "(", ")", "self", ".", "packages", ".", "append", "(", "package", ")", "self", ".", "package_map", "[", "package", ".", "name", "]", "=", "package", "# Load effect package dependencies", "self", ".", "polulate", "(", "package", ".", "effect_packages", ")" ]
24.894737
16.157895
def logical_and(self, other):
    """logical_and(t) = self(t) and other(t)."""
    return self.operation(other, lambda x, y: int(x and y))
[ "def", "logical_and", "(", "self", ",", "other", ")", ":", "return", "self", ".", "operation", "(", "other", ",", "lambda", "x", ",", "y", ":", "int", "(", "x", "and", "y", ")", ")" ]
48
11.333333
def _load_managed_entries(self):
    """Loads scheduler managed entries. No start-up procedures are performed."""
    for process_name, process_entry in context.process_context.items():
        if isinstance(process_entry, ManagedProcessEntry):
            function = self.fire_managed_worker
        else:
            self.logger.warning('Skipping non-managed context entry {0} of type {1}.'
                                .format(process_name, process_entry.__class__.__name__))
            continue

        try:
            self._register_process_entry(process_entry, function)
        except Exception:
            self.logger.error('Managed Thread Handler {0} failed to start. Skipping it.'
                              .format(process_entry.key), exc_info=True)
[ "def", "_load_managed_entries", "(", "self", ")", ":", "for", "process_name", ",", "process_entry", "in", "context", ".", "process_context", ".", "items", "(", ")", ":", "if", "isinstance", "(", "process_entry", ",", "ManagedProcessEntry", ")", ":", "function", "=", "self", ".", "fire_managed_worker", "else", ":", "self", ".", "logger", ".", "warning", "(", "'Skipping non-managed context entry {0} of type {1}.'", ".", "format", "(", "process_name", ",", "process_entry", ".", "__class__", ".", "__name__", ")", ")", "continue", "try", ":", "self", ".", "_register_process_entry", "(", "process_entry", ",", "function", ")", "except", "Exception", ":", "self", ".", "logger", ".", "error", "(", "'Managed Thread Handler {0} failed to start. Skipping it.'", ".", "format", "(", "process_entry", ".", "key", ")", ",", "exc_info", "=", "True", ")" ]
53.933333
27.2
def focus0(self):
    '''
    First focus of the ellipse, Point class.
    '''
    f = Point(self.center)
    if self.xAxisIsMajor:
        f.x -= self.linearEccentricity
    else:
        f.y -= self.linearEccentricity
    return f
[ "def", "focus0", "(", "self", ")", ":", "f", "=", "Point", "(", "self", ".", "center", ")", "if", "self", ".", "xAxisIsMajor", ":", "f", ".", "x", "-=", "self", ".", "linearEccentricity", "else", ":", "f", ".", "y", "-=", "self", ".", "linearEccentricity", "return", "f" ]
21.583333
20.416667
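For reference, the `linearEccentricity` subtracted above is presumably the usual c = sqrt(a^2 - b^2) for semi-major axis a and semi-minor axis b, so this method returns the focus at center - c along the major axis, and a companion `focus1` would return the one at center + c.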
def _lml_arbitrary_scale(self):
    """
    Log of the marginal likelihood for arbitrary scale.

    Returns
    -------
    lml : float
        Log of the marginal likelihood.
    """
    s = self.scale
    D = self._D
    n = len(self._y)
    lml = -self._df * log2pi - n * log(s)
    lml -= sum(npsum(log(d)) for d in D)
    d = (mTQ - yTQ for (mTQ, yTQ) in zip(self._mTQ, self._yTQ))
    lml -= sum((i / j) @ i for (i, j) in zip(d, D)) / s
    return lml / 2
[ "def", "_lml_arbitrary_scale", "(", "self", ")", ":", "s", "=", "self", ".", "scale", "D", "=", "self", ".", "_D", "n", "=", "len", "(", "self", ".", "_y", ")", "lml", "=", "-", "self", ".", "_df", "*", "log2pi", "-", "n", "*", "log", "(", "s", ")", "lml", "-=", "sum", "(", "npsum", "(", "log", "(", "d", ")", ")", "for", "d", "in", "D", ")", "d", "=", "(", "mTQ", "-", "yTQ", "for", "(", "mTQ", ",", "yTQ", ")", "in", "zip", "(", "self", ".", "_mTQ", ",", "self", ".", "_yTQ", ")", ")", "lml", "-=", "sum", "(", "(", "i", "/", "j", ")", "@", "i", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "d", ",", "D", ")", ")", "/", "s", "return", "lml", "/", "2" ]
28.111111
17.222222
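Reading the code above, the quantity returned appears to be (my transcription; the d_i are the eigenvalue blocks in D, s is the scale, and df the degrees of freedom):

    \mathrm{lml} = \frac{1}{2}\Big[-\mathrm{df}\,\log 2\pi - n\log s - \sum_i \log d_i - \frac{1}{s}\sum_i \frac{\big((\mathbf{m}-\mathbf{y})^{\top}\mathbf{Q}\big)_i^2}{d_i}\Big]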
def process(self, context):
    import os
    from maya import cmds

    """Inject the current working file"""
    current_file = cmds.file(sceneName=True, query=True)

    # Maya returns forward-slashes by default
    normalised = os.path.normpath(current_file)

    context.set_data('currentFile', value=normalised)

    # For backwards compatibility
    context.set_data('current_file', value=normalised)
[ "def", "process", "(", "self", ",", "context", ")", ":", "import", "os", "from", "maya", "import", "cmds", "current_file", "=", "cmds", ".", "file", "(", "sceneName", "=", "True", ",", "query", "=", "True", ")", "# Maya returns forward-slashes by default", "normalised", "=", "os", ".", "path", ".", "normpath", "(", "current_file", ")", "context", ".", "set_data", "(", "'currentFile'", ",", "value", "=", "normalised", ")", "# For backwards compatibility", "context", ".", "set_data", "(", "'current_file'", ",", "value", "=", "normalised", ")" ]
30.714286
20.357143
def get_tree(cls, session=None, json=False, json_fields=None, query=None):
    """ This method generates a tree of the current node table in dict or
    json format. You can make a custom query with the attribute ``query``.
    By default it returns all nodes in the table.

    Args:
        session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session

    Kwargs:
        json (bool): if True return JSON jqTree format
        json_fields (function): append custom fields in JSON
        query (function): it takes :class:`sqlalchemy.orm.query.Query`
        object as an argument, and returns in a modified form

            ::

                def query(nodes):
                    return nodes.filter(node.__class__.tree_id.is_(node.tree_id))

                node.get_tree(session=DBSession, json=True, query=query)

    Example:

    * :mod:`sqlalchemy_mptt.tests.cases.get_tree.test_get_tree`
    * :mod:`sqlalchemy_mptt.tests.cases.get_tree.test_get_json_tree`
    * :mod:`sqlalchemy_mptt.tests.cases.get_tree.test_get_json_tree_with_custom_field`
    """  # noqa
    tree = []
    nodes_of_level = {}

    # handle custom query
    nodes = cls._base_query(session)
    if query:
        nodes = query(nodes)
    nodes = cls._base_order(nodes).all()

    # search minimal level of nodes.
    min_level = min([node.level for node in nodes] or [None])

    def get_node_id(node):
        return getattr(node, node.get_pk_name())

    for node in nodes:
        result = cls._node_to_dict(node, json, json_fields)
        parent_id = node.parent_id
        if node.level != min_level:  # for children
            # Find parent in the tree
            if parent_id not in nodes_of_level.keys():
                continue
            if 'children' not in nodes_of_level[parent_id]:
                nodes_of_level[parent_id]['children'] = []
            # Append node to parent
            nl = nodes_of_level[parent_id]['children']
            nl.append(result)
            nodes_of_level[get_node_id(node)] = nl[-1]
        else:  # for top level nodes
            tree.append(result)
            nodes_of_level[get_node_id(node)] = tree[-1]
    return tree
[ "def", "get_tree", "(", "cls", ",", "session", "=", "None", ",", "json", "=", "False", ",", "json_fields", "=", "None", ",", "query", "=", "None", ")", ":", "# noqa", "tree", "=", "[", "]", "nodes_of_level", "=", "{", "}", "# handle custom query", "nodes", "=", "cls", ".", "_base_query", "(", "session", ")", "if", "query", ":", "nodes", "=", "query", "(", "nodes", ")", "nodes", "=", "cls", ".", "_base_order", "(", "nodes", ")", ".", "all", "(", ")", "# search minimal level of nodes.", "min_level", "=", "min", "(", "[", "node", ".", "level", "for", "node", "in", "nodes", "]", "or", "[", "None", "]", ")", "def", "get_node_id", "(", "node", ")", ":", "return", "getattr", "(", "node", ",", "node", ".", "get_pk_name", "(", ")", ")", "for", "node", "in", "nodes", ":", "result", "=", "cls", ".", "_node_to_dict", "(", "node", ",", "json", ",", "json_fields", ")", "parent_id", "=", "node", ".", "parent_id", "if", "node", ".", "level", "!=", "min_level", ":", "# for cildren", "# Find parent in the tree", "if", "parent_id", "not", "in", "nodes_of_level", ".", "keys", "(", ")", ":", "continue", "if", "'children'", "not", "in", "nodes_of_level", "[", "parent_id", "]", ":", "nodes_of_level", "[", "parent_id", "]", "[", "'children'", "]", "=", "[", "]", "# Append node to parent", "nl", "=", "nodes_of_level", "[", "parent_id", "]", "[", "'children'", "]", "nl", ".", "append", "(", "result", ")", "nodes_of_level", "[", "get_node_id", "(", "node", ")", "]", "=", "nl", "[", "-", "1", "]", "else", ":", "# for top level nodes", "tree", ".", "append", "(", "result", ")", "nodes_of_level", "[", "get_node_id", "(", "node", ")", "]", "=", "tree", "[", "-", "1", "]", "return", "tree" ]
38.644068
21.932203
async def read(self, n=None):
    """Read all content
    """
    if self._streamed:
        return b''
    buffer = []
    async for body in self:
        buffer.append(body)
    return b''.join(buffer)
[ "async", "def", "read", "(", "self", ",", "n", "=", "None", ")", ":", "if", "self", ".", "_streamed", ":", "return", "b''", "buffer", "=", "[", "]", "async", "for", "body", "in", "self", ":", "buffer", ".", "append", "(", "body", ")", "return", "b''", ".", "join", "(", "buffer", ")" ]
25.222222
10.111111
def _load_nucmer_hits(self, infile):
    '''Returns two dictionaries:
    1) name => contig length.
    2) Second is dictionary of nucmer hits (ignoring self matches).
       contig name => list of hits'''
    hits = {}
    lengths = {}
    file_reader = pymummer.coords_file.reader(infile)
    for al in file_reader:
        if al.qry_name == al.ref_name:
            continue
        elif al.qry_name not in hits:
            hits[al.qry_name] = []
        hits[al.qry_name].append(al)
        lengths[al.qry_name] = al.qry_length
        lengths[al.ref_name] = al.ref_length
    return lengths, hits
[ "def", "_load_nucmer_hits", "(", "self", ",", "infile", ")", ":", "hits", "=", "{", "}", "lengths", "=", "{", "}", "file_reader", "=", "pymummer", ".", "coords_file", ".", "reader", "(", "infile", ")", "for", "al", "in", "file_reader", ":", "if", "al", ".", "qry_name", "==", "al", ".", "ref_name", ":", "continue", "elif", "al", ".", "qry_name", "not", "in", "hits", ":", "hits", "[", "al", ".", "qry_name", "]", "=", "[", "]", "hits", "[", "al", ".", "qry_name", "]", ".", "append", "(", "al", ")", "lengths", "[", "al", ".", "qry_name", "]", "=", "al", ".", "qry_length", "lengths", "[", "al", ".", "ref_name", "]", "=", "al", ".", "ref_length", "return", "lengths", ",", "hits" ]
38.647059
10.058824
def logout(self):
    """Logout from the backend

    :return: return True if logout is successful, otherwise False
    :rtype: bool
    """
    logger.debug("request backend logout")
    if not self.authenticated:
        logger.warning("Unnecessary logout ...")
        return True

    endpoint = 'logout'

    _ = self.get_response(method='POST', endpoint=endpoint)

    self.session.close()
    self.set_token(token=None)

    return True
[ "def", "logout", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"request backend logout\"", ")", "if", "not", "self", ".", "authenticated", ":", "logger", ".", "warning", "(", "\"Unnecessary logout ...\"", ")", "return", "True", "endpoint", "=", "'logout'", "_", "=", "self", ".", "get_response", "(", "method", "=", "'POST'", ",", "endpoint", "=", "endpoint", ")", "self", ".", "session", ".", "close", "(", ")", "self", ".", "set_token", "(", "token", "=", "None", ")", "return", "True" ]
24.3
19.9
def _SetSocketTimeouts(self):
    """Sets the timeouts for socket send and receive."""
    # Note that timeout must be an integer value. If timeout is a float
    # it appears that zmq will not enforce the timeout.
    timeout = int(self.timeout_seconds * 1000)
    receive_timeout = min(
        self._ZMQ_SOCKET_RECEIVE_TIMEOUT_MILLISECONDS, timeout)
    send_timeout = min(self._ZMQ_SOCKET_SEND_TIMEOUT_MILLISECONDS, timeout)

    self._zmq_socket.setsockopt(zmq.RCVTIMEO, receive_timeout)
    self._zmq_socket.setsockopt(zmq.SNDTIMEO, send_timeout)
[ "def", "_SetSocketTimeouts", "(", "self", ")", ":", "# Note that timeout must be an integer value. If timeout is a float", "# it appears that zmq will not enforce the timeout.", "timeout", "=", "int", "(", "self", ".", "timeout_seconds", "*", "1000", ")", "receive_timeout", "=", "min", "(", "self", ".", "_ZMQ_SOCKET_RECEIVE_TIMEOUT_MILLISECONDS", ",", "timeout", ")", "send_timeout", "=", "min", "(", "self", ".", "_ZMQ_SOCKET_SEND_TIMEOUT_MILLISECONDS", ",", "timeout", ")", "self", ".", "_zmq_socket", ".", "setsockopt", "(", "zmq", ".", "RCVTIMEO", ",", "receive_timeout", ")", "self", ".", "_zmq_socket", ".", "setsockopt", "(", "zmq", ".", "SNDTIMEO", ",", "send_timeout", ")" ]
49.272727
19.636364
def getbit(self, key, offset):
    """Returns the bit value at offset in the string value stored at key.

    :raises TypeError: if offset is not int
    :raises ValueError: if offset is less than 0
    """
    if not isinstance(offset, int):
        raise TypeError("offset argument must be int")
    if offset < 0:
        raise ValueError("offset must be greater equal 0")
    return self.execute(b'GETBIT', key, offset)
[ "def", "getbit", "(", "self", ",", "key", ",", "offset", ")", ":", "if", "not", "isinstance", "(", "offset", ",", "int", ")", ":", "raise", "TypeError", "(", "\"offset argument must be int\"", ")", "if", "offset", "<", "0", ":", "raise", "ValueError", "(", "\"offset must be greater equal 0\"", ")", "return", "self", ".", "execute", "(", "b'GETBIT'", ",", "key", ",", "offset", ")" ]
40.818182
12.636364
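A minimal sketch against the command above, assuming an aioredis-style client `redis` (setup omitted); note that Redis numbers bits from the most significant bit of the first byte:

await redis.set('flags', b'\x80')     # first byte = 0b10000000
bit = await redis.getbit('flags', 0)  # -> 1 (MSB of the first byte)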
def get_experiments(redis, active=True):
    """Gets the full list of experiments"""
    key = ACTIVE_EXPERIMENTS_REDIS_KEY if active else ARCHIVED_EXPERIMENTS_REDIS_KEY
    return [Experiment(redis, escape.to_unicode(name)) for name in redis.smembers(key)]
[ "def", "get_experiments", "(", "redis", ",", "active", "=", "True", ")", ":", "key", "=", "ACTIVE_EXPERIMENTS_REDIS_KEY", "if", "active", "else", "ARCHIVED_EXPERIMENTS_REDIS_KEY", "return", "[", "Experiment", "(", "redis", ",", "escape", ".", "to_unicode", "(", "name", ")", ")", "for", "name", "in", "redis", ".", "smembers", "(", "key", ")", "]" ]
50.8
26.2
def bam_to_fastq_pair(in_file, target_region, pair):
    """Generator to convert BAM files into name, seq, qual in a region.
    """
    space, start, end = target_region
    bam_file = pysam.Samfile(in_file, "rb")
    for read in bam_file:
        if (not read.is_unmapped and not read.mate_is_unmapped
                and bam_file.getrname(read.tid) == space
                and bam_file.getrname(read.mrnm) == space
                and read.pos >= start and read.pos <= end
                and read.mpos >= start and read.mpos <= end
                and not read.is_secondary
                and read.is_paired and getattr(read, "is_read%s" % pair)):
            seq = Seq.Seq(read.seq)
            qual = list(read.qual)
            if read.is_reverse:
                seq = seq.reverse_complement()
                qual.reverse()
            yield read.qname, str(seq), "".join(qual)
[ "def", "bam_to_fastq_pair", "(", "in_file", ",", "target_region", ",", "pair", ")", ":", "space", ",", "start", ",", "end", "=", "target_region", "bam_file", "=", "pysam", ".", "Samfile", "(", "in_file", ",", "\"rb\"", ")", "for", "read", "in", "bam_file", ":", "if", "(", "not", "read", ".", "is_unmapped", "and", "not", "read", ".", "mate_is_unmapped", "and", "bam_file", ".", "getrname", "(", "read", ".", "tid", ")", "==", "space", "and", "bam_file", ".", "getrname", "(", "read", ".", "mrnm", ")", "==", "space", "and", "read", ".", "pos", ">=", "start", "and", "read", ".", "pos", "<=", "end", "and", "read", ".", "mpos", ">=", "start", "and", "read", ".", "mpos", "<=", "end", "and", "not", "read", ".", "is_secondary", "and", "read", ".", "is_paired", "and", "getattr", "(", "read", ",", "\"is_read%s\"", "%", "pair", ")", ")", ":", "seq", "=", "Seq", ".", "Seq", "(", "read", ".", "seq", ")", "qual", "=", "list", "(", "read", ".", "qual", ")", "if", "read", ".", "is_reverse", ":", "seq", "=", "seq", ".", "reverse_complement", "(", ")", "qual", ".", "reverse", "(", ")", "yield", "read", ".", "qname", ",", "str", "(", "seq", ")", ",", "\"\"", ".", "join", "(", "qual", ")" ]
45.789474
10.947368
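A minimal sketch emitting FASTQ records for read 1 of each pair in a region (the file name is hypothetical):

region = ('chr1', 10000, 20000)  # (space, start, end)
for name, seq, qual in bam_to_fastq_pair('aln.bam', region, 1):
    print('@%s\n%s\n+\n%s' % (name, seq, qual))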
def new_array(state, element_type, size):
    """
    Allocates a new array in memory and returns the reference to the base.
    """
    size_bounded = SimSootExpr_NewArray._bound_array_size(state, size)
    # return the reference of the array base
    # => elements getting lazy initialized in the javavm memory
    return SimSootValue_ArrayBaseRef(heap_alloc_id=state.javavm_memory.get_new_uuid(),
                                     element_type=element_type,
                                     size=size_bounded)
[ "def", "new_array", "(", "state", ",", "element_type", ",", "size", ")", ":", "size_bounded", "=", "SimSootExpr_NewArray", ".", "_bound_array_size", "(", "state", ",", "size", ")", "# return the reference of the array base", "# => elements getting lazy initialized in the javavm memory", "return", "SimSootValue_ArrayBaseRef", "(", "heap_alloc_id", "=", "state", ".", "javavm_memory", ".", "get_new_uuid", "(", ")", ",", "element_type", "=", "element_type", ",", "size", "=", "size_bounded", ")" ]
54.6
20.4
def body_class_tag(context):
    """
    Return CSS "class" attributes for <body>.

    Allows providing a CSS namespace using the urlpatterns namespace
    (as ``.ns-*``) and view name (as ``.vw-*``).

    Usage: ``{% body_class %}``
    Example: ``ns-my-app vw-my-view`` or ``ns-contacts vw-list``

    Requires: ``apps.core.middlewares.CoreMiddleware``.
    """
    request = context.get("request")
    if not hasattr(request, "ROUTE"):
        return ""

    css_classes = []

    namespace = request.ROUTE["namespace"]
    if namespace:
        namespace = RE_CLEAN_CSS_NAME.sub("-", namespace.lower())
        css_classes.append("ns-{}".format(namespace))

    # Use ``url_name`` as ``view_name`` includes the namespace.
    view = request.ROUTE["url_name"]
    if view:
        view = RE_CLEAN_CSS_NAME.sub("-", view.lower())
        css_classes.append("vw-{}".format(view))

    return " ".join(css_classes)
[ "def", "body_class_tag", "(", "context", ")", ":", "request", "=", "context", ".", "get", "(", "\"request\"", ")", "if", "not", "hasattr", "(", "request", ",", "\"ROUTE\"", ")", ":", "return", "\"\"", "css_classes", "=", "[", "]", "namespace", "=", "request", ".", "ROUTE", "[", "\"namespace\"", "]", "if", "namespace", ":", "namespace", "=", "RE_CLEAN_CSS_NAME", ".", "sub", "(", "\"-\"", ",", "namespace", ".", "lower", "(", ")", ")", "css_classes", ".", "append", "(", "\"ns-{}\"", ".", "format", "(", "namespace", ")", ")", "view", "=", "request", ".", "ROUTE", "[", "\"url_name\"", "]", "# Use ``url_name`` as ``view_name`` includes the namespace.", "if", "view", ":", "view", "=", "RE_CLEAN_CSS_NAME", ".", "sub", "(", "\"-\"", ",", "view", ".", "lower", "(", ")", ")", "css_classes", ".", "append", "(", "\"vw-{}\"", ".", "format", "(", "view", ")", ")", "return", "\" \"", ".", "join", "(", "css_classes", ")" ]
32.518519
22.37037
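Assuming the function above is registered under the name `body_class` (as the docstring's usage line suggests), a base template would invoke it as `<body class="{% body_class %}">`.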
def rotate(curve, degs, origin=None):
    """Returns curve rotated by `degs` degrees (CCW) around the point
    `origin` (a complex number). By default origin is either
    `curve.point(0.5)`, or in the case that curve is an Arc object,
    `origin` defaults to `curve.center`.
    """
    def transform(z):
        return exp(1j*radians(degs))*(z - origin) + origin

    if origin is None:
        if isinstance(curve, Arc):
            origin = curve.center
        else:
            origin = curve.point(0.5)

    if isinstance(curve, Path):
        return Path(*[rotate(seg, degs, origin=origin) for seg in curve])
    elif is_bezier_segment(curve):
        return bpoints2bezier([transform(bpt) for bpt in curve.bpoints()])
    elif isinstance(curve, Arc):
        new_start = transform(curve.start)
        new_end = transform(curve.end)
        new_rotation = curve.rotation + degs
        return Arc(new_start, radius=curve.radius, rotation=new_rotation,
                   large_arc=curve.large_arc, sweep=curve.sweep, end=new_end)
    else:
        raise TypeError("Input `curve` should be a Path, Line, "
                        "QuadraticBezier, CubicBezier, or Arc object.")
[ "def", "rotate", "(", "curve", ",", "degs", ",", "origin", "=", "None", ")", ":", "def", "transform", "(", "z", ")", ":", "return", "exp", "(", "1j", "*", "radians", "(", "degs", ")", ")", "*", "(", "z", "-", "origin", ")", "+", "origin", "if", "origin", "is", "None", ":", "if", "isinstance", "(", "curve", ",", "Arc", ")", ":", "origin", "=", "curve", ".", "center", "else", ":", "origin", "=", "curve", ".", "point", "(", "0.5", ")", "if", "isinstance", "(", "curve", ",", "Path", ")", ":", "return", "Path", "(", "*", "[", "rotate", "(", "seg", ",", "degs", ",", "origin", "=", "origin", ")", "for", "seg", "in", "curve", "]", ")", "elif", "is_bezier_segment", "(", "curve", ")", ":", "return", "bpoints2bezier", "(", "[", "transform", "(", "bpt", ")", "for", "bpt", "in", "curve", ".", "bpoints", "(", ")", "]", ")", "elif", "isinstance", "(", "curve", ",", "Arc", ")", ":", "new_start", "=", "transform", "(", "curve", ".", "start", ")", "new_end", "=", "transform", "(", "curve", ".", "end", ")", "new_rotation", "=", "curve", ".", "rotation", "+", "degs", "return", "Arc", "(", "new_start", ",", "radius", "=", "curve", ".", "radius", ",", "rotation", "=", "new_rotation", ",", "large_arc", "=", "curve", ".", "large_arc", ",", "sweep", "=", "curve", ".", "sweep", ",", "end", "=", "new_end", ")", "else", ":", "raise", "TypeError", "(", "\"Input `curve` should be a Path, Line, \"", "\"QuadraticBezier, CubicBezier, or Arc object.\"", ")" ]
42.888889
18.925926
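A minimal sketch, assuming svgpathtools-style segment classes (the TypeError above names them):

from svgpathtools import Line  # assumed import location

seg = Line(0 + 0j, 1 + 0j)
quarter = rotate(seg, 90)                      # rotates about seg.point(0.5) by default
about_origin = rotate(seg, 90, origin=0 + 0j)  # rotates about a chosen point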
def dagify_min_edge(g):
    """Input a graph and output a DAG.

    The heuristic is to reverse the edge with the lowest score of the cycle
    if possible, else remove it.

    Args:
        g (networkx.DiGraph): Graph to modify to output a DAG

    Returns:
        networkx.DiGraph: DAG made out of the input graph.
    """
    while not nx.is_directed_acyclic_graph(g):
        cycle = next(nx.simple_cycles(g))
        scores = []
        edges = []
        for i, j in zip(cycle[:-1], cycle[1:]):
            edges.append((i, j))
            scores.append(g[i][j]['weight'])

        i, j = edges[scores.index(min(scores))]
        gc = deepcopy(g)
        gc.remove_edge(i, j)
        gc.add_edge(j, i)

        # Reverse the edge only if doing so breaks at least one cycle;
        # the original edge is removed either way.
        if len(list(nx.simple_cycles(gc))) < len(list(nx.simple_cycles(g))):
            g.add_edge(j, i, weight=min(scores))
        g.remove_edge(i, j)
    return g
[ "def", "dagify_min_edge", "(", "g", ")", ":", "while", "not", "nx", ".", "is_directed_acyclic_graph", "(", "g", ")", ":", "cycle", "=", "next", "(", "nx", ".", "simple_cycles", "(", "g", ")", ")", "scores", "=", "[", "]", "edges", "=", "[", "]", "for", "i", ",", "j", "in", "zip", "(", "cycle", "[", ":", "1", "]", ",", "cycle", "[", ":", "1", "]", ")", ":", "edges", ".", "append", "(", "(", "i", ",", "j", ")", ")", "scores", ".", "append", "(", "g", "[", "i", "]", "[", "j", "]", "[", "'weight'", "]", ")", "i", ",", "j", "=", "edges", "[", "scores", ".", "index", "(", "min", "(", "scores", ")", ")", "]", "gc", "=", "deepcopy", "(", "g", ")", "gc", ".", "remove_edge", "(", "i", ",", "j", ")", "gc", ".", "add_edge", "(", "j", ",", "i", ")", "if", "len", "(", "list", "(", "nx", ".", "simple_cycles", "(", "gc", ")", ")", ")", "<", "len", "(", "list", "(", "nx", ".", "simple_cycles", "(", "g", ")", ")", ")", ":", "g", ".", "add_edge", "(", "j", ",", "i", ",", "weight", "=", "min", "(", "scores", ")", ")", "g", ".", "remove_edge", "(", "i", ",", "j", ")", "return", "g" ]
29.241379
19.344828
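A minimal sketch on a two-node cycle (networkx assumed, as in the function body):

import networkx as nx

g = nx.DiGraph()
g.add_edge('a', 'b', weight=1.0)
g.add_edge('b', 'a', weight=0.2)
dag = dagify_min_edge(g)                  # reverses or removes an edge per cycle
assert nx.is_directed_acyclic_graph(dag)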
def derive_ordering(self):
    """
    Returns what field should be used for ordering (using a prepended '-' to
    indicate descending sort).

    If the default order of the queryset should be used, returns None
    """
    if '_order' in self.request.GET:
        return self.request.GET['_order']
    elif self.default_order:
        return self.default_order
    else:
        return None
[ "def", "derive_ordering", "(", "self", ")", ":", "if", "'_order'", "in", "self", ".", "request", ".", "GET", ":", "return", "self", ".", "request", ".", "GET", "[", "'_order'", "]", "elif", "self", ".", "default_order", ":", "return", "self", ".", "default_order", "else", ":", "return", "None" ]
34.833333
17.833333
def list(self, filter=None, type=None, sort=None, limit=None, page=None):  # pylint: disable=redefined-builtin
    """Get a list of configs.

    :param filter: (optional) Filters to apply as a string list.
    :param type: (optional) `union` or `inter` as string.
    :param sort: (optional) Sort fields to apply as string list.
    :param limit: (optional) Limit returned list length.
    :param page: (optional) Page to return.
    :return: :class:`configs.Page <configs.Page>` object
    """
    schema = self.LIST_SCHEMA
    resp = self.service.list(self.base, filter, type, sort, limit, page)
    cs, l = self.service.decode(schema, resp, many=True, links=True)
    return Page(cs, l)
[ "def", "list", "(", "self", ",", "filter", "=", "None", ",", "type", "=", "None", ",", "sort", "=", "None", ",", "limit", "=", "None", ",", "page", "=", "None", ")", ":", "# pylint: disable=redefined-builtin", "schema", "=", "self", ".", "LIST_SCHEMA", "resp", "=", "self", ".", "service", ".", "list", "(", "self", ".", "base", ",", "filter", ",", "type", ",", "sort", ",", "limit", ",", "page", ")", "cs", ",", "l", "=", "self", ".", "service", ".", "decode", "(", "schema", ",", "resp", ",", "many", "=", "True", ",", "links", "=", "True", ")", "return", "Page", "(", "cs", ",", "l", ")" ]
51.714286
23
def displayOutdated(modules, dependency_specs, use_colours):
    ''' print information about outdated modules,
        return 0 if there is nothing to be done and nonzero otherwise
    '''
    if use_colours:
        DIM    = colorama.Style.DIM       #pylint: disable=no-member
        NORMAL = colorama.Style.NORMAL    #pylint: disable=no-member
        BRIGHT = colorama.Style.BRIGHT    #pylint: disable=no-member
        YELLOW = colorama.Fore.YELLOW     #pylint: disable=no-member
        RED    = colorama.Fore.RED        #pylint: disable=no-member
        GREEN  = colorama.Fore.GREEN      #pylint: disable=no-member
        RESET  = colorama.Style.RESET_ALL #pylint: disable=no-member
    else:
        DIM = BRIGHT = YELLOW = RED = GREEN = RESET = u''

    status = 0

    # access, , get components, internal
    from yotta.lib import access
    from yotta.lib import access_common
    # sourceparse, , parse version source urls, internal
    from yotta.lib import sourceparse

    for name, m in modules.items():
        if m.isTestDependency():
            continue
        try:
            latest_v = access.latestSuitableVersion(name, '*', registry='modules', quiet=True)
        except access_common.Unavailable as e:
            latest_v = None
        if not m:
            m_version = u' ' + RESET + BRIGHT + RED + u"missing" + RESET
        else:
            m_version = DIM + u'@%s' % (m.version)
        if not latest_v:
            print(u'%s%s%s%s not available from the registry%s' % (RED, name, m_version, NORMAL, RESET))
            status = 2
            continue
        elif not m or m.version < latest_v:
            update_prevented_by = ''
            if m:
                specs_preventing_update = [
                    x for x in dependency_specs
                    if x.name == name and not
                       sourceparse.parseSourceURL(x.nonShrinkwrappedVersionReq()).semanticSpecMatches(latest_v)
                ]
                shrinkwrap_prevents_update = [
                    x for x in dependency_specs
                    if x.name == name and x.isShrinkwrapped() and not
                       sourceparse.parseSourceURL(x.versionReq()).semanticSpecMatches(latest_v)
                ]
                if len(specs_preventing_update):
                    update_prevented_by = ' (update prevented by specifications: %s)' % (
                        ', '.join(['%s from %s' % (x.version_req, x.specifying_module) for x in specs_preventing_update])
                    )
                if len(shrinkwrap_prevents_update):
                    update_prevented_by += ' yotta-shrinkwrap.json prevents update'
                if m.version.major() < latest_v.major():
                    # major versions being outdated might be deliberate, so not
                    # that bad:
                    colour = GREEN
                elif m.version.minor() < latest_v.minor():
                    # minor outdated versions is moderately bad
                    colour = YELLOW
                else:
                    # patch-outdated versions is really bad, because there should
                    # be no reason not to update:
                    colour = RED
            else:
                colour = RED
            print(u'%s%s%s latest: %s%s%s%s' % (name, m_version, RESET, colour, latest_v.version, update_prevented_by, RESET))
            if not status:
                status = 1
    return status
[ "def", "displayOutdated", "(", "modules", ",", "dependency_specs", ",", "use_colours", ")", ":", "if", "use_colours", ":", "DIM", "=", "colorama", ".", "Style", ".", "DIM", "#pylint: disable=no-member", "NORMAL", "=", "colorama", ".", "Style", ".", "NORMAL", "#pylint: disable=no-member", "BRIGHT", "=", "colorama", ".", "Style", ".", "BRIGHT", "#pylint: disable=no-member", "YELLOW", "=", "colorama", ".", "Fore", ".", "YELLOW", "#pylint: disable=no-member", "RED", "=", "colorama", ".", "Fore", ".", "RED", "#pylint: disable=no-member", "GREEN", "=", "colorama", ".", "Fore", ".", "GREEN", "#pylint: disable=no-member", "RESET", "=", "colorama", ".", "Style", ".", "RESET_ALL", "#pylint: disable=no-member", "else", ":", "DIM", "=", "BRIGHT", "=", "YELLOW", "=", "RED", "=", "GREEN", "=", "RESET", "=", "u''", "status", "=", "0", "# access, , get components, internal", "from", "yotta", ".", "lib", "import", "access", "from", "yotta", ".", "lib", "import", "access_common", "# sourceparse, , parse version source urls, internal", "from", "yotta", ".", "lib", "import", "sourceparse", "for", "name", ",", "m", "in", "modules", ".", "items", "(", ")", ":", "if", "m", ".", "isTestDependency", "(", ")", ":", "continue", "try", ":", "latest_v", "=", "access", ".", "latestSuitableVersion", "(", "name", ",", "'*'", ",", "registry", "=", "'modules'", ",", "quiet", "=", "True", ")", "except", "access_common", ".", "Unavailable", "as", "e", ":", "latest_v", "=", "None", "if", "not", "m", ":", "m_version", "=", "u' '", "+", "RESET", "+", "BRIGHT", "+", "RED", "+", "u\"missing\"", "+", "RESET", "else", ":", "m_version", "=", "DIM", "+", "u'@%s'", "%", "(", "m", ".", "version", ")", "if", "not", "latest_v", ":", "print", "(", "u'%s%s%s%s not available from the registry%s'", "%", "(", "RED", ",", "name", ",", "m_version", ",", "NORMAL", ",", "RESET", ")", ")", "status", "=", "2", "continue", "elif", "not", "m", "or", "m", ".", "version", "<", "latest_v", ":", "update_prevented_by", "=", "''", "if", "m", ":", "specs_preventing_update", "=", "[", "x", "for", "x", "in", "dependency_specs", "if", "x", ".", "name", "==", "name", "and", "not", "sourceparse", ".", "parseSourceURL", "(", "x", ".", "nonShrinkwrappedVersionReq", "(", ")", ")", ".", "semanticSpecMatches", "(", "latest_v", ")", "]", "shrinkwrap_prevents_update", "=", "[", "x", "for", "x", "in", "dependency_specs", "if", "x", ".", "name", "==", "name", "and", "x", ".", "isShrinkwrapped", "(", ")", "and", "not", "sourceparse", ".", "parseSourceURL", "(", "x", ".", "versionReq", "(", ")", ")", ".", "semanticSpecMatches", "(", "latest_v", ")", "]", "if", "len", "(", "specs_preventing_update", ")", ":", "update_prevented_by", "=", "' (update prevented by specifications: %s)'", "%", "(", "', '", ".", "join", "(", "[", "'%s from %s'", "%", "(", "x", ".", "version_req", ",", "x", ".", "specifying_module", ")", "for", "x", "in", "specs_preventing_update", "]", ")", ")", "if", "len", "(", "shrinkwrap_prevents_update", ")", ":", "update_prevented_by", "+=", "' yotta-shrinkwrap.json prevents update'", "if", "m", ".", "version", ".", "major", "(", ")", "<", "latest_v", ".", "major", "(", ")", ":", "# major versions being outdated might be deliberate, so not", "# that bad:", "colour", "=", "GREEN", "elif", "m", ".", "version", ".", "minor", "(", ")", "<", "latest_v", ".", "minor", "(", ")", ":", "# minor outdated versions is moderately bad", "colour", "=", "YELLOW", "else", ":", "# patch-outdated versions is really bad, because there should", "# be no reason not to update:", 
"colour", "=", "RED", "else", ":", "colour", "=", "RED", "print", "(", "u'%s%s%s latest: %s%s%s%s'", "%", "(", "name", ",", "m_version", ",", "RESET", ",", "colour", ",", "latest_v", ".", "version", ",", "update_prevented_by", ",", "RESET", ")", ")", "if", "not", "status", ":", "status", "=", "1", "return", "status" ]
45.026667
23.32
def create_sync_ops(self, host_device):
    """Create an assignment operation for each weight on all devices.
    The weight is assigned the value of the copy on the `host_device`.
    """
    sync_ops = []
    host_params = self.params_device[host_device]
    for device, params in (self.params_device).iteritems():
        if device == host_device:
            continue
        for k in self.params_names:
            if isinstance(params[k], tf.Variable):
                sync_ops += [tf.assign(params[k], host_params[k])]
    return sync_ops
[ "def", "create_sync_ops", "(", "self", ",", "host_device", ")", ":", "sync_ops", "=", "[", "]", "host_params", "=", "self", ".", "params_device", "[", "host_device", "]", "for", "device", ",", "params", "in", "(", "self", ".", "params_device", ")", ".", "iteritems", "(", ")", ":", "if", "device", "==", "host_device", ":", "continue", "for", "k", "in", "self", ".", "params_names", ":", "if", "isinstance", "(", "params", "[", "k", "]", ",", "tf", ".", "Variable", ")", ":", "sync_ops", "+=", "[", "tf", ".", "assign", "(", "params", "[", "k", "]", ",", "host_params", "[", "k", "]", ")", "]", "return", "sync_ops" ]
39.615385
12.692308
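A framework-free sketch of the broadcast pattern implemented by create_sync_ops above: every non-host replica's parameters are overwritten with the host copy. The dict layout mirrors params_device/params_names from the snippet, and plain assignment stands in for tf.assign, so no TensorFlow session is needed to run it.

# Minimal sketch: plain assignment stands in for tf.assign().
params_device = {
    "/gpu:0": {"w": 1.0, "b": 0.5},   # host copy
    "/gpu:1": {"w": 9.0, "b": 9.0},   # stale replica
}
params_names = ["w", "b"]
host_device = "/gpu:0"

host_params = params_device[host_device]
for device, params in params_device.items():
    if device == host_device:
        continue
    for k in params_names:
        params[k] = host_params[k]    # in TF this would be an assign op

assert params_device["/gpu:1"] == {"w": 1.0, "b": 0.5}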
def get_id_fields(self): """ Called to return a list of fields consisting of, at minimum, the PK field name. The output of this method is used to construct a Prefetch object with a .only() queryset when this field is not being sideloaded but we need to return a list of IDs. """ model = self.get_model() out = [model._meta.pk.name] # get PK field name # If this is being called, it means it # is a many-relation to its parent. # Django wants the FK to the parent, # but since accurately inferring the FK # pointing back to the parent is less than trivial, # we will just pull all ID fields. # TODO: We also might need to return all non-nullable fields, # or else it is possible Django will issue another request. for field in model._meta.fields: if isinstance(field, models.ForeignKey): out.append(field.name + '_id') return out
[ "def", "get_id_fields", "(", "self", ")", ":", "model", "=", "self", ".", "get_model", "(", ")", "out", "=", "[", "model", ".", "_meta", ".", "pk", ".", "name", "]", "# get PK field name", "# If this is being called, it means it", "# is a many-relation to its parent.", "# Django wants the FK to the parent,", "# but since accurately inferring the FK", "# pointing back to the parent is less than trivial,", "# we will just pull all ID fields.", "# TODO: We also might need to return all non-nullable fields,", "# or else it is possible Django will issue another request.", "for", "field", "in", "model", ".", "_meta", ".", "fields", ":", "if", "isinstance", "(", "field", ",", "models", ".", "ForeignKey", ")", ":", "out", ".", "append", "(", "field", ".", "name", "+", "'_id'", ")", "return", "out" ]
39.68
16.16
def append_text(self, content): """ Append text nodes into L{Content.data} Here is where the I{true} type is used to translate the value into the proper python type. @param content: The current content being unmarshalled. @type content: L{Content} """ Core.append_text(self, content) known = self.resolver.top().resolved content.text = self.translated(content.text, known)
[ "def", "append_text", "(", "self", ",", "content", ")", ":", "Core", ".", "append_text", "(", "self", ",", "content", ")", "known", "=", "self", ".", "resolver", ".", "top", "(", ")", ".", "resolved", "content", ".", "text", "=", "self", ".", "translated", "(", "content", ".", "text", ",", "known", ")" ]
40.181818
9.272727
def _handle_products(request, category, products, prefix): ''' Handles a products list form in the given request. Returns the form instance, the discounts applicable to this form, and whether the contents were handled. ''' current_cart = CartController.for_user(request.user) ProductsForm = forms.ProductsForm(category, products) # Create initial data for each of products in category items = commerce.ProductItem.objects.filter( product__in=products, cart=current_cart.cart, ).select_related("product") quantities = [] seen = set() for item in items: quantities.append((item.product, item.quantity)) seen.add(item.product) zeros = set(products) - seen for product in zeros: quantities.append((product, 0)) products_form = ProductsForm( request.POST or None, product_quantities=quantities, prefix=prefix, ) if request.method == "POST" and products_form.is_valid(): if products_form.has_changed(): _set_quantities_from_products_form(products_form, current_cart) # If category is required, the user must have at least one # in an active+valid cart if category.required: carts = commerce.Cart.objects.filter(user=request.user) items = commerce.ProductItem.objects.filter( product__category=category, cart=carts, ) if len(items) == 0: products_form.add_error( None, "You must have at least one item from this category", ) handled = False if products_form.errors else True # Making this a function to lazily evaluate when it's displayed # in templates. discounts = util.lazy( DiscountController.available_discounts, request.user, [], products, ) return products_form, discounts, handled
[ "def", "_handle_products", "(", "request", ",", "category", ",", "products", ",", "prefix", ")", ":", "current_cart", "=", "CartController", ".", "for_user", "(", "request", ".", "user", ")", "ProductsForm", "=", "forms", ".", "ProductsForm", "(", "category", ",", "products", ")", "# Create initial data for each of products in category", "items", "=", "commerce", ".", "ProductItem", ".", "objects", ".", "filter", "(", "product__in", "=", "products", ",", "cart", "=", "current_cart", ".", "cart", ",", ")", ".", "select_related", "(", "\"product\"", ")", "quantities", "=", "[", "]", "seen", "=", "set", "(", ")", "for", "item", "in", "items", ":", "quantities", ".", "append", "(", "(", "item", ".", "product", ",", "item", ".", "quantity", ")", ")", "seen", ".", "add", "(", "item", ".", "product", ")", "zeros", "=", "set", "(", "products", ")", "-", "seen", "for", "product", "in", "zeros", ":", "quantities", ".", "append", "(", "(", "product", ",", "0", ")", ")", "products_form", "=", "ProductsForm", "(", "request", ".", "POST", "or", "None", ",", "product_quantities", "=", "quantities", ",", "prefix", "=", "prefix", ",", ")", "if", "request", ".", "method", "==", "\"POST\"", "and", "products_form", ".", "is_valid", "(", ")", ":", "if", "products_form", ".", "has_changed", "(", ")", ":", "_set_quantities_from_products_form", "(", "products_form", ",", "current_cart", ")", "# If category is required, the user must have at least one", "# in an active+valid cart", "if", "category", ".", "required", ":", "carts", "=", "commerce", ".", "Cart", ".", "objects", ".", "filter", "(", "user", "=", "request", ".", "user", ")", "items", "=", "commerce", ".", "ProductItem", ".", "objects", ".", "filter", "(", "product__category", "=", "category", ",", "cart", "=", "carts", ",", ")", "if", "len", "(", "items", ")", "==", "0", ":", "products_form", ".", "add_error", "(", "None", ",", "\"You must have at least one item from this category\"", ",", ")", "handled", "=", "False", "if", "products_form", ".", "errors", "else", "True", "# Making this a function to lazily evaluate when it's displayed", "# in templates.", "discounts", "=", "util", ".", "lazy", "(", "DiscountController", ".", "available_discounts", ",", "request", ".", "user", ",", "[", "]", ",", "products", ",", ")", "return", "products_form", ",", "discounts", ",", "handled" ]
31.344262
20.721311
def build_c(self): """Calculates the total attenuation from the total absorption and total scattering c = a + b """ lg.info('Building total attenuation C') self.c = self.a + self.b
[ "def", "build_c", "(", "self", ")", ":", "lg", ".", "info", "(", "'Building total attenuation C'", ")", "self", ".", "c", "=", "self", ".", "a", "+", "self", ".", "b" ]
30.714286
14.285714
def get_file_url(self, fid, public=None): """ Get url for the file :param string fid: File ID :param boolean public: public or internal url :rtype: string """ try: volume_id, rest = fid.strip().split(",") except ValueError: raise BadFidFormat( "fid must be in format: <volume_id>,<file_name_hash>") file_location = self.get_file_location(volume_id) if public is None: public = self.use_public_url volume_url = file_location.public_url if public else file_location.url url = "http://{volume_url}/{fid}".format( volume_url=volume_url, fid=fid) return url
[ "def", "get_file_url", "(", "self", ",", "fid", ",", "public", "=", "None", ")", ":", "try", ":", "volume_id", ",", "rest", "=", "fid", ".", "strip", "(", ")", ".", "split", "(", "\",\"", ")", "except", "ValueError", ":", "raise", "BadFidFormat", "(", "\"fid must be in format: <volume_id>,<file_name_hash>\"", ")", "file_location", "=", "self", ".", "get_file_location", "(", "volume_id", ")", "if", "public", "is", "None", ":", "public", "=", "self", ".", "use_public_url", "volume_url", "=", "file_location", ".", "public_url", "if", "public", "else", "file_location", ".", "url", "url", "=", "\"http://{volume_url}/{fid}\"", ".", "format", "(", "volume_url", "=", "volume_url", ",", "fid", "=", "fid", ")", "return", "url" ]
35.1
14.3
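To make the fid format concrete, here is the split performed above on a hypothetical SeaweedFS-style fid; the volume host name is made up for illustration, only the parsing mirrors the snippet.

fid = "3,01637037d6"
volume_id, rest = fid.strip().split(",")   # raises ValueError without exactly one comma
url = "http://{volume_url}/{fid}".format(volume_url="volume-host:8080", fid=fid)
print(volume_id)  # 3
print(url)        # http://volume-host:8080/3,01637037d6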
def nearest_point(query, root_id, get_properties, dist_fun=euclidean_dist):
    """Find the point in the tree that minimizes the distance to the query.

    This method implements the nearest_point query for any structure
    implementing a kd-tree. The only requirement is a function capable of
    extracting the relevant properties from a node representation of the
    particular implementation.

    Args:
        query (:obj:`tuple` of float or int): Stores the position of the
            node.
        root_id (:obj): The identifier of the root in the kd-tree
            implementation.
        get_properties (:obj:`function`): The function to extract the
            relevant properties from a node, namely its point, region,
            axis, whether it is active, and its left and right child
            identifiers. If the implementation does not use the active
            attribute, the function should always return True for it.
        dist_fun (:obj:`function`, optional): The distance function,
            euclidean distance by default.

    Returns:
        :obj:`tuple`: Tuple of length 2, where the first element is the
            identifier of the nearest node, the second is the distance
            to the query.
    """
    k = len(query)
    dist = math.inf

    nearest_node_id = None

    # stack_node: stack of identifiers to nodes within a region that
    # contains the query.
    # stack_look: stack of identifiers to nodes within a region that
    # does not contain the query.
    stack_node = deque([root_id])
    stack_look = deque()

    while stack_node or stack_look:

        if stack_node:
            node_id = stack_node.pop()
            look_node = False
        else:
            node_id = stack_look.pop()
            look_node = True

        point, region, axis, active, left, right = get_properties(node_id)

        # Should we consider this node?
        # As it is within a region that does not contain the query, maybe
        # there is no chance to find a closer node in this region
        if look_node:
            inside_region = True
            for i in range(k):
                inside_region &= interval_condition(query[i], region[i][0],
                                                    region[i][1], dist)
            if not inside_region:
                continue

        # Update the distance only if the node is active.
        if active:
            node_distance = dist_fun(query, point)
            if nearest_node_id is None or dist > node_distance:
                nearest_node_id = node_id
                dist = node_distance

        if query[axis] < point[axis]:
            side_node = left
            side_look = right
        else:
            side_node = right
            side_look = left

        if side_node is not None:
            stack_node.append(side_node)

        if side_look is not None:
            stack_look.append(side_look)

    return nearest_node_id, dist
[ "def", "nearest_point", "(", "query", ",", "root_id", ",", "get_properties", ",", "dist_fun", "=", "euclidean_dist", ")", ":", "k", "=", "len", "(", "query", ")", "dist", "=", "math", ".", "inf", "nearest_node_id", "=", "None", "# stack_node: stack of identifiers to nodes within a region that", "# contains the query.", "# stack_look: stack of identifiers to nodes within a region that", "# does not contains the query.", "stack_node", "=", "deque", "(", "[", "root_id", "]", ")", "stack_look", "=", "deque", "(", ")", "while", "stack_node", "or", "stack_look", ":", "if", "stack_node", ":", "node_id", "=", "stack_node", ".", "pop", "(", ")", "look_node", "=", "False", "else", ":", "node_id", "=", "stack_look", ".", "pop", "(", ")", "look_node", "=", "True", "point", ",", "region", ",", "axis", ",", "active", ",", "left", ",", "right", "=", "get_properties", "(", "node_id", ")", "# Should consider this node?", "# As it is within a region that does not contains the query, maybe", "# there is no chance to find a closer node in this region", "if", "look_node", ":", "inside_region", "=", "True", "for", "i", "in", "range", "(", "k", ")", ":", "inside_region", "&=", "interval_condition", "(", "query", "[", "i", "]", ",", "region", "[", "i", "]", "[", "0", "]", ",", "region", "[", "i", "]", "[", "1", "]", ",", "dist", ")", "if", "not", "inside_region", ":", "continue", "# Update the distance only if the node is active.", "if", "active", ":", "node_distance", "=", "dist_fun", "(", "query", ",", "point", ")", "if", "nearest_node_id", "is", "None", "or", "dist", ">", "node_distance", ":", "nearest_node_id", "=", "node_id", "dist", "=", "node_distance", "if", "query", "[", "axis", "]", "<", "point", "[", "axis", "]", ":", "side_node", "=", "left", "side_look", "=", "right", "else", ":", "side_node", "=", "right", "side_look", "=", "left", "if", "side_node", "is", "not", "None", ":", "stack_node", ".", "append", "(", "side_node", ")", "if", "side_look", "is", "not", "None", ":", "stack_look", ".", "append", "(", "side_look", ")", "return", "nearest_node_id", ",", "dist" ]
34.662651
21.771084
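interval_condition is referenced but not defined in the snippet; a plausible definition, consistent with how it is called (query coordinate, interval bounds, current best distance), is the region-pruning test sketched below. This is an assumption about the helper, not the library's actual code.

def interval_condition(value, lo, hi, dist):
    # True when `value` lies within `dist` of the interval [lo, hi]:
    # only then can the region still contain a point closer than the
    # current best.
    return lo - dist <= value <= hi + dist

query = (2.0, 5.0)
region = [(3.0, 7.0), (0.0, 4.0)]   # hypothetical node region, one interval per axis
best_dist = 1.5
visit = all(interval_condition(q, lo, hi, best_dist)
            for q, (lo, hi) in zip(query, region))
print(visit)  # True: the query is within 1.5 of the region on every axis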
def dimap(D, I): """ Function to map directions to x,y pairs in equal area projection Parameters ---------- D : list or array of declinations (as float) I : list or array or inclinations (as float) Returns ------- XY : x, y values of directions for equal area projection [x,y] """ try: D = float(D) I = float(I) except TypeError: # is an array return dimap_V(D, I) # DEFINE FUNCTION VARIABLES # initialize equal area projection x,y XY = [0., 0.] # GET CARTESIAN COMPONENTS OF INPUT DIRECTION X = dir2cart([D, I, 1.]) # CHECK IF Z = 1 AND ABORT if X[2] == 1.0: return XY # return [0,0] # TAKE THE ABSOLUTE VALUE OF Z if X[2] < 0: # this only works on lower hemisphere projections X[2] = -X[2] # CALCULATE THE X,Y COORDINATES FOR THE EQUAL AREA PROJECTION # from Collinson 1983 R = old_div(np.sqrt(1. - X[2]), (np.sqrt(X[0]**2 + X[1]**2))) XY[1], XY[0] = X[0] * R, X[1] * R # RETURN XY[X,Y] return XY
[ "def", "dimap", "(", "D", ",", "I", ")", ":", "try", ":", "D", "=", "float", "(", "D", ")", "I", "=", "float", "(", "I", ")", "except", "TypeError", ":", "# is an array", "return", "dimap_V", "(", "D", ",", "I", ")", "# DEFINE FUNCTION VARIABLES", "# initialize equal area projection x,y", "XY", "=", "[", "0.", ",", "0.", "]", "# GET CARTESIAN COMPONENTS OF INPUT DIRECTION", "X", "=", "dir2cart", "(", "[", "D", ",", "I", ",", "1.", "]", ")", "# CHECK IF Z = 1 AND ABORT", "if", "X", "[", "2", "]", "==", "1.0", ":", "return", "XY", "# return [0,0]", "# TAKE THE ABSOLUTE VALUE OF Z", "if", "X", "[", "2", "]", "<", "0", ":", "# this only works on lower hemisphere projections", "X", "[", "2", "]", "=", "-", "X", "[", "2", "]", "# CALCULATE THE X,Y COORDINATES FOR THE EQUAL AREA PROJECTION", "# from Collinson 1983", "R", "=", "old_div", "(", "np", ".", "sqrt", "(", "1.", "-", "X", "[", "2", "]", ")", ",", "(", "np", ".", "sqrt", "(", "X", "[", "0", "]", "**", "2", "+", "X", "[", "1", "]", "**", "2", ")", ")", ")", "XY", "[", "1", "]", ",", "XY", "[", "0", "]", "=", "X", "[", "0", "]", "*", "R", ",", "X", "[", "1", "]", "*", "R", "# RETURN XY[X,Y]", "return", "XY" ]
24.902439
21.04878
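A standalone numeric check of the mapping above for one direction, assuming the usual dir2cart convention (x = cos I cos D, y = cos I sin D, z = sin I); note the deliberate x/y swap in the last step, exactly as in the snippet.

import numpy as np

D, I = np.radians(0.0), np.radians(45.0)
# unit vector for the direction (assumed dir2cart convention)
X = np.array([np.cos(I) * np.cos(D), np.cos(I) * np.sin(D), np.sin(I)])
R = np.sqrt(1.0 - X[2]) / np.sqrt(X[0] ** 2 + X[1] ** 2)
x, y = X[1] * R, X[0] * R          # mirrors XY[1], XY[0] = X[0]*R, X[1]*R
print(round(x, 3), round(y, 3))    # 0.0 0.541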
def checkpoint(key=0, unpickler=pickle.load, pickler=pickle.dump, work_dir=gettempdir(), refresh=False):
    """
    A utility decorator to save intermediate results of a function. It is the
    caller's responsibility to specify a key naming scheme such that the
    output of each function call with different arguments is stored in a
    separate file.

    :param key: The key to store the computed intermediate output of the
        decorated function.

        If key is a string, it is used directly as the name.

        If key is a string.Template object, you can specify your file-naming
        convention using the standard string.Template conventions. Since
        string.Template uses named substitutions, it can handle only keyword
        arguments. Therefore, in addition to the standard Template
        conventions, an additional feature is provided to help with
        non-keyword arguments.

        For instance, if you have a function definition as
        f(m, n, arg3='myarg3', arg4='myarg4'). Say you want your key to be:
        n followed by an _ followed by 'text' followed by arg3 followed by a
        . followed by arg4. Let n = 3, arg3='out', arg4='txt', then you are
        interested in getting '3_textout.txt'.

        This is written as key=Template('{1}_text$arg3.$arg4')

        The filename is first generated by substituting the kwargs, i.e.
        key.substitute(kwargs); this would give the string
        '{1}_textout.txt' as output. This is further processed by a call to
        format with args as the argument, where the second argument is
        picked (since counting starts from 0), and we get 3_textout.txt.

        If key is a callable function, it is called with the same arguments
        as that of the function, in a special format. key must be of the
        form lambda arg, kwarg: ... your definition. arg is an iterable
        containing the un-named arguments of the function, and kwarg is a
        dictionary containing the keyword arguments.

        For instance, the above example can be written as:
        key = lambda arg, kwarg: '{}_text{}.{}'.format(arg[1], kwarg['arg3'], kwarg['arg4'])

        Or one can define a function that takes the same arguments:
        def key_namer(args, kwargs):
            return '{}_text{}.{}'.format(args[1], kwargs['arg3'], kwargs['arg4'])

        This way you can do complex argument processing and name generation.

    :param pickler: The function that saves the computed object into a file.

    :param unpickler: The function that loads the saved object back from a
        file and returns it. Ideally this should return an object of the
        same format as the one that was computed. However, in certain cases
        it is enough as long as it provides the information necessary for
        the caller, even if it is not exactly the same as the object
        returned by the function.

    :param work_dir: The location where the checkpoint files are stored.

    :param refresh: If enabled, the decorated function is always re-run and
        the checkpoint re-written, effectively disabling the caching done by
        @checkpoint.

    REFRESHING: One of the intended ways to use the refresh feature is as
    follows: Say you are checkpointing functions f1 and f2; have a file or a
    place where you define refresh variables:

    defs.py:
    -------
    REFRESH_f1 = True
    REFRESH_f2 = os.environ['F2_REFRESH']  # can set this externally

    code.py:
    -------
    @checkpoint(..., refresh=REFRESH_f1)
    def f1(...):
        your code.

    @checkpoint(..., refresh=REFRESH_f2)
    def f2(...):
        your code.

    This way, you have control over what to refresh without modifying the
    code, by setting the defs either via input or by modifying defs.py.
    """
    def decorator(func):
        def wrapped(*args, **kwargs):
            # If first arg is a string, use it directly.
if isinstance(key, str): save_file = os.path.join(work_dir, key) elif isinstance(key, Template): save_file = os.path.join(work_dir, key.substitute(kwargs)) save_file = save_file.format(*args) elif isinstance(key, types.FunctionType): save_file = os.path.join(work_dir, key(args, kwargs)) else: logging.warn('Using 0-th argument as default.') save_file = os.path.join(work_dir, '{0}') save_file = save_file.format(args[key]) logging.info('checkpoint@ %s' % save_file) # cache_file doesn't exist, run the function and save output in checkpoint. if isinstance(refresh, types.FunctionType): do_refresh = refresh() else: do_refresh = refresh if do_refresh or not os.path.exists(path=save_file): # Otherwise compute it save it and return it. # If the program fails, don't checkpoint. try: out = func(*args, **kwargs) except: # a blank raise re-raises the last exception. raise else: # If the program is successful, then go ahead and call the save function. with open(save_file, 'wb') as f: pickler(out, f) return out # Otherwise, load the checkpoint file and send it. else: logging.info("Checkpoint exists. Loading from: %s" % save_file) with open(save_file, 'rb') as f: return unpickler(f) # Todo: Sending options to load/save functions. return wrapped return decorator
[ "def", "checkpoint", "(", "key", "=", "0", ",", "unpickler", "=", "pickle", ".", "load", ",", "pickler", "=", "pickle", ".", "dump", ",", "work_dir", "=", "gettempdir", "(", ")", ",", "refresh", "=", "False", ")", ":", "def", "decorator", "(", "func", ")", ":", "def", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# If first arg is a string, use it directly.", "if", "isinstance", "(", "key", ",", "str", ")", ":", "save_file", "=", "os", ".", "path", ".", "join", "(", "work_dir", ",", "key", ")", "elif", "isinstance", "(", "key", ",", "Template", ")", ":", "save_file", "=", "os", ".", "path", ".", "join", "(", "work_dir", ",", "key", ".", "substitute", "(", "kwargs", ")", ")", "save_file", "=", "save_file", ".", "format", "(", "*", "args", ")", "elif", "isinstance", "(", "key", ",", "types", ".", "FunctionType", ")", ":", "save_file", "=", "os", ".", "path", ".", "join", "(", "work_dir", ",", "key", "(", "args", ",", "kwargs", ")", ")", "else", ":", "logging", ".", "warn", "(", "'Using 0-th argument as default.'", ")", "save_file", "=", "os", ".", "path", ".", "join", "(", "work_dir", ",", "'{0}'", ")", "save_file", "=", "save_file", ".", "format", "(", "args", "[", "key", "]", ")", "logging", ".", "info", "(", "'checkpoint@ %s'", "%", "save_file", ")", "# cache_file doesn't exist, run the function and save output in checkpoint.", "if", "isinstance", "(", "refresh", ",", "types", ".", "FunctionType", ")", ":", "do_refresh", "=", "refresh", "(", ")", "else", ":", "do_refresh", "=", "refresh", "if", "do_refresh", "or", "not", "os", ".", "path", ".", "exists", "(", "path", "=", "save_file", ")", ":", "# Otherwise compute it save it and return it.", "# If the program fails, don't checkpoint.", "try", ":", "out", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", ":", "# a blank raise re-raises the last exception.", "raise", "else", ":", "# If the program is successful, then go ahead and call the save function.", "with", "open", "(", "save_file", ",", "'wb'", ")", "as", "f", ":", "pickler", "(", "out", ",", "f", ")", "return", "out", "# Otherwise, load the checkpoint file and send it.", "else", ":", "logging", ".", "info", "(", "\"Checkpoint exists. Loading from: %s\"", "%", "save_file", ")", "with", "open", "(", "save_file", ",", "'rb'", ")", "as", "f", ":", "return", "unpickler", "(", "f", ")", "# Todo: Sending options to load/save functions.", "return", "wrapped", "return", "decorator" ]
42.257576
29.469697
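The Template-based key naming is the subtlest part of the docstring above; this standalone snippet reproduces its worked example (kwargs substituted first, positional args formatted second) without needing the decorator itself.

from string import Template

key = Template('{1}_text$arg3.$arg4')
args = ('m_value', 3)                    # stands in for a call f(m, n, ...)
kwargs = {'arg3': 'out', 'arg4': 'txt'}
# substitute() fills the named placeholders, format() fills '{1}' from args
name = key.substitute(kwargs).format(*args)
print(name)  # 3_textout.txt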
def iat(x, maxlag=None): """Calculate the integrated autocorrelation time (IAT), given the trace from a Stochastic.""" if not maxlag: # Calculate maximum lag to which autocorrelation is calculated maxlag = _find_max_lag(x) acr = [autocorr(x, lag) for lag in range(1, maxlag + 1)] # Calculate gamma values gammas = [(acr[2 * i] + acr[2 * i + 1]) for i in range(maxlag // 2)] cut = _cut_time(gammas) if cut + 1 == len(gammas): print_("Not enough lag to calculate IAT") return np.sum(2 * gammas[:cut + 1]) - 1.0
[ "def", "iat", "(", "x", ",", "maxlag", "=", "None", ")", ":", "if", "not", "maxlag", ":", "# Calculate maximum lag to which autocorrelation is calculated", "maxlag", "=", "_find_max_lag", "(", "x", ")", "acr", "=", "[", "autocorr", "(", "x", ",", "lag", ")", "for", "lag", "in", "range", "(", "1", ",", "maxlag", "+", "1", ")", "]", "# Calculate gamma values", "gammas", "=", "[", "(", "acr", "[", "2", "*", "i", "]", "+", "acr", "[", "2", "*", "i", "+", "1", "]", ")", "for", "i", "in", "range", "(", "maxlag", "//", "2", ")", "]", "cut", "=", "_cut_time", "(", "gammas", ")", "if", "cut", "+", "1", "==", "len", "(", "gammas", ")", ":", "print_", "(", "\"Not enough lag to calculate IAT\"", ")", "return", "np", ".", "sum", "(", "2", "*", "gammas", "[", ":", "cut", "+", "1", "]", ")", "-", "1.0" ]
30.722222
23.111111
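autocorr, _find_max_lag and _cut_time are not shown in the snippet; the self-contained sketch below assumes a standard biased autocorrelation estimator and a truncate-at-first-non-positive-pair rule for _cut_time, which matches how gammas is consumed above. Treat both helpers as illustrative assumptions rather than the original implementations.

import numpy as np

def autocorr(x, lag):
    # biased autocorrelation estimate at the given lag (assumed helper)
    x = np.asarray(x, dtype=float) - np.mean(x)
    return float(np.dot(x[:len(x) - lag], x[lag:]) / np.dot(x, x))

rng = np.random.default_rng(0)
x = np.cumsum(rng.normal(size=2000)) * 0.05 + rng.normal(size=2000)
maxlag = 100
acr = [autocorr(x, lag) for lag in range(1, maxlag + 1)]
# pair adjacent autocorrelations, as in the snippet
gammas = [acr[2 * i] + acr[2 * i + 1] for i in range(maxlag // 2)]
# assumed _cut_time: truncate at the first non-positive pair
cut = next((i for i, g in enumerate(gammas) if g <= 0), len(gammas) - 1)
print(np.sum(2 * np.array(gammas[:cut + 1])) - 1.0)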
def error(self, correlation_id, error, message, *args, **kwargs): """ Logs recoverable application error. :param correlation_id: (optional) transaction id to trace execution through call chain. :param error: an error object associated with this message. :param message: a human-readable message to log. :param args: arguments to parameterize the message. :param kwargs: arguments to parameterize the message. """ self._format_and_write(LogLevel.Error, correlation_id, error, message, args, kwargs)
[ "def", "error", "(", "self", ",", "correlation_id", ",", "error", ",", "message", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_format_and_write", "(", "LogLevel", ".", "Error", ",", "correlation_id", ",", "error", ",", "message", ",", "args", ",", "kwargs", ")" ]
37.333333
27.866667
def PopItem(self): """Pops an item off the queue. If no ZeroMQ socket has been created, one will be created the first time this method is called. Returns: object: item from the queue. Raises: KeyboardInterrupt: if the process is sent a KeyboardInterrupt while popping an item. QueueEmpty: if the queue is empty, and no item could be popped within the queue timeout. RuntimeError: if terminate event is missing. zmq.error.ZMQError: if an error occurs in ZeroMQ. """ if not self._zmq_socket: self._CreateZMQSocket() if not self._terminate_event: raise RuntimeError('Missing terminate event.') logger.debug('Pop on {0:s} queue, port {1:d}'.format( self.name, self.port)) last_retry_time = time.time() + self.timeout_seconds while not self._terminate_event.is_set(): try: self._zmq_socket.send_pyobj(None) break except zmq.error.Again: # The existing socket is now out of sync, so we need to open a new one. self._CreateZMQSocket() if time.time() > last_retry_time: logger.warning('{0:s} timeout requesting item'.format(self.name)) raise errors.QueueEmpty continue while not self._terminate_event.is_set(): try: return self._ReceiveItemOnActivity(self._zmq_socket) except errors.QueueEmpty: continue except KeyboardInterrupt: self.Close(abort=True) raise
[ "def", "PopItem", "(", "self", ")", ":", "if", "not", "self", ".", "_zmq_socket", ":", "self", ".", "_CreateZMQSocket", "(", ")", "if", "not", "self", ".", "_terminate_event", ":", "raise", "RuntimeError", "(", "'Missing terminate event.'", ")", "logger", ".", "debug", "(", "'Pop on {0:s} queue, port {1:d}'", ".", "format", "(", "self", ".", "name", ",", "self", ".", "port", ")", ")", "last_retry_time", "=", "time", ".", "time", "(", ")", "+", "self", ".", "timeout_seconds", "while", "not", "self", ".", "_terminate_event", ".", "is_set", "(", ")", ":", "try", ":", "self", ".", "_zmq_socket", ".", "send_pyobj", "(", "None", ")", "break", "except", "zmq", ".", "error", ".", "Again", ":", "# The existing socket is now out of sync, so we need to open a new one.", "self", ".", "_CreateZMQSocket", "(", ")", "if", "time", ".", "time", "(", ")", ">", "last_retry_time", ":", "logger", ".", "warning", "(", "'{0:s} timeout requesting item'", ".", "format", "(", "self", ".", "name", ")", ")", "raise", "errors", ".", "QueueEmpty", "continue", "while", "not", "self", ".", "_terminate_event", ".", "is_set", "(", ")", ":", "try", ":", "return", "self", ".", "_ReceiveItemOnActivity", "(", "self", ".", "_zmq_socket", ")", "except", "errors", ".", "QueueEmpty", ":", "continue", "except", "KeyboardInterrupt", ":", "self", ".", "Close", "(", "abort", "=", "True", ")", "raise" ]
29.18
21.2
def popen(fn, *args, **kwargs) -> subprocess.Popen:
    """
    Start ``fn`` as a subprocess and return the Popen handle immediately.
    Please ensure you're not killing the process before it has started properly
    :param fn:
    :param args:
    :param kwargs:
    :return:
    """
    args = popen_encode(fn, *args, **kwargs)
    logging.getLogger(__name__).debug('Start %s', args)
    p = subprocess.Popen(args)
    return p
[ "def", "popen", "(", "fn", ",", "*", "args", ",", "*", "*", "kwargs", ")", "->", "subprocess", ".", "Popen", ":", "args", "=", "popen_encode", "(", "fn", ",", "*", "args", ",", "*", "*", "kwargs", ")", "logging", ".", "getLogger", "(", "__name__", ")", ".", "debug", "(", "'Start %s'", ",", "args", ")", "p", "=", "subprocess", ".", "Popen", "(", "args", ")", "return", "p" ]
23
21.8
def warn(what, string, pos): """ Combines a warning with a call to errors.position(). Simple convenience function. Arguments: string (str): The string being parsed. pos (int): The index of the character that caused trouble. """ pos = position(string, pos) warnings.warn("{0} at position {1}!".format(what, pos), Warning)
[ "def", "warn", "(", "what", ",", "string", ",", "pos", ")", ":", "pos", "=", "position", "(", "string", ",", "pos", ")", "warnings", ".", "warn", "(", "\"{0} at position {1}!\"", ".", "format", "(", "what", ",", "pos", ")", ",", "Warning", ")" ]
21.466667
21.466667
def badges(request):
    ''' Either displays a form containing a list of users with badges to
    render, or returns a .zip file containing their badges. '''

    category = request.GET.getlist("category", [])
    product = request.GET.getlist("product", [])
    status = request.GET.get("status")

    form = forms.InvoicesWithProductAndStatusForm(
        request.POST or None,
        category=category,
        product=product,
        status=status,
    )

    if form.is_valid():
        response = HttpResponse()
        response["Content-Type"] = "application/zip"
        response["Content-Disposition"] = 'attachment; filename="badges.zip"'

        z = zipfile.ZipFile(response, "w")
        for invoice in form.cleaned_data["invoice"]:
            user = invoice.user
            badge = render_badge(user)
            z.writestr("badge_%d.svg" % user.id, badge.encode("utf-8"))

        return response

    data = {
        "form": form,
    }

    return render(request, "registrasion/badges.html", data)
[ "def", "badges", "(", "request", ")", ":", "category", "=", "request", ".", "GET", ".", "getlist", "(", "\"category\"", ",", "[", "]", ")", "product", "=", "request", ".", "GET", ".", "getlist", "(", "\"product\"", ",", "[", "]", ")", "status", "=", "request", ".", "GET", ".", "get", "(", "\"status\"", ")", "form", "=", "forms", ".", "InvoicesWithProductAndStatusForm", "(", "request", ".", "POST", "or", "None", ",", "category", "=", "category", ",", "product", "=", "product", ",", "status", "=", "status", ",", ")", "if", "form", ".", "is_valid", "(", ")", ":", "response", "=", "HttpResponse", "(", ")", "response", "[", "\"Content-Type\"", "]", "=", "\"application.zip\"", "response", "[", "\"Content-Disposition\"", "]", "=", "'attachment; filename=\"badges.zip\"'", "z", "=", "zipfile", ".", "ZipFile", "(", "response", ",", "\"w\"", ")", "for", "invoice", "in", "form", ".", "cleaned_data", "[", "\"invoice\"", "]", ":", "user", "=", "invoice", ".", "user", "badge", "=", "render_badge", "(", "user", ")", "z", ".", "writestr", "(", "\"badge_%d.svg\"", "%", "user", ".", "id", ",", "badge", ".", "encode", "(", "\"utf-8\"", ")", ")", "return", "response", "data", "=", "{", "\"form\"", ":", "form", ",", "}", "return", "render", "(", "request", ",", "\"registrasion/badges.html\"", ",", "data", ")" ]
29.029412
22.558824
def index_lists_equal(a: List[Index], b: List[Index]) -> bool: """ Are all indexes in list ``a`` equal to their counterparts in list ``b``, as per :func:`indexes_equal`? """ n = len(a) if len(b) != n: return False for i in range(n): if not indexes_equal(a[i], b[i]): log.debug("Mismatch: {!r} != {!r}", a[i], b[i]) return False return True
[ "def", "index_lists_equal", "(", "a", ":", "List", "[", "Index", "]", ",", "b", ":", "List", "[", "Index", "]", ")", "->", "bool", ":", "n", "=", "len", "(", "a", ")", "if", "len", "(", "b", ")", "!=", "n", ":", "return", "False", "for", "i", "in", "range", "(", "n", ")", ":", "if", "not", "indexes_equal", "(", "a", "[", "i", "]", ",", "b", "[", "i", "]", ")", ":", "log", ".", "debug", "(", "\"Mismatch: {!r} != {!r}\"", ",", "a", "[", "i", "]", ",", "b", "[", "i", "]", ")", "return", "False", "return", "True" ]
30.692308
16.230769
def get_center_of_mass(image):
    """
    Compute an image center of mass in physical space which is defined
    as the mean of the intensity weighted voxel coordinate system.

    ANTsR function: `getCenterOfMass`

    Arguments
    ---------
    image : ANTsImage
        image from which center of mass will be computed

    Returns
    -------
    tuple
        the center-of-mass coordinates in physical space

    Example
    -------
    >>> fi = ants.image_read( ants.get_ants_data("r16"))
    >>> com1 = ants.get_center_of_mass( fi )
    >>> fi = ants.image_read( ants.get_ants_data("r64"))
    >>> com2 = ants.get_center_of_mass( fi )
    """
    if image.pixeltype != 'float':
        image = image.clone('float')

    libfn = utils.get_lib_fn('centerOfMass%s' % image._libsuffix)
    com = libfn(image.pointer)

    return tuple(com)
[ "def", "get_center_of_mass", "(", "image", ")", ":", "if", "image", ".", "pixeltype", "!=", "'float'", ":", "image", "=", "image", ".", "clone", "(", "'float'", ")", "libfn", "=", "utils", ".", "get_lib_fn", "(", "'centerOfMass%s'", "%", "image", ".", "_libsuffix", ")", "com", "=", "libfn", "(", "image", ".", "pointer", ")", "return", "tuple", "(", "com", ")" ]
25.5
21.5
def sum(self, field): """ Returns the sum of the field in the result set of the query by wrapping the query and performing a SUM aggregate of the specified field :param field: the field to pass to the SUM aggregate :type field: str :return: The sum of the specified field :rtype: int """ q = Query(self.connection).from_table(self, fields=[ SumField(field) ]) rows = q.select(bypass_safe_limit=True) return list(rows[0].values())[0]
[ "def", "sum", "(", "self", ",", "field", ")", ":", "q", "=", "Query", "(", "self", ".", "connection", ")", ".", "from_table", "(", "self", ",", "fields", "=", "[", "SumField", "(", "field", ")", "]", ")", "rows", "=", "q", ".", "select", "(", "bypass_safe_limit", "=", "True", ")", "return", "list", "(", "rows", "[", "0", "]", ".", "values", "(", ")", ")", "[", "0", "]" ]
35.133333
17.533333
def get(self, variable_path: str, default: t.Optional[t.Any] = None, coerce_type: t.Optional[t.Type] = None, coercer: t.Optional[t.Callable] = None, **kwargs): """ Inherited method should take all specified arguments. :param variable_path: a delimiter-separated path to a nested value :param default: default value if there's no object by specified path :param coerce_type: cast a type of a value to a specified one :param coercer: perform a type casting with specified callback :param kwargs: additional arguments inherited parser may need :return: value or default """ raise NotImplementedError
[ "def", "get", "(", "self", ",", "variable_path", ":", "str", ",", "default", ":", "t", ".", "Optional", "[", "t", ".", "Any", "]", "=", "None", ",", "coerce_type", ":", "t", ".", "Optional", "[", "t", ".", "Type", "]", "=", "None", ",", "coercer", ":", "t", ".", "Optional", "[", "t", ".", "Callable", "]", "=", "None", ",", "*", "*", "kwargs", ")", ":", "raise", "NotImplementedError" ]
42.411765
18.529412
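A hypothetical minimal parser honoring the contract documented above: it resolves dotted paths against environment variables and applies coercer and coerce_type in that order. The class name and the path-to-env-var mapping are illustrative assumptions, not the library's behavior.

import os
import typing as t

class EnvParser:
    def get(self, variable_path: str,
            default: t.Optional[t.Any] = None,
            coerce_type: t.Optional[t.Type] = None,
            coercer: t.Optional[t.Callable] = None,
            **kwargs):
        # assumed mapping: 'db.port' -> 'DB_PORT'
        raw = os.environ.get(variable_path.replace('.', '_').upper())
        if raw is None:
            return default
        if coercer is not None:
            return coercer(raw)
        if coerce_type is not None:
            return coerce_type(raw)
        return raw

os.environ['DB_PORT'] = '5432'
print(EnvParser().get('db.port', coerce_type=int))  # 5432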
def _pypsa_generator_timeseries(network, timesteps, mode=None):
    """Timeseries in PyPSA compatible format for generator instances

    Parameters
    ----------
    network : Network
        The eDisGo grid topology model overall container
    timesteps : array_like
        Timesteps is an array-like object with entries of type
        :pandas:`pandas.Timestamp<timestamp>` specifying which time steps
        to export to pypsa representation and use in power flow analysis.
    mode : str, optional
        Specifically retrieve generator time series for MV or LV grid level
        or both. Either choose 'mv' or 'lv'.
        Defaults to None, which returns both timeseries for MV and LV in a
        single DataFrame.

    Returns
    -------
    (:pandas:`pandas.DataFrame<dataframe>`, :pandas:`pandas.DataFrame<dataframe>`)
        Active and reactive power time series tables in PyPSA format
    """
    mv_gen_timeseries_q = []
    mv_gen_timeseries_p = []
    lv_gen_timeseries_q = []
    lv_gen_timeseries_p = []

    # MV generator timeseries
    if mode == 'mv' or mode is None:
        for gen in network.mv_grid.generators:
            mv_gen_timeseries_q.append(gen.pypsa_timeseries('q').rename(
                repr(gen)).to_frame().loc[timesteps])
            mv_gen_timeseries_p.append(gen.pypsa_timeseries('p').rename(
                repr(gen)).to_frame().loc[timesteps])
        if mode == 'mv':
            lv_gen_timeseries_p, lv_gen_timeseries_q = \
                _pypsa_generator_timeseries_aggregated_at_lv_station(
                    network, timesteps)

    # LV generator timeseries
    if mode == 'lv' or mode is None:
        for lv_grid in network.mv_grid.lv_grids:
            for gen in lv_grid.generators:
                lv_gen_timeseries_q.append(gen.pypsa_timeseries('q').rename(
                    repr(gen)).to_frame().loc[timesteps])
                lv_gen_timeseries_p.append(gen.pypsa_timeseries('p').rename(
                    repr(gen)).to_frame().loc[timesteps])

    gen_df_p = pd.concat(mv_gen_timeseries_p + lv_gen_timeseries_p, axis=1)
    gen_df_q = pd.concat(mv_gen_timeseries_q + lv_gen_timeseries_q, axis=1)

    return gen_df_p, gen_df_q
[ "def", "_pypsa_generator_timeseries", "(", "network", ",", "timesteps", ",", "mode", "=", "None", ")", ":", "mv_gen_timeseries_q", "=", "[", "]", "mv_gen_timeseries_p", "=", "[", "]", "lv_gen_timeseries_q", "=", "[", "]", "lv_gen_timeseries_p", "=", "[", "]", "# MV generator timeseries", "if", "mode", "is", "'mv'", "or", "mode", "is", "None", ":", "for", "gen", "in", "network", ".", "mv_grid", ".", "generators", ":", "mv_gen_timeseries_q", ".", "append", "(", "gen", ".", "pypsa_timeseries", "(", "'q'", ")", ".", "rename", "(", "repr", "(", "gen", ")", ")", ".", "to_frame", "(", ")", ".", "loc", "[", "timesteps", "]", ")", "mv_gen_timeseries_p", ".", "append", "(", "gen", ".", "pypsa_timeseries", "(", "'p'", ")", ".", "rename", "(", "repr", "(", "gen", ")", ")", ".", "to_frame", "(", ")", ".", "loc", "[", "timesteps", "]", ")", "if", "mode", "is", "'mv'", ":", "lv_gen_timeseries_p", ",", "lv_gen_timeseries_q", "=", "_pypsa_generator_timeseries_aggregated_at_lv_station", "(", "network", ",", "timesteps", ")", "# LV generator timeseries", "if", "mode", "is", "'lv'", "or", "mode", "is", "None", ":", "for", "lv_grid", "in", "network", ".", "mv_grid", ".", "lv_grids", ":", "for", "gen", "in", "lv_grid", ".", "generators", ":", "lv_gen_timeseries_q", ".", "append", "(", "gen", ".", "pypsa_timeseries", "(", "'q'", ")", ".", "rename", "(", "repr", "(", "gen", ")", ")", ".", "to_frame", "(", ")", ".", "loc", "[", "timesteps", "]", ")", "lv_gen_timeseries_p", ".", "append", "(", "gen", ".", "pypsa_timeseries", "(", "'p'", ")", ".", "rename", "(", "repr", "(", "gen", ")", ")", ".", "to_frame", "(", ")", ".", "loc", "[", "timesteps", "]", ")", "gen_df_p", "=", "pd", ".", "concat", "(", "mv_gen_timeseries_p", "+", "lv_gen_timeseries_p", ",", "axis", "=", "1", ")", "gen_df_q", "=", "pd", ".", "concat", "(", "mv_gen_timeseries_q", "+", "lv_gen_timeseries_q", ",", "axis", "=", "1", ")", "return", "gen_df_p", ",", "gen_df_q" ]
39.320755
20.54717
def percentage(a, b, precision=1, mode=0): """ >>> percentage(100, 200) '100 of 200 (50.0%)' """ _a, _b = a, b pct = "{0:.{1}f}%".format(a * 100. / b, precision) a, b = thousands(a), thousands(b) if mode == 0: return "{0} of {1} ({2})".format(a, b, pct) elif mode == 1: return "{0} ({1})".format(a, pct) elif mode == 2: return _a * 100. / _b return pct
[ "def", "percentage", "(", "a", ",", "b", ",", "precision", "=", "1", ",", "mode", "=", "0", ")", ":", "_a", ",", "_b", "=", "a", ",", "b", "pct", "=", "\"{0:.{1}f}%\"", ".", "format", "(", "a", "*", "100.", "/", "b", ",", "precision", ")", "a", ",", "b", "=", "thousands", "(", "a", ")", ",", "thousands", "(", "b", ")", "if", "mode", "==", "0", ":", "return", "\"{0} of {1} ({2})\"", ".", "format", "(", "a", ",", "b", ",", "pct", ")", "elif", "mode", "==", "1", ":", "return", "\"{0} ({1})\"", ".", "format", "(", "a", ",", "pct", ")", "elif", "mode", "==", "2", ":", "return", "_a", "*", "100.", "/", "_b", "return", "pct" ]
27.066667
12.266667
def token_network_connect(
        self,
        registry_address: PaymentNetworkID,
        token_address: TokenAddress,
        funds: TokenAmount,
        initial_channel_target: int = 3,
        joinable_funds_target: float = 0.4,
) -> None:
    """ Automatically maintain channels open for the given token network.

    Args:
        token_address: the ERC20 token network to connect to.
        funds: the amount of funds that can be used by the ConnectionManager.
        initial_channel_target: number of channels to open proactively.
        joinable_funds_target: fraction of the funds that will be used to join
            channels opened by other participants.
    """
    if not is_binary_address(registry_address):
        raise InvalidAddress('registry_address must be a valid address in binary')
    if not is_binary_address(token_address):
        raise InvalidAddress('token_address must be a valid address in binary')

    token_network_identifier = views.get_token_network_identifier_by_token_address(
        chain_state=views.state_from_raiden(self.raiden),
        payment_network_id=registry_address,
        token_address=token_address,
    )

    connection_manager = self.raiden.connection_manager_for_token_network(
        token_network_identifier,
    )

    has_enough_reserve, estimated_required_reserve = has_enough_gas_reserve(
        raiden=self.raiden,
        channels_to_open=initial_channel_target,
    )

    if not has_enough_reserve:
        raise InsufficientGasReserve((
            'The account balance is below the estimated amount necessary to '
            'finish the lifecycles of all active channels. A balance of at '
            f'least {estimated_required_reserve} wei is required.'
        ))

    connection_manager.connect(
        funds=funds,
        initial_channel_target=initial_channel_target,
        joinable_funds_target=joinable_funds_target,
    )
[ "def", "token_network_connect", "(", "self", ",", "registry_address", ":", "PaymentNetworkID", ",", "token_address", ":", "TokenAddress", ",", "funds", ":", "TokenAmount", ",", "initial_channel_target", ":", "int", "=", "3", ",", "joinable_funds_target", ":", "float", "=", "0.4", ",", ")", "->", "None", ":", "if", "not", "is_binary_address", "(", "registry_address", ")", ":", "raise", "InvalidAddress", "(", "'registry_address must be a valid address in binary'", ")", "if", "not", "is_binary_address", "(", "token_address", ")", ":", "raise", "InvalidAddress", "(", "'token_address must be a valid address in binary'", ")", "token_network_identifier", "=", "views", ".", "get_token_network_identifier_by_token_address", "(", "chain_state", "=", "views", ".", "state_from_raiden", "(", "self", ".", "raiden", ")", ",", "payment_network_id", "=", "registry_address", ",", "token_address", "=", "token_address", ",", ")", "connection_manager", "=", "self", ".", "raiden", ".", "connection_manager_for_token_network", "(", "token_network_identifier", ",", ")", "has_enough_reserve", ",", "estimated_required_reserve", "=", "has_enough_gas_reserve", "(", "raiden", "=", "self", ".", "raiden", ",", "channels_to_open", "=", "initial_channel_target", ",", ")", "if", "not", "has_enough_reserve", ":", "raise", "InsufficientGasReserve", "(", "(", "'The account balance is below the estimated amount necessary to '", "'finish the lifecycles of all active channels. A balance of at '", "f'least {estimated_required_reserve} wei is required.'", ")", ")", "connection_manager", ".", "connect", "(", "funds", "=", "funds", ",", "initial_channel_target", "=", "initial_channel_target", ",", "joinable_funds_target", "=", "joinable_funds_target", ",", ")" ]
41.571429
22.959184
def render(self, **kwargs): """Renders the HTML representation of the element.""" return self._template.render(this=self, kwargs=kwargs)
[ "def", "render", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_template", ".", "render", "(", "this", "=", "self", ",", "kwargs", "=", "kwargs", ")" ]
50
11.666667
def atlas_make_zonefile_inventory( bit_offset, bit_length, con=None, path=None ): """ Get a summary description of the list of zonefiles we have for the given block range (a "zonefile inventory") Zonefile present/absent bits are ordered left-to-right, where the leftmost bit is the earliest zonefile in the blockchain. Offset and length are in bytes. This is slow. Use the in-RAM zonefile inventory vector whenever possible (see atlas_get_zonefile_inventory). """ listing = atlasdb_zonefile_inv_list( bit_offset, bit_length, con=con, path=path ) # serialize to inv bool_vec = [l['present'] for l in listing] if len(bool_vec) % 8 != 0: # pad bool_vec += [False] * (8 - (len(bool_vec) % 8)) inv = "" for i in xrange(0, len(bool_vec), 8): bit_vec = map( lambda b: 1 if b else 0, bool_vec[i:i+8] ) next_byte = (bit_vec[0] << 7) | \ (bit_vec[1] << 6) | \ (bit_vec[2] << 5) | \ (bit_vec[3] << 4) | \ (bit_vec[4] << 3) | \ (bit_vec[5] << 2) | \ (bit_vec[6] << 1) | \ (bit_vec[7]) inv += chr(next_byte) return inv
[ "def", "atlas_make_zonefile_inventory", "(", "bit_offset", ",", "bit_length", ",", "con", "=", "None", ",", "path", "=", "None", ")", ":", "listing", "=", "atlasdb_zonefile_inv_list", "(", "bit_offset", ",", "bit_length", ",", "con", "=", "con", ",", "path", "=", "path", ")", "# serialize to inv", "bool_vec", "=", "[", "l", "[", "'present'", "]", "for", "l", "in", "listing", "]", "if", "len", "(", "bool_vec", ")", "%", "8", "!=", "0", ":", "# pad ", "bool_vec", "+=", "[", "False", "]", "*", "(", "8", "-", "(", "len", "(", "bool_vec", ")", "%", "8", ")", ")", "inv", "=", "\"\"", "for", "i", "in", "xrange", "(", "0", ",", "len", "(", "bool_vec", ")", ",", "8", ")", ":", "bit_vec", "=", "map", "(", "lambda", "b", ":", "1", "if", "b", "else", "0", ",", "bool_vec", "[", "i", ":", "i", "+", "8", "]", ")", "next_byte", "=", "(", "bit_vec", "[", "0", "]", "<<", "7", ")", "|", "(", "bit_vec", "[", "1", "]", "<<", "6", ")", "|", "(", "bit_vec", "[", "2", "]", "<<", "5", ")", "|", "(", "bit_vec", "[", "3", "]", "<<", "4", ")", "|", "(", "bit_vec", "[", "4", "]", "<<", "3", ")", "|", "(", "bit_vec", "[", "5", "]", "<<", "2", ")", "|", "(", "bit_vec", "[", "6", "]", "<<", "1", ")", "|", "(", "bit_vec", "[", "7", "]", ")", "inv", "+=", "chr", "(", "next_byte", ")", "return", "inv" ]
34.111111
18.611111
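The core of the routine above is the eight-bools-to-one-byte packing; here it is in isolation, with the leftmost flag mapping to the most significant bit (chr() in the snippet is Python 2 byte construction).

bool_vec = [True, False, True, True, False, False, False, True]
bits = [1 if b else 0 for b in bool_vec]
# leftmost flag becomes the most significant bit, as in the snippet
byte = (bits[0] << 7) | (bits[1] << 6) | (bits[2] << 5) | (bits[3] << 4) | \
       (bits[4] << 3) | (bits[5] << 2) | (bits[6] << 1) | bits[7]
print(format(byte, '08b'), hex(byte))  # 10110001 0xb1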
def cigarRead(fileHandleOrFile): """Reads a list of pairwise alignments into a pairwise alignment structure. Query and target are reversed! """ fileHandle = _getFileHandle(fileHandleOrFile) #p = re.compile("cigar:\\s+(.+)\\s+([0-9]+)\\s+([0-9]+)\\s+([\\+\\-\\.])\\s+(.+)\\s+([0-9]+)\\s+([0-9]+)\\s+([\\+\\-\\.])\\s+(.+)\\s+(.*)\\s*)*") p = re.compile("cigar:\\s+(.+)\\s+([0-9]+)\\s+([0-9]+)\\s+([\\+\\-\\.])\\s+(.+)\\s+([0-9]+)\\s+([0-9]+)\\s+([\\+\\-\\.])\\s+([^\\s]+)(\\s+(.*)\\s*)*") line = fileHandle.readline() while line != '': pA = cigarReadFromString(line) if pA != None: yield pA line = fileHandle.readline() if isinstance(fileHandleOrFile, "".__class__): fileHandle.close()
[ "def", "cigarRead", "(", "fileHandleOrFile", ")", ":", "fileHandle", "=", "_getFileHandle", "(", "fileHandleOrFile", ")", "#p = re.compile(\"cigar:\\\\s+(.+)\\\\s+([0-9]+)\\\\s+([0-9]+)\\\\s+([\\\\+\\\\-\\\\.])\\\\s+(.+)\\\\s+([0-9]+)\\\\s+([0-9]+)\\\\s+([\\\\+\\\\-\\\\.])\\\\s+(.+)\\\\s+(.*)\\\\s*)*\")", "p", "=", "re", ".", "compile", "(", "\"cigar:\\\\s+(.+)\\\\s+([0-9]+)\\\\s+([0-9]+)\\\\s+([\\\\+\\\\-\\\\.])\\\\s+(.+)\\\\s+([0-9]+)\\\\s+([0-9]+)\\\\s+([\\\\+\\\\-\\\\.])\\\\s+([^\\\\s]+)(\\\\s+(.*)\\\\s*)*\"", ")", "line", "=", "fileHandle", ".", "readline", "(", ")", "while", "line", "!=", "''", ":", "pA", "=", "cigarReadFromString", "(", "line", ")", "if", "pA", "!=", "None", ":", "yield", "pA", "line", "=", "fileHandle", ".", "readline", "(", ")", "if", "isinstance", "(", "fileHandleOrFile", ",", "\"\"", ".", "__class__", ")", ":", "fileHandle", ".", "close", "(", ")" ]
46.8125
23.8125
def torrents(self, **filters): """ Returns a list of torrents matching the supplied filters. :param filter: Current status of the torrents. :param category: Fetch all torrents with the supplied label. :param sort: Sort torrents by. :param reverse: Enable reverse sorting. :param limit: Limit the number of torrents returned. :param offset: Set offset (if less than 0, offset from end). :return: list() of torrent with matching filter. """ params = {} for name, value in filters.items(): # make sure that old 'status' argument still works name = 'filter' if name == 'status' else name params[name] = value return self._get('query/torrents', params=params)
[ "def", "torrents", "(", "self", ",", "*", "*", "filters", ")", ":", "params", "=", "{", "}", "for", "name", ",", "value", "in", "filters", ".", "items", "(", ")", ":", "# make sure that old 'status' argument still works", "name", "=", "'filter'", "if", "name", "==", "'status'", "else", "name", "params", "[", "name", "]", "=", "value", "return", "self", ".", "_get", "(", "'query/torrents'", ",", "params", "=", "params", ")" ]
38.9
17.9
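The only subtlety above is the backward-compatibility remap of the legacy 'status' keyword onto the API's 'filter' parameter; isolated, it behaves like this.

filters = {"status": "downloading", "sort": "ratio"}
params = {}
for name, value in filters.items():
    # the legacy 'status' kwarg is forwarded as 'filter'
    name = 'filter' if name == 'status' else name
    params[name] = value
print(params)  # {'filter': 'downloading', 'sort': 'ratio'}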
def derive_child_context(self, whence): """Derives a scalar context as a child of the current context.""" return _HandlerContext( container=self.container, queue=self.queue, field_name=None, annotations=None, depth=self.depth, whence=whence, value=bytearray(), # children start without a value ion_type=None, pending_symbol=None )
[ "def", "derive_child_context", "(", "self", ",", "whence", ")", ":", "return", "_HandlerContext", "(", "container", "=", "self", ".", "container", ",", "queue", "=", "self", ".", "queue", ",", "field_name", "=", "None", ",", "annotations", "=", "None", ",", "depth", "=", "self", ".", "depth", ",", "whence", "=", "whence", ",", "value", "=", "bytearray", "(", ")", ",", "# children start without a value", "ion_type", "=", "None", ",", "pending_symbol", "=", "None", ")" ]
34.692308
11.538462
def transitionStates(self,state): """ Return the indices of new states and their rates. """ newstates,rates = self.transition(state) newindices = self.getStateIndex(newstates) return newindices,rates
[ "def", "transitionStates", "(", "self", ",", "state", ")", ":", "newstates", ",", "rates", "=", "self", ".", "transition", "(", "state", ")", "newindices", "=", "self", ".", "getStateIndex", "(", "newstates", ")", "return", "newindices", ",", "rates" ]
39.571429
12.428571
def register_vcs_handler(vcs, method):  # type: (str, str) -> typing.Callable # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f
    return decorate
[ "def", "register_vcs_handler", "(", "vcs", ",", "method", ")", ":", "# tyoe: (str, str) -> typing.Callable # decorator", "def", "decorate", "(", "f", ")", ":", "\"\"\"Store f in HANDLERS[vcs][method].\"\"\"", "if", "vcs", "not", "in", "HANDLERS", ":", "HANDLERS", "[", "vcs", "]", "=", "{", "}", "HANDLERS", "[", "vcs", "]", "[", "method", "]", "=", "f", "return", "f", "return", "decorate" ]
39.888889
15.666667
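A self-contained demo of the registry decorator (HANDLERS is assumed to be a module-level dict, as the code implies); the function is reproduced here only so the example runs on its own.

HANDLERS = {}

def register_vcs_handler(vcs, method):  # type: (str, str) -> typing.Callable
    def decorate(f):
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f
    return decorate

@register_vcs_handler("git", "get_keywords")
def git_get_keywords(path):
    return {"refnames": "$Format:%d$"}   # hypothetical payload

print(HANDLERS["git"]["get_keywords"] is git_get_keywords)  # True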
def _run_bcbio_variation(vrn_file, rm_file, rm_interval_file, base_dir, sample, caller, data): """Run validation of a caller against the truth set using bcbio.variation. """ val_config_file = _create_validate_config_file(vrn_file, rm_file, rm_interval_file, base_dir, data) work_dir = os.path.join(base_dir, "work") out = {"summary": os.path.join(work_dir, "validate-summary.csv"), "grading": os.path.join(work_dir, "validate-grading.yaml"), "discordant": os.path.join(work_dir, "%s-eval-ref-discordance-annotate.vcf" % sample)} if not utils.file_exists(out["discordant"]) or not utils.file_exists(out["grading"]): bcbio_variation_comparison(val_config_file, base_dir, data) out["concordant"] = filter(os.path.exists, [os.path.join(work_dir, "%s-%s-concordance.vcf" % (sample, x)) for x in ["eval-ref", "ref-eval"]])[0] return out
[ "def", "_run_bcbio_variation", "(", "vrn_file", ",", "rm_file", ",", "rm_interval_file", ",", "base_dir", ",", "sample", ",", "caller", ",", "data", ")", ":", "val_config_file", "=", "_create_validate_config_file", "(", "vrn_file", ",", "rm_file", ",", "rm_interval_file", ",", "base_dir", ",", "data", ")", "work_dir", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "\"work\"", ")", "out", "=", "{", "\"summary\"", ":", "os", ".", "path", ".", "join", "(", "work_dir", ",", "\"validate-summary.csv\"", ")", ",", "\"grading\"", ":", "os", ".", "path", ".", "join", "(", "work_dir", ",", "\"validate-grading.yaml\"", ")", ",", "\"discordant\"", ":", "os", ".", "path", ".", "join", "(", "work_dir", ",", "\"%s-eval-ref-discordance-annotate.vcf\"", "%", "sample", ")", "}", "if", "not", "utils", ".", "file_exists", "(", "out", "[", "\"discordant\"", "]", ")", "or", "not", "utils", ".", "file_exists", "(", "out", "[", "\"grading\"", "]", ")", ":", "bcbio_variation_comparison", "(", "val_config_file", ",", "base_dir", ",", "data", ")", "out", "[", "\"concordant\"", "]", "=", "filter", "(", "os", ".", "path", ".", "exists", ",", "[", "os", ".", "path", ".", "join", "(", "work_dir", ",", "\"%s-%s-concordance.vcf\"", "%", "(", "sample", ",", "x", ")", ")", "for", "x", "in", "[", "\"eval-ref\"", ",", "\"ref-eval\"", "]", "]", ")", "[", "0", "]", "return", "out" ]
66.266667
29.4
def get_default_connection_details(): """ Gets the connection details based on environment vars or Thanatos default settings. :return: Returns a dictionary of connection details. :rtype: dict """ return { 'host': os.environ.get('MYSQL_HOST', '127.0.0.1'), 'user': os.environ.get('MYSQL_USER', 'vagrant'), 'password': os.environ.get('MYSQL_PASSWORD', 'vagrant'), 'database': os.environ.get('MYSQL_DB', 'thanatos'), }
[ "def", "get_default_connection_details", "(", ")", ":", "return", "{", "'host'", ":", "os", ".", "environ", ".", "get", "(", "'MYSQL_HOST'", ",", "'127.0.0.1'", ")", ",", "'user'", ":", "os", ".", "environ", ".", "get", "(", "'MYSQL_USER'", ",", "'vagrant'", ")", ",", "'password'", ":", "os", ".", "environ", ".", "get", "(", "'MYSQL_PASSWORD'", ",", "'vagrant'", ")", ",", "'database'", ":", "os", ".", "environ", ".", "get", "(", "'MYSQL_DB'", ",", "'thanatos'", ")", ",", "}" ]
35.461538
20.230769
def hscan(key, cursor=0, match=None, count=None, host=None, port=None, db=None, password=None): ''' Incrementally iterate hash fields and associated values. .. versionadded:: 2017.7.0 CLI Example: .. code-block:: bash salt '*' redis.hscan foo_hash match='field_prefix_*' count=1 ''' server = _connect(host, port, db, password) return server.hscan(key, cursor=cursor, match=match, count=count)
[ "def", "hscan", "(", "key", ",", "cursor", "=", "0", ",", "match", "=", "None", ",", "count", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "db", "=", "None", ",", "password", "=", "None", ")", ":", "server", "=", "_connect", "(", "host", ",", "port", ",", "db", ",", "password", ")", "return", "server", ".", "hscan", "(", "key", ",", "cursor", "=", "cursor", ",", "match", "=", "match", ",", "count", "=", "count", ")" ]
30.214286
29.642857
def convert(data): """ Convert from unicode to native ascii """ try: st = basestring except NameError: st = str if isinstance(data, st): return str(data) elif isinstance(data, Mapping): return dict(map(convert, data.iteritems())) elif isinstance(data, Iterable): return type(data)(map(convert, data)) else: return data
[ "def", "convert", "(", "data", ")", ":", "try", ":", "st", "=", "basestring", "except", "NameError", ":", "st", "=", "str", "if", "isinstance", "(", "data", ",", "st", ")", ":", "return", "str", "(", "data", ")", "elif", "isinstance", "(", "data", ",", "Mapping", ")", ":", "return", "dict", "(", "map", "(", "convert", ",", "data", ".", "iteritems", "(", ")", ")", ")", "elif", "isinstance", "(", "data", ",", "Iterable", ")", ":", "return", "type", "(", "data", ")", "(", "map", "(", "convert", ",", "data", ")", ")", "else", ":", "return", "data" ]
24.1875
13.6875
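The snippet above mixes eras: the try/except around basestring suggests Python 3 awareness, yet iteritems() is Python 2 only. A Python 3 rendering of the same recursive walk, with bytes standing in for the unicode-to-ascii step, might look like this (a sketch, not the original project's code).

from collections.abc import Mapping, Iterable

def convert(data):
    if isinstance(data, bytes):
        return data.decode('ascii')
    if isinstance(data, Mapping):
        return {convert(k): convert(v) for k, v in data.items()}
    if isinstance(data, str):
        return data                      # str must be caught before Iterable
    if isinstance(data, Iterable):
        return type(data)(convert(x) for x in data)
    return data

print(convert({b'key': [b'a', b'b'], 'n': 1}))  # {'key': ['a', 'b'], 'n': 1}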
def _set_cam_share(self, v, load=False): """ Setter method for cam_share, mapped from YANG variable /hardware/profile/tcam/cam_share (container) If this variable is read-only (config: false) in the source YANG file, then _set_cam_share is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_cam_share() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=cam_share.cam_share, is_container='container', presence=False, yang_name="cam-share", rest_name="cam-share", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable cam-sharing for features'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """cam_share must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=cam_share.cam_share, is_container='container', presence=False, yang_name="cam-share", rest_name="cam-share", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable cam-sharing for features'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='container', is_config=True)""", }) self.__cam_share = t if hasattr(self, '_set'): self._set()
[ "def", "_set_cam_share", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "cam_share", ".", "cam_share", ",", "is_container", "=", "'container'", ",", "presence", "=", "False", ",", "yang_name", "=", "\"cam-share\"", ",", "rest_name", "=", "\"cam-share\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Enable cam-sharing for features'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-hardware'", ",", "defining_module", "=", "'brocade-hardware'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"cam_share must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=cam_share.cam_share, is_container='container', presence=False, yang_name=\"cam-share\", rest_name=\"cam-share\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable cam-sharing for features'}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='container', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__cam_share", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
73.227273
33.863636
def document_delete(index, doc_type, id, hosts=None, profile=None): ''' Delete a document from an index index Index name where the document resides doc_type Type of the document id Document identifier CLI example:: salt myminion elasticsearch.document_delete testindex doctype1 AUx-384m0Bug_8U80wQZ ''' es = _get_instance(hosts, profile) try: return es.delete(index=index, doc_type=doc_type, id=id) except elasticsearch.exceptions.NotFoundError: return None except elasticsearch.TransportError as e: raise CommandExecutionError("Cannot delete document {0} in index {1}, server returned code {2} with message {3}".format(id, index, e.status_code, e.error))
[ "def", "document_delete", "(", "index", ",", "doc_type", ",", "id", ",", "hosts", "=", "None", ",", "profile", "=", "None", ")", ":", "es", "=", "_get_instance", "(", "hosts", ",", "profile", ")", "try", ":", "return", "es", ".", "delete", "(", "index", "=", "index", ",", "doc_type", "=", "doc_type", ",", "id", "=", "id", ")", "except", "elasticsearch", ".", "exceptions", ".", "NotFoundError", ":", "return", "None", "except", "elasticsearch", ".", "TransportError", "as", "e", ":", "raise", "CommandExecutionError", "(", "\"Cannot delete document {0} in index {1}, server returned code {2} with message {3}\"", ".", "format", "(", "id", ",", "index", ",", "e", ".", "status_code", ",", "e", ".", "error", ")", ")" ]
32.043478
29.173913
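The wrapper above boils down to one elasticsearch-py call plus not-found handling; a hedged sketch of the direct equivalent, assuming an older elasticsearch-py that still accepts doc_type (the host and identifiers are illustrative):

import elasticsearch

es = elasticsearch.Elasticsearch(['localhost:9200'])
try:
    result = es.delete(index='testindex', doc_type='doctype1',
                       id='AUx-384m0Bug_8U80wQZ')
except elasticsearch.exceptions.NotFoundError:
    result = None  # mirrors the wrapper's behaviour for missing documents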
def diagnose_embedding(emb, source, target):
    """A detailed diagnostic for minor embeddings.

    This diagnostic produces a generator, which lists all issues with `emb`. The errors
    are yielded in the form

        ExceptionClass, arg1, arg2,...

    where the arguments following the class are used to construct the exception object.
    User-friendly variants of this function are :func:`is_valid_embedding`, which returns a
    bool, and :func:`verify_embedding` which raises the first observed error.  All exceptions
    are subclasses of :exc:`.EmbeddingError`.

    Args:
        emb (dict):
            Dictionary mapping source nodes to arrays of target nodes.

        source (list/:obj:`networkx.Graph`):
            Graph to be embedded as a NetworkX graph or a list of edges.

        target (list/:obj:`networkx.Graph`):
            Graph being embedded into as a NetworkX graph or a list of edges.

    Yields:
        One of:
            :exc:`.MissingChainError`, snode: a source node label that does not occur
            as a key of `emb`, or for which emb[snode] is empty

            :exc:`.ChainOverlapError`, tnode, snode0, snode1: a target node which occurs
            in both `emb[snode0]` and `emb[snode1]`

            :exc:`.DisconnectedChainError`, snode: a source node label whose chain is not
            a connected subgraph of `target`

            :exc:`.InvalidNodeError`, tnode, snode: a source node label and putative target
            node label which is not a node of `target`

            :exc:`.MissingEdgeError`, snode0, snode1: a pair of source node labels defining
            an edge which is not present between their chains
    """
    if not hasattr(source, 'edges'):
        source = nx.Graph(source)
    if not hasattr(target, 'edges'):
        target = nx.Graph(target)

    label = {}
    embedded = set()
    for x in source:
        try:
            embx = emb[x]
            missing_chain = len(embx) == 0
        except KeyError:
            missing_chain = True
        if missing_chain:
            yield MissingChainError, x
            continue
        all_present = True
        for q in embx:
            if label.get(q, x) != x:
                yield ChainOverlapError, q, x, label[q]
            elif q not in target:
                all_present = False
                yield InvalidNodeError, x, q
            else:
                label[q] = x
        if all_present:
            embedded.add(x)
            if not nx.is_connected(target.subgraph(embx)):
                yield DisconnectedChainError, x

    yielded = nx.Graph()
    for p, q in target.subgraph(label).edges():
        yielded.add_edge(label[p], label[q])

    for x, y in source.edges():
        if x == y:
            continue
        if x in embedded and y in embedded and not yielded.has_edge(x, y):
            yield MissingEdgeError, x, y
[ "def", "diagnose_embedding", "(", "emb", ",", "source", ",", "target", ")", ":", "if", "not", "hasattr", "(", "source", ",", "'edges'", ")", ":", "source", "=", "nx", ".", "Graph", "(", "source", ")", "if", "not", "hasattr", "(", "target", ",", "'edges'", ")", ":", "target", "=", "nx", ".", "Graph", "(", "target", ")", "label", "=", "{", "}", "embedded", "=", "set", "(", ")", "for", "x", "in", "source", ":", "try", ":", "embx", "=", "emb", "[", "x", "]", "missing_chain", "=", "len", "(", "embx", ")", "==", "0", "except", "KeyError", ":", "missing_chain", "=", "True", "if", "missing_chain", ":", "yield", "MissingChainError", ",", "x", "continue", "all_present", "=", "True", "for", "q", "in", "embx", ":", "if", "label", ".", "get", "(", "q", ",", "x", ")", "!=", "x", ":", "yield", "ChainOverlapError", ",", "q", ",", "x", ",", "label", "[", "q", "]", "elif", "q", "not", "in", "target", ":", "all_present", "=", "False", "yield", "InvalidNodeError", ",", "x", ",", "q", "else", ":", "label", "[", "q", "]", "=", "x", "if", "all_present", ":", "embedded", ".", "add", "(", "x", ")", "if", "not", "nx", ".", "is_connected", "(", "target", ".", "subgraph", "(", "embx", ")", ")", ":", "yield", "DisconnectedChainError", ",", "x", "yielded", "=", "nx", ".", "Graph", "(", ")", "for", "p", ",", "q", "in", "target", ".", "subgraph", "(", "label", ")", ".", "edges", "(", ")", ":", "yielded", ".", "add_edge", "(", "label", "[", "p", "]", ",", "label", "[", "q", "]", ")", "for", "x", ",", "y", "in", "source", ".", "edges", "(", ")", ":", "if", "x", "==", "y", ":", "continue", "if", "x", "in", "embedded", "and", "y", "in", "embedded", "and", "not", "yielded", ".", "has_edge", "(", "x", ",", "y", ")", ":", "yield", "MissingEdgeError", ",", "x", ",", "y" ]
37.175676
25.945946
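A toy run, assuming diagnose_embedding() and its exception classes from the module above are importable; the graphs are chosen so one source edge has no connection between its chains:

import networkx as nx

source = nx.cycle_graph(3)      # triangle: edges (0,1), (1,2), (2,0)
target = nx.cycle_graph(4)      # square: has no (0,2) edge
emb = {0: [0], 1: [1], 2: [2]}  # single-node chains

for issue in diagnose_embedding(emb, source, target):
    exc_class, *args = issue    # e.g. MissingEdgeError with args (0, 2)
    print(exc_class.__name__, args)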
def humanize_time_since(timestamp=None):
    """
    Returns a fuzzy time since. Only the largest unit is returned,
    e.g. "20 days" or "14 mins".
    """
    timeDiff = datetime.datetime.now() - timestamp
    days = timeDiff.days
    hours = timeDiff.seconds // 3600
    minutes = timeDiff.seconds % 3600 // 60
    seconds = timeDiff.seconds % 3600 % 60

    str = ""
    if days > 0:
        if days == 1:
            t_str = "day"
        else:
            t_str = "days"
        str += "{0} {1}".format(days, t_str)
        return str
    elif hours > 0:
        if hours == 1:
            t_str = "hour"
        else:
            t_str = "hours"
        str += "{0} {1}".format(hours, t_str)
        return str
    elif minutes > 0:
        if minutes == 1:
            t_str = "min"
        else:
            t_str = "mins"
        str += "{0} {1}".format(minutes, t_str)
        return str
    elif seconds > 0:
        if seconds == 1:
            t_str = "sec"
        else:
            t_str = "secs"
        str += "{0} {1}".format(seconds, t_str)
        return str
    else:
        return str
[ "def", "humanize_time_since", "(", "timestamp", "=", "None", ")", ":", "timeDiff", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "-", "timestamp", "days", "=", "timeDiff", ".", "days", "hours", "=", "timeDiff", ".", "seconds", "/", "3600", "minutes", "=", "timeDiff", ".", "seconds", "%", "3600", "/", "60", "seconds", "=", "timeDiff", ".", "seconds", "%", "3600", "%", "60", "str", "=", "\"\"", "if", "days", ">", "0", ":", "if", "days", "==", "1", ":", "t_str", "=", "\"day\"", "else", ":", "t_str", "=", "\"days\"", "str", "+=", "\"{0} {1}\"", ".", "format", "(", "days", ",", "t_str", ")", "return", "str", "elif", "hours", ">", "0", ":", "if", "hours", "==", "1", ":", "t_str", "=", "\"hour\"", "else", ":", "t_str", "=", "\"hours\"", "str", "+=", "\"{0} {1}\"", ".", "format", "(", "hours", ",", "t_str", ")", "return", "str", "elif", "minutes", ">", "0", ":", "if", "minutes", "==", "1", ":", "t_str", "=", "\"min\"", "else", ":", "t_str", "=", "\"mins\"", "str", "+=", "\"{0} {1}\"", ".", "format", "(", "minutes", ",", "t_str", ")", "return", "str", "elif", "seconds", ">", "0", ":", "if", "seconds", "==", "1", ":", "t_str", "=", "\"sec\"", "else", ":", "t_str", "=", "\"secs\"", "str", "+=", "\"{0} {1}\"", ".", "format", "(", "seconds", ",", "t_str", ")", "return", "str", "else", ":", "return", "str" ]
25.682927
16.756098
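An illustrative call; with the floor divisions above, a timestamp from 90 minutes ago reports only its largest unit:

import datetime

earlier = datetime.datetime.now() - datetime.timedelta(minutes=90)
print(humanize_time_since(earlier))  # -> '1 hour'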
def do_handshake_with_robot(self): # type: ignore """Modified do_handshake() to send a ROBOT payload and return the result. """ try: # Start the handshake using nassl - will throw WantReadError right away self._ssl.do_handshake() except WantReadError: # Send the Client Hello len_to_read = self._network_bio.pending() while len_to_read: # Get the data from the SSL engine handshake_data_out = self._network_bio.read(len_to_read) # Send it to the peer self._sock.send(handshake_data_out) len_to_read = self._network_bio.pending() # Retrieve the server's response - directly read the underlying network socket # Retrieve data until we get to the ServerHelloDone # The server may send back a ServerHello, an Alert or a CertificateRequest first did_receive_hello_done = False remaining_bytes = b'' while not did_receive_hello_done: try: tls_record, len_consumed = TlsRecordParser.parse_bytes(remaining_bytes) remaining_bytes = remaining_bytes[len_consumed::] except NotEnoughData: # Try to get more data raw_ssl_bytes = self._sock.recv(16381) if not raw_ssl_bytes: # No data? break remaining_bytes = remaining_bytes + raw_ssl_bytes continue if isinstance(tls_record, TlsHandshakeRecord): # Does the record contain a ServerDone message? for handshake_message in tls_record.subprotocol_messages: if handshake_message.handshake_type == TlsHandshakeTypeByte.SERVER_DONE: did_receive_hello_done = True break # If not, it could be a ServerHello, Certificate or a CertificateRequest if the server requires client auth elif isinstance(tls_record, TlsAlertRecord): # Server returned a TLS alert break else: raise ValueError('Unknown record? Type {}'.format(tls_record.header.type)) if did_receive_hello_done: # Send a special Client Key Exchange Record as the payload self._sock.send(self._robot_cke_record.to_bytes()) if self._robot_should_finish_handshake: # Then send a CCS record ccs_record = TlsChangeCipherSpecRecord.from_parameters( tls_version=TlsVersionEnum[self._ssl_version.name] ) self._sock.send(ccs_record.to_bytes()) # Lastly send a Finished record finished_record_bytes = RobotTlsRecordPayloads.get_finished_record_bytes(self._ssl_version) self._sock.send(finished_record_bytes) # Return whatever the server sent back by raising an exception # The goal is to detect similar/different responses while True: try: tls_record, len_consumed = TlsRecordParser.parse_bytes(remaining_bytes) remaining_bytes = remaining_bytes[len_consumed::] except NotEnoughData: # Try to get more data try: raw_ssl_bytes = self._sock.recv(16381) if not raw_ssl_bytes: # No data? raise ServerResponseToRobot('No data') except socket.error as e: # Server closed the connection after receiving the CCS payload raise ServerResponseToRobot('socket.error {}'.format(str(e))) remaining_bytes = remaining_bytes + raw_ssl_bytes continue if isinstance(tls_record, TlsAlertRecord): raise ServerResponseToRobot('TLS Alert {} {}'.format(tls_record.alert_description, tls_record.alert_severity)) else: break raise ServerResponseToRobot('Ok')
[ "def", "do_handshake_with_robot", "(", "self", ")", ":", "# type: ignore", "try", ":", "# Start the handshake using nassl - will throw WantReadError right away", "self", ".", "_ssl", ".", "do_handshake", "(", ")", "except", "WantReadError", ":", "# Send the Client Hello", "len_to_read", "=", "self", ".", "_network_bio", ".", "pending", "(", ")", "while", "len_to_read", ":", "# Get the data from the SSL engine", "handshake_data_out", "=", "self", ".", "_network_bio", ".", "read", "(", "len_to_read", ")", "# Send it to the peer", "self", ".", "_sock", ".", "send", "(", "handshake_data_out", ")", "len_to_read", "=", "self", ".", "_network_bio", ".", "pending", "(", ")", "# Retrieve the server's response - directly read the underlying network socket", "# Retrieve data until we get to the ServerHelloDone", "# The server may send back a ServerHello, an Alert or a CertificateRequest first", "did_receive_hello_done", "=", "False", "remaining_bytes", "=", "b''", "while", "not", "did_receive_hello_done", ":", "try", ":", "tls_record", ",", "len_consumed", "=", "TlsRecordParser", ".", "parse_bytes", "(", "remaining_bytes", ")", "remaining_bytes", "=", "remaining_bytes", "[", "len_consumed", ":", ":", "]", "except", "NotEnoughData", ":", "# Try to get more data", "raw_ssl_bytes", "=", "self", ".", "_sock", ".", "recv", "(", "16381", ")", "if", "not", "raw_ssl_bytes", ":", "# No data?", "break", "remaining_bytes", "=", "remaining_bytes", "+", "raw_ssl_bytes", "continue", "if", "isinstance", "(", "tls_record", ",", "TlsHandshakeRecord", ")", ":", "# Does the record contain a ServerDone message?", "for", "handshake_message", "in", "tls_record", ".", "subprotocol_messages", ":", "if", "handshake_message", ".", "handshake_type", "==", "TlsHandshakeTypeByte", ".", "SERVER_DONE", ":", "did_receive_hello_done", "=", "True", "break", "# If not, it could be a ServerHello, Certificate or a CertificateRequest if the server requires client auth", "elif", "isinstance", "(", "tls_record", ",", "TlsAlertRecord", ")", ":", "# Server returned a TLS alert", "break", "else", ":", "raise", "ValueError", "(", "'Unknown record? 
Type {}'", ".", "format", "(", "tls_record", ".", "header", ".", "type", ")", ")", "if", "did_receive_hello_done", ":", "# Send a special Client Key Exchange Record as the payload", "self", ".", "_sock", ".", "send", "(", "self", ".", "_robot_cke_record", ".", "to_bytes", "(", ")", ")", "if", "self", ".", "_robot_should_finish_handshake", ":", "# Then send a CCS record", "ccs_record", "=", "TlsChangeCipherSpecRecord", ".", "from_parameters", "(", "tls_version", "=", "TlsVersionEnum", "[", "self", ".", "_ssl_version", ".", "name", "]", ")", "self", ".", "_sock", ".", "send", "(", "ccs_record", ".", "to_bytes", "(", ")", ")", "# Lastly send a Finished record", "finished_record_bytes", "=", "RobotTlsRecordPayloads", ".", "get_finished_record_bytes", "(", "self", ".", "_ssl_version", ")", "self", ".", "_sock", ".", "send", "(", "finished_record_bytes", ")", "# Return whatever the server sent back by raising an exception", "# The goal is to detect similar/different responses", "while", "True", ":", "try", ":", "tls_record", ",", "len_consumed", "=", "TlsRecordParser", ".", "parse_bytes", "(", "remaining_bytes", ")", "remaining_bytes", "=", "remaining_bytes", "[", "len_consumed", ":", ":", "]", "except", "NotEnoughData", ":", "# Try to get more data", "try", ":", "raw_ssl_bytes", "=", "self", ".", "_sock", ".", "recv", "(", "16381", ")", "if", "not", "raw_ssl_bytes", ":", "# No data?", "raise", "ServerResponseToRobot", "(", "'No data'", ")", "except", "socket", ".", "error", "as", "e", ":", "# Server closed the connection after receiving the CCS payload", "raise", "ServerResponseToRobot", "(", "'socket.error {}'", ".", "format", "(", "str", "(", "e", ")", ")", ")", "remaining_bytes", "=", "remaining_bytes", "+", "raw_ssl_bytes", "continue", "if", "isinstance", "(", "tls_record", ",", "TlsAlertRecord", ")", ":", "raise", "ServerResponseToRobot", "(", "'TLS Alert {} {}'", ".", "format", "(", "tls_record", ".", "alert_description", ",", "tls_record", ".", "alert_severity", ")", ")", "else", ":", "break", "raise", "ServerResponseToRobot", "(", "'Ok'", ")" ]
43.344444
22.366667
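The handshake code above repeats one buffering idiom three times: accumulate raw bytes, attempt a parse, and recv() more when the parser signals it needs data. A distilled, library-agnostic sketch of that idiom; the parser callable and the exception are stand-ins, not the actual nassl types:

class NotEnoughData(Exception):
    """Raised by the parser when the buffer lacks a complete record."""

def read_records(sock, parse_bytes):
    # parse_bytes(buf) -> (record, bytes_consumed), like TlsRecordParser above.
    remaining = b''
    while True:
        try:
            record, consumed = parse_bytes(remaining)
            remaining = remaining[consumed:]
            yield record
        except NotEnoughData:
            chunk = sock.recv(16381)  # same read size as the code above
            if not chunk:  # peer closed the connection
                return
            remaining += chunk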
def get_users(self, user_ids, nid=None):
    """Get a listing of data for specific users `user_ids` in
    a network `nid`

    :type user_ids: list of str
    :param user_ids: a list of user ids. These are the same
        ids that are returned by get_all_users.
    :type nid: str
    :param nid: This is the ID of the network to get students
        from. This is optional and only to override the existing
        `network_id` entered when creating the class
    :returns: Python object containing returned data, a list
        of dicts containing user data.
    """
    r = self.request(
        method="network.get_users",
        data={"ids": user_ids},
        nid=nid
    )
    return self._handle_error(r, "Could not get users.")
[ "def", "get_users", "(", "self", ",", "user_ids", ",", "nid", "=", "None", ")", ":", "r", "=", "self", ".", "request", "(", "method", "=", "\"network.get_users\"", ",", "data", "=", "{", "\"ids\"", ":", "user_ids", "}", ",", "nid", "=", "nid", ")", "return", "self", ".", "_handle_error", "(", "r", ",", "\"Could not get users.\"", ")" ]
39.65
14.95
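A minimal usage sketch, assuming client is an instance of the class that defines get_users above; the ids and the 'name' field are assumptions for illustration:

users = client.get_users(['abc123', 'def456'])  # illustrative user ids
for user in users:
    print(user.get('name'))  # each entry is a dict of user data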
def css_text(self, path, default=NULL, smart=False, normalize_space=True):
    """
    Get the normalized text of the node matching the given CSS path.
    """

    try:
        return get_node_text(self.css_one(path), smart=smart,
                             normalize_space=normalize_space)
    except IndexError:
        if default is NULL:
            raise
        else:
            return default
[ "def", "css_text", "(", "self", ",", "path", ",", "default", "=", "NULL", ",", "smart", "=", "False", ",", "normalize_space", "=", "True", ")", ":", "try", ":", "return", "get_node_text", "(", "self", ".", "css_one", "(", "path", ")", ",", "smart", "=", "smart", ",", "normalize_space", "=", "normalize_space", ")", "except", "IndexError", ":", "if", "default", "is", "NULL", ":", "raise", "else", ":", "return", "default" ]
32.769231
19.230769
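A brief usage sketch, assuming doc is an instance of the class that defines css_text above; the selector and default are illustrative:

title = doc.css_text('h1.title', default='')
# Returns the normalized text of the first match, or '' instead of
# raising IndexError when nothing matches.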
def walk(self, dag, walk_func):
    """ Walks each node of the graph, in parallel if it can.
    The walk_func is only called when the node's dependencies have been
    satisfied
    """

    # First, we'll topologically sort all of the nodes, with nodes that
    # have no dependencies first. We do this to ensure that we don't call
    # .join on a thread that hasn't yet been started.
    #
    # TODO(ejholmes): An alternative would be to ensure that Thread.join
    # blocks if the thread has not yet been started.
    nodes = dag.topological_sort()
    nodes.reverse()

    # This maps a node name to a thread of execution.
    threads = {}

    # Blocks until all of the given nodes have completed execution (whether
    # successfully, or errored). Returns True if all nodes returned True.
    def wait_for(nodes):
        for node in nodes:
            thread = threads[node]
            while thread.is_alive():
                threads[node].join(0.5)

    # For each node in the graph, we're going to allocate a thread to
    # execute. The thread will block executing walk_func, until all of the
    # node's dependencies have executed.
    for node in nodes:
        def fn(n, deps):
            if deps:
                logger.debug(
                    "%s waiting for %s to complete",
                    n,
                    ", ".join(deps))

            # Wait for all dependencies to complete.
            wait_for(deps)

            logger.debug("%s starting", n)

            self.semaphore.acquire()
            try:
                return walk_func(n)
            finally:
                self.semaphore.release()

        deps = dag.all_downstreams(node)
        threads[node] = Thread(target=fn, args=(node, deps), name=node)

    # Start up all of the threads.
    for node in nodes:
        threads[node].start()

    # Wait for all threads to complete executing.
    wait_for(nodes)
[ "def", "walk", "(", "self", ",", "dag", ",", "walk_func", ")", ":", "# First, we'll topologically sort all of the nodes, with nodes that", "# have no dependencies first. We do this to ensure that we don't call", "# .join on a thread that hasn't yet been started.", "#", "# TODO(ejholmes): An alternative would be to ensure that Thread.join", "# blocks if the thread has not yet been started.", "nodes", "=", "dag", ".", "topological_sort", "(", ")", "nodes", ".", "reverse", "(", ")", "# This maps a node name to a thread of execution.", "threads", "=", "{", "}", "# Blocks until all of the given nodes have completed execution (whether", "# successfully, or errored). Returns True if all nodes returned True.", "def", "wait_for", "(", "nodes", ")", ":", "for", "node", "in", "nodes", ":", "thread", "=", "threads", "[", "node", "]", "while", "thread", ".", "is_alive", "(", ")", ":", "threads", "[", "node", "]", ".", "join", "(", "0.5", ")", "# For each node in the graph, we're going to allocate a thread to", "# execute. The thread will block executing walk_func, until all of the", "# nodes dependencies have executed.", "for", "node", "in", "nodes", ":", "def", "fn", "(", "n", ",", "deps", ")", ":", "if", "deps", ":", "logger", ".", "debug", "(", "\"%s waiting for %s to complete\"", ",", "n", ",", "\", \"", ".", "join", "(", "deps", ")", ")", "# Wait for all dependencies to complete.", "wait_for", "(", "deps", ")", "logger", ".", "debug", "(", "\"%s starting\"", ",", "n", ")", "self", ".", "semaphore", ".", "acquire", "(", ")", "try", ":", "return", "walk_func", "(", "n", ")", "finally", ":", "self", ".", "semaphore", ".", "release", "(", ")", "deps", "=", "dag", ".", "all_downstreams", "(", "node", ")", "threads", "[", "node", "]", "=", "Thread", "(", "target", "=", "fn", ",", "args", "=", "(", "node", ",", "deps", ")", ",", "name", "=", "node", ")", "# Start up all of the threads.", "for", "node", "in", "nodes", ":", "threads", "[", "node", "]", ".", "start", "(", ")", "# Wait for all threads to complete executing.", "wait_for", "(", "nodes", ")" ]
35.754386
19.561404
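A hedged usage sketch; walker and dag are hypothetical instances, and dag is assumed to expose the topological_sort() and all_downstreams() methods the code above calls:

def build(node):
    # Stand-in walk_func: runs once per node, after its dependencies finish.
    print('building', node)
    return True

walker.walk(dag, build)  # threads start together; joins enforce the ordering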
def npz_generator(npz_path):
    """Generate data from an npz file."""
    npz_data = np.load(npz_path)
    X = npz_data['X']
    # Y is a binary matrix with shape=(n, k), each y will have shape=(k,)
    y = npz_data['Y']

    n = X.shape[0]
    while True:
        i = np.random.randint(0, n)

        yield {'X': X[i], 'Y': y[i]}
[ "def", "npz_generator", "(", "npz_path", ")", ":", "npz_data", "=", "np", ".", "load", "(", "npz_path", ")", "X", "=", "npz_data", "[", "'X'", "]", "# Y is a binary maxtrix with shape=(n, k), each y will have shape=(k,)", "y", "=", "npz_data", "[", "'Y'", "]", "n", "=", "X", ".", "shape", "[", "0", "]", "while", "True", ":", "i", "=", "np", ".", "random", ".", "randint", "(", "0", ",", "n", ")", "yield", "{", "'X'", ":", "X", "[", "i", "]", ",", "'Y'", ":", "y", "[", "i", "]", "}" ]
26.75
19
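A self-contained round trip through the generator above, assuming npz_generator and numpy are importable; the path and shapes are illustrative:

import numpy as np

X = np.random.rand(10, 4)                        # 10 samples, 4 features
Y = np.eye(3)[np.random.randint(0, 3, size=10)]  # one-hot labels, k=3
np.savez('/tmp/toy.npz', X=X, Y=Y)

gen = npz_generator('/tmp/toy.npz')
sample = next(gen)  # {'X': array of shape (4,), 'Y': array of shape (3,)}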