Dataset columns:
  text           string   lengths 89 to 104k
  code_tokens    list
  avg_line_len   float64  7.91 to 980
  score          float64  0 to 630
def encode_fetch_request(cls, payloads=(), max_wait_time=100, min_bytes=4096):
    """
    Encodes a FetchRequest struct

    Arguments:
        payloads: list of FetchRequestPayload
        max_wait_time (int, optional): ms to block waiting for min_bytes
            data. Defaults to 100.
        min_bytes (int, optional): minimum bytes required to return before
            max_wait_time. Defaults to 4096.

    Return: FetchRequest
    """
    return kafka.protocol.fetch.FetchRequest[0](
        replica_id=-1,
        max_wait_time=max_wait_time,
        min_bytes=min_bytes,
        topics=[(
            topic,
            [(partition, payload.offset, payload.max_bytes)
             for partition, payload in topic_payloads.items()])
            for topic, topic_payloads in
            group_by_topic_and_partition(payloads).items()])
[ "def", "encode_fetch_request", "(", "cls", ",", "payloads", "=", "(", ")", ",", "max_wait_time", "=", "100", ",", "min_bytes", "=", "4096", ")", ":", "return", "kafka", ".", "protocol", ".", "fetch", ".", "FetchRequest", "[", "0", "]", "(", "replica_id", "=", "-", "1", ",", "max_wait_time", "=", "max_wait_time", ",", "min_bytes", "=", "min_bytes", ",", "topics", "=", "[", "(", "topic", ",", "[", "(", "partition", ",", "payload", ".", "offset", ",", "payload", ".", "max_bytes", ")", "for", "partition", ",", "payload", "in", "topic_payloads", ".", "items", "(", ")", "]", ")", "for", "topic", ",", "topic_payloads", "in", "group_by_topic_and_partition", "(", "payloads", ")", ".", "items", "(", ")", "]", ")" ]
avg_line_len: 37.64, score: 17.32
def get_common_properties(root):
    """Read common properties from root of ReSpecTh XML file.

    Args:
        root (`~xml.etree.ElementTree.Element`): Root of ReSpecTh XML file

    Returns:
        properties (`dict`): Dictionary with common properties
    """
    properties = {}
    for elem in root.iterfind('commonProperties/property'):
        name = elem.attrib['name']

        if name == 'initial composition':
            properties['composition'] = {'species': [], 'kind': None}

            for child in elem.iter('component'):
                spec = {}
                spec['species-name'] = child.find('speciesLink').attrib['preferredKey']
                units = child.find('amount').attrib['units']

                # use InChI for unique species identifier (if present)
                try:
                    spec['InChI'] = child.find('speciesLink').attrib['InChI']
                except KeyError:
                    # TODO: add InChI validator/search
                    warn('Missing InChI for species ' + spec['species-name'])

                # If mole or mass fraction, just set value
                if units in ['mole fraction', 'mass fraction', 'mole percent']:
                    spec['amount'] = [float(child.find('amount').text)]
                elif units == 'percent':
                    # assume this means mole percent
                    warn('Assuming percent in composition means mole percent')
                    spec['amount'] = [float(child.find('amount').text)]
                    units = 'mole percent'
                elif units == 'ppm':
                    # assume molar ppm, convert to mole fraction
                    warn('Assuming molar ppm in composition and converting to mole fraction')
                    spec['amount'] = [float(child.find('amount').text) * 1.e-6]
                    units = 'mole fraction'
                elif units == 'ppb':
                    # assume molar ppb, convert to mole fraction
                    warn('Assuming molar ppb in composition and converting to mole fraction')
                    spec['amount'] = [float(child.find('amount').text) * 1.e-9]
                    units = 'mole fraction'
                else:
                    raise KeywordError('Composition units need to be one of: mole fraction, '
                                       'mass fraction, mole percent, percent, ppm, or ppb.')

                properties['composition']['species'].append(spec)

                # check consistency of composition type
                if properties['composition']['kind'] is None:
                    properties['composition']['kind'] = units
                elif properties['composition']['kind'] != units:
                    raise KeywordError('composition units ' + units +
                                       ' not consistent with ' +
                                       properties['composition']['kind'])

        elif name in datagroup_properties:
            field = name.replace(' ', '-')
            units = elem.attrib['units']
            if units == 'Torr':
                units = 'torr'
            quantity = 1.0 * unit_registry(units)
            try:
                quantity.to(property_units[field])
            except pint.DimensionalityError:
                raise KeywordError('units incompatible for property ' + name)

            properties[field] = [' '.join([elem.find('value').text, units])]

        else:
            raise KeywordError('Property ' + name + ' not supported as common property')

    return properties
[ "def", "get_common_properties", "(", "root", ")", ":", "properties", "=", "{", "}", "for", "elem", "in", "root", ".", "iterfind", "(", "'commonProperties/property'", ")", ":", "name", "=", "elem", ".", "attrib", "[", "'name'", "]", "if", "name", "==", "'initial composition'", ":", "properties", "[", "'composition'", "]", "=", "{", "'species'", ":", "[", "]", ",", "'kind'", ":", "None", "}", "for", "child", "in", "elem", ".", "iter", "(", "'component'", ")", ":", "spec", "=", "{", "}", "spec", "[", "'species-name'", "]", "=", "child", ".", "find", "(", "'speciesLink'", ")", ".", "attrib", "[", "'preferredKey'", "]", "units", "=", "child", ".", "find", "(", "'amount'", ")", ".", "attrib", "[", "'units'", "]", "# use InChI for unique species identifier (if present)", "try", ":", "spec", "[", "'InChI'", "]", "=", "child", ".", "find", "(", "'speciesLink'", ")", ".", "attrib", "[", "'InChI'", "]", "except", "KeyError", ":", "# TODO: add InChI validator/search", "warn", "(", "'Missing InChI for species '", "+", "spec", "[", "'species-name'", "]", ")", "pass", "# If mole or mass fraction, just set value", "if", "units", "in", "[", "'mole fraction'", ",", "'mass fraction'", ",", "'mole percent'", "]", ":", "spec", "[", "'amount'", "]", "=", "[", "float", "(", "child", ".", "find", "(", "'amount'", ")", ".", "text", ")", "]", "elif", "units", "==", "'percent'", ":", "# assume this means mole percent", "warn", "(", "'Assuming percent in composition means mole percent'", ")", "spec", "[", "'amount'", "]", "=", "[", "float", "(", "child", ".", "find", "(", "'amount'", ")", ".", "text", ")", "]", "units", "=", "'mole percent'", "elif", "units", "==", "'ppm'", ":", "# assume molar ppm, convert to mole fraction", "warn", "(", "'Assuming molar ppm in composition and converting to mole fraction'", ")", "spec", "[", "'amount'", "]", "=", "[", "float", "(", "child", ".", "find", "(", "'amount'", ")", ".", "text", ")", "*", "1.e-6", "]", "units", "=", "'mole fraction'", "elif", "units", "==", "'ppb'", ":", "# assume molar ppb, convert to mole fraction", "warn", "(", "'Assuming molar ppb in composition and converting to mole fraction'", ")", "spec", "[", "'amount'", "]", "=", "[", "float", "(", "child", ".", "find", "(", "'amount'", ")", ".", "text", ")", "*", "1.e-9", "]", "units", "=", "'mole fraction'", "else", ":", "raise", "KeywordError", "(", "'Composition units need to be one of: mole fraction, '", "'mass fraction, mole percent, percent, ppm, or ppb.'", ")", "properties", "[", "'composition'", "]", "[", "'species'", "]", ".", "append", "(", "spec", ")", "# check consistency of composition type", "if", "properties", "[", "'composition'", "]", "[", "'kind'", "]", "is", "None", ":", "properties", "[", "'composition'", "]", "[", "'kind'", "]", "=", "units", "elif", "properties", "[", "'composition'", "]", "[", "'kind'", "]", "!=", "units", ":", "raise", "KeywordError", "(", "'composition units '", "+", "units", "+", "' not consistent with '", "+", "properties", "[", "'composition'", "]", "[", "'kind'", "]", ")", "elif", "name", "in", "datagroup_properties", ":", "field", "=", "name", ".", "replace", "(", "' '", ",", "'-'", ")", "units", "=", "elem", ".", "attrib", "[", "'units'", "]", "if", "units", "==", "'Torr'", ":", "units", "=", "'torr'", "quantity", "=", "1.0", "*", "unit_registry", "(", "units", ")", "try", ":", "quantity", ".", "to", "(", "property_units", "[", "field", "]", ")", "except", "pint", ".", "DimensionalityError", ":", "raise", "KeywordError", "(", "'units incompatible for property '", 
"+", "name", ")", "properties", "[", "field", "]", "=", "[", "' '", ".", "join", "(", "[", "elem", ".", "find", "(", "'value'", ")", ".", "text", ",", "units", "]", ")", "]", "else", ":", "raise", "KeywordError", "(", "'Property '", "+", "name", "+", "' not supported as common property'", ")", "return", "properties" ]
avg_line_len: 44.308642, score: 23.938272
def encode(request, data):
    """
    Add request content data to request body, set Content-type header.

    Should be overridden by subclasses if not using JSON encoding.

    Args:
        request (HTTPRequest): The request object.
        data (dict, None): Data to be encoded.

    Returns:
        HTTPRequest: The request object.
    """
    if data is None:
        return request
    request.add_header('Content-Type', 'application/json')
    request.data = json.dumps(data)
    return request
[ "def", "encode", "(", "request", ",", "data", ")", ":", "if", "data", "is", "None", ":", "return", "request", "request", ".", "add_header", "(", "'Content-Type'", ",", "'application/json'", ")", "request", ".", "data", "=", "json", ".", "dumps", "(", "data", ")", "return", "request" ]
avg_line_len: 28.157895, score: 20.736842
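A quick usage sketch of the encoder above. FakeRequest is a made-up stand-in for the HTTPRequest object the real code expects, and encode() is assumed to be in scope:

class FakeRequest:                       # hypothetical stand-in for HTTPRequest
    def __init__(self):
        self.headers = {}
        self.data = None

    def add_header(self, key, value):
        self.headers[key] = value

req = encode(FakeRequest(), {'name': 'example'})
print(req.headers)   # {'Content-Type': 'application/json'}
print(req.data)      # {"name": "example"}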
def _update_weight(self, data, R, n_local_subj, local_weight_offset):
    """update local weight

    Parameters
    ----------
    data : list of 2D array, element i has shape=[n_voxel, n_tr]
        Subjects' fMRI data.

    R : list of 2D arrays, element i has shape=[n_voxel, n_dim]
        Each element in the list contains the scanner coordinate matrix
        of fMRI data of one subject.

    n_local_subj : integer
        Number of subjects allocated to this process.

    local_weight_offset : 1D array
        Offset of each subject's weights on this process.

    Returns
    -------
    HTFA
        Returns the instance itself.
    """
    for s, subj_data in enumerate(data):
        base = s * self.prior_size
        centers = self.local_posterior_[base:base + self.K * self.n_dim]\
            .reshape((self.K, self.n_dim))
        start_idx = base + self.K * self.n_dim
        end_idx = base + self.prior_size
        widths = self.local_posterior_[start_idx:end_idx].reshape((self.K, 1))
        unique_R, inds = self.get_unique_R(R[s])
        F = self.get_factors(unique_R, inds, centers, widths)
        start_idx = local_weight_offset[s]
        if s == n_local_subj - 1:
            self.local_weights_[start_idx:] = \
                self.get_weights(subj_data, F).ravel()
        else:
            end_idx = local_weight_offset[s + 1]
            self.local_weights_[start_idx:end_idx] = \
                self.get_weights(subj_data, F).ravel()
    return self
[ "def", "_update_weight", "(", "self", ",", "data", ",", "R", ",", "n_local_subj", ",", "local_weight_offset", ")", ":", "for", "s", ",", "subj_data", "in", "enumerate", "(", "data", ")", ":", "base", "=", "s", "*", "self", ".", "prior_size", "centers", "=", "self", ".", "local_posterior_", "[", "base", ":", "base", "+", "self", ".", "K", "*", "self", ".", "n_dim", "]", ".", "reshape", "(", "(", "self", ".", "K", ",", "self", ".", "n_dim", ")", ")", "start_idx", "=", "base", "+", "self", ".", "K", "*", "self", ".", "n_dim", "end_idx", "=", "base", "+", "self", ".", "prior_size", "widths", "=", "self", ".", "local_posterior_", "[", "start_idx", ":", "end_idx", "]", ".", "reshape", "(", "(", "self", ".", "K", ",", "1", ")", ")", "unique_R", ",", "inds", "=", "self", ".", "get_unique_R", "(", "R", "[", "s", "]", ")", "F", "=", "self", ".", "get_factors", "(", "unique_R", ",", "inds", ",", "centers", ",", "widths", ")", "start_idx", "=", "local_weight_offset", "[", "s", "]", "if", "s", "==", "n_local_subj", "-", "1", ":", "self", ".", "local_weights_", "[", "start_idx", ":", "]", "=", "self", ".", "get_weights", "(", "subj_data", ",", "F", ")", ".", "ravel", "(", ")", "else", ":", "end_idx", "=", "local_weight_offset", "[", "s", "+", "1", "]", "self", ".", "local_weights_", "[", "start_idx", ":", "end_idx", "]", "=", "self", ".", "get_weights", "(", "subj_data", ",", "F", ")", ".", "ravel", "(", ")", "return", "self" ]
avg_line_len: 35.666667, score: 19.377778
def get_patches_ignore_regex(self):
    """Returns a compiled regex for filtering out patches.

    The pattern is parsed from a comment in the specfile that contains
    the word patches_ignore followed by an equal sign. For example, a
    comment such as:

        # patches_ignore=regex

    would make this method return the compiled pattern 'regex'.

    Only a very limited subset of characters are accepted so no fancy
    stuff like matching groups etc.
    """
    match = re.search(r'# *patches_ignore=([\w *.+?[\]|{,}\-_]+)', self.txt)
    if not match:
        return None
    regex_string = match.group(1)
    try:
        return re.compile(regex_string)
    except Exception:
        return None
[ "def", "get_patches_ignore_regex", "(", "self", ")", ":", "match", "=", "re", ".", "search", "(", "r'# *patches_ignore=([\\w *.+?[\\]|{,}\\-_]+)'", ",", "self", ".", "txt", ")", "if", "not", "match", ":", "return", "None", "regex_string", "=", "match", ".", "group", "(", "1", ")", "try", ":", "return", "re", ".", "compile", "(", "regex_string", ")", "except", "Exception", ":", "return", "None" ]
avg_line_len: 34.043478, score: 18.26087
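A minimal check of the parsing behavior on an invented specfile snippet; note that the character class admits no parentheses, so the comment's pattern must avoid them:

import re

txt = 'Patch0: fix.patch\n# patches_ignore=downstream-only.*\n'
match = re.search(r'# *patches_ignore=([\w *.+?[\]|{,}\-_]+)', txt)
print(match.group(1))                                      # downstream-only.*
print(bool(re.compile(match.group(1)).match('downstream-only-fix')))  # True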
def check_overlap(self, other):
    """Check overlap with another spectrum.

    Also see :ref:`pysynphot-command-checko`.

    This checks whether the wavelength set of the given spectrum
    is defined everywhere within ``self``. Wavelength values where
    throughput is zero are excluded from the check.

    Typical use case is for checking whether a source spectrum is
    fully defined over the range of a bandpass. This check is
    asymmetric in the sense that if ``self`` is fully defined within
    the given spectrum, but not the other way around, it will still
    only return "partial".

    If the given spectrum is analytic, the result is always "full".

    Example of full overlap::

        |---------- other ----------|
           |------ self ------|

    Examples of partial overlap::

           |---------- self ----------|
        |------ other ------|

        |---- other ----|
                  |---- self ----|

        |---- self ----|
                  |---- other ----|

    Examples of no overlap::

        |---- self ----|
                          |---- other ----|

        |---- other ----|
                           |---- self ----|

    Parameters
    ----------
    other : `SourceSpectrum` or `SpectralElement`
        The other spectrum.

    Returns
    -------
    ans : {'full', 'partial', 'none'}
        Overlap status.
    """
    if other.isAnalytic and not isinstance(other, Box):
        # then it's defined everywhere, except for Box
        return 'full'

    swave = self.wave[N.where(self.throughput != 0)]
    s1, s2 = swave.min(), swave.max()

    owave = other.wave
    o1, o2 = owave.min(), owave.max()

    if (s1 >= o1 and s2 <= o2):
        ans = 'full'
    elif (s2 < o1) or (o2 < s1):
        ans = 'none'
    else:
        ans = 'partial'

    return ans
[ "def", "check_overlap", "(", "self", ",", "other", ")", ":", "if", "other", ".", "isAnalytic", "and", "not", "isinstance", "(", "other", ",", "Box", ")", ":", "# then it's defined everywhere, except for Box", "return", "'full'", "swave", "=", "self", ".", "wave", "[", "N", ".", "where", "(", "self", ".", "throughput", "!=", "0", ")", "]", "s1", ",", "s2", "=", "swave", ".", "min", "(", ")", ",", "swave", ".", "max", "(", ")", "owave", "=", "other", ".", "wave", "o1", ",", "o2", "=", "owave", ".", "min", "(", ")", ",", "owave", ".", "max", "(", ")", "if", "(", "s1", ">=", "o1", "and", "s2", "<=", "o2", ")", ":", "ans", "=", "'full'", "elif", "(", "s2", "<", "o1", ")", "or", "(", "o2", "<", "s1", ")", ":", "ans", "=", "'none'", "else", ":", "ans", "=", "'partial'", "return", "ans" ]
avg_line_len: 28.029851, score: 20.716418
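The interval comparison can be exercised on its own with plain numpy arrays; this standalone sketch re-implements just that logic, without pysynphot objects:

import numpy as np

def overlap_status(self_wave, self_throughput, other_wave):
    # keep only wavelengths with nonzero throughput, then compare extremes
    swave = self_wave[np.where(self_throughput != 0)]
    s1, s2 = swave.min(), swave.max()
    o1, o2 = other_wave.min(), other_wave.max()
    if s1 >= o1 and s2 <= o2:
        return 'full'
    if s2 < o1 or o2 < s1:
        return 'none'
    return 'partial'

wave = np.array([3000., 5000., 9000.])
thru = np.array([0., 1., 1.])
print(overlap_status(wave, thru, np.array([4000., 10000.])))  # 'full'
print(overlap_status(wave, thru, np.array([6000., 8000.])))   # 'partial'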
def release(self):
    """Release a semaphore, incrementing the internal counter by one.

    When the counter is zero on entry and another thread is waiting for
    it to become larger than zero again, wake up that thread.

    If the number of releases exceeds the number of acquires,
    raise a ValueError.
    """
    with self.__cond:
        if self.__value >= self._initial_value:
            raise ValueError("Semaphore released too many times")
        self.__value += 1
        self.__cond.notify()
[ "def", "release", "(", "self", ")", ":", "with", "self", ".", "__cond", ":", "if", "self", ".", "__value", ">=", "self", ".", "_initial_value", ":", "raise", "ValueError", "(", "\"Semaphore released too many times\"", ")", "self", ".", "__value", "+=", "1", "self", ".", "__cond", ".", "notify", "(", ")" ]
avg_line_len: 36, score: 20.933333
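The over-release guard mirrors the stdlib's threading.BoundedSemaphore, which demonstrates the same behavior without the surrounding class:

import threading

sem = threading.BoundedSemaphore(1)
sem.acquire()
sem.release()          # back at the initial value: fine
try:
    sem.release()      # one release too many
except ValueError as e:
    print(e)           # e.g. "Semaphore released too many times"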
def parse_args(argv=None):
    """Parses CLI arguments.

    Args:
        argv: optional list of arguments to parse. sys.argv is used by
            default.

    Raises:
        dvc.exceptions.DvcParserError: raised for argument parsing errors.
    """
    parent_parser = get_parent_parser()

    # Main parser
    desc = "Data Version Control"
    parser = DvcParser(
        prog="dvc",
        description=desc,
        parents=[parent_parser],
        formatter_class=argparse.RawTextHelpFormatter,
    )

    # NOTE: On some python versions action='version' prints to stderr
    # instead of stdout https://bugs.python.org/issue18920
    parser.add_argument(
        "-V",
        "--version",
        action=VersionAction,
        nargs=0,
        help="Show program's version.",
    )

    # Sub commands
    subparsers = parser.add_subparsers(
        title="Available Commands",
        metavar="COMMAND",
        dest="cmd",
        help="Use dvc COMMAND --help for command-specific help.",
    )

    fix_subparsers(subparsers)

    for cmd in COMMANDS:
        cmd.add_parser(subparsers, parent_parser)

    args = parser.parse_args(argv)
    return args
[ "def", "parse_args", "(", "argv", "=", "None", ")", ":", "parent_parser", "=", "get_parent_parser", "(", ")", "# Main parser", "desc", "=", "\"Data Version Control\"", "parser", "=", "DvcParser", "(", "prog", "=", "\"dvc\"", ",", "description", "=", "desc", ",", "parents", "=", "[", "parent_parser", "]", ",", "formatter_class", "=", "argparse", ".", "RawTextHelpFormatter", ",", ")", "# NOTE: On some python versions action='version' prints to stderr", "# instead of stdout https://bugs.python.org/issue18920", "parser", ".", "add_argument", "(", "\"-V\"", ",", "\"--version\"", ",", "action", "=", "VersionAction", ",", "nargs", "=", "0", ",", "help", "=", "\"Show program's version.\"", ",", ")", "# Sub commands", "subparsers", "=", "parser", ".", "add_subparsers", "(", "title", "=", "\"Available Commands\"", ",", "metavar", "=", "\"COMMAND\"", ",", "dest", "=", "\"cmd\"", ",", "help", "=", "\"Use dvc COMMAND --help for command-specific help.\"", ",", ")", "fix_subparsers", "(", "subparsers", ")", "for", "cmd", "in", "COMMANDS", ":", "cmd", ".", "add_parser", "(", "subparsers", ",", "parent_parser", ")", "args", "=", "parser", ".", "parse_args", "(", "argv", ")", "return", "args" ]
avg_line_len: 24.195652, score: 22.130435
def scalars_impl(self, run, tag_regex_string):
    """Given a tag regex and single run, return ScalarEvents.

    Args:
        run: A run string.
        tag_regex_string: A regular expression that captures portions of
            tags.

    Raises:
        ValueError: if the scalars plugin is not registered.

    Returns:
        A dictionary that is the JSON-able response.
    """
    if not tag_regex_string:
        # The user provided no regex.
        return {
            _REGEX_VALID_PROPERTY: False,
            _TAG_TO_EVENTS_PROPERTY: {},
        }

    # Construct the regex.
    try:
        regex = re.compile(tag_regex_string)
    except re.error:
        return {
            _REGEX_VALID_PROPERTY: False,
            _TAG_TO_EVENTS_PROPERTY: {},
        }

    # Fetch the tags for the run. Filter for tags that match the regex.
    run_to_data = self._multiplexer.PluginRunToTagToContent(
        scalars_metadata.PLUGIN_NAME)

    tag_to_data = None
    payload = {}  # initialized here so it is bound even when no tags match
    try:
        tag_to_data = run_to_data[run]
    except KeyError:
        # The run could not be found. Perhaps a configuration specified a
        # run that TensorBoard has not read from disk yet.
        pass

    if tag_to_data:
        scalars_plugin_instance = self._get_scalars_plugin()
        if not scalars_plugin_instance:
            raise ValueError(('Failed to respond to request for /scalars. '
                              'The scalars plugin is oddly not registered.'))

        form = scalars_plugin.OutputFormat.JSON
        payload = {
            tag: scalars_plugin_instance.scalars_impl(tag, run, None, form)[0]
            for tag in tag_to_data.keys()
            if regex.match(tag)
        }

    return {
        _REGEX_VALID_PROPERTY: True,
        _TAG_TO_EVENTS_PROPERTY: payload,
    }
[ "def", "scalars_impl", "(", "self", ",", "run", ",", "tag_regex_string", ")", ":", "if", "not", "tag_regex_string", ":", "# The user provided no regex.", "return", "{", "_REGEX_VALID_PROPERTY", ":", "False", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "{", "}", ",", "}", "# Construct the regex.", "try", ":", "regex", "=", "re", ".", "compile", "(", "tag_regex_string", ")", "except", "re", ".", "error", ":", "return", "{", "_REGEX_VALID_PROPERTY", ":", "False", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "{", "}", ",", "}", "# Fetch the tags for the run. Filter for tags that match the regex.", "run_to_data", "=", "self", ".", "_multiplexer", ".", "PluginRunToTagToContent", "(", "scalars_metadata", ".", "PLUGIN_NAME", ")", "tag_to_data", "=", "None", "try", ":", "tag_to_data", "=", "run_to_data", "[", "run", "]", "except", "KeyError", ":", "# The run could not be found. Perhaps a configuration specified a run that", "# TensorBoard has not read from disk yet.", "payload", "=", "{", "}", "if", "tag_to_data", ":", "scalars_plugin_instance", "=", "self", ".", "_get_scalars_plugin", "(", ")", "if", "not", "scalars_plugin_instance", ":", "raise", "ValueError", "(", "(", "'Failed to respond to request for /scalars. '", "'The scalars plugin is oddly not registered.'", ")", ")", "form", "=", "scalars_plugin", ".", "OutputFormat", ".", "JSON", "payload", "=", "{", "tag", ":", "scalars_plugin_instance", ".", "scalars_impl", "(", "tag", ",", "run", ",", "None", ",", "form", ")", "[", "0", "]", "for", "tag", "in", "tag_to_data", ".", "keys", "(", ")", "if", "regex", ".", "match", "(", "tag", ")", "}", "return", "{", "_REGEX_VALID_PROPERTY", ":", "True", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "payload", ",", "}" ]
avg_line_len: 28.87931, score: 21.017241
def sflow_collector_collector_port_number(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    sflow = ET.SubElement(config, "sflow",
                          xmlns="urn:brocade.com:mgmt:brocade-sflow")
    collector = ET.SubElement(sflow, "collector")
    collector_ip_address_key = ET.SubElement(collector, "collector-ip-address")
    collector_ip_address_key.text = kwargs.pop('collector_ip_address')
    use_vrf_key = ET.SubElement(collector, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    collector_port_number = ET.SubElement(collector, "collector-port-number")
    collector_port_number.text = kwargs.pop('collector_port_number')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
[ "def", "sflow_collector_collector_port_number", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "sflow", "=", "ET", ".", "SubElement", "(", "config", ",", "\"sflow\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-sflow\"", ")", "collector", "=", "ET", ".", "SubElement", "(", "sflow", ",", "\"collector\"", ")", "collector_ip_address_key", "=", "ET", ".", "SubElement", "(", "collector", ",", "\"collector-ip-address\"", ")", "collector_ip_address_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'collector_ip_address'", ")", "use_vrf_key", "=", "ET", ".", "SubElement", "(", "collector", ",", "\"use-vrf\"", ")", "use_vrf_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'use_vrf'", ")", "collector_port_number", "=", "ET", ".", "SubElement", "(", "collector", ",", "\"collector-port-number\"", ")", "collector_port_number", ".", "text", "=", "kwargs", ".", "pop", "(", "'collector_port_number'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
avg_line_len: 52.133333, score: 21.666667
def inject(fun: Callable) -> Callable:
    """
    A decorator for injecting dependencies into functions/methods, based
    on their type annotations.

    .. code-block:: python

        class SomeClass:
            @inject
            def __init__(self, my_dep: DepType) -> None:
                self.my_dep = my_dep

    .. important::
        On the opposite to :class:`~haps.Inject`, dependency is injected
        at the moment of method invocation. In case of decorating
        `__init__`, dependency is injected when `SomeClass` instance is
        created.

    :param fun: callable with annotated parameters
    :return: decorated callable
    """
    sig = inspect.signature(fun)

    injectables: Dict[str, Any] = {}
    for name, param in sig.parameters.items():
        type_ = param.annotation
        if name == 'self':
            continue
        injectables[name] = type_

    @wraps(fun)
    def _inner(*args, **kwargs):
        container = Container()
        for n, t in injectables.items():
            if n not in kwargs:
                kwargs[n] = container.get_object(t)
        return fun(*args, **kwargs)

    return _inner
[ "def", "inject", "(", "fun", ":", "Callable", ")", "->", "Callable", ":", "sig", "=", "inspect", ".", "signature", "(", "fun", ")", "injectables", ":", "Dict", "[", "str", ",", "Any", "]", "=", "{", "}", "for", "name", ",", "param", "in", "sig", ".", "parameters", ".", "items", "(", ")", ":", "type_", "=", "param", ".", "annotation", "if", "name", "==", "'self'", ":", "continue", "else", ":", "injectables", "[", "name", "]", "=", "type_", "@", "wraps", "(", "fun", ")", "def", "_inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "container", "=", "Container", "(", ")", "for", "n", ",", "t", "in", "injectables", ".", "items", "(", ")", ":", "if", "n", "not", "in", "kwargs", ":", "kwargs", "[", "n", "]", "=", "container", ".", "get_object", "(", "t", ")", "return", "fun", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_inner" ]
avg_line_len: 27.439024, score: 19.341463
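A self-contained sketch of the same annotation-driven pattern; REGISTRY, Database, and handler are invented here, with a plain dict standing in for haps' Container:

import inspect
from functools import wraps

REGISTRY = {}  # hypothetical stand-in for haps' Container

def inject(fun):
    sig = inspect.signature(fun)
    injectables = {name: param.annotation
                   for name, param in sig.parameters.items()
                   if name != 'self'}

    @wraps(fun)
    def _inner(*args, **kwargs):
        for n, t in injectables.items():
            if n not in kwargs:
                kwargs[n] = REGISTRY[t]   # container.get_object(t) in haps
        return fun(*args, **kwargs)
    return _inner

class Database:
    pass

REGISTRY[Database] = Database()

@inject
def handler(db: Database):
    return db

print(type(handler()).__name__)  # Database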
def get_version():
    "returns the libdbus library version as a tuple of integers (major, minor, micro)."
    major = ct.c_int()
    minor = ct.c_int()
    micro = ct.c_int()
    dbus.dbus_get_version(ct.byref(major), ct.byref(minor), ct.byref(micro))
    return (major.value, minor.value, micro.value)
[ "def", "get_version", "(", ")", ":", "major", "=", "ct", ".", "c_int", "(", ")", "minor", "=", "ct", ".", "c_int", "(", ")", "micro", "=", "ct", ".", "c_int", "(", ")", "dbus", ".", "dbus_get_version", "(", "ct", ".", "byref", "(", "major", ")", ",", "ct", ".", "byref", "(", "minor", ")", ",", "ct", ".", "byref", "(", "micro", ")", ")", "return", "(", "major", ".", "value", ",", "minor", ".", "value", ",", "micro", ".", "value", ")" ]
avg_line_len: 38.375, score: 24.125
def write_ds9region(self, region, *args, **kwargs):
    """Create a ds9 compatible region file from the ROI.

    It calls the `to_ds9` method and writes the result to the region
    file. Only the file name is required. All other parameters will be
    forwarded to the `to_ds9` method; see the documentation of that
    method for all accepted parameters and options.

    Parameters
    ----------
    region : str
        name of the region file (string)
    """
    lines = self.to_ds9(*args, **kwargs)
    with open(region, 'w') as fo:
        fo.write("\n".join(lines))
[ "def", "write_ds9region", "(", "self", ",", "region", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "lines", "=", "self", ".", "to_ds9", "(", "*", "args", ",", "*", "*", "kwargs", ")", "with", "open", "(", "region", ",", "'w'", ")", "as", "fo", ":", "fo", ".", "write", "(", "\"\\n\"", ".", "join", "(", "lines", ")", ")" ]
avg_line_len: 40.2, score: 20.8
def items(self):
    """
    Returns the named bitfields in the structure as a list of
    2-tuples of (key, value). Uses a clone so as to only read from the
    underlying data once.
    """
    temp = self.clone()
    return [(f, getattr(temp, f)) for f in iter(self)]
[ "def", "items", "(", "self", ")", ":", "temp", "=", "self", ".", "clone", "(", ")", "return", "[", "(", "f", ",", "getattr", "(", "temp", ",", "f", ")", ")", "for", "f", "in", "iter", "(", "self", ")", "]" ]
avg_line_len: 34.111111, score: 17.444444
def get_stack_data(self, frame, traceback, event_type):
    """Get the stack frames data at each of the hooks above (i.e. for
    each line of the Python code)"""
    heap_data = Heap(self.options)
    stack_data = StackFrames(self.options)
    stack_frames, cur_frame_ind = self.get_stack(frame, traceback)

    for frame_ind, (frame, lineno) in enumerate(stack_frames):
        skip_this_stack = False

        # Skip the self.run calling frame (first frame)
        if frame_ind == 0:
            continue

        # Skip stack after a certain stack frame depth
        if len(stack_data) > self.options.depth:
            skip_this_stack = True
            break

        # Skip stack when frames don't belong to the current notebook or
        # current cell, i.e. frames in another global scope altogether
        # or frames in other cells
        if (not self.is_notebook_frame(frame) or
                self.is_other_cell_frame(frame)):
            if not self.options.step_all:
                skip_this_stack = True
                break
            lineno = 0  # So line markers don't display for these frames
        else:
            lineno += 1  # Because cell magic is actually line 1

        # Filter out ignored names from the frame locals
        user_locals = filter_dict(
            frame.f_locals,
            ignore_vars + list(self.ipy_shell.user_ns_hidden.keys())
        )

        # Add frame and heap data
        stack_data.add(frame, lineno, event_type, user_locals)
        heap_data.add(user_locals)

    if not skip_this_stack and not stack_data.is_empty():
        self.trace_history.append(
            stack_data,
            heap_data,
            self.stdout.getvalue()
        )
[ "def", "get_stack_data", "(", "self", ",", "frame", ",", "traceback", ",", "event_type", ")", ":", "heap_data", "=", "Heap", "(", "self", ".", "options", ")", "stack_data", "=", "StackFrames", "(", "self", ".", "options", ")", "stack_frames", ",", "cur_frame_ind", "=", "self", ".", "get_stack", "(", "frame", ",", "traceback", ")", "for", "frame_ind", ",", "(", "frame", ",", "lineno", ")", "in", "enumerate", "(", "stack_frames", ")", ":", "skip_this_stack", "=", "False", "# Skip the self.run calling frame (first frame)", "if", "frame_ind", "==", "0", ":", "continue", "# Skip stack after a certain stack frame depth", "if", "len", "(", "stack_data", ")", ">", "self", ".", "options", ".", "depth", ":", "skip_this_stack", "=", "True", "break", "# Skip stack when frames dont belong to the current notebook or", "# current cell, I.e. frames in another global scope altogether", "# or frames in other cells", "if", "(", "not", "self", ".", "is_notebook_frame", "(", "frame", ")", "or", "self", ".", "is_other_cell_frame", "(", "frame", ")", ")", ":", "if", "not", "self", ".", "options", ".", "step_all", ":", "skip_this_stack", "=", "True", "break", "lineno", "=", "0", "# So line markers dont display for these frames", "else", ":", "lineno", "+=", "1", "# Because cell magic is actually line 1", "# Filter out ignored names from the frame locals", "user_locals", "=", "filter_dict", "(", "frame", ".", "f_locals", ",", "ignore_vars", "+", "list", "(", "self", ".", "ipy_shell", ".", "user_ns_hidden", ".", "keys", "(", ")", ")", ")", "# Add frame and heap data", "stack_data", ".", "add", "(", "frame", ",", "lineno", ",", "event_type", ",", "user_locals", ")", "heap_data", ".", "add", "(", "user_locals", ")", "if", "not", "skip_this_stack", "and", "not", "stack_data", ".", "is_empty", "(", ")", ":", "self", ".", "trace_history", ".", "append", "(", "stack_data", ",", "heap_data", ",", "self", ".", "stdout", ".", "getvalue", "(", ")", ")" ]
avg_line_len: 39.106383, score: 18.212766
def pipe_commands_to_file(cmds, path, extra_env=None, show_stderr=False):
    """
    Executes the list of commands piping each one into the next and
    writing stdout of the last process into a file at the given path.
    """
    env = extend_env(extra_env) if extra_env else None
    env_str = (get_env_str(extra_env) + ' ') if extra_env else ''
    cmd_strs = [env_str + ' '.join(cmd) for cmd in cmds]
    logger.info('Saving output of `{0}`'.format(' | '.join(cmd_strs)))

    with open('/dev/null', 'w') as NULL:
        # Start processes
        processes = []
        for cmd_str, cmd in zip(cmd_strs, cmds):
            p_stdin = processes[-1][1].stdout if processes else None
            p_stderr = None if show_stderr else NULL
            p = Popen(cmd, env=env, stdout=PIPE, stdin=p_stdin,
                      stderr=p_stderr)
            processes.append((cmd_str, p))

        p_last = processes[-1][1]
        with open(path, 'wb') as f:
            shutil.copyfileobj(p_last.stdout, f)

        # Close processes, remembering the first failure so the raised
        # error points at the failing command, not the last one checked.
        failed = None
        for cmd_str, p in processes:
            p.stdout.close()
            if p.wait() != 0 and failed is None:
                failed = (cmd_str, p.returncode)

        if failed:
            raise CalledProcessError(cmd=failed[0], returncode=failed[1])
[ "def", "pipe_commands_to_file", "(", "cmds", ",", "path", ",", "extra_env", "=", "None", ",", "show_stderr", "=", "False", ")", ":", "env", "=", "extend_env", "(", "extra_env", ")", "if", "extra_env", "else", "None", "env_str", "=", "(", "get_env_str", "(", "extra_env", ")", "+", "' '", ")", "if", "extra_env", "else", "''", "cmd_strs", "=", "[", "env_str", "+", "' '", ".", "join", "(", "cmd", ")", "for", "cmd", "in", "cmds", "]", "logger", ".", "info", "(", "'Saving output of `{0}`'", ".", "format", "(", "' | '", ".", "join", "(", "cmd_strs", ")", ")", ")", "with", "open", "(", "'/dev/null'", ",", "'w'", ")", "as", "NULL", ":", "# Start processes", "processes", "=", "[", "]", "for", "cmd_str", ",", "cmd", "in", "zip", "(", "cmd_strs", ",", "cmds", ")", ":", "p_stdin", "=", "processes", "[", "-", "1", "]", "[", "1", "]", ".", "stdout", "if", "processes", "else", "None", "p_stderr", "=", "None", "if", "show_stderr", "else", "NULL", "p", "=", "Popen", "(", "cmd", ",", "env", "=", "env", ",", "stdout", "=", "PIPE", ",", "stdin", "=", "p_stdin", ",", "stderr", "=", "p_stderr", ")", "processes", ".", "append", "(", "(", "cmd_str", ",", "p", ")", ")", "p_last", "=", "processes", "[", "-", "1", "]", "[", "1", "]", "with", "open", "(", "path", ",", "'wb'", ")", "as", "f", ":", "shutil", ".", "copyfileobj", "(", "p_last", ".", "stdout", ",", "f", ")", "# Close processes", "error", "=", "False", "for", "cmd_str", ",", "p", "in", "processes", ":", "p", ".", "stdout", ".", "close", "(", ")", "if", "p", ".", "wait", "(", ")", "!=", "0", ":", "error", "=", "True", "if", "error", ":", "raise", "CalledProcessError", "(", "cmd", "=", "cmd_str", ",", "returncode", "=", "p", ".", "returncode", ")" ]
avg_line_len: 36.794118, score: 19.5
def call(func, *args, **kwargs):
    """
    :return:
        a delegator function that returns a tuple
        (``func``, (seed tuple,) + ``args``, ``kwargs``). That is, the
        seed tuple is inserted before supplied positional arguments.
        By default, a thread wrapping ``func`` and all those arguments
        is spawned.
    """
    def f(seed_tuple):
        return func, (seed_tuple,) + args, kwargs
    return f
[ "def", "call", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "f", "(", "seed_tuple", ")", ":", "return", "func", ",", "(", "seed_tuple", ",", ")", "+", "args", ",", "kwargs", "return", "f" ]
avg_line_len: 39.6, score: 22.6
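What the delegator yields for a given seed tuple, assuming call() above is in scope (the seed tuple and arguments are invented):

def noop(*args, **kwargs):
    pass

delegator = call(noop, 'extra_arg', retries=3)
func, args, kwargs = delegator(('chat_id', 42))
print(args)    # (('chat_id', 42), 'extra_arg') -- seed tuple comes first
print(kwargs)  # {'retries': 3}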
def maxnorm_regularizer(scale=1.0):
    """Max-norm regularization returns a function that can be used to
    apply max-norm regularization to weights.

    More about max-norm, see `wiki-max norm
    <https://en.wikipedia.org/wiki/Matrix_norm#Max_norm>`_.
    The implementation follows `TensorFlow contrib
    <https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/layers/python/layers/regularizers.py>`__.

    Parameters
    ----------
    scale : float
        A scalar multiplier `Tensor`. 0.0 disables the regularizer.

    Returns
    -------
    A function with signature `mn(weights, name=None)` that applies Lo
    regularization.

    Raises
    ------
    ValueError : If scale is outside of the range [0.0, 1.0] or if scale
        is not a float.
    """
    if isinstance(scale, numbers.Integral):
        raise ValueError('scale cannot be an integer: %s' % scale)

    if isinstance(scale, numbers.Real):
        if scale < 0.:
            raise ValueError('Setting a scale less than 0 on a regularizer: %g' % scale)
        # if scale >= 1.:
        #     raise ValueError('Setting a scale greater than 1 on a regularizer: %g' %
        #                      scale)
        if scale == 0.:
            tl.logging.info('Scale of 0 disables regularizer.')
            return lambda _, name=None: None

    def mn(weights, name='max_regularizer'):
        """Applies max-norm regularization to weights."""
        with tf.name_scope(name) as scope:
            my_scale = ops.convert_to_tensor(scale,
                                             dtype=weights.dtype.base_dtype,
                                             name='scale')
            # if tf.__version__ <= '0.12':
            #     standard_ops_fn = standard_ops.mul
            # else:
            standard_ops_fn = standard_ops.multiply
            return standard_ops_fn(
                my_scale,
                standard_ops.reduce_max(standard_ops.abs(weights)),
                name=scope)

    return mn
[ "def", "maxnorm_regularizer", "(", "scale", "=", "1.0", ")", ":", "if", "isinstance", "(", "scale", ",", "numbers", ".", "Integral", ")", ":", "raise", "ValueError", "(", "'scale cannot be an integer: %s'", "%", "scale", ")", "if", "isinstance", "(", "scale", ",", "numbers", ".", "Real", ")", ":", "if", "scale", "<", "0.", ":", "raise", "ValueError", "(", "'Setting a scale less than 0 on a regularizer: %g'", "%", "scale", ")", "# if scale >= 1.:", "# raise ValueError('Setting a scale greater than 1 on a regularizer: %g' %", "# scale)", "if", "scale", "==", "0.", ":", "tl", ".", "logging", ".", "info", "(", "'Scale of 0 disables regularizer.'", ")", "return", "lambda", "_", ",", "name", "=", "None", ":", "None", "def", "mn", "(", "weights", ",", "name", "=", "'max_regularizer'", ")", ":", "\"\"\"Applies max-norm regularization to weights.\"\"\"", "with", "tf", ".", "name_scope", "(", "name", ")", "as", "scope", ":", "my_scale", "=", "ops", ".", "convert_to_tensor", "(", "scale", ",", "dtype", "=", "weights", ".", "dtype", ".", "base_dtype", ",", "name", "=", "'scale'", ")", "# if tf.__version__ <= '0.12':", "# standard_ops_fn = standard_ops.mul", "# else:", "standard_ops_fn", "=", "standard_ops", ".", "multiply", "return", "standard_ops_fn", "(", "my_scale", ",", "standard_ops", ".", "reduce_max", "(", "standard_ops", ".", "abs", "(", "weights", ")", ")", ",", "name", "=", "scope", ")", "return", "mn" ]
avg_line_len: 41.136364, score: 27.886364
def _get_merged(self, tax_id):
    """Returns the tax_id into which `tax_id` has been merged, or
    `tax_id` itself if it is not obsolete.
    """
    cmd = """
          SELECT COALESCE(
          (SELECT new_tax_id FROM {merged}
           WHERE old_tax_id = {x}), {x})
          """.format(x=self.placeholder, merged=self.merged)

    with self.engine.connect() as con:
        result = con.execute(cmd, (tax_id, tax_id))
        return result.fetchone()[0]
[ "def", "_get_merged", "(", "self", ",", "tax_id", ")", ":", "cmd", "=", "\"\"\"\n SELECT COALESCE(\n (SELECT new_tax_id FROM {merged}\n WHERE old_tax_id = {x}), {x})\n \"\"\"", ".", "format", "(", "x", "=", "self", ".", "placeholder", ",", "merged", "=", "self", ".", "merged", ")", "with", "self", ".", "engine", ".", "connect", "(", ")", "as", "con", ":", "result", "=", "con", ".", "execute", "(", "cmd", ",", "(", "tax_id", ",", "tax_id", ")", ")", "return", "result", ".", "fetchone", "(", ")", "[", "0", "]" ]
avg_line_len: 30.066667, score: 12.333333
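The COALESCE lookup can be tried standalone with the stdlib's sqlite3; the table contents here are invented:

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE merged (old_tax_id INTEGER, new_tax_id INTEGER)')
con.execute('INSERT INTO merged VALUES (12, 74109)')

def get_merged(tax_id):
    cur = con.execute(
        'SELECT COALESCE('
        ' (SELECT new_tax_id FROM merged WHERE old_tax_id = ?), ?)',
        (tax_id, tax_id))
    return cur.fetchone()[0]

print(get_merged(12))  # 74109 (merged into a new id)
print(get_merged(99))  # 99 (not merged; falls through COALESCE)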
def send_bcm(bcm_socket, data):
    """
    Send raw frame to a BCM socket and handle errors.
    """
    try:
        return bcm_socket.send(data)
    except OSError as e:
        base = "Couldn't send CAN BCM frame. OS Error {}: {}\n".format(
            e.errno, e.strerror)

        if e.errno == errno.EINVAL:
            raise can.CanError(
                base + "You are probably referring to a non-existing frame.")
        elif e.errno == errno.ENETDOWN:
            raise can.CanError(
                base + "The CAN interface appears to be down.")
        elif e.errno == errno.EBADF:
            raise can.CanError(
                base + "The CAN socket appears to be closed.")
        else:
            raise e
[ "def", "send_bcm", "(", "bcm_socket", ",", "data", ")", ":", "try", ":", "return", "bcm_socket", ".", "send", "(", "data", ")", "except", "OSError", "as", "e", ":", "base", "=", "\"Couldn't send CAN BCM frame. OS Error {}: {}\\n\"", ".", "format", "(", "e", ".", "errno", ",", "e", ".", "strerror", ")", "if", "e", ".", "errno", "==", "errno", ".", "EINVAL", ":", "raise", "can", ".", "CanError", "(", "base", "+", "\"You are probably referring to a non-existing frame.\"", ")", "elif", "e", ".", "errno", "==", "errno", ".", "ENETDOWN", ":", "raise", "can", ".", "CanError", "(", "base", "+", "\"The CAN interface appears to be down.\"", ")", "elif", "e", ".", "errno", "==", "errno", ".", "EBADF", ":", "raise", "can", ".", "CanError", "(", "base", "+", "\"The CAN socket appears to be closed.\"", ")", "else", ":", "raise", "e" ]
avg_line_len: 32.3, score: 23.5
def _Dhcpcd(self, interfaces, logger):
    """Use dhcpcd to activate the interfaces.

    Args:
        interfaces: list of string, the output device names to enable.
        logger: logger object, used to write to SysLog and serial port.
    """
    for interface in interfaces:
        dhcpcd = ['/sbin/dhcpcd']
        try:
            subprocess.check_call(dhcpcd + ['-x', interface])
        except subprocess.CalledProcessError:
            # Dhcpcd not yet running for this device.
            logger.info('Dhcpcd not yet running for interface %s.', interface)
        try:
            subprocess.check_call(dhcpcd + [interface])
        except subprocess.CalledProcessError:
            # The interface is already active.
            logger.warning('Could not activate interface %s.', interface)
[ "def", "_Dhcpcd", "(", "self", ",", "interfaces", ",", "logger", ")", ":", "for", "interface", "in", "interfaces", ":", "dhcpcd", "=", "[", "'/sbin/dhcpcd'", "]", "try", ":", "subprocess", ".", "check_call", "(", "dhcpcd", "+", "[", "'-x'", ",", "interface", "]", ")", "except", "subprocess", ".", "CalledProcessError", ":", "# Dhcpcd not yet running for this device.", "logger", ".", "info", "(", "'Dhcpcd not yet running for interface %s.'", ",", "interface", ")", "try", ":", "subprocess", ".", "check_call", "(", "dhcpcd", "+", "[", "interface", "]", ")", "except", "subprocess", ".", "CalledProcessError", ":", "# The interface is already active.", "logger", ".", "warning", "(", "'Could not activate interface %s.'", ",", "interface", ")" ]
avg_line_len: 39.315789, score: 16.578947
async def uint(self, elem, elem_type, params=None):
    """
    Integer types

    :param elem:
    :param elem_type:
    :param params:
    :return:
    """
    if self.writing:
        return IntegerModel(elem, elem_type.WIDTH) if self.modelize else elem
    else:
        return elem.val if isinstance(elem, IModel) else elem
[ "async", "def", "uint", "(", "self", ",", "elem", ",", "elem_type", ",", "params", "=", "None", ")", ":", "if", "self", ".", "writing", ":", "return", "IntegerModel", "(", "elem", ",", "elem_type", ".", "WIDTH", ")", "if", "self", ".", "modelize", "else", "elem", "else", ":", "return", "elem", ".", "val", "if", "isinstance", "(", "elem", ",", "IModel", ")", "else", "elem" ]
avg_line_len: 30, score: 18
def save(self, full=False, force=False):
    '''
    Saves the current entity to Redis. Will only save changed data by
    default, but you can force a full save by passing ``full=True``.

    If the underlying entity was deleted and you want to re-save the
    entity, you can pass ``force=True`` to force a full re-save of the
    entity.
    '''
    # handle the pre-commit hooks
    was_new = self._new
    if was_new:
        self._before_insert()
    else:
        self._before_update()

    new = self.to_dict()
    ret, data = self._apply_changes(
        self._last, new, full or self._new or force,
        is_new=self._new or force)
    self._last = data
    self._new = False
    self._modified = False
    self._deleted = False

    # handle the post-commit hooks
    if was_new:
        self._after_insert()
    else:
        self._after_update()

    return ret
[ "def", "save", "(", "self", ",", "full", "=", "False", ",", "force", "=", "False", ")", ":", "# handle the pre-commit hooks", "was_new", "=", "self", ".", "_new", "if", "was_new", ":", "self", ".", "_before_insert", "(", ")", "else", ":", "self", ".", "_before_update", "(", ")", "new", "=", "self", ".", "to_dict", "(", ")", "ret", ",", "data", "=", "self", ".", "_apply_changes", "(", "self", ".", "_last", ",", "new", ",", "full", "or", "self", ".", "_new", "or", "force", ",", "is_new", "=", "self", ".", "_new", "or", "force", ")", "self", ".", "_last", "=", "data", "self", ".", "_new", "=", "False", "self", ".", "_modified", "=", "False", "self", ".", "_deleted", "=", "False", "# handle the post-commit hooks", "if", "was_new", ":", "self", ".", "_after_insert", "(", ")", "else", ":", "self", ".", "_after_update", "(", ")", "return", "ret" ]
avg_line_len: 33.392857, score: 19.607143
def return_features_base(dbpath, set_object, names):
    """
    Generic function which returns a list of extracted features from the
    database

    Parameters
    ----------
    dbpath : string, path to SQLite database file
    set_object : object (either TestSet or TrainSet) which is stored in
        the database
    names : list of strings, a list of feature names which are to be
        retrieved from the database; if equal to 'all', all features
        will be returned

    Returns
    -------
    return_list : list of lists, each 'inside list' corresponds to a
        single data point, each element of the 'inside list' is a
        feature (can be of any type)
    """
    engine = create_engine('sqlite:////' + dbpath)
    session_cl = sessionmaker(bind=engine)
    session = session_cl()
    return_list = []
    if names == 'all':
        for i in session.query(set_object).order_by(set_object.id):
            row_list = []
            for feature in i.features:
                row_list.append(i.features[feature])
            return_list.append(row_list[:])
    else:
        for i in session.query(set_object).order_by(set_object.id):
            row_list = []
            for feature in i.features:
                if feature in names:
                    row_list.append(i.features[feature])
            return_list.append(row_list[:])
    return return_list
[ "def", "return_features_base", "(", "dbpath", ",", "set_object", ",", "names", ")", ":", "engine", "=", "create_engine", "(", "'sqlite:////'", "+", "dbpath", ")", "session_cl", "=", "sessionmaker", "(", "bind", "=", "engine", ")", "session", "=", "session_cl", "(", ")", "return_list", "=", "[", "]", "if", "names", "==", "'all'", ":", "for", "i", "in", "session", ".", "query", "(", "set_object", ")", ".", "order_by", "(", "set_object", ".", "id", ")", ":", "row_list", "=", "[", "]", "for", "feature", "in", "i", ".", "features", ":", "row_list", ".", "append", "(", "i", ".", "features", "[", "feature", "]", ")", "return_list", ".", "append", "(", "row_list", "[", ":", "]", ")", "else", ":", "for", "i", "in", "session", ".", "query", "(", "set_object", ")", ".", "order_by", "(", "set_object", ".", "id", ")", ":", "row_list", "=", "[", "]", "for", "feature", "in", "i", ".", "features", ":", "if", "feature", "in", "names", ":", "row_list", ".", "append", "(", "i", ".", "features", "[", "feature", "]", ")", "return_list", ".", "append", "(", "row_list", "[", ":", "]", ")", "return", "return_list" ]
avg_line_len: 38.764706, score: 20.470588
def DoRarExtraction(rarArchive, targetFile, dstDir):
    """
    RAR extraction with exception catching

    Parameters
    ----------
    rarArchive : RarFile object
        RarFile object to extract.

    targetFile : string
        Target file name.

    dstDir : string
        Target directory.

    Returns
    -------
    boolean
        False if rar extraction failed, otherwise True.
    """
    try:
        rarArchive.extract(targetFile, dstDir)
    except BaseException as ex:
        goodlogging.Log.Info("EXTRACT",
                             "Extract failed - Exception: {0}".format(ex))
        return False
    else:
        return True
[ "def", "DoRarExtraction", "(", "rarArchive", ",", "targetFile", ",", "dstDir", ")", ":", "try", ":", "rarArchive", ".", "extract", "(", "targetFile", ",", "dstDir", ")", "except", "BaseException", "as", "ex", ":", "goodlogging", ".", "Log", ".", "Info", "(", "\"EXTRACT\"", ",", "\"Extract failed - Exception: {0}\"", ".", "format", "(", "ex", ")", ")", "return", "False", "else", ":", "return", "True" ]
avg_line_len: 20.666667, score: 21.777778
def is_element_visible(driver, selector, by=By.CSS_SELECTOR):
    """
    Returns whether the specified element selector is visible on the page.
    @Params
    driver - the webdriver object (required)
    selector - the locator that is used (required)
    by - the method to search for the locator (Default: By.CSS_SELECTOR)
    @Returns
    Boolean (is element visible)
    """
    try:
        element = driver.find_element(by=by, value=selector)
        return element.is_displayed()
    except Exception:
        return False
[ "def", "is_element_visible", "(", "driver", ",", "selector", ",", "by", "=", "By", ".", "CSS_SELECTOR", ")", ":", "try", ":", "element", "=", "driver", ".", "find_element", "(", "by", "=", "by", ",", "value", "=", "selector", ")", "return", "element", ".", "is_displayed", "(", ")", "except", "Exception", ":", "return", "False" ]
avg_line_len: 34.4, score: 17.333333
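A usage sketch, assuming selenium is installed, a chromedriver is available, and is_element_visible() above is in scope; the URL and selectors are placeholders:

from selenium import webdriver
from selenium.webdriver.common.by import By

driver = webdriver.Chrome()
driver.get('https://example.com')
print(is_element_visible(driver, 'h1'))              # CSS selector (default)
print(is_element_visible(driver, '//h1', By.XPATH))  # explicit locator type
driver.quit()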
def numericalparameters(self):
    """Numeric parameter declaration lines."""
    lines = Lines()
    if self.model.NUMERICAL:
        lines.add(0, '@cython.final')
        lines.add(0, 'cdef class NumConsts(object):')
        for name in ('nmb_methods', 'nmb_stages'):
            lines.add(1, 'cdef public %s %s' % (TYPE2STR[int], name))
        for name in ('dt_increase', 'dt_decrease'):
            lines.add(1, 'cdef public %s %s' % (TYPE2STR[float], name))
        lines.add(1, 'cdef public configutils.Config pub')
        lines.add(1, 'cdef public double[:, :, :] a_coefs')
        lines.add(0, 'cdef class NumVars(object):')
        for name in ('nmb_calls', 'idx_method', 'idx_stage'):
            lines.add(1, 'cdef public %s %s' % (TYPE2STR[int], name))
        for name in ('t0', 't1', 'dt', 'dt_est', 'error', 'last_error',
                     'extrapolated_error'):
            lines.add(1, 'cdef public %s %s' % (TYPE2STR[float], name))
        lines.add(1, 'cdef public %s f0_ready' % TYPE2STR[bool])
    return lines
[ "def", "numericalparameters", "(", "self", ")", ":", "lines", "=", "Lines", "(", ")", "if", "self", ".", "model", ".", "NUMERICAL", ":", "lines", ".", "add", "(", "0", ",", "'@cython.final'", ")", "lines", ".", "add", "(", "0", ",", "'cdef class NumConsts(object):'", ")", "for", "name", "in", "(", "'nmb_methods'", ",", "'nmb_stages'", ")", ":", "lines", ".", "add", "(", "1", ",", "'cdef public %s %s'", "%", "(", "TYPE2STR", "[", "int", "]", ",", "name", ")", ")", "for", "name", "in", "(", "'dt_increase'", ",", "'dt_decrease'", ")", ":", "lines", ".", "add", "(", "1", ",", "'cdef public %s %s'", "%", "(", "TYPE2STR", "[", "float", "]", ",", "name", ")", ")", "lines", ".", "add", "(", "1", ",", "'cdef public configutils.Config pub'", ")", "lines", ".", "add", "(", "1", ",", "'cdef public double[:, :, :] a_coefs'", ")", "lines", ".", "add", "(", "0", ",", "'cdef class NumVars(object):'", ")", "for", "name", "in", "(", "'nmb_calls'", ",", "'idx_method'", ",", "'idx_stage'", ")", ":", "lines", ".", "add", "(", "1", ",", "'cdef public %s %s'", "%", "(", "TYPE2STR", "[", "int", "]", ",", "name", ")", ")", "for", "name", "in", "(", "'t0'", ",", "'t1'", ",", "'dt'", ",", "'dt_est'", ",", "'error'", ",", "'last_error'", ",", "'extrapolated_error'", ")", ":", "lines", ".", "add", "(", "1", ",", "'cdef public %s %s'", "%", "(", "TYPE2STR", "[", "float", "]", ",", "name", ")", ")", "lines", ".", "add", "(", "1", ",", "'cdef public %s f0_ready'", "%", "TYPE2STR", "[", "bool", "]", ")", "return", "lines" ]
avg_line_len: 54.65, score: 19.65
def remove_prefix(self, id):
    """ Remove a prefix from pool 'id'. """
    if 'prefix' not in request.params:
        abort(400, 'Missing prefix.')
    prefix = Prefix.get(int(request.params['prefix']))
    prefix.pool = None
    prefix.save()

    redirect(url(controller='pool', action='edit', id=id))
[ "def", "remove_prefix", "(", "self", ",", "id", ")", ":", "if", "'prefix'", "not", "in", "request", ".", "params", ":", "abort", "(", "400", ",", "'Missing prefix.'", ")", "prefix", "=", "Prefix", ".", "get", "(", "int", "(", "request", ".", "params", "[", "'prefix'", "]", ")", ")", "prefix", ".", "pool", "=", "None", "prefix", ".", "save", "(", ")", "redirect", "(", "url", "(", "controller", "=", "'pool'", ",", "action", "=", "'edit'", ",", "id", "=", "id", ")", ")" ]
avg_line_len: 30.727273, score: 15.818182
def InitUser():
    """Initialize application user.

    Retrieve existing user credentials from datastore or add new user.

    Returns:
        AppUser instance of the application user.
    """
    result = AppUser.query(AppUser.user == users.get_current_user()).fetch()

    if result:
        app_user = result[0]
    else:
        app_user = AppUser(user=users.get_current_user(),
                           email=users.get_current_user().email())
        app_user.put()

    return app_user
[ "def", "InitUser", "(", ")", ":", "result", "=", "AppUser", ".", "query", "(", "AppUser", ".", "user", "==", "users", ".", "get_current_user", "(", ")", ")", ".", "fetch", "(", ")", "if", "result", ":", "app_user", "=", "result", "[", "0", "]", "else", ":", "app_user", "=", "AppUser", "(", "user", "=", "users", ".", "get_current_user", "(", ")", ",", "email", "=", "users", ".", "get_current_user", "(", ")", ".", "email", "(", ")", ")", "app_user", ".", "put", "(", ")", "return", "app_user" ]
avg_line_len: 24.611111, score: 24.388889
def create_card(self, base_info, payee, invoice_type, detail=None):
    """
    Create an invoice card template

    Note that this object has similarities to a membership card, but its
    meaning is different. Creating an invoice card template is the basis
    for creating invoice cards.
    For details, see
    https://mp.weixin.qq.com/wiki?id=mp1496561481_1TyO7

    :param base_info: basic information of the invoice card template
    :type base_info: dict
    :param payee: full name of the payee (invoice issuer), displayed in
        the invoice details. It is recommended that each payee correspond
        to one invoice card template
    :param invoice_type: description of the invoice type
    :param detail: remark details
    :return: id of the invoice card template, passed later as a required
        parameter when calling the card-insertion interface after the
        merchant's invoice has been generated
    """
    return self._post(
        'platform/createcard',
        data={
            'invoice_info': {
                'base_info': base_info,
                'payee': payee,
                'type': invoice_type,
                'detail': detail,
            },
        },
        result_processor=lambda x: x['card_id'],
    )
[ "def", "create_card", "(", "self", ",", "base_info", ",", "payee", ",", "invoice_type", ",", "detail", "=", "None", ")", ":", "return", "self", ".", "_post", "(", "'platform/createcard'", ",", "data", "=", "{", "'invoice_info'", ":", "{", "'base_info'", ":", "base_info", ",", "'payee'", ":", "payee", ",", "'type'", ":", "invoice_type", ",", "'detail'", ":", "detail", ",", "}", ",", "}", ",", "result_processor", "=", "lambda", "x", ":", "x", "[", "'card_id'", "]", ",", ")" ]
avg_line_len: 31.923077, score: 14.230769
def applymap(self, func, subset=None, **kwargs):
    """
    Apply a function elementwise, updating the HTML representation with
    the result.

    Parameters
    ----------
    func : function
        ``func`` should take a scalar and return a scalar
    subset : IndexSlice
        a valid indexer to limit ``data`` to *before* applying the
        function. Consider using a pandas.IndexSlice
    kwargs : dict
        pass along to ``func``

    Returns
    -------
    self : Styler

    See Also
    --------
    Styler.where
    """
    self._todo.append((lambda instance: getattr(instance, '_applymap'),
                       (func, subset), kwargs))
    return self
[ "def", "applymap", "(", "self", ",", "func", ",", "subset", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_todo", ".", "append", "(", "(", "lambda", "instance", ":", "getattr", "(", "instance", ",", "'_applymap'", ")", ",", "(", "func", ",", "subset", ")", ",", "kwargs", ")", ")", "return", "self" ]
avg_line_len: 28.461538, score: 19.769231
def _step2func(self, samples, force, ipyclient):
    """ hidden wrapped function to start step 2"""
    ## print header
    if self._headers:
        print("\n Step 2: Filtering reads ")

    ## If no samples in this assembly then it means you skipped step1,
    if not self.samples.keys():
        raise IPyradWarningExit(FIRST_RUN_1)

    ## Get sample objects from list of strings, if API.
    samples = _get_samples(self, samples)

    if not force:
        ## print warning and skip if all are finished
        if all([i.stats.state >= 2 for i in samples]):
            print(EDITS_EXIST.format(len(samples)))
            return

    ## Run samples through rawedit
    assemble.rawedit.run2(self, samples, force, ipyclient)
[ "def", "_step2func", "(", "self", ",", "samples", ",", "force", ",", "ipyclient", ")", ":", "## print header", "if", "self", ".", "_headers", ":", "print", "(", "\"\\n Step 2: Filtering reads \"", ")", "## If no samples in this assembly then it means you skipped step1,", "if", "not", "self", ".", "samples", ".", "keys", "(", ")", ":", "raise", "IPyradWarningExit", "(", "FIRST_RUN_1", ")", "## Get sample objects from list of strings, if API.", "samples", "=", "_get_samples", "(", "self", ",", "samples", ")", "if", "not", "force", ":", "## print warning and skip if all are finished", "if", "all", "(", "[", "i", ".", "stats", ".", "state", ">=", "2", "for", "i", "in", "samples", "]", ")", ":", "print", "(", "EDITS_EXIST", ".", "format", "(", "len", "(", "samples", ")", ")", ")", "return", "## Run samples through rawedit", "assemble", ".", "rawedit", ".", "run2", "(", "self", ",", "samples", ",", "force", ",", "ipyclient", ")" ]
avg_line_len: 35.136364, score: 19.590909
def execute(self, lf_raw: str) -> int:
    """
    Very basic model for executing friction logical forms. For now
    returns answer index (or -1 if no answer can be concluded)
    """
    # Remove "a:" prefixes from attributes (hack)
    logical_form = re.sub(r"\(a:", r"(", lf_raw)
    parse = semparse_util.lisp_to_nested_expression(logical_form)
    if len(parse) < 2:
        return -1
    if parse[0] == 'infer':
        args = [self._exec_and(arg) for arg in parse[1:]]
        if None in args:
            return -1
        return self._exec_infer(*args)
    return -1
[ "def", "execute", "(", "self", ",", "lf_raw", ":", "str", ")", "->", "int", ":", "# Remove \"a:\" prefixes from attributes (hack)", "logical_form", "=", "re", ".", "sub", "(", "r\"\\(a:\"", ",", "r\"(\"", ",", "lf_raw", ")", "parse", "=", "semparse_util", ".", "lisp_to_nested_expression", "(", "logical_form", ")", "if", "len", "(", "parse", ")", "<", "2", ":", "return", "-", "1", "if", "parse", "[", "0", "]", "==", "'infer'", ":", "args", "=", "[", "self", ".", "_exec_and", "(", "arg", ")", "for", "arg", "in", "parse", "[", "1", ":", "]", "]", "if", "None", "in", "args", ":", "return", "-", "1", "return", "self", ".", "_exec_infer", "(", "*", "args", ")", "return", "-", "1" ]
avg_line_len: 38.8125, score: 14.1875
def line_to(self, x, y):
    """Adds a line to the path from the current point to position
    ``(x, y)`` in user-space coordinates. After this call the current
    point will be ``(x, y)``.

    If there is no current point before the call to :meth:`line_to`
    this method will behave as ``context.move_to(x, y)``.

    :param x: X coordinate of the end of the new line.
    :param y: Y coordinate of the end of the new line.
    :type x: float
    :type y: float
    """
    cairo.cairo_line_to(self._pointer, x, y)
    self._check_status()
[ "def", "line_to", "(", "self", ",", "x", ",", "y", ")", ":", "cairo", ".", "cairo_line_to", "(", "self", ".", "_pointer", ",", "x", ",", "y", ")", "self", ".", "_check_status", "(", ")" ]
avg_line_len: 36.125, score: 19.875
def transform(function):
    """Return a processor for a style's "transform" function.
    """
    def transform_fn(_, result):
        if isinstance(result, Nothing):
            return result
        lgr.debug("Transforming %r with %r", result, function)
        try:
            return function(result)
        except:
            exctype, value, tb = sys.exc_info()
            try:
                new_exc = StyleFunctionError(function, exctype, value)
                # Remove the "During handling ..." since we're
                # reraising with the traceback.
                new_exc.__cause__ = None
                six.reraise(StyleFunctionError, new_exc, tb)
            finally:
                # Remove circular reference.
                # https://docs.python.org/2/library/sys.html#sys.exc_info
                del tb

    return transform_fn
[ "def", "transform", "(", "function", ")", ":", "def", "transform_fn", "(", "_", ",", "result", ")", ":", "if", "isinstance", "(", "result", ",", "Nothing", ")", ":", "return", "result", "lgr", ".", "debug", "(", "\"Transforming %r with %r\"", ",", "result", ",", "function", ")", "try", ":", "return", "function", "(", "result", ")", "except", ":", "exctype", ",", "value", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "try", ":", "new_exc", "=", "StyleFunctionError", "(", "function", ",", "exctype", ",", "value", ")", "# Remove the \"During handling ...\" since we're", "# reraising with the traceback.", "new_exc", ".", "__cause__", "=", "None", "six", ".", "reraise", "(", "StyleFunctionError", ",", "new_exc", ",", "tb", ")", "finally", ":", "# Remove circular reference.", "# https://docs.python.org/2/library/sys.html#sys.exc_info", "del", "tb", "return", "transform_fn" ]
40
15.826087
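A sketch of the processor in use; per the code above, the returned function takes a (value, result) pair, transforms only the result, and wraps any failure in StyleFunctionError ::

    upper_fn = transform(str.upper)   # wrap a plain function as a processor
    print(upper_fn(None, "ok"))       # -> "OK" (also logged at debug level)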
def get_formset_class(self, form, name): """ Either return the formset class that was provided as argument to the __init__ method, or build one based on the ``parent_model`` and ``model`` attributes. """ if self.formset_class is not None: return self.formset_class formset_class = inlineformset_factory( self.get_parent_model(form, name), self.get_model(form, name), **self.formset_factory_kwargs) return formset_class
[ "def", "get_formset_class", "(", "self", ",", "form", ",", "name", ")", ":", "if", "self", ".", "formset_class", "is", "not", "None", ":", "return", "self", ".", "formset_class", "formset_class", "=", "inlineformset_factory", "(", "self", ".", "get_parent_model", "(", "form", ",", "name", ")", ",", "self", ".", "get_model", "(", "form", ",", "name", ")", ",", "*", "*", "self", ".", "formset_factory_kwargs", ")", "return", "formset_class" ]
39.846154
8.461538
def save_form(self, commit=True): """ This calls Django's ``ModelForm.save()``. It only takes care of saving this actual form, and leaves the nested forms and formsets alone. We separate this out of the :meth:`~django_superform.forms.SuperModelForm.save` method to make extensibility easier. """ return super(SuperModelFormMixin, self).save(commit=commit)
[ "def", "save_form", "(", "self", ",", "commit", "=", "True", ")", ":", "return", "super", "(", "SuperModelFormMixin", ",", "self", ")", ".", "save", "(", "commit", "=", "commit", ")" ]
38
19.454545
def get_visible_commands(self) -> List[str]: """Returns a list of commands that have not been hidden or disabled.""" commands = self.get_all_commands() # Remove the hidden commands for name in self.hidden_commands: if name in commands: commands.remove(name) # Remove the disabled commands for name in self.disabled_commands: if name in commands: commands.remove(name) return commands
[ "def", "get_visible_commands", "(", "self", ")", "->", "List", "[", "str", "]", ":", "commands", "=", "self", ".", "get_all_commands", "(", ")", "# Remove the hidden commands", "for", "name", "in", "self", ".", "hidden_commands", ":", "if", "name", "in", "commands", ":", "commands", ".", "remove", "(", "name", ")", "# Remove the disabled commands", "for", "name", "in", "self", ".", "disabled_commands", ":", "if", "name", "in", "commands", ":", "commands", ".", "remove", "(", "name", ")", "return", "commands" ]
32.266667
11.666667
def _check_param_limits(self, q, chiA, chiB, allow_extrap):         """ Checks that params are within the allowed range of parameters.         Raises a warning if outside self.soft_param_lims limits and         raises an error if outside self.hard_param_lims.         If allow_extrap=True, skips these checks.         """         if q < 1:             raise ValueError('Mass ratio should be >= 1.')         chiAmag = np.sqrt(np.sum(chiA**2))         chiBmag = np.sqrt(np.sum(chiB**2))         if chiAmag > 1 + 1e-14:             raise ValueError('Spin magnitude of BhA > 1.')         if chiBmag > 1 + 1e-14:             raise ValueError('Spin magnitude of BhB > 1.')         if self.aligned_spin_only:             if np.sqrt(np.sum(chiA[:2]**2)) > 1e-14:                 raise ValueError('The x & y components of chiA should be zero.')             if np.sqrt(np.sum(chiB[:2]**2)) > 1e-14:                 raise ValueError('The x & y components of chiB should be zero.')         # Do not check param limits if allow_extrap=True         if allow_extrap:             return         if q > self.hard_param_lims['q']+ 1e-14:             raise ValueError('Mass ratio outside allowed range.')         elif q > self.soft_param_lims['q']:             warnings.warn('Mass ratio outside training range.')         if chiAmag > self.hard_param_lims['chiAmag']+ 1e-14:             raise ValueError('Spin magnitude of BhA outside allowed range.')         elif chiAmag > self.soft_param_lims['chiAmag']:             warnings.warn('Spin magnitude of BhA outside training range.')         if chiBmag > self.hard_param_lims['chiBmag']+ 1e-14:             raise ValueError('Spin magnitude of BhB outside allowed range.')         elif chiBmag > self.soft_param_lims['chiBmag']:             warnings.warn('Spin magnitude of BhB outside training range.')
[ "def", "_check_param_limits", "(", "self", ",", "q", ",", "chiA", ",", "chiB", ",", "allow_extrap", ")", ":", "if", "q", "<", "1", ":", "raise", "ValueError", "(", "'Mass ratio should be >= 1.'", ")", "chiAmag", "=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "chiA", "**", "2", ")", ")", "chiBmag", "=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "chiB", "**", "2", ")", ")", "if", "chiAmag", ">", "1", "+", "1e-14", ":", "raise", "ValueError", "(", "'Spin magnitude of BhA > 1.'", ")", "if", "chiBmag", ">", "1", "+", "1e-14", ":", "raise", "ValueError", "(", "'Spin magnitude of BhB > 1.'", ")", "if", "self", ".", "aligned_spin_only", ":", "if", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "chiA", "[", ":", "2", "]", "**", "2", ")", ")", ">", "1e-14", ":", "raise", "ValueError", "(", "'The x & y components of chiA should be zero.'", ")", "if", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "chiB", "[", ":", "2", "]", "**", "2", ")", ")", ">", "1e-14", ":", "raise", "ValueError", "(", "'The x & y components of chiB should be zero.'", ")", "# Do not check param limits if allow_extrap=True", "if", "allow_extrap", ":", "return", "if", "q", ">", "self", ".", "hard_param_lims", "[", "'q'", "]", "+", "1e-14", ":", "raise", "ValueError", "(", "'Mass ratio outside allowed range.'", ")", "elif", "q", ">", "self", ".", "soft_param_lims", "[", "'q'", "]", ":", "warnings", ".", "warn", "(", "'Mass ratio outside training range.'", ")", "if", "chiAmag", ">", "self", ".", "hard_param_lims", "[", "'chiAmag'", "]", "+", "1e-14", ":", "raise", "ValueError", "(", "'Spin magnitude of BhA outside allowed range.'", ")", "elif", "chiAmag", ">", "self", ".", "soft_param_lims", "[", "'chiAmag'", "]", ":", "warnings", ".", "warn", "(", "'Spin magnitude of BhA outside training range.'", ")", "if", "chiBmag", ">", "self", ".", "hard_param_lims", "[", "'chiBmag'", "]", "+", "1e-14", ":", "raise", "ValueError", "(", "'Spin magnitude of BhB outside allowed range.'", ")", "elif", "chiBmag", ">", "self", ".", "soft_param_lims", "[", "'chiBmag'", "]", ":", "warnings", ".", "warn", "(", "'Spin magnitude of BhB outside training range.'", ")" ]
43.731707
21.536585
def enable(self, snmp_agent, snmp_location=None, snmp_interface=None): """ Enable SNMP on the engine. Specify a list of interfaces by ID to enable only on those interfaces. Only interfaces that have NDI's are supported. :param str,Element snmp_agent: the SNMP agent reference for this engine :param str snmp_location: the SNMP location identifier for the engine :param list snmp_interface: list of interface IDs to enable SNMP :raises ElementNotFound: unable to resolve snmp_agent :raises InterfaceNotFound: specified interface by ID not found """ agent = element_resolver(snmp_agent) snmp_interface = [] if not snmp_interface else snmp_interface interfaces = self._iface_dict(snmp_interface) self.engine.data.update( snmp_agent_ref=agent, snmp_location=snmp_location if snmp_location else '', snmp_interface=interfaces)
[ "def", "enable", "(", "self", ",", "snmp_agent", ",", "snmp_location", "=", "None", ",", "snmp_interface", "=", "None", ")", ":", "agent", "=", "element_resolver", "(", "snmp_agent", ")", "snmp_interface", "=", "[", "]", "if", "not", "snmp_interface", "else", "snmp_interface", "interfaces", "=", "self", ".", "_iface_dict", "(", "snmp_interface", ")", "self", ".", "engine", ".", "data", ".", "update", "(", "snmp_agent_ref", "=", "agent", ",", "snmp_location", "=", "snmp_location", "if", "snmp_location", "else", "''", ",", "snmp_interface", "=", "interfaces", ")" ]
48.15
19.75
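A hedged sketch of calling this setter; the engine element and its .snmp accessor follow smc-python conventions but are assumptions here ::

    # assumed: an Engine element exposing this interface as engine.snmp
    engine.snmp.enable('MySNMPAgent',            # resolvable SNMPAgent name/href
                       snmp_location='dc-1',
                       snmp_interface=[0, 1])    # interface IDs that have NDIs
    engine.update()                              # persist the modified data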
def split_data_cwl_items(items, default_keys=None): """Split a set of CWL output dictionaries into data samples and CWL items. Handles cases where we're arrayed on multiple things, like a set of regional VCF calls and data objects. """ key_lens = set([]) for data in items: key_lens.add(len(_get_all_cwlkeys([data], default_keys))) extra_key_len = min(list(key_lens)) if len(key_lens) > 1 else None data_out = [] extra_out = [] for data in items: if extra_key_len and len(_get_all_cwlkeys([data], default_keys)) == extra_key_len: extra_out.append(data) else: data_out.append(data) if len(extra_out) == 0: return data_out, {} else: cwl_keys = extra_out[0]["cwl_keys"] for extra in extra_out[1:]: cur_cwl_keys = extra["cwl_keys"] assert cur_cwl_keys == cwl_keys, pprint.pformat(extra_out) cwl_extras = collections.defaultdict(list) for data in items: for key in cwl_keys: cwl_extras[key].append(data[key]) data_final = [] for data in data_out: for key in cwl_keys: data.pop(key) data_final.append(data) return data_final, dict(cwl_extras)
[ "def", "split_data_cwl_items", "(", "items", ",", "default_keys", "=", "None", ")", ":", "key_lens", "=", "set", "(", "[", "]", ")", "for", "data", "in", "items", ":", "key_lens", ".", "add", "(", "len", "(", "_get_all_cwlkeys", "(", "[", "data", "]", ",", "default_keys", ")", ")", ")", "extra_key_len", "=", "min", "(", "list", "(", "key_lens", ")", ")", "if", "len", "(", "key_lens", ")", ">", "1", "else", "None", "data_out", "=", "[", "]", "extra_out", "=", "[", "]", "for", "data", "in", "items", ":", "if", "extra_key_len", "and", "len", "(", "_get_all_cwlkeys", "(", "[", "data", "]", ",", "default_keys", ")", ")", "==", "extra_key_len", ":", "extra_out", ".", "append", "(", "data", ")", "else", ":", "data_out", ".", "append", "(", "data", ")", "if", "len", "(", "extra_out", ")", "==", "0", ":", "return", "data_out", ",", "{", "}", "else", ":", "cwl_keys", "=", "extra_out", "[", "0", "]", "[", "\"cwl_keys\"", "]", "for", "extra", "in", "extra_out", "[", "1", ":", "]", ":", "cur_cwl_keys", "=", "extra", "[", "\"cwl_keys\"", "]", "assert", "cur_cwl_keys", "==", "cwl_keys", ",", "pprint", ".", "pformat", "(", "extra_out", ")", "cwl_extras", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "data", "in", "items", ":", "for", "key", "in", "cwl_keys", ":", "cwl_extras", "[", "key", "]", ".", "append", "(", "data", "[", "key", "]", ")", "data_final", "=", "[", "]", "for", "data", "in", "data_out", ":", "for", "key", "in", "cwl_keys", ":", "data", ".", "pop", "(", "key", ")", "data_final", ".", "append", "(", "data", ")", "return", "data_final", ",", "dict", "(", "cwl_extras", ")" ]
36.941176
15.852941
def deletegroup(self, group_id):         """         Deletes a group by ID         :param group_id: id of the group to delete         :return: True if the group was deleted, False if it couldn't be. False could happen for several reasons, but there isn't a good way of differentiating them         """         request = requests.delete(             '{0}/{1}'.format(self.groups_url, group_id),             headers=self.headers, verify=self.verify_ssl, auth=self.auth,             timeout=self.timeout)         return request.status_code == 200
[ "def", "deletegroup", "(", "self", ",", "group_id", ")", ":", "request", "=", "requests", ".", "delete", "(", "'{0}/{1}'", ".", "format", "(", "self", ".", "groups_url", ",", "group_id", ")", ",", "headers", "=", "self", ".", "headers", ",", "verify", "=", "self", ".", "verify_ssl", ",", "auth", "=", "self", ".", "auth", ",", "timeout", "=", "self", ".", "timeout", ")", "return", "request", ".", "status_code", "==", "200" ]
42.416667
24.583333
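Usage is a single boolean-returning call; a sketch assuming a client object that exposes the method ::

    if client.deletegroup(42):        # hypothetical API-client instance
        print("group 42 deleted")
    else:
        print("delete failed (missing group, permissions, ...)")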
def _create_color_buttons(self): """Create color choice buttons""" button_size = (30, 30) button_style = wx.NO_BORDER try: self.linecolor_choice = \ csel.ColourSelect(self, -1, unichr(0x2500), (0, 0, 0), size=button_size, style=button_style) except UnicodeEncodeError: # ANSI wxPython installed self.linecolor_choice = \ csel.ColourSelect(self, -1, "-", (0, 0, 0), size=button_size, style=button_style) self.bgcolor_choice = \ csel.ColourSelect(self, -1, "", (255, 255, 255), size=button_size, style=button_style) self.textcolor_choice = \ csel.ColourSelect(self, -1, "A", (0, 0, 0), size=button_size, style=button_style) self.linecolor_choice.SetToolTipString(_(u"Border line color")) self.bgcolor_choice.SetToolTipString(_(u"Cell background")) self.textcolor_choice.SetToolTipString(_(u"Text color")) self.AddControl(self.linecolor_choice) self.AddControl(self.bgcolor_choice) self.AddControl(self.textcolor_choice) self.linecolor_choice.Bind(csel.EVT_COLOURSELECT, self.OnLineColor) self.bgcolor_choice.Bind(csel.EVT_COLOURSELECT, self.OnBGColor) self.textcolor_choice.Bind(csel.EVT_COLOURSELECT, self.OnTextColor)
[ "def", "_create_color_buttons", "(", "self", ")", ":", "button_size", "=", "(", "30", ",", "30", ")", "button_style", "=", "wx", ".", "NO_BORDER", "try", ":", "self", ".", "linecolor_choice", "=", "csel", ".", "ColourSelect", "(", "self", ",", "-", "1", ",", "unichr", "(", "0x2500", ")", ",", "(", "0", ",", "0", ",", "0", ")", ",", "size", "=", "button_size", ",", "style", "=", "button_style", ")", "except", "UnicodeEncodeError", ":", "# ANSI wxPython installed", "self", ".", "linecolor_choice", "=", "csel", ".", "ColourSelect", "(", "self", ",", "-", "1", ",", "\"-\"", ",", "(", "0", ",", "0", ",", "0", ")", ",", "size", "=", "button_size", ",", "style", "=", "button_style", ")", "self", ".", "bgcolor_choice", "=", "csel", ".", "ColourSelect", "(", "self", ",", "-", "1", ",", "\"\"", ",", "(", "255", ",", "255", ",", "255", ")", ",", "size", "=", "button_size", ",", "style", "=", "button_style", ")", "self", ".", "textcolor_choice", "=", "csel", ".", "ColourSelect", "(", "self", ",", "-", "1", ",", "\"A\"", ",", "(", "0", ",", "0", ",", "0", ")", ",", "size", "=", "button_size", ",", "style", "=", "button_style", ")", "self", ".", "linecolor_choice", ".", "SetToolTipString", "(", "_", "(", "u\"Border line color\"", ")", ")", "self", ".", "bgcolor_choice", ".", "SetToolTipString", "(", "_", "(", "u\"Cell background\"", ")", ")", "self", ".", "textcolor_choice", ".", "SetToolTipString", "(", "_", "(", "u\"Text color\"", ")", ")", "self", ".", "AddControl", "(", "self", ".", "linecolor_choice", ")", "self", ".", "AddControl", "(", "self", ".", "bgcolor_choice", ")", "self", ".", "AddControl", "(", "self", ".", "textcolor_choice", ")", "self", ".", "linecolor_choice", ".", "Bind", "(", "csel", ".", "EVT_COLOURSELECT", ",", "self", ".", "OnLineColor", ")", "self", ".", "bgcolor_choice", ".", "Bind", "(", "csel", ".", "EVT_COLOURSELECT", ",", "self", ".", "OnBGColor", ")", "self", ".", "textcolor_choice", ".", "Bind", "(", "csel", ".", "EVT_COLOURSELECT", ",", "self", ".", "OnTextColor", ")" ]
42.294118
21.205882
def requestedFormat(request,acceptedFormat):     """Return the response format requested by the client     Clients can specify the requested format using:     (options are processed in this order)     - `format` field in http request     - `Accept` header in http request     Example:      requestedFormat(request, ['text/html','application/json'])     Args:      acceptedFormat: list containing all the accepted formats     Returns:      string: the user requested mime-type (if supported)     Raises:      ValueError: if the user requests a mime-type that is not supported     """     if 'format' in request.args:         fieldFormat = request.args.get('format')         if fieldFormat not in acceptedFormat:             raise ValueError("requested format not supported: "+ fieldFormat)         return fieldFormat     else:         return request.accept_mimetypes.best_match(acceptedFormat)
[ "def", "requestedFormat", "(", "request", ",", "acceptedFormat", ")", ":", "if", "'format'", "in", "request", ".", "args", ":", "fieldFormat", "=", "request", ".", "args", ".", "get", "(", "'format'", ")", "if", "fieldFormat", "not", "in", "acceptedFormat", ":", "raise", "ValueError", "(", "\"requested format not supported: \"", "+", "fieldFormat", ")", "return", "fieldFormat", "else", ":", "return", "request", ".", "accept_mimetypes", ".", "best_match", "(", "acceptedFormat", ")" ]
41.347826
17.608696
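A sketch of the helper inside a Flask view; the existing `app`, the route, the payload, and the template name are all hypothetical ::

    from flask import request, jsonify, render_template

    @app.route('/report')
    def report():
        data = {'status': 'ok'}       # hypothetical payload
        try:
            fmt = requestedFormat(request, ['text/html', 'application/json'])
        except ValueError:
            return 'requested format not supported', 406
        if fmt == 'application/json':
            return jsonify(data)
        return render_template('report.html', data=data)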
def get_assessment_offered_mdata(): """Return default mdata map for AssessmentOffered""" return { 'level': { 'element_label': { 'text': 'level', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'accepts an osid.id.Id object', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_id_values': [''], 'syntax': 'ID', 'id_set': [], }, 'start_time': { 'element_label': { 'text': 'start time', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'enter a valid datetime object.', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_date_time_values': [None], 'syntax': 'DATETIME', 'date_time_set': [], }, 'grade_system': { 'element_label': { 'text': 'grade system', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'accepts an osid.id.Id object', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_id_values': [''], 'syntax': 'ID', 'id_set': [], }, 'items_shuffled': { 'element_label': { 'text': 'items shuffled', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'enter either true or false.', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_boolean_values': [None], 'syntax': 'BOOLEAN', }, 'score_system': { 'element_label': { 'text': 'score system', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'accepts an osid.id.Id object', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_id_values': [''], 'syntax': 'ID', 'id_set': [], }, 'deadline': { 'element_label': { 'text': 'deadline', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'enter a valid datetime object.', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_date_time_values': [None], 'syntax': 'DATETIME', 'date_time_set': [], }, 'duration': { 'element_label': { 'text': 'duration', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'enter a valid duration object.', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': 
str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_duration_values': [None], 'syntax': 'DURATION', 'date_time_set': [], }, 'assessment': { 'element_label': { 'text': 'assessment', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'accepts an osid.id.Id object', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_id_values': [''], 'syntax': 'ID', 'id_set': [], }, 'items_sequential': { 'element_label': { 'text': 'items sequential', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'instructions': { 'text': 'enter either true or false.', 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE), 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE), 'formatTypeId': str(DEFAULT_FORMAT_TYPE), }, 'required': False, 'read_only': False, 'linked': False, 'array': False, 'default_boolean_values': [None], 'syntax': 'BOOLEAN', }, }
[ "def", "get_assessment_offered_mdata", "(", ")", ":", "return", "{", "'level'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'level'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'accepts an osid.id.Id object'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_id_values'", ":", "[", "''", "]", ",", "'syntax'", ":", "'ID'", ",", "'id_set'", ":", "[", "]", ",", "}", ",", "'start_time'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'start time'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'enter a valid datetime object.'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_date_time_values'", ":", "[", "None", "]", ",", "'syntax'", ":", "'DATETIME'", ",", "'date_time_set'", ":", "[", "]", ",", "}", ",", "'grade_system'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'grade system'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'accepts an osid.id.Id object'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_id_values'", ":", "[", "''", "]", ",", "'syntax'", ":", "'ID'", ",", "'id_set'", ":", "[", "]", ",", "}", ",", "'items_shuffled'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'items shuffled'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'enter either true or false.'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_boolean_values'", ":", "[", "None", "]", ",", "'syntax'", ":", "'BOOLEAN'", ",", "}", ",", "'score_system'", ":", "{", "'element_label'", 
":", "{", "'text'", ":", "'score system'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'accepts an osid.id.Id object'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_id_values'", ":", "[", "''", "]", ",", "'syntax'", ":", "'ID'", ",", "'id_set'", ":", "[", "]", ",", "}", ",", "'deadline'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'deadline'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'enter a valid datetime object.'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_date_time_values'", ":", "[", "None", "]", ",", "'syntax'", ":", "'DATETIME'", ",", "'date_time_set'", ":", "[", "]", ",", "}", ",", "'duration'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'duration'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'enter a valid duration object.'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_duration_values'", ":", "[", "None", "]", ",", "'syntax'", ":", "'DURATION'", ",", "'date_time_set'", ":", "[", "]", ",", "}", ",", "'assessment'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'assessment'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'accepts an osid.id.Id object'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_id_values'", ":", "[", "''", "]", ",", "'syntax'", ":", "'ID'", ",", "'id_set'", ":", "[", "]", ",", "}", ",", "'items_sequential'", ":", "{", "'element_label'", ":", "{", "'text'", ":", "'items sequential'", ",", "'languageTypeId'", 
":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'instructions'", ":", "{", "'text'", ":", "'enter either true or false.'", ",", "'languageTypeId'", ":", "str", "(", "DEFAULT_LANGUAGE_TYPE", ")", ",", "'scriptTypeId'", ":", "str", "(", "DEFAULT_SCRIPT_TYPE", ")", ",", "'formatTypeId'", ":", "str", "(", "DEFAULT_FORMAT_TYPE", ")", ",", "}", ",", "'required'", ":", "False", ",", "'read_only'", ":", "False", ",", "'linked'", ":", "False", ",", "'array'", ":", "False", ",", "'default_boolean_values'", ":", "[", "None", "]", ",", "'syntax'", ":", "'BOOLEAN'", ",", "}", ",", "}" ]
37.031414
15.230366
def accel_move_tab_left(self, *args): # TODO KEYBINDINGS ONLY """ Callback to move a tab to the left """ pos = self.get_notebook().get_current_page() if pos != 0: self.move_tab(pos, pos - 1) return True
[ "def", "accel_move_tab_left", "(", "self", ",", "*", "args", ")", ":", "# TODO KEYBINDINGS ONLY", "pos", "=", "self", ".", "get_notebook", "(", ")", ".", "get_current_page", "(", ")", "if", "pos", "!=", "0", ":", "self", ".", "move_tab", "(", "pos", ",", "pos", "-", "1", ")", "return", "True" ]
35.428571
9.428571
def satellites_configuration(self): """Return all the configuration data of satellites :return: dict containing satellites data Output looks like this :: {'arbiter' : [{'property1':'value1' ..}, {'property2', 'value11' ..}, ..], 'scheduler': [..], 'poller': [..], 'reactionner': [..], 'receiver': [..], 'broker: [..]' } :rtype: dict """ res = {} for s_type in ['arbiter', 'scheduler', 'poller', 'reactionner', 'receiver', 'broker']: lst = [] res[s_type] = lst for daemon in getattr(self.app.conf, s_type + 's'): cls = daemon.__class__ env = {} all_props = [cls.properties, cls.running_properties] for props in all_props: for prop in props: if not hasattr(daemon, prop): continue if prop in ["realms", "conf", "con", "tags", "modules", "cfg", "broks", "cfg_to_manage"]: continue val = getattr(daemon, prop) # give a try to a json able object try: json.dumps(val) env[prop] = val except TypeError as exp: logger.warning('satellites_configuration, %s: %s', prop, str(exp)) lst.append(env) return res
[ "def", "satellites_configuration", "(", "self", ")", ":", "res", "=", "{", "}", "for", "s_type", "in", "[", "'arbiter'", ",", "'scheduler'", ",", "'poller'", ",", "'reactionner'", ",", "'receiver'", ",", "'broker'", "]", ":", "lst", "=", "[", "]", "res", "[", "s_type", "]", "=", "lst", "for", "daemon", "in", "getattr", "(", "self", ".", "app", ".", "conf", ",", "s_type", "+", "'s'", ")", ":", "cls", "=", "daemon", ".", "__class__", "env", "=", "{", "}", "all_props", "=", "[", "cls", ".", "properties", ",", "cls", ".", "running_properties", "]", "for", "props", "in", "all_props", ":", "for", "prop", "in", "props", ":", "if", "not", "hasattr", "(", "daemon", ",", "prop", ")", ":", "continue", "if", "prop", "in", "[", "\"realms\"", ",", "\"conf\"", ",", "\"con\"", ",", "\"tags\"", ",", "\"modules\"", ",", "\"cfg\"", ",", "\"broks\"", ",", "\"cfg_to_manage\"", "]", ":", "continue", "val", "=", "getattr", "(", "daemon", ",", "prop", ")", "# give a try to a json able object", "try", ":", "json", ".", "dumps", "(", "val", ")", "env", "[", "prop", "]", "=", "val", "except", "TypeError", "as", "exp", ":", "logger", ".", "warning", "(", "'satellites_configuration, %s: %s'", ",", "prop", ",", "str", "(", "exp", ")", ")", "lst", ".", "append", "(", "env", ")", "return", "res" ]
37.119048
17.952381
def erase_characters(self, count=None):         """Erase the indicated # of characters, starting with the character at cursor position. Character attributes are set to cursor attributes. The cursor remains in the same position.         :param int count: number of characters to erase.         .. note::            Using cursor attributes for character attributes may seem            illogical, but if you recall that a terminal emulator emulates            a typewriter, it starts to make sense. The only way a typewriter            could erase a character is by typing over it.         """         self.dirty.add(self.cursor.y)         count = count or 1         line = self.buffer[self.cursor.y]         for x in range(self.cursor.x,                        min(self.cursor.x + count, self.columns)):             line[x] = self.cursor.attrs
[ "def", "erase_characters", "(", "self", ",", "count", "=", "None", ")", ":", "self", ".", "dirty", ".", "add", "(", "self", ".", "cursor", ".", "y", ")", "count", "=", "count", "or", "1", "line", "=", "self", ".", "buffer", "[", "self", ".", "cursor", ".", "y", "]", "for", "x", "in", "range", "(", "self", ".", "cursor", ".", "x", ",", "min", "(", "self", ".", "cursor", ".", "x", "+", "count", ",", "self", ".", "columns", ")", ")", ":", "line", "[", "x", "]", "=", "self", ".", "cursor", ".", "attrs" ]
39.809524
19.52381
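The method matches pyte's Screen API (the ECH control function); a short sketch, assuming that library ::

    import pyte

    screen = pyte.Screen(10, 1)
    stream = pyte.Stream(screen)
    stream.feed("abcdef")
    screen.cursor_position(1, 2)   # 1-based: row 1, column 2 (the "b")
    screen.erase_characters(3)     # blank out "bcd"; the cursor stays put
    print(screen.display)          # ['a   ef    ']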
def put_events(environment, start_response, headers): """ Store events in backends POST body should contain a JSON encoded version of: { namespace: namespace_name (optional), events: { stream_name1 : [event1, event2, ...], stream_name2 : [event1, event2, ...], ... } } Where each event is a dictionary of keys and values. """ errors = [] events_to_insert = defaultdict(list) request_json = environment['json'] namespace = request_json.get('namespace', settings.default_namespace) # Validate streams and events for stream, events in request_json.get('events', {}).iteritems(): try: validate_stream(stream) except Exception, e: log.exception('put_events: stream validation failed for `%s`', stream) errors.append(repr(e)) continue for event in events: try: events_to_insert[stream].append(validate_event_and_assign_id(event)) except Exception, e: log.exception('put_events: event validation failed for `%s`', event) errors.append(repr(e)) results = {} for stream, events in events_to_insert.iteritems(): backends = router.backends_to_mutate(namespace, stream) for backend, configuration in backends.iteritems(): results[(stream, backend.name)] = execute_greenlet_async( backend.insert, namespace, stream, events, configuration) wait(results.values()) # Did any insertion fail? success = True response = defaultdict(dict) for (stream, backend), result in results.iteritems(): try: result.get() response[stream][backend] = { 'num_inserted': len(events_to_insert[stream]) } except Exception, e: log.exception('put_events: insertion to backend `%s` failed.', backend) success = False response[stream][backend] = {'num_inserted': -1, ERRORS_FIELD: [repr(e)]} response[SUCCESS_FIELD] = success and not errors if errors: response[ERRORS_FIELD] = errors start_response('200 OK', headers) return response
[ "def", "put_events", "(", "environment", ",", "start_response", ",", "headers", ")", ":", "errors", "=", "[", "]", "events_to_insert", "=", "defaultdict", "(", "list", ")", "request_json", "=", "environment", "[", "'json'", "]", "namespace", "=", "request_json", ".", "get", "(", "'namespace'", ",", "settings", ".", "default_namespace", ")", "# Validate streams and events", "for", "stream", ",", "events", "in", "request_json", ".", "get", "(", "'events'", ",", "{", "}", ")", ".", "iteritems", "(", ")", ":", "try", ":", "validate_stream", "(", "stream", ")", "except", "Exception", ",", "e", ":", "log", ".", "exception", "(", "'put_events: stream validation failed for `%s`'", ",", "stream", ")", "errors", ".", "append", "(", "repr", "(", "e", ")", ")", "continue", "for", "event", "in", "events", ":", "try", ":", "events_to_insert", "[", "stream", "]", ".", "append", "(", "validate_event_and_assign_id", "(", "event", ")", ")", "except", "Exception", ",", "e", ":", "log", ".", "exception", "(", "'put_events: event validation failed for `%s`'", ",", "event", ")", "errors", ".", "append", "(", "repr", "(", "e", ")", ")", "results", "=", "{", "}", "for", "stream", ",", "events", "in", "events_to_insert", ".", "iteritems", "(", ")", ":", "backends", "=", "router", ".", "backends_to_mutate", "(", "namespace", ",", "stream", ")", "for", "backend", ",", "configuration", "in", "backends", ".", "iteritems", "(", ")", ":", "results", "[", "(", "stream", ",", "backend", ".", "name", ")", "]", "=", "execute_greenlet_async", "(", "backend", ".", "insert", ",", "namespace", ",", "stream", ",", "events", ",", "configuration", ")", "wait", "(", "results", ".", "values", "(", ")", ")", "# Did any insertion fail?", "success", "=", "True", "response", "=", "defaultdict", "(", "dict", ")", "for", "(", "stream", ",", "backend", ")", ",", "result", "in", "results", ".", "iteritems", "(", ")", ":", "try", ":", "result", ".", "get", "(", ")", "response", "[", "stream", "]", "[", "backend", "]", "=", "{", "'num_inserted'", ":", "len", "(", "events_to_insert", "[", "stream", "]", ")", "}", "except", "Exception", ",", "e", ":", "log", ".", "exception", "(", "'put_events: insertion to backend `%s` failed.'", ",", "backend", ")", "success", "=", "False", "response", "[", "stream", "]", "[", "backend", "]", "=", "{", "'num_inserted'", ":", "-", "1", ",", "ERRORS_FIELD", ":", "[", "repr", "(", "e", ")", "]", "}", "response", "[", "SUCCESS_FIELD", "]", "=", "success", "and", "not", "errors", "if", "errors", ":", "response", "[", "ERRORS_FIELD", "]", "=", "errors", "start_response", "(", "'200 OK'", ",", "headers", ")", "return", "response" ]
33.016393
20.196721
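The POST body the handler expects, shown as the equivalent Python dict; the namespace, stream names, and event fields are arbitrary ::

    body = {
        "namespace": "demo",          # optional; falls back to the default
        "events": {
            "clicks": [{"user": 1, "page": "/home"}],
            "errors": [{"code": 500}, {"code": 404}],
        },
    }
    # each event is validated, assigned an id, and inserted into every
    # backend the router maps to its stream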
def get_collection(self, name): '''get a collection, if it exists, otherwise return None. ''' from sregistry.database.models import Collection return Collection.query.filter(Collection.name == name).first()
[ "def", "get_collection", "(", "self", ",", "name", ")", ":", "from", "sregistry", ".", "database", ".", "models", "import", "Collection", "return", "Collection", ".", "query", ".", "filter", "(", "Collection", ".", "name", "==", "name", ")", ".", "first", "(", ")" ]
43.6
20.4
def reset(self): """Reset itself and recursively all its children.""" watchers.MATCHER.debug("Node <%s> reset", self) self._reset() for child in self.children: child.node.reset()
[ "def", "reset", "(", "self", ")", ":", "watchers", ".", "MATCHER", ".", "debug", "(", "\"Node <%s> reset\"", ",", "self", ")", "self", ".", "_reset", "(", ")", "for", "child", "in", "self", ".", "children", ":", "child", ".", "node", ".", "reset", "(", ")" ]
36.166667
12.166667
def _read_message(self): """ Reads a message from the socket and converts it to a message. """ # first 4 bytes is Big-Endian payload length payload_info = self._read_bytes_from_socket(4) read_len = unpack(">I", payload_info)[0] # now read the payload payload = self._read_bytes_from_socket(read_len) # pylint: disable=no-member message = cast_channel_pb2.CastMessage() message.ParseFromString(payload) return message
[ "def", "_read_message", "(", "self", ")", ":", "# first 4 bytes is Big-Endian payload length", "payload_info", "=", "self", ".", "_read_bytes_from_socket", "(", "4", ")", "read_len", "=", "unpack", "(", "\">I\"", ",", "payload_info", ")", "[", "0", "]", "# now read the payload", "payload", "=", "self", ".", "_read_bytes_from_socket", "(", "read_len", ")", "# pylint: disable=no-member", "message", "=", "cast_channel_pb2", ".", "CastMessage", "(", ")", "message", ".", "ParseFromString", "(", "payload", ")", "return", "message" ]
34.714286
16.214286
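The wire format is a 4-byte big-endian length prefix followed by a serialized protobuf payload; a self-contained sketch of the same framing ::

    from struct import pack, unpack

    payload = b"\x08\x01\x12\x03abc"            # stand-in for protobuf bytes
    frame = pack(">I", len(payload)) + payload  # length-prefixed frame

    read_len = unpack(">I", frame[:4])[0]       # first 4 bytes: payload length
    assert frame[4:4 + read_len] == payload     # then read exactly that many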
def create(obj: PersistedObject, obj_type: Type[Any], arg_name: str):         """         Helper method provided because we actually can't put that in the constructor; it creates a bug in Nose tests         https://github.com/nose-devs/nose/issues/725         :param obj:         :param obj_type:         :param arg_name:         :return:         """         return MissingMandatoryAttributeFiles('Multifile object ' + str(obj) + ' cannot be built from constructor of '                                               'type ' + get_pretty_type_str(obj_type) + ', mandatory constructor argument \'' + arg_name + '\' was not found on '                                                                                                 'filesystem')
[ "def", "create", "(", "obj", ":", "PersistedObject", ",", "obj_type", ":", "Type", "[", "Any", "]", ",", "arg_name", ":", "str", ")", ":", "return", "MissingMandatoryAttributeFiles", "(", "'Multifile object '", "+", "str", "(", "obj", ")", "+", "' cannot be built from constructor of '", "'type '", "+", "get_pretty_type_str", "(", "obj_type", ")", "+", "', mandatory constructor argument \\''", "+", "arg_name", "+", "'\\'was not found on '", "'filesystem'", ")" ]
48.266667
33.066667
def _zforce(self,R,z,phi=0.,t=0.): """ NAME: zforce PURPOSE: evaluate vertical force K_z (R,z) INPUT: R - Cylindrical Galactocentric radius z - vertical height phi - azimuth t - time OUTPUT: K_z (R,z) HISTORY: 2010-04-16 - Written - Bovy (NYU) DOCTEST: """ if True: if isinstance(R,nu.ndarray): if not isinstance(z,nu.ndarray): z= nu.ones_like(R)*z out= nu.array([self._zforce(rr,zz) for rr,zz in zip(R,z)]) return out if R > 16.*self._hr or R > 6.: return self._kp.zforce(R,z) if R < 1.: R4max= 1. else: R4max= R kmax= self._kmaxFac*self._beta maxj0zeroIndx= nu.argmin((self._j0zeros-kmax*R4max)**2.) #close enough ks= nu.array([0.5*(self._glx+1.)*self._dj0zeros[ii+1] + self._j0zeros[ii] for ii in range(maxj0zeroIndx)]).flatten() weights= nu.array([self._glw*self._dj0zeros[ii+1] for ii in range(maxj0zeroIndx)]).flatten() evalInt= ks*special.jn(0,ks*R)*(self._alpha**2.+ks**2.)**-1.5*(nu.exp(-ks*nu.fabs(z))-nu.exp(-self._beta*nu.fabs(z)))/(self._beta**2.-ks**2.) if z > 0.: return -2.*nu.pi*self._alpha*self._beta*nu.sum(weights*evalInt) else: return 2.*nu.pi*self._alpha*self._beta*nu.sum(weights*evalInt)
[ "def", "_zforce", "(", "self", ",", "R", ",", "z", ",", "phi", "=", "0.", ",", "t", "=", "0.", ")", ":", "if", "True", ":", "if", "isinstance", "(", "R", ",", "nu", ".", "ndarray", ")", ":", "if", "not", "isinstance", "(", "z", ",", "nu", ".", "ndarray", ")", ":", "z", "=", "nu", ".", "ones_like", "(", "R", ")", "*", "z", "out", "=", "nu", ".", "array", "(", "[", "self", ".", "_zforce", "(", "rr", ",", "zz", ")", "for", "rr", ",", "zz", "in", "zip", "(", "R", ",", "z", ")", "]", ")", "return", "out", "if", "R", ">", "16.", "*", "self", ".", "_hr", "or", "R", ">", "6.", ":", "return", "self", ".", "_kp", ".", "zforce", "(", "R", ",", "z", ")", "if", "R", "<", "1.", ":", "R4max", "=", "1.", "else", ":", "R4max", "=", "R", "kmax", "=", "self", ".", "_kmaxFac", "*", "self", ".", "_beta", "maxj0zeroIndx", "=", "nu", ".", "argmin", "(", "(", "self", ".", "_j0zeros", "-", "kmax", "*", "R4max", ")", "**", "2.", ")", "#close enough", "ks", "=", "nu", ".", "array", "(", "[", "0.5", "*", "(", "self", ".", "_glx", "+", "1.", ")", "*", "self", ".", "_dj0zeros", "[", "ii", "+", "1", "]", "+", "self", ".", "_j0zeros", "[", "ii", "]", "for", "ii", "in", "range", "(", "maxj0zeroIndx", ")", "]", ")", ".", "flatten", "(", ")", "weights", "=", "nu", ".", "array", "(", "[", "self", ".", "_glw", "*", "self", ".", "_dj0zeros", "[", "ii", "+", "1", "]", "for", "ii", "in", "range", "(", "maxj0zeroIndx", ")", "]", ")", ".", "flatten", "(", ")", "evalInt", "=", "ks", "*", "special", ".", "jn", "(", "0", ",", "ks", "*", "R", ")", "*", "(", "self", ".", "_alpha", "**", "2.", "+", "ks", "**", "2.", ")", "**", "-", "1.5", "*", "(", "nu", ".", "exp", "(", "-", "ks", "*", "nu", ".", "fabs", "(", "z", ")", ")", "-", "nu", ".", "exp", "(", "-", "self", ".", "_beta", "*", "nu", ".", "fabs", "(", "z", ")", ")", ")", "/", "(", "self", ".", "_beta", "**", "2.", "-", "ks", "**", "2.", ")", "if", "z", ">", "0.", ":", "return", "-", "2.", "*", "nu", ".", "pi", "*", "self", ".", "_alpha", "*", "self", ".", "_beta", "*", "nu", ".", "sum", "(", "weights", "*", "evalInt", ")", "else", ":", "return", "2.", "*", "nu", ".", "pi", "*", "self", ".", "_alpha", "*", "self", ".", "_beta", "*", "nu", ".", "sum", "(", "weights", "*", "evalInt", ")" ]
42.676471
24.794118
def read(input_taxonomy_io):     '''Parse in a taxonomy from the given I/O.     Parameters     ----------     input_taxonomy_io: io         an open io object that is iterable. This stream is neither opened nor closed by this method     Returns     -------     A GreenGenesTaxonomy instance with the taxonomy parsed in     Raises     ------     DuplicateTaxonomyException: if there are duplicate taxonomy entries     MalformedGreenGenesTaxonomyException: if there is something else amiss'''     tax = {}     for line in input_taxonomy_io:         if len(line.strip()) == 0: continue #ignore empty lines         splits = line.split("\t")         if len(splits) != 2:             raise MalformedGreenGenesTaxonomyException("Unexpected number of tab-separated fields found in taxonomy file, on line %s" % line)         name = splits[0].strip()         taxonomy = [t.strip() for t in splits[1].split(';')]         while len(taxonomy) > 0 and taxonomy[-1] == '':             taxonomy = taxonomy[:-1]         for lineage in taxonomy:             if lineage == '':                 raise MalformedGreenGenesTaxonomyException("Encountered a taxonomy string with the middle of the taxonomy string missing: %s" % line)         if name in tax:             raise DuplicateTaxonomyException("Duplicate definition of taxonomy for %s" % name)         tax[name] = taxonomy     return GreenGenesTaxonomy(tax)
[ "def", "read", "(", "input_taxonomy_io", ")", ":", "tax", "=", "{", "}", "for", "line", "in", "input_taxonomy_io", ":", "if", "len", "(", "line", ".", "strip", "(", ")", ")", "==", "0", ":", "continue", "#ignore empty lines", "splits", "=", "line", ".", "split", "(", "\"\\t\"", ")", "if", "len", "(", "splits", ")", "!=", "2", ":", "raise", "MalformedGreenGenesTaxonomyException", "(", "\"Unexpected number of tab-separated fields found in taxonomy file, on line %s\"", "%", "line", ")", "name", "=", "splits", "[", "0", "]", ".", "strip", "(", ")", "taxonomy", "=", "[", "t", ".", "strip", "(", ")", "for", "t", "in", "splits", "[", "1", "]", ".", "split", "(", "';'", ")", "]", "while", "len", "(", "taxonomy", ")", ">", "0", "and", "taxonomy", "[", "-", "1", "]", "==", "''", ":", "taxonomy", "=", "taxonomy", "[", ":", "-", "1", "]", "for", "lineage", "in", "taxonomy", ":", "if", "lineage", "==", "''", ":", "raise", "MalformedGreenGenesTaxonomyException", "(", "\"Encountered a taxonomy string with the middle of the taxonomy string missing: %s\"", "%", "line", ")", "if", "name", "in", "tax", ":", "raise", "DuplicateTaxonomyException", "(", "\"Duplicate definition of taxonomy for %s\"", "%", "name", ")", "tax", "[", "name", "]", "=", "taxonomy", "return", "GreenGenesTaxonomy", "(", "tax", ")" ]
42.457143
25.714286
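A parsing sketch; how read() is attached to GreenGenesTaxonomy (e.g. as a static method) and where the parsed dict ends up are assumptions ::

    import io

    tax_io = io.StringIO(
        "seq1\tk__Bacteria; p__Firmicutes\n"
        "seq2\tk__Archaea\n")
    taxonomy = GreenGenesTaxonomy.read(tax_io)   # assumed static-method exposure
    # the parsed mapping would be:
    #   {'seq1': ['k__Bacteria', 'p__Firmicutes'], 'seq2': ['k__Archaea']}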
def gc(ctx): """Runs housekeeping tasks to free up space. For now, this only removes saved but unused (unreachable) test results. """ vcs = ctx.obj['vcs'] count = 0 with locking.lock(vcs, locking.Lock.tests_history): known_signatures = set(get_committed_signatures(vcs) + get_staged_signatures(vcs)) for signature in get_signatures_with_results(vcs): if signature not in known_signatures: count += 1 remove_results(vcs, signature) click.echo('Removed {} saved results.'.format(count))
[ "def", "gc", "(", "ctx", ")", ":", "vcs", "=", "ctx", ".", "obj", "[", "'vcs'", "]", "count", "=", "0", "with", "locking", ".", "lock", "(", "vcs", ",", "locking", ".", "Lock", ".", "tests_history", ")", ":", "known_signatures", "=", "set", "(", "get_committed_signatures", "(", "vcs", ")", "+", "get_staged_signatures", "(", "vcs", ")", ")", "for", "signature", "in", "get_signatures_with_results", "(", "vcs", ")", ":", "if", "signature", "not", "in", "known_signatures", ":", "count", "+=", "1", "remove_results", "(", "vcs", ",", "signature", ")", "click", ".", "echo", "(", "'Removed {} saved results.'", ".", "format", "(", "count", ")", ")" ]
40
19.642857
def get_provider(self, name): """Allows for lazy instantiation of providers (Jinja2 templating is heavy, so only instantiate it if necessary).""" if name not in self.providers: cls = self.provider_classes[name] # instantiate the provider self.providers[name] = cls(self) return self.providers[name]
[ "def", "get_provider", "(", "self", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "providers", ":", "cls", "=", "self", ".", "provider_classes", "[", "name", "]", "# instantiate the provider", "self", ".", "providers", "[", "name", "]", "=", "cls", "(", "self", ")", "return", "self", ".", "providers", "[", "name", "]" ]
44.875
3.625
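The same lazy-instantiation pattern in isolation, as a minimal self-contained sketch ::

    class Registry:
        def __init__(self, provider_classes):
            self.provider_classes = provider_classes
            self.providers = {}       # instances are created on first use

        def get_provider(self, name):
            if name not in self.providers:
                self.providers[name] = self.provider_classes[name](self)
            return self.providers[name]

    class EchoProvider:
        def __init__(self, owner):
            print("instantiated once")

    registry = Registry({'echo': EchoProvider})
    registry.get_provider('echo')     # prints "instantiated once"
    registry.get_provider('echo')     # cached: prints nothing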
def cuts_from_bbox(mask_nii, cuts=3):     """Finds equi-spaced cuts for presenting images"""     from nibabel.affines import apply_affine     mask_data = mask_nii.get_data() > 0.0     # First, project the number of masked voxels on each axes     ijk_counts = [         mask_data.sum(2).sum(1),  # project sagittal planes to transverse (i) axis         mask_data.sum(2).sum(0),  # project coronal planes to longitudinal (j) axis         mask_data.sum(1).sum(0),  # project axial planes to vertical (k) axis     ]     # If all voxels are masked in a slice (say that happens at k=10),     # then the value for ijk_counts for the projection to k (ie. ijk_counts[2])     # at that element of the orthogonal axes (ijk_counts[2][10]) is     # the total number of voxels in that slice (ie. Ni x Nj).     # Here we define some thresholds to consider the plane as "masked"     # The thresholds vary because of the shape of the brain     # I have manually found that for the axial view requiring 30%     # of the slice elements to be masked drops almost empty boxes     # in the mosaic of axial planes (and also addresses #281)     ijk_th = [         int((mask_data.shape[1] * mask_data.shape[2]) * 0.2),  # sagittal         int((mask_data.shape[0] * mask_data.shape[2]) * 0.0),  # coronal         int((mask_data.shape[0] * mask_data.shape[1]) * 0.3),  # axial     ]     vox_coords = []     for ax, (c, th) in enumerate(zip(ijk_counts, ijk_th)):         B = np.argwhere(c > th)         if B.size:             smin, smax = B.min(), B.max()             # Avoid too narrow selections of cuts (very small masks)         if not B.size or (th > 0 and (smin + cuts + 1) >= smax):             B = np.argwhere(c > 0)         # Resort to full plane if mask is seemingly empty         smin, smax = B.min(), B.max() if B.size else (0, mask_data.shape[ax])         inc = (smax - smin) / (cuts + 1)         vox_coords.append([smin + (i + 1) * inc for i in range(cuts)])     ras_coords = []     for cross in np.array(vox_coords).T:         ras_coords.append(apply_affine(             mask_nii.affine, cross).tolist())     ras_cuts = [list(coords) for coords in np.transpose(ras_coords)]     return {k: v for k, v in zip(['x', 'y', 'z'], ras_cuts)}
[ "def", "cuts_from_bbox", "(", "mask_nii", ",", "cuts", "=", "3", ")", ":", "from", "nibabel", ".", "affines", "import", "apply_affine", "mask_data", "=", "mask_nii", ".", "get_data", "(", ")", ">", "0.0", "# First, project the number of masked voxels on each axes", "ijk_counts", "=", "[", "mask_data", ".", "sum", "(", "2", ")", ".", "sum", "(", "1", ")", ",", "# project sagittal planes to transverse (i) axis", "mask_data", ".", "sum", "(", "2", ")", ".", "sum", "(", "0", ")", ",", "# project coronal planes to to longitudinal (j) axis", "mask_data", ".", "sum", "(", "1", ")", ".", "sum", "(", "0", ")", ",", "# project axial planes to vertical (k) axis", "]", "# If all voxels are masked in a slice (say that happens at k=10),", "# then the value for ijk_counts for the projection to k (ie. ijk_counts[2])", "# at that element of the orthogonal axes (ijk_counts[2][10]) is", "# the total number of voxels in that slice (ie. Ni x Nj).", "# Here we define some thresholds to consider the plane as \"masked\"", "# The thresholds vary because of the shape of the brain", "# I have manually found that for the axial view requiring 30%", "# of the slice elements to be masked drops almost empty boxes", "# in the mosaic of axial planes (and also addresses #281)", "ijk_th", "=", "[", "int", "(", "(", "mask_data", ".", "shape", "[", "1", "]", "*", "mask_data", ".", "shape", "[", "2", "]", ")", "*", "0.2", ")", ",", "# sagittal", "int", "(", "(", "mask_data", ".", "shape", "[", "0", "]", "*", "mask_data", ".", "shape", "[", "2", "]", ")", "*", "0.0", ")", ",", "# coronal", "int", "(", "(", "mask_data", ".", "shape", "[", "0", "]", "*", "mask_data", ".", "shape", "[", "1", "]", ")", "*", "0.3", ")", ",", "# axial", "]", "vox_coords", "=", "[", "]", "for", "ax", ",", "(", "c", ",", "th", ")", "in", "enumerate", "(", "zip", "(", "ijk_counts", ",", "ijk_th", ")", ")", ":", "B", "=", "np", ".", "argwhere", "(", "c", ">", "th", ")", "if", "B", ".", "size", ":", "smin", ",", "smax", "=", "B", ".", "min", "(", ")", ",", "B", ".", "max", "(", ")", "# Avoid too narrow selections of cuts (very small masks)", "if", "not", "B", ".", "size", "or", "(", "th", ">", "0", "and", "(", "smin", "+", "cuts", "+", "1", ")", ">=", "smax", ")", ":", "B", "=", "np", ".", "argwhere", "(", "c", ">", "0", ")", "# Resort to full plane if mask is seemingly empty", "smin", ",", "smax", "=", "B", ".", "min", "(", ")", ",", "B", ".", "max", "(", ")", "if", "B", ".", "size", "else", "(", "0", ",", "mask_data", ".", "shape", "[", "ax", "]", ")", "inc", "=", "(", "smax", "-", "smin", ")", "/", "(", "cuts", "+", "1", ")", "vox_coords", ".", "append", "(", "[", "smin", "+", "(", "i", "+", "1", ")", "*", "inc", "for", "i", "in", "range", "(", "cuts", ")", "]", ")", "ras_coords", "=", "[", "]", "for", "cross", "in", "np", ".", "array", "(", "vox_coords", ")", ".", "T", ":", "ras_coords", ".", "append", "(", "apply_affine", "(", "mask_nii", ".", "affine", ",", "cross", ")", ".", "tolist", "(", ")", ")", "ras_cuts", "=", "[", "list", "(", "coords", ")", "for", "coords", "in", "np", ".", "transpose", "(", "ras_coords", ")", "]", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "zip", "(", "[", "'x'", ",", "'y'", ",", "'z'", "]", ",", "ras_cuts", ")", "}" ]
44.530612
23.877551
def update_gol(self): """ Function that performs one step of the Game of Life """ updated_grid = [[self.update_cell(row, col) \ for col in range(self.get_grid_width())] \ for row in range(self.get_grid_height())] self.replace_grid(updated_grid)
[ "def", "update_gol", "(", "self", ")", ":", "updated_grid", "=", "[", "[", "self", ".", "update_cell", "(", "row", ",", "col", ")", "for", "col", "in", "range", "(", "self", ".", "get_grid_width", "(", ")", ")", "]", "for", "row", "in", "range", "(", "self", ".", "get_grid_height", "(", ")", ")", "]", "self", ".", "replace_grid", "(", "updated_grid", ")" ]
34.9
17.5
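For context, a self-contained single step of Conway's rules over a bounded list-of-lists grid (independent of the class above, whose update_cell is not shown) ::

    def live_neighbours(grid, r, c):
        rows, cols = len(grid), len(grid[0])
        return sum(grid[rr][cc]
                   for rr in range(max(0, r - 1), min(rows, r + 2))
                   for cc in range(max(0, c - 1), min(cols, c + 2))
                   if (rr, cc) != (r, c))

    def gol_step(grid):
        return [[1 if live_neighbours(grid, r, c) == 3
                 or (grid[r][c] and live_neighbours(grid, r, c) == 2) else 0
                 for c in range(len(grid[0]))]
                for r in range(len(grid))]

    blinker = [[0, 0, 0], [1, 1, 1], [0, 0, 0]]
    print(gol_step(blinker))   # [[0, 1, 0], [0, 1, 0], [0, 1, 0]]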
def debug(s, *args): """debug(s, x1, ..., xn) logs s.format(x1, ..., xn).""" # Get the path name and line number of the function which called us. previous_frame = inspect.currentframe().f_back try: pathname, lineno, _, _, _ = inspect.getframeinfo(previous_frame) # if path is in cwd, simplify it cwd = os.path.abspath(os.getcwd()) pathname = os.path.abspath(pathname) if os.path.commonprefix([cwd, pathname]) == cwd: pathname = os.path.relpath(pathname, cwd) except Exception: # pylint: disable=broad-except pathname = '<UNKNOWN-FILE>.py' lineno = 0 if _FORMATTER: # log could have not been initialized. _FORMATTER.pathname = pathname _FORMATTER.lineno = lineno logger = logging.getLogger(__package__) logger.debug(s.format(*args))
[ "def", "debug", "(", "s", ",", "*", "args", ")", ":", "# Get the path name and line number of the function which called us.", "previous_frame", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", "try", ":", "pathname", ",", "lineno", ",", "_", ",", "_", ",", "_", "=", "inspect", ".", "getframeinfo", "(", "previous_frame", ")", "# if path is in cwd, simplify it", "cwd", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "getcwd", "(", ")", ")", "pathname", "=", "os", ".", "path", ".", "abspath", "(", "pathname", ")", "if", "os", ".", "path", ".", "commonprefix", "(", "[", "cwd", ",", "pathname", "]", ")", "==", "cwd", ":", "pathname", "=", "os", ".", "path", ".", "relpath", "(", "pathname", ",", "cwd", ")", "except", "Exception", ":", "# pylint: disable=broad-except", "pathname", "=", "'<UNKNOWN-FILE>.py'", "lineno", "=", "0", "if", "_FORMATTER", ":", "# log could have not been initialized.", "_FORMATTER", ".", "pathname", "=", "pathname", "_FORMATTER", ".", "lineno", "=", "lineno", "logger", "=", "logging", ".", "getLogger", "(", "__package__", ")", "logger", ".", "debug", "(", "s", ".", "format", "(", "*", "args", ")", ")" ]
43.684211
12.263158
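Usage is str.format-style; the emitted record is attributed to the caller's file and line, assuming the package's logging and _FORMATTER setup has already run ::

    debug("processing item {} of {}", 3, 10)
    # -> logs "processing item 3 of 10" at DEBUG level, tagged with this file:line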
def U(Document, __raw__=None, **update):     """Generate a MongoDB update document through parameter interpolation.         Arguments passed by name have their name interpreted as an optional operation prefix (defaulting to `set`, e.g. `push`), a double-underscore separated field reference (e.g. `foo`, or `foo__bar`, or `foo__S__bar`, or `foo__27__bar`).     Because this utility is likely going to be used frequently it has been given a single-character name.     """          ops = Update(__raw__)          args = _process_arguments(Document, UPDATE_ALIASES, {}, update, UPDATE_PASSTHROUGH)          for operation, _, field, value in args:         if not operation:             operation = DEFAULT_UPDATE                  if isinstance(operation, tuple):             operation, cast = ('$' + operation[0]), operation[1]                          if cast in UPDATE_MAGIC:                 value = cast(value, field)             else:                 value = cast(value)                  if operation in ops and ~field in ops[operation] and isinstance(value, Mapping):             ops[operation][~field].update(value)             continue                  else:             operation = '$' + operation                  ops &= Update({operation: {~field: value}})          return ops
[ "def", "U", "(", "Document", ",", "__raw__", "=", "None", ",", "*", "*", "update", ")", ":", "ops", "=", "Update", "(", "__raw__", ")", "args", "=", "_process_arguments", "(", "Document", ",", "UPDATE_ALIASES", ",", "{", "}", ",", "update", ",", "UPDATE_PASSTHROUGH", ")", "for", "operation", ",", "_", ",", "field", ",", "value", "in", "args", ":", "if", "not", "operation", ":", "operation", "=", "DEFAULT_UPDATE", "if", "isinstance", "(", "operation", ",", "tuple", ")", ":", "operation", ",", "cast", "=", "(", "'$'", "+", "operation", "[", "0", "]", ")", ",", "operation", "[", "1", "]", "if", "cast", "in", "UPDATE_MAGIC", ":", "value", "=", "cast", "(", "value", ",", "field", ")", "else", ":", "value", "=", "cast", "(", "value", ")", "if", "operation", "in", "ops", "and", "~", "field", "in", "ops", "[", "operation", "]", "and", "isinstance", "(", "value", ",", "Mapping", ")", ":", "ops", "[", "operation", "]", "[", "~", "field", "]", ".", "update", "(", "value", ")", "continue", "else", ":", "operation", "=", "'$'", "+", "operation", "ops", "&=", "Update", "(", "{", "operation", ":", "{", "~", "field", ":", "value", "}", "}", ")", "return", "ops" ]
31.235294
26.558824
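The core of U() is splitting each keyword name into an operation prefix and a field path. A standalone sketch of that naming convention (split_update_key and KNOWN_OPS are hypothetical stand-ins, not the library's actual parser):

KNOWN_OPS = {'set', 'push', 'inc', 'unset'}  # assumption: a few common MongoDB operations

def split_update_key(name, default_op='set'):
    """Split e.g. 'push__foo__bar' into ('$push', 'foo.bar')."""
    parts = name.split('__')
    op = default_op
    if parts[0] in KNOWN_OPS:
        op = parts.pop(0)
    return '$' + op, '.'.join(parts)

print(split_update_key('push__tags'))    # ('$push', 'tags')
print(split_update_key('foo__27__bar'))  # ('$set', 'foo.27.bar')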
def shutdown(self):
    """Shutdown ZAP."""
    if not self.is_running():
        self.logger.warn('ZAP is not running.')
        return

    self.logger.debug('Shutting down ZAP.')
    self.zap.core.shutdown()

    timeout_time = time.time() + self.timeout
    while self.is_running():
        if time.time() > timeout_time:
            raise ZAPError('Timed out waiting for ZAP to shutdown.')
        time.sleep(2)

    self.logger.debug('ZAP shutdown successfully.')
[ "def", "shutdown", "(", "self", ")", ":", "if", "not", "self", ".", "is_running", "(", ")", ":", "self", ".", "logger", ".", "warn", "(", "'ZAP is not running.'", ")", "return", "self", ".", "logger", ".", "debug", "(", "'Shutting down ZAP.'", ")", "self", ".", "zap", ".", "core", ".", "shutdown", "(", ")", "timeout_time", "=", "time", ".", "time", "(", ")", "+", "self", ".", "timeout", "while", "self", ".", "is_running", "(", ")", ":", "if", "time", ".", "time", "(", ")", ">", "timeout_time", ":", "raise", "ZAPError", "(", "'Timed out waiting for ZAP to shutdown.'", ")", "time", ".", "sleep", "(", "2", ")", "self", ".", "logger", ".", "debug", "(", "'ZAP shutdown successfully.'", ")" ]
31.375
17.3125
def feed_fetch_force(request, id, redirect_to):
    """Forcibly fetch tweets for the feed"""
    feed = Feed.objects.get(id=id)
    feed.fetch(force=True)
    msg = _("Fetched tweets for %s" % feed.name)
    messages.success(request, msg, fail_silently=True)
    return HttpResponseRedirect(redirect_to)
[ "def", "feed_fetch_force", "(", "request", ",", "id", ",", "redirect_to", ")", ":", "feed", "=", "Feed", ".", "objects", ".", "get", "(", "id", "=", "id", ")", "feed", ".", "fetch", "(", "force", "=", "True", ")", "msg", "=", "_", "(", "\"Fetched tweets for %s\"", "%", "feed", ".", "name", ")", "messages", ".", "success", "(", "request", ",", "msg", ",", "fail_silently", "=", "True", ")", "return", "HttpResponseRedirect", "(", "redirect_to", ")" ]
42.428571
7.571429
def set_max_beds(self, max_beds):
    """
    Set the maximum number of beds.

    :param max_beds: the maximum number of beds as an integer
    :return:
    """
    if not isinstance(max_beds, int):
        raise DaftException("Maximum number of beds should be an integer.")
    self._max_beds = str(max_beds)
    self._query_params += str(QueryParam.MAX_BEDS) + self._max_beds
[ "def", "set_max_beds", "(", "self", ",", "max_beds", ")", ":", "if", "not", "isinstance", "(", "max_beds", ",", "int", ")", ":", "raise", "DaftException", "(", "\"Maximum number of beds should be an integer.\"", ")", "self", ".", "_max_beds", "=", "str", "(", "max_beds", ")", "self", ".", "_query_params", "+=", "str", "(", "QueryParam", ".", "MAX_BEDS", ")", "+", "self", ".", "_max_beds" ]
32.636364
15
def results(self):
    """
    Returns a list of (word, ratio) tuples, ordered by decreasing similarity.
    """
    d = dict()
    words = [word.strip() for word in self.haystack]
    if not words:
        raise NoResultException('No similar word found.')
    for w in words:
        d[w] = Levenshtein.ratio(self.needle, w)
    return sorted(d.items(), key=operator.itemgetter(1), reverse=True)
[ "def", "results", "(", "self", ")", ":", "d", "=", "dict", "(", ")", "words", "=", "[", "word", ".", "strip", "(", ")", "for", "word", "in", "self", ".", "haystack", "]", "if", "not", "words", ":", "raise", "NoResultException", "(", "'No similar word found.'", ")", "for", "w", "in", "words", ":", "d", "[", "w", "]", "=", "Levenshtein", ".", "ratio", "(", "self", ".", "needle", ",", "w", ")", "return", "sorted", "(", "d", ".", "items", "(", ")", ",", "key", "=", "operator", ".", "itemgetter", "(", "1", ")", ",", "reverse", "=", "True", ")" ]
28.571429
21.285714
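The same ranking can be reproduced with only the standard library by swapping Levenshtein.ratio for difflib.SequenceMatcher (a similar, though not identical, similarity measure); a minimal sketch:

import operator
from difflib import SequenceMatcher

needle = 'spam'
haystack = ['spam', 'spa', 'eggs']

# Map each candidate word to its similarity ratio against the needle.
d = {w.strip(): SequenceMatcher(None, needle, w.strip()).ratio() for w in haystack}
# Highest similarity first, e.g. [('spam', 1.0), ('spa', ...), ('eggs', ...)]
print(sorted(d.items(), key=operator.itemgetter(1), reverse=True))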
def render_headers(self):
    """
    Renders the headers for this request field.
    """
    lines = []

    sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
    for sort_key in sort_keys:
        if self.headers.get(sort_key, False):
            lines.append('%s: %s' % (sort_key, self.headers[sort_key]))

    for header_name, header_value in self.headers.items():
        if header_name not in sort_keys:
            if header_value:
                lines.append('%s: %s' % (header_name, header_value))

    lines.append('\r\n')
    return '\r\n'.join(lines)
[ "def", "render_headers", "(", "self", ")", ":", "lines", "=", "[", "]", "sort_keys", "=", "[", "'Content-Disposition'", ",", "'Content-Type'", ",", "'Content-Location'", "]", "for", "sort_key", "in", "sort_keys", ":", "if", "self", ".", "headers", ".", "get", "(", "sort_key", ",", "False", ")", ":", "lines", ".", "append", "(", "'%s: %s'", "%", "(", "sort_key", ",", "self", ".", "headers", "[", "sort_key", "]", ")", ")", "for", "header_name", ",", "header_value", "in", "self", ".", "headers", ".", "items", "(", ")", ":", "if", "header_name", "not", "in", "sort_keys", ":", "if", "header_value", ":", "lines", ".", "append", "(", "'%s: %s'", "%", "(", "header_name", ",", "header_value", ")", ")", "lines", ".", "append", "(", "'\\r\\n'", ")", "return", "'\\r\\n'", ".", "join", "(", "lines", ")" ]
34.666667
19
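A small driver showing the ordering behaviour (well-known headers first, then the rest); FieldStub is a hypothetical stand-in carrying only the attribute the method needs, and render_headers is assumed importable as defined above:

class FieldStub(object):
    """Stand-in object exposing just a headers dict."""
    def __init__(self, headers):
        self.headers = headers

    render_headers = render_headers  # reuse the function defined above as a method

stub = FieldStub({'X-Custom': '1', 'Content-Type': 'text/plain'})
# 'Content-Type' is emitted before 'X-Custom', then the blank-line terminator.
print(repr(stub.render_headers()))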
def to_grayscale(img):
    """Convert PIL image to numpy grayscale array and numpy alpha array.

    Args:
        img (PIL.Image): PIL Image object.

    Returns:
        (gray, alpha): both numpy arrays.
    """
    gray = numpy.asarray(ImageOps.grayscale(img)).astype(numpy.float)

    imbands = img.getbands()
    alpha = None
    if 'A' in imbands:
        alpha = numpy.asarray(img.split()[-1]).astype(numpy.float)

    return gray, alpha
[ "def", "to_grayscale", "(", "img", ")", ":", "gray", "=", "numpy", ".", "asarray", "(", "ImageOps", ".", "grayscale", "(", "img", ")", ")", ".", "astype", "(", "numpy", ".", "float", ")", "imbands", "=", "img", ".", "getbands", "(", ")", "alpha", "=", "None", "if", "'A'", "in", "imbands", ":", "alpha", "=", "numpy", ".", "asarray", "(", "img", ".", "split", "(", ")", "[", "-", "1", "]", ")", ".", "astype", "(", "numpy", ".", "float", ")", "return", "gray", ",", "alpha" ]
24.941176
21.470588
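A quick usage sketch, assuming Pillow is installed and to_grayscale is importable as defined above; note the function uses numpy.float, which was removed in NumPy 1.24, so an older NumPy is assumed:

from PIL import Image

# Build a tiny RGBA image in memory so the example needs no file on disk.
img = Image.new('RGBA', (4, 4), (255, 0, 0, 128))
gray, alpha = to_grayscale(img)
print(gray.shape)   # (4, 4)
print(alpha.shape)  # (4, 4), present because the image has an alpha band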
def to_structure(matrix, alpha=1):
    """Compute best matching 3D genome structure from underlying input matrix
    using ShRec3D-derived method from Lesne et al., 2014.

    Link: https://www.ncbi.nlm.nih.gov/pubmed/25240436

    The method performs two steps: first compute distance matrix by treating
    contact data as an adjacency graph (of weights equal to a power law
    function of the data), then embed the resulting distance matrix into
    3D space.

    The alpha parameter influences the weighting of contacts: if alpha < 1
    long-range interactions are prioritized; if alpha >> 1 short-range
    interactions have more weight when computing the distance matrix.
    """

    connected = largest_connected_component(matrix)
    distances = to_distance(connected, alpha)
    n, m = connected.shape
    bary = np.sum(np.triu(distances, 1)) / (n**2)  # barycenters
    d = np.array(np.sum(distances**2, 0) / n - bary)  # distances to origin
    gram = np.array(
        [(d[i] + d[j] - distances[i][j]**2) / 2
         for i, j in itertools.product(range(n), range(m))]
    ).reshape(n, m)
    normalized = gram / np.linalg.norm(gram, 'fro')

    try:
        symmetric = np.array(
            (normalized + normalized.T) / 2, dtype=np.longfloat
        )  # just in case
    except AttributeError:
        symmetric = np.array((normalized + normalized.T) / 2)

    from scipy import linalg

    eigen_values, eigen_vectors = linalg.eigh(symmetric)
    if not (eigen_values >= 0).all():
        warnings.warn("Negative eigen values were found.")
    idx = eigen_values.argsort()[-3:][::-1]
    values = eigen_values[idx]
    vectors = eigen_vectors[:, idx]
    coordinates = vectors * np.sqrt(values)
    return coordinates
[ "def", "to_structure", "(", "matrix", ",", "alpha", "=", "1", ")", ":", "connected", "=", "largest_connected_component", "(", "matrix", ")", "distances", "=", "to_distance", "(", "connected", ",", "alpha", ")", "n", ",", "m", "=", "connected", ".", "shape", "bary", "=", "np", ".", "sum", "(", "np", ".", "triu", "(", "distances", ",", "1", ")", ")", "/", "(", "n", "**", "2", ")", "# barycenters", "d", "=", "np", ".", "array", "(", "np", ".", "sum", "(", "distances", "**", "2", ",", "0", ")", "/", "n", "-", "bary", ")", "# distances to origin", "gram", "=", "np", ".", "array", "(", "[", "(", "d", "[", "i", "]", "+", "d", "[", "j", "]", "-", "distances", "[", "i", "]", "[", "j", "]", "**", "2", ")", "/", "2", "for", "i", ",", "j", "in", "itertools", ".", "product", "(", "range", "(", "n", ")", ",", "range", "(", "m", ")", ")", "]", ")", ".", "reshape", "(", "n", ",", "m", ")", "normalized", "=", "gram", "/", "np", ".", "linalg", ".", "norm", "(", "gram", ",", "'fro'", ")", "try", ":", "symmetric", "=", "np", ".", "array", "(", "(", "normalized", "+", "normalized", ".", "T", ")", "/", "2", ",", "dtype", "=", "np", ".", "longfloat", ")", "# just in case", "except", "AttributeError", ":", "symmetric", "=", "np", ".", "array", "(", "(", "normalized", "+", "normalized", ".", "T", ")", "/", "2", ")", "from", "scipy", "import", "linalg", "eigen_values", ",", "eigen_vectors", "=", "linalg", ".", "eigh", "(", "symmetric", ")", "if", "not", "(", "eigen_values", ">=", "0", ")", ".", "all", "(", ")", ":", "warnings", ".", "warn", "(", "\"Negative eigen values were found.\"", ")", "idx", "=", "eigen_values", ".", "argsort", "(", ")", "[", "-", "3", ":", "]", "[", ":", ":", "-", "1", "]", "values", "=", "eigen_values", "[", "idx", "]", "vectors", "=", "eigen_vectors", "[", ":", ",", "idx", "]", "coordinates", "=", "vectors", "*", "np", ".", "sqrt", "(", "values", ")", "return", "coordinates" ]
42.6
21.55
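The embedding step above is essentially classical multidimensional scaling: build a Gram matrix from the distances, then keep the top three eigenpairs. A self-contained sketch of just that step with numpy/scipy, on synthetic data (the module-specific helpers largest_connected_component and to_distance are not reproduced here, and the centering follows the variant used in the function):

import numpy as np
from scipy import linalg

rng = np.random.default_rng(0)
pts = rng.normal(size=(20, 3))  # hidden ground-truth coordinates
distances = np.linalg.norm(pts[:, None] - pts[None, :], axis=-1)

n = distances.shape[0]
bary = np.sum(np.triu(distances, 1)) / (n ** 2)
d = np.sum(distances ** 2, 0) / n - bary
gram = (d[:, None] + d[None, :] - distances ** 2) / 2  # vectorized Gram matrix

eigen_values, eigen_vectors = linalg.eigh(gram)
idx = eigen_values.argsort()[-3:][::-1]  # top three eigenpairs
coords = eigen_vectors[:, idx] * np.sqrt(eigen_values[idx])
print(coords.shape)  # (20, 3): coordinates recovered up to rotation/reflection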
def format_config_for_graphql(config): '''This recursive descent thing formats a config dict for GraphQL.''' def _format_config_subdict(config, current_indent=0): check.dict_param(config, 'config', key_type=str) printer = IndentingStringIoPrinter(indent_level=2, current_indent=current_indent) printer.line('{') n_elements = len(config) for i, key in enumerate(sorted(config, key=lambda x: x[0])): value = config[key] with printer.with_indent(): formatted_value = ( _format_config_item(value, current_indent=printer.current_indent) .lstrip(' ') .rstrip('\n') ) printer.line( '{key}: {formatted_value}{comma}'.format( key=key, formatted_value=formatted_value, comma=',' if i != n_elements - 1 else '', ) ) printer.line('}') return printer.read() def _format_config_sublist(config, current_indent=0): printer = IndentingStringIoPrinter(indent_level=2, current_indent=current_indent) printer.line('[') n_elements = len(config) for i, value in enumerate(config): with printer.with_indent(): formatted_value = ( _format_config_item(value, current_indent=printer.current_indent) .lstrip(' ') .rstrip('\n') ) printer.line( '{formatted_value}{comma}'.format( formatted_value=formatted_value, comma=',' if i != n_elements - 1 else '' ) ) printer.line(']') return printer.read() def _format_config_item(config, current_indent=0): printer = IndentingStringIoPrinter(indent_level=2, current_indent=current_indent) if isinstance(config, dict): return _format_config_subdict(config, printer.current_indent) elif isinstance(config, list): return _format_config_sublist(config, printer.current_indent) elif isinstance(config, bool): return repr(config).lower() else: return repr(config).replace('\'', '"') check.dict_param(config, 'config', key_type=str) if not isinstance(config, dict): check.failed('Expected a dict to format as config, got: {item}'.format(item=repr(config))) return _format_config_subdict(config)
[ "def", "format_config_for_graphql", "(", "config", ")", ":", "def", "_format_config_subdict", "(", "config", ",", "current_indent", "=", "0", ")", ":", "check", ".", "dict_param", "(", "config", ",", "'config'", ",", "key_type", "=", "str", ")", "printer", "=", "IndentingStringIoPrinter", "(", "indent_level", "=", "2", ",", "current_indent", "=", "current_indent", ")", "printer", ".", "line", "(", "'{'", ")", "n_elements", "=", "len", "(", "config", ")", "for", "i", ",", "key", "in", "enumerate", "(", "sorted", "(", "config", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", ")", ":", "value", "=", "config", "[", "key", "]", "with", "printer", ".", "with_indent", "(", ")", ":", "formatted_value", "=", "(", "_format_config_item", "(", "value", ",", "current_indent", "=", "printer", ".", "current_indent", ")", ".", "lstrip", "(", "' '", ")", ".", "rstrip", "(", "'\\n'", ")", ")", "printer", ".", "line", "(", "'{key}: {formatted_value}{comma}'", ".", "format", "(", "key", "=", "key", ",", "formatted_value", "=", "formatted_value", ",", "comma", "=", "','", "if", "i", "!=", "n_elements", "-", "1", "else", "''", ",", ")", ")", "printer", ".", "line", "(", "'}'", ")", "return", "printer", ".", "read", "(", ")", "def", "_format_config_sublist", "(", "config", ",", "current_indent", "=", "0", ")", ":", "printer", "=", "IndentingStringIoPrinter", "(", "indent_level", "=", "2", ",", "current_indent", "=", "current_indent", ")", "printer", ".", "line", "(", "'['", ")", "n_elements", "=", "len", "(", "config", ")", "for", "i", ",", "value", "in", "enumerate", "(", "config", ")", ":", "with", "printer", ".", "with_indent", "(", ")", ":", "formatted_value", "=", "(", "_format_config_item", "(", "value", ",", "current_indent", "=", "printer", ".", "current_indent", ")", ".", "lstrip", "(", "' '", ")", ".", "rstrip", "(", "'\\n'", ")", ")", "printer", ".", "line", "(", "'{formatted_value}{comma}'", ".", "format", "(", "formatted_value", "=", "formatted_value", ",", "comma", "=", "','", "if", "i", "!=", "n_elements", "-", "1", "else", "''", ")", ")", "printer", ".", "line", "(", "']'", ")", "return", "printer", ".", "read", "(", ")", "def", "_format_config_item", "(", "config", ",", "current_indent", "=", "0", ")", ":", "printer", "=", "IndentingStringIoPrinter", "(", "indent_level", "=", "2", ",", "current_indent", "=", "current_indent", ")", "if", "isinstance", "(", "config", ",", "dict", ")", ":", "return", "_format_config_subdict", "(", "config", ",", "printer", ".", "current_indent", ")", "elif", "isinstance", "(", "config", ",", "list", ")", ":", "return", "_format_config_sublist", "(", "config", ",", "printer", ".", "current_indent", ")", "elif", "isinstance", "(", "config", ",", "bool", ")", ":", "return", "repr", "(", "config", ")", ".", "lower", "(", ")", "else", ":", "return", "repr", "(", "config", ")", ".", "replace", "(", "'\\''", ",", "'\"'", ")", "check", ".", "dict_param", "(", "config", ",", "'config'", ",", "key_type", "=", "str", ")", "if", "not", "isinstance", "(", "config", ",", "dict", ")", ":", "check", ".", "failed", "(", "'Expected a dict to format as config, got: {item}'", ".", "format", "(", "item", "=", "repr", "(", "config", ")", ")", ")", "return", "_format_config_subdict", "(", "config", ")" ]
37.80597
21.41791
def plot_reliability_diagram(confidence, labels, filepath): """ Takes in confidence values for predictions and correct labels for the data, plots a reliability diagram. :param confidence: nb_samples x nb_classes (e.g., output of softmax) :param labels: vector of nb_samples :param filepath: where to save the diagram :return: """ assert len(confidence.shape) == 2 assert len(labels.shape) == 1 assert confidence.shape[0] == labels.shape[0] print('Saving reliability diagram at: ' + str(filepath)) if confidence.max() <= 1.: # confidence array is output of softmax bins_start = [b / 10. for b in xrange(0, 10)] bins_end = [b / 10. for b in xrange(1, 11)] bins_center = [(b + .5) / 10. for b in xrange(0, 10)] preds_conf = np.max(confidence, axis=1) preds_l = np.argmax(confidence, axis=1) else: raise ValueError('Confidence values go above 1.') print(preds_conf.shape, preds_l.shape) # Create var for reliability diagram # Will contain mean accuracies for each bin reliability_diag = [] num_points = [] # keeps the number of points in each bar # Find average accuracy per confidence bin for bin_start, bin_end in zip(bins_start, bins_end): above = preds_conf >= bin_start if bin_end == 1.: below = preds_conf <= bin_end else: below = preds_conf < bin_end mask = np.multiply(above, below) num_points.append(np.sum(mask)) bin_mean_acc = max(0, np.mean(preds_l[mask] == labels[mask])) reliability_diag.append(bin_mean_acc) # Plot diagram assert len(reliability_diag) == len(bins_center) print(reliability_diag) print(bins_center) print(num_points) fig, ax1 = plt.subplots() _ = ax1.bar(bins_center, reliability_diag, width=.1, alpha=0.8) plt.xlim([0, 1.]) ax1.set_ylim([0, 1.]) ax2 = ax1.twinx() print(sum(num_points)) ax2.plot(bins_center, num_points, color='r', linestyle='-', linewidth=7.0) ax2.set_ylabel('Number of points in the data', fontsize=16, color='r') if len(np.argwhere(confidence[0] != 0.)) == 1: # This is a DkNN diagram ax1.set_xlabel('Prediction Credibility', fontsize=16) else: # This is a softmax diagram ax1.set_xlabel('Prediction Confidence', fontsize=16) ax1.set_ylabel('Prediction Accuracy', fontsize=16) ax1.tick_params(axis='both', labelsize=14) ax2.tick_params(axis='both', labelsize=14, colors='r') fig.tight_layout() plt.savefig(filepath, bbox_inches='tight')
[ "def", "plot_reliability_diagram", "(", "confidence", ",", "labels", ",", "filepath", ")", ":", "assert", "len", "(", "confidence", ".", "shape", ")", "==", "2", "assert", "len", "(", "labels", ".", "shape", ")", "==", "1", "assert", "confidence", ".", "shape", "[", "0", "]", "==", "labels", ".", "shape", "[", "0", "]", "print", "(", "'Saving reliability diagram at: '", "+", "str", "(", "filepath", ")", ")", "if", "confidence", ".", "max", "(", ")", "<=", "1.", ":", "# confidence array is output of softmax", "bins_start", "=", "[", "b", "/", "10.", "for", "b", "in", "xrange", "(", "0", ",", "10", ")", "]", "bins_end", "=", "[", "b", "/", "10.", "for", "b", "in", "xrange", "(", "1", ",", "11", ")", "]", "bins_center", "=", "[", "(", "b", "+", ".5", ")", "/", "10.", "for", "b", "in", "xrange", "(", "0", ",", "10", ")", "]", "preds_conf", "=", "np", ".", "max", "(", "confidence", ",", "axis", "=", "1", ")", "preds_l", "=", "np", ".", "argmax", "(", "confidence", ",", "axis", "=", "1", ")", "else", ":", "raise", "ValueError", "(", "'Confidence values go above 1.'", ")", "print", "(", "preds_conf", ".", "shape", ",", "preds_l", ".", "shape", ")", "# Create var for reliability diagram", "# Will contain mean accuracies for each bin", "reliability_diag", "=", "[", "]", "num_points", "=", "[", "]", "# keeps the number of points in each bar", "# Find average accuracy per confidence bin", "for", "bin_start", ",", "bin_end", "in", "zip", "(", "bins_start", ",", "bins_end", ")", ":", "above", "=", "preds_conf", ">=", "bin_start", "if", "bin_end", "==", "1.", ":", "below", "=", "preds_conf", "<=", "bin_end", "else", ":", "below", "=", "preds_conf", "<", "bin_end", "mask", "=", "np", ".", "multiply", "(", "above", ",", "below", ")", "num_points", ".", "append", "(", "np", ".", "sum", "(", "mask", ")", ")", "bin_mean_acc", "=", "max", "(", "0", ",", "np", ".", "mean", "(", "preds_l", "[", "mask", "]", "==", "labels", "[", "mask", "]", ")", ")", "reliability_diag", ".", "append", "(", "bin_mean_acc", ")", "# Plot diagram", "assert", "len", "(", "reliability_diag", ")", "==", "len", "(", "bins_center", ")", "print", "(", "reliability_diag", ")", "print", "(", "bins_center", ")", "print", "(", "num_points", ")", "fig", ",", "ax1", "=", "plt", ".", "subplots", "(", ")", "_", "=", "ax1", ".", "bar", "(", "bins_center", ",", "reliability_diag", ",", "width", "=", ".1", ",", "alpha", "=", "0.8", ")", "plt", ".", "xlim", "(", "[", "0", ",", "1.", "]", ")", "ax1", ".", "set_ylim", "(", "[", "0", ",", "1.", "]", ")", "ax2", "=", "ax1", ".", "twinx", "(", ")", "print", "(", "sum", "(", "num_points", ")", ")", "ax2", ".", "plot", "(", "bins_center", ",", "num_points", ",", "color", "=", "'r'", ",", "linestyle", "=", "'-'", ",", "linewidth", "=", "7.0", ")", "ax2", ".", "set_ylabel", "(", "'Number of points in the data'", ",", "fontsize", "=", "16", ",", "color", "=", "'r'", ")", "if", "len", "(", "np", ".", "argwhere", "(", "confidence", "[", "0", "]", "!=", "0.", ")", ")", "==", "1", ":", "# This is a DkNN diagram", "ax1", ".", "set_xlabel", "(", "'Prediction Credibility'", ",", "fontsize", "=", "16", ")", "else", ":", "# This is a softmax diagram", "ax1", ".", "set_xlabel", "(", "'Prediction Confidence'", ",", "fontsize", "=", "16", ")", "ax1", ".", "set_ylabel", "(", "'Prediction Accuracy'", ",", "fontsize", "=", "16", ")", "ax1", ".", "tick_params", "(", "axis", "=", "'both'", ",", "labelsize", "=", "14", ")", "ax2", ".", "tick_params", "(", "axis", "=", "'both'", ",", "labelsize", "=", "14", ",", "colors", 
"=", "'r'", ")", "fig", ".", "tight_layout", "(", ")", "plt", ".", "savefig", "(", "filepath", ",", "bbox_inches", "=", "'tight'", ")" ]
35.205882
15.882353
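The binning logic is the heart of the diagram and can be exercised without matplotlib; a minimal sketch on synthetic data, assuming Python 3 (range instead of the xrange used above):

import numpy as np

rng = np.random.default_rng(1)
confidence = rng.dirichlet(np.ones(10), size=500)  # stand-in softmax outputs
labels = rng.integers(0, 10, size=500)

preds_conf = np.max(confidence, axis=1)
preds_l = np.argmax(confidence, axis=1)

for b in range(10):
    lo, hi = b / 10., (b + 1) / 10.
    # The last bin is closed on the right, matching the function above.
    below = preds_conf <= hi if hi == 1. else preds_conf < hi
    mask = (preds_conf >= lo) & below
    acc = np.mean(preds_l[mask] == labels[mask]) if mask.any() else 0.
    print('bin [%.1f, %.1f]: n=%3d, accuracy=%.3f' % (lo, hi, mask.sum(), acc))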
def _WritePathInfo(self, client_id, path_info):
    """Writes a single path info record for given client."""
    if client_id not in self.metadatas:
        raise db.UnknownClientError(client_id)

    path_record = self._GetPathRecord(client_id, path_info)
    path_record.AddPathInfo(path_info)

    parent_path_info = path_info.GetParent()
    if parent_path_info is not None:
        parent_path_record = self._GetPathRecord(client_id, parent_path_info)
        parent_path_record.AddChild(path_info)
[ "def", "_WritePathInfo", "(", "self", ",", "client_id", ",", "path_info", ")", ":", "if", "client_id", "not", "in", "self", ".", "metadatas", ":", "raise", "db", ".", "UnknownClientError", "(", "client_id", ")", "path_record", "=", "self", ".", "_GetPathRecord", "(", "client_id", ",", "path_info", ")", "path_record", ".", "AddPathInfo", "(", "path_info", ")", "parent_path_info", "=", "path_info", ".", "GetParent", "(", ")", "if", "parent_path_info", "is", "not", "None", ":", "parent_path_record", "=", "self", ".", "_GetPathRecord", "(", "client_id", ",", "parent_path_info", ")", "parent_path_record", ".", "AddChild", "(", "path_info", ")" ]
40.5
13.333333
def methodReturnReceived(self, mret):
    """
    Called when a method return message is received
    """
    d, timeout = self._pendingCalls.get(mret.reply_serial, (None, None))
    if timeout:
        timeout.cancel()
    if d:
        del self._pendingCalls[mret.reply_serial]
        d.callback(mret)
[ "def", "methodReturnReceived", "(", "self", ",", "mret", ")", ":", "d", ",", "timeout", "=", "self", ".", "_pendingCalls", ".", "get", "(", "mret", ".", "reply_serial", ",", "(", "None", ",", "None", ")", ")", "if", "timeout", ":", "timeout", ".", "cancel", "(", ")", "if", "d", ":", "del", "self", ".", "_pendingCalls", "[", "mret", ".", "reply_serial", "]", "d", ".", "callback", "(", "mret", ")" ]
33.1
13.9
def _extract_columns(self, table_name): ''' a method to extract the column properties of an existing table ''' import re from sqlalchemy import MetaData, VARCHAR, INTEGER, BLOB, BOOLEAN, FLOAT from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, BIT, BYTEA # retrieve list of tables metadata_object = MetaData() table_list = self.engine.table_names() # determine columns prior_columns = {} if table_name in table_list: metadata_object.reflect(self.engine) existing_table = metadata_object.tables[table_name] for column in existing_table.columns: column_type = None column_length = None if column.type.__class__ == FLOAT().__class__: column_type = 'float' elif column.type.__class__ == DOUBLE_PRECISION().__class__: # Postgres column_type = 'float' elif column.type.__class__ == INTEGER().__class__: column_type = 'integer' elif column.type.__class__ == VARCHAR().__class__: column_length = getattr(column.type, 'length', None) if column_length == 1: if column.primary_key: column_length = None column_type = 'string' elif column.type.__class__ == BLOB().__class__: column_type = 'list' elif column.type.__class__ in (BIT().__class__, BYTEA().__class__): column_type = 'list' elif column.type.__class__ == BOOLEAN().__class__: column_type = 'boolean' prior_columns[column.key] = (column.key, column_type, '', column_length) return prior_columns
[ "def", "_extract_columns", "(", "self", ",", "table_name", ")", ":", "import", "re", "from", "sqlalchemy", "import", "MetaData", ",", "VARCHAR", ",", "INTEGER", ",", "BLOB", ",", "BOOLEAN", ",", "FLOAT", "from", "sqlalchemy", ".", "dialects", ".", "postgresql", "import", "DOUBLE_PRECISION", ",", "BIT", ",", "BYTEA", "# retrieve list of tables", "metadata_object", "=", "MetaData", "(", ")", "table_list", "=", "self", ".", "engine", ".", "table_names", "(", ")", "# determine columns", "prior_columns", "=", "{", "}", "if", "table_name", "in", "table_list", ":", "metadata_object", ".", "reflect", "(", "self", ".", "engine", ")", "existing_table", "=", "metadata_object", ".", "tables", "[", "table_name", "]", "for", "column", "in", "existing_table", ".", "columns", ":", "column_type", "=", "None", "column_length", "=", "None", "if", "column", ".", "type", ".", "__class__", "==", "FLOAT", "(", ")", ".", "__class__", ":", "column_type", "=", "'float'", "elif", "column", ".", "type", ".", "__class__", "==", "DOUBLE_PRECISION", "(", ")", ".", "__class__", ":", "# Postgres", "column_type", "=", "'float'", "elif", "column", ".", "type", ".", "__class__", "==", "INTEGER", "(", ")", ".", "__class__", ":", "column_type", "=", "'integer'", "elif", "column", ".", "type", ".", "__class__", "==", "VARCHAR", "(", ")", ".", "__class__", ":", "column_length", "=", "getattr", "(", "column", ".", "type", ",", "'length'", ",", "None", ")", "if", "column_length", "==", "1", ":", "if", "column", ".", "primary_key", ":", "column_length", "=", "None", "column_type", "=", "'string'", "elif", "column", ".", "type", ".", "__class__", "==", "BLOB", "(", ")", ".", "__class__", ":", "column_type", "=", "'list'", "elif", "column", ".", "type", ".", "__class__", "in", "(", "BIT", "(", ")", ".", "__class__", ",", "BYTEA", "(", ")", ".", "__class__", ")", ":", "column_type", "=", "'list'", "elif", "column", ".", "type", ".", "__class__", "==", "BOOLEAN", "(", ")", ".", "__class__", ":", "column_type", "=", "'boolean'", "prior_columns", "[", "column", ".", "key", "]", "=", "(", "column", ".", "key", ",", "column_type", ",", "''", ",", "column_length", ")", "return", "prior_columns" ]
45.268293
18.146341
def verify_response_duration(self, expected=None, zero=0, threshold_percent=0, break_in_fail=True):
    """
    Verify that response duration is in bounds.

    :param expected: expected duration in seconds
    :param zero: offset in seconds subtracted from the measured duration
                 before calculating the error rate
    :param threshold_percent: allowed error in percent
    :param break_in_fail: boolean, True if raise TestStepFail when out of bounds
    :return: (duration, expected duration, error)
    """
    was = self.timedelta - zero
    error = abs(was / expected) * 100.0 - 100.0 if expected > 0 else 0
    msg = "should: %.3f, was: %.3f, error: %.3f %%" % (expected, was, error)
    self.logger.debug(msg)
    if abs(error) > threshold_percent:
        msg = "Thread::wait error(%.2f %%) was out of bounds (%.2f %%)" \
              % (error, threshold_percent)
        self.logger.debug(msg)
        if break_in_fail:
            raise TestStepFail(msg)
    return was, expected, error
[ "def", "verify_response_duration", "(", "self", ",", "expected", "=", "None", ",", "zero", "=", "0", ",", "threshold_percent", "=", "0", ",", "break_in_fail", "=", "True", ")", ":", "was", "=", "self", ".", "timedelta", "-", "zero", "error", "=", "abs", "(", "was", "/", "expected", ")", "*", "100.0", "-", "100.0", "if", "expected", ">", "0", "else", "0", "msg", "=", "\"should: %.3f, was: %.3f, error: %.3f %%\"", "%", "(", "expected", ",", "was", ",", "error", ")", "self", ".", "logger", ".", "debug", "(", "msg", ")", "if", "abs", "(", "error", ")", ">", "threshold_percent", ":", "msg", "=", "\"Thread::wait error(%.2f %%) was out of bounds (%.2f %%)\"", "%", "(", "error", ",", "threshold_percent", ")", "self", ".", "logger", ".", "debug", "(", "msg", ")", "if", "break_in_fail", ":", "raise", "TestStepFail", "(", "msg", ")", "return", "was", ",", "expected", ",", "error" ]
48.272727
18
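A worked example of the error formula: with zero=0, expected=2.0 and a measured duration of 2.1 s, error = |2.1 / 2.0| * 100 - 100 = 5.0 %, so any threshold_percent below 5 would raise TestStepFail. In plain Python:

expected = 2.0
was = 2.1 - 0  # measured duration minus the `zero` offset
error = abs(was / expected) * 100.0 - 100.0
print('%.1f %%' % error)  # 5.0 %
assert error > 4          # e.g. threshold_percent=4 would fail the step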
def isValue(self):
    """Indicate that |ASN.1| object represents ASN.1 value.

    If *isValue* is `False` then this object represents just ASN.1 schema.

    If *isValue* is `True` then, in addition to its ASN.1 schema features,
    this object can also be used like a Python built-in object (e.g. `int`,
    `str`, `dict` etc.).

    Returns
    -------
    : :class:`bool`
        :class:`False` if object represents just ASN.1 schema.
        :class:`True` if object represents ASN.1 schema and can be used as a normal value.

    Note
    ----
    There is an important distinction between PyASN1 schema and value objects.
    The PyASN1 schema objects can only participate in ASN.1 schema-related
    operations (e.g. defining or testing the structure of the data). The most
    obvious use of ASN.1 schema is to guide serialisation codecs whilst
    encoding/decoding serialised ASN.1 contents.

    The PyASN1 value objects can **additionally** participate in many operations
    involving regular Python objects (e.g. arithmetic, comprehension etc).
    """
    for componentValue in self._componentValues:
        if componentValue is noValue or not componentValue.isValue:
            return False

    return True
[ "def", "isValue", "(", "self", ")", ":", "for", "componentValue", "in", "self", ".", "_componentValues", ":", "if", "componentValue", "is", "noValue", "or", "not", "componentValue", ".", "isValue", ":", "return", "False", "return", "True" ]
41.580645
29.516129
def download(supported_tags, date_array, tag, sat_id,
             ftp_site='cdaweb.gsfc.nasa.gov',
             data_path=None, user=None, password=None,
             fake_daily_files_from_monthly=False):
    """Routine to download NASA CDAWeb CDF data.

    This routine is intended to be used by pysat instrument modules supporting
    a particular NASA CDAWeb dataset.

    Parameters
    ----------
    supported_tags : dict
        dict of dicts. Keys are supported tag names for download. Value is
        a dict with 'dir', 'remote_fname', 'local_fname'. Intended to be
        pre-set with functools.partial then assigned to new instrument code.
    date_array : array_like
        Array of datetimes to download data for. Provided by pysat.
    tag : (str or NoneType)
        tag or None (default=None)
    sat_id : (str or NoneType)
        satellite id or None (default=None)
    data_path : (string or NoneType)
        Path to data directory. If None is specified, the value previously
        set in Instrument.files.data_path is used. (default=None)
    user : (string or NoneType)
        Username to be passed along to resource with relevant data.
        (default=None)
    password : (string or NoneType)
        User password to be passed along to resource with relevant data.
        (default=None)
    fake_daily_files_from_monthly : bool
        Some CDAWeb instrument data files are stored by month. This flag,
        when true, accommodates this reality with user feedback on a monthly
        time frame.

    Returns
    -------
    Void : (NoneType)
        Downloads data to disk.

    Examples
    --------
    ::

        # download support added to cnofs_vefi.py using code below
        rn = '{year:4d}/cnofs_vefi_bfield_1sec_{year:4d}{month:02d}{day:02d}_v05.cdf'
        ln = 'cnofs_vefi_bfield_1sec_{year:4d}{month:02d}{day:02d}_v05.cdf'
        dc_b_tag = {'dir': '/pub/data/cnofs/vefi/bfield_1sec',
                    'remote_fname': rn,
                    'local_fname': ln}
        supported_tags = {'dc_b': dc_b_tag}

        download = functools.partial(nasa_cdaweb_methods.download,
                                     supported_tags=supported_tags)

    """

    import os
    import ftplib

    # connect to CDAWeb default port
    ftp = ftplib.FTP(ftp_site)
    # user anonymous, passwd anonymous@
    ftp.login()

    try:
        ftp_dict = supported_tags[tag]
    except KeyError:
        raise ValueError('Tag name unknown.')

    # path to relevant file on CDAWeb
    ftp.cwd(ftp_dict['dir'])

    # naming scheme for files on the CDAWeb server
    remote_fname = ftp_dict['remote_fname']
    # naming scheme for local files, should be closely related
    # to CDAWeb scheme, though directory structures may be reduced
    # if desired
    local_fname = ftp_dict['local_fname']

    for date in date_array:
        # format files for specific dates and download location
        formatted_remote_fname = remote_fname.format(year=date.year,
                                                     month=date.month,
                                                     day=date.day)
        formatted_local_fname = local_fname.format(year=date.year,
                                                   month=date.month,
                                                   day=date.day)
        saved_local_fname = os.path.join(data_path, formatted_local_fname)

        # perform download
        try:
            print('Attempting to download file for ' + date.strftime('%x'))
            sys.stdout.flush()
            ftp.retrbinary('RETR ' + formatted_remote_fname,
                           open(saved_local_fname, 'wb').write)
            print('Finished.')
        except ftplib.error_perm as exception:
            # if exception[0][0:3] != '550':
            if str(exception.args[0]).split(" ", 1)[0] != '550':
                raise
            else:
                os.remove(saved_local_fname)
                print('File not available for ' + date.strftime('%x'))
    ftp.close()
[ "def", "download", "(", "supported_tags", ",", "date_array", ",", "tag", ",", "sat_id", ",", "ftp_site", "=", "'cdaweb.gsfc.nasa.gov'", ",", "data_path", "=", "None", ",", "user", "=", "None", ",", "password", "=", "None", ",", "fake_daily_files_from_monthly", "=", "False", ")", ":", "import", "os", "import", "ftplib", "# connect to CDAWeb default port", "ftp", "=", "ftplib", ".", "FTP", "(", "ftp_site", ")", "# user anonymous, passwd anonymous@", "ftp", ".", "login", "(", ")", "try", ":", "ftp_dict", "=", "supported_tags", "[", "tag", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "'Tag name unknown.'", ")", "# path to relevant file on CDAWeb", "ftp", ".", "cwd", "(", "ftp_dict", "[", "'dir'", "]", ")", "# naming scheme for files on the CDAWeb server", "remote_fname", "=", "ftp_dict", "[", "'remote_fname'", "]", "# naming scheme for local files, should be closely related", "# to CDAWeb scheme, though directory structures may be reduced", "# if desired", "local_fname", "=", "ftp_dict", "[", "'local_fname'", "]", "for", "date", "in", "date_array", ":", "# format files for specific dates and download location", "formatted_remote_fname", "=", "remote_fname", ".", "format", "(", "year", "=", "date", ".", "year", ",", "month", "=", "date", ".", "month", ",", "day", "=", "date", ".", "day", ")", "formatted_local_fname", "=", "local_fname", ".", "format", "(", "year", "=", "date", ".", "year", ",", "month", "=", "date", ".", "month", ",", "day", "=", "date", ".", "day", ")", "saved_local_fname", "=", "os", ".", "path", ".", "join", "(", "data_path", ",", "formatted_local_fname", ")", "# perform download ", "try", ":", "print", "(", "'Attempting to download file for '", "+", "date", ".", "strftime", "(", "'%x'", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "ftp", ".", "retrbinary", "(", "'RETR '", "+", "formatted_remote_fname", ",", "open", "(", "saved_local_fname", ",", "'wb'", ")", ".", "write", ")", "print", "(", "'Finished.'", ")", "except", "ftplib", ".", "error_perm", "as", "exception", ":", "# if exception[0][0:3] != '550':", "if", "str", "(", "exception", ".", "args", "[", "0", "]", ")", ".", "split", "(", "\" \"", ",", "1", ")", "[", "0", "]", "!=", "'550'", ":", "raise", "else", ":", "os", ".", "remove", "(", "saved_local_fname", ")", "print", "(", "'File not available for '", "+", "date", ".", "strftime", "(", "'%x'", ")", ")", "ftp", ".", "close", "(", ")" ]
37.330097
21.009709
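The remote/local templates from the docstring are filled per date with str.format; a standalone sketch of just that step:

import datetime

rn = '{year:4d}/cnofs_vefi_bfield_1sec_{year:4d}{month:02d}{day:02d}_v05.cdf'
date = datetime.datetime(2010, 1, 5)
print(rn.format(year=date.year, month=date.month, day=date.day))
# 2010/cnofs_vefi_bfield_1sec_20100105_v05.cdf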
def group_by(self, key_selector=identity,
             element_selector=identity,
             result_selector=lambda key, grouping: grouping):
    '''Groups the elements according to the value of a key extracted by a
    selector function.

    Note: This method has different behaviour to itertools.groupby in the
        Python standard library because it aggregates all items with the
        same key, rather than returning groups of consecutive items of the
        same key.

    Note: This method uses deferred execution, but consumption of a single
        result will lead to evaluation of the whole source sequence.

    Args:
        key_selector: An optional unary function used to extract a key from
            each element in the source sequence. The default is the identity
            function.

        element_selector: An optional unary function to map elements in the
            source sequence to elements in a resulting Grouping. The default
            is the identity function.

        result_selector: An optional binary function to create a result from
            each group. The first positional argument is the key identifying
            the group. The second argument is a Grouping object containing
            the members of the group. The default is a function which simply
            returns the Grouping.

    Returns:
        A Queryable sequence where each element represents a group. If the
        default result_selector is relied upon this is a Grouping object.

    Raises:
        ValueError: If the Queryable is closed().
        TypeError: If key_selector is not callable.
        TypeError: If element_selector is not callable.
        TypeError: If result_selector is not callable.
    '''
    if self.closed():
        raise ValueError("Attempt to call group_by() on a closed "
                         "Queryable.")

    if not is_callable(key_selector):
        raise TypeError("group_by() parameter key_selector={0} is not "
                        "callable".format(repr(key_selector)))

    if not is_callable(element_selector):
        raise TypeError("group_by() parameter element_selector={0} is not "
                        "callable".format(repr(element_selector)))

    if not is_callable(result_selector):
        raise TypeError("group_by() parameter result_selector={0} is not "
                        "callable".format(repr(result_selector)))

    return self._create(self._generate_group_by_result(key_selector,
                                                       element_selector,
                                                       result_selector))
[ "def", "group_by", "(", "self", ",", "key_selector", "=", "identity", ",", "element_selector", "=", "identity", ",", "result_selector", "=", "lambda", "key", ",", "grouping", ":", "grouping", ")", ":", "if", "self", ".", "closed", "(", ")", ":", "raise", "ValueError", "(", "\"Attempt to call group_by() on a closed \"", "\"Queryable.\"", ")", "if", "not", "is_callable", "(", "key_selector", ")", ":", "raise", "TypeError", "(", "\"group_by() parameter key_selector={0} is not \"", "\"callable\"", ".", "format", "(", "repr", "(", "key_selector", ")", ")", ")", "if", "not", "is_callable", "(", "element_selector", ")", ":", "raise", "TypeError", "(", "\"group_by() parameter element_selector={0} is not \"", "\"callable\"", ".", "format", "(", "repr", "(", "element_selector", ")", ")", ")", "if", "not", "is_callable", "(", "result_selector", ")", ":", "raise", "TypeError", "(", "\"group_by() parameter result_selector={0} is not \"", "\"callable\"", ".", "format", "(", "repr", "(", "result_selector", ")", ")", ")", "return", "self", ".", "_create", "(", "self", ".", "_generate_group_by_result", "(", "key_selector", ",", "element_selector", ",", "result_selector", ")", ")" ]
46.5
27.224138
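The aggregating (non-consecutive) grouping described in the docstring maps onto a plain dictionary accumulation; a stdlib-only sketch of the same semantics (group_by_all is a hypothetical helper, not the library's implementation):

from collections import OrderedDict

def group_by_all(iterable, key_selector=lambda x: x):
    """Aggregate *all* items sharing a key, unlike itertools.groupby."""
    groups = OrderedDict()
    for item in iterable:
        groups.setdefault(key_selector(item), []).append(item)
    return groups

# 'a' items are aggregated even though they are not consecutive.
print(group_by_all(['apple', 'bee', 'avocado'], key_selector=lambda s: s[0]))
# OrderedDict([('a', ['apple', 'avocado']), ('b', ['bee'])])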
def get_vboxes(self):
    """
    Get the maximum ID of the VBoxes

    :return: Maximum VBox ID
    :rtype: int
    """
    vbox_list = []
    vbox_max = None
    for node in self.nodes:
        if node['type'] == 'VirtualBoxVM':
            vbox_list.append(node['vbox_id'])
    if len(vbox_list) > 0:
        vbox_max = max(vbox_list)
    return vbox_max
[ "def", "get_vboxes", "(", "self", ")", ":", "vbox_list", "=", "[", "]", "vbox_max", "=", "None", "for", "node", "in", "self", ".", "nodes", ":", "if", "node", "[", "'type'", "]", "==", "'VirtualBoxVM'", ":", "vbox_list", ".", "append", "(", "node", "[", "'vbox_id'", "]", ")", "if", "len", "(", "vbox_list", ")", ">", "0", ":", "vbox_max", "=", "max", "(", "vbox_list", ")", "return", "vbox_max" ]
24.6875
13.5625
def __write_list_tmpl(html_tpl):
    '''
    Write the 'infolist' HTML templates into the CRUD output directory.
    '''
    out_dir = os.path.join(os.getcwd(), CRUD_PATH, 'infolist')
    if os.path.exists(out_dir):
        pass
    else:
        os.mkdir(out_dir)

    # for var_name in VAR_NAMES:
    for var_name, bl_val in SWITCH_DICS.items():
        if var_name.startswith('dic_'):
            outfile = os.path.join(out_dir, 'infolist' + '_' + var_name.split('_')[1] + '.html')
            html_view_str_arr = []
            # tview_var = eval('dic_vars.' + var_name)
            subdir = ''
            for the_val2 in bl_val:
                # sig = eval('html_vars.html_' + x)
                sig = HTML_DICS['html_' + the_val2]
                if sig['type'] == 'select':
                    html_view_str_arr.append(func_gen_html.gen_select_list(sig))
                elif sig['type'] == 'radio':
                    html_view_str_arr.append(func_gen_html.gen_radio_list(sig))
                elif sig['type'] == 'checkbox':
                    html_view_str_arr.append(func_gen_html.gen_checkbox_list(sig))

            with open(outfile, 'w') as outfileo:
                outstr = minify(
                    html_tpl.replace(
                        'xxxxxx', ''.join(html_view_str_arr)
                    ).replace(
                        'yyyyyy', var_name.split('_')[1][:2]
                    ).replace(
                        'ssssss', subdir
                    ).replace(
                        'kkkk', KIND_DICS['kind_' + var_name.split('_')[-1]]
                    )
                )
                outfileo.write(outstr)
[ "def", "__write_list_tmpl", "(", "html_tpl", ")", ":", "out_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "CRUD_PATH", ",", "'infolist'", ")", "if", "os", ".", "path", ".", "exists", "(", "out_dir", ")", ":", "pass", "else", ":", "os", ".", "mkdir", "(", "out_dir", ")", "# for var_name in VAR_NAMES:", "for", "var_name", ",", "bl_val", "in", "SWITCH_DICS", ".", "items", "(", ")", ":", "if", "var_name", ".", "startswith", "(", "'dic_'", ")", ":", "outfile", "=", "os", ".", "path", ".", "join", "(", "out_dir", ",", "'infolist'", "+", "'_'", "+", "var_name", ".", "split", "(", "'_'", ")", "[", "1", "]", "+", "'.html'", ")", "html_view_str_arr", "=", "[", "]", "# tview_var = eval('dic_vars.' + var_name)", "subdir", "=", "''", "for", "the_val2", "in", "bl_val", ":", "# sig = eval('html_vars.html_' + x)", "sig", "=", "HTML_DICS", "[", "'html_'", "+", "the_val2", "]", "if", "sig", "[", "'type'", "]", "==", "'select'", ":", "html_view_str_arr", ".", "append", "(", "func_gen_html", ".", "gen_select_list", "(", "sig", ")", ")", "elif", "sig", "[", "'type'", "]", "==", "'radio'", ":", "html_view_str_arr", ".", "append", "(", "func_gen_html", ".", "gen_radio_list", "(", "sig", ")", ")", "elif", "sig", "[", "'type'", "]", "==", "'checkbox'", ":", "html_view_str_arr", ".", "append", "(", "func_gen_html", ".", "gen_checkbox_list", "(", "sig", ")", ")", "with", "open", "(", "outfile", ",", "'w'", ")", "as", "outfileo", ":", "outstr", "=", "minify", "(", "html_tpl", ".", "replace", "(", "'xxxxxx'", ",", "''", ".", "join", "(", "html_view_str_arr", ")", ")", ".", "replace", "(", "'yyyyyy'", ",", "var_name", ".", "split", "(", "'_'", ")", "[", "1", "]", "[", ":", "2", "]", ")", ".", "replace", "(", "'ssssss'", ",", "subdir", ")", ".", "replace", "(", "'kkkk'", ",", "KIND_DICS", "[", "'kind_'", "+", "var_name", ".", "split", "(", "'_'", ")", "[", "-", "1", "]", "]", ")", ")", "outfileo", ".", "write", "(", "outstr", ")" ]
37.681818
16.545455
def get_ipv6_neighbors_table(self):
    """
    Get IPv6 neighbors table information.

    Return a list of dictionaries having the following set of keys:

        * interface (string)
        * mac (string)
        * ip (string)
        * age (float) in seconds
        * state (string)

    For example::

        [
            {
                'interface' : 'MgmtEth0/RSP0/CPU0/0',
                'mac' : '5c:5e:ab:da:3c:f0',
                'ip' : '2001:db8:1:1::1',
                'age' : 1454496274.84,
                'state' : 'REACH'
            },
            {
                'interface': 'MgmtEth0/RSP0/CPU0/0',
                'mac' : '66:0e:94:96:e0:ff',
                'ip' : '2001:db8:1:1::2',
                'age' : 1435641582.49,
                'state' : 'STALE'
            }
        ]
    """
    ipv6_neighbors_table = []

    command = "show ipv6 neighbors"
    output = self._send_command(command)

    ipv6_neighbors = ""
    fields = re.split(r"^IPv6\s+Address.*Interface$", output, flags=(re.M | re.I))
    if len(fields) == 2:
        ipv6_neighbors = fields[1].strip()
    for entry in ipv6_neighbors.splitlines():
        # typical format of an entry in the IOS IPv6 neighbors table:
        # 2002:FFFF:233::1 0 2894.0fed.be30 REACH Fa3/1/2.233
        ip, age, mac, state, interface = entry.split()
        mac = "" if mac == "-" else napalm.base.helpers.mac(mac)
        ip = napalm.base.helpers.ip(ip)
        ipv6_neighbors_table.append(
            {
                "interface": interface,
                "mac": mac,
                "ip": ip,
                "age": float(age),
                "state": state,
            }
        )
    return ipv6_neighbors_table
[ "def", "get_ipv6_neighbors_table", "(", "self", ")", ":", "ipv6_neighbors_table", "=", "[", "]", "command", "=", "\"show ipv6 neighbors\"", "output", "=", "self", ".", "_send_command", "(", "command", ")", "ipv6_neighbors", "=", "\"\"", "fields", "=", "re", ".", "split", "(", "r\"^IPv6\\s+Address.*Interface$\"", ",", "output", ",", "flags", "=", "(", "re", ".", "M", "|", "re", ".", "I", ")", ")", "if", "len", "(", "fields", ")", "==", "2", ":", "ipv6_neighbors", "=", "fields", "[", "1", "]", ".", "strip", "(", ")", "for", "entry", "in", "ipv6_neighbors", ".", "splitlines", "(", ")", ":", "# typical format of an entry in the IOS IPv6 neighbors table:", "# 2002:FFFF:233::1 0 2894.0fed.be30 REACH Fa3/1/2.233", "ip", ",", "age", ",", "mac", ",", "state", ",", "interface", "=", "entry", ".", "split", "(", ")", "mac", "=", "\"\"", "if", "mac", "==", "\"-\"", "else", "napalm", ".", "base", ".", "helpers", ".", "mac", "(", "mac", ")", "ip", "=", "napalm", ".", "base", ".", "helpers", ".", "ip", "(", "ip", ")", "ipv6_neighbors_table", ".", "append", "(", "{", "\"interface\"", ":", "interface", ",", "\"mac\"", ":", "mac", ",", "\"ip\"", ":", "ip", ",", "\"age\"", ":", "float", "(", "age", ")", ",", "\"state\"", ":", "state", ",", "}", ")", "return", "ipv6_neighbors_table" ]
36.480769
14.519231
def add_fields(store_name, field_names):
    """
    A class-decorator that creates layout managers with a set of named
    fields.
    """
    def decorate(cls):
        def _add(index, name):
            def _set_dir(self, value):
                getattr(self, store_name)[index] = value

            def _get_dir(self):
                return getattr(self, store_name)[index]

            setattr(cls, name, property(_get_dir, _set_dir))

        for index, field_name in enumerate(field_names):
            _add(index, field_name)
        return cls
    return decorate
[ "def", "add_fields", "(", "store_name", ",", "field_names", ")", ":", "def", "decorate", "(", "cls", ")", ":", "def", "_add", "(", "index", ",", "name", ")", ":", "def", "_set_dir", "(", "self", ",", "value", ")", ":", "getattr", "(", "self", ",", "store_name", ")", "[", "index", "]", "=", "value", "def", "_get_dir", "(", "self", ")", ":", "return", "getattr", "(", "self", ",", "store_name", ")", "[", "index", "]", "setattr", "(", "cls", ",", "name", ",", "property", "(", "_get_dir", ",", "_set_dir", ")", ")", "for", "index", ",", "field_name", "in", "enumerate", "(", "field_names", ")", ":", "_add", "(", "index", ",", "field_name", ")", "return", "cls", "return", "decorate" ]
30.882353
21.235294
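A usage sketch for the decorator above; Box is a hypothetical class, and the store is assumed to be a per-instance list with one slot per declared field:

@add_fields('_store', ['width', 'height'])
class Box(object):
    def __init__(self):
        self._store = [0, 0]  # one slot per declared field

b = Box()
b.width = 640      # the generated property writes through to b._store[0]
b.height = 480
print(b._store)    # [640, 480]
print(b.height)    # 480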
def _message(self, beacon_config, invert_hello=False):
    """ Overridden :meth:`.WBeaconGouverneurMessenger._message` method.
    Appends encoded host group names to requests and responses.

    :param beacon_config: beacon configuration
    :return: bytes
    """
    m = WBeaconGouverneurMessenger._message(self, beacon_config, invert_hello=invert_hello)
    hostgroups = self._message_hostgroup_generate()
    if len(hostgroups) > 0:
        m += (WHostgroupBeaconMessenger.__message_groups_splitter__ + hostgroups)
    return m
[ "def", "_message", "(", "self", ",", "beacon_config", ",", "invert_hello", "=", "False", ")", ":", "m", "=", "WBeaconGouverneurMessenger", ".", "_message", "(", "self", ",", "beacon_config", ",", "invert_hello", "=", "invert_hello", ")", "hostgroups", "=", "self", ".", "_message_hostgroup_generate", "(", ")", "if", "len", "(", "hostgroups", ")", ">", "0", ":", "m", "+=", "(", "WHostgroupBeaconMessenger", ".", "__message_groups_splitter__", "+", "hostgroups", ")", "return", "m" ]
41.5
19.416667
def remove(self, projectname=None, complete=False,
           yes=False, all_projects=False, **kwargs):
    """
    Delete an existing experiment and/or project

    Parameters
    ----------
    projectname: str
        The name for which the data shall be removed. If True, the
        project will be determined by the experiment. If not None, all
        experiments for the given project will be removed.
    complete: bool
        If set, delete not only the experiments and config files, but
        also all the project files
    yes: bool
        If True/set, do not ask for confirmation
    all_projects: bool
        If True/set, all projects are removed

    Warnings
    --------
    This will remove the entire folder and all the related information in
    the configurations!
    """
    self.app_main(**kwargs)
    if projectname in self.config.projects:
        self.projectname = projectname
    all_experiments = self.config.experiments
    projects_info = self.config.projects
    if all_projects:
        experiments = list(all_experiments.keys())
        projects = list(projects_info.keys())
    elif projectname is not None:
        experiments = all_experiments.project_map[projectname]
        projects = [self.projectname]
    else:
        experiments = [self.experiment]
        projects = [self.projectname]
    if not yes:
        if complete:
            msg = ('Are you sure to remove all experiments (%s) and '
                   'directories for the project instances %s?' % (
                       ', '.join(experiments), ', '.join(projects)))
        else:
            msg = ('Are you sure to remove the experiments %s' % (
                ', '.join(experiments)))
        answer = ''
        while answer.lower() not in ['n', 'no', 'y', 'yes']:
            answer = input(msg + '[y/n] ')
        if answer.lower() in ['n', 'no']:
            return
    for exp in experiments:
        if not self.is_archived(exp):
            self.logger.debug("Removing experiment %s", exp)
            try:
                exp_dict = self.fix_paths(all_experiments[exp])
            except KeyError:
                # experiment has been removed already
                pass
            else:
                if osp.exists(exp_dict['expdir']):
                    shutil.rmtree(exp_dict['expdir'])
        self.config.remove_experiment(exp)
    if complete:
        for project in projects:
            self.logger.debug("Removing project %s", project)
            projectdir = projects_info.pop(project)['root']
            if osp.exists(projectdir):
                shutil.rmtree(projectdir)
[ "def", "remove", "(", "self", ",", "projectname", "=", "None", ",", "complete", "=", "False", ",", "yes", "=", "False", ",", "all_projects", "=", "False", ",", "*", "*", "kwargs", ")", ":", "self", ".", "app_main", "(", "*", "*", "kwargs", ")", "if", "projectname", "in", "self", ".", "config", ".", "projects", ":", "self", ".", "projectname", "=", "projectname", "all_experiments", "=", "self", ".", "config", ".", "experiments", "projects_info", "=", "self", ".", "config", ".", "projects", "if", "all_projects", ":", "experiments", "=", "list", "(", "all_experiments", ".", "keys", "(", ")", ")", "projects", "=", "list", "(", "projects_info", ".", "keys", "(", ")", ")", "elif", "projectname", "is", "not", "None", ":", "experiments", "=", "all_experiments", ".", "project_map", "[", "projectname", "]", "projects", "=", "[", "self", ".", "projectname", "]", "else", ":", "experiments", "=", "[", "self", ".", "experiment", "]", "projects", "=", "[", "self", ".", "projectname", "]", "if", "not", "yes", ":", "if", "complete", ":", "msg", "=", "(", "'Are you sure to remove all experiments (%s) and '", "'directories for the project instances %s?'", "%", "(", "', '", ".", "join", "(", "experiments", ")", ",", "', '", ".", "join", "(", "projects", ")", ")", ")", "else", ":", "msg", "=", "(", "'Are you sure to remove the experiments %s'", "%", "(", "', '", ".", "join", "(", "experiments", ")", ")", ")", "answer", "=", "''", "while", "answer", ".", "lower", "(", ")", "not", "in", "[", "'n'", ",", "'no'", ",", "'y'", ",", "'yes'", "]", ":", "answer", "=", "input", "(", "msg", "+", "'[y/n] '", ")", "if", "answer", ".", "lower", "(", ")", "in", "[", "'n'", ",", "'no'", "]", ":", "return", "for", "exp", "in", "experiments", ":", "if", "not", "self", ".", "is_archived", "(", "exp", ")", ":", "self", ".", "logger", ".", "debug", "(", "\"Removing experiment %s\"", ",", "exp", ")", "try", ":", "exp_dict", "=", "self", ".", "fix_paths", "(", "all_experiments", "[", "exp", "]", ")", "except", "KeyError", ":", "# experiment has been removed already", "pass", "else", ":", "if", "osp", ".", "exists", "(", "exp_dict", "[", "'expdir'", "]", ")", ":", "shutil", ".", "rmtree", "(", "exp_dict", "[", "'expdir'", "]", ")", "self", ".", "config", ".", "remove_experiment", "(", "exp", ")", "if", "complete", ":", "for", "project", "in", "projects", ":", "self", ".", "logger", ".", "debug", "(", "\"Removing project %s\"", ",", "project", ")", "projectdir", "=", "projects_info", ".", "pop", "(", "project", ")", "[", "'root'", "]", "if", "osp", ".", "exists", "(", "projectdir", ")", ":", "shutil", ".", "rmtree", "(", "projectdir", ")" ]
40.710145
17.231884
def get_dev_alarms(auth, url, devid=None, devip=None):
    """
    function takes the devId of a specific device and issues a RESTFUL call to get the current
    alarms for the target device.

    :param devid: int or str value of the target device
    :param devip: str of ipv4 address of the target device
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: list of dictionaries containing the alarms for this device
    :rtype: list

    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.alarms import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> dev_alarms = get_dev_alarms(auth.creds, auth.url, devip='10.101.0.221')
    >>> assert 'ackStatus' in dev_alarms[0]
    """
    # checks to see if the imc credentials are already available
    if devip is not None:
        devid = get_dev_details(devip, auth, url)['id']
    f_url = url + "/imcrs/fault/alarm?operatorName=admin&deviceId=" + \
            str(devid) + "&desc=false"
    response = requests.get(f_url, auth=auth, headers=HEADERS)
    try:
        if response.status_code == 200:
            dev_alarm = (json.loads(response.text))
            if 'alarm' in dev_alarm:
                return dev_alarm['alarm']
            else:
                return "Device has no alarms"
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + ' get_dev_alarms: An Error has occured'
[ "def", "get_dev_alarms", "(", "auth", ",", "url", ",", "devid", "=", "None", ",", "devip", "=", "None", ")", ":", "# checks to see if the imc credentials are already available", "if", "devip", "is", "not", "None", ":", "devid", "=", "get_dev_details", "(", "devip", ",", "auth", ",", "url", ")", "[", "'id'", "]", "f_url", "=", "url", "+", "\"/imcrs/fault/alarm?operatorName=admin&deviceId=\"", "+", "str", "(", "devid", ")", "+", "\"&desc=false\"", "response", "=", "requests", ".", "get", "(", "f_url", ",", "auth", "=", "auth", ",", "headers", "=", "HEADERS", ")", "try", ":", "if", "response", ".", "status_code", "==", "200", ":", "dev_alarm", "=", "(", "json", ".", "loads", "(", "response", ".", "text", ")", ")", "if", "'alarm'", "in", "dev_alarm", ":", "return", "dev_alarm", "[", "'alarm'", "]", "else", ":", "return", "\"Device has no alarms\"", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "error", ":", "return", "\"Error:\\n\"", "+", "str", "(", "error", ")", "+", "' get_dev_alarms: An Error has occured'" ]
35.976744
25
def setRandomParams(self):
    """
    set random hyperparameters
    """
    params = SP.randn(self.getNumberParams())
    self.setParams(params)
[ "def", "setRandomParams", "(", "self", ")", ":", "params", "=", "SP", ".", "randn", "(", "self", ".", "getNumberParams", "(", ")", ")", "self", ".", "setParams", "(", "params", ")" ]
26.833333
6.5
def poll(self, command="start", *, delay=120): """ Poll a point every x seconds (delay=x sec) Can be stopped by using point.poll('stop') or .poll(0) or .poll(False) or by setting a delay = 0 :param command: (str) start or stop polling :param delay: (int) time delay between polls in seconds :type command: str :type delay: int :Example: device.poll() device.poll('stop') device.poll(delay = 5) """ if delay > 120: self._log.warning( "Segmentation not supported, forcing delay to 120 seconds (or higher)" ) delay = 120 # for each in self.points: # each.value # self._log.info('Complete') if ( str(command).lower() == "stop" or command == False or command == 0 or delay == 0 ): if isinstance(self._polling_task.task, DevicePoll): self._polling_task.task.stop() while self._polling_task.task.is_alive(): pass self._polling_task.task = None self._polling_task.running = False self._log.info("Polling stopped") elif self._polling_task.task is None: self._polling_task.task = DevicePoll(self, delay=delay) self._polling_task.task.start() self._polling_task.running = True self._log.info( "Polling started, values read every {} seconds".format(delay) ) elif self._polling_task.running: self._polling_task.task.stop() while self._polling_task.task.is_alive(): pass self._polling_task.running = False self._polling_task.task = DevicePoll(self, delay=delay) self._polling_task.task.start() self._polling_task.running = True self._log.info("Polling started, every values read each %s seconds" % delay) else: raise RuntimeError("Stop polling before redefining it")
[ "def", "poll", "(", "self", ",", "command", "=", "\"start\"", ",", "*", ",", "delay", "=", "120", ")", ":", "if", "delay", ">", "120", ":", "self", ".", "_log", ".", "warning", "(", "\"Segmentation not supported, forcing delay to 120 seconds (or higher)\"", ")", "delay", "=", "120", "# for each in self.points:\r", "# each.value\r", "# self._log.info('Complete')\r", "if", "(", "str", "(", "command", ")", ".", "lower", "(", ")", "==", "\"stop\"", "or", "command", "==", "False", "or", "command", "==", "0", "or", "delay", "==", "0", ")", ":", "if", "isinstance", "(", "self", ".", "_polling_task", ".", "task", ",", "DevicePoll", ")", ":", "self", ".", "_polling_task", ".", "task", ".", "stop", "(", ")", "while", "self", ".", "_polling_task", ".", "task", ".", "is_alive", "(", ")", ":", "pass", "self", ".", "_polling_task", ".", "task", "=", "None", "self", ".", "_polling_task", ".", "running", "=", "False", "self", ".", "_log", ".", "info", "(", "\"Polling stopped\"", ")", "elif", "self", ".", "_polling_task", ".", "task", "is", "None", ":", "self", ".", "_polling_task", ".", "task", "=", "DevicePoll", "(", "self", ",", "delay", "=", "delay", ")", "self", ".", "_polling_task", ".", "task", ".", "start", "(", ")", "self", ".", "_polling_task", ".", "running", "=", "True", "self", ".", "_log", ".", "info", "(", "\"Polling started, values read every {} seconds\"", ".", "format", "(", "delay", ")", ")", "elif", "self", ".", "_polling_task", ".", "running", ":", "self", ".", "_polling_task", ".", "task", ".", "stop", "(", ")", "while", "self", ".", "_polling_task", ".", "task", ".", "is_alive", "(", ")", ":", "pass", "self", ".", "_polling_task", ".", "running", "=", "False", "self", ".", "_polling_task", ".", "task", "=", "DevicePoll", "(", "self", ",", "delay", "=", "delay", ")", "self", ".", "_polling_task", ".", "task", ".", "start", "(", ")", "self", ".", "_polling_task", ".", "running", "=", "True", "self", ".", "_log", ".", "info", "(", "\"Polling started, every values read each %s seconds\"", "%", "delay", ")", "else", ":", "raise", "RuntimeError", "(", "\"Stop polling before redefining it\"", ")" ]
35.065574
18.47541
def start_workunit(self, workunit): """Implementation of Reporter callback.""" if not self.is_under_main_root(workunit): return label_format = self._get_label_format(workunit) if label_format == LabelFormat.FULL: if WorkUnitLabel.SUPPRESS_LABEL not in workunit.labels: self._emit_indented_workunit_label(workunit) # Start output on a new line. tool_output_format = self._get_tool_output_format(workunit) if tool_output_format == ToolOutputFormat.INDENT: self.emit(self._prefix(workunit, '\n')) elif tool_output_format == ToolOutputFormat.UNINDENTED: self.emit('\n') elif label_format == LabelFormat.DOT: self.emit('.') self.flush()
[ "def", "start_workunit", "(", "self", ",", "workunit", ")", ":", "if", "not", "self", ".", "is_under_main_root", "(", "workunit", ")", ":", "return", "label_format", "=", "self", ".", "_get_label_format", "(", "workunit", ")", "if", "label_format", "==", "LabelFormat", ".", "FULL", ":", "if", "not", "WorkUnitLabel", ".", "SUPPRESS_LABEL", "in", "workunit", ".", "labels", ":", "self", ".", "_emit_indented_workunit_label", "(", "workunit", ")", "# Start output on a new line.", "tool_output_format", "=", "self", ".", "_get_tool_output_format", "(", "workunit", ")", "if", "tool_output_format", "==", "ToolOutputFormat", ".", "INDENT", ":", "self", ".", "emit", "(", "self", ".", "_prefix", "(", "workunit", ",", "'\\n'", ")", ")", "elif", "tool_output_format", "==", "ToolOutputFormat", ".", "UNINDENTED", ":", "self", ".", "emit", "(", "'\\n'", ")", "elif", "label_format", "==", "LabelFormat", ".", "DOT", ":", "self", ".", "emit", "(", "'.'", ")", "self", ".", "flush", "(", ")" ]
35.25
16.85
def full_pixels(space, data, gap_pixels=1): """Return the given data distributed across the space in whole pixels, honoring the given gap. This will result in minor sub-pixel inaccuracies. XXX - figure out where to place these guys as they are quite useful """ available = space - (len(data) - 1) * gap_pixels # e.g. 8 rects have 7 gaps res = [] for i, val in enumerate(data): # convert the remaining data to a 0..1 scale so we deal with fractions data_sum = sum(data[i:]) norm = val * 1.0 / data_sum w = max(int(round(available * norm)), 1) res.append(w) available -= w return res
[ "def", "full_pixels", "(", "space", ",", "data", ",", "gap_pixels", "=", "1", ")", ":", "available", "=", "space", "-", "(", "len", "(", "data", ")", "-", "1", ")", "*", "gap_pixels", "# 8 recs 7 gaps", "res", "=", "[", "]", "for", "i", ",", "val", "in", "enumerate", "(", "data", ")", ":", "# convert data to 0..1 scale so we deal with fractions", "data_sum", "=", "sum", "(", "data", "[", "i", ":", "]", ")", "norm", "=", "val", "*", "1.0", "/", "data_sum", "w", "=", "max", "(", "int", "(", "round", "(", "available", "*", "norm", ")", ")", ",", "1", ")", "res", ".", "append", "(", "w", ")", "available", "-=", "w", "return", "res" ]
33.105263
18.315789
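A quick worked check of the rounding behaviour, with made-up numbers: a 100-pixel space split across three values with 1-pixel gaps leaves 98 pixels for the bars themselves, and the running renormalization absorbs the rounding error in the last bar.

>>> full_pixels(100, [10, 20, 30])
[16, 33, 49]
>>> sum(full_pixels(100, [10, 20, 30])) + 2 * 1  # widths plus the two gaps
100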
def cleanup(first_I, first_Z): """ Clean up unbalanced steps. Failure can come from an unbalanced final step or from missing steps; this takes care of missing steps. """ cont = 0 Nmin = len(first_I) if len(first_Z) < Nmin: Nmin = len(first_Z) for kk in range(Nmin): if first_I[kk][0] != first_Z[kk][0]: print("\n WARNING: ") if first_I[kk] < first_Z[kk]: del first_I[kk] else: del first_Z[kk] print("Unmatched step number: ", kk + 1, ' ignored') cont = 1 if cont == 1: return first_I, first_Z, cont return first_I, first_Z, cont
[ "def", "cleanup", "(", "first_I", ",", "first_Z", ")", ":", "cont", "=", "0", "Nmin", "=", "len", "(", "first_I", ")", "if", "len", "(", "first_Z", ")", "<", "Nmin", ":", "Nmin", "=", "len", "(", "first_Z", ")", "for", "kk", "in", "range", "(", "Nmin", ")", ":", "if", "first_I", "[", "kk", "]", "[", "0", "]", "!=", "first_Z", "[", "kk", "]", "[", "0", "]", ":", "print", "(", "\"\\n WARNING: \"", ")", "if", "first_I", "[", "kk", "]", "<", "first_Z", "[", "kk", "]", ":", "del", "first_I", "[", "kk", "]", "else", ":", "del", "first_Z", "[", "kk", "]", "print", "(", "\"Unmatched step number: \"", ",", "kk", "+", "1", ",", "' ignored'", ")", "cont", "=", "1", "if", "cont", "==", "1", ":", "return", "first_I", ",", "first_Z", ",", "cont", "return", "first_I", ",", "first_Z", ",", "cont" ]
30.681818
11.863636
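A small illustration with made-up step data, where each step is a [step_number, value] pair; the I step with no Z counterpart is dropped and cont is set to 1 so the caller knows a fix was applied and can re-run:

first_I = [[1, 0.1], [2, 0.2], [3, 0.3]]
first_Z = [[1, 0.5], [3, 0.6]]
first_I, first_Z, cont = cleanup(first_I, first_Z)
# first_I is now [[1, 0.1], [3, 0.3]] and cont == 1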
def strptime(s, fmt, tzinfo=None): """ A function to replace strptime in the time module. Should behave identically to the strptime function except it returns a datetime.datetime object instead of a time.struct_time object. Also takes an optional tzinfo parameter which is a time zone info object. """ res = time.strptime(s, fmt) return datetime.datetime(tzinfo=tzinfo, *res[:6])
[ "def", "strptime", "(", "s", ",", "fmt", ",", "tzinfo", "=", "None", ")", ":", "res", "=", "time", ".", "strptime", "(", "s", ",", "fmt", ")", "return", "datetime", ".", "datetime", "(", "tzinfo", "=", "tzinfo", ",", "*", "res", "[", ":", "6", "]", ")" ]
42.222222
14.222222
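Two quick examples, assuming the module-level time and datetime imports this function relies on:

>>> strptime('2001-11-12 18:31:01', '%Y-%m-%d %H:%M:%S')
datetime.datetime(2001, 11, 12, 18, 31, 1)
>>> strptime('2001-11-12', '%Y-%m-%d', tzinfo=datetime.timezone.utc)
datetime.datetime(2001, 11, 12, 0, 0, tzinfo=datetime.timezone.utc)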
def iter(self, headers_only=False, page_size=None): """Yield all time series objects selected by the query. The generator returned iterates over :class:`~google.cloud.monitoring_v3.types.TimeSeries` objects containing points ordered from oldest to newest. Note that the :class:`Query` object itself is an iterable, such that the following are equivalent:: for timeseries in query: ... for timeseries in query.iter(): ... :type headers_only: bool :param headers_only: Whether to omit the point data from the time series objects. :type page_size: int :param page_size: (Optional) The maximum number of points in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. :raises: :exc:`ValueError` if the query time interval has not been specified. """ if self._end_time is None: raise ValueError("Query time interval not specified.") params = self._build_query_params(headers_only, page_size) for ts in self._client.list_time_series(**params): yield ts
[ "def", "iter", "(", "self", ",", "headers_only", "=", "False", ",", "page_size", "=", "None", ")", ":", "if", "self", ".", "_end_time", "is", "None", ":", "raise", "ValueError", "(", "\"Query time interval not specified.\"", ")", "params", "=", "self", ".", "_build_query_params", "(", "headers_only", ",", "page_size", ")", "for", "ts", "in", "self", ".", "_client", ".", "list_time_series", "(", "*", "*", "params", ")", ":", "yield", "ts" ]
35.571429
21.971429
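A hedged usage sketch (the surrounding Query setup is assumed, not shown in this excerpt): once the query's time interval has been set, the generator can be consumed directly, and headers_only=True skips the point payloads when only series metadata is needed.

# query is an already-configured Query with its interval set;
# otherwise iter() raises ValueError.
for ts in query.iter(headers_only=True, page_size=500):
    print(ts.metric.type, ts.resource.labels)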
def decrypt(self, ciphertext): 'Decrypt a block of cipher text using the AES block cipher.' if len(ciphertext) != 16: raise ValueError('wrong block length') rounds = len(self._Kd) - 1 (s1, s2, s3) = [3, 2, 1] a = [0, 0, 0, 0] # Convert ciphertext to (ints ^ key) t = [(_compact_word(ciphertext[4 * i:4 * i + 4]) ^ self._Kd[0][i]) for i in xrange(0, 4)] # Apply round transforms for r in xrange(1, rounds): for i in xrange(0, 4): a[i] = (self.T5[(t[ i ] >> 24) & 0xFF] ^ self.T6[(t[(i + s1) % 4] >> 16) & 0xFF] ^ self.T7[(t[(i + s2) % 4] >> 8) & 0xFF] ^ self.T8[ t[(i + s3) % 4] & 0xFF] ^ self._Kd[r][i]) t = copy.copy(a) # The last round is special result = [ ] for i in xrange(0, 4): tt = self._Kd[rounds][i] result.append((self.Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF) result.append((self.Si[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF) result.append((self.Si[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF) result.append((self.Si[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF) return result
[ "def", "decrypt", "(", "self", ",", "ciphertext", ")", ":", "if", "len", "(", "ciphertext", ")", "!=", "16", ":", "raise", "ValueError", "(", "'wrong block length'", ")", "rounds", "=", "len", "(", "self", ".", "_Kd", ")", "-", "1", "(", "s1", ",", "s2", ",", "s3", ")", "=", "[", "3", ",", "2", ",", "1", "]", "a", "=", "[", "0", ",", "0", ",", "0", ",", "0", "]", "# Convert ciphertext to (ints ^ key)", "t", "=", "[", "(", "_compact_word", "(", "ciphertext", "[", "4", "*", "i", ":", "4", "*", "i", "+", "4", "]", ")", "^", "self", ".", "_Kd", "[", "0", "]", "[", "i", "]", ")", "for", "i", "in", "xrange", "(", "0", ",", "4", ")", "]", "# Apply round transforms", "for", "r", "in", "xrange", "(", "1", ",", "rounds", ")", ":", "for", "i", "in", "xrange", "(", "0", ",", "4", ")", ":", "a", "[", "i", "]", "=", "(", "self", ".", "T5", "[", "(", "t", "[", "i", "]", ">>", "24", ")", "&", "0xFF", "]", "^", "self", ".", "T6", "[", "(", "t", "[", "(", "i", "+", "s1", ")", "%", "4", "]", ">>", "16", ")", "&", "0xFF", "]", "^", "self", ".", "T7", "[", "(", "t", "[", "(", "i", "+", "s2", ")", "%", "4", "]", ">>", "8", ")", "&", "0xFF", "]", "^", "self", ".", "T8", "[", "t", "[", "(", "i", "+", "s3", ")", "%", "4", "]", "&", "0xFF", "]", "^", "self", ".", "_Kd", "[", "r", "]", "[", "i", "]", ")", "t", "=", "copy", ".", "copy", "(", "a", ")", "# The last round is special", "result", "=", "[", "]", "for", "i", "in", "xrange", "(", "0", ",", "4", ")", ":", "tt", "=", "self", ".", "_Kd", "[", "rounds", "]", "[", "i", "]", "result", ".", "append", "(", "(", "self", ".", "Si", "[", "(", "t", "[", "i", "]", ">>", "24", ")", "&", "0xFF", "]", "^", "(", "tt", ">>", "24", ")", ")", "&", "0xFF", ")", "result", ".", "append", "(", "(", "self", ".", "Si", "[", "(", "t", "[", "(", "i", "+", "s1", ")", "%", "4", "]", ">>", "16", ")", "&", "0xFF", "]", "^", "(", "tt", ">>", "16", ")", ")", "&", "0xFF", ")", "result", ".", "append", "(", "(", "self", ".", "Si", "[", "(", "t", "[", "(", "i", "+", "s2", ")", "%", "4", "]", ">>", "8", ")", "&", "0xFF", "]", "^", "(", "tt", ">>", "8", ")", ")", "&", "0xFF", ")", "result", ".", "append", "(", "(", "self", ".", "Si", "[", "t", "[", "(", "i", "+", "s3", ")", "%", "4", "]", "&", "0xFF", "]", "^", "tt", ")", "&", "0xFF", ")", "return", "result" ]
40.454545
23.30303
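A minimal round-trip sketch, assuming the enclosing class is the usual pure-Python AES with a symmetric encrypt() and blocks passed as lists of byte values (both assumptions; neither appears in this excerpt):

key = [0] * 16                        # 128-bit key as a list of ints (assumed format)
aes = AES(key)                        # hypothetical enclosing class
block = aes.encrypt(list(range(16)))  # one 16-byte block
assert aes.decrypt(block) == list(range(16))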
def _query_iterator(result, chunksize, columns, index_col=None, coerce_float=True, parse_dates=None): """Return a generator over the chunked result set.""" while True: data = result.fetchmany(chunksize) if not data: break else: yield _wrap_result(data, columns, index_col=index_col, coerce_float=coerce_float, parse_dates=parse_dates)
[ "def", "_query_iterator", "(", "result", ",", "chunksize", ",", "columns", ",", "index_col", "=", "None", ",", "coerce_float", "=", "True", ",", "parse_dates", "=", "None", ")", ":", "while", "True", ":", "data", "=", "result", ".", "fetchmany", "(", "chunksize", ")", "if", "not", "data", ":", "break", "else", ":", "yield", "_wrap_result", "(", "data", ",", "columns", ",", "index_col", "=", "index_col", ",", "coerce_float", "=", "coerce_float", ",", "parse_dates", "=", "parse_dates", ")" ]
41.5
19.916667
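A consumption sketch; result is assumed to be a DB-API/SQLAlchemy result object supporting fetchmany(), and columns and process are hypothetical stand-ins:

for chunk in _query_iterator(result, chunksize=1000, columns=columns):
    process(chunk)  # each chunk wraps at most 1000 rows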
def get_form(self, request, obj=None, **kwargs): """ Extend the plugin's form with the mixin form ``SharableCascadeForm``. """ Form = type(str('ExtSharableForm'), (SharableCascadeForm, kwargs.pop('form', self.form)), {}) Form.base_fields['shared_glossary'].limit_choices_to = dict(plugin_type=self.__class__.__name__) kwargs.update(form=Form) return super(SharableGlossaryMixin, self).get_form(request, obj, **kwargs)
[ "def", "get_form", "(", "self", ",", "request", ",", "obj", "=", "None", ",", "*", "*", "kwargs", ")", ":", "Form", "=", "type", "(", "str", "(", "'ExtSharableForm'", ")", ",", "(", "SharableCascadeForm", ",", "kwargs", ".", "pop", "(", "'form'", ",", "self", ".", "form", ")", ")", ",", "{", "}", ")", "Form", ".", "base_fields", "[", "'shared_glossary'", "]", ".", "limit_choices_to", "=", "dict", "(", "plugin_type", "=", "self", ".", "__class__", ".", "__name__", ")", "kwargs", ".", "update", "(", "form", "=", "Form", ")", "return", "super", "(", "SharableGlossaryMixin", ",", "self", ")", ".", "get_form", "(", "request", ",", "obj", ",", "*", "*", "kwargs", ")" ]
58.375
27.625
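For readers unfamiliar with three-argument type(): the call above builds the equivalent of the class declared below, where PluginForm is a hypothetical stand-in for kwargs.pop('form', self.form).

class ExtSharableForm(SharableCascadeForm, PluginForm):
    pass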
def canonicalize(ctx, statement, namespace_targets, version, api, config_fn): """Canonicalize statement Target namespaces can be provided in the following manner: bel stmt canonicalize "<BELStmt>" --namespace_targets '{"HGNC": ["EG", "SP"], "CHEMBL": ["CHEBI"]}' The value of namespace_targets must be JSON, embedded in single quotes so that double quotes can be reserved for the dictionary elements. """ if config_fn: config = bel.db.Config.merge_config(ctx.config, override_config_fn=config_fn) else: config = ctx.config # Configuration - will return the first truthy result in list else the default option if namespace_targets: namespace_targets = json.loads(namespace_targets) namespace_targets = utils.first_true( [namespace_targets, config.get("canonical")], None ) api = utils.first_true([api, config.get("api", None)], None) version = utils.first_true([version, config.get("bel_version", None)], None) print("------------------------------") print("BEL version: {}".format(version)) print("API Endpoint: {}".format(api)) print("------------------------------") bo = BEL(version=version, endpoint=api) bo.parse(statement).canonicalize(namespace_targets=namespace_targets) if bo.ast is None: print(bo.original_bel_stmt) print(bo.parse_visualize_error) print(bo.validation_messages) else: print("ORIGINAL ", bo.original_bel_stmt) print("CANONICAL", bo.ast) if bo.validation_messages: print(bo.validation_messages) else: print("No problems found") return
[ "def", "canonicalize", "(", "ctx", ",", "statement", ",", "namespace_targets", ",", "version", ",", "api", ",", "config_fn", ")", ":", "if", "config_fn", ":", "config", "=", "bel", ".", "db", ".", "Config", ".", "merge_config", "(", "ctx", ".", "config", ",", "override_config_fn", "=", "config_fn", ")", "else", ":", "config", "=", "ctx", ".", "config", "# Configuration - will return the first truthy result in list else the default option", "if", "namespace_targets", ":", "namespace_targets", "=", "json", ".", "loads", "(", "namespace_targets", ")", "namespace_targets", "=", "utils", ".", "first_true", "(", "[", "namespace_targets", ",", "config", ".", "get", "(", "\"canonical\"", ")", "]", ",", "None", ")", "api", "=", "utils", ".", "first_true", "(", "[", "api", ",", "config", ".", "get", "(", "\"api\"", ",", "None", ")", "]", ",", "None", ")", "version", "=", "utils", ".", "first_true", "(", "[", "version", ",", "config", ".", "get", "(", "\"bel_version\"", ",", "None", ")", "]", ",", "None", ")", "print", "(", "\"------------------------------\"", ")", "print", "(", "\"BEL version: {}\"", ".", "format", "(", "version", ")", ")", "print", "(", "\"API Endpoint: {}\"", ".", "format", "(", "api", ")", ")", "print", "(", "\"------------------------------\"", ")", "bo", "=", "BEL", "(", "version", "=", "version", ",", "endpoint", "=", "api", ")", "bo", ".", "parse", "(", "statement", ")", ".", "canonicalize", "(", "namespace_targets", "=", "namespace_targets", ")", "if", "bo", ".", "ast", "is", "None", ":", "print", "(", "bo", ".", "original_bel_stmt", ")", "print", "(", "bo", ".", "parse_visualize_error", ")", "print", "(", "bo", ".", "validation_messages", ")", "else", ":", "print", "(", "\"ORIGINAL \"", ",", "bo", ".", "original_bel_stmt", ")", "print", "(", "\"CANONICAL\"", ",", "bo", ".", "ast", ")", "if", "bo", ".", "validation_messages", ":", "print", "(", "bo", ".", "validation_messages", ")", "else", ":", "print", "(", "\"No problems found\"", ")", "return" ]
36.311111
22.444444
def sort(self): """ Sort the families by template name. .. rubric:: Example >>> party = Party(families=[Family(template=Template(name='b')), ... Family(template=Template(name='a'))]) >>> party[0] Family of 0 detections from template b >>> party.sort()[0] Family of 0 detections from template a """ self.families.sort(key=lambda x: x.template.name) return self
[ "def", "sort", "(", "self", ")", ":", "self", ".", "families", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", ".", "template", ".", "name", ")", "return", "self" ]
29.1875
19.3125
def backup_download(self, filename=None): """Download backup file from WebDAV (cloud only).""" if self.deploymentType != 'Cloud': logging.warning( 'This functionality is not available in Server version') return None remote_file = self.backup_progress()['fileName'] local_file = filename or remote_file url = self._options['server'] + '/webdav/backupmanager/' + remote_file try: logging.debug('Writing file to %s' % local_file) with open(local_file, 'wb') as file: try: resp = self._session.get(url, headers=self._options['headers'], stream=True) except Exception: raise JIRAError() if not resp.ok: logging.error("Something went wrong with download: %s" % resp.text) raise JIRAError(resp.text) for block in resp.iter_content(1024): file.write(block) except JIRAError as je: logging.error('Unable to access remote backup file: %s' % je) except IOError as ioe: logging.error(ioe) return None
[ "def", "backup_download", "(", "self", ",", "filename", "=", "None", ")", ":", "if", "self", ".", "deploymentType", "!=", "'Cloud'", ":", "logging", ".", "warning", "(", "'This functionality is not available in Server version'", ")", "return", "None", "remote_file", "=", "self", ".", "backup_progress", "(", ")", "[", "'fileName'", "]", "local_file", "=", "filename", "or", "remote_file", "url", "=", "self", ".", "_options", "[", "'server'", "]", "+", "'/webdav/backupmanager/'", "+", "remote_file", "try", ":", "logging", ".", "debug", "(", "'Writing file to %s'", "%", "local_file", ")", "with", "open", "(", "local_file", ",", "'wb'", ")", "as", "file", ":", "try", ":", "resp", "=", "self", ".", "_session", ".", "get", "(", "url", ",", "headers", "=", "self", ".", "_options", "[", "'headers'", "]", ",", "stream", "=", "True", ")", "except", "Exception", ":", "raise", "JIRAError", "(", ")", "if", "not", "resp", ".", "ok", ":", "logging", ".", "error", "(", "\"Something went wrong with download: %s\"", "%", "resp", ".", "text", ")", "raise", "JIRAError", "(", "resp", ".", "text", ")", "for", "block", "in", "resp", ".", "iter_content", "(", "1024", ")", ":", "file", ".", "write", "(", "block", ")", "except", "JIRAError", "as", "je", ":", "logging", ".", "error", "(", "'Unable to access remote backup file: %s'", "%", "je", ")", "except", "IOError", "as", "ioe", ":", "logging", ".", "error", "(", "ioe", ")", "return", "None" ]
45.653846
16.346154
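A hedged usage sketch (jira is an assumed, already-authenticated client instance on a Cloud deployment):

jira.backup_download()              # keep the remote file name
jira.backup_download('backup.zip')  # or pick a local file name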
def pow4(x, alpha, a, b, c): """pow4 Parameters ---------- x: int alpha: float a: float b: float c: float Returns ------- float c - (a*x+b)**-alpha """ return c - (a*x+b)**-alpha
[ "def", "pow4", "(", "x", ",", "alpha", ",", "a", ",", "b", ",", "c", ")", ":", "return", "c", "-", "(", "a", "*", "x", "+", "b", ")", "**", "-", "alpha" ]
13.176471
23.176471
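Two quick sanity checks: with alpha=1, a=1, b=1, c=2 the curve starts at 1.5 for x=1, and it approaches the asymptote c as x grows.

>>> pow4(1, 1, 1, 1, 2)
1.5
>>> pow4(10**6, 1, 1, 0, 2)
1.999999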
def set_attributes(self, obj, **attributes): """ Set attributes. :param obj: requested object. :param attributes: dictionary of {attribute: value} to set """ attributes_url = '{}/{}/attributes'.format(self.session_url, obj.ref) attributes_list = [{u'name': str(name), u'value': str(value)} for name, value in attributes.items()] self._request(RestMethod.patch, attributes_url, headers={'Content-Type': 'application/json'}, data=json.dumps(attributes_list))
[ "def", "set_attributes", "(", "self", ",", "obj", ",", "*", "*", "attributes", ")", ":", "attributes_url", "=", "'{}/{}/attributes'", ".", "format", "(", "self", ".", "session_url", ",", "obj", ".", "ref", ")", "attributes_list", "=", "[", "{", "u'name'", ":", "str", "(", "name", ")", ",", "u'value'", ":", "str", "(", "value", ")", "}", "for", "name", ",", "value", "in", "attributes", ".", "items", "(", ")", "]", "self", ".", "_request", "(", "RestMethod", ".", "patch", ",", "attributes_url", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", ",", "data", "=", "json", ".", "dumps", "(", "attributes_list", ")", ")" ]
47.818182
26.727273
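A hedged example of the resulting request (client and port are assumed names); note that both attribute names and values are stringified before being sent:

client.set_attributes(port, mode='advanced', speed=10000)
# issues PATCH <session_url>/<port.ref>/attributes with body:
# [{"name": "mode", "value": "advanced"}, {"name": "speed", "value": "10000"}]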
def normalize_name(name, overrides=None): '''Normalize the key name to title case. For example, ``normalize_name('content-id')`` will become ``Content-Id`` Args: name (str): The name to normalize. overrides (set, sequence): A set or sequence containing keys that should be cased to themselves. For example, passing ``{'WARC-Type'}`` will normalize any key named "warc-type" to ``WARC-Type`` instead of the default ``Warc-Type``. Returns: str ''' normalized_name = name.title() if overrides: override_map = dict([(name.title(), name) for name in overrides]) return override_map.get(normalized_name, normalized_name) else: return normalized_name
[ "def", "normalize_name", "(", "name", ",", "overrides", "=", "None", ")", ":", "normalized_name", "=", "name", ".", "title", "(", ")", "if", "overrides", ":", "override_map", "=", "dict", "(", "[", "(", "name", ".", "title", "(", ")", ",", "name", ")", "for", "name", "in", "overrides", "]", ")", "return", "override_map", ".", "get", "(", "normalized_name", ",", "normalized_name", ")", "else", ":", "return", "normalized_name" ]
31.041667
26.958333
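Two quick examples, the second exercising the override path:

>>> normalize_name('content-id')
'Content-Id'
>>> normalize_name('warc-type', overrides={'WARC-Type'})
'WARC-Type'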