Columns (name / type / value range):

    text           string, lengths 89 to 104k
    code_tokens    list
    avg_line_len   float64, 7.91 to 980
    score          float64, 0 to 630
async def build_hardware_controller(
        cls, config: robot_configs.robot_config = None,
        port: str = None,
        loop: asyncio.AbstractEventLoop = None,
        force: bool = False) -> 'API':
    """ Build a hardware controller that will actually talk to hardware.

    This method should not be used outside of a real robot, and on a real
    robot only one true hardware controller may be active at one time.

    :param config: A config to preload. If not specified, load the default.
    :param port: A port to connect to. If not specified, the default port
                 (found by scanning for connected FT232Rs).
    :param loop: An event loop to use. If not specified, use the result of
                 :py:meth:`asyncio.get_event_loop`.
    :param force: If `True`, connect even if a lockfile is present. See
                  :py:meth:`Controller.__init__`.
    """
    if None is Controller:
        raise RuntimeError(
            'The hardware controller may only be instantiated on a robot')
    checked_loop = loop or asyncio.get_event_loop()
    backend = Controller(config, checked_loop, force=force)
    await backend.connect(port)
    return cls(backend, config=config, loop=checked_loop)
[ "async", "def", "build_hardware_controller", "(", "cls", ",", "config", ":", "robot_configs", ".", "robot_config", "=", "None", ",", "port", ":", "str", "=", "None", ",", "loop", ":", "asyncio", ".", "AbstractEventLoop", "=", "None", ",", "force", ":", "bool", "=", "False", ")", "->", "'API'", ":", "if", "None", "is", "Controller", ":", "raise", "RuntimeError", "(", "'The hardware controller may only be instantiated on a robot'", ")", "checked_loop", "=", "loop", "or", "asyncio", ".", "get_event_loop", "(", ")", "backend", "=", "Controller", "(", "config", ",", "checked_loop", ",", "force", "=", "force", ")", "await", "backend", ".", "connect", "(", "port", ")", "return", "cls", "(", "backend", ",", "config", "=", "config", ",", "loop", "=", "checked_loop", ")" ]
avg_line_len: 49.807692, score: 20.769231
def prefer_master(nodes: List[DiscoveredNode]) -> Optional[DiscoveredNode]:
    """Select the master if available, otherwise fall back to a replica."""
    return max(nodes, key=attrgetter("state"))
[ "def", "prefer_master", "(", "nodes", ":", "List", "[", "DiscoveredNode", "]", ")", "->", "Optional", "[", "DiscoveredNode", "]", ":", "return", "max", "(", "nodes", ",", "key", "=", "attrgetter", "(", "\"state\"", ")", ")" ]
avg_line_len: 40.8, score: 14
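prefer_master only behaves as described if DiscoveredNode.state is ordered so the master compares highest (and note that max raises ValueError on an empty list despite the Optional return type). A minimal sketch of that assumption; NodeState and the node type here are hypothetical stand-ins, not the library's definitions:

from enum import IntEnum
from operator import attrgetter
from typing import List, NamedTuple, Optional

class NodeState(IntEnum):
    UNKNOWN = 0
    REPLICA = 1
    MASTER = 2          # highest value, so max() picks the master first

class DiscoveredNode(NamedTuple):
    host: str
    state: NodeState

def prefer_master(nodes: List[DiscoveredNode]) -> Optional[DiscoveredNode]:
    return max(nodes, key=attrgetter("state"))

nodes = [DiscoveredNode("10.0.0.2", NodeState.REPLICA),
         DiscoveredNode("10.0.0.1", NodeState.MASTER)]
assert prefer_master(nodes).host == "10.0.0.1"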
def validate_trail_settings(self, ct, aws_region, trail):
    """Validates logging, SNS and S3 settings for the global trail.

    Has the capability to:
        - start logging for the trail
        - create SNS topics & queues
        - configure or modify an S3 bucket for logging
    """
    self.log.debug('Validating trail {}/{}/{}'.format(
        self.account.account_name,
        aws_region,
        trail['Name']
    ))
    status = ct.get_trail_status(Name=trail['Name'])
    if not status['IsLogging']:
        self.log.warning('Logging is disabled for {}/{}/{}'.format(
            self.account.account_name,
            aws_region,
            trail['Name']
        ))
        self.start_logging(aws_region, trail['Name'])

    if 'SnsTopicName' not in trail or not trail['SnsTopicName']:
        self.log.warning('SNS Notifications not enabled for {}/{}/{}'.format(
            self.account.account_name,
            aws_region,
            trail['Name']
        ))
        self.create_sns_topic(aws_region)
        self.enable_sns_notification(aws_region, trail['Name'])

    if not self.validate_sns_topic_subscription(aws_region):
        self.log.warning(
            'SNS Notification configured but not subscribed for {}/{}/{}'.format(
                self.account.account_name,
                aws_region,
                trail['Name']
            )
        )
        self.subscribe_sns_topic_to_sqs(aws_region)

    if trail['S3BucketName'] != self.bucket_name:
        self.log.warning('CloudTrail is logging to an incorrect bucket for {}/{}/{}'.format(
            self.account.account_name,
            trail['S3BucketName'],
            trail['Name']
        ))
        self.set_s3_bucket(aws_region, trail['Name'], self.bucket_name)

    if not trail.get('S3KeyPrefix') or trail['S3KeyPrefix'] != self.account.account_name:
        self.log.warning('Missing or incorrect S3KeyPrefix for {}/{}/{}'.format(
            self.account.account_name,
            aws_region,
            trail['Name']
        ))
        self.set_s3_prefix(aws_region, trail['Name'])
[ "def", "validate_trail_settings", "(", "self", ",", "ct", ",", "aws_region", ",", "trail", ")", ":", "self", ".", "log", ".", "debug", "(", "'Validating trail {}/{}/{}'", ".", "format", "(", "self", ".", "account", ".", "account_name", ",", "aws_region", ",", "trail", "[", "'Name'", "]", ")", ")", "status", "=", "ct", ".", "get_trail_status", "(", "Name", "=", "trail", "[", "'Name'", "]", ")", "if", "not", "status", "[", "'IsLogging'", "]", ":", "self", ".", "log", ".", "warning", "(", "'Logging is disabled for {}/{}/{}'", ".", "format", "(", "self", ".", "account", ".", "account_name", ",", "aws_region", ",", "trail", "[", "'Name'", "]", ")", ")", "self", ".", "start_logging", "(", "aws_region", ",", "trail", "[", "'Name'", "]", ")", "if", "'SnsTopicName'", "not", "in", "trail", "or", "not", "trail", "[", "'SnsTopicName'", "]", ":", "self", ".", "log", ".", "warning", "(", "'SNS Notifications not enabled for {}/{}/{}'", ".", "format", "(", "self", ".", "account", ".", "account_name", ",", "aws_region", ",", "trail", "[", "'Name'", "]", ")", ")", "self", ".", "create_sns_topic", "(", "aws_region", ")", "self", ".", "enable_sns_notification", "(", "aws_region", ",", "trail", "[", "'Name'", "]", ")", "if", "not", "self", ".", "validate_sns_topic_subscription", "(", "aws_region", ")", ":", "self", ".", "log", ".", "warning", "(", "'SNS Notification configured but not subscribed for {}/{}/{}'", ".", "format", "(", "self", ".", "account", ".", "account_name", ",", "aws_region", ",", "trail", "[", "'Name'", "]", ")", ")", "self", ".", "subscribe_sns_topic_to_sqs", "(", "aws_region", ")", "if", "trail", "[", "'S3BucketName'", "]", "!=", "self", ".", "bucket_name", ":", "self", ".", "log", ".", "warning", "(", "'CloudTrail is logging to an incorrect bucket for {}/{}/{}'", ".", "format", "(", "self", ".", "account", ".", "account_name", ",", "trail", "[", "'S3BucketName'", "]", ",", "trail", "[", "'Name'", "]", ")", ")", "self", ".", "set_s3_bucket", "(", "aws_region", ",", "trail", "[", "'Name'", "]", ",", "self", ".", "bucket_name", ")", "if", "not", "trail", ".", "get", "(", "'S3KeyPrefix'", ")", "or", "trail", "[", "'S3KeyPrefix'", "]", "!=", "self", ".", "account", ".", "account_name", ":", "self", ".", "log", ".", "warning", "(", "'Missing or incorrect S3KeyPrefix for {}/{}/{}'", ".", "format", "(", "self", ".", "account", ".", "account_name", ",", "aws_region", ",", "trail", "[", "'Name'", "]", ")", ")", "self", ".", "set_s3_prefix", "(", "aws_region", ",", "trail", "[", "'Name'", "]", ")" ]
avg_line_len: 38.258621, score: 20.017241
def FileEntryExistsByPathSpec(self, path_spec):
    """Determines if a file entry for a path specification exists.

    Args:
      path_spec (PathSpec): path specification.

    Returns:
      bool: True if the file entry exists.
    """
    # All checks for a correct path spec are done in SQLiteBlobFile.
    # Therefore, attempt to open the path specification and
    # check if errors occurred.
    try:
        file_object = resolver.Resolver.OpenFileObject(
            path_spec, resolver_context=self._resolver_context)
    except (IOError, ValueError, errors.AccessError, errors.PathSpecError):
        return False

    file_object.close()
    return True
[ "def", "FileEntryExistsByPathSpec", "(", "self", ",", "path_spec", ")", ":", "# All checks for correct path spec is done in SQLiteBlobFile.", "# Therefore, attempt to open the path specification and", "# check if errors occurred.", "try", ":", "file_object", "=", "resolver", ".", "Resolver", ".", "OpenFileObject", "(", "path_spec", ",", "resolver_context", "=", "self", ".", "_resolver_context", ")", "except", "(", "IOError", ",", "ValueError", ",", "errors", ".", "AccessError", ",", "errors", ".", "PathSpecError", ")", ":", "return", "False", "file_object", ".", "close", "(", ")", "return", "True" ]
avg_line_len: 31.9, score: 20.65
def get_alexa_rankings(self, domains):
    """Retrieves the most recent Alexa rankings for a set of domains.

    Args:
        domains: list of string domains.
    Returns:
        A dict with the domain as key and the parsed Alexa report as value.
    """
    api_name = 'alexa_rankings'

    (all_responses, domains) = self._bulk_cache_lookup(api_name, domains)
    responses = self._request_reports(domains)

    for domain, response in zip(domains, responses):
        xml_response = self._extract_response_xml(domain, response)
        if self._cache:
            self._cache.cache_value(api_name, domain, response)
        all_responses[domain] = xml_response

    return all_responses
[ "def", "get_alexa_rankings", "(", "self", ",", "domains", ")", ":", "api_name", "=", "'alexa_rankings'", "(", "all_responses", ",", "domains", ")", "=", "self", ".", "_bulk_cache_lookup", "(", "api_name", ",", "domains", ")", "responses", "=", "self", ".", "_request_reports", "(", "domains", ")", "for", "domain", ",", "response", "in", "zip", "(", "domains", ",", "responses", ")", ":", "xml_response", "=", "self", ".", "_extract_response_xml", "(", "domain", ",", "response", ")", "if", "self", ".", "_cache", ":", "self", ".", "_cache", ".", "cache_value", "(", "api_name", ",", "domain", ",", "response", ")", "all_responses", "[", "domain", "]", "=", "xml_response", "return", "all_responses" ]
avg_line_len: 35.8, score: 20.25
def prepare_prop_defs(prop_defs, prop_name, cls_names):
    """
    Examines and adds any missing defs to the prop_defs dictionary for
    use with the RdfPropertyMeta.__prepare__ method

    Args:
    -----
        prop_defs: the definitions from the rdf vocabulary definition
        prop_name: the property name
        cls_names: the name of the associated classes

    Returns:
    --------
        prop_defs
    """

    def get_def(prop_defs, def_fields, default_val=None):
        """ returns the cross-correlated fields for dealing with multiple
        vocabularies

        args:
            prop_defs: the property definition object
            def_fields: list of the mapped field names
            default_val: Default value if none of the fields are found
        """
        rtn_list = []
        for fld in def_fields:
            if prop_defs.get(fld):
                rtn_list += prop_defs.get(fld)
        if not rtn_list and default_val:
            rtn_list.append(default_val)
        elif rtn_list:
            try:
                rtn_list = list(set(rtn_list))
            except TypeError as e:
                # This deals with a domain that required a conjunction of two
                # rdf_Classes
                # pdb.set_trace()
                new_rtn = []
                for item in rtn_list:
                    if isinstance(item, MODULE.rdfclass.RdfClassBase):
                        new_rtn.append(
                            "|".join(merge_rdf_list(item['owl_unionOf'])))
                    elif isinstance(item, list):
                        new_rtn.append("|".join(item))
                    else:
                        new_rtn.append(item)
                rtn_list = list(set(new_rtn))
                new_rtn = []
                for item in rtn_list:
                    if "|" in item:
                        new_rtn.append([Uri(domain)
                                        for domain in item.split("|")])
                    else:
                        new_rtn.append(Uri(item))
                rtn_list = new_rtn
                # pdb.set_trace()
        return rtn_list

    required_def_defaults = {
        Uri('kds_rangeDef'): [{}],
        Uri('rdfs_range'): [Uri("xsd_string")],
        Uri('rdfs_domain'): cls_names,
        Uri('rdfs_label'): [NSM.nouri(prop_name)],
        Uri('kds_formDefault'): [{
            Uri('kds:appliesToClass'): Uri('kdr:AllClasses'),
            Uri('kds:formFieldName'): "emailaddr",
            Uri('kds:formLabelName'): [NSM.nouri(prop_name)],
            Uri('kds:formFieldHelp'): find_values(DESCRIPTION_FIELDS,
                                                  prop_defs,
                                                  None),
            Uri('kds:fieldType'): {
                Uri('rdf:type'): Uri('kdr:TextField')
            }
        }],
        Uri('kds_propertyValidation'): [],
        Uri('kds_propertySecurity'): [],
        Uri('kds_propertyProcessing'): []
    }
    for prop in required_def_defaults:
        if prop not in prop_defs.keys():
            prop_defs[prop] = required_def_defaults[prop]
    prop_defs['rdfs_domain'] = get_def(prop_defs, DOMAIN_FIELDS, cls_names)
    prop_defs['rdfs_range'] = get_def(prop_defs, RANGE_FIELDS,
                                      Uri('xsd_string'))
    return prop_defs
[ "def", "prepare_prop_defs", "(", "prop_defs", ",", "prop_name", ",", "cls_names", ")", ":", "def", "get_def", "(", "prop_defs", ",", "def_fields", ",", "default_val", "=", "None", ")", ":", "\"\"\" returns the cross corelated fields for delealing with mutiple\n vocabularies\n\n args:\n prop_defs: the propertry definition object\n def_fields: list of the mapped field names\n default_val: Default value if none of the fields are found\n \"\"\"", "rtn_list", "=", "[", "]", "for", "fld", "in", "def_fields", ":", "if", "prop_defs", ".", "get", "(", "fld", ")", ":", "rtn_list", "+=", "prop_defs", ".", "get", "(", "fld", ")", "if", "not", "rtn_list", "and", "default_val", ":", "rtn_list", ".", "append", "(", "default_val", ")", "elif", "rtn_list", ":", "try", ":", "rtn_list", "=", "list", "(", "set", "(", "rtn_list", ")", ")", "except", "TypeError", "as", "e", ":", "# This deals with a domain that required a conjunction of two", "# rdf_Classes", "# pdb.set_trace()", "new_rtn", "=", "[", "]", "for", "item", "in", "rtn_list", ":", "if", "isinstance", "(", "item", ",", "MODULE", ".", "rdfclass", ".", "RdfClassBase", ")", ":", "new_rtn", ".", "append", "(", "\"|\"", ".", "join", "(", "merge_rdf_list", "(", "item", "[", "'owl_unionOf'", "]", ")", ")", ")", "elif", "isinstance", "(", "item", ",", "list", ")", ":", "new_rtn", ".", "append", "(", "\"|\"", ".", "join", "(", "item", ")", ")", "else", ":", "new_rtn", ".", "append", "(", "item", ")", "rtn_list", "=", "list", "(", "set", "(", "new_rtn", ")", ")", "new_rtn", "=", "[", "]", "for", "item", "in", "rtn_list", ":", "if", "\"|\"", "in", "item", ":", "new_rtn", ".", "append", "(", "[", "Uri", "(", "domain", ")", "for", "domain", "in", "item", ".", "split", "(", "\"|\"", ")", "]", ")", "else", ":", "new_rtn", ".", "append", "(", "Uri", "(", "item", ")", ")", "rtn_list", "=", "new_rtn", "# pdb.set_trace()", "return", "rtn_list", "required_def_defaults", "=", "{", "Uri", "(", "'kds_rangeDef'", ")", ":", "[", "{", "}", "]", ",", "Uri", "(", "'rdfs_range'", ")", ":", "[", "Uri", "(", "\"xsd_string\"", ")", "]", ",", "Uri", "(", "'rdfs_domain'", ")", ":", "cls_names", ",", "Uri", "(", "'rdfs_label'", ")", ":", "[", "NSM", ".", "nouri", "(", "prop_name", ")", "]", ",", "Uri", "(", "'kds_formDefault'", ")", ":", "[", "{", "Uri", "(", "'kds:appliesToClass'", ")", ":", "Uri", "(", "'kdr:AllClasses'", ")", ",", "Uri", "(", "'kds:formFieldName'", ")", ":", "\"emailaddr\"", ",", "Uri", "(", "'kds:formLabelName'", ")", ":", "[", "NSM", ".", "nouri", "(", "prop_name", ")", "]", ",", "Uri", "(", "'kds:formFieldHelp'", ")", ":", "find_values", "(", "DESCRIPTION_FIELDS", ",", "prop_defs", ",", "None", ")", ",", "Uri", "(", "'kds:fieldType'", ")", ":", "{", "Uri", "(", "'rdf:type'", ")", ":", "Uri", "(", "'kdr:TextField'", ")", "}", "}", "]", ",", "Uri", "(", "'kds_propertyValidation'", ")", ":", "[", "]", ",", "Uri", "(", "'kds_propertySecurity'", ")", ":", "[", "]", ",", "Uri", "(", "'kds_propertyProcessing'", ")", ":", "[", "]", "}", "for", "prop", "in", "required_def_defaults", ":", "if", "prop", "not", "in", "prop_defs", ".", "keys", "(", ")", ":", "prop_defs", "[", "prop", "]", "=", "required_def_defaults", "[", "prop", "]", "prop_defs", "[", "'rdfs_domain'", "]", "=", "get_def", "(", "prop_defs", ",", "DOMAIN_FIELDS", ",", "cls_names", ")", "prop_defs", "[", "'rdfs_range'", "]", "=", "get_def", "(", "prop_defs", ",", "RANGE_FIELDS", ",", "Uri", "(", "'xsd_string'", ")", ")", "return", "prop_defs" ]
avg_line_len: 37.505747, score: 15.850575
def Crowl_Louvar_LFL(atoms):
    r'''Calculates lower flammability limit, using the Crowl-Louvar [1]_
    correlation. Uses molecular formula only.

    The lower flammability limit of a gas in air is:

    .. math::
        C_mH_xO_y + zO_2 \to mCO_2 + \frac{x}{2}H_2O

        \text{LFL} = \frac{0.55}{4.76m + 1.19x - 2.38y + 1}

    Parameters
    ----------
    atoms : dict
        Dictionary of atoms and atom counts

    Returns
    -------
    LFL : float
        Lower flammability limit, mole fraction

    Notes
    -----
    Coefficient of 0.55 taken from [2]_

    Examples
    --------
    Hexane, example from [1]_, lit. 1.2 %

    >>> Crowl_Louvar_LFL({'H': 14, 'C': 6})
    0.011899610558199915

    References
    ----------
    .. [1] Crowl, Daniel A., and Joseph F. Louvar. Chemical Process Safety:
       Fundamentals with Applications. 2E. Upper Saddle River, N.J:
       Prentice Hall, 2001.
    .. [2] Jones, G. W. "Inflammation Limits and Their Practical Application
       in Hazardous Industrial Operations." Chemical Reviews 22, no. 1
       (February 1, 1938): 1-26. doi:10.1021/cr60071a001
    '''
    nC, nH, nO = 0, 0, 0
    if 'C' in atoms and atoms['C']:
        nC = atoms['C']
    else:
        return None
    if 'H' in atoms:
        nH = atoms['H']
    if 'O' in atoms:
        nO = atoms['O']
    return 0.55/(4.76*nC + 1.19*nH - 2.38*nO + 1.)
[ "def", "Crowl_Louvar_LFL", "(", "atoms", ")", ":", "nC", ",", "nH", ",", "nO", "=", "0", ",", "0", ",", "0", "if", "'C'", "in", "atoms", "and", "atoms", "[", "'C'", "]", ":", "nC", "=", "atoms", "[", "'C'", "]", "else", ":", "return", "None", "if", "'H'", "in", "atoms", ":", "nH", "=", "atoms", "[", "'H'", "]", "if", "'O'", "in", "atoms", ":", "nO", "=", "atoms", "[", "'O'", "]", "return", "0.55", "/", "(", "4.76", "*", "nC", "+", "1.19", "*", "nH", "-", "2.38", "*", "nO", "+", "1.", ")" ]
avg_line_len: 26.137255, score: 23.588235
def compare(self, buf, offset=0, length=1, ignore=""):
    """Compare buffer"""
    for i in range(offset, offset + length):
        if isinstance(self.m_types, (type(Union), type(Structure))):
            if compare(self.m_buf[i], buf[i], ignore=ignore):
                return 1
        elif self.m_buf[i] != buf[i]:
            return 1
    return 0
[ "def", "compare", "(", "self", ",", "buf", ",", "offset", "=", "0", ",", "length", "=", "1", ",", "ignore", "=", "\"\"", ")", ":", "for", "i", "in", "range", "(", "offset", ",", "offset", "+", "length", ")", ":", "if", "isinstance", "(", "self", ".", "m_types", ",", "(", "type", "(", "Union", ")", ",", "type", "(", "Structure", ")", ")", ")", ":", "if", "compare", "(", "self", ".", "m_buf", "[", "i", "]", ",", "buf", "[", "i", "]", ",", "ignore", "=", "ignore", ")", ":", "return", "1", "elif", "self", ".", "m_buf", "[", "i", "]", "!=", "buf", "[", "i", "]", ":", "return", "1", "return", "0" ]
avg_line_len: 34.181818, score: 19.272727
def get_election_votes(self, election):
    """Get all votes for this candidate in an election."""
    candidate_election = CandidateElection.objects.get(
        candidate=self, election=election
    )
    return candidate_election.votes.all()
[ "def", "get_election_votes", "(", "self", ",", "election", ")", ":", "candidate_election", "=", "CandidateElection", ".", "objects", ".", "get", "(", "candidate", "=", "self", ",", "election", "=", "election", ")", "return", "candidate_election", ".", "votes", ".", "all", "(", ")" ]
avg_line_len: 37, score: 14.428571
def to_dict(self) -> dict:
    '''vote info as dict'''
    return {
        "version": self.version,
        "description": self.description,
        "count_mode": self.count_mode,
        "start_block": self.start_block,
        "end_block": self.end_block,
        "choices": self.choices,
        "vote_metainfo": self.vote_metainfo
    }
[ "def", "to_dict", "(", "self", ")", "->", "dict", ":", "return", "{", "\"version\"", ":", "self", ".", "version", ",", "\"description\"", ":", "self", ".", "description", ",", "\"count_mode\"", ":", "self", ".", "count_mode", ",", "\"start_block\"", ":", "self", ".", "start_block", ",", "\"end_block\"", ":", "self", ".", "end_block", ",", "\"choices\"", ":", "self", ".", "choices", ",", "\"vote_metainfo\"", ":", "self", ".", "vote_metainfo", "}" ]
avg_line_len: 30.916667, score: 11.916667
def onConnect(self, client, userdata, flags, rc):
    """!
    The callback for when the client receives a CONNACK response from the server.

    @param client
    @param userdata
    @param flags
    @param rc
    """
    for sub in self.subsciption:
        (result, mid) = self.client.subscribe(sub)
[ "def", "onConnect", "(", "self", ",", "client", ",", "userdata", ",", "flags", ",", "rc", ")", ":", "for", "sub", "in", "self", ".", "subsciption", ":", "(", "result", ",", "mid", ")", "=", "self", ".", "client", ".", "subscribe", "(", "sub", ")" ]
avg_line_len: 29.818182, score: 17.363636
def memoize(fun):
    """A decorator for memoizing functions.

    Only works on functions that take simple arguments; calls with list-like
    or dict-like arguments will not be memoized, and this function will raise
    a TypeError.
    """
    @funcutils.wraps(fun)
    def wrapper(*args, **kwargs):
        do_memoization = sportsref.get_option('memoize')
        if not do_memoization:
            return fun(*args, **kwargs)
        hash_args = tuple(args)
        hash_kwargs = frozenset(sorted(kwargs.items()))
        key = (hash_args, hash_kwargs)

        def _copy(v):
            if isinstance(v, pq):
                return v.clone()
            else:
                return copy.deepcopy(v)

        try:
            ret = _copy(cache[key])
            return ret
        except KeyError:
            cache[key] = fun(*args, **kwargs)
            ret = _copy(cache[key])
            return ret
        except TypeError:
            print('memoization type error in function {} for arguments {}'
                  .format(fun.__name__, key))
            raise

    cache = {}
    return wrapper
[ "def", "memoize", "(", "fun", ")", ":", "@", "funcutils", ".", "wraps", "(", "fun", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "do_memoization", "=", "sportsref", ".", "get_option", "(", "'memoize'", ")", "if", "not", "do_memoization", ":", "return", "fun", "(", "*", "args", ",", "*", "*", "kwargs", ")", "hash_args", "=", "tuple", "(", "args", ")", "hash_kwargs", "=", "frozenset", "(", "sorted", "(", "kwargs", ".", "items", "(", ")", ")", ")", "key", "=", "(", "hash_args", ",", "hash_kwargs", ")", "def", "_copy", "(", "v", ")", ":", "if", "isinstance", "(", "v", ",", "pq", ")", ":", "return", "v", ".", "clone", "(", ")", "else", ":", "return", "copy", ".", "deepcopy", "(", "v", ")", "try", ":", "ret", "=", "_copy", "(", "cache", "[", "key", "]", ")", "return", "ret", "except", "KeyError", ":", "cache", "[", "key", "]", "=", "fun", "(", "*", "args", ",", "*", "*", "kwargs", ")", "ret", "=", "_copy", "(", "cache", "[", "key", "]", ")", "return", "ret", "except", "TypeError", ":", "print", "(", "'memoization type error in function {} for arguments {}'", ".", "format", "(", "fun", ".", "__name__", ",", "key", ")", ")", "raise", "cache", "=", "{", "}", "return", "wrapper" ]
avg_line_len: 28.5, score: 18.447368
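A stand-alone sketch of the cache-key scheme the wrapper above uses (positional args as a tuple, keyword args as a frozenset of sorted items); the pyquery cloning and the sportsref option check are dropped, so this is an illustration rather than the library's implementation:

import copy

def memoize_simple(fun):
    cache = {}
    def wrapper(*args, **kwargs):
        try:
            # kwargs order does not matter: f(a=1, b=2) and f(b=2, a=1) share a key
            key = (tuple(args), frozenset(sorted(kwargs.items())))
            return copy.deepcopy(cache[key])
        except KeyError:
            cache[key] = fun(*args, **kwargs)
            return copy.deepcopy(cache[key])
        except TypeError:
            # unhashable argument (e.g. a list value) - run uncached
            return fun(*args, **kwargs)
    return wrapper

calls = []

@memoize_simple
def add(a, b=0):
    calls.append((a, b))
    return a + b

assert add(a=1, b=2) == add(b=2, a=1) == 3
assert len(calls) == 1  # the second call was served from the cache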
def sign(self, message):
    """
    >>> authlen = OmapiHMACMD5Authenticator.authlen
    >>> len(OmapiHMACMD5Authenticator(b"foo", 16*b"x").sign(b"baz")) == authlen
    True

    @type message: bytes
    @rtype: bytes
    @returns: a signature of length self.authlen
    """
    return hmac.HMAC(self.key, message, digestmod=hashlib.md5).digest()
[ "def", "sign", "(", "self", ",", "message", ")", ":", "return", "hmac", ".", "HMAC", "(", "self", ".", "key", ",", "message", ",", "digestmod", "=", "hashlib", ".", "md5", ")", ".", "digest", "(", ")" ]
avg_line_len: 28.909091, score: 19.454545
def split_scoped_hparams(scopes, merged_hparams):
    """Split single HParams with scoped keys into multiple."""
    split_values = {scope: {} for scope in scopes}
    merged_values = merged_hparams.values()
    for scoped_key, value in six.iteritems(merged_values):
        scope = scoped_key.split(".")[0]
        key = scoped_key[len(scope) + 1:]
        split_values[scope][key] = value

    return [
        hparam.HParams(**split_values[scope])
        for scope in scopes
    ]
[ "def", "split_scoped_hparams", "(", "scopes", ",", "merged_hparams", ")", ":", "split_values", "=", "{", "scope", ":", "{", "}", "for", "scope", "in", "scopes", "}", "merged_values", "=", "merged_hparams", ".", "values", "(", ")", "for", "scoped_key", ",", "value", "in", "six", ".", "iteritems", "(", "merged_values", ")", ":", "scope", "=", "scoped_key", ".", "split", "(", "\".\"", ")", "[", "0", "]", "key", "=", "scoped_key", "[", "len", "(", "scope", ")", "+", "1", ":", "]", "split_values", "[", "scope", "]", "[", "key", "]", "=", "value", "return", "[", "hparam", ".", "HParams", "(", "*", "*", "split_values", "[", "scope", "]", ")", "for", "scope", "in", "scopes", "]" ]
avg_line_len: 36.583333, score: 14.583333
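The scope-splitting step is easiest to see with plain dicts; this sketch substitutes builtins for six and hparam.HParams, on which the real function depends:

def split_scoped(scopes, merged_values):
    # "scope.key" entries are grouped by the text before the first dot
    split_values = {scope: {} for scope in scopes}
    for scoped_key, value in merged_values.items():
        scope = scoped_key.split(".")[0]
        key = scoped_key[len(scope) + 1:]
        split_values[scope][key] = value
    return [split_values[scope] for scope in scopes]

merged = {"model.hidden_size": 512, "model.dropout": 0.1, "optim.lr": 1e-3}
assert split_scoped(["model", "optim"], merged) == [
    {"hidden_size": 512, "dropout": 0.1},
    {"lr": 1e-3},
]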
def SSTORE(self, offset, value):
    """Save word to storage"""
    storage_address = self.address
    self._publish('will_evm_write_storage', storage_address, offset, value)
    #refund = Operators.ITEBV(256,
    #                         previous_value != 0,
    #                         Operators.ITEBV(256, value != 0, 0, GSTORAGEREFUND),
    #                         0)

    if istainted(self.pc):
        for taint in get_taints(self.pc):
            value = taint_with(value, taint)
    self.world.set_storage_data(storage_address, offset, value)
    self._publish('did_evm_write_storage', storage_address, offset, value)
[ "def", "SSTORE", "(", "self", ",", "offset", ",", "value", ")", ":", "storage_address", "=", "self", ".", "address", "self", ".", "_publish", "(", "'will_evm_write_storage'", ",", "storage_address", ",", "offset", ",", "value", ")", "#refund = Operators.ITEBV(256,", "# previous_value != 0,", "# Operators.ITEBV(256, value != 0, 0, GSTORAGEREFUND),", "# 0)", "if", "istainted", "(", "self", ".", "pc", ")", ":", "for", "taint", "in", "get_taints", "(", "self", ".", "pc", ")", ":", "value", "=", "taint_with", "(", "value", ",", "taint", ")", "self", ".", "world", ".", "set_storage_data", "(", "storage_address", ",", "offset", ",", "value", ")", "self", ".", "_publish", "(", "'did_evm_write_storage'", ",", "storage_address", ",", "offset", ",", "value", ")" ]
avg_line_len: 47.5, score: 17.357143
def delete(self):
    "deletes the document from the database"
    if self.URL is None:
        raise DeletionError("Can't delete a document that was not saved")
    r = self.connection.session.delete(self.URL)
    data = r.json()
    if (r.status_code != 200 and r.status_code != 202) or 'error' in data:
        raise DeletionError(data['errorMessage'], data)
    self.reset(self.collection)
    self.modified = True
[ "def", "delete", "(", "self", ")", ":", "if", "self", ".", "URL", "is", "None", ":", "raise", "DeletionError", "(", "\"Can't delete a document that was not saved\"", ")", "r", "=", "self", ".", "connection", ".", "session", ".", "delete", "(", "self", ".", "URL", ")", "data", "=", "r", ".", "json", "(", ")", "if", "(", "r", ".", "status_code", "!=", "200", "and", "r", ".", "status_code", "!=", "202", ")", "or", "'error'", "in", "data", ":", "raise", "DeletionError", "(", "data", "[", "'errorMessage'", "]", ",", "data", ")", "self", ".", "reset", "(", "self", ".", "collection", ")", "self", ".", "modified", "=", "True" ]
avg_line_len: 37.333333, score: 21.833333
def to_internal_value(self, data):
    """
    List of dicts of native values <- List of dicts of primitive datatypes.
    """
    if html.is_html_input(data):
        data = html.parse_html_list(data)

    if isinstance(data, type('')) or isinstance(data, collections.Mapping) or not hasattr(data, '__iter__'):
        self.fail('not_a_list', input_type=type(data).__name__)

    if not self.allow_empty and len(data) == 0:
        self.fail('empty')

    return [self.child.run_validation(item) for item in data]
[ "def", "to_internal_value", "(", "self", ",", "data", ")", ":", "if", "html", ".", "is_html_input", "(", "data", ")", ":", "data", "=", "html", ".", "parse_html_list", "(", "data", ")", "if", "isinstance", "(", "data", ",", "type", "(", "''", ")", ")", "or", "isinstance", "(", "data", ",", "collections", ".", "Mapping", ")", "or", "not", "hasattr", "(", "data", ",", "'__iter__'", ")", ":", "self", ".", "fail", "(", "'not_a_list'", ",", "input_type", "=", "type", "(", "data", ")", ".", "__name__", ")", "if", "not", "self", ".", "allow_empty", "and", "len", "(", "data", ")", "==", "0", ":", "self", ".", "fail", "(", "'empty'", ")", "return", "[", "self", ".", "child", ".", "run_validation", "(", "item", ")", "for", "item", "in", "data", "]" ]
avg_line_len: 49.181818, score: 18.090909
def __ConstructRelativePath(self, method_config, request, relative_path=None):
    """Determine the relative path for request."""
    python_param_names = util.MapParamNames(
        method_config.path_params, type(request))
    params = dict([(param, getattr(request, param, None))
                   for param in python_param_names])
    params = util.MapRequestParams(params, type(request))
    return util.ExpandRelativePath(method_config, params,
                                   relative_path=relative_path)
[ "def", "__ConstructRelativePath", "(", "self", ",", "method_config", ",", "request", ",", "relative_path", "=", "None", ")", ":", "python_param_names", "=", "util", ".", "MapParamNames", "(", "method_config", ".", "path_params", ",", "type", "(", "request", ")", ")", "params", "=", "dict", "(", "[", "(", "param", ",", "getattr", "(", "request", ",", "param", ",", "None", ")", ")", "for", "param", "in", "python_param_names", "]", ")", "params", "=", "util", ".", "MapRequestParams", "(", "params", ",", "type", "(", "request", ")", ")", "return", "util", ".", "ExpandRelativePath", "(", "method_config", ",", "params", ",", "relative_path", "=", "relative_path", ")" ]
avg_line_len: 57, score: 15.6
def ensure_workspace(self, name, layout, workspace_id):
    """Looks for a workspace with workspace_id.

    If none is found, create a new one, add it, and change to it.
    """
    workspace = next((workspace for workspace in self.document_model.workspaces
                      if workspace.workspace_id == workspace_id), None)
    if not workspace:
        workspace = self.new_workspace(name=name, layout=layout,
                                       workspace_id=workspace_id)
    self._change_workspace(workspace)
[ "def", "ensure_workspace", "(", "self", ",", "name", ",", "layout", ",", "workspace_id", ")", ":", "workspace", "=", "next", "(", "(", "workspace", "for", "workspace", "in", "self", ".", "document_model", ".", "workspaces", "if", "workspace", ".", "workspace_id", "==", "workspace_id", ")", ",", "None", ")", "if", "not", "workspace", ":", "workspace", "=", "self", ".", "new_workspace", "(", "name", "=", "name", ",", "layout", "=", "layout", ",", "workspace_id", "=", "workspace_id", ")", "self", ".", "_change_workspace", "(", "workspace", ")" ]
avg_line_len: 53.333333, score: 27.555556
def set_meta(mcs, bases, attr):
    """
    Get all of the ``Meta`` classes from bases and combine them with this
    class.

    Pops or creates ``Meta`` from attributes, combines all bases, adds
    ``_meta`` to attributes with all meta

    :param bases: bases of this class
    :param attr: class attributes
    :return: attributes with ``Meta`` class from combined parents
    """
    # pop the meta class from the attributes
    meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {}))
    # get a list of the meta public class attributes
    meta_attrs = get_public_attributes(meta)
    # check all bases for meta
    for base in bases:
        base_meta = getattr(base, mcs._meta_cls, None)
        # skip if base has no meta
        if base_meta is None:
            continue
        # loop over base meta
        for a in get_public_attributes(base_meta, as_list=False):
            # skip if already in meta
            if a in meta_attrs:
                continue
            # copy meta-option attribute from base
            setattr(meta, a, getattr(base_meta, a))
    attr[mcs._meta_attr] = meta  # set _meta combined from bases
    return attr
[ "def", "set_meta", "(", "mcs", ",", "bases", ",", "attr", ")", ":", "# pop the meta class from the attributes", "meta", "=", "attr", ".", "pop", "(", "mcs", ".", "_meta_cls", ",", "types", ".", "ClassType", "(", "mcs", ".", "_meta_cls", ",", "(", ")", ",", "{", "}", ")", ")", "# get a list of the meta public class attributes", "meta_attrs", "=", "get_public_attributes", "(", "meta", ")", "# check all bases for meta", "for", "base", "in", "bases", ":", "base_meta", "=", "getattr", "(", "base", ",", "mcs", ".", "_meta_cls", ",", "None", ")", "# skip if base has no meta", "if", "base_meta", "is", "None", ":", "continue", "# loop over base meta", "for", "a", "in", "get_public_attributes", "(", "base_meta", ",", "as_list", "=", "False", ")", ":", "# skip if already in meta", "if", "a", "in", "meta_attrs", ":", "continue", "# copy meta-option attribute from base", "setattr", "(", "meta", ",", "a", ",", "getattr", "(", "base_meta", ",", "a", ")", ")", "attr", "[", "mcs", ".", "_meta_attr", "]", "=", "meta", "# set _meta combined from bases", "return", "attr" ]
avg_line_len: 40.483871, score: 15.774194
def qindex2index(index):
    """ from a QIndex (row/column coordinate system), get the buffer index of the byte """
    r = index.row()
    c = index.column()
    if c > 0x10:
        return (0x10 * r) + c - 0x11
    else:
        return (0x10 * r) + c
[ "def", "qindex2index", "(", "index", ")", ":", "r", "=", "index", ".", "row", "(", ")", "c", "=", "index", ".", "column", "(", ")", "if", "c", ">", "0x10", ":", "return", "(", "0x10", "*", "r", ")", "+", "c", "-", "0x11", "else", ":", "return", "(", "0x10", "*", "r", ")", "+", "c" ]
avg_line_len: 34.125, score: 12.625
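A worked check of the mapping above, under the assumption (inferred from the offsets) that the view has a 16-byte hex pane in columns 0x00-0x0f, a divider at column 0x10, and an ASCII pane from column 0x11 onward; FakeIndex is a stand-in for the Qt model index:

class FakeIndex:
    def __init__(self, row, column):
        self._row, self._column = row, column
    def row(self):
        return self._row
    def column(self):
        return self._column

def qindex2index(index):
    r, c = index.row(), index.column()
    if c > 0x10:
        return (0x10 * r) + c - 0x11   # ASCII pane: shift past hex pane + divider
    return (0x10 * r) + c              # hex pane: straight row-major mapping

assert qindex2index(FakeIndex(2, 0x03)) == 0x23   # hex pane, byte 0x23
assert qindex2index(FakeIndex(2, 0x14)) == 0x23   # same byte via the ASCII pane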
def iMath_propagate_labels_through_mask(image, labels, stopping_value=100,
                                        propagation_method=0):
    """
    >>> import ants
    >>> wms = ants.image_read('~/desktop/wms.nii.gz')
    >>> thal = ants.image_read('~/desktop/thal.nii.gz')
    >>> img2 = ants.iMath_propagate_labels_through_mask(wms, thal, 500, 0)
    """
    return iMath(image, 'PropagateLabelsThroughMask', labels,
                 stopping_value, propagation_method)
[ "def", "iMath_propagate_labels_through_mask", "(", "image", ",", "labels", ",", "stopping_value", "=", "100", ",", "propagation_method", "=", "0", ")", ":", "return", "iMath", "(", "image", ",", "'PropagateLabelsThroughMask'", ",", "labels", ",", "stopping_value", ",", "propagation_method", ")" ]
avg_line_len: 51.125, score: 24.625
def substitute_namespace_into_graph(self, graph):
    """ Creates a graph from the local namespace of the code (to be used
    after the execution of the code)

    :param graph: The graph to use as a recipient of the namespace
    :return: the updated graph
    """
    for key, value in self.namespace.items():
        try:
            nodes = graph.vs.select(name=key)
            for node in nodes:
                for k, v in value.items():
                    node[k] = v
        except:
            pass
        try:
            nodes = graph.es.select(name=key)
            for node in nodes:
                for k, v in value.items():
                    node[k] = v
        except:
            pass
    return graph
[ "def", "substitute_namespace_into_graph", "(", "self", ",", "graph", ")", ":", "for", "key", ",", "value", "in", "self", ".", "namespace", ".", "items", "(", ")", ":", "try", ":", "nodes", "=", "graph", ".", "vs", ".", "select", "(", "name", "=", "key", ")", "for", "node", "in", "nodes", ":", "for", "k", ",", "v", "in", "value", ".", "items", "(", ")", ":", "node", "[", "k", "]", "=", "v", "except", ":", "pass", "try", ":", "nodes", "=", "graph", ".", "es", ".", "select", "(", "name", "=", "key", ")", "for", "node", "in", "nodes", ":", "for", "k", ",", "v", "in", "value", ".", "items", "(", ")", ":", "node", "[", "k", "]", "=", "v", "except", ":", "pass", "return", "graph" ]
avg_line_len: 34.217391, score: 15.695652
def grab_server(self, onerror=None):
    """Disable processing of requests on all other client connections
    until the server is ungrabbed.

    Server grabbing should be avoided as much as possible."""
    request.GrabServer(display=self.display, onerror=onerror)
[ "def", "grab_server", "(", "self", ",", "onerror", "=", "None", ")", ":", "request", ".", "GrabServer", "(", "display", "=", "self", ".", "display", ",", "onerror", "=", "onerror", ")" ]
avg_line_len: 51.5, score: 8.166667
def _build_static_table_mapping():
    """
    Build static table mapping from header name to a tuple with the following
    structure: (<minimal index of header>, <mapping from header value to its
    index>).

    static_table_mapping is used for hash searching.
    """
    static_table_mapping = {}
    for index, (name, value) in enumerate(CocaineHeaders.STATIC_TABLE, 1):
        header_name_search_result = static_table_mapping.setdefault(name, (index, {}))
        header_name_search_result[1][value] = index
    return static_table_mapping
[ "def", "_build_static_table_mapping", "(", ")", ":", "static_table_mapping", "=", "{", "}", "for", "index", ",", "(", "name", ",", "value", ")", "in", "enumerate", "(", "CocaineHeaders", ".", "STATIC_TABLE", ",", "1", ")", ":", "header_name_search_result", "=", "static_table_mapping", ".", "setdefault", "(", "name", ",", "(", "index", ",", "{", "}", ")", ")", "header_name_search_result", "[", "1", "]", "[", "value", "]", "=", "index", "return", "static_table_mapping" ]
avg_line_len: 43.166667, score: 19.666667
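A tiny illustration of the structure this builds, with a made-up two-entry table in place of CocaineHeaders.STATIC_TABLE:

STATIC_TABLE = [(":method", "GET"), (":method", "POST")]

mapping = {}
for index, (name, value) in enumerate(STATIC_TABLE, 1):
    entry = mapping.setdefault(name, (index, {}))
    entry[1][value] = index

# name -> (first index for that name, {value -> exact index})
assert mapping == {":method": (1, {"GET": 1, "POST": 2})}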
def process_event(self, event, client, args, force_dispatch=False):
    """Process an incoming event.

    Offers it to each module according to self.module_ordering, continuing
    to the next unless the module inhibits propagation.

    Returns True if a module inhibited propagation, otherwise False.
    """
    if not self.running:
        _log.debug("Ignoring '%s' event - controller not running.", event)
        return

    # We keep a copy of the state of loaded modules before this event,
    # and restore it when we're done. This lets us handle events that
    # result in other events being dispatched in a graceful manner.
    old_loaded = self.loaded_on_this_event
    self.loaded_on_this_event = set(old_loaded or []) if not force_dispatch else set()

    try:
        _log.debug("Controller is dispatching '%s' event", event)
        for module_name in self.module_ordering:
            if module_name in self.loaded_on_this_event and not force_dispatch:
                _log.debug("Not dispatching %s to '%s' because it was just "
                           "loaded (%r).", event, module_name,
                           self.loaded_on_this_event)
                continue
            module = self.loaded_modules[module_name]
            if module.handle_event(event, client, args):
                return True
    finally:
        self.loaded_on_this_event = old_loaded
[ "def", "process_event", "(", "self", ",", "event", ",", "client", ",", "args", ",", "force_dispatch", "=", "False", ")", ":", "if", "not", "self", ".", "running", ":", "_log", ".", "debug", "(", "\"Ignoring '%s' event - controller not running.\"", ",", "event", ")", "return", "# We keep a copy of the state of loaded modules before this event,", "# and restore it when we're done. This lets us handle events that", "# result in other events being dispatched in a graceful manner.", "old_loaded", "=", "self", ".", "loaded_on_this_event", "self", ".", "loaded_on_this_event", "=", "set", "(", "old_loaded", "or", "[", "]", ")", "if", "not", "force_dispatch", "else", "set", "(", ")", "try", ":", "_log", ".", "debug", "(", "\"Controller is dispatching '%s' event\"", ",", "event", ")", "for", "module_name", "in", "self", ".", "module_ordering", ":", "if", "module_name", "in", "self", ".", "loaded_on_this_event", "and", "not", "force_dispatch", ":", "_log", ".", "debug", "(", "\"Not dispatching %s to '%s' because it was just \"", "\"loaded (%r).\"", ",", "event", ",", "module_name", ",", "self", ".", "loaded_on_this_event", ")", "continue", "module", "=", "self", ".", "loaded_modules", "[", "module_name", "]", "if", "module", ".", "handle_event", "(", "event", ",", "client", ",", "args", ")", ":", "return", "True", "finally", ":", "self", ".", "loaded_on_this_event", "=", "old_loaded" ]
avg_line_len: 47.193548, score: 25.451613
def add_property(attribute, type):
    """Add a property to a class
    """
    def decorator(cls):
        """Decorator
        """
        private = "_" + attribute

        def getAttr(self):
            """Property getter
            """
            if getattr(self, private) is None:
                setattr(self, private, type())
            return getattr(self, private)

        def setAttr(self, value):
            """Property setter
            """
            setattr(self, private, value)

        setattr(cls, attribute, property(getAttr, setAttr))
        setattr(cls, private, None)
        return cls

    return decorator
[ "def", "add_property", "(", "attribute", ",", "type", ")", ":", "def", "decorator", "(", "cls", ")", ":", "\"\"\"Decorator\n \"\"\"", "private", "=", "\"_\"", "+", "attribute", "def", "getAttr", "(", "self", ")", ":", "\"\"\"Property getter\n \"\"\"", "if", "getattr", "(", "self", ",", "private", ")", "is", "None", ":", "setattr", "(", "self", ",", "private", ",", "type", "(", ")", ")", "return", "getattr", "(", "self", ",", "private", ")", "def", "setAttr", "(", "self", ",", "value", ")", ":", "\"\"\"Property setter\n \"\"\"", "setattr", "(", "self", ",", "private", ",", "value", ")", "setattr", "(", "cls", ",", "attribute", ",", "property", "(", "getAttr", ",", "setAttr", ")", ")", "setattr", "(", "cls", ",", "private", ",", "None", ")", "return", "cls", "return", "decorator" ]
avg_line_len: 24.6, score: 11.64
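A usage sketch for the decorator above (Widget is a made-up example class): each named attribute becomes a lazily initialized property backed by a private "_<name>" slot.

@add_property('items', list)
@add_property('lookup', dict)
class Widget(object):
    pass

w = Widget()
w.items.append(1)        # first access creates the backing list
w.lookup['a'] = 2
assert w._items == [1] and w._lookup == {'a': 2}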
def remove_perm(self, subj_str, perm_str):
    """Remove permission from a subject.

    Args:
      subj_str : str
        Subject for which to remove permission(s)

      perm_str : str
        Permission to remove. Implicitly removes all higher permissions. E.g.,
        ``write`` will also remove ``changePermission`` if previously granted.
    """
    self._assert_valid_permission(perm_str)
    for perm_str in self._equal_or_higher_perm(perm_str):
        self._perm_dict.setdefault(perm_str, set()).discard(subj_str)
[ "def", "remove_perm", "(", "self", ",", "subj_str", ",", "perm_str", ")", ":", "self", ".", "_assert_valid_permission", "(", "perm_str", ")", "for", "perm_str", "in", "self", ".", "_equal_or_higher_perm", "(", "perm_str", ")", ":", "self", ".", "_perm_dict", ".", "setdefault", "(", "perm_str", ",", "set", "(", ")", ")", ".", "discard", "(", "subj_str", ")" ]
avg_line_len: 37.066667, score: 22.6
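A minimal sketch of the cascading removal, assuming the permission ordering read < write < changePermission implied by the docstring; the helper here reimplements, rather than calls, the object's private methods:

PERM_ORDER = ['read', 'write', 'changePermission']

def equal_or_higher(perm):
    return PERM_ORDER[PERM_ORDER.index(perm):]

perm_dict = {'write': {'alice'}, 'changePermission': {'alice'}}
for p in equal_or_higher('write'):
    perm_dict.setdefault(p, set()).discard('alice')

# removing 'write' also stripped 'changePermission'; 'read' is untouched
assert perm_dict == {'write': set(), 'changePermission': set()}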
def shutdown(message=None, timeout=5, force_close=True, reboot=False,  # pylint: disable=redefined-outer-name
             in_seconds=False, only_on_pending_reboot=False):
    '''
    Shutdown a running system.

    Args:

        message (str):
            The message to display to the user before shutting down.

        timeout (int):
            The length of time (in seconds) that the shutdown dialog box should
            be displayed. While this dialog box is displayed, the shutdown can
            be aborted using the ``system.shutdown_abort`` function.

            If timeout is not zero, InitiateSystemShutdown displays a dialog
            box on the specified computer. The dialog box displays the name of
            the user who called the function, the message specified by the
            lpMessage parameter, and prompts the user to log off. The dialog
            box beeps when it is created and remains on top of other windows
            (system modal). The dialog box can be moved but not closed. A timer
            counts down the remaining time before the shutdown occurs.

            If timeout is zero, the computer shuts down immediately without
            displaying the dialog box and cannot be stopped by
            ``system.shutdown_abort``.

            Default is 5 minutes

        in_seconds (bool):
            ``True`` will cause the ``timeout`` parameter to be in seconds.
            ``False`` will be in minutes. Default is ``False``.

            .. versionadded:: 2015.8.0

        force_close (bool):
            ``True`` will force close all open applications. ``False`` will
            display a dialog box instructing the user to close open
            applications. Default is ``True``.

        reboot (bool):
            ``True`` restarts the computer immediately after shutdown. ``False``
            powers down the system. Default is ``False``.

        only_on_pending_reboot (bool):
            If ``True``, the shutdown will only proceed if the system reports a
            pending reboot. ``False`` will always shut down the system. Default
            is ``False``. To optionally shut down in a highstate, consider
            using the shutdown state instead of this module.

    Returns:
        bool: ``True`` if successful (a shutdown or reboot will occur),
        otherwise ``False``

    CLI Example:

    .. code-block:: bash

        salt '*' system.shutdown "System will shutdown in 5 minutes"
    '''
    if six.PY2:
        message = _to_unicode(message)

    timeout = _convert_minutes_seconds(timeout, in_seconds)

    if only_on_pending_reboot and not get_pending_reboot():
        return False

    if message and not isinstance(message, six.string_types):
        message = message.decode('utf-8')
    try:
        win32api.InitiateSystemShutdown('127.0.0.1', message, timeout,
                                        force_close, reboot)
        return True
    except pywintypes.error as exc:
        (number, context, message) = exc.args
        log.error('Failed to shutdown the system')
        log.error('nbr: %s', number)
        log.error('ctx: %s', context)
        log.error('msg: %s', message)
        return False
[ "def", "shutdown", "(", "message", "=", "None", ",", "timeout", "=", "5", ",", "force_close", "=", "True", ",", "reboot", "=", "False", ",", "# pylint: disable=redefined-outer-name", "in_seconds", "=", "False", ",", "only_on_pending_reboot", "=", "False", ")", ":", "if", "six", ".", "PY2", ":", "message", "=", "_to_unicode", "(", "message", ")", "timeout", "=", "_convert_minutes_seconds", "(", "timeout", ",", "in_seconds", ")", "if", "only_on_pending_reboot", "and", "not", "get_pending_reboot", "(", ")", ":", "return", "False", "if", "message", "and", "not", "isinstance", "(", "message", ",", "six", ".", "string_types", ")", ":", "message", "=", "message", ".", "decode", "(", "'utf-8'", ")", "try", ":", "win32api", ".", "InitiateSystemShutdown", "(", "'127.0.0.1'", ",", "message", ",", "timeout", ",", "force_close", ",", "reboot", ")", "return", "True", "except", "pywintypes", ".", "error", "as", "exc", ":", "(", "number", ",", "context", ",", "message", ")", "=", "exc", ".", "args", "log", ".", "error", "(", "'Failed to shutdown the system'", ")", "log", ".", "error", "(", "'nbr: %s'", ",", "number", ")", "log", ".", "error", "(", "'ctx: %s'", ",", "context", ")", "log", ".", "error", "(", "'msg: %s'", ",", "message", ")", "return", "False" ]
avg_line_len: 38.529412, score: 27.023529
def _serialize_item(item):
    """Internal function: serialize native types."""
    # Recursively serialize lists, tuples, and dicts.
    if isinstance(item, (list, tuple)):
        return [_serialize_item(subitem) for subitem in item]
    elif isinstance(item, dict):
        return dict([(key, _serialize_item(value))
                     for (key, value) in iteritems(item)])
    # Serialize strings.
    elif isinstance(item, string_types):
        # Replace glSomething by something (needed for WebGL commands).
        if item.startswith('gl'):
            return re.sub(r'^gl([A-Z])', lambda m: m.group(1).lower(), item)
        else:
            return item
    # Process NumPy arrays that are not buffers (typically, uniform values).
    elif isinstance(item, np.ndarray):
        return _serialize_item(item.ravel().tolist())
    # Serialize numbers.
    else:
        try:
            return np.asscalar(item)
        except Exception:
            return item
[ "def", "_serialize_item", "(", "item", ")", ":", "# Recursively serialize lists, tuples, and dicts.", "if", "isinstance", "(", "item", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "[", "_serialize_item", "(", "subitem", ")", "for", "subitem", "in", "item", "]", "elif", "isinstance", "(", "item", ",", "dict", ")", ":", "return", "dict", "(", "[", "(", "key", ",", "_serialize_item", "(", "value", ")", ")", "for", "(", "key", ",", "value", ")", "in", "iteritems", "(", "item", ")", "]", ")", "# Serialize strings.", "elif", "isinstance", "(", "item", ",", "string_types", ")", ":", "# Replace glSomething by something (needed for WebGL commands).", "if", "item", ".", "startswith", "(", "'gl'", ")", ":", "return", "re", ".", "sub", "(", "r'^gl([A-Z])'", ",", "lambda", "m", ":", "m", ".", "group", "(", "1", ")", ".", "lower", "(", ")", ",", "item", ")", "else", ":", "return", "item", "# Process NumPy arrays that are not buffers (typically, uniform values).", "elif", "isinstance", "(", "item", ",", "np", ".", "ndarray", ")", ":", "return", "_serialize_item", "(", "item", ".", "ravel", "(", ")", ".", "tolist", "(", ")", ")", "# Serialize numbers.", "else", ":", "try", ":", "return", "np", ".", "asscalar", "(", "item", ")", "except", "Exception", ":", "return", "item" ]
avg_line_len: 35.074074, score: 18.555556
def _parse_typed_parameter_typed_value(values):
    '''
    Creates Arguments in a TypedParametervalue.
    '''
    type_, value = _expand_one_key_dictionary(values)
    _current_parameter_value.type = type_

    if _is_simple_type(value):
        arg = Argument(value)
        _current_parameter_value.add_argument(arg)
    elif isinstance(value, list):
        for idx in value:
            arg = Argument(idx)
            _current_parameter_value.add_argument(arg)
[ "def", "_parse_typed_parameter_typed_value", "(", "values", ")", ":", "type_", ",", "value", "=", "_expand_one_key_dictionary", "(", "values", ")", "_current_parameter_value", ".", "type", "=", "type_", "if", "_is_simple_type", "(", "value", ")", ":", "arg", "=", "Argument", "(", "value", ")", "_current_parameter_value", ".", "add_argument", "(", "arg", ")", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "for", "idx", "in", "value", ":", "arg", "=", "Argument", "(", "idx", ")", "_current_parameter_value", ".", "add_argument", "(", "arg", ")" ]
avg_line_len: 32.428571, score: 15
def poll_integration_alert_data(integration_alert):
    """Poll for updates on waiting IntegrationAlerts."""
    logger.info("Polling information for integration alert %s", integration_alert)
    try:
        configured_integration = integration_alert.configured_integration
        integration_actions_instance = configured_integration.integration.module

        output_data, output_file_content = integration_actions_instance.poll_for_updates(
            json.loads(integration_alert.output_data)
        )

        integration_alert.status = IntegrationAlertStatuses.DONE.name
        integration_alert.output_data = json.dumps(output_data)
        polling_integration_alerts.remove(integration_alert)
    except exceptions.IntegrationNoMethodImplementationError:
        logger.error("No poll_for_updates function found for integration alert %s", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING.name
    except exceptions.IntegrationPollEventError:
        # This does not always indicate an error; it is also raised when we need to try again later
        logger.debug("Polling for integration alert %s failed", integration_alert)
    except exceptions.IntegrationOutputFormatError:
        logger.error("Integration alert %s formatting error", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING_FORMATTING.name
    except Exception:
        logger.exception("Error polling integration alert %s", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING.name
[ "def", "poll_integration_alert_data", "(", "integration_alert", ")", ":", "logger", ".", "info", "(", "\"Polling information for integration alert %s\"", ",", "integration_alert", ")", "try", ":", "configured_integration", "=", "integration_alert", ".", "configured_integration", "integration_actions_instance", "=", "configured_integration", ".", "integration", ".", "module", "output_data", ",", "output_file_content", "=", "integration_actions_instance", ".", "poll_for_updates", "(", "json", ".", "loads", "(", "integration_alert", ".", "output_data", ")", ")", "integration_alert", ".", "status", "=", "IntegrationAlertStatuses", ".", "DONE", ".", "name", "integration_alert", ".", "output_data", "=", "json", ".", "dumps", "(", "output_data", ")", "polling_integration_alerts", ".", "remove", "(", "integration_alert", ")", "except", "exceptions", ".", "IntegrationNoMethodImplementationError", ":", "logger", ".", "error", "(", "\"No poll_for_updates function found for integration alert %s\"", ",", "integration_alert", ")", "integration_alert", ".", "status", "=", "IntegrationAlertStatuses", ".", "ERROR_POLLING", ".", "name", "except", "exceptions", ".", "IntegrationPollEventError", ":", "# This does not always indicate an error, this is also raised when need to try again later", "logger", ".", "debug", "(", "\"Polling for integration alert %s failed\"", ",", "integration_alert", ")", "except", "exceptions", ".", "IntegrationOutputFormatError", ":", "logger", ".", "error", "(", "\"Integration alert %s formatting error\"", ",", "integration_alert", ")", "integration_alert", ".", "status", "=", "IntegrationAlertStatuses", ".", "ERROR_POLLING_FORMATTING", ".", "name", "except", "Exception", ":", "logger", ".", "exception", "(", "\"Error polling integration alert %s\"", ",", "integration_alert", ")", "integration_alert", ".", "status", "=", "IntegrationAlertStatuses", ".", "ERROR_POLLING", ".", "name" ]
avg_line_len: 47.333333, score: 33.636364
def logical_chassis_fwdl_sanity_output_fwdl_cmd_status(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    logical_chassis_fwdl_sanity = ET.Element("logical_chassis_fwdl_sanity")
    config = logical_chassis_fwdl_sanity
    output = ET.SubElement(logical_chassis_fwdl_sanity, "output")
    fwdl_cmd_status = ET.SubElement(output, "fwdl-cmd-status")
    fwdl_cmd_status.text = kwargs.pop('fwdl_cmd_status')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
[ "def", "logical_chassis_fwdl_sanity_output_fwdl_cmd_status", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "logical_chassis_fwdl_sanity", "=", "ET", ".", "Element", "(", "\"logical_chassis_fwdl_sanity\"", ")", "config", "=", "logical_chassis_fwdl_sanity", "output", "=", "ET", ".", "SubElement", "(", "logical_chassis_fwdl_sanity", ",", "\"output\"", ")", "fwdl_cmd_status", "=", "ET", ".", "SubElement", "(", "output", ",", "\"fwdl-cmd-status\"", ")", "fwdl_cmd_status", ".", "text", "=", "kwargs", ".", "pop", "(", "'fwdl_cmd_status'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
avg_line_len: 46.25, score: 18.166667
def build_paste(uid, shortid, type, nick, time, fmt, code, filename, mime):
    "Build a 'paste' object"
    return locals()
[ "def", "build_paste", "(", "uid", ",", "shortid", ",", "type", ",", "nick", ",", "time", ",", "fmt", ",", "code", ",", "filename", ",", "mime", ")", ":", "return", "locals", "(", ")" ]
avg_line_len: 43.333333, score: 20
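The locals() return simply packages the parameters into a dict; a stand-alone illustration with invented field values:

def build_paste(uid, shortid, type, nick, time, fmt, code, filename, mime):
    "Build a 'paste' object"
    return locals()   # dict of all nine parameters, keyed by name

paste = build_paste(1, 'ab12', 'paste', 'alice', 0, 'python',
                    'print(1)', 'demo.py', 'text/x-python')
assert paste['nick'] == 'alice' and paste['mime'] == 'text/x-python'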
def loading(self):
    """Context manager for when you need to instantiate entities upon unpacking"""
    if getattr(self, '_initialized', False):
        raise ValueError("Already loading")
    self._initialized = False
    yield
    self._initialized = True
[ "def", "loading", "(", "self", ")", ":", "if", "getattr", "(", "self", ",", "'_initialized'", ",", "False", ")", ":", "raise", "ValueError", "(", "\"Already loading\"", ")", "self", ".", "_initialized", "=", "False", "yield", "self", ".", "_initialized", "=", "True" ]
avg_line_len: 39.571429, score: 11.285714
def get_popular_subreddits(self, *args, **kwargs):
    """Return a get_content generator for the most active subreddits.

    The additional parameters are passed directly into
    :meth:`.get_content`. Note: the `url` parameter cannot be altered.
    """
    url = self.config['popular_subreddits']
    return self.get_content(url, *args, **kwargs)
[ "def", "get_popular_subreddits", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "url", "=", "self", ".", "config", "[", "'popular_subreddits'", "]", "return", "self", ".", "get_content", "(", "url", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
avg_line_len: 40.666667, score: 18
def format_image_iter( data_fetch, x_start=0, y_start=0, width=32, height=32, frame=0, columns=1, downsample=1 ):
    """Return the ANSI escape sequence to render a bitmap image.

    data_fetch
        Function that takes three arguments (x position, y position, and
        frame) and returns a Colour corresponding to the pixel stored there,
        or Transparent if the requested pixel is out of bounds.

    x_start
        Offset from the left of the image data to render from. Defaults to 0.

    y_start
        Offset from the top of the image data to render from. Defaults to 0.

    width
        Width of the image data to render. Defaults to 32.

    height
        Height of the image data to render. Defaults to 32.

    frame
        Single frame number/object, or a list to render in sequence.
        Defaults to frame 0.

    columns
        Number of frames to render per line (useful for printing tilemaps!).
        Defaults to 1.

    downsample
        Shrink larger images by printing every nth pixel only. Defaults to 1.
    """
    frames = []
    try:
        frame_iter = iter( frame )
        frames = [f for f in frame_iter]
    except TypeError:
        frames = [frame]

    rows = math.ceil( len( frames )/columns )
    for r in range( rows ):
        for y in range( 0, height, 2*downsample ):
            result = []
            for c in range( min( (len( frames )-r*columns), columns ) ):
                row = []
                for x in range( 0, width, downsample ):
                    fr = frames[r*columns + c]
                    c1 = data_fetch( x_start+x, y_start+y, fr )
                    c2 = data_fetch( x_start+x, y_start+y+downsample, fr )
                    row.append( (c1, c2) )
                prev_pixel = None
                pointer = 0
                while pointer < len( row ):
                    start = pointer
                    pixel = row[pointer]
                    while pointer < len( row ) and (row[pointer] == pixel):
                        pointer += 1
                    result.append( format_pixels( pixel[0], pixel[1], repeat=pointer-start ) )
            yield ''.join( result )
    return
[ "def", "format_image_iter", "(", "data_fetch", ",", "x_start", "=", "0", ",", "y_start", "=", "0", ",", "width", "=", "32", ",", "height", "=", "32", ",", "frame", "=", "0", ",", "columns", "=", "1", ",", "downsample", "=", "1", ")", ":", "frames", "=", "[", "]", "try", ":", "frame_iter", "=", "iter", "(", "frame", ")", "frames", "=", "[", "f", "for", "f", "in", "frame_iter", "]", "except", "TypeError", ":", "frames", "=", "[", "frame", "]", "rows", "=", "math", ".", "ceil", "(", "len", "(", "frames", ")", "/", "columns", ")", "for", "r", "in", "range", "(", "rows", ")", ":", "for", "y", "in", "range", "(", "0", ",", "height", ",", "2", "*", "downsample", ")", ":", "result", "=", "[", "]", "for", "c", "in", "range", "(", "min", "(", "(", "len", "(", "frames", ")", "-", "r", "*", "columns", ")", ",", "columns", ")", ")", ":", "row", "=", "[", "]", "for", "x", "in", "range", "(", "0", ",", "width", ",", "downsample", ")", ":", "fr", "=", "frames", "[", "r", "*", "columns", "+", "c", "]", "c1", "=", "data_fetch", "(", "x_start", "+", "x", ",", "y_start", "+", "y", ",", "fr", ")", "c2", "=", "data_fetch", "(", "x_start", "+", "x", ",", "y_start", "+", "y", "+", "downsample", ",", "fr", ")", "row", ".", "append", "(", "(", "c1", ",", "c2", ")", ")", "prev_pixel", "=", "None", "pointer", "=", "0", "while", "pointer", "<", "len", "(", "row", ")", ":", "start", "=", "pointer", "pixel", "=", "row", "[", "pointer", "]", "while", "pointer", "<", "len", "(", "row", ")", "and", "(", "row", "[", "pointer", "]", "==", "pixel", ")", ":", "pointer", "+=", "1", "result", ".", "append", "(", "format_pixels", "(", "pixel", "[", "0", "]", ",", "pixel", "[", "1", "]", ",", "repeat", "=", "pointer", "-", "start", ")", ")", "yield", "''", ".", "join", "(", "result", ")", "return" ]
avg_line_len: 36.894737, score: 25.403509
def cli(env, context_id, include):
    """List IPSEC VPN tunnel context details.

    Additional resources can be joined using multiple instances of the
    include option, for which the following choices are available.

    \b
    at: address translations
    is: internal subnets
    rs: remote subnets
    sr: statically routed subnets
    ss: service subnets
    """
    mask = _get_tunnel_context_mask(('at' in include),
                                    ('is' in include),
                                    ('rs' in include),
                                    ('sr' in include),
                                    ('ss' in include))
    manager = SoftLayer.IPSECManager(env.client)
    context = manager.get_tunnel_context(context_id, mask=mask)
    env.out('Context Details:')
    env.fout(_get_context_table(context))

    for relation in include:
        if relation == 'at':
            env.out('Address Translations:')
            env.fout(_get_address_translations_table(
                context.get('addressTranslations', [])))
        elif relation == 'is':
            env.out('Internal Subnets:')
            env.fout(_get_subnets_table(context.get('internalSubnets', [])))
        elif relation == 'rs':
            env.out('Remote Subnets:')
            env.fout(_get_subnets_table(context.get('customerSubnets', [])))
        elif relation == 'sr':
            env.out('Static Subnets:')
            env.fout(_get_subnets_table(context.get('staticRouteSubnets', [])))
        elif relation == 'ss':
            env.out('Service Subnets:')
            env.fout(_get_subnets_table(context.get('serviceSubnets', [])))
[ "def", "cli", "(", "env", ",", "context_id", ",", "include", ")", ":", "mask", "=", "_get_tunnel_context_mask", "(", "(", "'at'", "in", "include", ")", ",", "(", "'is'", "in", "include", ")", ",", "(", "'rs'", "in", "include", ")", ",", "(", "'sr'", "in", "include", ")", ",", "(", "'ss'", "in", "include", ")", ")", "manager", "=", "SoftLayer", ".", "IPSECManager", "(", "env", ".", "client", ")", "context", "=", "manager", ".", "get_tunnel_context", "(", "context_id", ",", "mask", "=", "mask", ")", "env", ".", "out", "(", "'Context Details:'", ")", "env", ".", "fout", "(", "_get_context_table", "(", "context", ")", ")", "for", "relation", "in", "include", ":", "if", "relation", "==", "'at'", ":", "env", ".", "out", "(", "'Address Translations:'", ")", "env", ".", "fout", "(", "_get_address_translations_table", "(", "context", ".", "get", "(", "'addressTranslations'", ",", "[", "]", ")", ")", ")", "elif", "relation", "==", "'is'", ":", "env", ".", "out", "(", "'Internal Subnets:'", ")", "env", ".", "fout", "(", "_get_subnets_table", "(", "context", ".", "get", "(", "'internalSubnets'", ",", "[", "]", ")", ")", ")", "elif", "relation", "==", "'rs'", ":", "env", ".", "out", "(", "'Remote Subnets:'", ")", "env", ".", "fout", "(", "_get_subnets_table", "(", "context", ".", "get", "(", "'customerSubnets'", ",", "[", "]", ")", ")", ")", "elif", "relation", "==", "'sr'", ":", "env", ".", "out", "(", "'Static Subnets:'", ")", "env", ".", "fout", "(", "_get_subnets_table", "(", "context", ".", "get", "(", "'staticRouteSubnets'", ",", "[", "]", ")", ")", ")", "elif", "relation", "==", "'ss'", ":", "env", ".", "out", "(", "'Service Subnets:'", ")", "env", ".", "fout", "(", "_get_subnets_table", "(", "context", ".", "get", "(", "'serviceSubnets'", ",", "[", "]", ")", ")", ")" ]
39.04878
16.707317
def save_index(self, filename): ''' Save the current Layout's index to a .json file. Args: filename (str): Filename to write to. Note: At the moment, this won't serialize directory-specific config files. This means reconstructed indexes will only work properly in cases where there aren't multiple layout specs within a project. ''' data = {} for f in self.files.values(): entities = {v.entity.id: v.value for k, v in f.tags.items()} data[f.path] = {'domains': f.domains, 'entities': entities} with open(filename, 'w') as outfile: json.dump(data, outfile)
[ "def", "save_index", "(", "self", ",", "filename", ")", ":", "data", "=", "{", "}", "for", "f", "in", "self", ".", "files", ".", "values", "(", ")", ":", "entities", "=", "{", "v", ".", "entity", ".", "id", ":", "v", ".", "value", "for", "k", ",", "v", "in", "f", ".", "tags", ".", "items", "(", ")", "}", "data", "[", "f", ".", "path", "]", "=", "{", "'domains'", ":", "f", ".", "domains", ",", "'entities'", ":", "entities", "}", "with", "open", "(", "filename", ",", "'w'", ")", "as", "outfile", ":", "json", ".", "dump", "(", "data", ",", "outfile", ")" ]
41.375
23.25
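A hedged sketch of the JSON layout save_index writes, assuming one tagged file; the path, domain, and entity names below are illustrative, not taken from the record:

import json

# Illustrative index contents: one file path mapping to its domains and
# the entity id/value pairs extracted from its tags.
data = {
    '/data/sub-01/func/bold.nii.gz': {
        'domains': ['bids'],
        'entities': {'subject': '01', 'type': 'bold'},
    },
}
with open('index.json', 'w') as outfile:
    json.dump(data, outfile)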
def flush(self, meta=None): '''Flush all model keys from the database''' pattern = self.basekey(meta) if meta else self.namespace return self.client.delpattern('%s*' % pattern)
[ "def", "flush", "(", "self", ",", "meta", "=", "None", ")", ":", "pattern", "=", "self", ".", "basekey", "(", "meta", ")", "if", "meta", "else", "self", ".", "namespace", "return", "self", ".", "client", ".", "delpattern", "(", "'%s*'", "%", "pattern", ")" ]
50
16
def trigger(self, event: str, **kwargs: Any) -> None: """Trigger all handlers for an event to (asynchronously) execute""" event = event.upper() for func in self._event_handlers[event]: self.loop.create_task(func(**kwargs)) # This will unblock anyone that is awaiting on the next loop update, # while still ensuring the next `await client.wait(event)` doesn't # immediately fire. async_event = self._events[event] async_event.set() async_event.clear()
[ "def", "trigger", "(", "self", ",", "event", ":", "str", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "event", "=", "event", ".", "upper", "(", ")", "for", "func", "in", "self", ".", "_event_handlers", "[", "event", "]", ":", "self", ".", "loop", ".", "create_task", "(", "func", "(", "*", "*", "kwargs", ")", ")", "# This will unblock anyone that is awaiting on the next loop update,", "# while still ensuring the next `await client.wait(event)` doesn't", "# immediately fire.", "async_event", "=", "self", ".", "_events", "[", "event", "]", "async_event", ".", "set", "(", ")", "async_event", ".", "clear", "(", ")" ]
47.636364
13.909091
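The set-then-clear pattern at the end of trigger() releases every coroutine currently awaiting the event while ensuring a wait() started afterwards blocks again. A self-contained sketch of that behaviour with a plain asyncio.Event:

import asyncio

async def main():
    event = asyncio.Event()
    waiter = asyncio.ensure_future(event.wait())
    await asyncio.sleep(0)      # let the waiter start waiting
    event.set()                 # releases the in-flight waiter
    event.clear()               # a future wait() will block again
    await waiter
    assert not event.is_set()

asyncio.run(main())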
def version(self): """Get the version of MongoDB that this Server runs as a tuple.""" if not self.__version: command = (self.name, '--version') logger.debug(command) stdout, _ = subprocess.Popen( command, stdout=subprocess.PIPE).communicate() version_output = str(stdout) match = re.search(self.version_patt, version_output) if match is None: raise ServersError( 'Could not determine version of %s from string: %s' % (self.name, version_output)) version_string = match.group('version') self.__version = tuple(map(int, version_string.split('.'))) return self.__version
[ "def", "version", "(", "self", ")", ":", "if", "not", "self", ".", "__version", ":", "command", "=", "(", "self", ".", "name", ",", "'--version'", ")", "logger", ".", "debug", "(", "command", ")", "stdout", ",", "_", "=", "subprocess", ".", "Popen", "(", "command", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", ".", "communicate", "(", ")", "version_output", "=", "str", "(", "stdout", ")", "match", "=", "re", ".", "search", "(", "self", ".", "version_patt", ",", "version_output", ")", "if", "match", "is", "None", ":", "raise", "ServersError", "(", "'Could not determine version of %s from string: %s'", "%", "(", "self", ".", "name", ",", "version_output", ")", ")", "version_string", "=", "match", ".", "group", "(", "'version'", ")", "self", ".", "__version", "=", "tuple", "(", "map", "(", "int", ",", "version_string", ".", "split", "(", "'.'", ")", ")", ")", "return", "self", ".", "__version" ]
46.5
12.625
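A standalone sketch of the regex-based version extraction; the real version_patt lives on the class, so the pattern below (with its 'version' named group) is an assumption:

import re

version_patt = r'(?P<version>\d+(\.\d+)+)'   # assumed pattern with a 'version' group
version_output = 'db version v4.0.3'
match = re.search(version_patt, version_output)
if match is None:
    raise RuntimeError('Could not determine version from string: %s' % version_output)
version = tuple(map(int, match.group('version').split('.')))
print(version)  # (4, 0, 3)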
def get(self, filename): """ returns subtitles as string """ params = { "v": 'dreambox', "kolejka": "false", "nick": "", "pass": "", "napios": sys.platform, "l": self.language.upper(), "f": self.prepareHash(filename), } params['t'] = self.discombobulate(params['f']) url = self.url_base + urllib.urlencode(params) subs = urllib.urlopen(url).read() if subs.startswith('brak pliku tymczasowego'): raise NapiprojektException('napiprojekt.pl API error') if subs[0:4] != 'NPc0': # napiprojekt keeps subtitles in cp1250 # ... but, sometimes they are in utf8 for cdc in ['cp1250', 'utf8']: try: return codecs.decode(subs, cdc) except: pass
[ "def", "get", "(", "self", ",", "filename", ")", ":", "params", "=", "{", "\"v\"", ":", "'dreambox'", ",", "\"kolejka\"", ":", "\"false\"", ",", "\"nick\"", ":", "\"\"", ",", "\"pass\"", ":", "\"\"", ",", "\"napios\"", ":", "sys", ".", "platform", ",", "\"l\"", ":", "self", ".", "language", ".", "upper", "(", ")", ",", "\"f\"", ":", "self", ".", "prepareHash", "(", "filename", ")", ",", "}", "params", "[", "'t'", "]", "=", "self", ".", "discombobulate", "(", "params", "[", "'f'", "]", ")", "url", "=", "self", ".", "url_base", "+", "urllib", ".", "urlencode", "(", "params", ")", "subs", "=", "urllib", ".", "urlopen", "(", "url", ")", ".", "read", "(", ")", "if", "subs", ".", "startswith", "(", "'brak pliku tymczasowego'", ")", ":", "raise", "NapiprojektException", "(", "'napiprojekt.pl API error'", ")", "if", "subs", "[", "0", ":", "4", "]", "!=", "'NPc0'", ":", "# napiprojekt keeps subtitles in cp1250", "# ... but, sometimes they are in utf8", "for", "cdc", "in", "[", "'cp1250'", ",", "'utf8'", "]", ":", "try", ":", "return", "codecs", ".", "decode", "(", "subs", ",", "cdc", ")", "except", ":", "pass" ]
28.290323
18.645161
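The final loop tries cp1250 first and falls back to utf8. A small standalone demonstration of that decode fallback, with the record's bare except narrowed to UnicodeDecodeError:

import codecs

subs = u'za\u017c\u00f3\u0142\u0107'.encode('cp1250')   # bytes as the service might serve them
for cdc in ['cp1250', 'utf8']:
    try:
        print(codecs.decode(subs, cdc))
        break
    except UnicodeDecodeError:
        pass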
def _dict_to_fields(d, jsonify=True): """ Convert dict to tuple, for faster sqlite3 import """ x = [] for k in constants._keys: v = d[k] if jsonify and (k in ('attributes', 'extra')): x.append(_jsonify(v)) else: x.append(v) return tuple(x)
[ "def", "_dict_to_fields", "(", "d", ",", "jsonify", "=", "True", ")", ":", "x", "=", "[", "]", "for", "k", "in", "constants", ".", "_keys", ":", "v", "=", "d", "[", "k", "]", "if", "jsonify", "and", "(", "k", "in", "(", "'attributes'", ",", "'extra'", ")", ")", ":", "x", ".", "append", "(", "_jsonify", "(", "v", ")", ")", "else", ":", "x", ".", "append", "(", "v", ")", "return", "tuple", "(", "x", ")" ]
25
13.833333
def get_groups(self, condition=None, page_size=1000):
    """Return an iterator over all groups in this device cloud account

    Optionally, a condition can be specified to limit the number of
    groups returned.

    Examples::

        # Get all groups and print information about them
        for group in dc.devicecore.get_groups():
            print group

        # Iterate over all devices which are in a group with a specific
        # ID.
        group = dc.devicecore.get_groups(group_id == 123)[0]
        for device in dc.devicecore.get_devices(group_path == group.get_path()):
            print device.get_mac()

    :param condition: A condition to use when filtering the results set.  If
        unspecified, all groups will be returned.
    :param int page_size: The number of results to fetch in a single page.  In general,
        the default will suffice.
    :returns: Generator over the groups in this device cloud account.  No guarantees
        about the order of results are provided and child links between nodes
        will not be populated.
    """
    query_kwargs = {}
    if condition is not None:
        query_kwargs["condition"] = condition.compile()
    for group_data in self._conn.iter_json_pages("/ws/Group",
                                                 page_size=page_size,
                                                 **query_kwargs):
        yield Group.from_json(group_data)
[ "def", "get_groups", "(", "self", ",", "condition", "=", "None", ",", "page_size", "=", "1000", ")", ":", "query_kwargs", "=", "{", "}", "if", "condition", "is", "not", "None", ":", "query_kwargs", "[", "\"condition\"", "]", "=", "condition", ".", "compile", "(", ")", "for", "group_data", "in", "self", ".", "_conn", ".", "iter_json_pages", "(", "\"/ws/Group\"", ",", "page_size", "=", "page_size", ",", "*", "*", "query_kwargs", ")", ":", "yield", "Group", ".", "from_json", "(", "group_data", ")" ]
43.59375
24.5625
def fit_predict(self, data, labels, unkown=None): """\ Fit and classify data efficiently. :param data: sparse input matrix (ideal dtype is `numpy.float32`) :type data: :class:`scipy.sparse.csr_matrix` :param labels: the labels associated with data :type labels: iterable :param unkown: the label to attribute if no label is known :returns: the labels guessed for data :rtype: `numpy.array` """ self.fit(data, labels) return self._predict_from_bmus(self._bmus, unkown)
[ "def", "fit_predict", "(", "self", ",", "data", ",", "labels", ",", "unkown", "=", "None", ")", ":", "self", ".", "fit", "(", "data", ",", "labels", ")", "return", "self", ".", "_predict_from_bmus", "(", "self", ".", "_bmus", ",", "unkown", ")" ]
39.357143
13.571429
def _check_optimization_errors(self): """ Parses three potential optimization errors: failing to converge within the allowed number of optimization cycles, failure to determine the lamda needed to continue, and inconsistent size of MO files due to a linear dependence in the AO basis. """ if read_pattern( self.text, { "key": r"MAXIMUM OPTIMIZATION CYCLES REACHED" }, terminate_on_match=True).get('key') == [[]]: self.data["errors"] += ["out_of_opt_cycles"] elif read_pattern( self.text, { "key": r"UNABLE TO DETERMINE Lamda IN FormD" }, terminate_on_match=True).get('key') == [[]]: self.data["errors"] += ["unable_to_determine_lamda"] elif read_pattern( self.text, { "key": r"Inconsistent size for SCF MO coefficient file" }, terminate_on_match=True).get('key') == [[]]: self.data["errors"] += ["linear_dependent_basis"]
[ "def", "_check_optimization_errors", "(", "self", ")", ":", "if", "read_pattern", "(", "self", ".", "text", ",", "{", "\"key\"", ":", "r\"MAXIMUM OPTIMIZATION CYCLES REACHED\"", "}", ",", "terminate_on_match", "=", "True", ")", ".", "get", "(", "'key'", ")", "==", "[", "[", "]", "]", ":", "self", ".", "data", "[", "\"errors\"", "]", "+=", "[", "\"out_of_opt_cycles\"", "]", "elif", "read_pattern", "(", "self", ".", "text", ",", "{", "\"key\"", ":", "r\"UNABLE TO DETERMINE Lamda IN FormD\"", "}", ",", "terminate_on_match", "=", "True", ")", ".", "get", "(", "'key'", ")", "==", "[", "[", "]", "]", ":", "self", ".", "data", "[", "\"errors\"", "]", "+=", "[", "\"unable_to_determine_lamda\"", "]", "elif", "read_pattern", "(", "self", ".", "text", ",", "{", "\"key\"", ":", "r\"Inconsistent size for SCF MO coefficient file\"", "}", ",", "terminate_on_match", "=", "True", ")", ".", "get", "(", "'key'", ")", "==", "[", "[", "]", "]", ":", "self", ".", "data", "[", "\"errors\"", "]", "+=", "[", "\"linear_dependent_basis\"", "]" ]
45.916667
20.75
def _pred(aclass):
    """
    :param aclass: the object to check
    :return: boolean
    """
    isaclass = inspect.isclass(aclass)
    return isaclass and aclass.__module__ == _pred.__module__
[ "def", "_pred", "(", "aclass", ")", ":", "isaclass", "=", "inspect", ".", "isclass", "(", "aclass", ")", "return", "isaclass", "and", "aclass", ".", "__module__", "==", "_pred", ".", "__module__" ]
24
12.571429
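A runnable version of the same-module predicate; run as a script, a locally defined class passes and builtins do not:

import inspect

def _pred(aclass):
    # True only for classes defined in the same module as this predicate.
    return inspect.isclass(aclass) and aclass.__module__ == _pred.__module__

class Local(object):
    pass

print(_pred(Local))  # True
print(_pred(int))    # False: int.__module__ is 'builtins'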
def get_sequence_rule_lookup_session_for_bank(self, bank_id, proxy): """Gets the ``OsidSession`` associated with the sequence rule lookup service for the given bank. arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank`` arg: proxy (osid.proxy.Proxy): a proxy return: (osid.assessment.authoring.SequenceRuleLookupSession) - a ``SequenceRuleLookupSession`` raise: NotFound - no ``Bank`` found by the given ``Id`` raise: NullArgument - ``bank_id or proxy is null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_sequence_rule_lookup()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_sequence_rule_lookup()`` and ``supports_visible_federation()`` are ``true``.* """ if not self.supports_sequence_rule_lookup(): raise errors.Unimplemented() ## # Also include check to see if the catalog Id is found otherwise raise errors.NotFound ## # pylint: disable=no-member return sessions.SequenceRuleLookupSession(bank_id, proxy, self._runtime)
[ "def", "get_sequence_rule_lookup_session_for_bank", "(", "self", ",", "bank_id", ",", "proxy", ")", ":", "if", "not", "self", ".", "supports_sequence_rule_lookup", "(", ")", ":", "raise", "errors", ".", "Unimplemented", "(", ")", "##", "# Also include check to see if the catalog Id is found otherwise raise errors.NotFound", "##", "# pylint: disable=no-member", "return", "sessions", ".", "SequenceRuleLookupSession", "(", "bank_id", ",", "proxy", ",", "self", ".", "_runtime", ")" ]
50.833333
21.458333
def update_cluster_topology(self, assignment): """Modify the cluster-topology with given assignment. Change the replica set of partitions as in given assignment. :param assignment: dict representing actions to be used to update the current cluster-topology :raises: InvalidBrokerIdError when broker-id is invalid :raises: InvalidPartitionError when partition-name is invalid """ try: for partition_name, replica_ids in six.iteritems(assignment): try: new_replicas = [self.brokers[b_id] for b_id in replica_ids] except KeyError: self.log.error( "Invalid replicas %s for topic-partition %s-%s.", ', '.join([str(id) for id in replica_ids]), partition_name[0], partition_name[1], ) raise InvalidBrokerIdError( "Invalid replicas {0}.".format( ', '.join([str(id) for id in replica_ids]) ), ) try: partition = self.partitions[partition_name] old_replicas = [broker for broker in partition.replicas] # No change needed. Save ourself some CPU time. # Replica order matters as the first one is the leader. if new_replicas == old_replicas: continue # Remove old partitions from broker # This also updates partition replicas for broker in old_replicas: broker.remove_partition(partition) # Add new partition to brokers for broker in new_replicas: broker.add_partition(partition) except KeyError: self.log.error( "Invalid topic-partition %s-%s.", partition_name[0], partition_name[1], ) raise InvalidPartitionError( "Invalid topic-partition {0}-{1}." .format(partition_name[0], partition_name[1]), ) except KeyError: self.log.error("Could not parse given assignment {0}".format(assignment)) raise
[ "def", "update_cluster_topology", "(", "self", ",", "assignment", ")", ":", "try", ":", "for", "partition_name", ",", "replica_ids", "in", "six", ".", "iteritems", "(", "assignment", ")", ":", "try", ":", "new_replicas", "=", "[", "self", ".", "brokers", "[", "b_id", "]", "for", "b_id", "in", "replica_ids", "]", "except", "KeyError", ":", "self", ".", "log", ".", "error", "(", "\"Invalid replicas %s for topic-partition %s-%s.\"", ",", "', '", ".", "join", "(", "[", "str", "(", "id", ")", "for", "id", "in", "replica_ids", "]", ")", ",", "partition_name", "[", "0", "]", ",", "partition_name", "[", "1", "]", ",", ")", "raise", "InvalidBrokerIdError", "(", "\"Invalid replicas {0}.\"", ".", "format", "(", "', '", ".", "join", "(", "[", "str", "(", "id", ")", "for", "id", "in", "replica_ids", "]", ")", ")", ",", ")", "try", ":", "partition", "=", "self", ".", "partitions", "[", "partition_name", "]", "old_replicas", "=", "[", "broker", "for", "broker", "in", "partition", ".", "replicas", "]", "# No change needed. Save ourself some CPU time.", "# Replica order matters as the first one is the leader.", "if", "new_replicas", "==", "old_replicas", ":", "continue", "# Remove old partitions from broker", "# This also updates partition replicas", "for", "broker", "in", "old_replicas", ":", "broker", ".", "remove_partition", "(", "partition", ")", "# Add new partition to brokers", "for", "broker", "in", "new_replicas", ":", "broker", ".", "add_partition", "(", "partition", ")", "except", "KeyError", ":", "self", ".", "log", ".", "error", "(", "\"Invalid topic-partition %s-%s.\"", ",", "partition_name", "[", "0", "]", ",", "partition_name", "[", "1", "]", ",", ")", "raise", "InvalidPartitionError", "(", "\"Invalid topic-partition {0}-{1}.\"", ".", "format", "(", "partition_name", "[", "0", "]", ",", "partition_name", "[", "1", "]", ")", ",", ")", "except", "KeyError", ":", "self", ".", "log", ".", "error", "(", "\"Could not parse given assignment {0}\"", ".", "format", "(", "assignment", ")", ")", "raise" ]
43.732143
19.839286
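For reference, a sketch of the assignment structure the method consumes: keys are (topic, partition) tuples and values are ordered broker id lists, where the first id is the leader. The topic name and ids below are made up:

assignment = {
    ('payments', 0): [1, 2, 3],   # broker 1 becomes the leader
    ('payments', 1): [2, 3, 1],
}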
def accept_vpc_peering_connection(name=None, conn_id=None, conn_name=None,
                                  region=None, key=None, keyid=None, profile=None):
    '''
    Accept a pending VPC peering connection request between two VPCs.

    name
        Name of this state

    conn_id
        The connection ID to accept. Exclusive with conn_name. String type.

    conn_name
        The name of the VPC peering connection to accept. Exclusive with
        conn_id. String type.

    region
        Region to connect to.

    key
        Secret key to be used.

    keyid
        Access key to be used.

    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.

    .. versionadded:: 2016.11.0

    Example:

    .. code-block:: yaml

        boto_vpc.accept_vpc_peering_connection:
          - conn_name: salt_peering_connection

        # usage with vpc peering connection id and region
        boto_vpc.accept_vpc_peering_connection:
          - conn_id: pbx-1873d472
          - region: us-west-2

    '''
    log.debug('Called state to accept VPC peering connection')
    pending = __salt__['boto_vpc.is_peering_connection_pending'](
        conn_id=conn_id, conn_name=conn_name, region=region, key=key,
        keyid=keyid, profile=profile)

    ret = {
        'name': name,
        'result': True,
        'changes': {},
        'comment': 'Boto VPC peering state'
    }

    if not pending:
        ret['result'] = True
        ret['changes'].update({'old': 'No pending VPC peering connection found. Nothing to be done.'})
        return ret

    if __opts__['test']:
        ret['changes'].update({'old': 'Pending VPC peering connection found and can be accepted'})
        return ret

    fun = 'boto_vpc.accept_vpc_peering_connection'
    log.debug('Calling `%s()` to accept this VPC peering connection', fun)
    result = __salt__[fun](conn_id=conn_id, name=conn_name, region=region,
                           key=key, keyid=keyid, profile=profile)

    if 'error' in result:
        ret['comment'] = "Failed to accept VPC peering: {0}".format(result['error'])
        ret['result'] = False
        return ret

    ret['changes'].update({'old': '', 'new': result['msg']})
    return ret
[ "def", "accept_vpc_peering_connection", "(", "name", "=", "None", ",", "conn_id", "=", "None", ",", "conn_name", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "log", ".", "debug", "(", "'Called state to accept VPC peering connection'", ")", "pending", "=", "__salt__", "[", "'boto_vpc.is_peering_connection_pending'", "]", "(", "conn_id", "=", "conn_id", ",", "conn_name", "=", "conn_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "True", ",", "'changes'", ":", "{", "}", ",", "'comment'", ":", "'Boto VPC peering state'", "}", "if", "not", "pending", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'changes'", "]", ".", "update", "(", "{", "'old'", ":", "'No pending VPC peering connection found. Nothing to be done.'", "}", ")", "return", "ret", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'changes'", "]", ".", "update", "(", "{", "'old'", ":", "'Pending VPC peering connection found and can be accepted'", "}", ")", "return", "ret", "fun", "=", "'boto_vpc.accept_vpc_peering_connection'", "log", ".", "debug", "(", "'Calling `%s()` to accept this VPC peering connection'", ",", "fun", ")", "result", "=", "__salt__", "[", "fun", "]", "(", "conn_id", "=", "conn_id", ",", "name", "=", "conn_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "'error'", "in", "result", ":", "ret", "[", "'comment'", "]", "=", "\"Failed to accept VPC peering: {0}\"", ".", "format", "(", "result", "[", "'error'", "]", ")", "ret", "[", "'result'", "]", "=", "False", "return", "ret", "ret", "[", "'changes'", "]", ".", "update", "(", "{", "'old'", ":", "''", ",", "'new'", ":", "result", "[", "'msg'", "]", "}", ")", "return", "ret" ]
28.74026
26.324675
def get_typeof(self, name): """Get the GType of a GObject property. This function returns 0 if the property does not exist. """ # logger.debug('VipsObject.get_typeof: self = %s, name = %s', # str(self), name) pspec = self._get_pspec(name) if pspec is None: # need to clear any error, this is horrible Error('') return 0 return pspec.value_type
[ "def", "get_typeof", "(", "self", ",", "name", ")", ":", "# logger.debug('VipsObject.get_typeof: self = %s, name = %s',", "# str(self), name)", "pspec", "=", "self", ".", "_get_pspec", "(", "name", ")", "if", "pspec", "is", "None", ":", "# need to clear any error, this is horrible", "Error", "(", "''", ")", "return", "0", "return", "pspec", ".", "value_type" ]
26.176471
20.411765
def add_context(self, name, indices, level=None): """ Add a new context level to the hierarchy. By default, new contexts are added to the lowest level of the hierarchy. To insert the context elsewhere in the hierarchy, use the ``level`` argument. For example, ``level=0`` would insert the context at the highest level of the hierarchy. Parameters ---------- name : str indices : list Token indices at which each chunk in the context begins. level : int Level in the hierarchy at which to insert the context. By default, inserts context at the lowest level of the hierarchy """ self._validate_context((name, indices)) if level is None: level = len(self.contexts_ranked) self.contexts_ranked.insert(level, name) self.contexts[name] = indices
[ "def", "add_context", "(", "self", ",", "name", ",", "indices", ",", "level", "=", "None", ")", ":", "self", ".", "_validate_context", "(", "(", "name", ",", "indices", ")", ")", "if", "level", "is", "None", ":", "level", "=", "len", "(", "self", ".", "contexts_ranked", ")", "self", ".", "contexts_ranked", ".", "insert", "(", "level", ",", "name", ")", "self", ".", "contexts", "[", "name", "]", "=", "indices" ]
34.423077
21.576923
def decipher_atom_keys(self, forcefield='DLF', dict_key='atom_ids'): """ Decipher force field atom ids. This takes all values in :attr:`MolecularSystem.system['atom_ids']` that match force field type criteria and creates :attr:`MolecularSystem.system['elements']` with the corresponding periodic table of elements equivalents. If a forcefield is not supported by this method, the :func:`MolecularSystem.swap_atom_keys()` can be used instead. DLF stands for DL_F notation. See: C. W. Yong, Descriptions and Implementations of DL_F Notation: A Natural Chemical Expression System of Atom Types for Molecular Simulations, J. Chem. Inf. Model., 2016, 56, 1405–1409. Parameters ---------- forcefield : :class:`str` The forcefield used to decipher atom ids. Allowed (not case sensitive): 'OPLS', 'OPLS2005', 'OPLSAA', 'OPLS3', 'DLF', 'DL_F'. (default='DLF') dict_key : :class:`str` The :attr:`MolecularSystem.system` dictionary key to the array containing the force field atom ids. (default='atom_ids') Returns ------- None : :class:`NoneType` """ # In case there is no 'atom_ids' key we try 'elements'. This is for # XYZ and MOL files mostly. But, we keep the dict_key keyword for # someone who would want to decipher 'elements' even if 'atom_ids' key # is present in the system's dictionary. if 'atom_ids' not in self.system.keys(): dict_key = 'elements' # I do it on temporary object so that it only finishes when successful temp = deepcopy(self.system[dict_key]) for element in range(len(temp)): temp[element] = "{0}".format( decipher_atom_key( temp[element], forcefield=forcefield)) self.system['elements'] = temp
[ "def", "decipher_atom_keys", "(", "self", ",", "forcefield", "=", "'DLF'", ",", "dict_key", "=", "'atom_ids'", ")", ":", "# In case there is no 'atom_ids' key we try 'elements'. This is for", "# XYZ and MOL files mostly. But, we keep the dict_key keyword for", "# someone who would want to decipher 'elements' even if 'atom_ids' key", "# is present in the system's dictionary.", "if", "'atom_ids'", "not", "in", "self", ".", "system", ".", "keys", "(", ")", ":", "dict_key", "=", "'elements'", "# I do it on temporary object so that it only finishes when successful", "temp", "=", "deepcopy", "(", "self", ".", "system", "[", "dict_key", "]", ")", "for", "element", "in", "range", "(", "len", "(", "temp", ")", ")", ":", "temp", "[", "element", "]", "=", "\"{0}\"", ".", "format", "(", "decipher_atom_key", "(", "temp", "[", "element", "]", ",", "forcefield", "=", "forcefield", ")", ")", "self", ".", "system", "[", "'elements'", "]", "=", "temp" ]
40.957447
22.234043
def set(self, value): """ Sets the value of the object :param value: A unicode string containing an IPv4 address, IPv4 address with CIDR, an IPv6 address or IPv6 address with CIDR """ if not isinstance(value, str_cls): raise TypeError(unwrap( ''' %s value must be a unicode string, not %s ''', type_name(self), type_name(value) )) original_value = value has_cidr = value.find('/') != -1 cidr = 0 if has_cidr: parts = value.split('/', 1) value = parts[0] cidr = int(parts[1]) if cidr < 0: raise ValueError(unwrap( ''' %s value contains a CIDR range less than 0 ''', type_name(self) )) if value.find(':') != -1: family = socket.AF_INET6 if cidr > 128: raise ValueError(unwrap( ''' %s value contains a CIDR range bigger than 128, the maximum value for an IPv6 address ''', type_name(self) )) cidr_size = 128 else: family = socket.AF_INET if cidr > 32: raise ValueError(unwrap( ''' %s value contains a CIDR range bigger than 32, the maximum value for an IPv4 address ''', type_name(self) )) cidr_size = 32 cidr_bytes = b'' if has_cidr: cidr_mask = '1' * cidr cidr_mask += '0' * (cidr_size - len(cidr_mask)) cidr_bytes = int_to_bytes(int(cidr_mask, 2)) cidr_bytes = (b'\x00' * ((cidr_size // 8) - len(cidr_bytes))) + cidr_bytes self._native = original_value self.contents = inet_pton(family, value) + cidr_bytes self._bytes = self.contents self._header = None if self._trailer != b'': self._trailer = b''
[ "def", "set", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "str_cls", ")", ":", "raise", "TypeError", "(", "unwrap", "(", "'''\n %s value must be a unicode string, not %s\n '''", ",", "type_name", "(", "self", ")", ",", "type_name", "(", "value", ")", ")", ")", "original_value", "=", "value", "has_cidr", "=", "value", ".", "find", "(", "'/'", ")", "!=", "-", "1", "cidr", "=", "0", "if", "has_cidr", ":", "parts", "=", "value", ".", "split", "(", "'/'", ",", "1", ")", "value", "=", "parts", "[", "0", "]", "cidr", "=", "int", "(", "parts", "[", "1", "]", ")", "if", "cidr", "<", "0", ":", "raise", "ValueError", "(", "unwrap", "(", "'''\n %s value contains a CIDR range less than 0\n '''", ",", "type_name", "(", "self", ")", ")", ")", "if", "value", ".", "find", "(", "':'", ")", "!=", "-", "1", ":", "family", "=", "socket", ".", "AF_INET6", "if", "cidr", ">", "128", ":", "raise", "ValueError", "(", "unwrap", "(", "'''\n %s value contains a CIDR range bigger than 128, the maximum\n value for an IPv6 address\n '''", ",", "type_name", "(", "self", ")", ")", ")", "cidr_size", "=", "128", "else", ":", "family", "=", "socket", ".", "AF_INET", "if", "cidr", ">", "32", ":", "raise", "ValueError", "(", "unwrap", "(", "'''\n %s value contains a CIDR range bigger than 32, the maximum\n value for an IPv4 address\n '''", ",", "type_name", "(", "self", ")", ")", ")", "cidr_size", "=", "32", "cidr_bytes", "=", "b''", "if", "has_cidr", ":", "cidr_mask", "=", "'1'", "*", "cidr", "cidr_mask", "+=", "'0'", "*", "(", "cidr_size", "-", "len", "(", "cidr_mask", ")", ")", "cidr_bytes", "=", "int_to_bytes", "(", "int", "(", "cidr_mask", ",", "2", ")", ")", "cidr_bytes", "=", "(", "b'\\x00'", "*", "(", "(", "cidr_size", "//", "8", ")", "-", "len", "(", "cidr_bytes", ")", ")", ")", "+", "cidr_bytes", "self", ".", "_native", "=", "original_value", "self", ".", "contents", "=", "inet_pton", "(", "family", ",", "value", ")", "+", "cidr_bytes", "self", ".", "_bytes", "=", "self", ".", "contents", "self", ".", "_header", "=", "None", "if", "self", ".", "_trailer", "!=", "b''", ":", "self", ".", "_trailer", "=", "b''" ]
30.928571
16.328571
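A standalone sketch of the CIDR mask byte computation performed above, using int.to_bytes in place of the library's int_to_bytes helper:

cidr, cidr_size = 24, 32                       # e.g. '10.0.0.0/24'
cidr_mask = '1' * cidr + '0' * (cidr_size - cidr)
cidr_bytes = int(cidr_mask, 2).to_bytes(cidr_size // 8, 'big')
print(cidr_bytes.hex())                        # ffffff00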
def pivot(self, index, column, value): """ Pivot the frame designated by the three columns: index, column, and value. Index and column should be of type enum, int, or time. For cases of multiple indexes for a column label, the aggregation method is to pick the first occurrence in the data frame :param index: Index is a column that will be the row label :param column: The labels for the columns in the pivoted Frame :param value: The column of values for the given index and column label :returns: """ assert_is_type(index, str) assert_is_type(column, str) assert_is_type(value, str) col_names = self.names if index not in col_names: raise H2OValueError("Index not in H2OFrame") if column not in col_names: raise H2OValueError("Column not in H2OFrame") if value not in col_names: raise H2OValueError("Value column not in H2OFrame") if self.type(column) not in ["enum","time","int"]: raise H2OValueError("'column' argument is not type enum, time or int") if self.type(index) not in ["enum","time","int"]: raise H2OValueError("'index' argument is not type enum, time or int") return H2OFrame._expr(expr=ExprNode("pivot",self,index,column,value))
[ "def", "pivot", "(", "self", ",", "index", ",", "column", ",", "value", ")", ":", "assert_is_type", "(", "index", ",", "str", ")", "assert_is_type", "(", "column", ",", "str", ")", "assert_is_type", "(", "value", ",", "str", ")", "col_names", "=", "self", ".", "names", "if", "index", "not", "in", "col_names", ":", "raise", "H2OValueError", "(", "\"Index not in H2OFrame\"", ")", "if", "column", "not", "in", "col_names", ":", "raise", "H2OValueError", "(", "\"Column not in H2OFrame\"", ")", "if", "value", "not", "in", "col_names", ":", "raise", "H2OValueError", "(", "\"Value column not in H2OFrame\"", ")", "if", "self", ".", "type", "(", "column", ")", "not", "in", "[", "\"enum\"", ",", "\"time\"", ",", "\"int\"", "]", ":", "raise", "H2OValueError", "(", "\"'column' argument is not type enum, time or int\"", ")", "if", "self", ".", "type", "(", "index", ")", "not", "in", "[", "\"enum\"", ",", "\"time\"", ",", "\"int\"", "]", ":", "raise", "H2OValueError", "(", "\"'index' argument is not type enum, time or int\"", ")", "return", "H2OFrame", ".", "_expr", "(", "expr", "=", "ExprNode", "(", "\"pivot\"", ",", "self", ",", "index", ",", "column", ",", "value", ")", ")" ]
51.269231
22.269231
def get_sms_connection(backend=None, fail_silently=False, **kwds): """Load an sms backend and return an instance of it. If backend is None (default) settings.SMS_BACKEND is used. Both fail_silently and other keyword arguments are used in the constructor of the backend. https://github.com/django/django/blob/master/django/core/mail/__init__.py#L28 """ klass = import_string(backend or settings.SMS_BACKEND) return klass(fail_silently=fail_silently, **kwds)
[ "def", "get_sms_connection", "(", "backend", "=", "None", ",", "fail_silently", "=", "False", ",", "*", "*", "kwds", ")", ":", "klass", "=", "import_string", "(", "backend", "or", "settings", ".", "SMS_BACKEND", ")", "return", "klass", "(", "fail_silently", "=", "fail_silently", ",", "*", "*", "kwds", ")" ]
38
21.916667
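A hedged usage sketch, assuming Django settings define SMS_BACKEND as a dotted path to a backend class; the path below is hypothetical:

from django.utils.module_loading import import_string

klass = import_string('myapp.sms.backends.ConsoleBackend')  # hypothetical backend path
connection = klass(fail_silently=True)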
def assert_valid_rule_class(clazz): """ Asserts that a given rule clazz is valid by checking a number of its properties: - Rules must extend from LineRule or CommitRule - Rule classes must have id and name string attributes. The options_spec is optional, but if set, it must be a list of gitlint Options. - Rule classes must have a validate method. In case of a CommitRule, validate must take a single commit parameter. In case of LineRule, validate must take line and commit as first and second parameters. - LineRule classes must have a target class attributes that is set to either CommitMessageTitle or CommitMessageBody. - User Rule id's cannot start with R, T, B or M as these rule ids are reserved for gitlint itself. """ # Rules must extend from LineRule or CommitRule if not (issubclass(clazz, rules.LineRule) or issubclass(clazz, rules.CommitRule)): msg = u"User-defined rule class '{0}' must extend from {1}.{2} or {1}.{3}" raise UserRuleError(msg.format(clazz.__name__, rules.CommitRule.__module__, rules.LineRule.__name__, rules.CommitRule.__name__)) # Rules must have an id attribute if not hasattr(clazz, 'id') or clazz.id is None or not clazz.id: raise UserRuleError(u"User-defined rule class '{0}' must have an 'id' attribute".format(clazz.__name__)) # Rule id's cannot start with gitlint reserved letters if clazz.id[0].upper() in ['R', 'T', 'B', 'M']: msg = u"The id '{1}' of '{0}' is invalid. Gitlint reserves ids starting with R,T,B,M" raise UserRuleError(msg.format(clazz.__name__, clazz.id[0])) # Rules must have a name attribute if not hasattr(clazz, 'name') or clazz.name is None or not clazz.name: raise UserRuleError(u"User-defined rule class '{0}' must have a 'name' attribute".format(clazz.__name__)) # if set, options_spec must be a list of RuleOption if not isinstance(clazz.options_spec, list): msg = u"The options_spec attribute of user-defined rule class '{0}' must be a list of {1}.{2}" raise UserRuleError(msg.format(clazz.__name__, options.RuleOption.__module__, options.RuleOption.__name__)) # check that all items in options_spec are actual gitlint options for option in clazz.options_spec: if not isinstance(option, options.RuleOption): msg = u"The options_spec attribute of user-defined rule class '{0}' must be a list of {1}.{2}" raise UserRuleError(msg.format(clazz.__name__, options.RuleOption.__module__, options.RuleOption.__name__)) # Rules must have a validate method. We use isroutine() as it's both python 2 and 3 compatible. # For more info see http://stackoverflow.com/a/17019998/381010 if not hasattr(clazz, 'validate') or not inspect.isroutine(clazz.validate): raise UserRuleError(u"User-defined rule class '{0}' must have a 'validate' method".format(clazz.__name__)) # LineRules must have a valid target: rules.CommitMessageTitle or rules.CommitMessageBody if issubclass(clazz, rules.LineRule): if clazz.target not in [rules.CommitMessageTitle, rules.CommitMessageBody]: msg = u"The target attribute of the user-defined LineRule class '{0}' must be either {1}.{2} or {1}.{3}" msg = msg.format(clazz.__name__, rules.CommitMessageTitle.__module__, rules.CommitMessageTitle.__name__, rules.CommitMessageBody.__name__) raise UserRuleError(msg)
[ "def", "assert_valid_rule_class", "(", "clazz", ")", ":", "# Rules must extend from LineRule or CommitRule", "if", "not", "(", "issubclass", "(", "clazz", ",", "rules", ".", "LineRule", ")", "or", "issubclass", "(", "clazz", ",", "rules", ".", "CommitRule", ")", ")", ":", "msg", "=", "u\"User-defined rule class '{0}' must extend from {1}.{2} or {1}.{3}\"", "raise", "UserRuleError", "(", "msg", ".", "format", "(", "clazz", ".", "__name__", ",", "rules", ".", "CommitRule", ".", "__module__", ",", "rules", ".", "LineRule", ".", "__name__", ",", "rules", ".", "CommitRule", ".", "__name__", ")", ")", "# Rules must have an id attribute", "if", "not", "hasattr", "(", "clazz", ",", "'id'", ")", "or", "clazz", ".", "id", "is", "None", "or", "not", "clazz", ".", "id", ":", "raise", "UserRuleError", "(", "u\"User-defined rule class '{0}' must have an 'id' attribute\"", ".", "format", "(", "clazz", ".", "__name__", ")", ")", "# Rule id's cannot start with gitlint reserved letters", "if", "clazz", ".", "id", "[", "0", "]", ".", "upper", "(", ")", "in", "[", "'R'", ",", "'T'", ",", "'B'", ",", "'M'", "]", ":", "msg", "=", "u\"The id '{1}' of '{0}' is invalid. Gitlint reserves ids starting with R,T,B,M\"", "raise", "UserRuleError", "(", "msg", ".", "format", "(", "clazz", ".", "__name__", ",", "clazz", ".", "id", "[", "0", "]", ")", ")", "# Rules must have a name attribute", "if", "not", "hasattr", "(", "clazz", ",", "'name'", ")", "or", "clazz", ".", "name", "is", "None", "or", "not", "clazz", ".", "name", ":", "raise", "UserRuleError", "(", "u\"User-defined rule class '{0}' must have a 'name' attribute\"", ".", "format", "(", "clazz", ".", "__name__", ")", ")", "# if set, options_spec must be a list of RuleOption", "if", "not", "isinstance", "(", "clazz", ".", "options_spec", ",", "list", ")", ":", "msg", "=", "u\"The options_spec attribute of user-defined rule class '{0}' must be a list of {1}.{2}\"", "raise", "UserRuleError", "(", "msg", ".", "format", "(", "clazz", ".", "__name__", ",", "options", ".", "RuleOption", ".", "__module__", ",", "options", ".", "RuleOption", ".", "__name__", ")", ")", "# check that all items in options_spec are actual gitlint options", "for", "option", "in", "clazz", ".", "options_spec", ":", "if", "not", "isinstance", "(", "option", ",", "options", ".", "RuleOption", ")", ":", "msg", "=", "u\"The options_spec attribute of user-defined rule class '{0}' must be a list of {1}.{2}\"", "raise", "UserRuleError", "(", "msg", ".", "format", "(", "clazz", ".", "__name__", ",", "options", ".", "RuleOption", ".", "__module__", ",", "options", ".", "RuleOption", ".", "__name__", ")", ")", "# Rules must have a validate method. 
We use isroutine() as it's both python 2 and 3 compatible.", "# For more info see http://stackoverflow.com/a/17019998/381010", "if", "not", "hasattr", "(", "clazz", ",", "'validate'", ")", "or", "not", "inspect", ".", "isroutine", "(", "clazz", ".", "validate", ")", ":", "raise", "UserRuleError", "(", "u\"User-defined rule class '{0}' must have a 'validate' method\"", ".", "format", "(", "clazz", ".", "__name__", ")", ")", "# LineRules must have a valid target: rules.CommitMessageTitle or rules.CommitMessageBody", "if", "issubclass", "(", "clazz", ",", "rules", ".", "LineRule", ")", ":", "if", "clazz", ".", "target", "not", "in", "[", "rules", ".", "CommitMessageTitle", ",", "rules", ".", "CommitMessageBody", "]", ":", "msg", "=", "u\"The target attribute of the user-defined LineRule class '{0}' must be either {1}.{2} or {1}.{3}\"", "msg", "=", "msg", ".", "format", "(", "clazz", ".", "__name__", ",", "rules", ".", "CommitMessageTitle", ".", "__module__", ",", "rules", ".", "CommitMessageTitle", ".", "__name__", ",", "rules", ".", "CommitMessageBody", ".", "__name__", ")", "raise", "UserRuleError", "(", "msg", ")" ]
63.472727
36.927273
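A sketch of a user-defined rule that satisfies every check above: it extends CommitRule, has id and name attributes, its id avoids the reserved R/T/B/M prefixes, and it defines validate(commit). The class body is illustrative:

from gitlint import rules

class BodyRequiresSignedOffBy(rules.CommitRule):
    id = "UC1"                           # 'U' prefix avoids the reserved letters
    name = "body-requires-signed-off-by"

    def validate(self, commit):
        # Return a list of violations; an empty list means the commit passes.
        return []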
def filename_matches_glob(filename: str, globtext: str) -> bool: """ The ``glob.glob`` function doesn't do exclusion very well. We don't want to have to specify root directories for exclusion patterns. We don't want to have to trawl a massive set of files to find exclusion files. So let's implement a glob match. Args: filename: filename globtext: glob Returns: does the filename match the glob? See also: - https://stackoverflow.com/questions/20638040/glob-exclude-pattern """ # Quick check on basename-only matching if fnmatch(filename, globtext): log.debug("{!r} matches {!r}", filename, globtext) return True bname = basename(filename) if fnmatch(bname, globtext): log.debug("{!r} matches {!r}", bname, globtext) return True # Directory matching: is actually accomplished by the code above! # Otherwise: return False
[ "def", "filename_matches_glob", "(", "filename", ":", "str", ",", "globtext", ":", "str", ")", "->", "bool", ":", "# Quick check on basename-only matching", "if", "fnmatch", "(", "filename", ",", "globtext", ")", ":", "log", ".", "debug", "(", "\"{!r} matches {!r}\"", ",", "filename", ",", "globtext", ")", "return", "True", "bname", "=", "basename", "(", "filename", ")", "if", "fnmatch", "(", "bname", ",", "globtext", ")", ":", "log", ".", "debug", "(", "\"{!r} matches {!r}\"", ",", "bname", ",", "globtext", ")", "return", "True", "# Directory matching: is actually accomplished by the code above!", "# Otherwise:", "return", "False" ]
33.9
21.366667
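A quick demonstration of the two-stage match: the full path is tried first (fnmatch's * also matches across '/'), then the basename:

from fnmatch import fnmatch
from os.path import basename

filename = '/tmp/data/report.csv'
print(fnmatch(filename, '*.csv'))               # True: matched on the full path
print(fnmatch(basename(filename), 'report.*'))  # True: matched on the basename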
def _inject_target(self, target_adaptor): """Inject a target, respecting all sources of dependencies.""" target_cls = self._target_types[target_adaptor.type_alias] declared_deps = target_adaptor.dependencies implicit_deps = (Address.parse(s, relative_to=target_adaptor.address.spec_path, subproject_roots=self._address_mapper.subproject_roots) for s in target_cls.compute_dependency_specs(kwargs=target_adaptor.kwargs())) for dep in declared_deps: self._dependent_address_map[dep].add(target_adaptor.address) for dep in implicit_deps: self._implicit_dependent_address_map[dep].add(target_adaptor.address)
[ "def", "_inject_target", "(", "self", ",", "target_adaptor", ")", ":", "target_cls", "=", "self", ".", "_target_types", "[", "target_adaptor", ".", "type_alias", "]", "declared_deps", "=", "target_adaptor", ".", "dependencies", "implicit_deps", "=", "(", "Address", ".", "parse", "(", "s", ",", "relative_to", "=", "target_adaptor", ".", "address", ".", "spec_path", ",", "subproject_roots", "=", "self", ".", "_address_mapper", ".", "subproject_roots", ")", "for", "s", "in", "target_cls", ".", "compute_dependency_specs", "(", "kwargs", "=", "target_adaptor", ".", "kwargs", "(", ")", ")", ")", "for", "dep", "in", "declared_deps", ":", "self", ".", "_dependent_address_map", "[", "dep", "]", ".", "add", "(", "target_adaptor", ".", "address", ")", "for", "dep", "in", "implicit_deps", ":", "self", ".", "_implicit_dependent_address_map", "[", "dep", "]", ".", "add", "(", "target_adaptor", ".", "address", ")" ]
55.384615
23.384615
def normal(self, t, i): """Handle normal chars.""" current = [] if t == "\\": try: t = next(i) current.extend(self.reference(t, i)) except StopIteration: current.append(t) elif t == "(": current.extend(self.subgroup(t, i)) elif self.verbose and t == "#": current.extend(self.verbose_comment(t, i)) elif t == "[": current.extend(self.char_groups(t, i)) else: current.append(t) return current
[ "def", "normal", "(", "self", ",", "t", ",", "i", ")", ":", "current", "=", "[", "]", "if", "t", "==", "\"\\\\\"", ":", "try", ":", "t", "=", "next", "(", "i", ")", "current", ".", "extend", "(", "self", ".", "reference", "(", "t", ",", "i", ")", ")", "except", "StopIteration", ":", "current", ".", "append", "(", "t", ")", "elif", "t", "==", "\"(\"", ":", "current", ".", "extend", "(", "self", ".", "subgroup", "(", "t", ",", "i", ")", ")", "elif", "self", ".", "verbose", "and", "t", "==", "\"#\"", ":", "current", ".", "extend", "(", "self", ".", "verbose_comment", "(", "t", ",", "i", ")", ")", "elif", "t", "==", "\"[\"", ":", "current", ".", "extend", "(", "self", ".", "char_groups", "(", "t", ",", "i", ")", ")", "else", ":", "current", ".", "append", "(", "t", ")", "return", "current" ]
27.85
16.15
def allele_counts_dataframe(variant_and_allele_reads_generator): """ Creates a DataFrame containing number of reads supporting the ref vs. alt alleles for each variant. """ df_builder = DataFrameBuilder( AlleleCount, extra_column_fns={ "gene": lambda variant, _: ";".join(variant.gene_names), }) for variant, allele_reads in variant_and_allele_reads_generator: counts = count_alleles_at_variant_locus(variant, allele_reads) df_builder.add(variant, counts) return df_builder.to_dataframe()
[ "def", "allele_counts_dataframe", "(", "variant_and_allele_reads_generator", ")", ":", "df_builder", "=", "DataFrameBuilder", "(", "AlleleCount", ",", "extra_column_fns", "=", "{", "\"gene\"", ":", "lambda", "variant", ",", "_", ":", "\";\"", ".", "join", "(", "variant", ".", "gene_names", ")", ",", "}", ")", "for", "variant", ",", "allele_reads", "in", "variant_and_allele_reads_generator", ":", "counts", "=", "count_alleles_at_variant_locus", "(", "variant", ",", "allele_reads", ")", "df_builder", ".", "add", "(", "variant", ",", "counts", ")", "return", "df_builder", ".", "to_dataframe", "(", ")" ]
39.642857
15.071429
def scandir(path, app=None): ''' Config-aware scandir. Currently, only aware of ``exclude_fnc``. :param path: absolute path :type path: str :param app: flask application :type app: flask.Flask or None :returns: filtered scandir entries :rtype: iterator ''' exclude = app and app.config.get('exclude_fnc') if exclude: return ( item for item in compat.scandir(path) if not exclude(item.path) ) return compat.scandir(path)
[ "def", "scandir", "(", "path", ",", "app", "=", "None", ")", ":", "exclude", "=", "app", "and", "app", ".", "config", ".", "get", "(", "'exclude_fnc'", ")", "if", "exclude", ":", "return", "(", "item", "for", "item", "in", "compat", ".", "scandir", "(", "path", ")", "if", "not", "exclude", "(", "item", ".", "path", ")", ")", "return", "compat", ".", "scandir", "(", "path", ")" ]
26.631579
17.789474
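A self-contained variant built on the standard library's os.scandir, with an exclude function shaped like the one read from app.config['exclude_fnc']:

import os

def scandir(path, exclude=None):
    # Lazily filter directory entries when an exclude function is given.
    if exclude:
        return (item for item in os.scandir(path) if not exclude(item.path))
    return os.scandir(path)

hidden = lambda p: os.path.basename(p).startswith('.')
print(sorted(entry.name for entry in scandir('.', exclude=hidden)))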
def put_meta(request): """MNStorage.updateSystemMetadata(session, pid, sysmeta) → boolean. TODO: Currently, this call allows making breaking changes to SysMeta. We need to clarify what can be modified and what the behavior should be when working with SIDs and chains. """ if django.conf.settings.REQUIRE_WHITELIST_FOR_UPDATE: d1_gmn.app.auth.assert_create_update_delete_permission(request) d1_gmn.app.util.coerce_put_post(request) d1_gmn.app.views.assert_db.post_has_mime_parts( request, (('field', 'pid'), ('file', 'sysmeta')) ) pid = request.POST['pid'] d1_gmn.app.auth.assert_allowed(request, d1_gmn.app.auth.WRITE_LEVEL, pid) new_sysmeta_pyxb = d1_gmn.app.sysmeta.deserialize(request.FILES['sysmeta']) d1_gmn.app.views.assert_sysmeta.has_matching_modified_timestamp(new_sysmeta_pyxb) d1_gmn.app.views.create.set_mn_controlled_values( request, new_sysmeta_pyxb, is_modification=True ) d1_gmn.app.sysmeta.create_or_update(new_sysmeta_pyxb) d1_gmn.app.event_log.log_update_event( pid, request, timestamp=d1_common.date_time.normalize_datetime_to_utc( new_sysmeta_pyxb.dateUploaded ), ) return d1_gmn.app.views.util.http_response_with_boolean_true_type()
[ "def", "put_meta", "(", "request", ")", ":", "if", "django", ".", "conf", ".", "settings", ".", "REQUIRE_WHITELIST_FOR_UPDATE", ":", "d1_gmn", ".", "app", ".", "auth", ".", "assert_create_update_delete_permission", "(", "request", ")", "d1_gmn", ".", "app", ".", "util", ".", "coerce_put_post", "(", "request", ")", "d1_gmn", ".", "app", ".", "views", ".", "assert_db", ".", "post_has_mime_parts", "(", "request", ",", "(", "(", "'field'", ",", "'pid'", ")", ",", "(", "'file'", ",", "'sysmeta'", ")", ")", ")", "pid", "=", "request", ".", "POST", "[", "'pid'", "]", "d1_gmn", ".", "app", ".", "auth", ".", "assert_allowed", "(", "request", ",", "d1_gmn", ".", "app", ".", "auth", ".", "WRITE_LEVEL", ",", "pid", ")", "new_sysmeta_pyxb", "=", "d1_gmn", ".", "app", ".", "sysmeta", ".", "deserialize", "(", "request", ".", "FILES", "[", "'sysmeta'", "]", ")", "d1_gmn", ".", "app", ".", "views", ".", "assert_sysmeta", ".", "has_matching_modified_timestamp", "(", "new_sysmeta_pyxb", ")", "d1_gmn", ".", "app", ".", "views", ".", "create", ".", "set_mn_controlled_values", "(", "request", ",", "new_sysmeta_pyxb", ",", "is_modification", "=", "True", ")", "d1_gmn", ".", "app", ".", "sysmeta", ".", "create_or_update", "(", "new_sysmeta_pyxb", ")", "d1_gmn", ".", "app", ".", "event_log", ".", "log_update_event", "(", "pid", ",", "request", ",", "timestamp", "=", "d1_common", ".", "date_time", ".", "normalize_datetime_to_utc", "(", "new_sysmeta_pyxb", ".", "dateUploaded", ")", ",", ")", "return", "d1_gmn", ".", "app", ".", "views", ".", "util", ".", "http_response_with_boolean_true_type", "(", ")" ]
42.366667
23.833333
def snmp_server_agtconfig_location(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") snmp_server = ET.SubElement(config, "snmp-server", xmlns="urn:brocade.com:mgmt:brocade-snmp") agtconfig = ET.SubElement(snmp_server, "agtconfig") location = ET.SubElement(agtconfig, "location") location.text = kwargs.pop('location') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "snmp_server_agtconfig_location", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "snmp_server", "=", "ET", ".", "SubElement", "(", "config", ",", "\"snmp-server\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-snmp\"", ")", "agtconfig", "=", "ET", ".", "SubElement", "(", "snmp_server", ",", "\"agtconfig\"", ")", "location", "=", "ET", ".", "SubElement", "(", "agtconfig", ",", "\"location\"", ")", "location", ".", "text", "=", "kwargs", ".", "pop", "(", "'location'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
43.454545
16.454545
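A plain ElementTree sketch of the document this builder assembles; the location text is a placeholder:

import xml.etree.ElementTree as ET

config = ET.Element("config")
snmp_server = ET.SubElement(config, "snmp-server",
                            xmlns="urn:brocade.com:mgmt:brocade-snmp")
agtconfig = ET.SubElement(snmp_server, "agtconfig")
location = ET.SubElement(agtconfig, "location")
location.text = "rack-12"
print(ET.tostring(config).decode())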
def get_public_key_info(self): """ Analyze the public key information we have in our scriptSig. Returns {'status': true, 'type': 'singlesig' | 'multisig', 'public_keys': [...], 'num_sigs': ...} on success Returns {'error': ...} on error """ script_parts = virtualchain.btc_script_deserialize(base64.b64decode(self.sig)) if len(script_parts) < 2: return {'error': 'Signature script does not appear to encode any public keys'} if len(script_parts) == 2: # possibly p2pkh pubkey = script_parts[1].encode('hex') try: pubkey_object = virtualchain.ecdsalib.ecdsa_public_key(pubkey) except: return {'error': 'Could not instantiate public key {}'.format(pubkey)} if virtualchain.address_reencode(pubkey_object.address()) != virtualchain.address_reencode(self.address): return {'error': 'Public key does not match owner address {}'.format(self.address)} return {'status': True, 'type': 'singlesig', 'public_keys': [pubkey], 'num_sigs': 1} else: # possibly p2sh multisig. redeem_script = script_parts[-1] if virtualchain.address_reencode(virtualchain.btc_make_p2sh_address(redeem_script)) != virtualchain.address_reencode(self.address): return {'error': 'Multisig redeem script does not match owner address {}'.format(self.address)} m, pubkey_hexes = virtualchain.parse_multisig_redeemscript(redeem_script) for pkh in pubkey_hexes: try: virtualchain.ecdsalib.ecdsa_public_key(pkh) except: return {'error': 'Invalid public key string in multisig script'} return {'status': True, 'type': 'multisig', 'public_keys': pubkey_hexes, 'num_sigs': m}
[ "def", "get_public_key_info", "(", "self", ")", ":", "script_parts", "=", "virtualchain", ".", "btc_script_deserialize", "(", "base64", ".", "b64decode", "(", "self", ".", "sig", ")", ")", "if", "len", "(", "script_parts", ")", "<", "2", ":", "return", "{", "'error'", ":", "'Signature script does not appear to encode any public keys'", "}", "if", "len", "(", "script_parts", ")", "==", "2", ":", "# possibly p2pkh", "pubkey", "=", "script_parts", "[", "1", "]", ".", "encode", "(", "'hex'", ")", "try", ":", "pubkey_object", "=", "virtualchain", ".", "ecdsalib", ".", "ecdsa_public_key", "(", "pubkey", ")", "except", ":", "return", "{", "'error'", ":", "'Could not instantiate public key {}'", ".", "format", "(", "pubkey", ")", "}", "if", "virtualchain", ".", "address_reencode", "(", "pubkey_object", ".", "address", "(", ")", ")", "!=", "virtualchain", ".", "address_reencode", "(", "self", ".", "address", ")", ":", "return", "{", "'error'", ":", "'Public key does not match owner address {}'", ".", "format", "(", "self", ".", "address", ")", "}", "return", "{", "'status'", ":", "True", ",", "'type'", ":", "'singlesig'", ",", "'public_keys'", ":", "[", "pubkey", "]", ",", "'num_sigs'", ":", "1", "}", "else", ":", "# possibly p2sh multisig.", "redeem_script", "=", "script_parts", "[", "-", "1", "]", "if", "virtualchain", ".", "address_reencode", "(", "virtualchain", ".", "btc_make_p2sh_address", "(", "redeem_script", ")", ")", "!=", "virtualchain", ".", "address_reencode", "(", "self", ".", "address", ")", ":", "return", "{", "'error'", ":", "'Multisig redeem script does not match owner address {}'", ".", "format", "(", "self", ".", "address", ")", "}", "m", ",", "pubkey_hexes", "=", "virtualchain", ".", "parse_multisig_redeemscript", "(", "redeem_script", ")", "for", "pkh", "in", "pubkey_hexes", ":", "try", ":", "virtualchain", ".", "ecdsalib", ".", "ecdsa_public_key", "(", "pkh", ")", "except", ":", "return", "{", "'error'", ":", "'Invalid public key string in multisig script'", "}", "return", "{", "'status'", ":", "True", ",", "'type'", ":", "'multisig'", ",", "'public_keys'", ":", "pubkey_hexes", ",", "'num_sigs'", ":", "m", "}" ]
49.078947
33.342105
def failure(path, downloader): """Display warning message via stderr or GUI.""" base = os.path.basename(path) if sys.stdin.isatty(): print('INFO [{}]: Failed to download {!r}'.format(downloader, base)) else: notify_failure(base, downloader)
[ "def", "failure", "(", "path", ",", "downloader", ")", ":", "base", "=", "os", ".", "path", ".", "basename", "(", "path", ")", "if", "sys", ".", "stdin", ".", "isatty", "(", ")", ":", "print", "(", "'INFO [{}]: Failed to download {!r}'", ".", "format", "(", "downloader", ",", "base", ")", ")", "else", ":", "notify_failure", "(", "base", ",", "downloader", ")" ]
38
14
def json(self): """Produce a JSON representation for the web interface""" d = { 'id': self.id, 'format': self.formatclass.__name__,'label': self.label, 'mimetype': self.formatclass.mimetype, 'schema': self.formatclass.schema } if self.unique: d['unique'] = True if self.filename: d['filename'] = self.filename if self.extension: d['extension'] = self.extension if self.acceptarchive: d['acceptarchive'] = self.acceptarchive #d['parameters'] = {} #The actual parameters are included as XML, and transformed by clam.js using XSLT (parameter.xsl) to generate the forms parametersxml = '' for parameter in self.parameters: parametersxml += parameter.xml() d['parametersxml'] = '<?xml version="1.0" encoding="utf-8" ?><parameters>' + parametersxml + '</parameters>' d['converters'] = [ {'id':x.id, 'label':x.label} for x in self.converters ] d['inputsources'] = [ {'id':x.id, 'label':x.label} for x in self.inputsources ] return json.dumps(d)
[ "def", "json", "(", "self", ")", ":", "d", "=", "{", "'id'", ":", "self", ".", "id", ",", "'format'", ":", "self", ".", "formatclass", ".", "__name__", ",", "'label'", ":", "self", ".", "label", ",", "'mimetype'", ":", "self", ".", "formatclass", ".", "mimetype", ",", "'schema'", ":", "self", ".", "formatclass", ".", "schema", "}", "if", "self", ".", "unique", ":", "d", "[", "'unique'", "]", "=", "True", "if", "self", ".", "filename", ":", "d", "[", "'filename'", "]", "=", "self", ".", "filename", "if", "self", ".", "extension", ":", "d", "[", "'extension'", "]", "=", "self", ".", "extension", "if", "self", ".", "acceptarchive", ":", "d", "[", "'acceptarchive'", "]", "=", "self", ".", "acceptarchive", "#d['parameters'] = {}", "#The actual parameters are included as XML, and transformed by clam.js using XSLT (parameter.xsl) to generate the forms", "parametersxml", "=", "''", "for", "parameter", "in", "self", ".", "parameters", ":", "parametersxml", "+=", "parameter", ".", "xml", "(", ")", "d", "[", "'parametersxml'", "]", "=", "'<?xml version=\"1.0\" encoding=\"utf-8\" ?><parameters>'", "+", "parametersxml", "+", "'</parameters>'", "d", "[", "'converters'", "]", "=", "[", "{", "'id'", ":", "x", ".", "id", ",", "'label'", ":", "x", ".", "label", "}", "for", "x", "in", "self", ".", "converters", "]", "d", "[", "'inputsources'", "]", "=", "[", "{", "'id'", ":", "x", ".", "id", ",", "'label'", ":", "x", ".", "label", "}", "for", "x", "in", "self", ".", "inputsources", "]", "return", "json", ".", "dumps", "(", "d", ")" ]
49.590909
27.363636
def get_service_location_info(self, service_location_id): """ Request service location info Parameters ---------- service_location_id : int Returns ------- dict """ url = urljoin(URLS['servicelocation'], service_location_id, "info") headers = {"Authorization": "Bearer {}".format(self.access_token)} r = requests.get(url, headers=headers) r.raise_for_status() return r.json()
[ "def", "get_service_location_info", "(", "self", ",", "service_location_id", ")", ":", "url", "=", "urljoin", "(", "URLS", "[", "'servicelocation'", "]", ",", "service_location_id", ",", "\"info\"", ")", "headers", "=", "{", "\"Authorization\"", ":", "\"Bearer {}\"", ".", "format", "(", "self", ".", "access_token", ")", "}", "r", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "headers", ")", "r", ".", "raise_for_status", "(", ")", "return", "r", ".", "json", "(", ")" ]
27.823529
19.588235
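The method above boils down to a bearer-token GET followed by raise_for_status; a minimal standalone sketch of that pattern (the URL and token below are hypothetical, and requests is assumed to be installed):

import requests

def get_json(url, access_token):
    # Same shape as the method above: bearer auth, fail fast on HTTP errors
    headers = {"Authorization": "Bearer {}".format(access_token)}
    r = requests.get(url, headers=headers)
    r.raise_for_status()
    return r.json()

# info = get_json("https://api.example.com/servicelocation/123/info", "TOKEN")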
def timeout(seconds, err_msg="xtls: function run too long."):
    """
    Timeout guard: abort the wrapped function if it runs too long
    :param seconds: maximum running time of the function, in seconds
    :param err_msg: message raised on timeout
    """
    def decorator(function):
        def _on_timeout(signum, frame):
            raise TimeoutError(err_msg)

        @wraps(function)
        def wrapper(*args, **kwargs):
            signal.signal(signal.SIGALRM, _on_timeout)
            signal.alarm(seconds)
            try:
                result = function(*args, **kwargs)
            finally:
                signal.alarm(0)
            return result
        return wrapper
    return decorator
[ "def", "timeout", "(", "seconds", ",", "err_msg", "=", "\"xtls: function run too long.\"", ")", ":", "def", "decorator", "(", "function", ")", ":", "def", "_on_timeout", "(", "signum", ",", "frame", ")", ":", "raise", "TimeoutError", "(", "err_msg", ")", "@", "wraps", "(", "function", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "signal", ".", "signal", "(", "signal", ".", "SIGALRM", ",", "_on_timeout", ")", "signal", ".", "alarm", "(", "seconds", ")", "try", ":", "result", "=", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "signal", ".", "alarm", "(", "0", ")", "return", "result", "return", "wrapper", "return", "decorator" ]
23.875
17.125
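A usage sketch for the decorator above, assuming timeout is importable; signal.SIGALRM restricts it to Unix and to the main thread, and Python 3's built-in TimeoutError is assumed:

import time

@timeout(2)
def slow():
    time.sleep(5)

try:
    slow()
except TimeoutError as exc:
    print(exc)  # "xtls: function run too long." after ~2 s, not 5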
def unwrap_or(self, default: U) -> Union[T, U]:
    """
    Returns the contained value or ``default``.

    Args:
        default: The default value.

    Returns:
        The contained value if the :py:class:`Option`
        is ``Some``, otherwise ``default``.

    Notes:
        If you wish to use a result of a function call as the default,
        it is recommended to use :py:meth:`unwrap_or_else` instead.

    Examples:
        >>> Some(0).unwrap_or(3)
        0
        >>> NONE.unwrap_or(0)
        0
    """
    return self.unwrap_or_else(lambda: default)
[ "def", "unwrap_or", "(", "self", ",", "default", ":", "U", ")", "->", "Union", "[", "T", ",", "U", "]", ":", "return", "self", ".", "unwrap_or_else", "(", "lambda", ":", "default", ")" ]
27.863636
20.681818
def init_app(self, app): """ Initialize the captcha extension to the given app object. """ self.enabled = app.config.get("CAPTCHA_ENABLE", True) self.digits = app.config.get("CAPTCHA_LENGTH", 4) self.max = 10**self.digits self.image_generator = ImageCaptcha() self.rand = SystemRandom() def _generate(): if not self.enabled: return "" base64_captcha = self.generate() return Markup("<img src='{}'>".format("data:image/png;base64, {}".format(base64_captcha))) app.jinja_env.globals['captcha'] = _generate # Check for sessions that do not persist on the server. Issue a warning because they are most likely open to replay attacks. # This addon is built upon flask-session. session_type = app.config.get('SESSION_TYPE', None) if session_type is None or session_type == "null": raise RuntimeWarning("Flask-Sessionstore is not set to use a server persistent storage type. This likely means that captchas are vulnerable to replay attacks.") elif session_type == "sqlalchemy": # I have to do this as of version 0.3.1 of flask-session if using sqlalchemy as the session type in order to create the initial database. # Flask-sessionstore seems to have the same problem. app.session_interface.db.create_all()
[ "def", "init_app", "(", "self", ",", "app", ")", ":", "self", ".", "enabled", "=", "app", ".", "config", ".", "get", "(", "\"CAPTCHA_ENABLE\"", ",", "True", ")", "self", ".", "digits", "=", "app", ".", "config", ".", "get", "(", "\"CAPTCHA_LENGTH\"", ",", "4", ")", "self", ".", "max", "=", "10", "**", "self", ".", "digits", "self", ".", "image_generator", "=", "ImageCaptcha", "(", ")", "self", ".", "rand", "=", "SystemRandom", "(", ")", "def", "_generate", "(", ")", ":", "if", "not", "self", ".", "enabled", ":", "return", "\"\"", "base64_captcha", "=", "self", ".", "generate", "(", ")", "return", "Markup", "(", "\"<img src='{}'>\"", ".", "format", "(", "\"data:image/png;base64, {}\"", ".", "format", "(", "base64_captcha", ")", ")", ")", "app", ".", "jinja_env", ".", "globals", "[", "'captcha'", "]", "=", "_generate", "# Check for sessions that do not persist on the server. Issue a warning because they are most likely open to replay attacks.", "# This addon is built upon flask-session.", "session_type", "=", "app", ".", "config", ".", "get", "(", "'SESSION_TYPE'", ",", "None", ")", "if", "session_type", "is", "None", "or", "session_type", "==", "\"null\"", ":", "raise", "RuntimeWarning", "(", "\"Flask-Sessionstore is not set to use a server persistent storage type. This likely means that captchas are vulnerable to replay attacks.\"", ")", "elif", "session_type", "==", "\"sqlalchemy\"", ":", "# I have to do this as of version 0.3.1 of flask-session if using sqlalchemy as the session type in order to create the initial database.", "# Flask-sessionstore seems to have the same problem. ", "app", ".", "session_interface", ".", "db", ".", "create_all", "(", ")" ]
52
27.407407
def construct_xblock(self, block_type, scope_ids, field_data=None, *args, **kwargs): r""" Construct a new xblock of the type identified by block_type, passing \*args and \*\*kwargs into `__init__`. """ return self.construct_xblock_from_class( cls=self.load_block_type(block_type), scope_ids=scope_ids, field_data=field_data, *args, **kwargs )
[ "def", "construct_xblock", "(", "self", ",", "block_type", ",", "scope_ids", ",", "field_data", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "construct_xblock_from_class", "(", "cls", "=", "self", ".", "load_block_type", "(", "block_type", ")", ",", "scope_ids", "=", "scope_ids", ",", "field_data", "=", "field_data", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
38.909091
14.636364
def single_node_env(args): """Sets up environment for a single-node TF session. Args: :args: command line arguments as either argparse args or argv list """ # setup ARGV for the TF process if isinstance(args, list): sys.argv = args elif args.argv: sys.argv = args.argv # setup ENV for Hadoop-compatibility and/or GPU allocation num_gpus = args.num_gpus if 'num_gpus' in args else 1 util.single_node_env(num_gpus)
[ "def", "single_node_env", "(", "args", ")", ":", "# setup ARGV for the TF process", "if", "isinstance", "(", "args", ",", "list", ")", ":", "sys", ".", "argv", "=", "args", "elif", "args", ".", "argv", ":", "sys", ".", "argv", "=", "args", ".", "argv", "# setup ENV for Hadoop-compatibility and/or GPU allocation", "num_gpus", "=", "args", ".", "num_gpus", "if", "'num_gpus'", "in", "args", "else", "1", "util", ".", "single_node_env", "(", "num_gpus", ")" ]
28.933333
18.333333
def Point2HexColor(a, lfrac, tfrac):
    """
    Return a web hex color triplet.
    """
    [H,S,V] = [math.floor(360 * a), lfrac, tfrac]
    RGB = hsvToRGB(H, S, V)
    H = [hex(int(math.floor(255 * x))) for x in RGB]
    HEX = [a[a.find('x') + 1:] for a in H]
    HEX = ['0' + h if len(h) == 1 else h for h in HEX]
    return '#' + ''.join(HEX)
[ "def", "Point2HexColor", "(", "a", ",", "lfrac", ",", "tfrac", ")", ":", "[", "H", ",", "S", ",", "V", "]", "=", "[", "math", ".", "floor", "(", "360", "*", "a", ")", ",", "lfrac", ",", "tfrac", "]", "RGB", "=", "hsvToRGB", "(", "H", ",", "S", ",", "V", ")", "H", "=", "[", "hex", "(", "int", "(", "math", ".", "floor", "(", "255", "*", "x", ")", ")", ")", "for", "x", "in", "RGB", "]", "HEX", "=", "[", "a", "[", "a", ".", "find", "(", "'x'", ")", "+", "1", ":", "]", "for", "a", "in", "H", "]", "HEX", "=", "[", "'0'", "+", "h", "if", "len", "(", "h", ")", "==", "1", "else", "h", "for", "h", "in", "HEX", "]", "return", "'#'", "+", "''", ".", "join", "(", "HEX", ")" ]
21
19.5
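The record calls an hsvToRGB helper it never defines; the sketch below supplies a hypothetical stand-in via the standard-library colorsys, assuming H is in degrees and S, V lie in [0, 1]:

import colorsys
import math

def hsvToRGB(H, S, V):
    # Hypothetical stand-in for the undefined helper: returns (r, g, b) in [0, 1]
    return colorsys.hsv_to_rgb(H / 360.0, S, V)

print(Point2HexColor(0.5, 0.8, 0.9))  # '#2de5e5' under this stand-in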
def get_installed_version(dist_name, working_set=None):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Create a requirement that we'll look for inside of setuptools.
    req = pkg_resources.Requirement.parse(dist_name)

    if working_set is None:
        # We want to avoid having this cached, so we need to construct a new
        # working set each time.
        working_set = pkg_resources.WorkingSet()

    # Get the installed distribution from our working set
    dist = working_set.find(req)

    # Check to see if we got an installed distribution or not, if we did
    # we want to return its version.
    return dist.version if dist else None
[ "def", "get_installed_version", "(", "dist_name", ",", "working_set", "=", "None", ")", ":", "# Create a requirement that we'll look for inside of setuptools.", "req", "=", "pkg_resources", ".", "Requirement", ".", "parse", "(", "dist_name", ")", "if", "working_set", "is", "None", ":", "# We want to avoid having this cached, so we need to construct a new", "# working set each time.", "working_set", "=", "pkg_resources", ".", "WorkingSet", "(", ")", "# Get the installed distribution from our working set", "dist", "=", "working_set", ".", "find", "(", "req", ")", "# Check to see if we got an installed distribution or not, if we did", "# we want to return it's version.", "return", "dist", ".", "version", "if", "dist", "else", "None" ]
42.125
18.8125
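A quick call sketch (assumes setuptools' pkg_resources is available and the function is in scope):

import pkg_resources  # needed by get_installed_version itself

print(get_installed_version('setuptools'))          # e.g. '68.0.0'
print(get_installed_version('not-a-real-package'))  # None: find() misses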
def addmags(*mags): """ mags is either list of magnitudes or list of (mag, err) pairs """ tot = 0 uncs = [] for mag in mags: try: tot += 10**(-0.4*mag) except: m, dm = mag f = 10**(-0.4*m) tot += f unc = f * (1 - 10**(-0.4*dm)) uncs.append(unc) totmag = -2.5*np.log10(tot) if len(uncs) > 0: f_unc = np.sqrt(np.array([u**2 for u in uncs]).sum()) return totmag, -2.5*np.log10(1 - f_unc/tot) else: return totmag
[ "def", "addmags", "(", "*", "mags", ")", ":", "tot", "=", "0", "uncs", "=", "[", "]", "for", "mag", "in", "mags", ":", "try", ":", "tot", "+=", "10", "**", "(", "-", "0.4", "*", "mag", ")", "except", ":", "m", ",", "dm", "=", "mag", "f", "=", "10", "**", "(", "-", "0.4", "*", "m", ")", "tot", "+=", "f", "unc", "=", "f", "*", "(", "1", "-", "10", "**", "(", "-", "0.4", "*", "dm", ")", ")", "uncs", ".", "append", "(", "unc", ")", "totmag", "=", "-", "2.5", "*", "np", ".", "log10", "(", "tot", ")", "if", "len", "(", "uncs", ")", ">", "0", ":", "f_unc", "=", "np", ".", "sqrt", "(", "np", ".", "array", "(", "[", "u", "**", "2", "for", "u", "in", "uncs", "]", ")", ".", "sum", "(", ")", ")", "return", "totmag", ",", "-", "2.5", "*", "np", ".", "log10", "(", "1", "-", "f_unc", "/", "tot", ")", "else", ":", "return", "totmag" ]
24.363636
17.909091
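A worked example of the flux arithmetic above: two sources of magnitude 10.0 double the total flux, so the combined magnitude is 10.0 - 2.5*log10(2) ≈ 9.247 (assumes addmags and numpy as np are in scope):

import numpy as np  # addmags uses np internally

print(addmags(10.0, 10.0))                # ~9.2474: doubled flux brightens by 2.5*log10(2)
print(addmags((10.0, 0.1), (11.0, 0.2)))  # (mag, err) pairs also propagate uncertainty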
def on_start(self): """ start the service """ LOGGER.debug("natsd.Service.on_start") self.service = threading.Thread(target=self.run_event_loop, name=self.serviceQ + " service thread") self.service.start() while not self.is_started: time.sleep(0.01)
[ "def", "on_start", "(", "self", ")", ":", "LOGGER", ".", "debug", "(", "\"natsd.Service.on_start\"", ")", "self", ".", "service", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "run_event_loop", ",", "name", "=", "self", ".", "serviceQ", "+", "\" service thread\"", ")", "self", ".", "service", ".", "start", "(", ")", "while", "not", "self", ".", "is_started", ":", "time", ".", "sleep", "(", "0.01", ")" ]
34.333333
15.444444
def SNM0(T, Tc, Vc, omega, delta_SRK=None):
    r'''Calculates saturated liquid density using the Mchaweh, Moshfeghian
    model [1]_. Designed for simple calculations.

    .. math::
        V_s = V_c/(1+1.169\tau^{1/3}+1.818\tau^{2/3}-2.658\tau+2.161\tau^{4/3})

        \tau = 1-\frac{(T/T_c)}{\alpha_{SRK}}

        \alpha_{SRK} = [1 + m(1-\sqrt{T/T_c})]^2

        m = 0.480+1.574\omega-0.176\omega^2

    If the fit parameter `delta_SRK` is provided, the following is used:

    .. math::
        V_s = V_C/(1+1.169\tau^{1/3}+1.818\tau^{2/3}-2.658\tau+2.161\tau^{4/3})
        /\left[1+\delta_{SRK}(\alpha_{SRK}-1)^{1/3}\right]

    Parameters
    ----------
    T : float
        Temperature of fluid [K]
    Tc : float
        Critical temperature of fluid [K]
    Vc : float
        Critical volume of fluid [m^3/mol]
    omega : float
        Acentric factor for fluid, [-]
    delta_SRK : float, optional
        Fitting parameter [-]

    Returns
    -------
    Vs : float
        Saturation liquid volume, [m^3/mol]

    Notes
    -----
    73 fit parameters have been gathered from the article.

    Examples
    --------
    Argon, without the fit parameter and with it. Tabulated result in Perry's
    is 3.4613e-05. The fit increases the error on this occasion.

    >>> SNM0(121, 150.8, 7.49e-05, -0.004)
    3.4402256402733416e-05
    >>> SNM0(121, 150.8, 7.49e-05, -0.004, -0.03259620)
    3.493288100008123e-05

    References
    ----------
    .. [1] Mchaweh, A., A. Alsaygh, Kh. Nasrifar, and M. Moshfeghian. "A
       Simplified Method for Calculating Saturated Liquid Densities." Fluid
       Phase Equilibria 224, no. 2 (October 1, 2004): 157-67.
       doi:10.1016/j.fluid.2004.06.054
    '''
    Tr = T/Tc
    m = 0.480 + 1.574*omega - 0.176*omega*omega
    alpha_SRK = (1. + m*(1. - Tr**0.5))**2
    tau = 1. - Tr/alpha_SRK
    rho0 = 1. + 1.169*tau**(1/3.) + 1.818*tau**(2/3.) - 2.658*tau + 2.161*tau**(4/3.)
    V0 = 1./rho0
    if not delta_SRK:
        return Vc*V0
    else:
        return Vc*V0/(1. + delta_SRK*(alpha_SRK - 1.)**(1/3.))
[ "def", "SNM0", "(", "T", ",", "Tc", ",", "Vc", ",", "omega", ",", "delta_SRK", "=", "None", ")", ":", "Tr", "=", "T", "/", "Tc", "m", "=", "0.480", "+", "1.574", "*", "omega", "-", "0.176", "*", "omega", "*", "omega", "alpha_SRK", "=", "(", "1.", "+", "m", "*", "(", "1.", "-", "Tr", "**", "0.5", ")", ")", "**", "2", "tau", "=", "1.", "-", "Tr", "/", "alpha_SRK", "rho0", "=", "1.", "+", "1.169", "*", "tau", "**", "(", "1", "/", "3.", ")", "+", "1.818", "*", "tau", "**", "(", "2", "/", "3.", ")", "-", "2.658", "*", "tau", "+", "2.161", "*", "tau", "**", "(", "4", "/", "3.", ")", "V0", "=", "1.", "/", "rho0", "if", "not", "delta_SRK", ":", "return", "Vc", "*", "V0", "else", ":", "return", "Vc", "*", "V0", "/", "(", "1.", "+", "delta_SRK", "*", "(", "alpha_SRK", "-", "1.", ")", "**", "(", "1", "/", "3.", ")", ")" ]
28.628571
24.342857
def write_branch_data(self, file): """ Writes branch data to file. """ branch_attr = ["r", "x", "b", "rate_a", "rate_b", "rate_c", "ratio", "phase_shift", "online", "ang_min", "ang_max", "p_from", "q_from", "p_to", "q_to", "mu_s_from", "mu_s_to", "mu_angmin", "mu_angmax"] file.write("\n%%%% branch data\n") file.write("%%\tfbus\ttbus\tr\tx\tb\trateA\trateB\trateC\tratio" "\tangle\tstatus") file.write("\tangmin\tangmax") file.write("\tPf\tQf\tPt\tQt") file.write("\tmu_Sf\tmu_St") file.write("\tmu_angmin\tmu_angmax") file.write("\n%sbranch = [\n" % self._prefix) for branch in self.case.branches: vals = [getattr(branch, a) for a in branch_attr] vals.insert(0, branch.to_bus._i) vals.insert(0, branch.from_bus._i) file.write("\t%d\t%d\t%g\t%g\t%g\t%g\t%g\t%g\t%g\t%g\t%d\t%g\t%g" "\t%.4f\t%.4f\t%.4f\t%.4f\t%.4f\t%.4f\t%.4f\t%.4f;\n" % tuple(vals)) file.write("];\n")
[ "def", "write_branch_data", "(", "self", ",", "file", ")", ":", "branch_attr", "=", "[", "\"r\"", ",", "\"x\"", ",", "\"b\"", ",", "\"rate_a\"", ",", "\"rate_b\"", ",", "\"rate_c\"", ",", "\"ratio\"", ",", "\"phase_shift\"", ",", "\"online\"", ",", "\"ang_min\"", ",", "\"ang_max\"", ",", "\"p_from\"", ",", "\"q_from\"", ",", "\"p_to\"", ",", "\"q_to\"", ",", "\"mu_s_from\"", ",", "\"mu_s_to\"", ",", "\"mu_angmin\"", ",", "\"mu_angmax\"", "]", "file", ".", "write", "(", "\"\\n%%%% branch data\\n\"", ")", "file", ".", "write", "(", "\"%%\\tfbus\\ttbus\\tr\\tx\\tb\\trateA\\trateB\\trateC\\tratio\"", "\"\\tangle\\tstatus\"", ")", "file", ".", "write", "(", "\"\\tangmin\\tangmax\"", ")", "file", ".", "write", "(", "\"\\tPf\\tQf\\tPt\\tQt\"", ")", "file", ".", "write", "(", "\"\\tmu_Sf\\tmu_St\"", ")", "file", ".", "write", "(", "\"\\tmu_angmin\\tmu_angmax\"", ")", "file", ".", "write", "(", "\"\\n%sbranch = [\\n\"", "%", "self", ".", "_prefix", ")", "for", "branch", "in", "self", ".", "case", ".", "branches", ":", "vals", "=", "[", "getattr", "(", "branch", ",", "a", ")", "for", "a", "in", "branch_attr", "]", "vals", ".", "insert", "(", "0", ",", "branch", ".", "to_bus", ".", "_i", ")", "vals", ".", "insert", "(", "0", ",", "branch", ".", "from_bus", ".", "_i", ")", "file", ".", "write", "(", "\"\\t%d\\t%d\\t%g\\t%g\\t%g\\t%g\\t%g\\t%g\\t%g\\t%g\\t%d\\t%g\\t%g\"", "\"\\t%.4f\\t%.4f\\t%.4f\\t%.4f\\t%.4f\\t%.4f\\t%.4f\\t%.4f;\\n\"", "%", "tuple", "(", "vals", ")", ")", "file", ".", "write", "(", "\"];\\n\"", ")" ]
40.481481
17.296296
def update_presubscriptions(self, presubscriptions):
    """Update pre-subscription data. Passing an empty list removes all
    pre-subscription data.

    :param presubscriptions: list of `Presubscription` objects (Required)
    :returns: None
    """
    api = self._get_api(mds.SubscriptionsApi)
    presubscriptions_list = []
    for presubscription in presubscriptions:
        if not isinstance(presubscription, dict):
            presubscription = presubscription.to_dict()
        presubscription = {
            "endpoint_name": presubscription.get("device_id", None),
            "endpoint_type": presubscription.get("device_type", None),
            "_resource_path": presubscription.get("resource_paths", None)
        }
        presubscriptions_list.append(PresubscriptionData(**presubscription))
    return api.update_pre_subscriptions(presubscriptions_list)
[ "def", "update_presubscriptions", "(", "self", ",", "presubscriptions", ")", ":", "api", "=", "self", ".", "_get_api", "(", "mds", ".", "SubscriptionsApi", ")", "presubscriptions_list", "=", "[", "]", "for", "presubscription", "in", "presubscriptions", ":", "if", "not", "isinstance", "(", "presubscription", ",", "dict", ")", ":", "presubscription", "=", "presubscription", ".", "to_dict", "(", ")", "presubscription", "=", "{", "\"endpoint_name\"", ":", "presubscription", ".", "get", "(", "\"device_id\"", ",", "None", ")", ",", "\"endpoint_type\"", ":", "presubscription", ".", "get", "(", "\"device_type\"", ",", "None", ")", ",", "\"_resource_path\"", ":", "presubscription", ".", "get", "(", "\"resource_paths\"", ",", "None", ")", "}", "presubscriptions_list", ".", "append", "(", "PresubscriptionData", "(", "*", "*", "presubscription", ")", ")", "return", "api", ".", "update_pre_subscriptions", "(", "presubscriptions_list", ")" ]
50.666667
20.388889
def getPrimaryRole(store, primaryRoleName, createIfNotFound=False): """ Get Role object corresponding to an identifier name. If the role name passed is the empty string, it is assumed that the user is not authenticated, and the 'Everybody' role is primary. If the role name passed is non-empty, but has no corresponding role, the 'Authenticated' role - which is a member of 'Everybody' - is primary. Finally, a specific role can be primary if one exists for the user's given credentials, that will automatically always be a member of 'Authenticated', and by extension, of 'Everybody'. @param primaryRoleName: a unicode string identifying the role to be retrieved. This corresponds to L{Role}'s externalID attribute. @param createIfNotFound: a boolean. If True, create a role for the given primary role name if no exact match is found. The default, False, will instead retrieve the 'nearest match' role, which can be Authenticated or Everybody depending on whether the user is logged in or not. @return: a L{Role}. """ if not primaryRoleName: return getEveryoneRole(store) ff = store.findUnique(Role, Role.externalID == primaryRoleName, default=None) if ff is not None: return ff authRole = getAuthenticatedRole(store) if createIfNotFound: role = Role(store=store, externalID=primaryRoleName) role.becomeMemberOf(authRole) return role return authRole
[ "def", "getPrimaryRole", "(", "store", ",", "primaryRoleName", ",", "createIfNotFound", "=", "False", ")", ":", "if", "not", "primaryRoleName", ":", "return", "getEveryoneRole", "(", "store", ")", "ff", "=", "store", ".", "findUnique", "(", "Role", ",", "Role", ".", "externalID", "==", "primaryRoleName", ",", "default", "=", "None", ")", "if", "ff", "is", "not", "None", ":", "return", "ff", "authRole", "=", "getAuthenticatedRole", "(", "store", ")", "if", "createIfNotFound", ":", "role", "=", "Role", "(", "store", "=", "store", ",", "externalID", "=", "primaryRoleName", ")", "role", ".", "becomeMemberOf", "(", "authRole", ")", "return", "role", "return", "authRole" ]
44.787879
24
def update_aliases(
        cr, registry, model_name, set_parent_thread_id,
        alias_defaults=None, defaults_id_key=False):
    """
    Update a model's aliases according to how they are configured
    in the model's create() method.

    :param model_name: The name of the model whose aliases are to be updated. \
The model_id is also set as the aliases' alias_parent_model_id.
    :param set_parent_thread_id: When set, set the ids of the resources as \
their alias' alias_parent_thread_id
    :param alias_defaults: Static dictionary, recorded as a string on each \
alias
    :param defaults_id_key: When defined, add this key to each alias' \
defaults dictionary with the resource id as its value.
    """
    model_id = registry['ir.model'].search(
        cr, SUPERUSER_ID, [('model', '=', model_name)])[0]
    vals = {'alias_parent_model_id': model_id}
    if defaults_id_key and alias_defaults is None:
        alias_defaults = {}
    res_ids = registry[model_name].search(
        cr, SUPERUSER_ID, [], context={'active_test': False})
    for res in registry[model_name].browse(
            cr, SUPERUSER_ID, res_ids):
        if set_parent_thread_id:
            vals['alias_parent_thread_id'] = res.id
        if defaults_id_key:
            alias_defaults[defaults_id_key] = res.id
        if alias_defaults is not None:
            vals['alias_defaults'] = str(alias_defaults)
        res.alias_id.write(vals)
[ "def", "update_aliases", "(", "cr", ",", "registry", ",", "model_name", ",", "set_parent_thread_id", ",", "alias_defaults", "=", "None", ",", "defaults_id_key", "=", "False", ")", ":", "model_id", "=", "registry", "[", "'ir.model'", "]", ".", "search", "(", "cr", ",", "SUPERUSER_ID", ",", "[", "(", "'model'", ",", "'='", ",", "model_name", ")", "]", ")", "[", "0", "]", "vals", "=", "{", "'alias_parent_model_id'", ":", "model_id", "}", "if", "defaults_id_key", "and", "alias_defaults", "is", "None", ":", "alias_defaults", "=", "{", "}", "res_ids", "=", "registry", "[", "model_name", "]", ".", "search", "(", "cr", ",", "SUPERUSER_ID", ",", "[", "]", ",", "context", "=", "{", "'active_test'", ":", "False", "}", ")", "for", "res", "in", "registry", "[", "model_name", "]", ".", "browse", "(", "cr", ",", "SUPERUSER_ID", ",", "res_ids", ")", ":", "if", "set_parent_thread_id", ":", "vals", "[", "'alias_parent_thread_id'", "]", "=", "res", ".", "id", "if", "defaults_id_key", ":", "alias_defaults", "[", "defaults_id_key", "]", "=", "res", ".", "id", "if", "alias_defaults", "is", "not", "None", ":", "vals", "[", "'alias_defaults'", "]", "=", "str", "(", "alias_defaults", ")", "res", ".", "alias_id", ".", "write", "(", "vals", ")" ]
44.15625
15.15625
def validate(self, raw_data, **kwargs): """Convert the raw_data to a float. """ try: converted_data = float(raw_data) super(FloatField, self).validate(converted_data, **kwargs) return raw_data except ValueError: raise ValidationException(self.messages['invalid'], repr(raw_data))
[ "def", "validate", "(", "self", ",", "raw_data", ",", "*", "*", "kwargs", ")", ":", "try", ":", "converted_data", "=", "float", "(", "raw_data", ")", "super", "(", "FloatField", ",", "self", ")", ".", "validate", "(", "converted_data", ",", "*", "*", "kwargs", ")", "return", "raw_data", "except", "ValueError", ":", "raise", "ValidationException", "(", "self", ".", "messages", "[", "'invalid'", "]", ",", "repr", "(", "raw_data", ")", ")" ]
35.1
16.9
def main(): """Main function. Mostly parsing the command line arguments. """ parser = argparse.ArgumentParser() parser.add_argument('--backend', choices=['gatttool', 'bluepy', 'pygatt'], default='gatttool') parser.add_argument('-v', '--verbose', action='store_const', const=True) subparsers = parser.add_subparsers(help='sub-command help', ) parser_poll = subparsers.add_parser('poll', help='poll data from a sensor') parser_poll.add_argument('mac', type=valid_miflora_mac) parser_poll.set_defaults(func=poll) parser_scan = subparsers.add_parser('scan', help='scan for devices') parser_scan.set_defaults(func=scan) parser_scan = subparsers.add_parser('backends', help='list the available backends') parser_scan.set_defaults(func=list_backends) args = parser.parse_args() if args.verbose: logging.basicConfig(level=logging.DEBUG) if not hasattr(args, "func"): parser.print_help() sys.exit(0) args.func(args)
[ "def", "main", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'--backend'", ",", "choices", "=", "[", "'gatttool'", ",", "'bluepy'", ",", "'pygatt'", "]", ",", "default", "=", "'gatttool'", ")", "parser", ".", "add_argument", "(", "'-v'", ",", "'--verbose'", ",", "action", "=", "'store_const'", ",", "const", "=", "True", ")", "subparsers", "=", "parser", ".", "add_subparsers", "(", "help", "=", "'sub-command help'", ",", ")", "parser_poll", "=", "subparsers", ".", "add_parser", "(", "'poll'", ",", "help", "=", "'poll data from a sensor'", ")", "parser_poll", ".", "add_argument", "(", "'mac'", ",", "type", "=", "valid_miflora_mac", ")", "parser_poll", ".", "set_defaults", "(", "func", "=", "poll", ")", "parser_scan", "=", "subparsers", ".", "add_parser", "(", "'scan'", ",", "help", "=", "'scan for devices'", ")", "parser_scan", ".", "set_defaults", "(", "func", "=", "scan", ")", "parser_scan", "=", "subparsers", ".", "add_parser", "(", "'backends'", ",", "help", "=", "'list the available backends'", ")", "parser_scan", ".", "set_defaults", "(", "func", "=", "list_backends", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "if", "args", ".", "verbose", ":", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "DEBUG", ")", "if", "not", "hasattr", "(", "args", ",", "\"func\"", ")", ":", "parser", ".", "print_help", "(", ")", "sys", ".", "exit", "(", "0", ")", "args", ".", "func", "(", "args", ")" ]
32.7
24.1
def is_base_form(self, univ_pos, morphology=None): """ Check whether we're dealing with an uninflected paradigm, so we can avoid lemmatization entirely. """ morphology = {} if morphology is None else morphology others = [key for key in morphology if key not in (POS, 'Number', 'POS', 'VerbForm', 'Tense')] if univ_pos == 'noun' and morphology.get('Number') == 'sing': return True elif univ_pos == 'verb' and morphology.get('VerbForm') == 'inf': return True # This maps 'VBP' to base form -- probably just need 'IS_BASE' # morphology elif univ_pos == 'verb' and (morphology.get('VerbForm') == 'fin' and morphology.get('Tense') == 'pres' and morphology.get('Number') is None and not others): return True elif univ_pos == 'adj' and morphology.get('Degree') == 'pos': return True elif VerbForm_inf in morphology: return True elif VerbForm_none in morphology: return True elif Number_sing in morphology: return True elif Degree_pos in morphology: return True else: return False
[ "def", "is_base_form", "(", "self", ",", "univ_pos", ",", "morphology", "=", "None", ")", ":", "morphology", "=", "{", "}", "if", "morphology", "is", "None", "else", "morphology", "others", "=", "[", "key", "for", "key", "in", "morphology", "if", "key", "not", "in", "(", "POS", ",", "'Number'", ",", "'POS'", ",", "'VerbForm'", ",", "'Tense'", ")", "]", "if", "univ_pos", "==", "'noun'", "and", "morphology", ".", "get", "(", "'Number'", ")", "==", "'sing'", ":", "return", "True", "elif", "univ_pos", "==", "'verb'", "and", "morphology", ".", "get", "(", "'VerbForm'", ")", "==", "'inf'", ":", "return", "True", "# This maps 'VBP' to base form -- probably just need 'IS_BASE'", "# morphology", "elif", "univ_pos", "==", "'verb'", "and", "(", "morphology", ".", "get", "(", "'VerbForm'", ")", "==", "'fin'", "and", "morphology", ".", "get", "(", "'Tense'", ")", "==", "'pres'", "and", "morphology", ".", "get", "(", "'Number'", ")", "is", "None", "and", "not", "others", ")", ":", "return", "True", "elif", "univ_pos", "==", "'adj'", "and", "morphology", ".", "get", "(", "'Degree'", ")", "==", "'pos'", ":", "return", "True", "elif", "VerbForm_inf", "in", "morphology", ":", "return", "True", "elif", "VerbForm_none", "in", "morphology", ":", "return", "True", "elif", "Number_sing", "in", "morphology", ":", "return", "True", "elif", "Degree_pos", "in", "morphology", ":", "return", "True", "else", ":", "return", "False" ]
42.419355
17.516129
def _put_input(self, input_source: str) -> None: """Put input string to ffmpeg command.""" input_cmd = shlex.split(str(input_source)) if len(input_cmd) > 1: self._argv.extend(input_cmd) else: self._argv.extend(["-i", input_source])
[ "def", "_put_input", "(", "self", ",", "input_source", ":", "str", ")", "->", "None", ":", "input_cmd", "=", "shlex", ".", "split", "(", "str", "(", "input_source", ")", ")", "if", "len", "(", "input_cmd", ")", ">", "1", ":", "self", ".", "_argv", ".", "extend", "(", "input_cmd", ")", "else", ":", "self", ".", "_argv", ".", "extend", "(", "[", "\"-i\"", ",", "input_source", "]", ")" ]
40.142857
9.428571
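The branch logic above relies entirely on shlex.split; a standalone sketch with hypothetical inputs shows both paths:

import shlex

for source in ["rtsp://example.com/stream", "-f lavfi -i testsrc"]:
    parts = shlex.split(str(source))
    if len(parts) > 1:
        argv = parts            # caller passed full ffmpeg flags verbatim
    else:
        argv = ["-i", source]   # bare source gets a plain -i
    print(argv)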
def add_mag_drifts(inst):
    """Adds ion drifts in magnetic coordinates using ion drifts in S/C coordinates
    along with pre-calculated unit vectors for magnetic coordinates.

    Note
    ----
        Requires ion drifts under labels 'iv_*' where * = (x,y,z) along with
        unit vectors labels 'unit_zonal_*', 'unit_fa_*', and 'unit_mer_*',
        where the unit vectors are expressed in S/C coordinates. These
        vectors are calculated by add_mag_drift_unit_vectors.

    Parameters
    ----------
    inst : pysat.Instrument
        Instrument object will be modified to include new ion drift magnitudes

    Returns
    -------
    None
        Instrument object modified in place

    """

    inst['iv_zon'] = {'data':inst['unit_zon_x'] * inst['iv_x'] + inst['unit_zon_y']*inst['iv_y'] + inst['unit_zon_z']*inst['iv_z'],
                      'units':'m/s',
                      'long_name':'Zonal ion velocity',
                      'notes':('Ion velocity relative to co-rotation along zonal '
                               'direction, normal to meridional plane. Positive east. '
                               'Velocity obtained using ion velocities relative '
                               'to co-rotation in the instrument frame along '
                               'with the corresponding unit vectors expressed in '
                               'the instrument frame. '),
                      'label': 'Zonal Ion Velocity',
                      'axis': 'Zonal Ion Velocity',
                      'desc': 'Zonal ion velocity',
                      'scale': 'Linear',
                      'value_min':-500.,
                      'value_max':500.}
    inst['iv_fa'] = {'data':inst['unit_fa_x'] * inst['iv_x'] + inst['unit_fa_y'] * inst['iv_y'] + inst['unit_fa_z'] * inst['iv_z'],
                     'units':'m/s',
                     'long_name':'Field-Aligned ion velocity',
                     'notes':('Ion velocity relative to co-rotation along magnetic field line. Positive along the field. '
                              'Velocity obtained using ion velocities relative '
                              'to co-rotation in the instrument frame along '
                              'with the corresponding unit vectors expressed in '
                              'the instrument frame. '),
                     'label':'Field-Aligned Ion Velocity',
                     'axis':'Field-Aligned Ion Velocity',
                     'desc':'Field-Aligned Ion Velocity',
                     'scale':'Linear',
                     'value_min':-500.,
                     'value_max':500.}
    inst['iv_mer'] = {'data':inst['unit_mer_x'] * inst['iv_x'] + inst['unit_mer_y']*inst['iv_y'] + inst['unit_mer_z']*inst['iv_z'],
                      'units':'m/s',
                      'long_name':'Meridional ion velocity',
                      'notes':('Velocity along meridional direction, perpendicular '
                               'to field and within meridional plane. Positive is up at magnetic equator. '
                               'Velocity obtained using ion velocities relative '
                               'to co-rotation in the instrument frame along '
                               'with the corresponding unit vectors expressed in '
                               'the instrument frame. '),
                      'label':'Meridional Ion Velocity',
                      'axis':'Meridional Ion Velocity',
                      'desc':'Meridional Ion Velocity',
                      'scale':'Linear',
                      'value_min':-500.,
                      'value_max':500.}

    return
[ "def", "add_mag_drifts", "(", "inst", ")", ":", "inst", "[", "'iv_zon'", "]", "=", "{", "'data'", ":", "inst", "[", "'unit_zon_x'", "]", "*", "inst", "[", "'iv_x'", "]", "+", "inst", "[", "'unit_zon_y'", "]", "*", "inst", "[", "'iv_y'", "]", "+", "inst", "[", "'unit_zon_z'", "]", "*", "inst", "[", "'iv_z'", "]", ",", "'units'", ":", "'m/s'", ",", "'long_name'", ":", "'Zonal ion velocity'", ",", "'notes'", ":", "(", "'Ion velocity relative to co-rotation along zonal '", "'direction, normal to meridional plane. Positive east. '", "'Velocity obtained using ion velocities relative '", "'to co-rotation in the instrument frame along '", "'with the corresponding unit vectors expressed in '", "'the instrument frame. '", ")", ",", "'label'", ":", "'Zonal Ion Velocity'", ",", "'axis'", ":", "'Zonal Ion Velocity'", ",", "'desc'", ":", "'Zonal ion velocity'", ",", "'scale'", ":", "'Linear'", ",", "'value_min'", ":", "-", "500.", ",", "'value_max'", ":", "500.", "}", "inst", "[", "'iv_fa'", "]", "=", "{", "'data'", ":", "inst", "[", "'unit_fa_x'", "]", "*", "inst", "[", "'iv_x'", "]", "+", "inst", "[", "'unit_fa_y'", "]", "*", "inst", "[", "'iv_y'", "]", "+", "inst", "[", "'unit_fa_z'", "]", "*", "inst", "[", "'iv_z'", "]", ",", "'units'", ":", "'m/s'", ",", "'long_name'", ":", "'Field-Aligned ion velocity'", ",", "'notes'", ":", "(", "'Ion velocity relative to co-rotation along magnetic field line. Positive along the field. '", ",", "'Velocity obtained using ion velocities relative '", "'to co-rotation in the instrument frame along '", "'with the corresponding unit vectors expressed in '", "'the instrument frame. '", ")", ",", "'label'", ":", "'Field-Aligned Ion Velocity'", ",", "'axis'", ":", "'Field-Aligned Ion Velocity'", ",", "'desc'", ":", "'Field-Aligned Ion Velocity'", ",", "'scale'", ":", "'Linear'", ",", "'value_min'", ":", "-", "500.", ",", "'value_max'", ":", "500.", "}", "inst", "[", "'iv_mer'", "]", "=", "{", "'data'", ":", "inst", "[", "'unit_mer_x'", "]", "*", "inst", "[", "'iv_x'", "]", "+", "inst", "[", "'unit_mer_y'", "]", "*", "inst", "[", "'iv_y'", "]", "+", "inst", "[", "'unit_mer_z'", "]", "*", "inst", "[", "'iv_z'", "]", ",", "'units'", ":", "'m/s'", ",", "'long_name'", ":", "'Meridional ion velocity'", ",", "'notes'", ":", "(", "'Velocity along meridional direction, perpendicular '", "'to field and within meridional plane. Positive is up at magnetic equator. '", ",", "'Velocity obtained using ion velocities relative '", "'to co-rotation in the instrument frame along '", "'with the corresponding unit vectors expressed in '", "'the instrument frame. '", ")", ",", "'label'", ":", "'Meridional Ion Velocity'", ",", "'axis'", ":", "'Meridional Ion Velocity'", ",", "'desc'", ":", "'Meridional Ion Velocity'", ",", "'scale'", ":", "'Linear'", ",", "'value_min'", ":", "-", "500.", ",", "'value_max'", ":", "500.", "}", "return" ]
51.492958
26.887324
def on(self, image): """ Project polygons from one image to a new one. Parameters ---------- image : ndarray or tuple of int New image onto which the polygons are to be projected. May also simply be that new image's shape tuple. Returns ------- imgaug.PolygonsOnImage Object containing all projected polygons. """ shape = normalize_shape(image) if shape[0:2] == self.shape[0:2]: return self.deepcopy() polygons = [poly.project(self.shape, shape) for poly in self.polygons] # TODO use deepcopy() here return PolygonsOnImage(polygons, shape)
[ "def", "on", "(", "self", ",", "image", ")", ":", "shape", "=", "normalize_shape", "(", "image", ")", "if", "shape", "[", "0", ":", "2", "]", "==", "self", ".", "shape", "[", "0", ":", "2", "]", ":", "return", "self", ".", "deepcopy", "(", ")", "polygons", "=", "[", "poly", ".", "project", "(", "self", ".", "shape", ",", "shape", ")", "for", "poly", "in", "self", ".", "polygons", "]", "# TODO use deepcopy() here", "return", "PolygonsOnImage", "(", "polygons", ",", "shape", ")" ]
30.954545
17.136364
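A usage sketch, assuming the class is imgaug's PolygonsOnImage (import path per imgaug >= 0.4): projecting onto a half-size image halves every coordinate:

import numpy as np
from imgaug.augmentables.polys import Polygon, PolygonsOnImage

psoi = PolygonsOnImage([Polygon([(10, 10), (50, 10), (50, 50)])],
                       shape=(100, 100, 3))
small = psoi.on(np.zeros((50, 50, 3), dtype=np.uint8))
print(small.polygons[0].exterior)  # points scaled from (10, 10)... to (5, 5)...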
def sleep(self): """Sleep""" try: self.send_command("SLEEP") return True except requests.exceptions.RequestException: _LOGGER.error("Connection error: sleep command not sent.") return False
[ "def", "sleep", "(", "self", ")", ":", "try", ":", "self", ".", "send_command", "(", "\"SLEEP\"", ")", "return", "True", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "_LOGGER", ".", "error", "(", "\"Connection error: sleep command not sent.\"", ")", "return", "False" ]
31.75
16.125
def to_bytes(s, encoding=None, errors='strict'): """Returns a bytestring version of 's', encoded as specified in 'encoding'.""" encoding = encoding or 'utf-8' if isinstance(s, bytes): if encoding != 'utf-8': return s.decode('utf-8', errors).encode(encoding, errors) else: return s if not is_string(s): s = string_type(s) return s.encode(encoding, errors)
[ "def", "to_bytes", "(", "s", ",", "encoding", "=", "None", ",", "errors", "=", "'strict'", ")", ":", "encoding", "=", "encoding", "or", "'utf-8'", "if", "isinstance", "(", "s", ",", "bytes", ")", ":", "if", "encoding", "!=", "'utf-8'", ":", "return", "s", ".", "decode", "(", "'utf-8'", ",", "errors", ")", ".", "encode", "(", "encoding", ",", "errors", ")", "else", ":", "return", "s", "if", "not", "is_string", "(", "s", ")", ":", "s", "=", "string_type", "(", "s", ")", "return", "s", ".", "encode", "(", "encoding", ",", "errors", ")" ]
35.166667
11.583333
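A usage sketch; to_bytes depends on is_string and string_type helpers the record does not define, so hypothetical Python 3 stand-ins are assumed below:

def is_string(s):
    return isinstance(s, str)   # hypothetical stand-in

string_type = str               # hypothetical stand-in

print(to_bytes('héllo'))                      # b'h\xc3\xa9llo' (utf-8 default)
print(to_bytes('héllo', encoding='latin-1'))  # b'h\xe9llo' (re-encoded)
print(to_bytes(42))                           # b'42' (coerced via string_type)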
def cnst_AT(self, X): r"""Compute :math:`A^T \mathbf{x}` where :math:`A \mathbf{x}` is a component of ADMM problem constraint. In this case :math:`A^T \mathbf{x} = (G_r^T \;\; G_c^T) \mathbf{x}`. """ return np.sum(np.concatenate( [sl.GTax(X[..., ax], ax)[..., np.newaxis] for ax in self.axes], axis=X.ndim-1), axis=X.ndim-1)
[ "def", "cnst_AT", "(", "self", ",", "X", ")", ":", "return", "np", ".", "sum", "(", "np", ".", "concatenate", "(", "[", "sl", ".", "GTax", "(", "X", "[", "...", ",", "ax", "]", ",", "ax", ")", "[", "...", ",", "np", ".", "newaxis", "]", "for", "ax", "in", "self", ".", "axes", "]", ",", "axis", "=", "X", ".", "ndim", "-", "1", ")", ",", "axis", "=", "X", ".", "ndim", "-", "1", ")" ]
42.333333
15.777778
def remove_unique_identifiers(identifiers_to_tags, pipeline_links): """Removes unique identifiers and add the original process names to the already parsed pipelines Parameters ---------- identifiers_to_tags : dict Match between unique process identifiers and process names pipeline_links: list Parsed pipeline list with unique identifiers Returns ------- list Pipeline list with original identifiers """ # Replaces the unique identifiers by the original process names for index, val in enumerate(pipeline_links): if val["input"]["process"] != "__init__": val["input"]["process"] = identifiers_to_tags[ val["input"]["process"]] if val["output"]["process"] != "__init__": val["output"]["process"] = identifiers_to_tags[ val["output"]["process"]] return pipeline_links
[ "def", "remove_unique_identifiers", "(", "identifiers_to_tags", ",", "pipeline_links", ")", ":", "# Replaces the unique identifiers by the original process names", "for", "index", ",", "val", "in", "enumerate", "(", "pipeline_links", ")", ":", "if", "val", "[", "\"input\"", "]", "[", "\"process\"", "]", "!=", "\"__init__\"", ":", "val", "[", "\"input\"", "]", "[", "\"process\"", "]", "=", "identifiers_to_tags", "[", "val", "[", "\"input\"", "]", "[", "\"process\"", "]", "]", "if", "val", "[", "\"output\"", "]", "[", "\"process\"", "]", "!=", "\"__init__\"", ":", "val", "[", "\"output\"", "]", "[", "\"process\"", "]", "=", "identifiers_to_tags", "[", "val", "[", "\"output\"", "]", "[", "\"process\"", "]", "]", "return", "pipeline_links" ]
33
19.222222
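Since the function only rewrites plain dictionaries, it can be exercised with toy data; the identifiers below are hypothetical:

identifiers_to_tags = {"proc_1a2b": "fastqc", "proc_3c4d": "trimmomatic"}
pipeline_links = [
    {"input": {"process": "__init__"}, "output": {"process": "proc_1a2b"}},
    {"input": {"process": "proc_1a2b"}, "output": {"process": "proc_3c4d"}},
]
print(remove_unique_identifiers(identifiers_to_tags, pipeline_links))
# -> process names restored: fastqc feeds trimmomatic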
def flush(self) -> None: """ To act as a file. """ self.underlying_stream.flush() self.file.flush() os.fsync(self.file.fileno())
[ "def", "flush", "(", "self", ")", "->", "None", ":", "self", ".", "underlying_stream", ".", "flush", "(", ")", "self", ".", "file", ".", "flush", "(", ")", "os", ".", "fsync", "(", "self", ".", "file", ".", "fileno", "(", ")", ")" ]
24.285714
7.428571
def category_structure(category, site): """ A category structure. """ return {'description': category.title, 'htmlUrl': '%s://%s%s' % ( PROTOCOL, site.domain, category.get_absolute_url()), 'rssUrl': '%s://%s%s' % ( PROTOCOL, site.domain, reverse('zinnia:category_feed', args=[category.tree_path])), # Useful Wordpress Extensions 'categoryId': category.pk, 'parentId': category.parent and category.parent.pk or 0, 'categoryDescription': category.description, 'categoryName': category.title}
[ "def", "category_structure", "(", "category", ",", "site", ")", ":", "return", "{", "'description'", ":", "category", ".", "title", ",", "'htmlUrl'", ":", "'%s://%s%s'", "%", "(", "PROTOCOL", ",", "site", ".", "domain", ",", "category", ".", "get_absolute_url", "(", ")", ")", ",", "'rssUrl'", ":", "'%s://%s%s'", "%", "(", "PROTOCOL", ",", "site", ".", "domain", ",", "reverse", "(", "'zinnia:category_feed'", ",", "args", "=", "[", "category", ".", "tree_path", "]", ")", ")", ",", "# Useful Wordpress Extensions", "'categoryId'", ":", "category", ".", "pk", ",", "'parentId'", ":", "category", ".", "parent", "and", "category", ".", "parent", ".", "pk", "or", "0", ",", "'categoryDescription'", ":", "category", ".", "description", ",", "'categoryName'", ":", "category", ".", "title", "}" ]
39.875
7.375
def root(self, value): """Set new XML tree""" self._xml = t2s(value) self._root = value
[ "def", "root", "(", "self", ",", "value", ")", ":", "self", ".", "_xml", "=", "t2s", "(", "value", ")", "self", ".", "_root", "=", "value" ]
21.6
16.4
def _compile_iterable(self, schema): """ Compile iterable: iterable of schemas treated as allowed values """ # Compile each member as a schema schema_type = type(schema) schema_subs = tuple(map(self.sub_compile, schema)) # When the schema is an iterable with a single item (e.g. [dict(...)]), # Invalid errors from schema members should be immediately used. # This allows to report sane errors with `Schema([{'age': int}])` error_passthrough = len(schema_subs) == 1 # Prepare self self.compiled_type = const.COMPILED_TYPE.ITERABLE self.name = _(u'{iterable_cls}[{iterable_options}]').format( iterable_cls=get_type_name(schema_type), iterable_options=_(u'|').join(x.name for x in schema_subs) ) # Error partials err_type = self.Invalid(_(u'Wrong value type'), get_type_name(schema_type)) err_value = self.Invalid(_(u'Invalid value'), self.name) # Validator def validate_iterable(l): # Type check if not isinstance(l, schema_type): # expected=<type>, provided=<type> raise err_type(provided=get_type_name(type(l))) # Each `v` member should match to any `schema` member errors = [] # Errors for every value values = [] # Sanitized values for value_index, value in list(enumerate(l)): # Walk through schema members and test if any of them match for value_schema in schema_subs: try: # Try to validate values.append(value_schema(value)) break # Success! except signals.RemoveValue: # `value_schema` commanded to drop this value break except Invalid as e: if error_passthrough: # Error-Passthrough enabled: add the original error errors.append(e.enrich(path=[value_index])) break else: # Error-Passthrough disabled: Ignore errors and hope other members will succeed better pass else: errors.append(err_value(get_literal_name(value), path=[value_index])) # Errors? if errors: raise MultipleInvalid.if_multiple(errors) # Typecast and finish return schema_type(values) # Matcher if self.matcher: return self._compile_callable(validate_iterable) # Stupidly use it as callable return validate_iterable
[ "def", "_compile_iterable", "(", "self", ",", "schema", ")", ":", "# Compile each member as a schema", "schema_type", "=", "type", "(", "schema", ")", "schema_subs", "=", "tuple", "(", "map", "(", "self", ".", "sub_compile", ",", "schema", ")", ")", "# When the schema is an iterable with a single item (e.g. [dict(...)]),", "# Invalid errors from schema members should be immediately used.", "# This allows to report sane errors with `Schema([{'age': int}])`", "error_passthrough", "=", "len", "(", "schema_subs", ")", "==", "1", "# Prepare self", "self", ".", "compiled_type", "=", "const", ".", "COMPILED_TYPE", ".", "ITERABLE", "self", ".", "name", "=", "_", "(", "u'{iterable_cls}[{iterable_options}]'", ")", ".", "format", "(", "iterable_cls", "=", "get_type_name", "(", "schema_type", ")", ",", "iterable_options", "=", "_", "(", "u'|'", ")", ".", "join", "(", "x", ".", "name", "for", "x", "in", "schema_subs", ")", ")", "# Error partials", "err_type", "=", "self", ".", "Invalid", "(", "_", "(", "u'Wrong value type'", ")", ",", "get_type_name", "(", "schema_type", ")", ")", "err_value", "=", "self", ".", "Invalid", "(", "_", "(", "u'Invalid value'", ")", ",", "self", ".", "name", ")", "# Validator", "def", "validate_iterable", "(", "l", ")", ":", "# Type check", "if", "not", "isinstance", "(", "l", ",", "schema_type", ")", ":", "# expected=<type>, provided=<type>", "raise", "err_type", "(", "provided", "=", "get_type_name", "(", "type", "(", "l", ")", ")", ")", "# Each `v` member should match to any `schema` member", "errors", "=", "[", "]", "# Errors for every value", "values", "=", "[", "]", "# Sanitized values", "for", "value_index", ",", "value", "in", "list", "(", "enumerate", "(", "l", ")", ")", ":", "# Walk through schema members and test if any of them match", "for", "value_schema", "in", "schema_subs", ":", "try", ":", "# Try to validate", "values", ".", "append", "(", "value_schema", "(", "value", ")", ")", "break", "# Success!", "except", "signals", ".", "RemoveValue", ":", "# `value_schema` commanded to drop this value", "break", "except", "Invalid", "as", "e", ":", "if", "error_passthrough", ":", "# Error-Passthrough enabled: add the original error", "errors", ".", "append", "(", "e", ".", "enrich", "(", "path", "=", "[", "value_index", "]", ")", ")", "break", "else", ":", "# Error-Passthrough disabled: Ignore errors and hope other members will succeed better", "pass", "else", ":", "errors", ".", "append", "(", "err_value", "(", "get_literal_name", "(", "value", ")", ",", "path", "=", "[", "value_index", "]", ")", ")", "# Errors?", "if", "errors", ":", "raise", "MultipleInvalid", ".", "if_multiple", "(", "errors", ")", "# Typecast and finish", "return", "schema_type", "(", "values", ")", "# Matcher", "if", "self", ".", "matcher", ":", "return", "self", ".", "_compile_callable", "(", "validate_iterable", ")", "# Stupidly use it as callable", "return", "validate_iterable" ]
42.138462
21.353846
def select_features(self, *args, **kwargs): """ Select one or more fields as feature fields. :rtype: DataFrame """ if not args: raise ValueError("Field list cannot be empty.") # generate selected set from args augment = kwargs.get('add', False) fields = _render_field_set(args) self._assert_ml_fields_valid(*fields) return _batch_change_roles(self, fields, FieldRole.FEATURE, augment)
[ "def", "select_features", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "args", ":", "raise", "ValueError", "(", "\"Field list cannot be empty.\"", ")", "# generate selected set from args", "augment", "=", "kwargs", ".", "get", "(", "'add'", ",", "False", ")", "fields", "=", "_render_field_set", "(", "args", ")", "self", ".", "_assert_ml_fields_valid", "(", "*", "fields", ")", "return", "_batch_change_roles", "(", "self", ",", "fields", ",", "FieldRole", ".", "FEATURE", ",", "augment", ")" ]
35.769231
11.769231
def load_jamfile(self, dir, jamfile_module):
    """Load a Jamfile at the given directory. Returns nothing.
    Will attempt to load the file as indicated by the JAMFILE patterns.
    Effect of calling this rule twice with the same 'dir' is undefined."""
    assert isinstance(dir, basestring)
    assert isinstance(jamfile_module, basestring)

    # See if the Jamfile is where it should be.
    is_jamroot = False
    jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
    if jamfile_to_load:
        if len(jamfile_to_load) > 1:
            get_manager().errors()(
                "Multiple Jamfiles found at '{}'\n"
                "Filenames are: {}"
                .format(dir, ' '.join(os.path.basename(j) for j in jamfile_to_load))
            )
        is_jamroot = True
        jamfile_to_load = jamfile_to_load[0]
    else:
        jamfile_to_load = self.find_jamfile(dir)

    dir = os.path.dirname(jamfile_to_load)
    if not dir:
        dir = "."

    self.used_projects[jamfile_module] = []

    # Now load the Jamfile in its own context.
    # The call to 'initialize' may load parent Jamfile, which might have
    # 'use-project' statement that causes a second attempt to load the
    # same project we're loading now. Checking inside .jamfile-modules
    # prevents that second attempt from messing up.
    if not jamfile_module in self.jamfile_modules:
        previous_project = self.current_project
        # Initialize the jamfile module before loading.
        self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))

        if not jamfile_module in self.jamfile_modules:
            saved_project = self.current_project
            self.jamfile_modules[jamfile_module] = True

            bjam.call("load", jamfile_module, jamfile_to_load)

            if is_jamroot:
                jamfile = self.find_jamfile(dir, no_errors=True)
                if jamfile:
                    bjam.call("load", jamfile_module, jamfile)

            # Now do some checks
            if self.current_project != saved_project:
                from textwrap import dedent
                self.manager.errors()(dedent(
                    """
                    The value of the .current-project variable has magically changed
                    after loading a Jamfile. This means some of the targets might be
                    defined at the wrong project.
                    after loading %s
                    expected value %s
                    actual value %s
                    """
                    % (jamfile_module, saved_project, self.current_project)
                ))

            self.end_load(previous_project)

            if self.global_build_dir:
                id = self.attributeDefault(jamfile_module, "id", None)
                project_root = self.attribute(jamfile_module, "project-root")
                location = self.attribute(jamfile_module, "location")

                if location and project_root == dir:
                    # This is Jamroot
                    if not id:
                        # FIXME: go via errors module, so that contexts are
                        # shown?
                        print "warning: the --build-dir option was specified"
                        print "warning: but Jamroot at '%s'" % dir
                        print "warning: specified no project id"
                        print "warning: the --build-dir option will be ignored"
[ "def", "load_jamfile", "(", "self", ",", "dir", ",", "jamfile_module", ")", ":", "assert", "isinstance", "(", "dir", ",", "basestring", ")", "assert", "isinstance", "(", "jamfile_module", ",", "basestring", ")", "# See if the Jamfile is where it should be.", "is_jamroot", "=", "False", "jamfile_to_load", "=", "b2", ".", "util", ".", "path", ".", "glob", "(", "[", "dir", "]", ",", "self", ".", "JAMROOT", ")", "if", "jamfile_to_load", ":", "if", "len", "(", "jamfile_to_load", ")", ">", "1", ":", "get_manager", "(", ")", ".", "errors", "(", ")", "(", "\"Multiple Jamfiles found at '{}'\\n\"", "\"Filenames are: {}\"", ".", "format", "(", "dir", ",", "' '", ".", "join", "(", "os", ".", "path", ".", "basename", "(", "j", ")", "for", "j", "in", "jamfile_to_load", ")", ")", ")", "is_jamroot", "=", "True", "jamfile_to_load", "=", "jamfile_to_load", "[", "0", "]", "else", ":", "jamfile_to_load", "=", "self", ".", "find_jamfile", "(", "dir", ")", "dir", "=", "os", ".", "path", ".", "dirname", "(", "jamfile_to_load", ")", "if", "not", "dir", ":", "dir", "=", "\".\"", "self", ".", "used_projects", "[", "jamfile_module", "]", "=", "[", "]", "# Now load the Jamfile in it's own context.", "# The call to 'initialize' may load parent Jamfile, which might have", "# 'use-project' statement that causes a second attempt to load the", "# same project we're loading now. Checking inside .jamfile-modules", "# prevents that second attempt from messing up.", "if", "not", "jamfile_module", "in", "self", ".", "jamfile_modules", ":", "previous_project", "=", "self", ".", "current_project", "# Initialize the jamfile module before loading.", "self", ".", "initialize", "(", "jamfile_module", ",", "dir", ",", "os", ".", "path", ".", "basename", "(", "jamfile_to_load", ")", ")", "if", "not", "jamfile_module", "in", "self", ".", "jamfile_modules", ":", "saved_project", "=", "self", ".", "current_project", "self", ".", "jamfile_modules", "[", "jamfile_module", "]", "=", "True", "bjam", ".", "call", "(", "\"load\"", ",", "jamfile_module", ",", "jamfile_to_load", ")", "if", "is_jamroot", ":", "jamfile", "=", "self", ".", "find_jamfile", "(", "dir", ",", "no_errors", "=", "True", ")", "if", "jamfile", ":", "bjam", ".", "call", "(", "\"load\"", ",", "jamfile_module", ",", "jamfile", ")", "# Now do some checks", "if", "self", ".", "current_project", "!=", "saved_project", ":", "from", "textwrap", "import", "dedent", "self", ".", "manager", ".", "errors", "(", ")", "(", "dedent", "(", "\"\"\"\n The value of the .current-project variable has magically changed\n after loading a Jamfile. 
This means some of the targets might be\n defined a the wrong project.\n after loading %s\n expected value %s\n actual value %s\n \"\"\"", "%", "(", "jamfile_module", ",", "saved_project", ",", "self", ".", "current_project", ")", ")", ")", "self", ".", "end_load", "(", "previous_project", ")", "if", "self", ".", "global_build_dir", ":", "id", "=", "self", ".", "attributeDefault", "(", "jamfile_module", ",", "\"id\"", ",", "None", ")", "project_root", "=", "self", ".", "attribute", "(", "jamfile_module", ",", "\"project-root\"", ")", "location", "=", "self", ".", "attribute", "(", "jamfile_module", ",", "\"location\"", ")", "if", "location", "and", "project_root", "==", "dir", ":", "# This is Jamroot", "if", "not", "id", ":", "# FIXME: go via errors module, so that contexts are", "# shown?", "print", "\"warning: the --build-dir option was specified\"", "print", "\"warning: but Jamroot at '%s'\"", "%", "dir", "print", "\"warning: specified no project id\"", "print", "\"warning: the --build-dir option will be ignored\"" ]
45.55
20.6125
def generate_data_with_shared_vocab(self, data_dir, tmp_dir, task_id=-1): """Generates TF-Records for problems using a global vocabulary file.""" global_vocab_filename = os.path.join(data_dir, self.vocab_filename) if not tf.gfile.Exists(global_vocab_filename): raise ValueError( 'Global vocabulary file: %s does not exist, ' 'please create one using build_vocab.py' % global_vocab_filename) # Before generating data, we copy the global vocabulary file to the children # locations. Although this is not the most disk efficient strategy, it # imposes the fewest changes to the text-to-text API. for p in self.problems: local_vocab_filename = os.path.join(data_dir, p.vocab_filename) if not tf.gfile.Exists(local_vocab_filename): tf.gfile.Copy(global_vocab_filename, local_vocab_filename) p.generate_data(data_dir, tmp_dir, task_id)
[ "def", "generate_data_with_shared_vocab", "(", "self", ",", "data_dir", ",", "tmp_dir", ",", "task_id", "=", "-", "1", ")", ":", "global_vocab_filename", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "self", ".", "vocab_filename", ")", "if", "not", "tf", ".", "gfile", ".", "Exists", "(", "global_vocab_filename", ")", ":", "raise", "ValueError", "(", "'Global vocabulary file: %s does not exist, '", "'please create one using build_vocab.py'", "%", "global_vocab_filename", ")", "# Before generating data, we copy the global vocabulary file to the children", "# locations. Although this is not the most disk efficient strategy, it", "# imposes the fewest changes to the text-to-text API.", "for", "p", "in", "self", ".", "problems", ":", "local_vocab_filename", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "p", ".", "vocab_filename", ")", "if", "not", "tf", ".", "gfile", ".", "Exists", "(", "local_vocab_filename", ")", ":", "tf", ".", "gfile", ".", "Copy", "(", "global_vocab_filename", ",", "local_vocab_filename", ")", "p", ".", "generate_data", "(", "data_dir", ",", "tmp_dir", ",", "task_id", ")" ]
59.666667
21.333333