def _process_in_collection_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks for a value's existence in a collection.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and
                               field info of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the
                 current block is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, specifying the collection in which the value must exist;
                    if the collection is optional and missing, the check will return True

    Returns:
        a Filter basic block that performs the collection existence check
    """
    filtered_field_type = filter_operation_info.field_type
    filtered_field_name = filter_operation_info.field_name

    argument_inferred_type = GraphQLList(strip_non_null_from_type(filtered_field_type))
    argument_expression, non_existence_expression = _represent_argument(
        location, context, parameters[0], argument_inferred_type)

    filter_predicate = expressions.BinaryComposition(
        u'contains', argument_expression, expressions.LocalField(filtered_field_name))
    if non_existence_expression is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        filter_predicate = expressions.BinaryComposition(
            u'||', non_existence_expression, filter_predicate)

    return blocks.Filter(filter_predicate)

def click_window_multiple(self, window, button, repeat=2, delay=100000):
    """
    Send one or more clicks for a specific mouse button at the current
    mouse location.

    :param window: The window you want to send the event to or CURRENTWINDOW
    :param button: The mouse button. Generally, 1 is left, 2 is middle,
        3 is right, 4 is wheel up, 5 is wheel down.
    :param repeat: number of repetitions (default: 2)
    :param delay: delay between clicks, in microseconds (default: 100000)
    """
    _libxdo.xdo_click_window_multiple(
        self._xdo, window, button, repeat, delay)

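# A minimal usage sketch (an assumption, not part of the library docs above):
# it presumes the python-libxdo bindings expose an `Xdo` wrapper class and a
# `CURRENTWINDOW` constant, and that an X11 session is available.
from xdo import Xdo, CURRENTWINDOW

xdo = Xdo()
# Double-click the left mouse button (button 1) at the current pointer
# position, with the default 100000 us between clicks.
xdo.click_window_multiple(CURRENTWINDOW, 1)
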
def execute(cmd, cwd=None):
    """
    Execute a command and return its output.
    """
    try:
        lines = subprocess \
            .check_output(cmd, cwd=cwd, stderr=DEVNULL) \
            .splitlines()
    except subprocess.CalledProcessError:
        return None
    else:
        if lines:
            return decode(lines[0].strip())
        else:
            return None

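# A self-contained usage sketch. `DEVNULL` and `decode` are helpers from the
# function's home module, so plain stand-ins are substituted here (an
# assumption about their behavior, not the original definitions).
import os
import subprocess

DEVNULL = open(os.devnull, 'wb')

def decode(raw):
    # Stand-in: assume the helper just turns bytes into text.
    return raw.decode('utf-8', errors='replace')

# Prints the current git branch name, or None if the command fails.
print(execute(['git', 'rev-parse', '--abbrev-ref', 'HEAD']))
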
def acquire_resources(self, source):
    """
    Store the resources returned by ``source()``. If ``source`` has been
    acquired before, it will not be called a second time.

    Args:
        source (callable): A function that returns a resource or a
            list of resources.

    Returns:
        None
    """
    if source not in self.consulted:
        self.consulted.add(source)
        if isinstance(source, Tag):
            res = source
        else:
            res = source(self.H)
        if res is None:
            res = set()
        elif isinstance(res, (list, tuple)):
            res = set(res)
        elif isinstance(res, Tag):
            res = {res}
        self.resources |= res

def atEpoch(self, epoch=2000):
    '''
    Return SkyCoords of the objects, propagated to a (single) given epoch.

    Parameters
    ----------
    epoch : Time, or float
        Either an astropy time, or a decimal year of the desired epoch.

    Returns
    -------
    coordinates : SkyCoord(s)
        The coordinates, propagated to the given epoch,
        with that epoch stored in the obstime attribute.
    '''
    projected = copy.deepcopy(self.standardized)

    # calculate the time offset from the epochs of the original coordinates
    try:
        epoch.year
        newobstime = epoch
    except AttributeError:
        try:
            newobstime = epoch.decimalyear * u.year
        except AttributeError:
            newobstime = epoch * u.year
    dt = newobstime - self.obstime

    # calculate the new positions, propagated linearly by dt
    try:
        # if proper motions exist
        newra = (self.ra + self.pm_ra_cosdec / np.cos(self.dec) * dt).to(u.deg)
        newdec = (self.dec + self.pm_dec * dt).to(u.deg)
    except TypeError:
        # assume no proper motions, if they're not defined
        newra = self.ra
        newdec = self.dec
        self.speak('no proper motions were used for {}'.format(self.name))

    projected['ra'] = newra
    projected['dec'] = newdec
    projected['obstime'] = newobstime

    # return as SkyCoord object
    return self.__class__(projected)

def set_lowest_numeric_score(self, score):
    """Sets the lowest numeric score.

    arg:    score (decimal): the lowest numeric score
    raise:  InvalidArgument - ``score`` is invalid
    raise:  NoAccess - ``score`` cannot be modified
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for osid.grading.GradeSystemForm.set_lowest_numeric_score
    if self.get_lowest_numeric_score_metadata().is_read_only():
        raise errors.NoAccess()
    try:
        score = float(score)
    except ValueError:
        raise errors.InvalidArgument()
    if not self._is_valid_decimal(score, self.get_lowest_numeric_score_metadata()):
        raise errors.InvalidArgument()
    self._my_map['lowestNumericScore'] = score

def start_element(self, name, attrs):
    """
    Callback for start of an XML element. Checks to see if we are
    about to start a table that matches the ignore pattern.

    @param name: the name of the tag being opened
    @type name: string

    @param attrs: a dictionary of the attributes for the tag being opened
    @type attrs: dictionary
    """
    if name.lower() == "table":
        for attr in attrs.keys():
            if attr.lower() == "name":
                if self.__ignore_pat.search(attrs[attr]):
                    self.__in_table = 1

def iterative_encoder_decoder(encoder_input,
                              encoder_self_attention_bias,
                              encoder_decoder_attention_bias,
                              query,
                              hparams):
    """Iterative encoder decoder."""
    for _ in range(hparams.num_rec_steps):
        with tf.variable_scope("step", reuse=tf.AUTO_REUSE):
            encoder_output = image_question_encoder(
                encoder_input,
                encoder_self_attention_bias,
                hparams,
                query)

            decoder_output = decoder(
                query,
                encoder_output,
                None,
                encoder_decoder_attention_bias,
                hparams)

            # feed this step's outputs into the next recurrence step
            encoder_input = encoder_output
            query = decoder_output

    return decoder_output

def group_default_invalidator(self, obj):
    """Invalidate cached items when the Group changes."""
    user_pks = User.objects.values_list('pk', flat=True)
    return [('User', pk, False) for pk in user_pks]

def to_html(text, config, search_path):
    """ Convert Markdown text to HTML """

    processor = misaka.Markdown(HtmlRenderer(config, search_path),
                                extensions=ENABLED_EXTENSIONS)

    text = processor(text)
    if not config.get('no_smartquotes'):
        text = misaka.smartypants(text)
    return flask.Markup(text)

def _leave(ins):
    """ Return from a function popping N bytes from the stack.
    Use '__fastcall__' as 1st parameter to just return.
    """
    global FLAG_use_function_exit

    output = []

    if ins.quad[1] == '__fastcall__':
        output.append('ret')
        return output

    nbytes = int(ins.quad[1])  # Number of bytes to pop (params size)

    if nbytes == 0:
        output.append('ld sp, ix')
        output.append('pop ix')
        output.append('ret')
        return output

    if nbytes == 1:
        output.append('ld sp, ix')
        output.append('pop ix')
        output.append('inc sp')  # "Pops" 1 byte
        output.append('ret')
        return output

    if nbytes <= 11:  # Number of bytes for which it is worth the hassle to "pop" off the stack
        output.append('ld sp, ix')
        output.append('pop ix')
        output.append('exx')
        output.append('pop hl')
        for i in range((nbytes >> 1) - 1):
            output.append('pop bc')  # Removes (n * 2 - 2) bytes from the stack
        if nbytes & 1:  # Odd?
            output.append('inc sp')  # "Pops" 1 byte (this should never happen, since params are always even-sized)
        output.append('ex (sp), hl')  # Place back return address
        output.append('exx')
        output.append('ret')
        return output

    if not FLAG_use_function_exit:
        FLAG_use_function_exit = True  # Use standard exit
        output.append('exx')
        output.append('ld hl, %i' % nbytes)
        output.append('__EXIT_FUNCTION:')
        output.append('ld sp, ix')
        output.append('pop ix')
        output.append('pop de')
        output.append('add hl, sp')
        output.append('ld sp, hl')
        output.append('push de')
        output.append('exx')
        output.append('ret')
    else:
        output.append('exx')
        output.append('ld hl, %i' % nbytes)
        output.append('jp __EXIT_FUNCTION')

    return output

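# For concreteness, a sketch of what the nbytes <= 11 branch emits for a
# hypothetical instruction whose quad holds '4' (two 2-byte parameters);
# `ins` is mocked here, since the real quad comes from the compiler's IR.
from types import SimpleNamespace

ins = SimpleNamespace(quad=['leave', '4'])
for line in _leave(ins):
    print(line)
# Expected Z80 assembly for a 4-byte frame:
#   ld sp, ix
#   pop ix
#   exx
#   pop hl
#   pop bc
#   ex (sp), hl
#   exx
#   ret
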
def publish_results(self, view, submitters, commenters):
    """Submit the results to the subreddit. Has no return value (None)."""
    def timef(timestamp, date_only=False):
        """Return a suitable string representation of the timestamp."""
        dtime = datetime.fromtimestamp(timestamp)
        if date_only:
            retval = dtime.strftime('%Y-%m-%d')
        else:
            retval = dtime.strftime('%Y-%m-%d %H:%M PDT')
        return retval

    basic = self.basic_stats()
    top_commenters = self.top_commenters(commenters)
    top_comments = self.top_comments()
    top_submissions = self.top_submissions()

    # Decrease number of top submitters if body is too large.
    body = None
    while body is None or len(body) > 40000 and submitters > 0:
        body = (basic + self.top_submitters(submitters) + top_commenters
                + top_submissions + top_comments + self.post_footer)
        submitters -= 1

    title = '{} {} {}posts from {} to {}'.format(
        self.post_prefix, str(self.subreddit),
        'top ' if view in TOP_VALUES else '',
        timef(self.min_date, True), timef(self.max_date))

    try:  # Attempt to make the submission
        return self.submit_subreddit.submit(title, selftext=body)
    except Exception:
        logger.exception('Failed to submit to {}'
                         .format(self.submit_subreddit))
        self._save_report(title, body)

def update(self, friendly_name=values.unset, api_version=values.unset,
           sms_url=values.unset, sms_method=values.unset,
           sms_fallback_url=values.unset, sms_fallback_method=values.unset):
    """
    Update the ShortCodeInstance

    :param unicode friendly_name: A string to describe this resource
    :param unicode api_version: The API version to use to start a new TwiML session
    :param unicode sms_url: URL Twilio will request when receiving an SMS
    :param unicode sms_method: HTTP method to use when requesting the sms url
    :param unicode sms_fallback_url: URL Twilio will request if an error occurs in executing TwiML
    :param unicode sms_fallback_method: HTTP method Twilio will use with sms_fallback_url

    :returns: Updated ShortCodeInstance
    :rtype: twilio.rest.api.v2010.account.short_code.ShortCodeInstance
    """
    return self._proxy.update(
        friendly_name=friendly_name,
        api_version=api_version,
        sms_url=sms_url,
        sms_method=sms_method,
        sms_fallback_url=sms_fallback_url,
        sms_fallback_method=sms_fallback_method,
    )

def process_bool_arg(arg):
    """ Determine True/False from argument """
    if isinstance(arg, bool):
        return arg
    elif isinstance(arg, basestring):
        if arg.lower() in ["true", "1"]:
            return True
        elif arg.lower() in ["false", "0"]:
            return False

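# Tracing the accepted inputs (note the Python 2 `basestring`; under Python 3
# one would substitute `str`). Anything unmatched falls through to an
# implicit None:
print(process_bool_arg(True))     # True  (bools pass straight through)
print(process_bool_arg("TRUE"))   # True  (comparison is case-insensitive)
print(process_bool_arg("0"))      # False
print(process_bool_arg("maybe"))  # None  (no branch matches)
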
def _get_remote_video_url(self, remote_node, session_id):
    """Get grid-extras url to download videos

    :param remote_node: remote node name
    :param session_id: test session id
    :returns: grid-extras url to download videos
    """
    url = '{}/video'.format(self._get_remote_node_url(remote_node))
    timeout = time.time() + 5  # 5 seconds from now

    # Request the videos list until timeout or the video url is found
    video_url = None
    while time.time() < timeout:
        response = requests.get(url).json()
        try:
            video_url = response['available_videos'][session_id]['download_url']
            break
        except KeyError:
            time.sleep(1)
    return video_url

def in_constraint(self, node1, node2):
    """Checks if node1 is in node2's constraints.

    For instance, if node1 = 010 and node2 = 110:
    010 & 110 = 010 -> has the element.
    """
    constraint = constraint_table[node2]
    if constraint == 0b0:
        return False

    try:
        value = self.el2bv[node1]
    except KeyError:
        return False

    return constraint & value != 0

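# The bit-vector idea in isolation, with a hypothetical three-element mapping
# standing in for `el2bv` and `constraint_table`: each element owns one bit,
# and membership reduces to a single AND.
el2bv = {'a': 0b001, 'b': 0b010, 'c': 0b100}
constraint = 0b110  # hypothetical constraint set containing 'b' and 'c'

print(constraint & el2bv['b'] != 0)  # True  -- 'b' is in the constraint
print(constraint & el2bv['a'] != 0)  # False -- bit 0 is not set
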
def _FlagIsRegistered(self, flag_obj):
    """Checks whether a Flag object is registered under long name or short name.

    Args:
        flag_obj: A Flag object.

    Returns:
        A boolean: True iff flag_obj is registered under long name or short name.
    """
    flag_dict = self.FlagDict()
    # Check whether flag_obj is registered under its long name.
    name = flag_obj.name
    if flag_dict.get(name, None) == flag_obj:
        return True
    # Check whether flag_obj is registered under its short name.
    short_name = flag_obj.short_name
    if (short_name is not None and
            flag_dict.get(short_name, None) == flag_obj):
        return True
    return False

def filter(self, callback=None):
    """
    Run a filter over each of the items.

    :param callback: The filter callback
    :type callback: callable or None

    :rtype: Collection
    """
    if callback:
        return self.__class__(list(filter(callback, self.items)))

    return self.__class__(list(filter(None, self.items)))

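# A usage sketch, assuming a `Collection` whose constructor takes a list
# (which is what the method's use of `self.__class__` implies):
numbers = Collection([0, 1, 2, 3, 4])

evens = numbers.filter(lambda n: n % 2 == 0)  # keeps 0, 2, 4
truthy = numbers.filter()                     # keeps 1, 2, 3, 4 (drops falsey items)
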
def __split_genomic_interval_filename(fn):
    """
    Split a filename of the format chrom:start-end.ext or chrom.ext
    (full chrom).

    :return: tuple of (chrom, start, end) -- 'start' and 'end' are None if
             not present in the filename.
    """
    if fn is None or fn == "":
        raise ValueError("invalid filename: " + str(fn))
    fn = ".".join(fn.split(".")[:-1])
    parts = fn.split(":")
    if len(parts) == 1:
        return (parts[0].strip(), None, None)
    else:
        r_parts = parts[1].split("-")
        if len(r_parts) != 2:
            raise ValueError("Invalid filename: " + str(fn))
        return (parts[0].strip(), int(r_parts[0]), int(r_parts[1]))

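# The two accepted filename shapes, traced by hand:
print(__split_genomic_interval_filename("chr1:1000-2000.bed"))
# -> ('chr1', 1000, 2000)   extension stripped, range parsed to ints
print(__split_genomic_interval_filename("chr1.bed"))
# -> ('chr1', None, None)   whole chromosome, so no start/end
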
def from_attribute(attr):
    """
    Converts an attribute into a shadow attribute.

    :param attr: :class:`MispAttribute` instance to be converted
    :returns: Converted :class:`MispShadowAttribute`
    :example:

    >>> server = MispServer()
    >>> event = server.events.get(12)
    >>> attr = event.attributes[0]
    >>> prop = MispShadowAttribute.from_attribute(attr)
    """
    assert attr is not MispAttribute
    prop = MispShadowAttribute()
    prop.distribution = attr.distribution
    prop.type = attr.type
    prop.comment = attr.comment
    prop.value = attr.value
    prop.category = attr.category
    prop.to_ids = attr.to_ids
    return prop

def apply_zappa_settings(zappa_obj, zappa_settings, environment):
    '''Load Zappa settings, set defaults if needed, and apply to the Zappa object'''
    settings_all = json.load(zappa_settings)
    settings = settings_all[environment]

    # load defaults for missing options
    for key, value in DEFAULT_SETTINGS.items():
        settings[key] = settings.get(key, value)

    if '~' in settings['settings_file']:
        settings['settings_file'] = settings['settings_file'].replace(
            '~', os.path.expanduser('~'))
    if not os.path.isfile(settings['settings_file']):
        raise SettingsError("Please make sure your settings_file "
                            "is properly defined in {0}.".format(zappa_settings))

    for setting in CUSTOM_SETTINGS:
        if setting in settings:
            setattr(zappa_obj, setting, settings[setting])

    return settings

def _get_alphanumeric_index(query_string):
    """
    Given an input string of either int or char, returns what index in the
    alphabet and case it is.

    :param query_string: str, query string
    :return: (int, str), list of the index and type
    """
    # TODO: could probably rework this. it works, but it's ugly as hell.
    try:
        return [int(query_string), 'int']
    except ValueError:
        if len(query_string) == 1:
            if query_string.isupper():
                return [string.ascii_uppercase.index(query_string), 'char_hi']
            elif query_string.islower():
                return [string.ascii_lowercase.index(query_string), 'char_lo']
        else:
            raise IOError('The input is a string longer than one character')

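# Tracing the three branches (the function relies on the stdlib `string`
# module being imported in its home module):
import string

print(_get_alphanumeric_index("7"))  # [7, 'int']
print(_get_alphanumeric_index("C"))  # [2, 'char_hi'] -- index of 'C' in A-Z
print(_get_alphanumeric_index("c"))  # [2, 'char_lo'] -- index of 'c' in a-z
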
def get_results(self):
    '''
    :return: result from running the task
    '''
    self._event.wait()
    if self._exception is not None:
        #
        # Well... rethrowing the exception caught in execute,
        # but on the caller thread
        #
        raise self._exception  # pylint: disable=E0702
    return self._result

def get_files(cls, folder):
    """ Retrieve the list of files the plugin can work on.

    Find this list based on the file names, file extensions, or even by
    actually reading in the file.

    :arg folder: the path to the folder containing the files to check.
        This folder may contain sub-folders.
    """
    filelist = []
    if folder is None or not os.path.isdir(folder):
        return filelist
    for root, dirs, files in os.walk(folder):
        for filename in files:
            filename = os.path.join(root, filename)
            if is_csv_file(filename):
                filelist.append(filename)
    return filelist

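# A usage sketch. `is_csv_file` is the module's own predicate; the crude
# extension check below is a stand-in (an assumption about its behavior).
# Since `cls` is unused in the body, None is passed for it here.
import os

def is_csv_file(filename):
    return filename.lower().endswith('.csv')

print(get_files(None, 'data'))  # every CSV under ./data, or [] if it is missing
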
def route_present(name, address_prefix, next_hop_type, route_table, resource_group,
                  next_hop_ip_address=None, connection_auth=None, **kwargs):
    '''
    .. versionadded:: 2019.2.0

    Ensure a route exists within a route table.

    :param name:
        Name of the route.

    :param address_prefix:
        The destination CIDR to which the route applies.

    :param next_hop_type:
        The type of Azure hop the packet should be sent to. Possible values are:
        'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'.

    :param next_hop_ip_address:
        The IP address packets should be forwarded to. Next hop values are only allowed
        in routes where the next hop type is 'VirtualAppliance'.

    :param route_table:
        The name of the existing route table which will contain the route.

    :param resource_group:
        The resource group assigned to the route table.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting
        to the Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure route exists:
            azurearm_network.route_present:
                - name: rt1_route2
                - route_table: rt1
                - resource_group: group1
                - address_prefix: '192.168.0.0/16'
                - next_hop_type: vnetlocal
                - connection_auth: {{ profile }}
                - require:
                  - azurearm_network: Ensure route table exists
    '''
    ret = {
        'name': name,
        'result': False,
        'comment': '',
        'changes': {}
    }

    if not isinstance(connection_auth, dict):
        ret['comment'] = 'Connection information must be specified via connection_auth dictionary!'
        return ret

    route = __salt__['azurearm_network.route_get'](
        name,
        route_table,
        resource_group,
        azurearm_log_level='info',
        **connection_auth
    )

    if 'error' not in route:
        if address_prefix != route.get('address_prefix'):
            ret['changes']['address_prefix'] = {
                'old': route.get('address_prefix'),
                'new': address_prefix
            }

        if next_hop_type.lower() != route.get('next_hop_type', '').lower():
            ret['changes']['next_hop_type'] = {
                'old': route.get('next_hop_type'),
                'new': next_hop_type
            }

        if next_hop_type.lower() == 'virtualappliance' and next_hop_ip_address != route.get('next_hop_ip_address'):
            ret['changes']['next_hop_ip_address'] = {
                'old': route.get('next_hop_ip_address'),
                'new': next_hop_ip_address
            }

        if not ret['changes']:
            ret['result'] = True
            ret['comment'] = 'Route {0} is already present.'.format(name)
            return ret

        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = 'Route {0} would be updated.'.format(name)
            return ret

    else:
        ret['changes'] = {
            'old': {},
            'new': {
                'name': name,
                'address_prefix': address_prefix,
                'next_hop_type': next_hop_type,
                'next_hop_ip_address': next_hop_ip_address
            }
        }

        if __opts__['test']:
            ret['comment'] = 'Route {0} would be created.'.format(name)
            ret['result'] = None
            return ret

    route_kwargs = kwargs.copy()
    route_kwargs.update(connection_auth)

    route = __salt__['azurearm_network.route_create_or_update'](
        name=name,
        route_table=route_table,
        resource_group=resource_group,
        address_prefix=address_prefix,
        next_hop_type=next_hop_type,
        next_hop_ip_address=next_hop_ip_address,
        **route_kwargs
    )

    if 'error' not in route:
        ret['result'] = True
        ret['comment'] = 'Route {0} has been created.'.format(name)
        return ret

    ret['comment'] = 'Failed to create route {0}! ({1})'.format(name, route.get('error'))
    return ret

def parity_even_p(state, marked_qubits):
    """
    Calculates the parity of elements at indexes in marked_qubits.

    Parity is relative to the binary representation of the integer state.

    :param state: The wavefunction index that corresponds to this state.
    :param marked_qubits: The indexes to be considered in the parity sum.
    :returns: A boolean corresponding to the parity.
    """
    assert isinstance(state, int), \
        f"{state} is not an integer. Must call parity_even_p with an integer state."
    mask = 0
    for q in marked_qubits:
        mask |= 1 << q
    return bin(mask & state).count("1") % 2 == 0

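# Tracing the mask construction by hand for state 0b1011:
state = 0b1011                       # bits set at positions 0, 1 and 3
print(parity_even_p(state, [0, 1]))  # mask 0b011; 0b011 & state has 2 set bits -> True
print(parity_even_p(state, [0, 2]))  # mask 0b101; AND leaves 0b001, 1 set bit -> False
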
def solve(self):
    '''First EOS-generic method; should be called by all specific EOSs.
    For solving for `T`, the EOS must provide the method `solve_T`.
    For all cases, the EOS must provide `a_alpha_and_derivatives`.
    Calls `set_from_PT` once done.
    '''
    self.check_sufficient_inputs()

    if self.V:
        if self.P:
            self.T = self.solve_T(self.P, self.V)
            self.a_alpha, self.da_alpha_dT, self.d2a_alpha_dT2 = self.a_alpha_and_derivatives(self.T)
        else:
            self.a_alpha, self.da_alpha_dT, self.d2a_alpha_dT2 = self.a_alpha_and_derivatives(self.T)
            self.P = R*self.T/(self.V-self.b) - self.a_alpha/(self.V*self.V + self.delta*self.V + self.epsilon)
        Vs = [self.V, 1j, 1j]
    else:
        self.a_alpha, self.da_alpha_dT, self.d2a_alpha_dT2 = self.a_alpha_and_derivatives(self.T)
        Vs = self.volume_solutions(self.T, self.P, self.b, self.delta, self.epsilon, self.a_alpha)
    self.set_from_PT(Vs)

def com_google_fonts_check_family_equal_font_versions(ttFonts):
    """Make sure all font files have the same version value."""
    all_detected_versions = []
    fontfile_versions = {}
    for ttFont in ttFonts:
        v = ttFont['head'].fontRevision
        fontfile_versions[ttFont] = v

        if v not in all_detected_versions:
            all_detected_versions.append(v)
    if len(all_detected_versions) != 1:
        versions_list = ""
        for v in fontfile_versions.keys():
            versions_list += "* {}: {}\n".format(v.reader.file.name,
                                                 fontfile_versions[v])
        yield WARN, ("version info differs among font"
                     " files of the same font project.\n"
                     "These were the version values found:\n"
                     "{}").format(versions_list)
    else:
        yield PASS, "All font files have the same version."

def targeted_einsum(gate: np.ndarray,
                    wf: np.ndarray,
                    wf_target_inds: List[int]
                    ) -> np.ndarray:
    """Left-multiplies the given axes of the wf tensor by the given gate matrix.

    Note that the matrix must have a compatible tensor structure.
    For example, if you have a 6-qubit state vector ``wf`` with shape
    (2, 2, 2, 2, 2, 2), and a 2-qubit unitary operation ``op`` with shape
    (2, 2, 2, 2), and you want to apply ``op`` to the 5th and 3rd qubits
    within ``input_state``, then the output state vector is computed as follows::

        output_state = targeted_einsum(op, input_state, [5, 3])

    This method also works when the right hand side is a matrix instead of a
    vector. If a unitary circuit's matrix is ``old_effect``, and you append
    a CNOT(q1, q4) operation onto the circuit, where the control q1 is the
    qubit at offset 1 and the target q4 is the qubit at offset 4, then the
    appended circuit's unitary matrix is computed as follows::

        new_effect = targeted_left_multiply(CNOT.reshape((2, 2, 2, 2)), old_effect, [1, 4])

    :param gate: What to left-multiply the target tensor by.
    :param wf: A tensor to carefully broadcast a left-multiply over.
    :param wf_target_inds: Which axes of the target are being operated on.
    :returns: The output tensor.
    """
    k = len(wf_target_inds)
    d = len(wf.shape)
    work_indices = tuple(range(k))
    data_indices = tuple(range(k, k + d))
    used_data_indices = tuple(data_indices[q] for q in wf_target_inds)
    input_indices = work_indices + used_data_indices
    output_indices = list(data_indices)
    for w, t in zip(work_indices, wf_target_inds):
        output_indices[t] = w

    # TODO: `out` does not work if input matrices share memory with outputs, as is usually
    # TODO: the case when propagating a wavefunction. This might be fixed in numpy 1.15
    # https://github.com/numpy/numpy/pull/11286
    # It might be worth re-investigating memory savings with `out` when numpy 1.15 becomes
    # commonplace.
    return np.einsum(gate, input_indices, wf, data_indices, output_indices)

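# A quick check of the contraction using only numpy: apply a single-qubit X
# (NOT) gate to qubit 0 of a two-qubit |00> state, yielding |10>.
import numpy as np

X = np.array([[0.0, 1.0],
              [1.0, 0.0]])        # shape (2, 2)

wf = np.zeros((2, 2))
wf[0, 0] = 1.0                    # |00> as a (2, 2) tensor

out = targeted_einsum(X, wf, [0])
print(out[1, 0])                  # 1.0 -- all amplitude moved to |10>
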
def cs20(msg):
    """Aircraft callsign

    Args:
        msg (String): 28 bytes hexadecimal message (BDS20) string

    Returns:
        string: callsign, max. 8 chars
    """
    chars = '#ABCDEFGHIJKLMNOPQRSTUVWXYZ#####_###############0123456789######'

    d = hex2bin(data(msg))

    cs = ''
    cs += chars[bin2int(d[8:14])]
    cs += chars[bin2int(d[14:20])]
    cs += chars[bin2int(d[20:26])]
    cs += chars[bin2int(d[26:32])]
    cs += chars[bin2int(d[32:38])]
    cs += chars[bin2int(d[38:44])]
    cs += chars[bin2int(d[44:50])]
    cs += chars[bin2int(d[50:56])]

    return cs

def _fit_tfa_inner(
        self,
        data,
        R,
        template_centers,
        template_widths,
        template_centers_mean_cov,
        template_widths_mean_var_reci):
    """Fit TFA model, the inner loop part

    Parameters
    ----------
    data: 2D array, in shape [n_voxel, n_tr]
        The fMRI data of a subject

    R : 2D array, in shape [n_voxel, n_dim]
        The voxel coordinate matrix of fMRI data

    template_centers: 1D array
        The template prior on centers

    template_widths: 1D array
        The template prior on widths

    template_centers_mean_cov: 2D array, with shape [K, cov_size]
        The template prior on covariance of centers' mean

    template_widths_mean_var_reci: 1D array
        The reciprocal of template prior on variance of widths' mean

    Returns
    -------
    TFA
        Returns the instance itself.
    """
    nfeature = data.shape[0]
    nsample = data.shape[1]

    feature_indices = \
        np.random.choice(nfeature, self.max_num_voxel, replace=False)
    sample_features = np.zeros(nfeature).astype(bool)
    sample_features[feature_indices] = True

    samples_indices = \
        np.random.choice(nsample, self.max_num_tr, replace=False)

    curr_data = np.zeros((self.max_num_voxel, self.max_num_tr)) \
        .astype(float)
    curr_data = data[feature_indices]
    curr_data = curr_data[:, samples_indices].copy()
    curr_R = R[feature_indices].copy()

    centers = self.get_centers(self.local_prior)
    widths = self.get_widths(self.local_prior)
    unique_R, inds = self.get_unique_R(curr_R)
    F = self.get_factors(unique_R, inds, centers, widths)
    W = self.get_weights(curr_data, F)

    self.local_posterior_, self.total_cost = self._estimate_centers_widths(
        unique_R, inds, curr_data, W, centers, widths,
        template_centers, template_centers_mean_cov,
        template_widths, template_widths_mean_var_reci)

    return self

def calc_h_v1(self):
    """Approximate the water stage resulting in a certain reference discharge
    with the Pegasus iteration method.

    Required control parameters:
      |QTol|
      |HTol|

    Required flux sequence:
      |QRef|

    Modified aide sequences:
      |HMin|
      |HMax|
      |QMin|
      |QMax|

    Calculated flux sequence:
      |H|

    Besides the parameters and sequences given above, those of the actual
    method for calculating the discharge of the total cross section are
    required.

    Examples:

        Essentially, the Pegasus method is a root finding algorithm which
        sequentially decreases its search radius (like the simple bisection
        algorithm) and shows superlinear convergence properties (like the
        Newton-Raphson algorithm).  Ideally, its convergence should be
        proved for each application model to be derived from HydPy-L-Stream.
        The following examples focus on the methods
        |calc_hmin_qmin_hmax_qmax_v1| and |calc_qg_v1| (including their
        submethods) only:

        >>> from hydpy.models.lstream import *
        >>> parameterstep()
        >>> model.calc_hmin_qmin_hmax_qmax = model.calc_hmin_qmin_hmax_qmax_v1
        >>> model.calc_qg = model.calc_qg_v1
        >>> model.calc_qm = model.calc_qm_v1
        >>> model.calc_av_uv = model.calc_av_uv_v1
        >>> model.calc_qv = model.calc_qv_v1
        >>> model.calc_avr_uvr = model.calc_avr_uvr_v1
        >>> model.calc_qvr = model.calc_qvr_v1

        Define the geometry and roughness values for the first test channel:

        >>> bm(2.0)
        >>> bnm(4.0)
        >>> hm(1.0)
        >>> bv(0.5, 10.0)
        >>> bbv(1.0, 2.0)
        >>> bnv(1.0, 8.0)
        >>> bnvr(20.0)
        >>> ekm(1.0)
        >>> skm(20.0)
        >>> ekv(1.0)
        >>> skv(60.0, 80.0)
        >>> gef(0.01)

        Set the error tolerances of the iteration small enough not to
        compromise the shown first six decimal places of the following
        results:

        >>> qtol(1e-10)
        >>> htol(1e-10)

        Derive the required secondary parameters:

        >>> derived.hv.update()
        >>> derived.qm.update()
        >>> derived.qv.update()

        Define a test function, accepting a reference discharge and printing
        both the approximated water stage and the related discharge value:

        >>> def test(qref):
        ...     fluxes.qref = qref
        ...     model.calc_hmin_qmin_hmax_qmax()
        ...     model.calc_h()
        ...     print(repr(fluxes.h))
        ...     print(repr(fluxes.qg))

        Zero discharge and the following discharge values are related to the
        only discontinuities of the given root finding problem:

        >>> derived.qm
        qm(8.399238)
        >>> derived.qv
        qv(left=154.463234, right=23.073584)

        The related water stages are the ones (directly or indirectly)
        defined above:

        >>> test(0.0)
        h(0.0)
        qg(0.0)
        >>> test(derived.qm)
        h(1.0)
        qg(8.399238)
        >>> test(derived.qv.left)
        h(2.0)
        qg(154.463234)
        >>> test(derived.qv.right)
        h(1.25)
        qg(23.073584)

        Test some intermediate water stages, inundating only the main
        channel, the main channel along with the right foreland, and the
        main channel along with both forelands respectively:

        >>> test(6.0)
        h(0.859452)
        qg(6.0)
        >>> test(10.0)
        h(1.047546)
        qg(10.0)
        >>> test(100.0)
        h(1.77455)
        qg(100.0)

        Finally, test two extreme water stages, inundating both outer
        foreland embankments:

        >>> test(200.0)
        h(2.152893)
        qg(200.0)
        >>> test(2000.0)
        h(4.240063)
        qg(2000.0)

        There is a potential risk that the implemented iteration method
        fails for special channel geometries.  To test such cases in a more
        condensed manner, the following test method evaluates different
        water stages automatically in accordance with the example above.
        An error message is printed only if the estimated discharge does not
        approximate the reference discharge with six decimal places:

        >>> def test():
        ...     derived.hv.update()
        ...     derived.qm.update()
        ...     derived.qv.update()
        ...     qm, qv = derived.qm, derived.qv
        ...     for qref in [0.0, qm, qv.left, qv.right,
        ...                  2.0/3.0*qm+1.0/3.0*min(qv),
        ...                  2.0/3.0*min(qv)+1.0/3.0*max(qv),
        ...                  3.0*max(qv), 30.0*max(qv)]:
        ...         fluxes.qref = qref
        ...         model.calc_hmin_qmin_hmax_qmax()
        ...         model.calc_h()
        ...         if abs(round(fluxes.qg-qref) > 0.0):
        ...             print('Error!', 'qref:', qref, 'qg:', fluxes.qg)

        Check for a triangle main channel:

        >>> bm(0.0)
        >>> test()
        >>> bm(2.0)

        Check for a completely flat main channel:

        >>> hm(0.0)
        >>> test()

        Repeat the last example but with a decreased value of |QTol|,
        allowing to trigger another stopping mechanism of the iteration
        algorithm:

        >>> qtol(0.0)
        >>> test()
        >>> hm(1.0)
        >>> qtol(1e-10)

        Check for a nonexistent main channel:

        >>> bm(0.0)
        >>> bnm(0.0)
        >>> test()
        >>> bm(2.0)
        >>> bnm(4.0)

        Check for nonexistent forelands:

        >>> bv(0.0)
        >>> bbv(0.0)
        >>> test()
        >>> bv(0.5, 10.0)
        >>> bbv(1., 2.0)

        Check for nonexistent outer foreland embankments:

        >>> bnvr(0.0)
        >>> test()

        To take the last test as an illustrative example, one can see that
        the given reference discharge is met by the estimated total
        discharge, which consists of components related to the main channel
        and the forelands only:

        >>> fluxes.qref
        qref(3932.452785)
        >>> fluxes.qg
        qg(3932.452785)
        >>> fluxes.qm
        qm(530.074621)
        >>> fluxes.qv
        qv(113.780226, 3288.597937)
        >>> fluxes.qvr
        qvr(0.0, 0.0)
    """
    con = self.parameters.control.fastaccess
    flu = self.sequences.fluxes.fastaccess
    aid = self.sequences.aides.fastaccess
    aid.qmin -= flu.qref
    aid.qmax -= flu.qref
    if modelutils.fabs(aid.qmin) < con.qtol:
        flu.h = aid.hmin
        self.calc_qg()
    elif modelutils.fabs(aid.qmax) < con.qtol:
        flu.h = aid.hmax
        self.calc_qg()
    elif modelutils.fabs(aid.hmax-aid.hmin) < con.htol:
        flu.h = (aid.hmin+aid.hmax)/2.
        self.calc_qg()
    else:
        while True:
            flu.h = aid.hmin-aid.qmin*(aid.hmax-aid.hmin)/(aid.qmax-aid.qmin)
            self.calc_qg()
            aid.qtest = flu.qg-flu.qref
            if modelutils.fabs(aid.qtest) < con.qtol:
                return
            if (((aid.qmax < 0.) and (aid.qtest < 0.)) or
                    ((aid.qmax > 0.) and (aid.qtest > 0.))):
                aid.qmin *= aid.qmax/(aid.qmax+aid.qtest)
            else:
                aid.hmin = aid.hmax
                aid.qmin = aid.qmax
            aid.hmax = flu.h
            aid.qmax = aid.qtest
            if modelutils.fabs(aid.hmax-aid.hmin) < con.htol:
                return
[ "def", "calc_h_v1", "(", "self", ")", ":", "con", "=", "self", ".", "parameters", ".", "control", ".", "fastaccess", "flu", "=", "self", ".", "sequences", ".", "fluxes", ".", "fastaccess", "aid", "=", "self", ".", "sequences", ".", "aides", ".", "fastaccess", "aid", ".", "qmin", "-=", "flu", ".", "qref", "aid", ".", "qmax", "-=", "flu", ".", "qref", "if", "modelutils", ".", "fabs", "(", "aid", ".", "qmin", ")", "<", "con", ".", "qtol", ":", "flu", ".", "h", "=", "aid", ".", "hmin", "self", ".", "calc_qg", "(", ")", "elif", "modelutils", ".", "fabs", "(", "aid", ".", "qmax", ")", "<", "con", ".", "qtol", ":", "flu", ".", "h", "=", "aid", ".", "hmax", "self", ".", "calc_qg", "(", ")", "elif", "modelutils", ".", "fabs", "(", "aid", ".", "hmax", "-", "aid", ".", "hmin", ")", "<", "con", ".", "htol", ":", "flu", ".", "h", "=", "(", "aid", ".", "hmin", "+", "aid", ".", "hmax", ")", "/", "2.", "self", ".", "calc_qg", "(", ")", "else", ":", "while", "True", ":", "flu", ".", "h", "=", "aid", ".", "hmin", "-", "aid", ".", "qmin", "*", "(", "aid", ".", "hmax", "-", "aid", ".", "hmin", ")", "/", "(", "aid", ".", "qmax", "-", "aid", ".", "qmin", ")", "self", ".", "calc_qg", "(", ")", "aid", ".", "qtest", "=", "flu", ".", "qg", "-", "flu", ".", "qref", "if", "modelutils", ".", "fabs", "(", "aid", ".", "qtest", ")", "<", "con", ".", "qtol", ":", "return", "if", "(", "(", "(", "aid", ".", "qmax", "<", "0.", ")", "and", "(", "aid", ".", "qtest", "<", "0.", ")", ")", "or", "(", "(", "aid", ".", "qmax", ">", "0.", ")", "and", "(", "aid", ".", "qtest", ">", "0.", ")", ")", ")", ":", "aid", ".", "qmin", "*=", "aid", ".", "qmax", "/", "(", "aid", ".", "qmax", "+", "aid", ".", "qtest", ")", "else", ":", "aid", ".", "hmin", "=", "aid", ".", "hmax", "aid", ".", "qmin", "=", "aid", ".", "qmax", "aid", ".", "hmax", "=", "flu", ".", "h", "aid", ".", "qmax", "=", "aid", ".", "qtest", "if", "modelutils", ".", "fabs", "(", "aid", ".", "hmax", "-", "aid", ".", "hmin", ")", "<", "con", ".", "htol", ":", "return" ]
29.858921
21.929461
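The Pegasus scheme in calc_h_v1 is easier to follow once stripped of the model plumbing. Below is a minimal, self-contained sketch of the same iteration (a hypothetical helper, not part of HydPy): it takes secant steps between the bracket endpoints and, whenever the new point falls on the same side as the retained endpoint, rescales that endpoint's function value, which is what gives the method its superlinear convergence.

def pegasus(f, xmin, xmax, xtol=1e-10, ftol=1e-10, maxiter=100):
    """Find a root of f within the bracket [xmin, xmax] (Pegasus method)."""
    fmin, fmax = f(xmin), f(xmax)
    x = xmin
    for _ in range(maxiter):
        # Secant step between the current bracket endpoints.
        x = xmin - fmin * (xmax - xmin) / (fmax - fmin)
        fx = f(x)
        if abs(fx) < ftol or abs(xmax - xmin) < xtol:
            break
        if (fmax > 0.0) == (fx > 0.0):
            # Same side as fmax: keep xmin but rescale its function value.
            fmin *= fmax / (fmax + fx)
        else:
            xmin, fmin = xmax, fmax
        xmax, fmax = x, fx
    return x

print(pegasus(lambda x: x * x - 2.0, 1.0, 2.0))  # ~1.414214, i.e. sqrt(2)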
def _terminate_procs(procs): """ Terminate all processes in the process dictionary """ logging.warn("Stopping all remaining processes") for proc, g in procs.values(): logging.debug("[%s] SIGTERM", proc.pid) try: proc.terminate() except OSError as e: # we don't care if the process we tried to kill didn't exist. if e.errno != errno.ESRCH: raise sys.exit(1)
[ "def", "_terminate_procs", "(", "procs", ")", ":", "logging", ".", "warn", "(", "\"Stopping all remaining processes\"", ")", "for", "proc", ",", "g", "in", "procs", ".", "values", "(", ")", ":", "logging", ".", "debug", "(", "\"[%s] SIGTERM\"", ",", "proc", ".", "pid", ")", "try", ":", "proc", ".", "terminate", "(", ")", "except", "OSError", "as", "e", ":", "# we don't care if the process we tried to kill didn't exist.", "if", "e", ".", "errno", "!=", "errno", ".", "ESRCH", ":", "raise", "sys", ".", "exit", "(", "1", ")" ]
31.642857
12.928571
def _no_duplicates_constructor(loader, node, deep=False): """Check for duplicate keys.""" mapping = {} for key_node, value_node in node.value: key = loader.construct_object(key_node, deep=deep) value = loader.construct_object(value_node, deep=deep) if key in mapping: raise ConstructorError("while constructing a mapping", node.start_mark, "found duplicate key (%s)" % key, key_node.start_mark) mapping[key] = value return loader.construct_mapping(node, deep)
[ "def", "_no_duplicates_constructor", "(", "loader", ",", "node", ",", "deep", "=", "False", ")", ":", "mapping", "=", "{", "}", "for", "key_node", ",", "value_node", "in", "node", ".", "value", ":", "key", "=", "loader", ".", "construct_object", "(", "key_node", ",", "deep", "=", "deep", ")", "value", "=", "loader", ".", "construct_object", "(", "value_node", ",", "deep", "=", "deep", ")", "if", "key", "in", "mapping", ":", "raise", "ConstructorError", "(", "\"while constructing a mapping\"", ",", "node", ".", "start_mark", ",", "\"found duplicate key (%s)\"", "%", "key", ",", "key_node", ".", "start_mark", ")", "mapping", "[", "key", "]", "=", "value", "return", "loader", ".", "construct_mapping", "(", "node", ",", "deep", ")" ]
40.8
18.466667
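For the duplicate-key check to take effect, the constructor has to be registered for YAML's default mapping tag. A minimal usage sketch (assumes PyYAML; this is the usual registration pattern, not code taken from the source above):

import yaml

yaml.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
    _no_duplicates_constructor)

yaml.load('a: 1\nb: 2', Loader=yaml.Loader)  # loads normally
yaml.load('a: 1\na: 2', Loader=yaml.Loader)  # raises ConstructorError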
def get_blocked(self): """Return a UserList of Redditors whom the user has blocked.""" url = self.reddit_session.config['blocked'] return self.reddit_session.request_json(url)
[ "def", "get_blocked", "(", "self", ")", ":", "url", "=", "self", ".", "reddit_session", ".", "config", "[", "'blocked'", "]", "return", "self", ".", "reddit_session", ".", "request_json", "(", "url", ")" ]
50.25
10.25
def _evaluatelinearPotentials(Pot,x,t=0.): """Raw, undecorated function for internal use""" if isinstance(Pot,list): sum= 0. for pot in Pot: sum+= pot._call_nodecorator(x,t=t) return sum elif isinstance(Pot,linearPotential): return Pot._call_nodecorator(x,t=t) else: #pragma: no cover raise PotentialError("Input to 'evaluatelinearPotentials' is neither a linearPotential-instance nor a list of such instances")
[ "def", "_evaluatelinearPotentials", "(", "Pot", ",", "x", ",", "t", "=", "0.", ")", ":", "if", "isinstance", "(", "Pot", ",", "list", ")", ":", "sum", "=", "0.", "for", "pot", "in", "Pot", ":", "sum", "+=", "pot", ".", "_call_nodecorator", "(", "x", ",", "t", "=", "t", ")", "return", "sum", "elif", "isinstance", "(", "Pot", ",", "linearPotential", ")", ":", "return", "Pot", ".", "_call_nodecorator", "(", "x", ",", "t", "=", "t", ")", "else", ":", "#pragma: no cover", "raise", "PotentialError", "(", "\"Input to 'evaluatelinearPotentials' is neither a linearPotential-instance nor a list of such instances\"", ")" ]
42.545455
17.636364
def largest_connected_submatrix(C, directed=True, lcc=None): r"""Compute the count matrix on the largest connected set. Parameters ---------- C : scipy.sparse matrix Count matrix specifying edge weights. directed : bool, optional Whether to compute connected components for a directed or undirected graph. Default is True lcc : (M,) ndarray, optional The largest connected set Returns ------- C_cc : scipy.sparse matrix Count matrix of largest completely connected set of vertices (states) See also -------- largest_connected_set Notes ----- Viewing the count matrix as the adjacency matrix of a (directed) graph, the largest connected submatrix is the adjacency matrix of the largest connected set of the corresponding graph. The largest connected submatrix can be efficiently computed using Tarjan's algorithm. References ---------- .. [1] Tarjan, R E. 1972. Depth-first search and linear graph algorithms. SIAM Journal on Computing 1 (2): 146-160. Examples -------- >>> import numpy as np >>> from msmtools.estimation import largest_connected_submatrix >>> C = np.array([[10, 1, 0], [2, 0, 3], [0, 0, 4]]) >>> C_cc_directed = largest_connected_submatrix(C) >>> C_cc_directed # doctest: +ELLIPSIS array([[10, 1], [ 2, 0]]...) >>> C_cc_undirected = largest_connected_submatrix(C, directed=False) >>> C_cc_undirected # doctest: +ELLIPSIS array([[10, 1, 0], [ 2, 0, 3], [ 0, 0, 4]]...) """ if isdense(C): return sparse.connectivity.largest_connected_submatrix(csr_matrix(C), directed=directed, lcc=lcc).toarray() else: return sparse.connectivity.largest_connected_submatrix(C, directed=directed, lcc=lcc)
[ "def", "largest_connected_submatrix", "(", "C", ",", "directed", "=", "True", ",", "lcc", "=", "None", ")", ":", "if", "isdense", "(", "C", ")", ":", "return", "sparse", ".", "connectivity", ".", "largest_connected_submatrix", "(", "csr_matrix", "(", "C", ")", ",", "directed", "=", "directed", ",", "lcc", "=", "lcc", ")", ".", "toarray", "(", ")", "else", ":", "return", "sparse", ".", "connectivity", ".", "largest_connected_submatrix", "(", "C", ",", "directed", "=", "directed", ",", "lcc", "=", "lcc", ")" ]
30.576271
24.254237
def parent_frame_arguments(): """Returns parent frame arguments. When called inside a function, returns a dictionary with the caller's function arguments. These are positional arguments and keyword arguments (**kwargs), while variable arguments (*varargs) are excluded. When called at global scope, this will return an empty dictionary, since there are no arguments. WARNING: If caller function argument names are overloaded before invoking this method, then values will reflect the overloaded value. For this reason, we recommend calling `parent_frame_arguments` at the beginning of the function. """ # All arguments and the names used for *varargs, and **kwargs arg_names, variable_arg_name, keyword_arg_name, local_vars = ( tf_inspect._inspect.getargvalues( # pylint: disable=protected-access # Get the first frame of the caller of this method. tf_inspect._inspect.stack()[1][0])) # pylint: disable=protected-access # Remove the *varargs, and flatten the **kwargs. Both are # nested lists. local_vars.pop(variable_arg_name, {}) keyword_args = local_vars.pop(keyword_arg_name, {}) final_args = {} # Copy over arguments and their values. In general, local_vars # may contain more than just the arguments, since this method # can be called anywhere in a function. for arg_name in arg_names: final_args[arg_name] = local_vars.pop(arg_name) final_args.update(keyword_args) return final_args
[ "def", "parent_frame_arguments", "(", ")", ":", "# All arguments and the names used for *varargs, and **kwargs", "arg_names", ",", "variable_arg_name", ",", "keyword_arg_name", ",", "local_vars", "=", "(", "tf_inspect", ".", "_inspect", ".", "getargvalues", "(", "# pylint: disable=protected-access", "# Get the first frame of the caller of this method.", "tf_inspect", ".", "_inspect", ".", "stack", "(", ")", "[", "1", "]", "[", "0", "]", ")", ")", "# pylint: disable=protected-access", "# Remove the *varargs, and flatten the **kwargs. Both are", "# nested lists.", "local_vars", ".", "pop", "(", "variable_arg_name", ",", "{", "}", ")", "keyword_args", "=", "local_vars", ".", "pop", "(", "keyword_arg_name", ",", "{", "}", ")", "final_args", "=", "{", "}", "# Copy over arguments and their values. In general, local_vars", "# may contain more than just the arguments, since this method", "# can be called anywhere in a function.", "for", "arg_name", "in", "arg_names", ":", "final_args", "[", "arg_name", "]", "=", "local_vars", ".", "pop", "(", "arg_name", ")", "final_args", ".", "update", "(", "keyword_args", ")", "return", "final_args" ]
41.142857
24.428571
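A hypothetical caller showing what the helper returns: positional arguments and flattened **kwargs are captured, *varargs would be excluded, and the values reflect the bindings at call time, which is why the docstring recommends calling it at the beginning of the function.

def connect(host, port=8080, **options):
    captured = parent_frame_arguments()  # capture before any rebinding
    port = 0  # rebinding afterwards does not affect 'captured'
    return captured

print(connect('example.com', retries=3))
# {'host': 'example.com', 'port': 8080, 'retries': 3}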
def forward(self, x): """ Transforms from the packed to unpacked representations (numpy) :param x: packed numpy array. Must have shape `self.num_matrices x triangular_number` :return: Reconstructed numpy array y of shape self.num_matrices x N x N """ fwd = np.zeros((self.num_matrices, self.N, self.N), settings.float_type) indices = np.tril_indices(self.N, 0) z = np.zeros(len(indices[0])).astype(int) for i in range(self.num_matrices): fwd[(z + i,) + indices] = x[i, :] return fwd.squeeze(axis=0) if self.squeeze else fwd
[ "def", "forward", "(", "self", ",", "x", ")", ":", "fwd", "=", "np", ".", "zeros", "(", "(", "self", ".", "num_matrices", ",", "self", ".", "N", ",", "self", ".", "N", ")", ",", "settings", ".", "float_type", ")", "indices", "=", "np", ".", "tril_indices", "(", "self", ".", "N", ",", "0", ")", "z", "=", "np", ".", "zeros", "(", "len", "(", "indices", "[", "0", "]", ")", ")", ".", "astype", "(", "int", ")", "for", "i", "in", "range", "(", "self", ".", "num_matrices", ")", ":", "fwd", "[", "(", "z", "+", "i", ",", ")", "+", "indices", "]", "=", "x", "[", "i", ",", ":", "]", "return", "fwd", ".", "squeeze", "(", "axis", "=", "0", ")", "if", "self", ".", "squeeze", "else", "fwd" ]
47
19.307692
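The packing convention is plain numpy lower-triangle indexing, so the transform is easy to check in isolation. A standalone round trip for a single 3x3 matrix (illustrative values):

import numpy as np

N = 3
packed = np.arange(1.0, 7.0)        # N*(N+1)/2 = 6 packed entries
full = np.zeros((N, N))
full[np.tril_indices(N)] = packed   # unpack into the lower triangle
print(full)
# [[1. 0. 0.]
#  [2. 3. 0.]
#  [4. 5. 6.]]
print(full[np.tril_indices(N)])     # pack again -> [1. 2. 3. 4. 5. 6.]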
def integrity(integrity_func, retry_errors=(ResponseNotValid,)): """ Args: :param integrity_func: can be a callable or a string containing the name of a method to call """ def build_decorator(func): @functools.wraps(func) def func_wrapper(self, grab, task): if isinstance(integrity_func, (list, tuple)): int_funcs = integrity_func else: int_funcs = [integrity_func] try: for int_func in int_funcs: if isinstance(int_func, str): getattr(self, int_func)(grab) else: int_func(grab) except retry_errors as ex: yield task.clone(refresh_cache=True) error_code = ex.__class__.__name__.replace('_', '-') self.stat.inc('integrity:%s' % error_code) else: result = func(self, grab, task) if result is not None: for event in result: yield event func_wrapper._original_func = func # pylint: disable=protected-access return func_wrapper return build_decorator
[ "def", "integrity", "(", "integrity_func", ",", "retry_errors", "=", "(", "ResponseNotValid", ",", ")", ")", ":", "def", "build_decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "func_wrapper", "(", "self", ",", "grab", ",", "task", ")", ":", "if", "isinstance", "(", "integrity_func", ",", "(", "list", ",", "tuple", ")", ")", ":", "int_funcs", "=", "integrity_func", "else", ":", "int_funcs", "=", "[", "integrity_func", "]", "try", ":", "for", "int_func", "in", "int_funcs", ":", "if", "isinstance", "(", "int_func", ",", "str", ")", ":", "getattr", "(", "self", ",", "int_func", ")", "(", "grab", ")", "else", ":", "int_func", "(", "grab", ")", "except", "retry_errors", "as", "ex", ":", "yield", "task", ".", "clone", "(", "refresh_cache", "=", "True", ")", "error_code", "=", "ex", ".", "__class__", ".", "__name__", ".", "replace", "(", "'_'", ",", "'-'", ")", "self", ".", "stat", ".", "inc", "(", "'integrity:%s'", "%", "error_code", ")", "else", ":", "result", "=", "func", "(", "self", ",", "grab", ",", "task", ")", "if", "result", "is", "not", "None", ":", "for", "event", "in", "result", ":", "yield", "event", "func_wrapper", ".", "_original_func", "=", "func", "# pylint: disable=protected-access", "return", "func_wrapper", "return", "build_decorator" ]
38.548387
12.806452
def nvmlDeviceGetSupportedMemoryClocks(handle): r""" /** * Retrieves the list of possible memory clocks that can be used as an argument for \ref nvmlDeviceSetApplicationsClocks. * * For Kepler &tm; or newer fully supported devices. * * @param device The identifier of the target device * @param count Reference in which to provide the \a clocksMHz array size, and * to return the number of elements * @param clocksMHz Reference in which to return the clock in MHz * * @return * - \ref NVML_SUCCESS if \a count and \a clocksMHz have been populated * - \ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized * - \ref NVML_ERROR_INVALID_ARGUMENT if \a device is invalid or \a count is NULL * - \ref NVML_ERROR_NOT_SUPPORTED if the device does not support this feature * - \ref NVML_ERROR_INSUFFICIENT_SIZE if \a count is too small (\a count is set to the number of * required elements) * - \ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible * - \ref NVML_ERROR_UNKNOWN on any unexpected error * * @see nvmlDeviceSetApplicationsClocks * @see nvmlDeviceGetSupportedGraphicsClocks */ nvmlReturn_t DECLDIR nvmlDeviceGetSupportedMemoryClocks """ # first call to get the size c_count = c_uint(0) fn = _nvmlGetFunctionPointer("nvmlDeviceGetSupportedMemoryClocks") ret = fn(handle, byref(c_count), None) if (ret == NVML_SUCCESS): # special case, no clocks return [] elif (ret == NVML_ERROR_INSUFFICIENT_SIZE): # typical case clocks_array = c_uint * c_count.value c_clocks = clocks_array() # make the call again ret = fn(handle, byref(c_count), c_clocks) _nvmlCheckReturn(ret) procs = [] for i in range(c_count.value): procs.append(c_clocks[i]) return procs else: # error case raise NVMLError(ret)
[ "def", "nvmlDeviceGetSupportedMemoryClocks", "(", "handle", ")", ":", "# first call to get the size", "c_count", "=", "c_uint", "(", "0", ")", "fn", "=", "_nvmlGetFunctionPointer", "(", "\"nvmlDeviceGetSupportedMemoryClocks\"", ")", "ret", "=", "fn", "(", "handle", ",", "byref", "(", "c_count", ")", ",", "None", ")", "if", "(", "ret", "==", "NVML_SUCCESS", ")", ":", "# special case, no clocks", "return", "[", "]", "elif", "(", "ret", "==", "NVML_ERROR_INSUFFICIENT_SIZE", ")", ":", "# typical case", "clocks_array", "=", "c_uint", "*", "c_count", ".", "value", "c_clocks", "=", "clocks_array", "(", ")", "# make the call again", "ret", "=", "fn", "(", "handle", ",", "byref", "(", "c_count", ")", ",", "c_clocks", ")", "_nvmlCheckReturn", "(", "ret", ")", "procs", "=", "[", "]", "for", "i", "in", "range", "(", "c_count", ".", "value", ")", ":", "procs", ".", "append", "(", "c_clocks", "[", "i", "]", ")", "return", "procs", "else", ":", "# error case", "raise", "NVMLError", "(", "ret", ")" ]
43.442308
28.192308
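The function follows the standard two-call idiom for C APIs that return variable-length arrays: query the required element count first, then call again with a buffer of that size. A condensed sketch of the idiom, reusing the module's own constants and helpers (a refactoring sketch that assumes the surrounding nvml module, not standalone code):

def _query_uint_array(fn, handle):
    count = c_uint(0)
    ret = fn(handle, byref(count), None)  # size query with a NULL buffer
    if ret == NVML_SUCCESS:
        return []                         # special case: nothing to report
    if ret != NVML_ERROR_INSUFFICIENT_SIZE:
        raise NVMLError(ret)
    buf = (c_uint * count.value)()        # second call with a real buffer
    _nvmlCheckReturn(fn(handle, byref(count), buf))
    return list(buf)

list(buf) iterates the count.value allocated elements, matching the explicit loop in the original.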
def createNorthPointer(self): '''Creates the north pointer relative to current heading.''' self.headingNorthTri = patches.RegularPolygon((0.0,0.80),3,0.05,color='k',zorder=4) self.axes.add_patch(self.headingNorthTri) self.headingNorthText = self.axes.text(0.0,0.675,'N',color='k',size=self.fontSize,horizontalalignment='center',verticalalignment='center',zorder=4)
[ "def", "createNorthPointer", "(", "self", ")", ":", "self", ".", "headingNorthTri", "=", "patches", ".", "RegularPolygon", "(", "(", "0.0", ",", "0.80", ")", ",", "3", ",", "0.05", ",", "color", "=", "'k'", ",", "zorder", "=", "4", ")", "self", ".", "axes", ".", "add_patch", "(", "self", ".", "headingNorthTri", ")", "self", ".", "headingNorthText", "=", "self", ".", "axes", ".", "text", "(", "0.0", ",", "0.675", ",", "'N'", ",", "color", "=", "'k'", ",", "size", "=", "self", ".", "fontSize", ",", "horizontalalignment", "=", "'center'", ",", "verticalalignment", "=", "'center'", ",", "zorder", "=", "4", ")" ]
78.4
42.8
def build_walker(concurrency): """This will return a function suitable for passing to :class:`stacker.plan.Plan` for walking the graph. If concurrency is 1 (no parallelism) this will return a simple topological walker that doesn't use any multithreading. If concurrency is 0, this will return a walker that will walk the graph as fast as the graph topology allows. If concurrency is greater than 1, it will return a walker that will only execute a maximum of concurrency steps at any given time. Returns: func: returns a function to walk a :class:`stacker.dag.DAG`. """ if concurrency == 1: return walk semaphore = UnlimitedSemaphore() if concurrency > 1: semaphore = threading.Semaphore(concurrency) return ThreadedWalker(semaphore).walk
[ "def", "build_walker", "(", "concurrency", ")", ":", "if", "concurrency", "==", "1", ":", "return", "walk", "semaphore", "=", "UnlimitedSemaphore", "(", ")", "if", "concurrency", ">", "1", ":", "semaphore", "=", "threading", ".", "Semaphore", "(", "concurrency", ")", "return", "ThreadedWalker", "(", "semaphore", ")", ".", "walk" ]
33.375
22.166667
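The UnlimitedSemaphore used for concurrency == 0 is presumably just a no-op stand-in for threading.Semaphore; a sketch of that idea and the three resulting regimes (illustrative, not the actual stacker class):

class UnlimitedSemaphore(object):
    """A semaphore that never blocks; acquire/release are no-ops."""
    def acquire(self, *args):
        pass

    def release(self):
        pass

# concurrency == 1 -> plain topological walk, single-threaded
# concurrency == 0 -> ThreadedWalker(UnlimitedSemaphore()).walk, unbounded
# concurrency >= 2 -> ThreadedWalker(threading.Semaphore(n)).walk, bounded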
def request_instance(vm_=None, call=None): ''' Put together all of the information necessary to request an instance through Novaclient and then fire off the request for the instance. Returns data about the instance ''' if call == 'function': # Technically this function may be called other ways too, but it # definitely cannot be called with --function. raise SaltCloudSystemExit( 'The request_instance action must be called with -a or --action.' ) log.info('Creating Cloud VM %s', vm_['name']) salt.utils.cloud.check_name(vm_['name'], 'a-zA-Z0-9._-') conn = get_conn() kwargs = vm_.copy() try: kwargs['image_id'] = get_image(conn, vm_) except Exception as exc: raise SaltCloudSystemExit( 'Error creating {0} on OPENSTACK\n\n' 'Could not find image {1}: {2}\n'.format( vm_['name'], vm_['image'], exc ) ) try: kwargs['flavor_id'] = get_size(conn, vm_) except Exception as exc: raise SaltCloudSystemExit( 'Error creating {0} on OPENSTACK\n\n' 'Could not find size {1}: {2}\n'.format( vm_['name'], vm_['size'], exc ) ) kwargs['key_name'] = config.get_cloud_config_value( 'ssh_key_name', vm_, __opts__, search_global=False ) security_groups = config.get_cloud_config_value( 'security_groups', vm_, __opts__, search_global=False ) if security_groups is not None: vm_groups = security_groups avail_groups = conn.secgroup_list() group_list = [] for vmg in vm_groups: if vmg in [name for name, details in six.iteritems(avail_groups)]: group_list.append(vmg) else: raise SaltCloudNotFound( 'No such security group: \'{0}\''.format(vmg) ) kwargs['security_groups'] = group_list avz = config.get_cloud_config_value( 'availability_zone', vm_, __opts__, default=None, search_global=False ) if avz is not None: kwargs['availability_zone'] = avz kwargs['nics'] = config.get_cloud_config_value( 'networks', vm_, __opts__, search_global=False, default=None ) files = config.get_cloud_config_value( 'files', vm_, __opts__, search_global=False ) if files: kwargs['files'] = {} for src_path in files: if os.path.exists(files[src_path]): with salt.utils.files.fopen(files[src_path], 'r') as fp_: kwargs['files'][src_path] = fp_.read() else: kwargs['files'][src_path] = files[src_path] userdata_file = config.get_cloud_config_value( 'userdata_file', vm_, __opts__, search_global=False, default=None ) if userdata_file is not None: try: with salt.utils.files.fopen(userdata_file, 'r') as fp_: kwargs['userdata'] = salt.utils.cloud.userdata_template( __opts__, vm_, fp_.read() ) except Exception as exc: log.exception( 'Failed to read userdata from %s: %s', userdata_file, exc) kwargs['config_drive'] = config.get_cloud_config_value( 'config_drive', vm_, __opts__, search_global=False ) kwargs.update(get_block_mapping_opts(vm_)) event_kwargs = { 'name': kwargs['name'], 'image': kwargs.get('image_id', 'Boot From Volume'), 'size': kwargs['flavor_id'], } __utils__['cloud.fire_event']( 'event', 'requesting instance', 'salt/cloud/{0}/requesting'.format(vm_['name']), args={ 'kwargs': __utils__['cloud.filter_event']('requesting', event_kwargs, list(event_kwargs)), }, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'] ) try: data = conn.boot(**kwargs) except Exception as exc: raise SaltCloudSystemExit( 'Error creating {0} on Nova\n\n' 'The following exception was thrown by libcloud when trying to ' 'run the initial deployment: {1}\n'.format( vm_['name'], exc ) ) if data.extra.get('password', None) is None and vm_.get('key_filename', None) is None: raise SaltCloudSystemExit('No password returned. 
Set ssh_key_file.') floating_ip_conf = config.get_cloud_config_value('floating_ip', vm_, __opts__, search_global=False, default={}) if floating_ip_conf.get('auto_assign', False): floating_ip = None if floating_ip_conf.get('ip_address', None) is not None: ip_address = floating_ip_conf.get('ip_address', None) try: fl_ip_dict = conn.floating_ip_show(ip_address) floating_ip = fl_ip_dict['ip'] except Exception as err: raise SaltCloudSystemExit( 'Error assigning floating_ip for {0} on Nova\n\n' 'The following exception was thrown by libcloud when trying to ' 'assign a floating ip: {1}\n'.format( vm_['name'], err ) ) else: pool = floating_ip_conf.get('pool', 'public') try: floating_ip = conn.floating_ip_create(pool)['ip'] except Exception: log.info('A new IP address was unable to be allocated. ' 'An IP address will be pulled from the already allocated list, ' 'This will cause a race condition when building in parallel.') for fl_ip, opts in six.iteritems(conn.floating_ip_list()): if opts['fixed_ip'] is None and opts['pool'] == pool: floating_ip = fl_ip break if floating_ip is None: log.error('No IP addresses available to allocate for this server: %s', vm_['name']) def __query_node_data(vm_): try: node = show_instance(vm_['name'], 'action') log.debug('Loaded node data for %s:\n%s', vm_['name'], pprint.pformat(node)) except Exception as err: log.error( 'Failed to get nodes list: %s', err, # Show the traceback if the debug logging level is enabled exc_info_on_loglevel=logging.DEBUG ) # Trigger a failure in the wait for IP function return False return node['state'] == 'ACTIVE' or None # if we associate the floating ip here,then we will fail. # As if we attempt to associate a floating IP before the Nova instance has completed building, # it will fail.So we should associate it after the Nova instance has completed building. try: salt.utils.cloud.wait_for_ip( __query_node_data, update_args=(vm_,) ) except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc: try: # It might be already up, let's destroy it! destroy(vm_['name']) except SaltCloudSystemExit: pass finally: raise SaltCloudSystemExit(six.text_type(exc)) try: conn.floating_ip_associate(vm_['name'], floating_ip) vm_['floating_ip'] = floating_ip except Exception as exc: raise SaltCloudSystemExit( 'Error assigning floating_ip for {0} on Nova\n\n' 'The following exception was thrown by libcloud when trying to ' 'assign a floating ip: {1}\n'.format( vm_['name'], exc ) ) if not vm_.get('password', None): vm_['password'] = data.extra.get('password', '') return data, vm_
[ "def", "request_instance", "(", "vm_", "=", "None", ",", "call", "=", "None", ")", ":", "if", "call", "==", "'function'", ":", "# Technically this function may be called other ways too, but it", "# definitely cannot be called with --function.", "raise", "SaltCloudSystemExit", "(", "'The request_instance action must be called with -a or --action.'", ")", "log", ".", "info", "(", "'Creating Cloud VM %s'", ",", "vm_", "[", "'name'", "]", ")", "salt", ".", "utils", ".", "cloud", ".", "check_name", "(", "vm_", "[", "'name'", "]", ",", "'a-zA-Z0-9._-'", ")", "conn", "=", "get_conn", "(", ")", "kwargs", "=", "vm_", ".", "copy", "(", ")", "try", ":", "kwargs", "[", "'image_id'", "]", "=", "get_image", "(", "conn", ",", "vm_", ")", "except", "Exception", "as", "exc", ":", "raise", "SaltCloudSystemExit", "(", "'Error creating {0} on OPENSTACK\\n\\n'", "'Could not find image {1}: {2}\\n'", ".", "format", "(", "vm_", "[", "'name'", "]", ",", "vm_", "[", "'image'", "]", ",", "exc", ")", ")", "try", ":", "kwargs", "[", "'flavor_id'", "]", "=", "get_size", "(", "conn", ",", "vm_", ")", "except", "Exception", "as", "exc", ":", "raise", "SaltCloudSystemExit", "(", "'Error creating {0} on OPENSTACK\\n\\n'", "'Could not find size {1}: {2}\\n'", ".", "format", "(", "vm_", "[", "'name'", "]", ",", "vm_", "[", "'size'", "]", ",", "exc", ")", ")", "kwargs", "[", "'key_name'", "]", "=", "config", ".", "get_cloud_config_value", "(", "'ssh_key_name'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")", "security_groups", "=", "config", ".", "get_cloud_config_value", "(", "'security_groups'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")", "if", "security_groups", "is", "not", "None", ":", "vm_groups", "=", "security_groups", "avail_groups", "=", "conn", ".", "secgroup_list", "(", ")", "group_list", "=", "[", "]", "for", "vmg", "in", "vm_groups", ":", "if", "vmg", "in", "[", "name", "for", "name", ",", "details", "in", "six", ".", "iteritems", "(", "avail_groups", ")", "]", ":", "group_list", ".", "append", "(", "vmg", ")", "else", ":", "raise", "SaltCloudNotFound", "(", "'No such security group: \\'{0}\\''", ".", "format", "(", "vmg", ")", ")", "kwargs", "[", "'security_groups'", "]", "=", "group_list", "avz", "=", "config", ".", "get_cloud_config_value", "(", "'availability_zone'", ",", "vm_", ",", "__opts__", ",", "default", "=", "None", ",", "search_global", "=", "False", ")", "if", "avz", "is", "not", "None", ":", "kwargs", "[", "'availability_zone'", "]", "=", "avz", "kwargs", "[", "'nics'", "]", "=", "config", ".", "get_cloud_config_value", "(", "'networks'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ")", "files", "=", "config", ".", "get_cloud_config_value", "(", "'files'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")", "if", "files", ":", "kwargs", "[", "'files'", "]", "=", "{", "}", "for", "src_path", "in", "files", ":", "if", "os", ".", "path", ".", "exists", "(", "files", "[", "src_path", "]", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "files", "[", "src_path", "]", ",", "'r'", ")", "as", "fp_", ":", "kwargs", "[", "'files'", "]", "[", "src_path", "]", "=", "fp_", ".", "read", "(", ")", "else", ":", "kwargs", "[", "'files'", "]", "[", "src_path", "]", "=", "files", "[", "src_path", "]", "userdata_file", "=", "config", ".", "get_cloud_config_value", "(", "'userdata_file'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", 
"None", ")", "if", "userdata_file", "is", "not", "None", ":", "try", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "userdata_file", ",", "'r'", ")", "as", "fp_", ":", "kwargs", "[", "'userdata'", "]", "=", "salt", ".", "utils", ".", "cloud", ".", "userdata_template", "(", "__opts__", ",", "vm_", ",", "fp_", ".", "read", "(", ")", ")", "except", "Exception", "as", "exc", ":", "log", ".", "exception", "(", "'Failed to read userdata from %s: %s'", ",", "userdata_file", ",", "exc", ")", "kwargs", "[", "'config_drive'", "]", "=", "config", ".", "get_cloud_config_value", "(", "'config_drive'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")", "kwargs", ".", "update", "(", "get_block_mapping_opts", "(", "vm_", ")", ")", "event_kwargs", "=", "{", "'name'", ":", "kwargs", "[", "'name'", "]", ",", "'image'", ":", "kwargs", ".", "get", "(", "'image_id'", ",", "'Boot From Volume'", ")", ",", "'size'", ":", "kwargs", "[", "'flavor_id'", "]", ",", "}", "__utils__", "[", "'cloud.fire_event'", "]", "(", "'event'", ",", "'requesting instance'", ",", "'salt/cloud/{0}/requesting'", ".", "format", "(", "vm_", "[", "'name'", "]", ")", ",", "args", "=", "{", "'kwargs'", ":", "__utils__", "[", "'cloud.filter_event'", "]", "(", "'requesting'", ",", "event_kwargs", ",", "list", "(", "event_kwargs", ")", ")", ",", "}", ",", "sock_dir", "=", "__opts__", "[", "'sock_dir'", "]", ",", "transport", "=", "__opts__", "[", "'transport'", "]", ")", "try", ":", "data", "=", "conn", ".", "boot", "(", "*", "*", "kwargs", ")", "except", "Exception", "as", "exc", ":", "raise", "SaltCloudSystemExit", "(", "'Error creating {0} on Nova\\n\\n'", "'The following exception was thrown by libcloud when trying to '", "'run the initial deployment: {1}\\n'", ".", "format", "(", "vm_", "[", "'name'", "]", ",", "exc", ")", ")", "if", "data", ".", "extra", ".", "get", "(", "'password'", ",", "None", ")", "is", "None", "and", "vm_", ".", "get", "(", "'key_filename'", ",", "None", ")", "is", "None", ":", "raise", "SaltCloudSystemExit", "(", "'No password returned. Set ssh_key_file.'", ")", "floating_ip_conf", "=", "config", ".", "get_cloud_config_value", "(", "'floating_ip'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "{", "}", ")", "if", "floating_ip_conf", ".", "get", "(", "'auto_assign'", ",", "False", ")", ":", "floating_ip", "=", "None", "if", "floating_ip_conf", ".", "get", "(", "'ip_address'", ",", "None", ")", "is", "not", "None", ":", "ip_address", "=", "floating_ip_conf", ".", "get", "(", "'ip_address'", ",", "None", ")", "try", ":", "fl_ip_dict", "=", "conn", ".", "floating_ip_show", "(", "ip_address", ")", "floating_ip", "=", "fl_ip_dict", "[", "'ip'", "]", "except", "Exception", "as", "err", ":", "raise", "SaltCloudSystemExit", "(", "'Error assigning floating_ip for {0} on Nova\\n\\n'", "'The following exception was thrown by libcloud when trying to '", "'assign a floating ip: {1}\\n'", ".", "format", "(", "vm_", "[", "'name'", "]", ",", "err", ")", ")", "else", ":", "pool", "=", "floating_ip_conf", ".", "get", "(", "'pool'", ",", "'public'", ")", "try", ":", "floating_ip", "=", "conn", ".", "floating_ip_create", "(", "pool", ")", "[", "'ip'", "]", "except", "Exception", ":", "log", ".", "info", "(", "'A new IP address was unable to be allocated. 
'", "'An IP address will be pulled from the already allocated list, '", "'This will cause a race condition when building in parallel.'", ")", "for", "fl_ip", ",", "opts", "in", "six", ".", "iteritems", "(", "conn", ".", "floating_ip_list", "(", ")", ")", ":", "if", "opts", "[", "'fixed_ip'", "]", "is", "None", "and", "opts", "[", "'pool'", "]", "==", "pool", ":", "floating_ip", "=", "fl_ip", "break", "if", "floating_ip", "is", "None", ":", "log", ".", "error", "(", "'No IP addresses available to allocate for this server: %s'", ",", "vm_", "[", "'name'", "]", ")", "def", "__query_node_data", "(", "vm_", ")", ":", "try", ":", "node", "=", "show_instance", "(", "vm_", "[", "'name'", "]", ",", "'action'", ")", "log", ".", "debug", "(", "'Loaded node data for %s:\\n%s'", ",", "vm_", "[", "'name'", "]", ",", "pprint", ".", "pformat", "(", "node", ")", ")", "except", "Exception", "as", "err", ":", "log", ".", "error", "(", "'Failed to get nodes list: %s'", ",", "err", ",", "# Show the traceback if the debug logging level is enabled", "exc_info_on_loglevel", "=", "logging", ".", "DEBUG", ")", "# Trigger a failure in the wait for IP function", "return", "False", "return", "node", "[", "'state'", "]", "==", "'ACTIVE'", "or", "None", "# if we associate the floating ip here,then we will fail.", "# As if we attempt to associate a floating IP before the Nova instance has completed building,", "# it will fail.So we should associate it after the Nova instance has completed building.", "try", ":", "salt", ".", "utils", ".", "cloud", ".", "wait_for_ip", "(", "__query_node_data", ",", "update_args", "=", "(", "vm_", ",", ")", ")", "except", "(", "SaltCloudExecutionTimeout", ",", "SaltCloudExecutionFailure", ")", "as", "exc", ":", "try", ":", "# It might be already up, let's destroy it!", "destroy", "(", "vm_", "[", "'name'", "]", ")", "except", "SaltCloudSystemExit", ":", "pass", "finally", ":", "raise", "SaltCloudSystemExit", "(", "six", ".", "text_type", "(", "exc", ")", ")", "try", ":", "conn", ".", "floating_ip_associate", "(", "vm_", "[", "'name'", "]", ",", "floating_ip", ")", "vm_", "[", "'floating_ip'", "]", "=", "floating_ip", "except", "Exception", "as", "exc", ":", "raise", "SaltCloudSystemExit", "(", "'Error assigning floating_ip for {0} on Nova\\n\\n'", "'The following exception was thrown by libcloud when trying to '", "'assign a floating ip: {1}\\n'", ".", "format", "(", "vm_", "[", "'name'", "]", ",", "exc", ")", ")", "if", "not", "vm_", ".", "get", "(", "'password'", ",", "None", ")", ":", "vm_", "[", "'password'", "]", "=", "data", ".", "extra", ".", "get", "(", "'password'", ",", "''", ")", "return", "data", ",", "vm_" ]
37.598131
21.981308
def get_array(self, rowBased=True): """Return a two dimensional list with the values of :py:obj:`self`. :param boolean rowBased: Indicates whether the returned list should be row or column based. Has to be True if list[i] should be the i'th row, False if list[i] should be the i'th column. :return: Returns a list representing the matrix rows containing lists representing the columns for each row. :rtype: list """ if rowBased: array = [] for row in xrange(self._rows): newRow = [] for col in xrange(self._columns): newRow.append(self.get_value(col, row)) array.append(newRow) return array return copy.deepcopy(self.matrix)
[ "def", "get_array", "(", "self", ",", "rowBased", "=", "True", ")", ":", "if", "rowBased", ":", "array", "=", "[", "]", "for", "row", "in", "xrange", "(", "self", ".", "_rows", ")", ":", "newRow", "=", "[", "]", "for", "col", "in", "xrange", "(", "self", ".", "_columns", ")", ":", "newRow", ".", "append", "(", "self", ".", "get_value", "(", "col", ",", "row", ")", ")", "array", ".", "append", "(", "newRow", ")", "return", "array", "return", "copy", ".", "deepcopy", "(", "self", ".", "matrix", ")" ]
40.85
17.95
def bit_count(self, start=None, end=None): """ Count the set bits in a string. Note that the `start` and `end` parameters are offsets in **bytes**. """ return self.database.bitcount(self.key, start, end)
[ "def", "bit_count", "(", "self", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "return", "self", ".", "database", ".", "bitcount", "(", "self", ".", "key", ",", "start", ",", "end", ")" ]
39.666667
9.333333
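A quick usage sketch against redis-py (assumes a reachable local Redis server; the key name is made up). It also demonstrates the byte-based offsets the docstring warns about:

import redis

r = redis.Redis()
r.set('visits', b'\xff\x00')       # 8 set bits in byte 0, none in byte 1
print(r.bitcount('visits'))        # 8
print(r.bitcount('visits', 0, 0))  # 8 -- start/end select whole bytes
print(r.bitcount('visits', 1, 1))  # 0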
def construct_start_message(self): """Collect preliminary run info at the start of the DFK. Returns : - Message dict dumped as json string, ready for UDP """ uname = getpass.getuser().encode('latin1') hashed_username = hashlib.sha256(uname).hexdigest()[0:10] hname = socket.gethostname().encode('latin1') hashed_hostname = hashlib.sha256(hname).hexdigest()[0:10] message = {'uuid': self.uuid, 'uname': hashed_username, 'hname': hashed_hostname, 'test': self.test_mode, 'parsl_v': self.parsl_version, 'python_v': self.python_version, 'os': platform.system(), 'os_v': platform.release(), 'start': time.time()} return json.dumps(message)
[ "def", "construct_start_message", "(", "self", ")", ":", "uname", "=", "getpass", ".", "getuser", "(", ")", ".", "encode", "(", "'latin1'", ")", "hashed_username", "=", "hashlib", ".", "sha256", "(", "uname", ")", ".", "hexdigest", "(", ")", "[", "0", ":", "10", "]", "hname", "=", "socket", ".", "gethostname", "(", ")", ".", "encode", "(", "'latin1'", ")", "hashed_hostname", "=", "hashlib", ".", "sha256", "(", "hname", ")", ".", "hexdigest", "(", ")", "[", "0", ":", "10", "]", "message", "=", "{", "'uuid'", ":", "self", ".", "uuid", ",", "'uname'", ":", "hashed_username", ",", "'hname'", ":", "hashed_hostname", ",", "'test'", ":", "self", ".", "test_mode", ",", "'parsl_v'", ":", "self", ".", "parsl_version", ",", "'python_v'", ":", "self", ".", "python_version", ",", "'os'", ":", "platform", ".", "system", "(", ")", ",", "'os_v'", ":", "platform", ".", "release", "(", ")", ",", "'start'", ":", "time", ".", "time", "(", ")", "}", "return", "json", ".", "dumps", "(", "message", ")" ]
40.666667
12.142857
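The truncated-hash idiom in isolation: neither the username nor the hostname is reported, only the first ten hex digits of their SHA-256 digests, which identifies a repeat installation without revealing the originals.

import getpass
import hashlib
import socket

uname = getpass.getuser().encode('latin1')
hname = socket.gethostname().encode('latin1')
print(hashlib.sha256(uname).hexdigest()[0:10])  # ten hex digits, machine-specific
print(hashlib.sha256(hname).hexdigest()[0:10])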
def cmd_tool(args=None): """ Command line utility for creating HDF5 blimpy files. """ from argparse import ArgumentParser parser = ArgumentParser(description="Command line utility for creating HDF5 Filterbank files.") parser.add_argument('dirname', type=str, help='Name of directory to read') args = parser.parse_args() if not HAS_BITSHUFFLE: print("Error: the bitshuffle library is required to run this script.") exit() filelist = glob.glob(os.path.join(args.dirname, '*.fil')) for filename in filelist: if not os.path.exists(filename + '.h5'): t0 = time.time() print("\nReading %s header..." % filename) fb = Filterbank(filename, load_data=False) data_shape = (fb.n_ints_in_file, fb.header['nifs'], fb.header['nchans']) data_dtype = fb.data.dtype print(data_dtype) print("Creating new dataset, %s" % str(data_shape)) block_size = 0 h5 = h5py.File(filename + '.h5', 'w') h5.attrs['CLASS'] = 'FILTERBANK' dset = h5.create_dataset('data', shape=data_shape, compression=bitshuffle.h5.H5FILTER, compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4), dtype=data_dtype) dset_mask = h5.create_dataset('mask', shape=data_shape, compression=bitshuffle.h5.H5FILTER, compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4), dtype='uint8') dset.dims[0].label = "frequency" dset.dims[1].label = "feed_id" dset.dims[2].label = "time" dset_mask.dims[0].label = "frequency" dset_mask.dims[1].label = "feed_id" dset_mask.dims[2].label = "time" # Copy over header information as attributes for key, value in fb.header.items(): dset.attrs[key] = value filesize = os.path.getsize(filename) if filesize >= MAX_SIZE: n_int_per_read = int(filesize / MAX_SIZE / 2) print("Filling in with data over %i reads..." % n_int_per_read) for ii in range(0, n_int_per_read): print("Reading %i of %i" % (ii + 1, n_int_per_read)) #print ii*n_int_per_read, (ii+1)*n_int_per_read fb = Filterbank(filename, t_start=ii*n_int_per_read, t_stop=(ii+1) * n_int_per_read) dset[ii*n_int_per_read:(ii+1)*n_int_per_read] = fb.data[:] else: fb = Filterbank(filename) print(dset.shape, " -> ", fb.data.shape) dset[:] = fb.data[:] h5.close() t1 = time.time() print("Conversion time: %2.2fs" % (t1- t0))
[ "def", "cmd_tool", "(", "args", "=", "None", ")", ":", "from", "argparse", "import", "ArgumentParser", "parser", "=", "ArgumentParser", "(", "description", "=", "\"Command line utility for creating HDF5 Filterbank files.\"", ")", "parser", ".", "add_argument", "(", "'dirname'", ",", "type", "=", "str", ",", "help", "=", "'Name of directory to read'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "if", "not", "HAS_BITSHUFFLE", ":", "print", "(", "\"Error: the bitshuffle library is required to run this script.\"", ")", "exit", "(", ")", "filelist", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "args", ".", "dirname", ",", "'*.fil'", ")", ")", "for", "filename", "in", "filelist", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", "+", "'.h5'", ")", ":", "t0", "=", "time", ".", "time", "(", ")", "print", "(", "\"\\nReading %s header...\"", "%", "filename", ")", "fb", "=", "Filterbank", "(", "filename", ",", "load_data", "=", "False", ")", "data_shape", "=", "(", "fb", ".", "n_ints_in_file", ",", "fb", ".", "header", "[", "'nifs'", "]", ",", "fb", ".", "header", "[", "'nchans'", "]", ")", "data_dtype", "=", "fb", ".", "data", ".", "dtype", "print", "(", "data_dtype", ")", "print", "(", "\"Creating new dataset, %s\"", "%", "str", "(", "data_shape", ")", ")", "block_size", "=", "0", "h5", "=", "h5py", ".", "File", "(", "filename", "+", "'.h5'", ",", "'w'", ")", "h5", ".", "attrs", "[", "'CLASS'", "]", "=", "'FILTERBANK'", "dset", "=", "h5", ".", "create_dataset", "(", "'data'", ",", "shape", "=", "data_shape", ",", "compression", "=", "bitshuffle", ".", "h5", ".", "H5FILTER", ",", "compression_opts", "=", "(", "block_size", ",", "bitshuffle", ".", "h5", ".", "H5_COMPRESS_LZ4", ")", ",", "dtype", "=", "data_dtype", ")", "dset_mask", "=", "h5", ".", "create_dataset", "(", "'mask'", ",", "shape", "=", "data_shape", ",", "compression", "=", "bitshuffle", ".", "h5", ".", "H5FILTER", ",", "compression_opts", "=", "(", "block_size", ",", "bitshuffle", ".", "h5", ".", "H5_COMPRESS_LZ4", ")", ",", "dtype", "=", "'uint8'", ")", "dset", ".", "dims", "[", "0", "]", ".", "label", "=", "\"frequency\"", "dset", ".", "dims", "[", "1", "]", ".", "label", "=", "\"feed_id\"", "dset", ".", "dims", "[", "2", "]", ".", "label", "=", "\"time\"", "dset_mask", ".", "dims", "[", "0", "]", ".", "label", "=", "\"frequency\"", "dset_mask", ".", "dims", "[", "1", "]", ".", "label", "=", "\"feed_id\"", "dset_mask", ".", "dims", "[", "2", "]", ".", "label", "=", "\"time\"", "# Copy over header information as attributes", "for", "key", ",", "value", "in", "fb", ".", "header", ".", "items", "(", ")", ":", "dset", ".", "attrs", "[", "key", "]", "=", "value", "filesize", "=", "os", ".", "path", ".", "getsize", "(", "filename", ")", "if", "filesize", ">=", "MAX_SIZE", ":", "n_int_per_read", "=", "int", "(", "filesize", "/", "MAX_SIZE", "/", "2", ")", "print", "(", "\"Filling in with data over %i reads...\"", "%", "n_int_per_read", ")", "for", "ii", "in", "range", "(", "0", ",", "n_int_per_read", ")", ":", "print", "(", "\"Reading %i of %i\"", "%", "(", "ii", "+", "1", ",", "n_int_per_read", ")", ")", "#print ii*n_int_per_read, (ii+1)*n_int_per_read", "fb", "=", "Filterbank", "(", "filename", ",", "t_start", "=", "ii", "*", "n_int_per_read", ",", "t_stop", "=", "(", "ii", "+", "1", ")", "*", "n_int_per_read", ")", "dset", "[", "ii", "*", "n_int_per_read", ":", "(", "ii", "+", "1", ")", "*", "n_int_per_read", "]", "=", "fb", ".", "data", "[", ":", "]", "else", ":", "fb", "=", 
"Filterbank", "(", "filename", ")", "print", "(", "dset", ".", "shape", ",", "\" -> \"", ",", "fb", ".", "data", ".", "shape", ")", "dset", "[", ":", "]", "=", "fb", ".", "data", "[", ":", "]", "h5", ".", "close", "(", ")", "t1", "=", "time", ".", "time", "(", ")", "print", "(", "\"Conversion time: %2.2fs\"", "%", "(", "t1", "-", "t0", ")", ")" ]
40.287671
22.315068
def download(config, account, day, region, output): """Download a traildb file for a given account/day/region""" with open(config) as fh: config = yaml.safe_load(fh.read()) jsonschema.validate(config, CONFIG_SCHEMA) found = None for info in config['accounts']: if info['name'] == account: found = info break if not found: log.info("Account %s not found", account) return s3 = boto3.client('s3') day = parse_date(day) key_data = dict(found) key_data['region'] = region key_data['date_fmt'] = "%s/%s/%s" % ( day.year, day.month, day.day) key = config['key_template'] % key_data s3.download_file(found['bucket'], key, output + '.bz2') subprocess.check_call(["lbzip2", "-d", output + '.bz2'])
[ "def", "download", "(", "config", ",", "account", ",", "day", ",", "region", ",", "output", ")", ":", "with", "open", "(", "config", ")", "as", "fh", ":", "config", "=", "yaml", ".", "safe_load", "(", "fh", ".", "read", "(", ")", ")", "jsonschema", ".", "validate", "(", "config", ",", "CONFIG_SCHEMA", ")", "found", "=", "None", "for", "info", "in", "config", "[", "'accounts'", "]", ":", "if", "info", "[", "'name'", "]", "==", "account", ":", "found", "=", "info", "break", "if", "not", "found", ":", "log", ".", "info", "(", "\"Account %s not found\"", ",", "account", ")", "return", "s3", "=", "boto3", ".", "client", "(", "'s3'", ")", "day", "=", "parse_date", "(", "day", ")", "key_data", "=", "dict", "(", "found", ")", "key_data", "[", "'region'", "]", "=", "region", "key_data", "[", "'date_fmt'", "]", "=", "\"%s/%s/%s\"", "%", "(", "day", ".", "year", ",", "day", ".", "month", ",", "day", ".", "day", ")", "key", "=", "config", "[", "'key_template'", "]", "%", "key_data", "s3", ".", "download_file", "(", "found", "[", "'bucket'", "]", ",", "key", ",", "output", "+", "'.bz2'", ")", "subprocess", ".", "check_call", "(", "[", "\"lbzip2\"", ",", "\"-d\"", ",", "output", "+", "'.bz2'", "]", ")" ]
27.137931
18.586207
def insert_child(self, child_pid, index=-1): """Insert a Version child PID.""" if child_pid.status != PIDStatus.REGISTERED: raise PIDRelationConsistencyError( "Version PIDs should have status 'REGISTERED'. Use " "insert_draft_child to insert 'RESERVED' draft PID.") with db.session.begin_nested(): # if there is a draft and "child" is inserted as the last version, # it should be inserted before the draft. draft = self.draft_child if draft and index == -1: index = self.index(draft) super(PIDNodeVersioning, self).insert_child(child_pid, index=index) self.update_redirect()
[ "def", "insert_child", "(", "self", ",", "child_pid", ",", "index", "=", "-", "1", ")", ":", "if", "child_pid", ".", "status", "!=", "PIDStatus", ".", "REGISTERED", ":", "raise", "PIDRelationConsistencyError", "(", "\"Version PIDs should have status 'REGISTERED'. Use \"", "\"insert_draft_child to insert 'RESERVED' draft PID.\"", ")", "with", "db", ".", "session", ".", "begin_nested", "(", ")", ":", "# if there is a draft and \"child\" is inserted as the last version,", "# it should be inserted before the draft.", "draft", "=", "self", ".", "draft_child", "if", "draft", "and", "index", "==", "-", "1", ":", "index", "=", "self", ".", "index", "(", "draft", ")", "super", "(", "PIDNodeVersioning", ",", "self", ")", ".", "insert_child", "(", "child_pid", ",", "index", "=", "index", ")", "self", ".", "update_redirect", "(", ")" ]
51.214286
13.142857
def plotit(self): ''' Produce the plots requested in the Dynac input file. This makes the same plots as produced by the Dynac ``plotit`` command. ''' [self._plot(i) for i in range(len(self.plots))]
[ "def", "plotit", "(", "self", ")", ":", "[", "self", ".", "_plot", "(", "i", ")", "for", "i", "in", "range", "(", "len", "(", "self", ".", "plots", ")", ")", "]" ]
38.833333
25.833333
def verify_claims(app_req, issuer=None): """ Verify JWT claims. All times must be UTC unix timestamps. These claims will be verified: - iat: issued at time. If JWT was issued more than an hour ago it is rejected. - exp: expiration time. All exceptions are derived from :class:`mozpay.exc.InvalidJWT`. For expirations a :class:`mozpay.exc.RequestExpired` exception will be raised. """ if not issuer: issuer = _get_issuer(app_req=app_req) try: float(str(app_req.get('exp'))) float(str(app_req.get('iat'))) except ValueError: _re_raise_as(InvalidJWT, 'JWT had an invalid exp (%r) or iat (%r) ' % (app_req.get('exp'), app_req.get('iat')), issuer=issuer)
[ "def", "verify_claims", "(", "app_req", ",", "issuer", "=", "None", ")", ":", "if", "not", "issuer", ":", "issuer", "=", "_get_issuer", "(", "app_req", "=", "app_req", ")", "try", ":", "float", "(", "str", "(", "app_req", ".", "get", "(", "'exp'", ")", ")", ")", "float", "(", "str", "(", "app_req", ".", "get", "(", "'iat'", ")", ")", ")", "except", "ValueError", ":", "_re_raise_as", "(", "InvalidJWT", ",", "'JWT had an invalid exp (%r) or iat (%r) '", "%", "(", "app_req", ".", "get", "(", "'exp'", ")", ",", "app_req", ".", "get", "(", "'iat'", ")", ")", ",", "issuer", "=", "issuer", ")" ]
28.107143
15.678571
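Note that verify_claims itself only asserts that exp and iat parse as numbers; the freshness comparisons described in the docstring happen elsewhere. A hypothetical sketch of what such timestamp checks look like (names and limits are illustrative, not mozpay's API):

import time

def check_times(app_req, max_age=3600):
    now = time.time()
    if now - float(app_req['iat']) > max_age:
        raise ValueError('JWT issued more than an hour ago')
    if now > float(app_req['exp']):
        raise ValueError('JWT expired')

check_times({'iat': time.time(), 'exp': time.time() + 60})  # passes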
def parse_region(self): """Pull region/auth url information from context.""" try: auth_url = self.job_args['os_auth_url'] if 'tokens' not in auth_url: if not auth_url.endswith('/'): auth_url = '%s/' % auth_url auth_url = urlparse.urljoin(auth_url, 'tokens') return auth_url except KeyError: raise exceptions.AuthenticationProblem( 'You Are required to specify an Auth URL, Region or Plugin' )
[ "def", "parse_region", "(", "self", ")", ":", "try", ":", "auth_url", "=", "self", ".", "job_args", "[", "'os_auth_url'", "]", "if", "'tokens'", "not", "in", "auth_url", ":", "if", "not", "auth_url", ".", "endswith", "(", "'/'", ")", ":", "auth_url", "=", "'%s/'", "%", "auth_url", "auth_url", "=", "urlparse", ".", "urljoin", "(", "auth_url", ",", "'tokens'", ")", "return", "auth_url", "except", "KeyError", ":", "raise", "exceptions", ".", "AuthenticationProblem", "(", "'You Are required to specify an Auth URL, Region or Plugin'", ")" ]
38
16.714286
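The trailing-slash fix-up matters because urljoin resolves its second argument like a relative link, replacing the last path segment of a base URL that does not end in '/'. A quick demonstration (the hostname is made up):

try:
    import urllib.parse as urlparse  # Python 3
except ImportError:
    import urlparse                  # Python 2

print(urlparse.urljoin('https://keystone.example/v2.0/', 'tokens'))
# https://keystone.example/v2.0/tokens
print(urlparse.urljoin('https://keystone.example/v2.0', 'tokens'))
# https://keystone.example/tokens  -- 'v2.0' was replaced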
def print_results(cls, stdout, stderr): """Print linter results and exit with an error if there are any.""" for line in stderr: print(line, file=sys.stderr) if stdout: if stderr: # blank line to separate stdout from stderr print(file=sys.stderr) cls._print_stdout(stdout) else: print(':) No issues found.')
[ "def", "print_results", "(", "cls", ",", "stdout", ",", "stderr", ")", ":", "for", "line", "in", "stderr", ":", "print", "(", "line", ",", "file", "=", "sys", ".", "stderr", ")", "if", "stdout", ":", "if", "stderr", ":", "# blank line to separate stdout from stderr", "print", "(", "file", "=", "sys", ".", "stderr", ")", "cls", ".", "_print_stdout", "(", "stdout", ")", "else", ":", "print", "(", "':) No issues found.'", ")" ]
39.3
9.5
def auto_toc_tree(self, node): # pylint: disable=too-many-branches """Try to convert a list block to toctree in rst. This function detects if the node matches the condition and returns a converted toc tree node. The matching condition: The list only contains one level, and only contains references Parameters ---------- node: nodes.Sequential A list node in the doctree Returns ------- tocnode: docutils node The converted toc tree node, None if conversion is not possible. """ if not self.config['enable_auto_toc_tree']: return None # when auto_toc_tree_section is set # only auto generate toctree under the specified section title sec = self.config['auto_toc_tree_section'] if sec is not None: if node.parent is None: return None title = None if isinstance(node.parent, nodes.section): child = node.parent.first_child_matching_class(nodes.title) if child is not None: title = node.parent.children[child] elif isinstance(node.parent, nodes.paragraph): child = node.parent.parent.first_child_matching_class(nodes.title) if child is not None: title = node.parent.parent.children[child] if not title: return None if title.astext().strip() != sec: return None numbered = None if isinstance(node, nodes.bullet_list): numbered = 0 elif isinstance(node, nodes.enumerated_list): numbered = 1 if numbered is None: return None refs = [] for nd in node.children[:]: assert isinstance(nd, nodes.list_item) if len(nd.children) != 1: return None par = nd.children[0] if not isinstance(par, nodes.paragraph): return None if len(par.children) != 1: return None ref = par.children[0] if isinstance(ref, addnodes.pending_xref): ref = ref.children[0] if not isinstance(ref, nodes.reference): return None title, uri, docpath = self.parse_ref(ref) if title is None or uri.startswith('#'): return None if docpath: refs.append((title, docpath)) else: refs.append((title, uri)) self.state_machine.reset(self.document, node.parent, self.current_level) return self.state_machine.run_directive( 'toctree', options={'maxdepth': 1, 'numbered': numbered}, content=['%s <%s>' % (k, v) for k, v in refs])
[ "def", "auto_toc_tree", "(", "self", ",", "node", ")", ":", "# pylint: disable=too-many-branches", "if", "not", "self", ".", "config", "[", "'enable_auto_toc_tree'", "]", ":", "return", "None", "# when auto_toc_tree_section is set", "# only auto generate toctree under the specified section title", "sec", "=", "self", ".", "config", "[", "'auto_toc_tree_section'", "]", "if", "sec", "is", "not", "None", ":", "if", "node", ".", "parent", "is", "None", ":", "return", "None", "title", "=", "None", "if", "isinstance", "(", "node", ".", "parent", ",", "nodes", ".", "section", ")", ":", "child", "=", "node", ".", "parent", ".", "first_child_matching_class", "(", "nodes", ".", "title", ")", "if", "child", "is", "not", "None", ":", "title", "=", "node", ".", "parent", ".", "children", "[", "child", "]", "elif", "isinstance", "(", "node", ".", "parent", ",", "nodes", ".", "paragraph", ")", ":", "child", "=", "node", ".", "parent", ".", "parent", ".", "first_child_matching_class", "(", "nodes", ".", "title", ")", "if", "child", "is", "not", "None", ":", "title", "=", "node", ".", "parent", ".", "parent", ".", "children", "[", "child", "]", "if", "not", "title", ":", "return", "None", "if", "title", ".", "astext", "(", ")", ".", "strip", "(", ")", "!=", "sec", ":", "return", "None", "numbered", "=", "None", "if", "isinstance", "(", "node", ",", "nodes", ".", "bullet_list", ")", ":", "numbered", "=", "0", "elif", "isinstance", "(", "node", ",", "nodes", ".", "enumerated_list", ")", ":", "numbered", "=", "1", "if", "numbered", "is", "None", ":", "return", "None", "refs", "=", "[", "]", "for", "nd", "in", "node", ".", "children", "[", ":", "]", ":", "assert", "isinstance", "(", "nd", ",", "nodes", ".", "list_item", ")", "if", "len", "(", "nd", ".", "children", ")", "!=", "1", ":", "return", "None", "par", "=", "nd", ".", "children", "[", "0", "]", "if", "not", "isinstance", "(", "par", ",", "nodes", ".", "paragraph", ")", ":", "return", "None", "if", "len", "(", "par", ".", "children", ")", "!=", "1", ":", "return", "None", "ref", "=", "par", ".", "children", "[", "0", "]", "if", "isinstance", "(", "ref", ",", "addnodes", ".", "pending_xref", ")", ":", "ref", "=", "ref", ".", "children", "[", "0", "]", "if", "not", "isinstance", "(", "ref", ",", "nodes", ".", "reference", ")", ":", "return", "None", "title", ",", "uri", ",", "docpath", "=", "self", ".", "parse_ref", "(", "ref", ")", "if", "title", "is", "None", "or", "uri", ".", "startswith", "(", "'#'", ")", ":", "return", "None", "if", "docpath", ":", "refs", ".", "append", "(", "(", "title", ",", "docpath", ")", ")", "else", ":", "refs", ".", "append", "(", "(", "title", ",", "uri", ")", ")", "self", ".", "state_machine", ".", "reset", "(", "self", ".", "document", ",", "node", ".", "parent", ",", "self", ".", "current_level", ")", "return", "self", ".", "state_machine", ".", "run_directive", "(", "'toctree'", ",", "options", "=", "{", "'maxdepth'", ":", "1", ",", "'numbered'", ":", "numbered", "}", ",", "content", "=", "[", "'%s <%s>'", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "refs", "]", ")" ]
37.644737
15.671053
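A minimal sketch of the content this method ultimately hands to the toctree directive; the titles and document paths are hypothetical, and the reST in the comment mirrors the options passed in the call above.

# Hypothetical refs, shaped like the (title, docpath-or-uri) tuples collected above.
refs = [('Install', 'install'), ('Tutorial', 'tutorial')]
content = ['%s <%s>' % (k, v) for k, v in refs]
print(content)  # ['Install <install>', 'Tutorial <tutorial>']
# Equivalent reST that the state machine runs for a bullet list:
#   .. toctree::
#      :maxdepth: 1
#
#      Install <install>
#      Tutorial <tutorial>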
def output_keys(self, source_keys): """ Given input chunk keys, compute what keys will be needed to put the result into the result array. As an example of where this gets used - when we aggregate on a particular axis, the source keys may be ``(0:2, None:None)``, but for an aggregation on axis 0, they would result in target values on dimension 2 only and so be ``(None: None, )``. """ keys = list(source_keys) # Remove the aggregated axis from the keys. del keys[self.axis] return tuple(keys)
[ "def", "output_keys", "(", "self", ",", "source_keys", ")", ":", "keys", "=", "list", "(", "source_keys", ")", "# Remove the aggregated axis from the keys.", "del", "keys", "[", "self", ".", "axis", "]", "return", "tuple", "(", "keys", ")" ]
38.466667
18.333333
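The key-dropping step is plain list surgery; a standalone sketch with a hypothetical two-dimensional chunk and an aggregation over axis 0:

source_keys = (slice(0, 2), slice(None))  # chunk covering rows 0:2, all columns
axis = 0                                  # hypothetical aggregated axis
keys = list(source_keys)
del keys[axis]
print(tuple(keys))  # (slice(None, None, None),) - only the second dimension remains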
def first_delayed(self):
        """
        Return the first entry in the delayed zset (a tuple with the job's pk
        and the score of the zset, which is its delayed time as a timestamp)
        Returns None if no delayed jobs
        """
        entries = self.delayed.zrange(0, 0, withscores=True)
        return entries[0] if entries else None
[ "def", "first_delayed", "(", "self", ")", ":", "entries", "=", "self", ".", "delayed", ".", "zrange", "(", "0", ",", "0", ",", "withscores", "=", "True", ")", "return", "entries", "[", "0", "]", "if", "entries", "else", "None" ]
42.75
14.25
def show_version(a_device): """Execute show version command using Netmiko.""" remote_conn = ConnectHandler(**a_device) print() print("#" * 80) print(remote_conn.send_command("show version")) print("#" * 80) print()
[ "def", "show_version", "(", "a_device", ")", ":", "remote_conn", "=", "ConnectHandler", "(", "*", "*", "a_device", ")", "print", "(", ")", "print", "(", "\"#\"", "*", "80", ")", "print", "(", "remote_conn", ".", "send_command", "(", "\"show version\"", ")", ")", "print", "(", "\"#\"", "*", "80", ")", "print", "(", ")" ]
29.375
16
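A hedged call sketch: ConnectHandler takes the device description as keyword arguments, so a_device is a plain dict. The device type, host, and credentials below are placeholders for a reachable device.

a_device = {
    'device_type': 'cisco_ios',  # placeholder Netmiko device parameters
    'host': '192.0.2.10',
    'username': 'admin',
    'password': 'secret',
}
show_version(a_device)  # prints the device's 'show version' output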
def isMine(self, scriptname): """Primitive queuing system detection; only looks at suffix at the moment.""" suffix = os.path.splitext(scriptname)[1].lower() if suffix.startswith('.'): suffix = suffix[1:] return self.suffix == suffix
[ "def", "isMine", "(", "self", ",", "scriptname", ")", ":", "suffix", "=", "os", ".", "path", ".", "splitext", "(", "scriptname", ")", "[", "1", "]", ".", "lower", "(", ")", "if", "suffix", ".", "startswith", "(", "'.'", ")", ":", "suffix", "=", "suffix", "[", "1", ":", "]", "return", "self", ".", "suffix", "==", "suffix" ]
45.166667
7.666667
def update_user_auth_stat(self, user, success=True):
        """
        Update the authentication statistics for a user.

        :param user: The authenticated user model
        :param success: Defaults to true; if false, increments
            fail_login_count on the user model
        """
        if not user.login_count:
            user.login_count = 0
        if not user.fail_login_count:
            user.fail_login_count = 0
        if success:
            user.login_count += 1
            user.fail_login_count = 0
        else:
            user.fail_login_count += 1
        user.last_login = datetime.datetime.now()
        self.update_user(user)
[ "def", "update_user_auth_stat", "(", "self", ",", "user", ",", "success", "=", "True", ")", ":", "if", "not", "user", ".", "login_count", ":", "user", ".", "login_count", "=", "0", "if", "not", "user", ".", "fail_login_count", ":", "user", ".", "fail_login_count", "=", "0", "if", "success", ":", "user", ".", "login_count", "+=", "1", "user", ".", "fail_login_count", "=", "0", "else", ":", "user", ".", "fail_login_count", "+=", "1", "user", ".", "last_login", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "self", ".", "update_user", "(", "user", ")" ]
33.1
12.1
def __set_ethernet_uris(self, ethernet_names, operation="add"):
        """Updates network uris."""
        if not isinstance(ethernet_names, list):
            ethernet_names = [ethernet_names]

        associated_enets = self.data.get('networkUris', [])
        ethernet_uris = []

        for enet in ethernet_names:
            enet_exists = self._ethernet_networks.get_by_name(enet)
            if enet_exists:
                ethernet_uris.append(enet_exists.data['uri'])
            else:
                raise HPOneViewResourceNotFound("Ethernet: {} does not exist".format(enet))

        if operation == "remove":
            enets_to_update = sorted(list(set(associated_enets) - set(ethernet_uris)))
        elif operation == "add":
            enets_to_update = sorted(list(set(associated_enets).union(set(ethernet_uris))))
        else:
            raise ValueError("Value {} is not supported as operation. "
                             "The supported values are: ['add', 'remove']".format(operation))

        if set(enets_to_update) != set(associated_enets):
            updated_network = {'networkUris': enets_to_update}
            self.update(updated_network)
[ "def", "__set_ethernet_uris", "(", "self", ",", "ethernet_names", ",", "operation", "=", "\"add\"", ")", ":", "if", "not", "isinstance", "(", "ethernet_names", ",", "list", ")", ":", "ethernet_names", "=", "[", "ethernet_names", "]", "associated_enets", "=", "self", ".", "data", ".", "get", "(", "'networkUris'", ",", "[", "]", ")", "ethernet_uris", "=", "[", "]", "for", "i", ",", "enet", "in", "enumerate", "(", "ethernet_names", ")", ":", "enet_exists", "=", "self", ".", "_ethernet_networks", ".", "get_by_name", "(", "enet", ")", "if", "enet_exists", ":", "ethernet_uris", ".", "append", "(", "enet_exists", ".", "data", "[", "'uri'", "]", ")", "else", ":", "raise", "HPOneViewResourceNotFound", "(", "\"Ethernet: {} does not exist\"", ".", "foramt", "(", "enet", ")", ")", "if", "operation", "==", "\"remove\"", ":", "enets_to_update", "=", "sorted", "(", "list", "(", "set", "(", "associated_enets", ")", "-", "set", "(", "ethernet_uris", ")", ")", ")", "elif", "operation", "==", "\"add\"", ":", "enets_to_update", "=", "sorted", "(", "list", "(", "set", "(", "associated_enets", ")", ".", "union", "(", "set", "(", "ethernet_uris", ")", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "\"Value {} is not supported as operation. The supported values are: ['add', 'remove']\"", ")", "if", "set", "(", "enets_to_update", ")", "!=", "set", "(", "associated_enets", ")", ":", "updated_network", "=", "{", "'networkUris'", ":", "enets_to_update", "}", "self", ".", "update", "(", "updated_network", ")" ]
44.68
25.04
def getUpperDetectionLimit(self):
        """Returns the Upper Detection Limit (UDL) that applies to this
        analysis in particular. If no value set or the analysis service
        doesn't allow manual input of detection limits, returns the value set
        by default in the Analysis Service
        """
        if self.isUpperDetectionLimit():
            result = self.getResult()
            try:
                # in this case, the result itself is the UDL.
                return float(result)
            except (TypeError, ValueError):
                logger.warn("The result for the analysis %s is an upper "
                            "detection limit, but not floatable: '%s'. "
                            "Returning AS's default UDL." % (self.id, result))
        return AbstractBaseAnalysis.getUpperDetectionLimit(self)
[ "def", "getUpperDetectionLimit", "(", "self", ")", ":", "if", "self", ".", "isUpperDetectionLimit", "(", ")", ":", "result", "=", "self", ".", "getResult", "(", ")", "try", ":", "# in this case, the result itself is the LDL.", "return", "float", "(", "result", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "logger", ".", "warn", "(", "\"The result for the analysis %s is a lower \"", "\"detection limit, but not floatable: '%s'. \"", "\"Returnig AS's default LDL.\"", "%", "(", "self", ".", "id", ",", "result", ")", ")", "return", "AbstractBaseAnalysis", ".", "getUpperDetectionLimit", "(", "self", ")" ]
50
14.352941
def __execute_cmd(name, cmd): ''' Execute Riak commands ''' return __salt__['cmd.run_all']( '{0} {1}'.format(salt.utils.path.which(name), cmd) )
[ "def", "__execute_cmd", "(", "name", ",", "cmd", ")", ":", "return", "__salt__", "[", "'cmd.run_all'", "]", "(", "'{0} {1}'", ".", "format", "(", "salt", ".", "utils", ".", "path", ".", "which", "(", "name", ")", ",", "cmd", ")", ")" ]
23.714286
21.428571
def _get_auth(self, force_console=False): """Try to get login auth from known sources.""" if not self.target: raise ValueError("Unspecified target ({!r})".format(self.target)) elif not force_console and self.URL_RE.match(self.target): auth_url = urlparse(self.target) source = 'url' if auth_url.username: self.user = auth_url.username if auth_url.password: self.password = auth_url.password if not self.auth_valid(): source = self._get_auth_from_keyring() if not self.auth_valid(): source = self._get_auth_from_netrc(auth_url.hostname) if not self.auth_valid(): source = self._get_auth_from_console(self.target) else: source = self._get_auth_from_console(self.target) if self.auth_valid(): self.source = source
[ "def", "_get_auth", "(", "self", ",", "force_console", "=", "False", ")", ":", "if", "not", "self", ".", "target", ":", "raise", "ValueError", "(", "\"Unspecified target ({!r})\"", ".", "format", "(", "self", ".", "target", ")", ")", "elif", "not", "force_console", "and", "self", ".", "URL_RE", ".", "match", "(", "self", ".", "target", ")", ":", "auth_url", "=", "urlparse", "(", "self", ".", "target", ")", "source", "=", "'url'", "if", "auth_url", ".", "username", ":", "self", ".", "user", "=", "auth_url", ".", "username", "if", "auth_url", ".", "password", ":", "self", ".", "password", "=", "auth_url", ".", "password", "if", "not", "self", ".", "auth_valid", "(", ")", ":", "source", "=", "self", ".", "_get_auth_from_keyring", "(", ")", "if", "not", "self", ".", "auth_valid", "(", ")", ":", "source", "=", "self", ".", "_get_auth_from_netrc", "(", "auth_url", ".", "hostname", ")", "if", "not", "self", ".", "auth_valid", "(", ")", ":", "source", "=", "self", ".", "_get_auth_from_console", "(", "self", ".", "target", ")", "else", ":", "source", "=", "self", ".", "_get_auth_from_console", "(", "self", ".", "target", ")", "if", "self", ".", "auth_valid", "(", ")", ":", "self", ".", "source", "=", "source" ]
42.272727
13.954545
def time_seconds(tc_array, year): """Return the time object from the timecodes """ tc_array = np.array(tc_array, copy=True) word = tc_array[:, 0] day = word >> 1 word = tc_array[:, 1].astype(np.uint64) msecs = ((127) & word) * 1024 word = tc_array[:, 2] msecs += word & 1023 msecs *= 1024 word = tc_array[:, 3] msecs += word & 1023 return (np.datetime64( str(year) + '-01-01T00:00:00Z', 's') + msecs[:].astype('timedelta64[ms]') + (day - 1)[:].astype('timedelta64[D]'))
[ "def", "time_seconds", "(", "tc_array", ",", "year", ")", ":", "tc_array", "=", "np", ".", "array", "(", "tc_array", ",", "copy", "=", "True", ")", "word", "=", "tc_array", "[", ":", ",", "0", "]", "day", "=", "word", ">>", "1", "word", "=", "tc_array", "[", ":", ",", "1", "]", ".", "astype", "(", "np", ".", "uint64", ")", "msecs", "=", "(", "(", "127", ")", "&", "word", ")", "*", "1024", "word", "=", "tc_array", "[", ":", ",", "2", "]", "msecs", "+=", "word", "&", "1023", "msecs", "*=", "1024", "word", "=", "tc_array", "[", ":", ",", "3", "]", "msecs", "+=", "word", "&", "1023", "return", "(", "np", ".", "datetime64", "(", "str", "(", "year", ")", "+", "'-01-01T00:00:00Z'", ",", "'s'", ")", "+", "msecs", "[", ":", "]", ".", "astype", "(", "'timedelta64[ms]'", ")", "+", "(", "day", "-", "1", ")", "[", ":", "]", ".", "astype", "(", "'timedelta64[D]'", ")", ")" ]
31.117647
10.117647
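A worked call, assuming the packing decoded above (day number in the high bits of word 0, milliseconds spread over 10-bit fields in words 1-3); note that newer NumPy versions warn about the trailing 'Z' in the epoch string.

import numpy as np

# Day 1 (word 0 stores day << 1), 5 ms into the day (all in word 3).
tc = np.array([[1 << 1, 0, 0, 5]], dtype=np.uint16)
print(time_seconds(tc, 2019))  # ['2019-01-01T00:00:00.005']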
def system_info(url, auth, verify_ssl):
    """Retrieve SDC system information.

    Args:
        url (str): the host url.
        auth (tuple): a tuple of username and password.
        verify_ssl (bool): whether to verify the server's SSL certificate.
    """
    sysinfo_response = requests.get(url + '/info', headers=X_REQ_BY, auth=auth, verify=verify_ssl)
    sysinfo_response.raise_for_status()
    return sysinfo_response.json()
[ "def", "system_info", "(", "url", ",", "auth", ",", "verify_ssl", ")", ":", "sysinfo_response", "=", "requests", ".", "get", "(", "url", "+", "'/info'", ",", "headers", "=", "X_REQ_BY", ",", "auth", "=", "auth", ",", "verify", "=", "verify_ssl", ")", "sysinfo_response", ".", "raise_for_status", "(", ")", "return", "sysinfo_response", ".", "json", "(", ")" ]
32.090909
18.272727
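A hedged usage sketch; this needs a running Data Collector to answer, and X_REQ_BY is assumed to be the module-level required-header dict (typically {'X-Requested-By': 'sdc'}). The URL and credentials are placeholders.

info = system_info('http://localhost:18630/rest/v1', ('admin', 'admin'), False)
print(info)  # JSON-decoded body of GET <url>/info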
def get_accounts_from_file(filename):
    """
    Reads a list of user/password combinations from the given file
    and returns a list of Account instances. The file content
    has the following format::

        [account-pool]
        user1 = cGFzc3dvcmQ=
        user2 = cGFzc3dvcmQ=

    Note that "cGFzc3dvcmQ=" is a base64 encoded password.
    If the input file contains extra config sections other than
    "account-pool", they are ignored.
    Each password needs to be base64 encoded. To encode a password,
    you may use the following command::

        python -c 'import base64; print(base64.b64encode(b"thepassword").decode())'

    :type  filename: string
    :param filename: The name of the file containing the list of accounts.
    :rtype:  list[Account]
    :return: The newly created account instances.
    """
    accounts = []
    cfgparser = __import__('configparser', {}, {}, [''])
    parser = cfgparser.RawConfigParser()
    parser.optionxform = str
    parser.read(filename)
    for user, password in parser.items('account-pool'):
        password = base64.decodebytes(password.encode('latin1'))
        accounts.append(Account(user, password.decode('latin1')))
    return accounts
[ "def", "get_accounts_from_file", "(", "filename", ")", ":", "accounts", "=", "[", "]", "cfgparser", "=", "__import__", "(", "'configparser'", ",", "{", "}", ",", "{", "}", ",", "[", "''", "]", ")", "parser", "=", "cfgparser", ".", "RawConfigParser", "(", ")", "parser", ".", "optionxform", "=", "str", "parser", ".", "read", "(", "filename", ")", "for", "user", ",", "password", "in", "parser", ".", "items", "(", "'account-pool'", ")", ":", "password", "=", "base64", ".", "decodebytes", "(", "password", ".", "encode", "(", "'latin1'", ")", ")", "accounts", ".", "append", "(", "Account", "(", "user", ",", "password", ".", "decode", "(", "'latin1'", ")", ")", ")", "return", "accounts" ]
36.59375
18.46875
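A self-contained round trip under stated assumptions: the Account class and the base64 import come from this module, and the temporary file stands in for a real accounts file. Note the bytes literal required by Python 3's b64encode.

import base64
import tempfile

encoded = base64.b64encode(b'password').decode('ascii')  # 'cGFzc3dvcmQ='
with tempfile.NamedTemporaryFile('w', suffix='.cfg', delete=False) as cfg:
    cfg.write('[account-pool]\nuser1 = %s\n' % encoded)
accounts = get_accounts_from_file(cfg.name)
print(len(accounts))  # 1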
async def helo( self, hostname: str = None, timeout: DefaultNumType = _default ) -> SMTPResponse: """ Send the SMTP HELO command. Hostname to send for this command defaults to the FQDN of the local host. :raises SMTPHeloError: on unexpected server response code """ if hostname is None: hostname = self.source_address async with self._command_lock: response = await self.execute_command( b"HELO", hostname.encode("ascii"), timeout=timeout ) self.last_helo_response = response if response.code != SMTPStatus.completed: raise SMTPHeloError(response.code, response.message) return response
[ "async", "def", "helo", "(", "self", ",", "hostname", ":", "str", "=", "None", ",", "timeout", ":", "DefaultNumType", "=", "_default", ")", "->", "SMTPResponse", ":", "if", "hostname", "is", "None", ":", "hostname", "=", "self", ".", "source_address", "async", "with", "self", ".", "_command_lock", ":", "response", "=", "await", "self", ".", "execute_command", "(", "b\"HELO\"", ",", "hostname", ".", "encode", "(", "\"ascii\"", ")", ",", "timeout", "=", "timeout", ")", "self", ".", "last_helo_response", "=", "response", "if", "response", ".", "code", "!=", "SMTPStatus", ".", "completed", ":", "raise", "SMTPHeloError", "(", "response", ".", "code", ",", "response", ".", "message", ")", "return", "response" ]
33.272727
18.909091
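A hedged sketch of reaching this coroutine through aiosmtplib's SMTP client; the server below is a placeholder and must be reachable for the snippet to run.

import asyncio
from aiosmtplib import SMTP  # assumed host class for this method

async def say_helo():
    client = SMTP(hostname='smtp.example.com', port=25)  # placeholder server
    await client.connect()
    response = await client.helo()  # hostname defaults to the local FQDN
    print(response.code, response.message)
    await client.quit()

asyncio.run(say_helo())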
def _generic_action_parser(self): """Generic parser for Actions.""" actions = [] while True: action_code = unpack_ui8(self._src) if action_code == 0: break action_name = ACTION_NAMES[action_code] if action_code > 128: # have a payload! action_len = unpack_ui16(self._src) try: action_meth = getattr( self, "_handle_" + action_name.lower()) except AttributeError: if self.unknown_alert: raise ValueError( "Unknown action: " + repr(action_name)) action_payload = self._src.read(action_len) _dict = {'__str__': _repr, '__repr__': _repr, 'name': action_name} action = type("UnknownAction", (SWFObject,), _dict)() action.raw_payload = action_payload actions.append(action) else: prev_pos = self._src.tell() for action in action_meth(action_len): assert action is not None, action_name actions.append(action) quant_read = self._src.tell() - prev_pos if quant_read != action_len: raise RuntimeError( "Bad bytes consumption by action {!r} handler " "(did {}, should {})".format( action_name, quant_read, action_len)) else: action = _make_object(action_name) actions.append(action) return actions
[ "def", "_generic_action_parser", "(", "self", ")", ":", "actions", "=", "[", "]", "while", "True", ":", "action_code", "=", "unpack_ui8", "(", "self", ".", "_src", ")", "if", "action_code", "==", "0", ":", "break", "action_name", "=", "ACTION_NAMES", "[", "action_code", "]", "if", "action_code", ">", "128", ":", "# have a payload!", "action_len", "=", "unpack_ui16", "(", "self", ".", "_src", ")", "try", ":", "action_meth", "=", "getattr", "(", "self", ",", "\"_handle_\"", "+", "action_name", ".", "lower", "(", ")", ")", "except", "AttributeError", ":", "if", "self", ".", "unknown_alert", ":", "raise", "ValueError", "(", "\"Unknown action: \"", "+", "repr", "(", "action_name", ")", ")", "action_payload", "=", "self", ".", "_src", ".", "read", "(", "action_len", ")", "_dict", "=", "{", "'__str__'", ":", "_repr", ",", "'__repr__'", ":", "_repr", ",", "'name'", ":", "action_name", "}", "action", "=", "type", "(", "\"UnknownAction\"", ",", "(", "SWFObject", ",", ")", ",", "_dict", ")", "(", ")", "action", ".", "raw_payload", "=", "action_payload", "actions", ".", "append", "(", "action", ")", "else", ":", "prev_pos", "=", "self", ".", "_src", ".", "tell", "(", ")", "for", "action", "in", "action_meth", "(", "action_len", ")", ":", "assert", "action", "is", "not", "None", ",", "action_name", "actions", ".", "append", "(", "action", ")", "quant_read", "=", "self", ".", "_src", ".", "tell", "(", ")", "-", "prev_pos", "if", "quant_read", "!=", "action_len", ":", "raise", "RuntimeError", "(", "\"Bad bytes consumption by action {!r} handler \"", "\"(did {}, should {})\"", ".", "format", "(", "action_name", ",", "quant_read", ",", "action_len", ")", ")", "else", ":", "action", "=", "_make_object", "(", "action_name", ")", "actions", ".", "append", "(", "action", ")", "return", "actions" ]
41.761905
15.690476
def _on_github_user(self, future, access_token, response):
        """Invoked as a callback when self.github_request returns the response
        to the request for user data.

        :param tornado.concurrent.Future future: The future to resolve with
            the user data
        :param str access_token: The access token for the user's use
        :param dict response: The HTTP response already decoded

        """
        response['access_token'] = access_token
        future.set_result(response)
[ "def", "_on_github_user", "(", "self", ",", "future", ",", "access_token", ",", "response", ")", ":", "response", "[", "'access_token'", "]", "=", "access_token", "future", ".", "set_result", "(", "response", ")" ]
41.818182
17
def main(args=None): """Call the CLI interface and wait for the result.""" retcode = 0 try: ci = CliInterface() args = ci.parser.parse_args() result = args.func(args) if result is not None: print(result) retcode = 0 except Exception: retcode = 1 traceback.print_exc() sys.exit(retcode)
[ "def", "main", "(", "args", "=", "None", ")", ":", "retcode", "=", "0", "try", ":", "ci", "=", "CliInterface", "(", ")", "args", "=", "ci", ".", "parser", ".", "parse_args", "(", ")", "result", "=", "args", ".", "func", "(", "args", ")", "if", "result", "is", "not", "None", ":", "print", "(", "result", ")", "retcode", "=", "0", "except", "Exception", ":", "retcode", "=", "1", "traceback", ".", "print_exc", "(", ")", "sys", ".", "exit", "(", "retcode", ")" ]
25.714286
15.5
def _home_assistant_config(self): """ Creates home assistant configuration for the known devices """ devices = {} for scs_id, dev in self._devices.items(): devices[dev['ha_id']] = { 'name': dev['name'], 'scs_id': scs_id} return {'devices': devices}
[ "def", "_home_assistant_config", "(", "self", ")", ":", "devices", "=", "{", "}", "for", "scs_id", ",", "dev", "in", "self", ".", "_devices", ".", "items", "(", ")", ":", "devices", "[", "dev", "[", "'ha_id'", "]", "]", "=", "{", "'name'", ":", "dev", "[", "'name'", "]", ",", "'scs_id'", ":", "scs_id", "}", "return", "{", "'devices'", ":", "devices", "}" ]
35.222222
10.555556
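The reshaping is a plain dict rebuild; a standalone sketch with a hypothetical SCS device registry:

_devices = {'12': {'ha_id': 'light_kitchen', 'name': 'Kitchen'}}  # hypothetical registry
devices = {}
for scs_id, dev in _devices.items():
    devices[dev['ha_id']] = {'name': dev['name'], 'scs_id': scs_id}
print({'devices': devices})
# {'devices': {'light_kitchen': {'name': 'Kitchen', 'scs_id': '12'}}}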
def also_restrict_to(self, restriction):
        """
        Works like restrict_to but offers an additional restriction. Playbooks use this
        to implement serial behavior.
        """
        if type(restriction) != list:
            restriction = [ restriction ]
        self._also_restriction = restriction
[ "def", "also_restrict_to", "(", "self", ",", "restriction", ")", ":", "if", "type", "(", "restriction", ")", "!=", "list", ":", "restriction", "=", "[", "restriction", "]", "self", ".", "_also_restriction", "=", "restriction" ]
38.5
7.25
def round_array(array_in): """ arr_out = round_array(array_in) Rounds an array and recasts it to int. Also works on scalars. """ if isinstance(array_in, ndarray): return np.round(array_in).astype(int) else: return int(np.round(array_in))
[ "def", "round_array", "(", "array_in", ")", ":", "if", "isinstance", "(", "array_in", ",", "ndarray", ")", ":", "return", "np", ".", "round", "(", "array_in", ")", ".", "astype", "(", "int", ")", "else", ":", "return", "int", "(", "np", ".", "round", "(", "array_in", ")", ")" ]
26.9
12.5
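Because np.round is used, halves round to the nearest even integer (banker's rounding), which is worth knowing before relying on the output:

import numpy as np

print(round_array(np.array([0.4, 1.5, 2.6])))  # [0 2 3]
print(round_array(2.5))                        # 2, since np.round rounds halves to even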
def get_ports(device_owners=None, vnic_type=None, port_id=None, active=True):
    """Returns list of all ports in the neutron db"""
    session = db.get_reader_session()
    with session.begin():
        port_model = models_v2.Port
        ports = (session
                 .query(port_model)
                 .filter_unnecessary_ports(device_owners, vnic_type, active))
        if port_id:
            ports = ports.filter(port_model.id == port_id)
        return ports.all()
[ "def", "get_ports", "(", "device_owners", "=", "None", ",", "vnic_type", "=", "None", ",", "port_id", "=", "None", ",", "active", "=", "True", ")", ":", "session", "=", "db", ".", "get_reader_session", "(", ")", "with", "session", ".", "begin", "(", ")", ":", "port_model", "=", "models_v2", ".", "Port", "ports", "=", "(", "session", ".", "query", "(", "port_model", ")", ".", "filter_unnecessary_ports", "(", "device_owners", ",", "vnic_type", ",", "active", ")", ")", "if", "port_id", ":", "ports", "=", "ports", ".", "filter", "(", "port_model", ".", "id", "==", "port_id", ")", "return", "ports", ".", "all", "(", ")" ]
42
15.909091
def process(self): """Execute the grep command""" for _, path in self.state.input: log_file_path = os.path.join(self._output_path, 'grepper.log') print('Log file: {0:s}'.format(log_file_path)) print('Walking through dir (absolute) = ' + os.path.abspath(path)) try: for root, _, files in os.walk(path): for filename in files: found = set() fullpath = '{0:s}/{1:s}'.format(os.path.abspath(root), filename) if mimetypes.guess_type(filename)[0] == 'application/pdf': found = self.grepPDF(fullpath) else: with open(fullpath, 'r') as fp: for line in fp: found.update(set(x.lower() for x in re.findall( self._keywords, line, re.IGNORECASE))) if [item for item in found if item]: output = '{0:s}/{1:s}:{2:s}'.format(path, filename, ','.join( filter(None, found))) if self._final_output: self._final_output += '\n' + output else: self._final_output = output print(output) except OSError as exception: self.state.add_error(exception, critical=True) return # Catch all remaining errors since we want to gracefully report them except Exception as exception: # pylint: disable=broad-except self.state.add_error(exception, critical=True) return
[ "def", "process", "(", "self", ")", ":", "for", "_", ",", "path", "in", "self", ".", "state", ".", "input", ":", "log_file_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_output_path", ",", "'grepper.log'", ")", "print", "(", "'Log file: {0:s}'", ".", "format", "(", "log_file_path", ")", ")", "print", "(", "'Walking through dir (absolute) = '", "+", "os", ".", "path", ".", "abspath", "(", "path", ")", ")", "try", ":", "for", "root", ",", "_", ",", "files", "in", "os", ".", "walk", "(", "path", ")", ":", "for", "filename", "in", "files", ":", "found", "=", "set", "(", ")", "fullpath", "=", "'{0:s}/{1:s}'", ".", "format", "(", "os", ".", "path", ".", "abspath", "(", "root", ")", ",", "filename", ")", "if", "mimetypes", ".", "guess_type", "(", "filename", ")", "[", "0", "]", "==", "'application/pdf'", ":", "found", "=", "self", ".", "grepPDF", "(", "fullpath", ")", "else", ":", "with", "open", "(", "fullpath", ",", "'r'", ")", "as", "fp", ":", "for", "line", "in", "fp", ":", "found", ".", "update", "(", "set", "(", "x", ".", "lower", "(", ")", "for", "x", "in", "re", ".", "findall", "(", "self", ".", "_keywords", ",", "line", ",", "re", ".", "IGNORECASE", ")", ")", ")", "if", "[", "item", "for", "item", "in", "found", "if", "item", "]", ":", "output", "=", "'{0:s}/{1:s}:{2:s}'", ".", "format", "(", "path", ",", "filename", ",", "','", ".", "join", "(", "filter", "(", "None", ",", "found", ")", ")", ")", "if", "self", ".", "_final_output", ":", "self", ".", "_final_output", "+=", "'\\n'", "+", "output", "else", ":", "self", ".", "_final_output", "=", "output", "print", "(", "output", ")", "except", "OSError", "as", "exception", ":", "self", ".", "state", ".", "add_error", "(", "exception", ",", "critical", "=", "True", ")", "return", "# Catch all remaining errors since we want to gracefully report them", "except", "Exception", "as", "exception", ":", "# pylint: disable=broad-except", "self", ".", "state", ".", "add_error", "(", "exception", ",", "critical", "=", "True", ")", "return" ]
41.4
18.028571
def list_group_members(self, group_url, max_results=0): ''' a method to retrieve a list of members for a meetup group :param group_url: string with meetup urlname for group :param max_results: [optional] integer with number of members to include :return: dictionary with list of member details inside [json] key member_details = self._reconstruct_member({}) ''' # https://www.meetup.com/meetup_api/docs/:urlname/members/#list title = '%s.list_group_members' % self.__class__.__name__ # validate inputs input_fields = { 'group_url': group_url, 'max_results': max_results } for key, value in input_fields.items(): if value: object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # construct request fields url = '%s/%s/members' % (self.endpoint, group_url) params = { 'fields': 'gender,birthday,last_event,messaging_pref,next_event,other_services,privacy,self,stats' } if max_results: params['page'] = str(max_results) # send request response_details = self._get_request(url, params=params) # reconstruct method output group_members = { 'json': [] } for key, value in response_details.items(): if key != 'json': group_members[key] = value for member in response_details['json']: group_members['json'].append(self._reconstruct_member(member)) return group_members
[ "def", "list_group_members", "(", "self", ",", "group_url", ",", "max_results", "=", "0", ")", ":", "# https://www.meetup.com/meetup_api/docs/:urlname/members/#list\r", "title", "=", "'%s.list_group_members'", "%", "self", ".", "__class__", ".", "__name__", "# validate inputs\r", "input_fields", "=", "{", "'group_url'", ":", "group_url", ",", "'max_results'", ":", "max_results", "}", "for", "key", ",", "value", "in", "input_fields", ".", "items", "(", ")", ":", "if", "value", ":", "object_title", "=", "'%s(%s=%s)'", "%", "(", "title", ",", "key", ",", "str", "(", "value", ")", ")", "self", ".", "fields", ".", "validate", "(", "value", ",", "'.%s'", "%", "key", ",", "object_title", ")", "# construct request fields\r", "url", "=", "'%s/%s/members'", "%", "(", "self", ".", "endpoint", ",", "group_url", ")", "params", "=", "{", "'fields'", ":", "'gender,birthday,last_event,messaging_pref,next_event,other_services,privacy,self,stats'", "}", "if", "max_results", ":", "params", "[", "'page'", "]", "=", "str", "(", "max_results", ")", "# send request\r", "response_details", "=", "self", ".", "_get_request", "(", "url", ",", "params", "=", "params", ")", "# reconstruct method output\r", "group_members", "=", "{", "'json'", ":", "[", "]", "}", "for", "key", ",", "value", "in", "response_details", ".", "items", "(", ")", ":", "if", "key", "!=", "'json'", ":", "group_members", "[", "key", "]", "=", "value", "for", "member", "in", "response_details", "[", "'json'", "]", ":", "group_members", "[", "'json'", "]", ".", "append", "(", "self", ".", "_reconstruct_member", "(", "member", ")", ")", "return", "group_members" ]
35.06383
24.510638
def heightmap_add_voronoi(
    hm: np.ndarray,
    nbPoints: Any,
    nbCoef: int,
    coef: Sequence[float],
    rnd: Optional[tcod.random.Random] = None,
) -> None:
    """Add values from a Voronoi diagram to the heightmap.

    Args:
        hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions.
        nbPoints (Any): Number of Voronoi sites.
        nbCoef (int): The diagram value is calculated from the nbCoef
            closest sites.
        coef (Sequence[float]): The distance to each site is scaled by the
            corresponding coef.
            Closest site : coef[0], second closest site : coef[1], ...
        rnd (Optional[Random]): A Random instance, or None.
    """
    # Note: the effective number of sites is taken from the coefficient
    # list, so the nbPoints argument is overridden here.
    nbPoints = len(coef)
    ccoef = ffi.new("float[]", coef)
    lib.TCOD_heightmap_add_voronoi(
        _heightmap_cdata(hm),
        nbPoints,
        nbCoef,
        ccoef,
        rnd.random_c if rnd else ffi.NULL,
    )
[ "def", "heightmap_add_voronoi", "(", "hm", ":", "np", ".", "ndarray", ",", "nbPoints", ":", "Any", ",", "nbCoef", ":", "int", ",", "coef", ":", "Sequence", "[", "float", "]", ",", "rnd", ":", "Optional", "[", "tcod", ".", "random", ".", "Random", "]", "=", "None", ",", ")", "->", "None", ":", "nbPoints", "=", "len", "(", "coef", ")", "ccoef", "=", "ffi", ".", "new", "(", "\"float[]\"", ",", "coef", ")", "lib", ".", "TCOD_heightmap_add_voronoi", "(", "_heightmap_cdata", "(", "hm", ")", ",", "nbPoints", ",", "nbCoef", ",", "ccoef", ",", "rnd", ".", "random_c", "if", "rnd", "else", "ffi", ".", "NULL", ",", ")" ]
34.034483
18.344828
def use(**kwargs): """ Updates the active resource configuration to the passed keyword arguments. Invoking this method without passing arguments will just return the active resource configuration. @returns The previous configuration. """ config = dict(use.config) use.config.update(kwargs) return config
[ "def", "use", "(", "*", "*", "kwargs", ")", ":", "config", "=", "dict", "(", "use", ".", "config", ")", "use", ".", "config", ".", "update", "(", "kwargs", ")", "return", "config" ]
24.285714
18.142857
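A runnable sketch of the call pattern; use.config is normally initialised by the resource module, so it is seeded here only to make the snippet standalone:

use.config = {}           # normally set up by the resource module
previous = use(timeout=30)
print(previous)           # {} - the configuration before the update
print(use())              # {'timeout': 30} - no kwargs, so config is unchanged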
def translate_pname(self, pname: PrefName, mid: ModuleId) -> QualName: """Translate a prefixed name to a qualified name. Args: pname: Name with an optional prefix. mid: Identifier of the module in which `pname` appears. Raises: ModuleNotRegistered: If `mid` is not registered in the data model. UnknownPrefix: If the prefix specified in `pname` is not declared. """ loc, nid = self.resolve_pname(pname, mid) return (loc, self.namespace(nid))
[ "def", "translate_pname", "(", "self", ",", "pname", ":", "PrefName", ",", "mid", ":", "ModuleId", ")", "->", "QualName", ":", "loc", ",", "nid", "=", "self", ".", "resolve_pname", "(", "pname", ",", "mid", ")", "return", "(", "loc", ",", "self", ".", "namespace", "(", "nid", ")", ")" ]
47.909091
18.454545
def set_servo_speed(self, goalspeed, led):
        """ Set the Herkulex in continuous rotation mode

        Args:
            goalspeed (int): the speed, range -1023 to 1023
            led (int): the LED color
                0x00 LED off
                0x04 GREEN
                0x08 BLUE
                0x10 RED
        """
        if goalspeed > 0:
            goalspeed_msb = (int(goalspeed) & 0xFF00) >> 8
            goalspeed_lsb = int(goalspeed) & 0xff
        elif goalspeed < 0:
            goalspeed_msb = 64 + (255 - ((int(goalspeed) & 0xFF00) >> 8))
            goalspeed_lsb = abs(goalspeed) & 0xff
        else:
            # goalspeed == 0: stop the servo
            goalspeed_msb = 0
            goalspeed_lsb = 0

        data = []
        data.append(0x0C)
        data.append(self.servoid)
        data.append(I_JOG_REQ)
        data.append(goalspeed_lsb)
        data.append(goalspeed_msb)
        data.append(0x02 | led)
        data.append(self.servoid)
        data.append(0x00)
        send_data(data)
[ "def", "set_servo_speed", "(", "self", ",", "goalspeed", ",", "led", ")", ":", "if", "goalspeed", ">", "0", ":", "goalspeed_msb", "=", "(", "int", "(", "goalspeed", ")", "&", "0xFF00", ")", ">>", "8", "goalspeed_lsb", "=", "int", "(", "goalspeed", ")", "&", "0xff", "elif", "goalspeed", "<", "0", ":", "goalspeed_msb", "=", "64", "+", "(", "255", "-", "(", "(", "int", "(", "goalspeed", ")", "&", "0xFF00", ")", ">>", "8", ")", ")", "goalspeed_lsb", "=", "(", "abs", "(", "goalspeed", ")", "&", "0xff", ")", "#print goalspeed_msb,goalspeed_lsb", "data", "=", "[", "]", "data", ".", "append", "(", "0x0C", ")", "data", ".", "append", "(", "self", ".", "servoid", ")", "data", ".", "append", "(", "I_JOG_REQ", ")", "data", ".", "append", "(", "goalspeed_lsb", ")", "data", ".", "append", "(", "goalspeed_msb", ")", "data", ".", "append", "(", "0x02", "|", "led", ")", "data", ".", "append", "(", "self", ".", "servoid", ")", "data", ".", "append", "(", "0x00", ")", "send_data", "(", "data", ")" ]
30.645161
14.741935
def closest_common_ancestor(self, other):
        """
        Find the common ancestor between this history node and 'other'.

        :param other:    the other history node to find a common ancestor with.
        :return:         the common ancestor history node, or None if there
                         isn't one
        """
        our_history_iter = reversed(HistoryIter(self))
        their_history_iter = reversed(HistoryIter(other))
        sofar = set()

        while True:
            our_done = False
            their_done = False

            try:
                our_next = next(our_history_iter)
                if our_next in sofar:
                    # we found it!
                    return our_next
                sofar.add(our_next)
            except StopIteration:
                # we ran out of items during iteration
                our_done = True

            try:
                their_next = next(their_history_iter)
                if their_next in sofar:
                    # we found it!
                    return their_next
                sofar.add(their_next)
            except StopIteration:
                # we ran out of items during iteration
                their_done = True

            # if we ran out of both lists, there's no common ancestor
            if our_done and their_done:
                return None
[ "def", "closest_common_ancestor", "(", "self", ",", "other", ")", ":", "our_history_iter", "=", "reversed", "(", "HistoryIter", "(", "self", ")", ")", "their_history_iter", "=", "reversed", "(", "HistoryIter", "(", "other", ")", ")", "sofar", "=", "set", "(", ")", "while", "True", ":", "our_done", "=", "False", "their_done", "=", "False", "try", ":", "our_next", "=", "next", "(", "our_history_iter", ")", "if", "our_next", "in", "sofar", ":", "# we found it!", "return", "our_next", "sofar", ".", "add", "(", "our_next", ")", "except", "StopIteration", ":", "# we ran out of items during iteration", "our_done", "=", "True", "try", ":", "their_next", "=", "next", "(", "their_history_iter", ")", "if", "their_next", "in", "sofar", ":", "# we found it!", "return", "their_next", "sofar", ".", "add", "(", "their_next", ")", "except", "StopIteration", ":", "# we ran out of items during iteration", "their_done", "=", "True", "# if we ran out of both lists, there's no common ancestor", "if", "our_done", "and", "their_done", ":", "return", "None" ]
34.131579
15.973684
def has_hardware_breakpoint(self, dwThreadId, address): """ Checks if a hardware breakpoint is defined at the given address. @see: L{define_hardware_breakpoint}, L{get_hardware_breakpoint}, L{erase_hardware_breakpoint}, L{enable_hardware_breakpoint}, L{enable_one_shot_hardware_breakpoint}, L{disable_hardware_breakpoint} @type dwThreadId: int @param dwThreadId: Thread global ID. @type address: int @param address: Memory address of breakpoint. @rtype: bool @return: C{True} if the breakpoint is defined, C{False} otherwise. """ if dwThreadId in self.__hardwareBP: bpSet = self.__hardwareBP[dwThreadId] for bp in bpSet: if bp.get_address() == address: return True return False
[ "def", "has_hardware_breakpoint", "(", "self", ",", "dwThreadId", ",", "address", ")", ":", "if", "dwThreadId", "in", "self", ".", "__hardwareBP", ":", "bpSet", "=", "self", ".", "__hardwareBP", "[", "dwThreadId", "]", "for", "bp", "in", "bpSet", ":", "if", "bp", ".", "get_address", "(", ")", "==", "address", ":", "return", "True", "return", "False" ]
32.814815
15.037037
def cmd_repeat(self, args): '''repeat a command at regular intervals''' if len(args) == 0: if len(self.repeats) == 0: print("No repeats") return for i in range(len(self.repeats)): print("%u: %s" % (i, self.repeats[i])) return if args[0] == 'add': if len(args) < 3: print("Usage: repeat add PERIOD CMD") return self.repeats.append(RepeatCommand(float(args[1]), " ".join(args[2:]))) elif args[0] == 'remove': if len(args) < 2: print("Usage: repeat remove INDEX") return i = int(args[1]) if i < 0 or i >= len(self.repeats): print("Invalid index %d" % i) return self.repeats.pop(i) return elif args[0] == 'clean': self.repeats = [] else: print("Usage: repeat <add|remove|clean>")
[ "def", "cmd_repeat", "(", "self", ",", "args", ")", ":", "if", "len", "(", "args", ")", "==", "0", ":", "if", "len", "(", "self", ".", "repeats", ")", "==", "0", ":", "print", "(", "\"No repeats\"", ")", "return", "for", "i", "in", "range", "(", "len", "(", "self", ".", "repeats", ")", ")", ":", "print", "(", "\"%u: %s\"", "%", "(", "i", ",", "self", ".", "repeats", "[", "i", "]", ")", ")", "return", "if", "args", "[", "0", "]", "==", "'add'", ":", "if", "len", "(", "args", ")", "<", "3", ":", "print", "(", "\"Usage: repeat add PERIOD CMD\"", ")", "return", "self", ".", "repeats", ".", "append", "(", "RepeatCommand", "(", "float", "(", "args", "[", "1", "]", ")", ",", "\" \"", ".", "join", "(", "args", "[", "2", ":", "]", ")", ")", ")", "elif", "args", "[", "0", "]", "==", "'remove'", ":", "if", "len", "(", "args", ")", "<", "2", ":", "print", "(", "\"Usage: repeat remove INDEX\"", ")", "return", "i", "=", "int", "(", "args", "[", "1", "]", ")", "if", "i", "<", "0", "or", "i", ">=", "len", "(", "self", ".", "repeats", ")", ":", "print", "(", "\"Invalid index %d\"", "%", "i", ")", "return", "self", ".", "repeats", ".", "pop", "(", "i", ")", "return", "elif", "args", "[", "0", "]", "==", "'clean'", ":", "self", ".", "repeats", "=", "[", "]", "else", ":", "print", "(", "\"Usage: repeat <add|remove|clean>\"", ")" ]
35.142857
13.571429
async def _do_tp(self, pip, mount) -> top_types.Point:
        """ Execute the work of tip probe.

        This is a separate function so that it can be encapsulated in
        a context manager that ensures the state of the pipette tip tracking
        is reset properly. It should not be called outside of
        :py:meth:`locate_tip_probe_center`.

        :param pip: The pipette to use
        :type pip: opentrons.hardware_control.pipette.Pipette
        :param mount: The mount on which the pipette is attached
        :type mount: opentrons.types.Mount
        """
        # Clear the old offset during calibration
        pip.update_instrument_offset(top_types.Point())
        # Hotspots based on our expectation of tip length and config
        hotspots = robot_configs.calculate_tip_probe_hotspots(
            pip.current_tip_length, self._config.tip_probe)
        new_pos: Dict[Axis, List[float]] = {
            ax: [] for ax in Axis.gantry_axes() if ax != Axis.A}
        safe_z = self._config.tip_probe.z_clearance.crossover + \
            self._config.tip_probe.center[2]
        for hs in hotspots:
            ax_en = Axis[hs.axis.upper()]
            overridden_center = {
                ax: sum(vals)/len(vals)
                if len(vals) == 2
                else self._config.tip_probe.center[ax.value]
                for ax, vals in new_pos.items()
            }
            x0 = overridden_center[Axis.X] + hs.x_start_offs
            y0 = overridden_center[Axis.Y] + hs.y_start_offs
            z0 = hs.z_start_abs
            pos = await self.current_position(mount)
            # Move safely to the setup point for the probe
            await self.move_to(
                mount,
                top_types.Point(pos[Axis.X], pos[Axis.Y], safe_z))
            await self.move_to(mount,
                               top_types.Point(x0, y0, safe_z))
            await self.move_to(mount,
                               top_types.Point(x0, y0, z0))
            if ax_en == Axis.Z:
                to_probe = Axis.by_mount(mount)
            else:
                to_probe = ax_en
            # Probe and retrieve the position afterwards
            async with self._motion_lock:
                self._current_position = self._deck_from_smoothie(
                    self._backend.probe(
                        to_probe.name.lower(), hs.probe_distance))
            xyz = await self.gantry_position(mount)
            # Store the updated position.
            self._log.debug(
                "tip probe: hs {}: start: ({} {} {}) status {} will add {}"
                .format(hs, x0, y0, z0, new_pos, xyz[ax_en.value]))
            new_pos[ax_en].append(xyz[ax_en.value])
            # Before moving up, move back to clear the switches
            bounce = self._config.tip_probe.bounce_distance\
                * (-1.0 if hs.probe_distance > 0 else 1.0)
            await self.move_rel(mount,
                                top_types.Point(**{hs.axis: bounce}))
            await self.move_to(mount, xyz._replace(z=safe_z))
        to_ret = top_types.Point(**{ax.name.lower(): sum(vals)/len(vals)
                                    for ax, vals in new_pos.items()})
        self._log.info("Tip probe complete with {} {} on {}. "
                       "New position: {} (default {}), averaged from {}"
                       .format(pip.name, pip.pipette_id, mount.name,
                               to_ret, self._config.tip_probe.center,
                               new_pos))
        return to_ret
[ "async", "def", "_do_tp", "(", "self", ",", "pip", ",", "mount", ")", "->", "top_types", ".", "Point", ":", "# Clear the old offset during calibration", "pip", ".", "update_instrument_offset", "(", "top_types", ".", "Point", "(", ")", ")", "# Hotspots based on our expectation of tip length and config", "hotspots", "=", "robot_configs", ".", "calculate_tip_probe_hotspots", "(", "pip", ".", "current_tip_length", ",", "self", ".", "_config", ".", "tip_probe", ")", "new_pos", ":", "Dict", "[", "Axis", ",", "List", "[", "float", "]", "]", "=", "{", "ax", ":", "[", "]", "for", "ax", "in", "Axis", ".", "gantry_axes", "(", ")", "if", "ax", "!=", "Axis", ".", "A", "}", "safe_z", "=", "self", ".", "_config", ".", "tip_probe", ".", "z_clearance", ".", "crossover", "+", "self", ".", "_config", ".", "tip_probe", ".", "center", "[", "2", "]", "for", "hs", "in", "hotspots", ":", "ax_en", "=", "Axis", "[", "hs", ".", "axis", ".", "upper", "(", ")", "]", "overridden_center", "=", "{", "ax", ":", "sum", "(", "vals", ")", "/", "len", "(", "vals", ")", "if", "len", "(", "vals", ")", "==", "2", "else", "self", ".", "_config", ".", "tip_probe", ".", "center", "[", "ax", ".", "value", "]", "for", "ax", ",", "vals", "in", "new_pos", ".", "items", "(", ")", "}", "x0", "=", "overridden_center", "[", "Axis", ".", "X", "]", "+", "hs", ".", "x_start_offs", "y0", "=", "overridden_center", "[", "Axis", ".", "Y", "]", "+", "hs", ".", "y_start_offs", "z0", "=", "hs", ".", "z_start_abs", "pos", "=", "await", "self", ".", "current_position", "(", "mount", ")", "# Move safely to the setup point for the probe", "await", "self", ".", "move_to", "(", "mount", ",", "top_types", ".", "Point", "(", "pos", "[", "Axis", ".", "X", "]", ",", "pos", "[", "Axis", ".", "Y", "]", ",", "safe_z", ")", ")", "await", "self", ".", "move_to", "(", "mount", ",", "top_types", ".", "Point", "(", "x0", ",", "y0", ",", "safe_z", ")", ")", "await", "self", ".", "move_to", "(", "mount", ",", "top_types", ".", "Point", "(", "x0", ",", "y0", ",", "z0", ")", ")", "if", "ax_en", "==", "Axis", ".", "Z", ":", "to_probe", "=", "Axis", ".", "by_mount", "(", "mount", ")", "else", ":", "to_probe", "=", "ax_en", "# Probe and retrieve the position afterwards", "async", "with", "self", ".", "_motion_lock", ":", "self", ".", "_current_position", "=", "self", ".", "_deck_from_smoothie", "(", "self", ".", "_backend", ".", "probe", "(", "to_probe", ".", "name", ".", "lower", "(", ")", ",", "hs", ".", "probe_distance", ")", ")", "xyz", "=", "await", "self", ".", "gantry_position", "(", "mount", ")", "# Store the upated position.", "self", ".", "_log", ".", "debug", "(", "\"tip probe: hs {}: start: ({} {} {}) status {} will add {}\"", ".", "format", "(", "hs", ",", "x0", ",", "y0", ",", "z0", ",", "new_pos", ",", "xyz", "[", "ax_en", ".", "value", "]", ")", ")", "new_pos", "[", "ax_en", "]", ".", "append", "(", "xyz", "[", "ax_en", ".", "value", "]", ")", "# Before moving up, move back to clear the switches", "bounce", "=", "self", ".", "_config", ".", "tip_probe", ".", "bounce_distance", "*", "(", "-", "1.0", "if", "hs", ".", "probe_distance", ">", "0", "else", "1.0", ")", "await", "self", ".", "move_rel", "(", "mount", ",", "top_types", ".", "Point", "(", "*", "*", "{", "hs", ".", "axis", ":", "bounce", "}", ")", ")", "await", "self", ".", "move_to", "(", "mount", ",", "xyz", ".", "_replace", "(", "z", "=", "safe_z", ")", ")", "to_ret", "=", "top_types", ".", "Point", "(", "*", "*", "{", "ax", ".", "name", ".", "lower", "(", ")", ":", "sum", "(", "vals", ")", "/", 
"len", "(", "vals", ")", "for", "ax", ",", "vals", "in", "new_pos", ".", "items", "(", ")", "}", ")", "self", ".", "_log", ".", "info", "(", "\"Tip probe complete with {} {} on {}. \"", "\"New position: {} (default {}), averaged from {}\"", ".", "format", "(", "pip", ".", "name", ",", "pip", ".", "pipette_id", ",", "mount", ".", "name", ",", "to_ret", ",", "self", ".", "_config", ".", "tip_probe", ".", "center", ",", "new_pos", ")", ")", "return", "to_ret" ]
47.96
16.533333
def _runargs(argstring): """ Entrypoint for debugging """ import shlex parser = cli.make_arg_parser() args = parser.parse_args(shlex.split(argstring)) run(args)
[ "def", "_runargs", "(", "argstring", ")", ":", "import", "shlex", "parser", "=", "cli", ".", "make_arg_parser", "(", ")", "args", "=", "parser", ".", "parse_args", "(", "shlex", ".", "split", "(", "argstring", ")", ")", "run", "(", "args", ")" ]
25.428571
12.142857
def parse_attribute(tokens, is_merc):
    """ Parse a token stream from inside an attribute selector.

        Enter this function after a left-bracket is found:
        http://www.w3.org/TR/CSS2/selector.html#attribute-selectors
    """
    #
    # Local helper functions
    #

    def next_scalar(tokens, op):
        """ Look for a scalar value just after an attribute selector operator.
        """
        while True:
            tname, tvalue, line, col = tokens.next()
            if tname == 'NUMBER':
                try:
                    value = int(tvalue)
                except ValueError:
                    value = float(tvalue)
                return value
            elif (tname, tvalue) == ('CHAR', '-'):
                tname, tvalue, line, col = tokens.next()
                if tname == 'NUMBER':
                    try:
                        value = int(tvalue)
                    except ValueError:
                        value = float(tvalue)
                    return -value
                else:
                    raise ParseException('Unexpected non-number after a minus sign', line, col)
            elif tname in ('STRING', 'IDENT'):
                if op in ('<', '<=', '>=', '>'):
                    raise ParseException('Selector attribute must use a number for comparison tests', line, col)
                if tname == 'STRING':
                    return tvalue[1:-1]
                else:
                    return tvalue
            elif tname != 'S':
                raise ParseException('Unexpected non-scalar token in attribute', line, col)

    def finish_attribute(tokens):
        """ Look for the end of an attribute selector operator.
        """
        while True:
            tname, tvalue, line, col = tokens.next()
            if (tname, tvalue) == ('CHAR', ']'):
                return
            elif tname != 'S':
                raise ParseException('Found something other than a closing right-bracket at the end of attribute', line, col)

    #
    # The work.
    #

    while True:
        tname, tvalue, line, col = tokens.next()

        if tname == 'IDENT':
            property = tvalue

            while True:
                tname, tvalue, line, col = tokens.next()

                if (tname, tvalue) in [('CHAR', '<'), ('CHAR', '>')]:
                    _tname, _tvalue, line, col = tokens.next()

                    if (_tname, _tvalue) == ('CHAR', '='):
                        #
                        # Operator is one of '<=', '>='
                        #
                        op = tvalue + _tvalue
                        value = next_scalar(tokens, op)
                        finish_attribute(tokens)
                        return SelectorAttributeTest(property, op, value)

                    else:
                        #
                        # Operator is one of '<', '>' and we popped a token too early
                        #
                        op = tvalue
                        value = next_scalar(chain([(_tname, _tvalue, line, col)], tokens), op)
                        finish_attribute(tokens)
                        return SelectorAttributeTest(property, op, value)

                elif (tname, tvalue) == ('CHAR', '!'):
                    _tname, _tvalue, line, col = tokens.next()

                    if (_tname, _tvalue) == ('CHAR', '='):
                        #
                        # Operator is '!='
                        #
                        op = tvalue + _tvalue
                        value = next_scalar(tokens, op)
                        finish_attribute(tokens)
                        return SelectorAttributeTest(property, op, value)

                    else:
                        raise ParseException('Malformed operator in attribute selector', line, col)

                elif (tname, tvalue) == ('CHAR', '='):
                    #
                    # Operator is '='
                    #
                    op = tvalue
                    value = next_scalar(tokens, op)
                    finish_attribute(tokens)
                    return SelectorAttributeTest(property, op, value)

                elif tname != 'S':
                    raise ParseException('Missing operator in attribute selector', line, col)

        elif tname != 'S':
            raise ParseException('Unexpected token in attribute selector', line, col)

    raise ParseException('Malformed attribute selector', line, col)
[ "def", "parse_attribute", "(", "tokens", ",", "is_merc", ")", ":", "#", "# Local helper functions", "#", "def", "next_scalar", "(", "tokens", ",", "op", ")", ":", "\"\"\" Look for a scalar value just after an attribute selector operator.\n \"\"\"", "while", "True", ":", "tname", ",", "tvalue", ",", "line", ",", "col", "=", "tokens", ".", "next", "(", ")", "if", "tname", "==", "'NUMBER'", ":", "try", ":", "value", "=", "int", "(", "tvalue", ")", "except", "ValueError", ":", "value", "=", "float", "(", "tvalue", ")", "return", "value", "elif", "(", "tname", ",", "tvalue", ")", "==", "(", "'CHAR'", ",", "'-'", ")", ":", "tname", ",", "tvalue", ",", "line", ",", "col", "=", "tokens", ".", "next", "(", ")", "if", "tname", "==", "'NUMBER'", ":", "try", ":", "value", "=", "int", "(", "tvalue", ")", "except", "ValueError", ":", "value", "=", "float", "(", "tvalue", ")", "return", "-", "value", "else", ":", "raise", "ParseException", "(", "'Unexpected non-number after a minus sign'", ",", "line", ",", "col", ")", "elif", "tname", "in", "(", "'STRING'", ",", "'IDENT'", ")", ":", "if", "op", "in", "(", "'<'", ",", "'<='", ",", "'=>'", ",", "'>'", ")", ":", "raise", "ParseException", "(", "'Selector attribute must use a number for comparison tests'", ",", "line", ",", "col", ")", "if", "tname", "==", "'STRING'", ":", "return", "tvalue", "[", "1", ":", "-", "1", "]", "else", ":", "return", "tvalue", "elif", "tname", "!=", "'S'", ":", "raise", "ParseException", "(", "'Unexpected non-scalar token in attribute'", ",", "line", ",", "col", ")", "def", "finish_attribute", "(", "tokens", ")", ":", "\"\"\" Look for the end of an attribute selector operator.\n \"\"\"", "while", "True", ":", "tname", ",", "tvalue", ",", "line", ",", "col", "=", "tokens", ".", "next", "(", ")", "if", "(", "tname", ",", "tvalue", ")", "==", "(", "'CHAR'", ",", "']'", ")", ":", "return", "elif", "tname", "!=", "'S'", ":", "raise", "ParseException", "(", "'Found something other than a closing right-bracket at the end of attribute'", ",", "line", ",", "col", ")", "#", "# The work.", "#", "while", "True", ":", "tname", ",", "tvalue", ",", "line", ",", "col", "=", "tokens", ".", "next", "(", ")", "if", "tname", "==", "'IDENT'", ":", "property", "=", "tvalue", "while", "True", ":", "tname", ",", "tvalue", ",", "line", ",", "col", "=", "tokens", ".", "next", "(", ")", "if", "(", "tname", ",", "tvalue", ")", "in", "[", "(", "'CHAR'", ",", "'<'", ")", ",", "(", "'CHAR'", ",", "'>'", ")", "]", ":", "_tname", ",", "_tvalue", ",", "line", ",", "col", "=", "tokens", ".", "next", "(", ")", "if", "(", "_tname", ",", "_tvalue", ")", "==", "(", "'CHAR'", ",", "'='", ")", ":", "#", "# Operator is one of '<=', '>='", "#", "op", "=", "tvalue", "+", "_tvalue", "value", "=", "next_scalar", "(", "tokens", ",", "op", ")", "finish_attribute", "(", "tokens", ")", "return", "SelectorAttributeTest", "(", "property", ",", "op", ",", "value", ")", "else", ":", "#", "# Operator is one of '<', '>' and we popped a token too early", "#", "op", "=", "tvalue", "value", "=", "next_scalar", "(", "chain", "(", "[", "(", "_tname", ",", "_tvalue", ",", "line", ",", "col", ")", "]", ",", "tokens", ")", ",", "op", ")", "finish_attribute", "(", "tokens", ")", "return", "SelectorAttributeTest", "(", "property", ",", "op", ",", "value", ")", "elif", "(", "tname", ",", "tvalue", ")", "==", "(", "'CHAR'", ",", "'!'", ")", ":", "_tname", ",", "_tvalue", ",", "line", ",", "col", "=", "tokens", ".", "next", "(", ")", "if", "(", "_tname", ",", "_tvalue", ")", "==", "(", "'CHAR'", 
",", "'='", ")", ":", "#", "# Operator is '!='", "#", "op", "=", "tvalue", "+", "_tvalue", "value", "=", "next_scalar", "(", "tokens", ",", "op", ")", "finish_attribute", "(", "tokens", ")", "return", "SelectorAttributeTest", "(", "property", ",", "op", ",", "value", ")", "else", ":", "raise", "ParseException", "(", "'Malformed operator in attribute selector'", ",", "line", ",", "col", ")", "elif", "(", "tname", ",", "tvalue", ")", "==", "(", "'CHAR'", ",", "'='", ")", ":", "#", "# Operator is '='", "#", "op", "=", "tvalue", "value", "=", "next_scalar", "(", "tokens", ",", "op", ")", "finish_attribute", "(", "tokens", ")", "return", "SelectorAttributeTest", "(", "property", ",", "op", ",", "value", ")", "elif", "tname", "!=", "'S'", ":", "raise", "ParseException", "(", "'Missing operator in attribute selector'", ",", "line", ",", "col", ")", "elif", "tname", "!=", "'S'", ":", "raise", "ParseException", "(", "'Unexpected token in attribute selector'", ",", "line", ",", "col", ")", "raise", "ParseException", "(", "'Malformed attribute selector'", ",", "line", ",", "col", ")" ]
38.931034
19.232759
def traverse(obj, *path, **kwargs): """ Traverse the object we receive with the given path. Path items can be either strings or lists of strings (or any nested combination thereof). Behavior in given cases is laid out line by line below. """ if path: if isinstance(obj, list) or isinstance(obj, tuple): #If the current state of the object received is a #list, return a list of each of its children elements, #traversed with the current state of the string return [traverse(x, *path) for x in obj] elif isinstance(obj, dict): #If the current state of the object received is a #dictionary, do the following... if isinstance(path[0], list) or isinstance(path[0], tuple): #If the current top item in the path is a list, #return a dictionary with keys to each of the #items in the list, each traversed recursively. for branch in path[0]: if not isinstance(branch, basestring): raise TraversalError(obj, path[0]) return {name: traverse(obj[name], *path[1:], split=True) for name in path[0]} elif not isinstance(path[0], basestring): #If the key isn't a string (or a list; handled #previously), raise an exception. raise TraversalError(obj, path[0]) elif path[0] == '\\*': #If the key is a wildcard, return a dict containing #each item, traversed down recursively. return {name: traverse(item, *path[1:], split=True) for name, item in obj.items()} elif path[0] in obj: #The individual key is in the current object; #traverse it and return the result. return traverse(obj[path[0]], *path[1:]) else: #The individual key doesn't exist in the #current object; raise an error raise TraversalError(obj, path[0]) else: #If the current object isn't either a list or #a dict, then do one of two things: if kwargs.get('split', False): #If the previously-recursed operation caused #a split in a dict, just return the object; it's #been specifically called out, but it isn't #possible to recurse further. return obj else: #The object can't be traversed, and we didn't #specifically call it out to do something #else with. Raise an exception. raise TraversalError(obj, path[0]) else: #If there's no path left, then just return the #object that we received. return obj
[ "def", "traverse", "(", "obj", ",", "*", "path", ",", "*", "*", "kwargs", ")", ":", "if", "path", ":", "if", "isinstance", "(", "obj", ",", "list", ")", "or", "isinstance", "(", "obj", ",", "tuple", ")", ":", "#If the current state of the object received is a", "#list, return a list of each of its children elements,", "#traversed with the current state of the string", "return", "[", "traverse", "(", "x", ",", "*", "path", ")", "for", "x", "in", "obj", "]", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "#If the current state of the object received is a", "#dictionary, do the following...", "if", "isinstance", "(", "path", "[", "0", "]", ",", "list", ")", "or", "isinstance", "(", "path", "[", "0", "]", ",", "tuple", ")", ":", "#If the current top item in the path is a list,", "#return a dictionary with keys to each of the", "#items in the list, each traversed recursively.", "for", "branch", "in", "path", "[", "0", "]", ":", "if", "not", "isinstance", "(", "branch", ",", "basestring", ")", ":", "raise", "TraversalError", "(", "obj", ",", "path", "[", "0", "]", ")", "return", "{", "name", ":", "traverse", "(", "obj", "[", "name", "]", ",", "*", "path", "[", "1", ":", "]", ",", "split", "=", "True", ")", "for", "name", "in", "path", "[", "0", "]", "}", "elif", "not", "isinstance", "(", "path", "[", "0", "]", ",", "basestring", ")", ":", "#If the key isn't a string (or a list; handled", "#previously), raise an exception.", "raise", "TraversalError", "(", "obj", ",", "path", "[", "0", "]", ")", "elif", "path", "[", "0", "]", "==", "'\\\\*'", ":", "#If the key is a wildcard, return a dict containing", "#each item, traversed down recursively.", "return", "{", "name", ":", "traverse", "(", "item", ",", "*", "path", "[", "1", ":", "]", ",", "split", "=", "True", ")", "for", "name", ",", "item", "in", "obj", ".", "items", "(", ")", "}", "elif", "path", "[", "0", "]", "in", "obj", ":", "#The individual key is in the current object;", "#traverse it and return the result.", "return", "traverse", "(", "obj", "[", "path", "[", "0", "]", "]", ",", "*", "path", "[", "1", ":", "]", ")", "else", ":", "#The individual key doesn't exist in the", "#current object; raise an error", "raise", "TraversalError", "(", "obj", ",", "path", "[", "0", "]", ")", "else", ":", "#If the current object isn't either a list or", "#a dict, then do one of two things:", "if", "kwargs", ".", "get", "(", "'split'", ",", "False", ")", ":", "#If the previously-recursed operation caused", "#a split in a dict, just return the object; it's", "#been specifically called out, but it isn't", "#possible to recurse further.", "return", "obj", "else", ":", "#The object can't be traversed, and we didn't", "#specifically call it out to do something", "#else with. Raise an exception.", "raise", "TraversalError", "(", "obj", ",", "path", "[", "0", "]", ")", "else", ":", "#If there's no path left, then just return the", "#object that we received.", "return", "obj" ]
48.448276
16.793103
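A short, hedged usage sketch for traverse: it assumes Python 2.7 (the function relies on the basestring built-in and on keyword arguments after *args) and that TraversalError is the exception class defined alongside it in the source module.

doc = {
    'users': [
        {'name': 'ada', 'langs': ['py', 'c']},
        {'name': 'bob', 'langs': ['go']},
    ],
}

# Dict keys descend; lists are mapped over element by element.
print(traverse(doc, 'users', 'name'))             # ['ada', 'bob']

# A list as a path item splits the result into a dict of branches.
print(traverse(doc, 'users', ['name', 'langs']))
# [{'name': 'ada', 'langs': ['py', 'c']}, {'name': 'bob', 'langs': ['go']}]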
def write_single_coil(slave_id, address, value): """ Return ADU for Modbus function code 05: Write Single Coil. :param slave_id: Number of slave. :param address: Address of the coil to write to. :param value: Value to write to the coil (0/1 or True/False). :return: Byte array with ADU. """ function = WriteSingleCoil() function.address = address function.value = value return _create_request_adu(slave_id, function.request_pdu)
[ "def", "write_single_coil", "(", "slave_id", ",", "address", ",", "value", ")", ":", "function", "=", "WriteSingleCoil", "(", ")", "function", ".", "address", "=", "address", "function", ".", "value", "=", "value", "return", "_create_request_adu", "(", "slave_id", ",", "function", ".", "request_pdu", ")" ]
31
13.818182
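A hedged usage sketch: the signature matches uModbus-style client helpers, so the ADU returned here would be sent over a plain TCP socket to a Modbus server. The host, port, and coil address below are illustrative.

import socket

sock = socket.create_connection(('localhost', 502))
adu = write_single_coil(slave_id=1, address=9, value=1)  # bytes for FC 05
sock.sendall(adu)  # a full client would also read and validate the response
sock.close()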
def _gen_headers(self, bearer, url): ''' Generate headers, adding in the OAuth2 bearer token if present ''' headers = { "Accept": "*/*", "Accept-Encoding": "gzip, deflate", "Accept-Language": ("en;q=1, fr;q=0.9, de;q=0.8, ja;q=0.7, " + "nl;q=0.6, it;q=0.5"), "User-Agent": ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) " + "AppleWebKit/537.36 (KHTML, like Gecko) " + "Chrome/68.0.3440.106 Safari/537.36"), } if bearer: headers["Authorization"] = "Bearer {0}".format(bearer) if url == "https://api.robinhood.com/options/orders/": headers["Content-Type"] = "application/json; charset=utf-8" return headers
[ "def", "_gen_headers", "(", "self", ",", "bearer", ",", "url", ")", ":", "headers", "=", "{", "\"Accept\"", ":", "\"*/*\"", ",", "\"Accept-Encoding\"", ":", "\"gzip, deflate\"", ",", "\"Accept-Language\"", ":", "(", "\"en;q=1, fr;q=0.9, de;q=0.8, ja;q=0.7, \"", "+", "\"nl;q=0.6, it;q=0.5\"", ")", ",", "\"User-Agent\"", ":", "(", "\"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) \"", "+", "\"AppleWebKit/537.36 (KHTML, like Gecko) \"", "+", "\"Chrome/68.0.3440.106 Safari/537.36\"", ")", ",", "}", "if", "bearer", ":", "headers", "[", "\"Authorization\"", "]", "=", "\"Bearer {0}\"", ".", "format", "(", "bearer", ")", "if", "url", "==", "\"https://api.robinhood.com/options/orders/\"", ":", "headers", "[", "\"Content-Type\"", "]", "=", "\"application/json; charset=utf-8\"", "return", "headers" ]
42.578947
24.263158
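Because the body never reads self, the extracted helper can be exercised directly for illustration; in the library it is a private method on the client class, so this calling style is for demonstration only.

headers = _gen_headers(None, bearer='abc123',
                       url='https://api.robinhood.com/options/orders/')
assert headers['Authorization'] == 'Bearer abc123'
assert headers['Content-Type'] == 'application/json; charset=utf-8'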
def turbulent_Petukhov_Kirillov_Popov(Re=None, Pr=None, fd=None): r'''Calculates internal convection Nusselt number for turbulent flows in a pipe according to [2]_ and [3]_ as in [1]_. .. math:: Nu = \frac{(f/8)RePr}{C+12.7(f/8)^{1/2}(Pr^{2/3}-1)}\\ C = 1.07 + 900/Re - [0.63/(1+10Pr)] Parameters ---------- Re : float Reynolds number, [-] Pr : float Prandtl number, [-] fd : float Darcy friction factor [-] Returns ------- Nu : float Nusselt number, [-] Notes ----- Range according to [1]_ is 0.5 < Pr ≤ 10^6 and 4000 ≤ Re ≤ 5*10^6 Examples -------- >>> turbulent_Petukhov_Kirillov_Popov(Re=1E5, Pr=1.2, fd=0.0185) 250.11935088905105 References ---------- .. [1] Rohsenow, Warren and James Hartnett and Young Cho. Handbook of Heat Transfer, 3E. New York: McGraw-Hill, 1998. .. [2] B. S. Petukhov, and V. V. Kirillov, "The Problem of Heat Exchange in the Turbulent Flow of Liquids in Tubes," (Russian) Teploenergetika, (4): 63-68, 1958 .. [3] B. S. Petukhov and V. N. Popov, "Theoretical Calculation of Heat Exchange in Turbulent Flow in Tubes of an Incompressible Fluid with Variable Physical Properties," High Temp., (111): 69-83, 1963. ''' C = 1.07 + 900./Re - (0.63/(1. + 10.*Pr)) return (fd/8.)*Re*Pr/(C + 12.7*(fd/8.)**0.5*(Pr**(2/3.) - 1.))
[ "def", "turbulent_Petukhov_Kirillov_Popov", "(", "Re", "=", "None", ",", "Pr", "=", "None", ",", "fd", "=", "None", ")", ":", "C", "=", "1.07", "+", "900.", "/", "Re", "-", "(", "0.63", "/", "(", "1.", "+", "10.", "*", "Pr", ")", ")", "return", "(", "fd", "/", "8.", ")", "*", "Re", "*", "Pr", "/", "(", "C", "+", "12.7", "*", "(", "fd", "/", "8.", ")", "**", "0.5", "*", "(", "Pr", "**", "(", "2", "/", "3.", ")", "-", "1.", ")", ")" ]
31.704545
26.431818
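This correlation is simple enough to check in isolation; the following self-contained sketch reproduces the docstring example without the surrounding library.

def nu_pkp(Re, Pr, fd):
    # C = 1.07 + 900/Re - 0.63/(1 + 10 Pr)
    C = 1.07 + 900.0/Re - 0.63/(1.0 + 10.0*Pr)
    # Nu = (f/8) Re Pr / (C + 12.7 (f/8)^0.5 (Pr^(2/3) - 1))
    return (fd/8.0)*Re*Pr/(C + 12.7*(fd/8.0)**0.5*(Pr**(2.0/3.0) - 1.0))

print(nu_pkp(Re=1E5, Pr=1.2, fd=0.0185))  # ~250.11935088905105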
def requires_roles(roles): """ Decorator for :class:`ModelView` views that limits access to the specified roles. """ def inner(f): def is_available_here(context): return bool(roles.intersection(context.obj.current_roles)) def is_available(context): result = is_available_here(context) if result and hasattr(f, 'is_available'): # We passed, but we're wrapping another test, so ask there as well return f.is_available(context) return result @wraps(f) def wrapper(self, *args, **kwargs): add_auth_attribute('login_required', True) if not is_available_here(self): abort(403) return f(self, *args, **kwargs) wrapper.requires_roles = roles wrapper.is_available = is_available return wrapper return inner
[ "def", "requires_roles", "(", "roles", ")", ":", "def", "inner", "(", "f", ")", ":", "def", "is_available_here", "(", "context", ")", ":", "return", "bool", "(", "roles", ".", "intersection", "(", "context", ".", "obj", ".", "current_roles", ")", ")", "def", "is_available", "(", "context", ")", ":", "result", "=", "is_available_here", "(", "context", ")", "if", "result", "and", "hasattr", "(", "f", ",", "'is_available'", ")", ":", "# We passed, but we're wrapping another test, so ask there as well", "return", "f", ".", "is_available", "(", "context", ")", "return", "result", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "add_auth_attribute", "(", "'login_required'", ",", "True", ")", "if", "not", "is_available_here", "(", "self", ")", ":", "abort", "(", "403", ")", "return", "f", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "wrapper", ".", "requires_roles", "=", "roles", "wrapper", ".", "is_available", "=", "is_available", "return", "wrapper", "return", "inner" ]
32.814815
16.740741
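A hypothetical usage sketch; the view class and role names are illustrative. Note that the decorator calls roles.intersection(...), so it expects a set, and the decorated method's self must expose the current roles via self.obj.current_roles.

class DocumentView(ModelView):  # ModelView comes from the host framework

    @requires_roles({'admin', 'editor'})
    def edit(self):
        return 'edit form'

# Callers can probe access without running the view (and its abort(403)):
# DocumentView.edit.is_available(view_instance) evaluates the same check.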
def additions_remove(**kwargs): ''' Remove VirtualBox Guest Additions. First it tries to uninstall the Guest Additions by executing '/opt/VBoxGuestAdditions-VERSION/uninstall.run uninstall'. If that fails, it falls back to the CD attached by VirtualBox. CLI Example: .. code-block:: bash salt '*' vbox_guest.additions_remove salt '*' vbox_guest.additions_remove force=True :param force: force VirtualBox Guest Additions removing :type force: bool :return: True if VirtualBox Guest Additions were removed successfully else False ''' ret = False # ensure ret is defined when the kernel is not Linux kernel = __grains__.get('kernel', '') if kernel == 'Linux': ret = _additions_remove_linux() if not ret: ret = _additions_remove_use_cd(**kwargs) return ret
[ "def", "additions_remove", "(", "*", "*", "kwargs", ")", ":", "kernel", "=", "__grains__", ".", "get", "(", "'kernel'", ",", "''", ")", "if", "kernel", "==", "'Linux'", ":", "ret", "=", "_additions_remove_linux", "(", ")", "if", "not", "ret", ":", "ret", "=", "_additions_remove_use_cd", "(", "*", "*", "kwargs", ")", "return", "ret" ]
28.461538
22.538462
def create(cls, session, attributes=None, relationships=None): """Create an instance of this resource type. This should only be called from sub-classes. Args: session(Session): The session to create the resource in. attributes(dict): Any attributes that are valid for the given resource type. relationships(dict): Any relationships that are valid for the given resource type. Returns: Resource: An instance of a resource. """ resource_type = cls._resource_type() resource_path = cls._resource_path() url = session._build_url(resource_path) json = build_request_body(resource_type, None, attributes=attributes, relationships=relationships) process = cls._mk_one(session) return session.post(url, CB.json(201, process), json=json)
[ "def", "create", "(", "cls", ",", "session", ",", "attributes", "=", "None", ",", "relationships", "=", "None", ")", ":", "resource_type", "=", "cls", ".", "_resource_type", "(", ")", "resource_path", "=", "cls", ".", "_resource_path", "(", ")", "url", "=", "session", ".", "_build_url", "(", "resource_path", ")", "json", "=", "build_request_body", "(", "resource_type", ",", "None", ",", "attributes", "=", "attributes", ",", "relationships", "=", "relationships", ")", "process", "=", "cls", ".", "_mk_one", "(", "session", ")", "return", "session", ".", "post", "(", "url", ",", "CB", ".", "json", "(", "201", ",", "process", ")", ",", "json", "=", "json", ")" ]
33.035714
21.392857
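A hedged usage sketch; Sensor and its attribute names are illustrative stand-ins for a concrete Resource subclass in this JSON-API-style client.

sensor = Sensor.create(
    session,
    attributes={'name': 'boiler-room-temp'},
)
print(sensor)  # an instance built by cls._mk_one from the 201 response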
def data(self, *args): '''Add or retrieve data values for this :class:`Html`.''' data = self._data if not args: return data or {} result, adding = self._attrdata('data', *args) if adding: if data is None: self._extra['data'] = {} add = self._visitor.add_data for key, value in result.items(): add(self, key, value) return self else: return result
[ "def", "data", "(", "self", ",", "*", "args", ")", ":", "data", "=", "self", ".", "_data", "if", "not", "args", ":", "return", "data", "or", "{", "}", "result", ",", "adding", "=", "self", ".", "_attrdata", "(", "'data'", ",", "*", "args", ")", "if", "adding", ":", "if", "data", "is", "None", ":", "self", ".", "_extra", "[", "'data'", "]", "=", "{", "}", "add", "=", "self", ".", "_visitor", ".", "add_data", "for", "key", ",", "value", "in", "result", ".", "items", "(", ")", ":", "add", "(", "self", ",", "key", ",", "value", ")", "return", "self", "else", ":", "return", "result" ]
32.266667
13.6
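A hedged sketch of the getter/setter behavior; html is assumed to be an Html instance, and the one- and two-argument forms are inferred from _attrdata, whose code is not shown here.

html.data('x', 1)      # assumed set form: stores via the visitor, returns self
print(html.data())     # no args: the stored data dict, or {}
print(html.data('x'))  # assumed get form: _attrdata returns without adding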
def get_logger(self): """ Returns the standard logger """ if Global.LOGGER: Global.LOGGER.debug('configuring a logger') if self._logger_instance is not None: return self._logger_instance self._logger_instance = logging.getLogger("flowsLogger") self._logger_instance.setLevel(logging.DEBUG) log_format = '%(asctime)s - [%(levelname)s]|%(thread)d\t%(message)s' log_date_format = '%Y-%m-%d %H:%M:%S' formatter = logging.Formatter(log_format, log_date_format) new_log_stream_handler = logging.StreamHandler() new_log_stream_handler.setFormatter(formatter) new_log_stream_handler.setLevel(logging.INFO) self._logger_instance.addHandler(new_log_stream_handler) return self._logger_instance
[ "def", "get_logger", "(", "self", ")", ":", "if", "Global", ".", "LOGGER", ":", "Global", ".", "LOGGER", ".", "debug", "(", "'configuring a logger'", ")", "if", "self", ".", "_logger_instance", "is", "not", "None", ":", "return", "self", ".", "_logger_instance", "self", ".", "_logger_instance", "=", "logging", ".", "getLogger", "(", "\"flowsLogger\"", ")", "self", ".", "_logger_instance", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "log_format", "=", "'%(asctime)s - [%(levelname)s]|%(thread)d\\t%(message)s'", "log_date_format", "=", "'%Y-%m-%d %H:%M:%S'", "formatter", "=", "logging", ".", "Formatter", "(", "log_format", ",", "log_date_format", ")", "new_log_stream_handler", "=", "logging", ".", "StreamHandler", "(", ")", "new_log_stream_handler", ".", "setFormatter", "(", "formatter", ")", "new_log_stream_handler", ".", "setLevel", "(", "logging", ".", "INFO", ")", "self", ".", "_logger_instance", ".", "addHandler", "(", "new_log_stream_handler", ")", "return", "self", ".", "_logger_instance" ]
35.217391
18.869565
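A short usage sketch, assuming cfg is an instance of the class defining get_logger. The logger itself is set to DEBUG, but the attached stream handler filters at INFO, so debug records are swallowed; the memoization on _logger_instance also means repeated calls do not stack duplicate handlers.

logger = cfg.get_logger()
logger.info('pipeline started')    # emitted by the INFO-level handler
logger.debug('verbose detail')     # passes the logger, dropped by the handler
assert cfg.get_logger() is logger  # same instance on every later call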
def diff_move(self,v,new_comm): """ Calculate the difference in the quality function if node ``v`` is moved to community ``new_comm``. Parameters ---------- v The node to move. new_comm The community to move to. Returns ------- float Difference in quality function. Notes ----- The difference returned by diff_move should be equivalent to first determining the quality of the partition, then calling move_node, and then determining again the quality of the partition and looking at the difference. In other words >>> partition = louvain.find_partition(ig.Graph.Famous('Zachary'), ... louvain.ModularityVertexPartition) >>> diff = partition.diff_move(v=0, new_comm=0) >>> q1 = partition.quality() >>> partition.move_node(v=0, new_comm=0) >>> q2 = partition.quality() >>> round(diff, 10) == round(q2 - q1, 10) True .. warning:: Only derived classes provide actual implementations, the base class provides no implementation for this function. """ return _c_louvain._MutableVertexPartition_diff_move(self._partition, v, new_comm)
[ "def", "diff_move", "(", "self", ",", "v", ",", "new_comm", ")", ":", "return", "_c_louvain", ".", "_MutableVertexPartition_diff_move", "(", "self", ".", "_partition", ",", "v", ",", "new_comm", ")" ]
30.552632
24.552632
def svg_to_path(file_obj, file_type=None): """ Load an SVG file into a Path2D object. Parameters ----------- file_obj : open file object Contains SVG data file_type: None Not used Returns ----------- loaded : dict With kwargs for Path2D constructor """ def element_transform(e, max_depth=100): """ Find a transformation matrix for an XML element. """ matrices = [] current = e for i in range(max_depth): if 'transform' in current.attrib: mat = transform_to_matrices(current.attrib['transform']) matrices.extend(mat) # cached[current] = mat current = current.getparent() if current is None: break if len(matrices) == 0: return np.eye(3) elif len(matrices) == 1: return matrices[0] else: return util.multi_dot(matrices[::-1]) # first parse the XML xml = etree.fromstring(file_obj.read()) # store paths and transforms as # (path string, 3x3 matrix) paths = [] # store every path element for element in xml.iter('{*}path'): paths.append((element.attrib['d'], element_transform(element))) return _svg_path_convert(paths)
[ "def", "svg_to_path", "(", "file_obj", ",", "file_type", "=", "None", ")", ":", "def", "element_transform", "(", "e", ",", "max_depth", "=", "100", ")", ":", "\"\"\"\n Find a transformation matrix for an XML element.\n \"\"\"", "matrices", "=", "[", "]", "current", "=", "e", "for", "i", "in", "range", "(", "max_depth", ")", ":", "if", "'transform'", "in", "current", ".", "attrib", ":", "mat", "=", "transform_to_matrices", "(", "current", ".", "attrib", "[", "'transform'", "]", ")", "matrices", ".", "extend", "(", "mat", ")", "# cached[current] = mat", "current", "=", "current", ".", "getparent", "(", ")", "if", "current", "is", "None", ":", "break", "if", "len", "(", "matrices", ")", "==", "0", ":", "return", "np", ".", "eye", "(", "3", ")", "elif", "len", "(", "matrices", ")", "==", "1", ":", "return", "matrices", "[", "0", "]", "else", ":", "return", "util", ".", "multi_dot", "(", "matrices", "[", ":", ":", "-", "1", "]", ")", "# first parse the XML", "xml", "=", "etree", ".", "fromstring", "(", "file_obj", ".", "read", "(", ")", ")", "# store paths and transforms as", "# (path string, 3x3 matrix)", "paths", "=", "[", "]", "# store every path element", "for", "element", "in", "xml", ".", "iter", "(", "'{*}path'", ")", ":", "paths", ".", "append", "(", "(", "element", ".", "attrib", "[", "'d'", "]", ",", "element_transform", "(", "element", ")", ")", ")", "return", "_svg_path_convert", "(", "paths", ")" ]
25.076923
15.846154
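A hedged usage sketch: the loader returns constructor kwargs rather than a finished object, so the final step assumes the returned dict matches Path2D's signature (as the docstring states).

with open('drawing.svg', 'rb') as f:  # bytes keep lxml's fromstring happy
    kwargs = svg_to_path(f)
path = Path2D(**kwargs)  # Path2D comes from the surrounding library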
def from_keras(cls, model, bounds, input_shape=None, channel_axis=3, preprocessing=(0, 1)): """Alternative constructor for a TensorFlowModel that accepts a `tf.keras.Model` instance. Parameters ---------- model : `tensorflow.keras.Model` A `tensorflow.keras.Model` that accepts a single input tensor and returns a single output tensor representing logits. bounds : tuple Tuple of lower and upper bound for the pixel values, usually (0, 1) or (0, 255). input_shape : tuple The shape of a single input, e.g. (28, 28, 1) for MNIST. If None, tries to get the shape from the model's input_shape attribute. channel_axis : int The index of the axis that represents color channels. preprocessing: 2-element tuple with floats or numpy arrays Elementwise preprocessing of the input; the first element of preprocessing is subtracted from the input, which is then divided by the second element. """ import tensorflow as tf if input_shape is None: try: input_shape = model.input_shape[1:] except AttributeError: raise ValueError( 'Please specify input_shape manually or ' 'provide a model with an input_shape attribute') with tf.keras.backend.get_session().as_default(): inputs = tf.placeholder(tf.float32, (None,) + input_shape) logits = model(inputs) return cls(inputs, logits, bounds=bounds, channel_axis=channel_axis, preprocessing=preprocessing)
[ "def", "from_keras", "(", "cls", ",", "model", ",", "bounds", ",", "input_shape", "=", "None", ",", "channel_axis", "=", "3", ",", "preprocessing", "=", "(", "0", ",", "1", ")", ")", ":", "import", "tensorflow", "as", "tf", "if", "input_shape", "is", "None", ":", "try", ":", "input_shape", "=", "model", ".", "input_shape", "[", "1", ":", "]", "except", "AttributeError", ":", "raise", "ValueError", "(", "'Please specify input_shape manually or '", "'provide a model with an input_shape attribute'", ")", "with", "tf", ".", "keras", ".", "backend", ".", "get_session", "(", ")", ".", "as_default", "(", ")", ":", "inputs", "=", "tf", ".", "placeholder", "(", "tf", ".", "float32", ",", "(", "None", ",", ")", "+", "input_shape", ")", "logits", "=", "model", "(", "inputs", ")", "return", "cls", "(", "inputs", ",", "logits", ",", "bounds", "=", "bounds", ",", "channel_axis", "=", "channel_axis", ",", "preprocessing", "=", "preprocessing", ")" ]
45.263158
18.684211
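A hedged usage sketch; it assumes TF1-style graph mode, which the constructor itself relies on (tf.placeholder and tf.keras.backend.get_session). The model is a stand-in that outputs raw logits, as the docstring requires.

import tensorflow as tf

keras_model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28, 1)),
    tf.keras.layers.Dense(10),  # logits; no softmax layer
])

fmodel = TensorFlowModel.from_keras(
    keras_model,
    bounds=(0, 1),
    preprocessing=(0, 1),  # identity preprocessing
)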
def parse_args(self): """Parse CLI args.""" Args(self.tcex.parser) self.args = self.tcex.args
[ "def", "parse_args", "(", "self", ")", ":", "Args", "(", "self", ".", "tcex", ".", "parser", ")", "self", ".", "args", "=", "self", ".", "tcex", ".", "args" ]
28.5
8.75