Dataset columns:
  text          string   (lengths 89 to 104k)
  code_tokens   list
  avg_line_len  float64  (7.91 to 980)
  score         float64  (0 to 630)

Each row below gives the function text, its code_tokens, and its avg_line_len and score values.
def highs(self, assets, dt):
    """
    The high field's aggregation returns the largest high seen between
    the market open and the current dt.
    If there has been no data on or before the `dt` the high is `nan`.

    Returns
    -------
    np.array with dtype=float64, in order of assets parameter.
    """
    market_open, prev_dt, dt_value, entries = self._prelude(dt, 'high')

    highs = []
    session_label = self._trading_calendar.minute_to_session_label(dt)

    for asset in assets:
        if not asset.is_alive_for_session(session_label):
            highs.append(np.NaN)
            continue

        if prev_dt is None:
            val = self._minute_reader.get_value(asset, dt, 'high')
            entries[asset] = (dt_value, val)
            highs.append(val)
            continue
        else:
            try:
                last_visited_dt, last_max = entries[asset]
                if last_visited_dt == dt_value:
                    highs.append(last_max)
                    continue
                elif last_visited_dt == prev_dt:
                    curr_val = self._minute_reader.get_value(
                        asset, dt, 'high')
                    if pd.isnull(curr_val):
                        val = last_max
                    elif pd.isnull(last_max):
                        val = curr_val
                    else:
                        val = max(last_max, curr_val)
                    entries[asset] = (dt_value, val)
                    highs.append(val)
                    continue
                else:
                    after_last = pd.Timestamp(
                        last_visited_dt + self._one_min, tz='UTC')
                    window = self._minute_reader.load_raw_arrays(
                        ['high'],
                        after_last,
                        dt,
                        [asset],
                    )[0].T
                    val = np.nanmax(np.append(window, last_max))
                    entries[asset] = (dt_value, val)
                    highs.append(val)
                    continue
            except KeyError:
                window = self._minute_reader.load_raw_arrays(
                    ['high'],
                    market_open,
                    dt,
                    [asset],
                )[0].T
                val = np.nanmax(window)
                entries[asset] = (dt_value, val)
                highs.append(val)
                continue
    return np.array(highs)
[ "def", "highs", "(", "self", ",", "assets", ",", "dt", ")", ":", "market_open", ",", "prev_dt", ",", "dt_value", ",", "entries", "=", "self", ".", "_prelude", "(", "dt", ",", "'high'", ")", "highs", "=", "[", "]", "session_label", "=", "self", ".", "_trading_calendar", ".", "minute_to_session_label", "(", "dt", ")", "for", "asset", "in", "assets", ":", "if", "not", "asset", ".", "is_alive_for_session", "(", "session_label", ")", ":", "highs", ".", "append", "(", "np", ".", "NaN", ")", "continue", "if", "prev_dt", "is", "None", ":", "val", "=", "self", ".", "_minute_reader", ".", "get_value", "(", "asset", ",", "dt", ",", "'high'", ")", "entries", "[", "asset", "]", "=", "(", "dt_value", ",", "val", ")", "highs", ".", "append", "(", "val", ")", "continue", "else", ":", "try", ":", "last_visited_dt", ",", "last_max", "=", "entries", "[", "asset", "]", "if", "last_visited_dt", "==", "dt_value", ":", "highs", ".", "append", "(", "last_max", ")", "continue", "elif", "last_visited_dt", "==", "prev_dt", ":", "curr_val", "=", "self", ".", "_minute_reader", ".", "get_value", "(", "asset", ",", "dt", ",", "'high'", ")", "if", "pd", ".", "isnull", "(", "curr_val", ")", ":", "val", "=", "last_max", "elif", "pd", ".", "isnull", "(", "last_max", ")", ":", "val", "=", "curr_val", "else", ":", "val", "=", "max", "(", "last_max", ",", "curr_val", ")", "entries", "[", "asset", "]", "=", "(", "dt_value", ",", "val", ")", "highs", ".", "append", "(", "val", ")", "continue", "else", ":", "after_last", "=", "pd", ".", "Timestamp", "(", "last_visited_dt", "+", "self", ".", "_one_min", ",", "tz", "=", "'UTC'", ")", "window", "=", "self", ".", "_minute_reader", ".", "load_raw_arrays", "(", "[", "'high'", "]", ",", "after_last", ",", "dt", ",", "[", "asset", "]", ",", ")", "[", "0", "]", ".", "T", "val", "=", "np", ".", "nanmax", "(", "np", ".", "append", "(", "window", ",", "last_max", ")", ")", "entries", "[", "asset", "]", "=", "(", "dt_value", ",", "val", ")", "highs", ".", "append", "(", "val", ")", "continue", "except", "KeyError", ":", "window", "=", "self", ".", "_minute_reader", ".", "load_raw_arrays", "(", "[", "'high'", "]", ",", "market_open", ",", "dt", ",", "[", "asset", "]", ",", ")", "[", "0", "]", ".", "T", "val", "=", "np", ".", "nanmax", "(", "window", ")", "entries", "[", "asset", "]", "=", "(", "dt_value", ",", "val", ")", "highs", ".", "append", "(", "val", ")", "continue", "return", "np", ".", "array", "(", "highs", ")" ]
avg_line_len: 39.264706, score: 15.029412
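A minimal, self-contained sketch of the incremental running-maximum idea used in `highs` above (numpy only; the bar values are made up, and only the catch-up update `np.nanmax(np.append(window, last_max))` is reproduced):

import numpy as np

# Cache the running maximum of the 'high' column and fold each new
# window of minute bars into it, ignoring NaNs as in the method above.
last_max = np.nan
for window in (np.array([np.nan, 10.2]), np.array([10.5]), np.array([np.nan])):
    last_max = np.nanmax(np.append(window, last_max))
    print(last_max)  # 10.2, then 10.5, then 10.5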
def update_context(cls, base_context, str_or_dict, template_path=None):
    """Helper method to structure initial message context data.

    NOTE: updates `base_context` inplace.

    :param dict base_context: context dict to update
    :param dict, str str_or_dict: text representing a message, or a dict to
        be placed into message context.
    :param str template_path: template path to be used for message rendering
    """
    if isinstance(str_or_dict, dict):
        base_context.update(str_or_dict)
        base_context['use_tpl'] = True
    else:
        base_context[cls.SIMPLE_TEXT_ID] = str_or_dict

    if cls.SIMPLE_TEXT_ID in str_or_dict:
        base_context['use_tpl'] = False

    base_context['tpl'] = template_path
[ "def", "update_context", "(", "cls", ",", "base_context", ",", "str_or_dict", ",", "template_path", "=", "None", ")", ":", "if", "isinstance", "(", "str_or_dict", ",", "dict", ")", ":", "base_context", ".", "update", "(", "str_or_dict", ")", "base_context", "[", "'use_tpl'", "]", "=", "True", "else", ":", "base_context", "[", "cls", ".", "SIMPLE_TEXT_ID", "]", "=", "str_or_dict", "if", "cls", ".", "SIMPLE_TEXT_ID", "in", "str_or_dict", ":", "base_context", "[", "'use_tpl'", "]", "=", "False", "base_context", "[", "'tpl'", "]", "=", "template_path" ]
avg_line_len: 40.526316, score: 20.315789
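A small usage sketch of the helper above; the `Dummy` class and its `SIMPLE_TEXT_ID` value are hypothetical stand-ins for the real message class, and the body is inlined so the example runs on its own:

class Dummy:
    SIMPLE_TEXT_ID = 'stext_'

    @classmethod
    def update_context(cls, base_context, str_or_dict, template_path=None):
        # Same logic as the helper above, reproduced for a standalone run.
        if isinstance(str_or_dict, dict):
            base_context.update(str_or_dict)
            base_context['use_tpl'] = True
        else:
            base_context[cls.SIMPLE_TEXT_ID] = str_or_dict
        if cls.SIMPLE_TEXT_ID in str_or_dict:
            base_context['use_tpl'] = False
        base_context['tpl'] = template_path

ctx = {}
Dummy.update_context(ctx, 'hello there', template_path='email.html')
print(ctx)  # {'stext_': 'hello there', 'tpl': 'email.html'}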
def _find_players(self, boxscore):
    """
    Find all players for each team.

    Iterate through every player for both teams as found in the boxscore
    tables and create a list of instances of the BoxscorePlayer class for
    each player. Return lists of player instances comprising the away and
    home team players, respectively.

    Parameters
    ----------
    boxscore : PyQuery object
        A PyQuery object containing all of the HTML data from the boxscore.

    Returns
    -------
    tuple
        Returns a ``tuple`` in the format (away_players, home_players)
        where each element is a list of player instances for the away and
        home teams, respectively.
    """
    player_dict = {}
    tables = self._find_boxscore_tables(boxscore)
    for table in tables:
        player_dict = self._extract_player_stats(table, player_dict)
    away_players, home_players = self._instantiate_players(player_dict)
    return away_players, home_players
[ "def", "_find_players", "(", "self", ",", "boxscore", ")", ":", "player_dict", "=", "{", "}", "tables", "=", "self", ".", "_find_boxscore_tables", "(", "boxscore", ")", "for", "table", "in", "tables", ":", "player_dict", "=", "self", ".", "_extract_player_stats", "(", "table", ",", "player_dict", ")", "away_players", ",", "home_players", "=", "self", ".", "_instantiate_players", "(", "player_dict", ")", "return", "away_players", ",", "home_players" ]
avg_line_len: 37.035714, score: 22.392857
def set_objective_sense(self, sense):
    """Set type of problem (maximize or minimize)."""
    if sense not in (ObjectiveSense.Minimize, ObjectiveSense.Maximize):
        raise ValueError('Invalid objective sense')

    self._p.ModelSense = self.OBJ_SENSE_MAP[sense]
[ "def", "set_objective_sense", "(", "self", ",", "sense", ")", ":", "if", "sense", "not", "in", "(", "ObjectiveSense", ".", "Minimize", ",", "ObjectiveSense", ".", "Maximize", ")", ":", "raise", "ValueError", "(", "'Invalid objective sense'", ")", "self", ".", "_p", ".", "ModelSense", "=", "self", ".", "OBJ_SENSE_MAP", "[", "sense", "]" ]
avg_line_len: 39.714286, score: 21
def addvPPfunc(self, solution):
    '''
    Adds the marginal marginal value function to an existing solution, so
    that the next solver can evaluate vPP and thus use cubic interpolation.

    Parameters
    ----------
    solution : ConsumerSolution
        The solution to this single period problem, which must include the
        consumption function.

    Returns
    -------
    solution : ConsumerSolution
        The same solution passed as input, but with the marginal marginal
        value function for this period added as the attribute vPPfunc.
    '''
    vPPfuncNow = MargMargValueFunc2D(solution.cFunc, self.CRRA)
    solution.vPPfunc = vPPfuncNow
    return solution
[ "def", "addvPPfunc", "(", "self", ",", "solution", ")", ":", "vPPfuncNow", "=", "MargMargValueFunc2D", "(", "solution", ".", "cFunc", ",", "self", ".", "CRRA", ")", "solution", ".", "vPPfunc", "=", "vPPfuncNow", "return", "solution" ]
avg_line_len: 37, score: 24.8
def _move_to_desired_location(self):
    """Animate movement to desired location on map."""
    self._next_update = 100000
    x_start = self._convert_longitude(self._longitude)
    y_start = self._convert_latitude(self._latitude)
    x_end = self._convert_longitude(self._desired_longitude)
    y_end = self._convert_latitude(self._desired_latitude)
    if sqrt((x_end - x_start) ** 2 + (y_end - y_start) ** 2) > _START_SIZE // 4:
        self._zoom_map(True)
    elif self._zoom != self._desired_zoom:
        self._zoom_map(self._desired_zoom < self._zoom)
    if self._longitude != self._desired_longitude:
        self._next_update = 1
        if self._desired_longitude < self._longitude:
            self._longitude = max(
                self._longitude - 360 / 2 ** self._zoom / self._size * 2,
                self._desired_longitude)
        else:
            self._longitude = min(
                self._longitude + 360 / 2 ** self._zoom / self._size * 2,
                self._desired_longitude)
    if self._latitude != self._desired_latitude:
        self._next_update = 1
        if self._desired_latitude < self._latitude:
            self._latitude = max(self._inc_lat(self._latitude, 2),
                                 self._desired_latitude)
        else:
            self._latitude = min(self._inc_lat(self._latitude, -2),
                                 self._desired_latitude)
    if self._next_update == 1:
        self._updated.set()
[ "def", "_move_to_desired_location", "(", "self", ")", ":", "self", ".", "_next_update", "=", "100000", "x_start", "=", "self", ".", "_convert_longitude", "(", "self", ".", "_longitude", ")", "y_start", "=", "self", ".", "_convert_latitude", "(", "self", ".", "_latitude", ")", "x_end", "=", "self", ".", "_convert_longitude", "(", "self", ".", "_desired_longitude", ")", "y_end", "=", "self", ".", "_convert_latitude", "(", "self", ".", "_desired_latitude", ")", "if", "sqrt", "(", "(", "x_end", "-", "x_start", ")", "**", "2", "+", "(", "y_end", "-", "y_start", ")", "**", "2", ")", ">", "_START_SIZE", "//", "4", ":", "self", ".", "_zoom_map", "(", "True", ")", "elif", "self", ".", "_zoom", "!=", "self", ".", "_desired_zoom", ":", "self", ".", "_zoom_map", "(", "self", ".", "_desired_zoom", "<", "self", ".", "_zoom", ")", "if", "self", ".", "_longitude", "!=", "self", ".", "_desired_longitude", ":", "self", ".", "_next_update", "=", "1", "if", "self", ".", "_desired_longitude", "<", "self", ".", "_longitude", ":", "self", ".", "_longitude", "=", "max", "(", "self", ".", "_longitude", "-", "360", "/", "2", "**", "self", ".", "_zoom", "/", "self", ".", "_size", "*", "2", ",", "self", ".", "_desired_longitude", ")", "else", ":", "self", ".", "_longitude", "=", "min", "(", "self", ".", "_longitude", "+", "360", "/", "2", "**", "self", ".", "_zoom", "/", "self", ".", "_size", "*", "2", ",", "self", ".", "_desired_longitude", ")", "if", "self", ".", "_latitude", "!=", "self", ".", "_desired_latitude", ":", "self", ".", "_next_update", "=", "1", "if", "self", ".", "_desired_latitude", "<", "self", ".", "_latitude", ":", "self", ".", "_latitude", "=", "max", "(", "self", ".", "_inc_lat", "(", "self", ".", "_latitude", ",", "2", ")", ",", "self", ".", "_desired_latitude", ")", "else", ":", "self", ".", "_latitude", "=", "min", "(", "self", ".", "_inc_lat", "(", "self", ".", "_latitude", ",", "-", "2", ")", ",", "self", ".", "_desired_latitude", ")", "if", "self", ".", "_next_update", "==", "1", ":", "self", ".", "_updated", ".", "set", "(", ")" ]
avg_line_len: 54.62963, score: 20.851852
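A standalone sketch of the longitude-stepping arithmetic used above; the step size 360 / 2 ** zoom / size * 2 is taken from the method, while the zoom, size and coordinates are invented for the example:

# Step a longitude toward a target without overshooting, using a
# zoom-dependent step size as in _move_to_desired_location above.
zoom, size = 4, 80
step = 360 / 2 ** zoom / size * 2   # 0.5625 degrees per update here

longitude, desired = -10.0, -12.0
while longitude != desired:
    if desired < longitude:
        longitude = max(longitude - step, desired)
    else:
        longitude = min(longitude + step, desired)
print(longitude)  # -12.0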
def add_redirect(self, name, proto, host_ip, host_port, guest_ip, guest_port):
    """Adds a new NAT port-forwarding rule.

    in name of type str
        The name of the rule. An empty name is acceptable, in which case
        the NAT engine auto-generates one using the other parameters.

    in proto of type :class:`NATProtocol`
        Protocol handled with the rule.

    in host_ip of type str
        IP of the host interface to which the rule should apply. An empty
        ip address is acceptable, in which case the NAT engine binds the
        handling socket to any interface.

    in host_port of type int
        The port number to listen on.

    in guest_ip of type str
        The IP address of the guest which the NAT engine will forward
        matching packets to. An empty IP address is acceptable, in which
        case the NAT engine will forward packets to the first DHCP lease
        (x.x.x.15).

    in guest_port of type int
        The port number to forward.
    """
    if not isinstance(name, basestring):
        raise TypeError("name can only be an instance of type basestring")
    if not isinstance(proto, NATProtocol):
        raise TypeError("proto can only be an instance of type NATProtocol")
    if not isinstance(host_ip, basestring):
        raise TypeError("host_ip can only be an instance of type basestring")
    if not isinstance(host_port, baseinteger):
        raise TypeError("host_port can only be an instance of type baseinteger")
    if not isinstance(guest_ip, basestring):
        raise TypeError("guest_ip can only be an instance of type basestring")
    if not isinstance(guest_port, baseinteger):
        raise TypeError("guest_port can only be an instance of type baseinteger")
    self._call("addRedirect",
               in_p=[name, proto, host_ip, host_port, guest_ip, guest_port])
[ "def", "add_redirect", "(", "self", ",", "name", ",", "proto", ",", "host_ip", ",", "host_port", ",", "guest_ip", ",", "guest_port", ")", ":", "if", "not", "isinstance", "(", "name", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"name can only be an instance of type basestring\"", ")", "if", "not", "isinstance", "(", "proto", ",", "NATProtocol", ")", ":", "raise", "TypeError", "(", "\"proto can only be an instance of type NATProtocol\"", ")", "if", "not", "isinstance", "(", "host_ip", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"host_ip can only be an instance of type basestring\"", ")", "if", "not", "isinstance", "(", "host_port", ",", "baseinteger", ")", ":", "raise", "TypeError", "(", "\"host_port can only be an instance of type baseinteger\"", ")", "if", "not", "isinstance", "(", "guest_ip", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"guest_ip can only be an instance of type basestring\"", ")", "if", "not", "isinstance", "(", "guest_port", ",", "baseinteger", ")", ":", "raise", "TypeError", "(", "\"guest_port can only be an instance of type baseinteger\"", ")", "self", ".", "_call", "(", "\"addRedirect\"", ",", "in_p", "=", "[", "name", ",", "proto", ",", "host_ip", ",", "host_port", ",", "guest_ip", ",", "guest_port", "]", ")" ]
avg_line_len: 48.025, score: 25.325
def is_url(value, **kwargs):
    """Indicate whether ``value`` is a URL.

    .. note::

      URL validation is...complicated. The methodology that we have adopted
      here is *generally* compliant with
      `RFC 1738 <https://tools.ietf.org/html/rfc1738>`_,
      `RFC 6761 <https://tools.ietf.org/html/rfc6761>`_,
      `RFC 2181 <https://tools.ietf.org/html/rfc2181>`_ and uses a combination
      of string parsing and regular expressions. This approach ensures more
      complete coverage for unusual edge cases, while still letting us use
      regular expressions that perform quickly.

    :param value: The value to evaluate.

    :param allow_special_ips: If ``True``, will succeed when validating special
      IP addresses, such as loopback IPs like ``127.0.0.1`` or ``0.0.0.0``.
      If ``False``, will fail if ``value`` is a special IP address.
      Defaults to ``False``.
    :type allow_special_ips: :class:`bool <python:bool>`

    :returns: ``True`` if ``value`` is valid, ``False`` if it is not.
    :rtype: :class:`bool <python:bool>`

    :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or
      duplicates keyword parameters passed to the underlying validator
    """
    try:
        value = validators.url(value, **kwargs)
    except SyntaxError as error:
        raise error
    except Exception:
        return False

    return True
[ "def", "is_url", "(", "value", ",", "*", "*", "kwargs", ")", ":", "try", ":", "value", "=", "validators", ".", "url", "(", "value", ",", "*", "*", "kwargs", ")", "except", "SyntaxError", "as", "error", ":", "raise", "error", "except", "Exception", ":", "return", "False", "return", "True" ]
avg_line_len: 36.540541, score: 25.72973
def compute_context_vector(self, prev_state, inputs, precomputed_values=None, mask=None):
    """
    Compute the context vector with soft attention.
    """
    precomputed_values = precomputed_values if precomputed_values else self.precompute(inputs)
    align_weights = self.compute_alignments(prev_state, precomputed_values, mask)
    context_vector = T.sum(align_weights[:, :, None] * inputs, axis=1)
    return context_vector
[ "def", "compute_context_vector", "(", "self", ",", "prev_state", ",", "inputs", ",", "precomputed_values", "=", "None", ",", "mask", "=", "None", ")", ":", "precomputed_values", "=", "precomputed_values", "if", "precomputed_values", "else", "self", ".", "precompute", "(", "inputs", ")", "align_weights", "=", "self", ".", "compute_alignments", "(", "prev_state", ",", "precomputed_values", ",", "mask", ")", "context_vector", "=", "T", ".", "sum", "(", "align_weights", "[", ":", ",", ":", ",", "None", "]", "*", "inputs", ",", "axis", "=", "1", ")", "return", "context_vector" ]
avg_line_len: 56.5, score: 26.5
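A numpy sketch of the weighted-sum step: the Theano expression T.sum(align_weights[:, :, None] * inputs, axis=1) above does the same thing, and the shapes here are made up:

import numpy as np

batch, timesteps, dim = 2, 4, 3
inputs = np.random.rand(batch, timesteps, dim)

# Softmax alignment weights over the time axis, one row per batch element.
scores = np.random.rand(batch, timesteps)
align_weights = np.exp(scores) / np.exp(scores).sum(axis=1, keepdims=True)

# Broadcast the weights over the feature dimension and sum over time:
# each context vector is a convex combination of the input timesteps.
context_vector = np.sum(align_weights[:, :, None] * inputs, axis=1)
print(context_vector.shape)  # (2, 3)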
def delete_collection_api_service(self, **kwargs):
    """
    delete collection of APIService
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_collection_api_service(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str pretty: If 'true', then the output is pretty printed.
    :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1Status
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_collection_api_service_with_http_info(**kwargs)
    else:
        (data) = self.delete_collection_api_service_with_http_info(**kwargs)
        return data
[ "def", "delete_collection_api_service", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "delete_collection_api_service_with_http_info", "(", "*", "*", "kwargs", ")", "else", ":", "(", "data", ")", "=", "self", ".", "delete_collection_api_service_with_http_info", "(", "*", "*", "kwargs", ")", "return", "data" ]
avg_line_len: 167.481481, score: 138.740741
def _add_membership_multicast_socket(self):
    """Make membership request to multicast

    :rtype: None
    """
    self._membership_request = socket.inet_aton(self._multicast_group) \
        + socket.inet_aton(self._multicast_ip)

    # Send add membership request to socket
    # See http://www.tldp.org/HOWTO/Multicast-HOWTO-6.html
    # for explanation of sockopts
    self._multicast_socket.setsockopt(
        socket.IPPROTO_IP,
        socket.IP_ADD_MEMBERSHIP,
        self._membership_request
    )
[ "def", "_add_membership_multicast_socket", "(", "self", ")", ":", "self", ".", "_membership_request", "=", "socket", ".", "inet_aton", "(", "self", ".", "_multicast_group", ")", "+", "socket", ".", "inet_aton", "(", "self", ".", "_multicast_ip", ")", "# Send add membership request to socket", "# See http://www.tldp.org/HOWTO/Multicast-HOWTO-6.html", "# for explanation of sockopts", "self", ".", "_multicast_socket", ".", "setsockopt", "(", "socket", ".", "IPPROTO_IP", ",", "socket", ".", "IP_ADD_MEMBERSHIP", ",", "self", ".", "_membership_request", ")" ]
avg_line_len: 32.647059, score: 13.823529
def addSubEditor(self, subEditor, isFocusProxy=False):
    """ Adds a sub editor to the layout (at the right but before the reset button)

        Will add the necessary event filter to handle tabs and sets the strong
        focus so that events will not propagate to the tree view.

        If isFocusProxy is True the sub editor will be the focus proxy of the CTI.
    """
    self.hBoxLayout.insertWidget(len(self._subEditors), subEditor)
    self._subEditors.append(subEditor)

    subEditor.installEventFilter(self)
    subEditor.setFocusPolicy(Qt.StrongFocus)

    if isFocusProxy:
        self.setFocusProxy(subEditor)

    return subEditor
[ "def", "addSubEditor", "(", "self", ",", "subEditor", ",", "isFocusProxy", "=", "False", ")", ":", "self", ".", "hBoxLayout", ".", "insertWidget", "(", "len", "(", "self", ".", "_subEditors", ")", ",", "subEditor", ")", "self", ".", "_subEditors", ".", "append", "(", "subEditor", ")", "subEditor", ".", "installEventFilter", "(", "self", ")", "subEditor", ".", "setFocusPolicy", "(", "Qt", ".", "StrongFocus", ")", "if", "isFocusProxy", ":", "self", ".", "setFocusProxy", "(", "subEditor", ")", "return", "subEditor" ]
avg_line_len: 39.941176, score: 21.529412
def _set_ignore_delete_all_response(self, v, load=False):
    """
    Setter method for ignore_delete_all_response, mapped from YANG variable /vcenter/discovery/ignore_delete_all_response (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_ignore_delete_all_response is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_ignore_delete_all_response() directly.

    YANG Description: Ignore delete-all from vCenter
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=ignore_delete_all_response.ignore_delete_all_response, is_container='container', presence=False, yang_name="ignore-delete-all-response", rest_name="ignore-delete-all-response", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ignore delete-all from vCenter'}}, namespace='urn:brocade.com:mgmt:brocade-vswitch', defining_module='brocade-vswitch', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """ignore_delete_all_response must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=ignore_delete_all_response.ignore_delete_all_response, is_container='container', presence=False, yang_name="ignore-delete-all-response", rest_name="ignore-delete-all-response", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ignore delete-all from vCenter'}}, namespace='urn:brocade.com:mgmt:brocade-vswitch', defining_module='brocade-vswitch', yang_type='container', is_config=True)""",
        })

    self.__ignore_delete_all_response = t
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_ignore_delete_all_response", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "ignore_delete_all_response", ".", "ignore_delete_all_response", ",", "is_container", "=", "'container'", ",", "presence", "=", "False", ",", "yang_name", "=", "\"ignore-delete-all-response\"", ",", "rest_name", "=", "\"ignore-delete-all-response\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Ignore delete-all from vCenter'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-vswitch'", ",", "defining_module", "=", "'brocade-vswitch'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"ignore_delete_all_response must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=ignore_delete_all_response.ignore_delete_all_response, is_container='container', presence=False, yang_name=\"ignore-delete-all-response\", rest_name=\"ignore-delete-all-response\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ignore delete-all from vCenter'}}, namespace='urn:brocade.com:mgmt:brocade-vswitch', defining_module='brocade-vswitch', yang_type='container', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__ignore_delete_all_response", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
avg_line_len: 79.5, score: 38.666667
def _gettables(self):
    """Return a list of hdf5 tables named PyMCsamples.
    """
    groups = self._h5file.list_nodes("/")
    if len(groups) == 0:
        return []
    else:
        return [gr.PyMCsamples for gr in groups if gr._v_name[:5] == 'chain']
[ "def", "_gettables", "(", "self", ")", ":", "groups", "=", "self", ".", "_h5file", ".", "list_nodes", "(", "\"/\"", ")", "if", "len", "(", "groups", ")", "==", "0", ":", "return", "[", "]", "else", ":", "return", "[", "gr", ".", "PyMCsamples", "for", "gr", "in", "groups", "if", "gr", ".", "_v_name", "[", ":", "5", "]", "==", "'chain'", "]" ]
avg_line_len: 29.3, score: 17.9
def plot_eigh(self, colorbar=True, cb_orientation='vertical',
              tick_interval=[60, 60], minor_tick_interval=[20, 20],
              xlabel='Longitude', ylabel='Latitude',
              axes_labelsize=9, tick_labelsize=8, show=True, fname=None,
              **kwargs):
    """
    Plot the two eigenvalues and maximum absolute value eigenvalue of the
    horizontal tensor.

    Usage
    -----
    x.plot_eigh([tick_interval, minor_tick_interval, xlabel, ylabel,
                 colorbar, cb_orientation, cb_label, axes_labelsize,
                 tick_labelsize, show, fname, **kwargs])

    Parameters
    ----------
    tick_interval : list or tuple, optional, default = [60, 60]
        Intervals to use when plotting the major x and y ticks. If set to
        None, major ticks will not be plotted.
    minor_tick_interval : list or tuple, optional, default = [20, 20]
        Intervals to use when plotting the minor x and y ticks. If set to
        None, minor ticks will not be plotted.
    xlabel : str, optional, default = 'Longitude'
        Label for the longitude axis.
    ylabel : str, optional, default = 'Latitude'
        Label for the latitude axis.
    colorbar : bool, optional, default = True
        If True, plot a colorbar.
    cb_orientation : str, optional, default = 'vertical'
        Orientation of the colorbar: either 'vertical' or 'horizontal'.
    cb_label : str, optional, default = None
        Text label for the colorbar.
    axes_labelsize : int, optional, default = 9
        The font size for the x and y axes labels.
    tick_labelsize : int, optional, default = 8
        The font size for the x and y tick labels.
    show : bool, optional, default = True
        If True, plot the image to the screen.
    fname : str, optional, default = None
        If present, and if axes is not specified, save the image to the
        specified file.
    kwargs : optional
        Keyword arguments that will be sent to the SHGrid.plot() and
        plt.imshow() methods.
    """
    if colorbar is True:
        if cb_orientation == 'horizontal':
            scale = 2.3
        else:
            scale = 1.4
    else:
        scale = 1.65
    figsize = (_mpl.rcParams['figure.figsize'][0],
               _mpl.rcParams['figure.figsize'][0] * scale)

    fig, ax = _plt.subplots(3, 1, figsize=figsize)
    self.plot_eigh1(colorbar=colorbar, cb_orientation=cb_orientation,
                    ax=ax.flat[0], xlabel=xlabel, ylabel=ylabel,
                    tick_interval=tick_interval,
                    tick_labelsize=tick_labelsize,
                    minor_tick_interval=minor_tick_interval,
                    **kwargs)
    self.plot_eigh2(colorbar=colorbar, cb_orientation=cb_orientation,
                    ax=ax.flat[1], xlabel=xlabel, ylabel=ylabel,
                    tick_interval=tick_interval,
                    tick_labelsize=tick_labelsize,
                    minor_tick_interval=minor_tick_interval,
                    **kwargs)
    self.plot_eighh(colorbar=colorbar, cb_orientation=cb_orientation,
                    ax=ax.flat[2], xlabel=xlabel, ylabel=ylabel,
                    tick_interval=tick_interval,
                    tick_labelsize=tick_labelsize,
                    minor_tick_interval=minor_tick_interval,
                    **kwargs)
    fig.tight_layout(pad=0.5)

    if show:
        fig.show()

    if fname is not None:
        fig.savefig(fname)
    return fig, ax
[ "def", "plot_eigh", "(", "self", ",", "colorbar", "=", "True", ",", "cb_orientation", "=", "'vertical'", ",", "tick_interval", "=", "[", "60", ",", "60", "]", ",", "minor_tick_interval", "=", "[", "20", ",", "20", "]", ",", "xlabel", "=", "'Longitude'", ",", "ylabel", "=", "'Latitude'", ",", "axes_labelsize", "=", "9", ",", "tick_labelsize", "=", "8", ",", "show", "=", "True", ",", "fname", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "colorbar", "is", "True", ":", "if", "cb_orientation", "==", "'horizontal'", ":", "scale", "=", "2.3", "else", ":", "scale", "=", "1.4", "else", ":", "scale", "=", "1.65", "figsize", "=", "(", "_mpl", ".", "rcParams", "[", "'figure.figsize'", "]", "[", "0", "]", ",", "_mpl", ".", "rcParams", "[", "'figure.figsize'", "]", "[", "0", "]", "*", "scale", ")", "fig", ",", "ax", "=", "_plt", ".", "subplots", "(", "3", ",", "1", ",", "figsize", "=", "figsize", ")", "self", ".", "plot_eigh1", "(", "colorbar", "=", "colorbar", ",", "cb_orientation", "=", "cb_orientation", ",", "ax", "=", "ax", ".", "flat", "[", "0", "]", ",", "xlabel", "=", "xlabel", ",", "ylabel", "=", "ylabel", ",", "tick_interval", "=", "tick_interval", ",", "tick_labelsize", "=", "tick_labelsize", ",", "minor_tick_interval", "=", "minor_tick_interval", ",", "*", "*", "kwargs", ")", "self", ".", "plot_eigh2", "(", "colorbar", "=", "colorbar", ",", "cb_orientation", "=", "cb_orientation", ",", "ax", "=", "ax", ".", "flat", "[", "1", "]", ",", "xlabel", "=", "xlabel", ",", "ylabel", "=", "ylabel", ",", "tick_interval", "=", "tick_interval", ",", "tick_labelsize", "=", "tick_labelsize", ",", "minor_tick_interval", "=", "minor_tick_interval", ",", "*", "*", "kwargs", ")", "self", ".", "plot_eighh", "(", "colorbar", "=", "colorbar", ",", "cb_orientation", "=", "cb_orientation", ",", "ax", "=", "ax", ".", "flat", "[", "2", "]", ",", "xlabel", "=", "xlabel", ",", "ylabel", "=", "ylabel", ",", "tick_interval", "=", "tick_interval", ",", "tick_labelsize", "=", "tick_labelsize", ",", "minor_tick_interval", "=", "minor_tick_interval", ",", "*", "*", "kwargs", ")", "fig", ".", "tight_layout", "(", "pad", "=", "0.5", ")", "if", "show", ":", "fig", ".", "show", "(", ")", "if", "fname", "is", "not", "None", ":", "fig", ".", "savefig", "(", "fname", ")", "return", "fig", ",", "ax" ]
avg_line_len: 43.435294, score: 19.435294
def encrypt_with_caching(kms_cmk_arn, max_age_in_cache, cache_capacity):
    """Encrypts a string using an AWS KMS customer master key (CMK) and data key caching.

    :param str kms_cmk_arn: Amazon Resource Name (ARN) of the KMS customer master key
    :param float max_age_in_cache: Maximum time in seconds that a cached entry can be used
    :param int cache_capacity: Maximum number of entries to retain in cache at once
    """
    # Data to be encrypted
    my_data = "My plaintext data"

    # Security thresholds
    # Max messages (or max bytes per) data key are optional
    MAX_ENTRY_MESSAGES = 100

    # Create an encryption context
    encryption_context = {"purpose": "test"}

    # Create a master key provider for the KMS customer master key (CMK)
    key_provider = aws_encryption_sdk.KMSMasterKeyProvider(key_ids=[kms_cmk_arn])

    # Create a local cache
    cache = aws_encryption_sdk.LocalCryptoMaterialsCache(cache_capacity)

    # Create a caching CMM
    caching_cmm = aws_encryption_sdk.CachingCryptoMaterialsManager(
        master_key_provider=key_provider,
        cache=cache,
        max_age=max_age_in_cache,
        max_messages_encrypted=MAX_ENTRY_MESSAGES,
    )

    # When the call to encrypt data specifies a caching CMM,
    # the encryption operation uses the data key cache specified
    # in the caching CMM
    encrypted_message, _header = aws_encryption_sdk.encrypt(
        source=my_data, materials_manager=caching_cmm, encryption_context=encryption_context
    )

    return encrypted_message
[ "def", "encrypt_with_caching", "(", "kms_cmk_arn", ",", "max_age_in_cache", ",", "cache_capacity", ")", ":", "# Data to be encrypted", "my_data", "=", "\"My plaintext data\"", "# Security thresholds", "# Max messages (or max bytes per) data key are optional", "MAX_ENTRY_MESSAGES", "=", "100", "# Create an encryption context", "encryption_context", "=", "{", "\"purpose\"", ":", "\"test\"", "}", "# Create a master key provider for the KMS customer master key (CMK)", "key_provider", "=", "aws_encryption_sdk", ".", "KMSMasterKeyProvider", "(", "key_ids", "=", "[", "kms_cmk_arn", "]", ")", "# Create a local cache", "cache", "=", "aws_encryption_sdk", ".", "LocalCryptoMaterialsCache", "(", "cache_capacity", ")", "# Create a caching CMM", "caching_cmm", "=", "aws_encryption_sdk", ".", "CachingCryptoMaterialsManager", "(", "master_key_provider", "=", "key_provider", ",", "cache", "=", "cache", ",", "max_age", "=", "max_age_in_cache", ",", "max_messages_encrypted", "=", "MAX_ENTRY_MESSAGES", ",", ")", "# When the call to encrypt data specifies a caching CMM,", "# the encryption operation uses the data key cache specified", "# in the caching CMM", "encrypted_message", ",", "_header", "=", "aws_encryption_sdk", ".", "encrypt", "(", "source", "=", "my_data", ",", "materials_manager", "=", "caching_cmm", ",", "encryption_context", "=", "encryption_context", ")", "return", "encrypted_message" ]
avg_line_len: 38.538462, score: 25.153846
def _pop_digits(char_list):
    """Pop consecutive digits from the front of list and return them

    Pops any and all consecutive digits from the start of the provided
    character list and returns them as a list of string digits.
    Operates on (and possibly alters) the passed list.

    :param list char_list: a list of characters
    :return: a list of string digits
    :rtype: list
    """
    logger.debug('_pop_digits(%s)', char_list)
    digits = []
    while len(char_list) != 0 and char_list[0].isdigit():
        digits.append(char_list.pop(0))
    logger.debug('got digits: %s', digits)
    logger.debug('updated char list: %s', char_list)
    return digits
[ "def", "_pop_digits", "(", "char_list", ")", ":", "logger", ".", "debug", "(", "'_pop_digits(%s)'", ",", "char_list", ")", "digits", "=", "[", "]", "while", "len", "(", "char_list", ")", "!=", "0", "and", "char_list", "[", "0", "]", ".", "isdigit", "(", ")", ":", "digits", ".", "append", "(", "char_list", ".", "pop", "(", "0", ")", ")", "logger", ".", "debug", "(", "'got digits: %s'", ",", "digits", ")", "logger", ".", "debug", "(", "'updated char list: %s'", ",", "char_list", ")", "return", "digits" ]
avg_line_len: 36.444444, score: 15.611111
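A standalone run of the digit-popping behaviour; the hypothetical pop_digits below mirrors the helper above with the logger calls dropped:

def pop_digits(char_list):
    # Pop leading digits off the front of the list, mutating it in place.
    digits = []
    while len(char_list) != 0 and char_list[0].isdigit():
        digits.append(char_list.pop(0))
    return digits

chars = list("42abc7")
print(pop_digits(chars))  # ['4', '2']
print(chars)              # ['a', 'b', 'c', '7']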
def ensure_local_net(
        network_name: str = DOCKER_STARCRAFT_NETWORK,
        subnet_cidr: str = SUBNET_CIDR
) -> None:
    """
    Create docker local net if not found.

    :raises docker.errors.APIError
    """
    logger.info(f"checking whether docker has network {network_name}")
    ipam_pool = docker.types.IPAMPool(subnet=subnet_cidr)
    ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
    networks = docker_client.networks.list(names=DOCKER_STARCRAFT_NETWORK)
    output = networks[0].short_id if networks else None
    if not output:
        logger.info("network not found, creating ...")
        output = docker_client.networks.create(DOCKER_STARCRAFT_NETWORK, ipam=ipam_config).short_id
    logger.debug(f"docker network id: {output}")
[ "def", "ensure_local_net", "(", "network_name", ":", "str", "=", "DOCKER_STARCRAFT_NETWORK", ",", "subnet_cidr", ":", "str", "=", "SUBNET_CIDR", ")", "->", "None", ":", "logger", ".", "info", "(", "f\"checking whether docker has network {network_name}\"", ")", "ipam_pool", "=", "docker", ".", "types", ".", "IPAMPool", "(", "subnet", "=", "subnet_cidr", ")", "ipam_config", "=", "docker", ".", "types", ".", "IPAMConfig", "(", "pool_configs", "=", "[", "ipam_pool", "]", ")", "networks", "=", "docker_client", ".", "networks", ".", "list", "(", "names", "=", "DOCKER_STARCRAFT_NETWORK", ")", "output", "=", "networks", "[", "0", "]", ".", "short_id", "if", "networks", "else", "None", "if", "not", "output", ":", "logger", ".", "info", "(", "\"network not found, creating ...\"", ")", "output", "=", "docker_client", ".", "networks", ".", "create", "(", "DOCKER_STARCRAFT_NETWORK", ",", "ipam", "=", "ipam_config", ")", ".", "short_id", "logger", ".", "debug", "(", "f\"docker network id: {output}\"", ")" ]
avg_line_len: 41.833333, score: 18.722222
def run_with_graph_transformation(self) -> Iterable[BELGraph]:
    """Calculate scores for all leaves until there are none, remove edges until
    new leaves appear, and repeat until all nodes have been scored. Also yields
    the current graph at every step so you can make a cool animation of how the
    graph changes throughout the course of the algorithm.

    :return: An iterable of BEL graphs
    """
    yield self.get_remaining_graph()
    while not self.done_chomping():
        while not list(self.iter_leaves()):
            self.remove_random_edge()
            yield self.get_remaining_graph()
        self.score_leaves()
        yield self.get_remaining_graph()
[ "def", "run_with_graph_transformation", "(", "self", ")", "->", "Iterable", "[", "BELGraph", "]", ":", "yield", "self", ".", "get_remaining_graph", "(", ")", "while", "not", "self", ".", "done_chomping", "(", ")", ":", "while", "not", "list", "(", "self", ".", "iter_leaves", "(", ")", ")", ":", "self", ".", "remove_random_edge", "(", ")", "yield", "self", ".", "get_remaining_graph", "(", ")", "self", ".", "score_leaves", "(", ")", "yield", "self", ".", "get_remaining_graph", "(", ")" ]
avg_line_len: 50.142857, score: 14.142857
def merge(self, dataset):
    """ Merge the specified dataset on top of the existing data.

    This replaces all values in the existing dataset with the values from
    the given dataset.

    Args:
        dataset (TaskData): A reference to the TaskData object that should be
            merged on top of the existing object.
    """
    def merge_data(source, dest):
        for key, value in source.items():
            if isinstance(value, dict):
                merge_data(value, dest.setdefault(key, {}))
            else:
                dest[key] = value
        return dest

    merge_data(dataset.data, self._data)

    for h in dataset.task_history:
        if h not in self._task_history:
            self._task_history.append(h)
[ "def", "merge", "(", "self", ",", "dataset", ")", ":", "def", "merge_data", "(", "source", ",", "dest", ")", ":", "for", "key", ",", "value", "in", "source", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "merge_data", "(", "value", ",", "dest", ".", "setdefault", "(", "key", ",", "{", "}", ")", ")", "else", ":", "dest", "[", "key", "]", "=", "value", "return", "dest", "merge_data", "(", "dataset", ".", "data", ",", "self", ".", "_data", ")", "for", "h", "in", "dataset", ".", "task_history", ":", "if", "h", "not", "in", "self", ".", "_task_history", ":", "self", ".", "_task_history", ".", "append", "(", "h", ")" ]
avg_line_len: 34.434783, score: 17.434783
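The nested merge_data recursion is the interesting part of the method above; a self-contained sketch using plain dicts instead of TaskData objects:

def merge_data(source, dest):
    # Recursively copy `source` into `dest`, descending into nested dicts
    # and overwriting scalar values, as in the inner helper above.
    for key, value in source.items():
        if isinstance(value, dict):
            merge_data(value, dest.setdefault(key, {}))
        else:
            dest[key] = value
    return dest

base = {'a': 1, 'nested': {'x': 1}}
merge_data({'nested': {'y': 2}, 'b': 3}, base)
print(base)  # {'a': 1, 'nested': {'x': 1, 'y': 2}, 'b': 3}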
def serialize_options(opts):
    """
    A helper method to serialize and process the options dictionary.
    """
    options = (opts or {}).copy()

    for key in opts.keys():
        if key not in DEFAULT_OPTIONS:
            LOG.warning("Unknown option passed to Flask-CORS: %s", key)

    # Ensure origins is a list of allowed origins with at least one entry.
    options['origins'] = sanitize_regex_param(options.get('origins'))
    options['allow_headers'] = sanitize_regex_param(options.get('allow_headers'))

    # This is expressly forbidden by the spec. Raise a value error so people
    # don't get burned in production.
    if r'.*' in options['origins'] and options['supports_credentials'] and options['send_wildcard']:
        raise ValueError("Cannot use supports_credentials in conjunction with "
                         "an origin string of '*'. See: "
                         "http://www.w3.org/TR/cors/#resource-requests")

    serialize_option(options, 'expose_headers')
    serialize_option(options, 'methods', upper=True)

    if isinstance(options.get('max_age'), timedelta):
        options['max_age'] = str(int(options['max_age'].total_seconds()))

    return options
[ "def", "serialize_options", "(", "opts", ")", ":", "options", "=", "(", "opts", "or", "{", "}", ")", ".", "copy", "(", ")", "for", "key", "in", "opts", ".", "keys", "(", ")", ":", "if", "key", "not", "in", "DEFAULT_OPTIONS", ":", "LOG", ".", "warning", "(", "\"Unknown option passed to Flask-CORS: %s\"", ",", "key", ")", "# Ensure origins is a list of allowed origins with at least one entry.", "options", "[", "'origins'", "]", "=", "sanitize_regex_param", "(", "options", ".", "get", "(", "'origins'", ")", ")", "options", "[", "'allow_headers'", "]", "=", "sanitize_regex_param", "(", "options", ".", "get", "(", "'allow_headers'", ")", ")", "# This is expressly forbidden by the spec. Raise a value error so people", "# don't get burned in production.", "if", "r'.*'", "in", "options", "[", "'origins'", "]", "and", "options", "[", "'supports_credentials'", "]", "and", "options", "[", "'send_wildcard'", "]", ":", "raise", "ValueError", "(", "\"Cannot use supports_credentials in conjunction with\"", "\"an origin string of '*'. See: \"", "\"http://www.w3.org/TR/cors/#resource-requests\"", ")", "serialize_option", "(", "options", ",", "'expose_headers'", ")", "serialize_option", "(", "options", ",", "'methods'", ",", "upper", "=", "True", ")", "if", "isinstance", "(", "options", ".", "get", "(", "'max_age'", ")", ",", "timedelta", ")", ":", "options", "[", "'max_age'", "]", "=", "str", "(", "int", "(", "options", "[", "'max_age'", "]", ".", "total_seconds", "(", ")", ")", ")", "return", "options" ]
avg_line_len: 38.966667, score: 26.433333
def log_message(self, format, *args):
    """Log an arbitrary message.

    This is used by all other logging functions. Override
    it if you have specific logging wishes.

    The first argument, FORMAT, is a format string for the
    message to be logged. If the format string contains
    any % escapes requiring parameters, they should be
    specified as subsequent arguments (it's just like
    printf!).

    The client ip and current date/time are prefixed to
    every message.
    """
    sys.stderr.write("%s - - [%s] %s\n" %
                     (self.address_string(),
                      self.log_date_time_string(),
                      format % args))
[ "def", "log_message", "(", "self", ",", "format", ",", "*", "args", ")", ":", "sys", ".", "stderr", ".", "write", "(", "\"%s - - [%s] %s\\n\"", "%", "(", "self", ".", "address_string", "(", ")", ",", "self", ".", "log_date_time_string", "(", ")", ",", "format", "%", "args", ")", ")" ]
avg_line_len: 34, score: 18.904762
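A minimal standalone sketch of the printf-style framing this handler relies on; the client address and timestamp are hard-coded stand-ins for address_string() and log_date_time_string():

import sys

def log_message(format, *args):
    # Same "%s - - [%s] %s\n" framing as the method above.
    sys.stderr.write("%s - - [%s] %s\n" %
                     ("127.0.0.1", "01/Jan/2024 12:00:00", format % args))

log_message('"%s" %s %s', "GET / HTTP/1.1", 200, 1234)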
def create_ui(self):
    '''
    Create UI elements and connect signals.
    '''
    box = Gtk.Box()
    rotate_left = Gtk.Button('Rotate left')
    rotate_right = Gtk.Button('Rotate right')
    flip_horizontal = Gtk.Button('Flip horizontal')
    flip_vertical = Gtk.Button('Flip vertical')
    reset = Gtk.Button('Reset')
    load = Gtk.Button('Load...')
    save = Gtk.Button('Save...')

    rotate_left.connect('clicked', lambda *args: self.rotate_left())
    rotate_right.connect('clicked', lambda *args: self.rotate_right())
    flip_horizontal.connect('clicked', lambda *args: self.flip_horizontal())
    flip_vertical.connect('clicked', lambda *args: self.flip_vertical())
    reset.connect('clicked', lambda *args: self.reset())
    load.connect('clicked', lambda *args: GObject.idle_add(self.load))
    save.connect('clicked', lambda *args: GObject.idle_add(self.save))

    for b in (rotate_left, rotate_right, flip_horizontal, flip_vertical,
              reset, load, save):
        box.pack_start(b, False, False, 0)
    box.show_all()
    self.widget.pack_start(box, False, False, 0)

    if self.warp_actor.parent_corners is None:
        for b in (rotate_left, rotate_right, flip_horizontal, flip_vertical,
                  reset, load, save):
            b.set_sensitive(False)

        def check_init():
            if self.warp_actor.parent_corners is not None:
                for b in (rotate_left, rotate_right, flip_horizontal,
                          flip_vertical, reset, load, save):
                    b.set_sensitive(True)
                return False
            return True

        GObject.timeout_add(100, check_init)
[ "def", "create_ui", "(", "self", ")", ":", "box", "=", "Gtk", ".", "Box", "(", ")", "rotate_left", "=", "Gtk", ".", "Button", "(", "'Rotate left'", ")", "rotate_right", "=", "Gtk", ".", "Button", "(", "'Rotate right'", ")", "flip_horizontal", "=", "Gtk", ".", "Button", "(", "'Flip horizontal'", ")", "flip_vertical", "=", "Gtk", ".", "Button", "(", "'Flip vertical'", ")", "reset", "=", "Gtk", ".", "Button", "(", "'Reset'", ")", "load", "=", "Gtk", ".", "Button", "(", "'Load...'", ")", "save", "=", "Gtk", ".", "Button", "(", "'Save...'", ")", "rotate_left", ".", "connect", "(", "'clicked'", ",", "lambda", "*", "args", ":", "self", ".", "rotate_left", "(", ")", ")", "rotate_right", ".", "connect", "(", "'clicked'", ",", "lambda", "*", "args", ":", "self", ".", "rotate_right", "(", ")", ")", "flip_horizontal", ".", "connect", "(", "'clicked'", ",", "lambda", "*", "args", ":", "self", ".", "flip_horizontal", "(", ")", ")", "flip_vertical", ".", "connect", "(", "'clicked'", ",", "lambda", "*", "args", ":", "self", ".", "flip_vertical", "(", ")", ")", "reset", ".", "connect", "(", "'clicked'", ",", "lambda", "*", "args", ":", "self", ".", "reset", "(", ")", ")", "load", ".", "connect", "(", "'clicked'", ",", "lambda", "*", "args", ":", "GObject", ".", "idle_add", "(", "self", ".", "load", ")", ")", "save", ".", "connect", "(", "'clicked'", ",", "lambda", "*", "args", ":", "GObject", ".", "idle_add", "(", "self", ".", "save", ")", ")", "for", "b", "in", "(", "rotate_left", ",", "rotate_right", ",", "flip_horizontal", ",", "flip_vertical", ",", "reset", ",", "load", ",", "save", ")", ":", "box", ".", "pack_start", "(", "b", ",", "False", ",", "False", ",", "0", ")", "box", ".", "show_all", "(", ")", "self", ".", "widget", ".", "pack_start", "(", "box", ",", "False", ",", "False", ",", "0", ")", "if", "self", ".", "warp_actor", ".", "parent_corners", "is", "None", ":", "for", "b", "in", "(", "rotate_left", ",", "rotate_right", ",", "flip_horizontal", ",", "flip_vertical", ",", "reset", ",", "load", ",", "save", ")", ":", "b", ".", "set_sensitive", "(", "False", ")", "def", "check_init", "(", ")", ":", "if", "self", ".", "warp_actor", ".", "parent_corners", "is", "not", "None", ":", "for", "b", "in", "(", "rotate_left", ",", "rotate_right", ",", "flip_horizontal", ",", "flip_vertical", ",", "reset", ",", "load", ",", "save", ")", ":", "b", ".", "set_sensitive", "(", "True", ")", "return", "False", "return", "True", "GObject", ".", "timeout_add", "(", "100", ",", "check_init", ")" ]
avg_line_len: 43.512195, score: 19.268293
def kendalltau_dist(params1, params2=None):
    r"""Compute the Kendall tau distance between two models.

    This function computes the Kendall tau distance between the rankings
    induced by two parameter vectors. Let :math:`\sigma_i` be the rank of
    item ``i`` in the model described by ``params1``, and :math:`\tau_i` be
    its rank in the model described by ``params2``. The Kendall tau distance
    is defined as the number of pairwise disagreements between the two
    rankings, i.e.,

    .. math::

      \sum_{i=1}^N \sum_{j=1}^N
          \mathbf{1} \{ \sigma_i > \sigma_j \wedge \tau_i < \tau_j \}

    By convention, items with the lowest parameters are ranked first (i.e.,
    sorted using the natural order).

    If the argument ``params2`` is ``None``, the second model is assumed to
    rank the items by their index: item ``0`` has rank 1, item ``1`` has
    rank 2, etc.

    If some values are equal within a parameter vector, all items are given
    a distinct rank, corresponding to the order in which the values occur.

    Parameters
    ----------
    params1 : array_like
        Parameters of the first model.
    params2 : array_like, optional
        Parameters of the second model.

    Returns
    -------
    dist : float
        Kendall tau distance.
    """
    assert params2 is None or len(params1) == len(params2)
    ranks1 = rankdata(params1, method="ordinal")
    if params2 is None:
        ranks2 = np.arange(1, len(params1) + 1, dtype=float)
    else:
        ranks2 = rankdata(params2, method="ordinal")
    tau, _ = kendalltau(ranks1, ranks2)
    n_items = len(params1)
    n_pairs = n_items * (n_items - 1) / 2
    return round((n_pairs - n_pairs * tau) / 2)
[ "def", "kendalltau_dist", "(", "params1", ",", "params2", "=", "None", ")", ":", "assert", "params2", "is", "None", "or", "len", "(", "params1", ")", "==", "len", "(", "params2", ")", "ranks1", "=", "rankdata", "(", "params1", ",", "method", "=", "\"ordinal\"", ")", "if", "params2", "is", "None", ":", "ranks2", "=", "np", ".", "arange", "(", "1", ",", "len", "(", "params1", ")", "+", "1", ",", "dtype", "=", "float", ")", "else", ":", "ranks2", "=", "rankdata", "(", "params2", ",", "method", "=", "\"ordinal\"", ")", "tau", ",", "_", "=", "kendalltau", "(", "ranks1", ",", "ranks2", ")", "n_items", "=", "len", "(", "params1", ")", "n_pairs", "=", "n_items", "*", "(", "n_items", "-", "1", ")", "/", "2", "return", "round", "(", "(", "n_pairs", "-", "n_pairs", "*", "tau", ")", "/", "2", ")" ]
avg_line_len: 36.065217, score: 23.391304
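A quick numeric check of the distance (requires scipy; the function body is reproduced so the example runs on its own, and the parameter vectors are arbitrary):

import numpy as np
from scipy.stats import kendalltau, rankdata

def kendalltau_dist(params1, params2=None):
    # Same computation as the function above.
    ranks1 = rankdata(params1, method="ordinal")
    if params2 is None:
        ranks2 = np.arange(1, len(params1) + 1, dtype=float)
    else:
        ranks2 = rankdata(params2, method="ordinal")
    tau, _ = kendalltau(ranks1, ranks2)
    n_pairs = len(params1) * (len(params1) - 1) / 2
    return round((n_pairs - n_pairs * tau) / 2)

# Reversing a ranking of 4 items disagrees on all 6 pairs.
print(kendalltau_dist([0.1, 0.2, 0.3, 0.4], [0.4, 0.3, 0.2, 0.1]))  # 6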
def coords(self):
    """The (X, Y) coordinates of the tablet tool, in mm from the top left
    corner of the tablet in its current logical orientation and whether
    they have changed in this event.

    Use :meth:`transform_coords` for transforming the axes values into a
    different coordinate space.

    Note:
        On some devices, returned value may be negative or larger than
        the width of the device. See `Out-of-bounds motion events`_ for
        more details.

    Returns:
        ((float, float), bool): The current values of the axes and
        whether they have changed.
    """
    x_changed = self._libinput.libinput_event_tablet_tool_x_has_changed(
        self._handle)
    y_changed = self._libinput.libinput_event_tablet_tool_y_has_changed(
        self._handle)
    x = self._libinput.libinput_event_tablet_tool_get_x(self._handle)
    y = self._libinput.libinput_event_tablet_tool_get_y(self._handle)
    return (x, y), x_changed or y_changed
[ "def", "coords", "(", "self", ")", ":", "x_changed", "=", "self", ".", "_libinput", ".", "libinput_event_tablet_tool_x_has_changed", "(", "self", ".", "_handle", ")", "y_changed", "=", "self", ".", "_libinput", ".", "libinput_event_tablet_tool_y_has_changed", "(", "self", ".", "_handle", ")", "x", "=", "self", ".", "_libinput", ".", "libinput_event_tablet_tool_get_x", "(", "self", ".", "_handle", ")", "y", "=", "self", ".", "_libinput", ".", "libinput_event_tablet_tool_get_y", "(", "self", ".", "_handle", ")", "return", "(", "x", ",", "y", ")", ",", "x_changed", "or", "y_changed" ]
avg_line_len: 37.416667, score: 22.708333
def a_connection_timeout(ctx):
    """Check the prompt and update the drivers."""
    prompt = ctx.ctrl.after
    ctx.msg = "Received the jump host prompt: '{}'".format(prompt)
    ctx.device.connected = False
    ctx.finished = True
    raise ConnectionTimeoutError("Unable to connect to the device.",
                                 ctx.ctrl.hostname)
[ "def", "a_connection_timeout", "(", "ctx", ")", ":", "prompt", "=", "ctx", ".", "ctrl", ".", "after", "ctx", ".", "msg", "=", "\"Received the jump host prompt: '{}'\"", ".", "format", "(", "prompt", ")", "ctx", ".", "device", ".", "connected", "=", "False", "ctx", ".", "finished", "=", "True", "raise", "ConnectionTimeoutError", "(", "\"Unable to connect to the device.\"", ",", "ctx", ".", "ctrl", ".", "hostname", ")" ]
avg_line_len: 45, score: 17.285714
def remove_terms_used_in_less_than_num_docs(self, threshold):
    '''
    Parameters
    ----------
    threshold: int
        Minimum number of documents term should appear in to be kept

    Returns
    -------
    TermDocMatrix, new object with terms removed.
    '''
    term_counts = self._X.astype(bool).astype(int).sum(axis=0).A[0]
    terms_to_remove = np.where(term_counts < threshold)[0]
    return self.remove_terms_by_indices(terms_to_remove)
[ "def", "remove_terms_used_in_less_than_num_docs", "(", "self", ",", "threshold", ")", ":", "term_counts", "=", "self", ".", "_X", ".", "astype", "(", "bool", ")", ".", "astype", "(", "int", ")", ".", "sum", "(", "axis", "=", "0", ")", ".", "A", "[", "0", "]", "terms_to_remove", "=", "np", ".", "where", "(", "term_counts", "<", "threshold", ")", "[", "0", "]", "return", "self", ".", "remove_terms_by_indices", "(", "terms_to_remove", ")" ]
avg_line_len: 34.928571, score: 24.928571
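A sketch of the document-frequency filter on a toy sparse matrix; scipy is assumed, and only the counting and selection lines from the method above are reproduced:

import numpy as np
from scipy.sparse import csr_matrix

# Rows are documents, columns are terms; values are raw term counts.
X = csr_matrix(np.array([[2, 0, 1],
                         [0, 0, 3],
                         [1, 0, 0]]))

# Number of documents each term appears in: booleanise, then sum columns.
term_counts = X.astype(bool).astype(int).sum(axis=0).A[0]
terms_to_remove = np.where(term_counts < 2)[0]
print(term_counts)      # [2 0 2]
print(terms_to_remove)  # [1]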
def _check_file_field(self, field):
    """Check that field exists and is a file field"""
    is_field = field in self.field_names
    is_file = self.__meta_metadata(field, 'field_type') == 'file'
    if not (is_field and is_file):
        msg = "'%s' is not a field or not a 'file' field" % field
        raise ValueError(msg)
    else:
        return True
[ "def", "_check_file_field", "(", "self", ",", "field", ")", ":", "is_field", "=", "field", "in", "self", ".", "field_names", "is_file", "=", "self", ".", "__meta_metadata", "(", "field", ",", "'field_type'", ")", "==", "'file'", "if", "not", "(", "is_field", "and", "is_file", ")", ":", "msg", "=", "\"'%s' is not a field or not a 'file' field\"", "%", "field", "raise", "ValueError", "(", "msg", ")", "else", ":", "return", "True" ]
avg_line_len: 42.333333, score: 13.333333
def get_plugins_directory(config_path=None, microdrop_user_root=None):
    '''
    Resolve plugins directory.

    Plugins directory is resolved as follows, highest-priority first:

     1. ``plugins`` directory specified in provided :data:`config_path`.
     2. ``plugins`` sub-directory of specified MicroDrop profile path
        (i.e., :data:`microdrop_user_root`)
     3. ``plugins`` sub-directory of parent directory of configuration file
        path specified using ``MICRODROP_CONFIG`` environment variable.
     4. ``plugins`` sub-directory of MicroDrop profile path specified using
        ``MICRODROP_PROFILE`` environment variable.
     5. Plugins directory specified in
        ``<home directory>/MicroDrop/microdrop.ini``.
     6. Plugins directory in default profile location, i.e.,
        ``<home directory>/MicroDrop/plugins``.

    Parameters
    ----------
    config_path : str, optional
        Configuration file path (i.e., path to ``microdrop.ini``).
    microdrop_user_root : str, optional
        Path to MicroDrop user data directory.

    Returns
    -------
    path
        Absolute path to plugins directory.
    '''
    RESOLVED_BY_NONE = 'default'
    RESOLVED_BY_CONFIG_ARG = 'config_path argument'
    RESOLVED_BY_PROFILE_ARG = 'microdrop_user_root argument'
    RESOLVED_BY_CONFIG_ENV = 'MICRODROP_CONFIG environment variable'
    RESOLVED_BY_PROFILE_ENV = 'MICRODROP_PROFILE environment variable'

    resolved_by = [RESOLVED_BY_NONE]

    # # Find plugins directory path #
    if microdrop_user_root is not None:
        microdrop_user_root = path(microdrop_user_root).realpath()
        resolved_by.append(RESOLVED_BY_PROFILE_ARG)
    elif 'MICRODROP_PROFILE' in os.environ:
        microdrop_user_root = path(os.environ['MICRODROP_PROFILE']).realpath()
        resolved_by.append(RESOLVED_BY_PROFILE_ENV)
    else:
        microdrop_user_root = path(home_dir()).joinpath('MicroDrop')

    if config_path is not None:
        config_path = path(config_path).expand()
        resolved_by.append(RESOLVED_BY_CONFIG_ARG)
    elif 'MICRODROP_CONFIG' in os.environ:
        config_path = path(os.environ['MICRODROP_CONFIG']).realpath()
        resolved_by.append(RESOLVED_BY_CONFIG_ENV)
    else:
        config_path = microdrop_user_root.joinpath('microdrop.ini')

    try:
        # Look up plugins directory stored in configuration file.
        plugins_directory = path(configobj.ConfigObj(config_path)
                                 ['plugins']['directory'])
        if not plugins_directory.isabs():
            # Plugins directory stored in configuration file as relative path.
            # Interpret as relative to parent directory of configuration file.
            plugins_directory = config_path.parent.joinpath(plugins_directory)
        if not plugins_directory.isdir():
            raise IOError('Plugins directory does not exist: {}'
                          .format(plugins_directory))
    except Exception, why:
        # Error looking up plugins directory in configuration file (maybe no
        # plugins directory was listed in configuration file?).
        plugins_directory = microdrop_user_root.joinpath('plugins')
        logger.warning('%s. Using default plugins directory: %s', why,
                       plugins_directory)
        if resolved_by[-1] in (RESOLVED_BY_CONFIG_ARG, RESOLVED_BY_CONFIG_ENV):
            resolved_by.pop()

    logger.info('Resolved plugins directory by %s: %s', resolved_by[-1],
                plugins_directory)
    return plugins_directory
[ "def", "get_plugins_directory", "(", "config_path", "=", "None", ",", "microdrop_user_root", "=", "None", ")", ":", "RESOLVED_BY_NONE", "=", "'default'", "RESOLVED_BY_CONFIG_ARG", "=", "'config_path argument'", "RESOLVED_BY_PROFILE_ARG", "=", "'microdrop_user_root argument'", "RESOLVED_BY_CONFIG_ENV", "=", "'MICRODROP_CONFIG environment variable'", "RESOLVED_BY_PROFILE_ENV", "=", "'MICRODROP_PROFILE environment variable'", "resolved_by", "=", "[", "RESOLVED_BY_NONE", "]", "# # Find plugins directory path #", "if", "microdrop_user_root", "is", "not", "None", ":", "microdrop_user_root", "=", "path", "(", "microdrop_user_root", ")", ".", "realpath", "(", ")", "resolved_by", ".", "append", "(", "RESOLVED_BY_PROFILE_ARG", ")", "elif", "'MICRODROP_PROFILE'", "in", "os", ".", "environ", ":", "microdrop_user_root", "=", "path", "(", "os", ".", "environ", "[", "'MICRODROP_PROFILE'", "]", ")", ".", "realpath", "(", ")", "resolved_by", ".", "append", "(", "RESOLVED_BY_PROFILE_ENV", ")", "else", ":", "microdrop_user_root", "=", "path", "(", "home_dir", "(", ")", ")", ".", "joinpath", "(", "'MicroDrop'", ")", "if", "config_path", "is", "not", "None", ":", "config_path", "=", "path", "(", "config_path", ")", ".", "expand", "(", ")", "resolved_by", ".", "append", "(", "RESOLVED_BY_CONFIG_ARG", ")", "elif", "'MICRODROP_CONFIG'", "in", "os", ".", "environ", ":", "config_path", "=", "path", "(", "os", ".", "environ", "[", "'MICRODROP_CONFIG'", "]", ")", ".", "realpath", "(", ")", "resolved_by", ".", "append", "(", "RESOLVED_BY_CONFIG_ENV", ")", "else", ":", "config_path", "=", "microdrop_user_root", ".", "joinpath", "(", "'microdrop.ini'", ")", "try", ":", "# Look up plugins directory stored in configuration file.", "plugins_directory", "=", "path", "(", "configobj", ".", "ConfigObj", "(", "config_path", ")", "[", "'plugins'", "]", "[", "'directory'", "]", ")", "if", "not", "plugins_directory", ".", "isabs", "(", ")", ":", "# Plugins directory stored in configuration file as relative path.", "# Interpret as relative to parent directory of configuration file.", "plugins_directory", "=", "config_path", ".", "parent", ".", "joinpath", "(", "plugins_directory", ")", "if", "not", "plugins_directory", ".", "isdir", "(", ")", ":", "raise", "IOError", "(", "'Plugins directory does not exist: {}'", ".", "format", "(", "plugins_directory", ")", ")", "except", "Exception", ",", "why", ":", "# Error looking up plugins directory in configuration file (maybe no", "# plugins directory was listed in configuration file?).", "plugins_directory", "=", "microdrop_user_root", ".", "joinpath", "(", "'plugins'", ")", "logger", ".", "warning", "(", "'%s. Using default plugins directory: %s'", ",", "why", ",", "plugins_directory", ")", "if", "resolved_by", "[", "-", "1", "]", "in", "(", "RESOLVED_BY_CONFIG_ARG", ",", "RESOLVED_BY_CONFIG_ENV", ")", ":", "resolved_by", ".", "pop", "(", ")", "logger", ".", "info", "(", "'Resolved plugins directory by %s: %s'", ",", "resolved_by", "[", "-", "1", "]", ",", "plugins_directory", ")", "return", "plugins_directory" ]
43.797468
21.924051
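A much-reduced sketch of the same resolution order (explicit argument first, then the MICRODROP_PROFILE environment variable, then a default under the home directory), using only the standard library instead of the path/configobj helpers above; the function name is invented for illustration.

import os

def resolve_profile_dir(profile=None):
    # 1) explicit argument, 2) MICRODROP_PROFILE env var, 3) ~/MicroDrop default
    if profile is not None:
        return os.path.realpath(profile)
    if 'MICRODROP_PROFILE' in os.environ:
        return os.path.realpath(os.environ['MICRODROP_PROFILE'])
    return os.path.join(os.path.expanduser('~'), 'MicroDrop')

print(resolve_profile_dir())                # e.g. /home/user/MicroDrop
print(resolve_profile_dir('/tmp/profile'))  # absolute paths pass through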
def _process_range_request(self, environ, complete_length=None, accept_ranges=None): """Handle Range Request related headers (RFC7233). If `Accept-Ranges` header is valid, and Range Request is processable, we set the headers as described by the RFC, and wrap the underlying response in a RangeWrapper. Returns ``True`` if Range Request can be fulfilled, ``False`` otherwise. :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` if `Range` header could not be parsed or satisfied. """ from ..exceptions import RequestedRangeNotSatisfiable if accept_ranges is None: return False self.headers["Accept-Ranges"] = accept_ranges if not self._is_range_request_processable(environ) or complete_length is None: return False parsed_range = parse_range_header(environ.get("HTTP_RANGE")) if parsed_range is None: raise RequestedRangeNotSatisfiable(complete_length) range_tuple = parsed_range.range_for_length(complete_length) content_range_header = parsed_range.to_content_range_header(complete_length) if range_tuple is None or content_range_header is None: raise RequestedRangeNotSatisfiable(complete_length) content_length = range_tuple[1] - range_tuple[0] # Be sure not to send 206 response # if requested range is the full content. if content_length != complete_length: self.headers["Content-Length"] = content_length self.content_range = content_range_header self.status_code = 206 self._wrap_response(range_tuple[0], content_length) return True return False
[ "def", "_process_range_request", "(", "self", ",", "environ", ",", "complete_length", "=", "None", ",", "accept_ranges", "=", "None", ")", ":", "from", ".", ".", "exceptions", "import", "RequestedRangeNotSatisfiable", "if", "accept_ranges", "is", "None", ":", "return", "False", "self", ".", "headers", "[", "\"Accept-Ranges\"", "]", "=", "accept_ranges", "if", "not", "self", ".", "_is_range_request_processable", "(", "environ", ")", "or", "complete_length", "is", "None", ":", "return", "False", "parsed_range", "=", "parse_range_header", "(", "environ", ".", "get", "(", "\"HTTP_RANGE\"", ")", ")", "if", "parsed_range", "is", "None", ":", "raise", "RequestedRangeNotSatisfiable", "(", "complete_length", ")", "range_tuple", "=", "parsed_range", ".", "range_for_length", "(", "complete_length", ")", "content_range_header", "=", "parsed_range", ".", "to_content_range_header", "(", "complete_length", ")", "if", "range_tuple", "is", "None", "or", "content_range_header", "is", "None", ":", "raise", "RequestedRangeNotSatisfiable", "(", "complete_length", ")", "content_length", "=", "range_tuple", "[", "1", "]", "-", "range_tuple", "[", "0", "]", "# Be sure not to send 206 response", "# if requested range is the full content.", "if", "content_length", "!=", "complete_length", ":", "self", ".", "headers", "[", "\"Content-Length\"", "]", "=", "content_length", "self", ".", "content_range", "=", "content_range_header", "self", ".", "status_code", "=", "206", "self", ".", "_wrap_response", "(", "range_tuple", "[", "0", "]", ",", "content_length", ")", "return", "True", "return", "False" ]
49.428571
22.257143
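The helper leans on `parse_range_header`, which in Werkzeug lives in `werkzeug.http` and returns an object exposing the same `range_for_length` and `to_content_range_header` methods used above. A small sketch, assuming Werkzeug is installed; the values in the comments are what I would expect, not verified output.

from werkzeug.http import parse_range_header

rng = parse_range_header("bytes=0-499")
if rng is not None:
    start, stop = rng.range_for_length(1200)      # half-open range, e.g. (0, 500)
    print(stop - start)                           # 500 -> would become Content-Length
    print(rng.to_content_range_header(1200))      # e.g. 'bytes 0-499/1200'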
def book(symbol=None, token='', version=''): '''Book shows IEX’s bids and asks for given symbols. https://iexcloud.io/docs/api/#deep-book Args: symbol (string); Ticker to request token (string); Access token version (string); API version Returns: dict: result ''' _raiseIfNotStr(symbol) if symbol: return _getJson('deep/book?symbols=' + symbol, token, version) return _getJson('deep/book', token, version)
[ "def", "book", "(", "symbol", "=", "None", ",", "token", "=", "''", ",", "version", "=", "''", ")", ":", "_raiseIfNotStr", "(", "symbol", ")", "if", "symbol", ":", "return", "_getJson", "(", "'deep/book?symbols='", "+", "symbol", ",", "token", ",", "version", ")", "return", "_getJson", "(", "'deep/book'", ",", "token", ",", "version", ")" ]
27.294118
20.117647
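Hypothetical call shapes for the `book` helper above; the token value is a placeholder, and the module-level `_getJson` and `_raiseIfNotStr` helpers are assumed to be available alongside it.

# Per-symbol DEEP book, then the unfiltered endpoint; both return parsed JSON.
spy_book = book('SPY', token='pk_your_token_here', version='stable')
all_books = book(token='pk_your_token_here', version='stable')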
def FromLegacyResponses(cls, request=None, responses=None): """Creates a Responses object from old style flow request and responses.""" res = cls() res.request = request if request: res.request_data = rdf_protodict.Dict(request.data) dropped_responses = [] # The iterator that was returned as part of these responses. This should # be passed back to actions that expect an iterator. res.iterator = None if not responses: return res # This may not be needed if we can assume that responses are # returned in lexical order from the data_store. responses.sort(key=operator.attrgetter("response_id")) if request.HasField("request"): client_action_name = request.request.name action_registry = server_stubs.ClientActionStub.classes if client_action_name not in action_registry: raise RuntimeError( "Got unknown client action: %s." % client_action_name) expected_response_classes = action_registry[ client_action_name].out_rdfvalues old_response_id = None # Filter the responses by authorized states for msg in responses: # Check if the message is authenticated correctly. if msg.auth_state != msg.AuthorizationState.AUTHENTICATED: logging.warning("%s: Messages must be authenticated (Auth state %s)", msg.session_id, msg.auth_state) dropped_responses.append(msg) # Skip this message - it is invalid continue # Handle retransmissions if msg.response_id == old_response_id: continue old_response_id = msg.response_id # Check for iterators if msg.type == msg.Type.ITERATOR: if res.iterator: raise ValueError("Received multiple iterator messages at once.") res.iterator = rdf_client_action.Iterator(msg.payload) continue # Look for a status message if msg.type == msg.Type.STATUS: # Our status is set to the first status message that we see in # the responses. We ignore all other messages after that. res.status = rdf_flows.GrrStatus(msg.payload) # Check this to see if the call succeeded res.success = res.status.status == res.status.ReturnedStatus.OK # Ignore all other messages break if msg.type == msg.Type.MESSAGE: if request.HasField("request"): # Let's do some verification for requests that came from clients. if not expected_response_classes: raise RuntimeError("Client action %s does not specify out_rdfvalue." % client_action_name) else: args_rdf_name = msg.args_rdf_name if not args_rdf_name: raise RuntimeError("Deprecated message format received: " "args_rdf_name is None.") elif args_rdf_name not in [ x.__name__ for x in expected_response_classes ]: raise RuntimeError("Response type was %s but expected %s for %s." % (args_rdf_name, expected_response_classes, client_action_name)) # Use this message res.responses.append(msg.payload) if res.status is None: # This is a special case of de-synchronized messages. if dropped_responses: logging.error( "De-synchronized messages detected:\n %s", "\n".join([utils.SmartUnicode(x) for x in dropped_responses])) res.LogFlowState(responses) raise ValueError("No valid Status message.") return res
[ "def", "FromLegacyResponses", "(", "cls", ",", "request", "=", "None", ",", "responses", "=", "None", ")", ":", "res", "=", "cls", "(", ")", "res", ".", "request", "=", "request", "if", "request", ":", "res", ".", "request_data", "=", "rdf_protodict", ".", "Dict", "(", "request", ".", "data", ")", "dropped_responses", "=", "[", "]", "# The iterator that was returned as part of these responses. This should", "# be passed back to actions that expect an iterator.", "res", ".", "iterator", "=", "None", "if", "not", "responses", ":", "return", "res", "# This may not be needed if we can assume that responses are", "# returned in lexical order from the data_store.", "responses", ".", "sort", "(", "key", "=", "operator", ".", "attrgetter", "(", "\"response_id\"", ")", ")", "if", "request", ".", "HasField", "(", "\"request\"", ")", ":", "client_action_name", "=", "request", ".", "request", ".", "name", "action_registry", "=", "server_stubs", ".", "ClientActionStub", ".", "classes", "if", "client_action_name", "not", "in", "action_registry", ":", "raise", "RuntimeError", "(", "\"Got unknown client action: %s.\"", "%", "client_action_name", ")", "expected_response_classes", "=", "action_registry", "[", "client_action_name", "]", ".", "out_rdfvalues", "old_response_id", "=", "None", "# Filter the responses by authorized states", "for", "msg", "in", "responses", ":", "# Check if the message is authenticated correctly.", "if", "msg", ".", "auth_state", "!=", "msg", ".", "AuthorizationState", ".", "AUTHENTICATED", ":", "logging", ".", "warning", "(", "\"%s: Messages must be authenticated (Auth state %s)\"", ",", "msg", ".", "session_id", ",", "msg", ".", "auth_state", ")", "dropped_responses", ".", "append", "(", "msg", ")", "# Skip this message - it is invalid", "continue", "# Handle retransmissions", "if", "msg", ".", "response_id", "==", "old_response_id", ":", "continue", "old_response_id", "=", "msg", ".", "response_id", "# Check for iterators", "if", "msg", ".", "type", "==", "msg", ".", "Type", ".", "ITERATOR", ":", "if", "res", ".", "iterator", ":", "raise", "ValueError", "(", "\"Received multiple iterator messages at once.\"", ")", "res", ".", "iterator", "=", "rdf_client_action", ".", "Iterator", "(", "msg", ".", "payload", ")", "continue", "# Look for a status message", "if", "msg", ".", "type", "==", "msg", ".", "Type", ".", "STATUS", ":", "# Our status is set to the first status message that we see in", "# the responses. 
We ignore all other messages after that.", "res", ".", "status", "=", "rdf_flows", ".", "GrrStatus", "(", "msg", ".", "payload", ")", "# Check this to see if the call succeeded", "res", ".", "success", "=", "res", ".", "status", ".", "status", "==", "res", ".", "status", ".", "ReturnedStatus", ".", "OK", "# Ignore all other messages", "break", "if", "msg", ".", "type", "==", "msg", ".", "Type", ".", "MESSAGE", ":", "if", "request", ".", "HasField", "(", "\"request\"", ")", ":", "# Let's do some verification for requests that came from clients.", "if", "not", "expected_response_classes", ":", "raise", "RuntimeError", "(", "\"Client action %s does not specify out_rdfvalue.\"", "%", "client_action_name", ")", "else", ":", "args_rdf_name", "=", "msg", ".", "args_rdf_name", "if", "not", "args_rdf_name", ":", "raise", "RuntimeError", "(", "\"Deprecated message format received: \"", "\"args_rdf_name is None.\"", ")", "elif", "args_rdf_name", "not", "in", "[", "x", ".", "__name__", "for", "x", "in", "expected_response_classes", "]", ":", "raise", "RuntimeError", "(", "\"Response type was %s but expected %s for %s.\"", "%", "(", "args_rdf_name", ",", "expected_response_classes", ",", "client_action_name", ")", ")", "# Use this message", "res", ".", "responses", ".", "append", "(", "msg", ".", "payload", ")", "if", "res", ".", "status", "is", "None", ":", "# This is a special case of de-synchronized messages.", "if", "dropped_responses", ":", "logging", ".", "error", "(", "\"De-synchronized messages detected:\\n %s\"", ",", "\"\\n\"", ".", "join", "(", "[", "utils", ".", "SmartUnicode", "(", "x", ")", "for", "x", "in", "dropped_responses", "]", ")", ")", "res", ".", "LogFlowState", "(", "responses", ")", "raise", "ValueError", "(", "\"No valid Status message.\"", ")", "return", "res" ]
37.104167
20.46875
def get_transition(self, # suppress(too-many-arguments) line, line_index, column, is_escaped, comment_system_transitions, eof=False): """Get transition from DisabledParser.""" # If we are at the beginning of a line, to see if we should # disable processing from this point onward and get out - this will # happen if we reach the end of some comment block that doesn't have # an explicit end marker. We can't detect line endings here because # we want a disabled region to continue across multiple lines. if (column == 0 and comment_system_transitions.should_terminate_now( line, self._resume_waiting_for )): return (InTextParser(), 0, None) # Need to be a bit careful here, since we need to check what the # disabled parser was waiting for and disable on that, too. if (_token_at_col_in_line(line, column, "```", 3) and not _is_escaped(line, column, is_escaped)): # Hit a disable token, so we resume the old parser return (self._resume_parser((line_index, column + 3), self._resume_waiting_for), 3, None) elif self._resume_waiting_for != ParserState.EOL: wait_until_len = len(self._resume_waiting_for) if (_token_at_col_in_line(line, column, self._resume_waiting_for, wait_until_len) and not _is_escaped(line, column, is_escaped)): # Skip ahead to end of this token return (InTextParser(), len(self._waiting_until), None) elif eof: # We hit the end of the file and were still in a comment # state. Grab everything up to here. return (InTextParser(), 0, None) # Move ahead by one character otherwise return (self, 1, None)
[ "def", "get_transition", "(", "self", ",", "# suppress(too-many-arguments)", "line", ",", "line_index", ",", "column", ",", "is_escaped", ",", "comment_system_transitions", ",", "eof", "=", "False", ")", ":", "# If we are at the beginning of a line, to see if we should", "# disable processing from this point onward and get out - this will", "# happen if we reach the end of some comment block that doesn't have", "# an explicit end marker. We can't detect line endings here because", "# we want a disabled region to continue across multiple lines.", "if", "(", "column", "==", "0", "and", "comment_system_transitions", ".", "should_terminate_now", "(", "line", ",", "self", ".", "_resume_waiting_for", ")", ")", ":", "return", "(", "InTextParser", "(", ")", ",", "0", ",", "None", ")", "# Need to be a bit careful here, since we need to check what the", "# disabled parser was waiting for and disable on that, too.", "if", "(", "_token_at_col_in_line", "(", "line", ",", "column", ",", "\"```\"", ",", "3", ")", "and", "not", "_is_escaped", "(", "line", ",", "column", ",", "is_escaped", ")", ")", ":", "# Hit a disable token, so we resume the old parser", "return", "(", "self", ".", "_resume_parser", "(", "(", "line_index", ",", "column", "+", "3", ")", ",", "self", ".", "_resume_waiting_for", ")", ",", "3", ",", "None", ")", "elif", "self", ".", "_resume_waiting_for", "!=", "ParserState", ".", "EOL", ":", "wait_until_len", "=", "len", "(", "self", ".", "_resume_waiting_for", ")", "if", "(", "_token_at_col_in_line", "(", "line", ",", "column", ",", "self", ".", "_resume_waiting_for", ",", "wait_until_len", ")", "and", "not", "_is_escaped", "(", "line", ",", "column", ",", "is_escaped", ")", ")", ":", "# Skip ahead to end of this token", "return", "(", "InTextParser", "(", ")", ",", "len", "(", "self", ".", "_waiting_until", ")", ",", "None", ")", "elif", "eof", ":", "# We hit the end of the file and were still in a comment", "# state. Grab everything up to here.", "return", "(", "InTextParser", "(", ")", ",", "0", ",", "None", ")", "# Move ahead by one character otherwise", "return", "(", "self", ",", "1", ",", "None", ")" ]
45.196078
16.313725
def clear_search_defaults(self, args=None): """ Clear all search defaults specified by the list of parameter names given as ``args``. If ``args`` is not given, then clear all existing search defaults. Examples:: conn.set_search_defaults(scope=ldap.SCOPE_BASE, attrs=['cn']) conn.clear_search_defaults(['scope']) conn.clear_search_defaults() """ if args is None: self._search_defaults.clear() else: for arg in args: if arg in self._search_defaults: del self._search_defaults[arg]
[ "def", "clear_search_defaults", "(", "self", ",", "args", "=", "None", ")", ":", "if", "args", "is", "None", ":", "self", ".", "_search_defaults", ".", "clear", "(", ")", "else", ":", "for", "arg", "in", "args", ":", "if", "arg", "in", "self", ".", "_search_defaults", ":", "del", "self", ".", "_search_defaults", "[", "arg", "]" ]
34.666667
17.111111
def ungap_sequences(records, gap_chars=GAP_TABLE): """ Remove gaps from sequences, given an alignment. """ logging.info('Applying _ungap_sequences generator: removing all gap characters') for record in records: yield ungap_all(record, gap_chars)
[ "def", "ungap_sequences", "(", "records", ",", "gap_chars", "=", "GAP_TABLE", ")", ":", "logging", ".", "info", "(", "'Applying _ungap_sequences generator: removing all gap characters'", ")", "for", "record", "in", "records", ":", "yield", "ungap_all", "(", "record", ",", "gap_chars", ")" ]
38.142857
11.571429
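A self-contained illustration of the same generator pattern on plain strings; `GAP_TABLE` and `ungap_all` belong to the surrounding module, so a simple `str.translate` table stands in for them here.

# Hypothetical stand-in for GAP_TABLE: map gap characters to None (delete them).
GAPS = {ord('-'): None, ord('.'): None}

def ungap_strings(seqs, gap_table=GAPS):
    # Lazily yield each sequence with its gap characters stripped.
    for seq in seqs:
        yield seq.translate(gap_table)

print(list(ungap_strings(["AC-GT", "A..CGT"])))   # ['ACGT', 'ACGT']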
def iter_modules(self, module=None, filename=None, source=None, excludes=None): """ An iterator for the modules that are imported by the specified *module* or Python source file. The returned #ModuleInfo objects have their *imported_from* member filled in order to be able to track how a module was imported. """ if excludes is None: excludes = self.excludes if not filename: if not module: raise ValueError('need either module or filename parameter') module = self.find_module(module) if not module.filename or module.type == 'native': return else: module = ModuleInfo('__main__', filename, ModuleInfo.SRC) seen = set() stack = collections.deque() for imp in get_imports(module.filename, source): stack.appendleft((module.join_import_from(imp.name), [module.name])) yield module while stack: import_name, imported_from = stack.pop() if import_name in seen: continue seen.add(import_name) if check_module_exclude(import_name, excludes): continue module = self.find_module(import_name) module.imported_from[:] = imported_from yield module if module.type == ModuleInfo.SRC: imported_from = [module.name] + imported_from for imp in get_imports(module.filename): stack.append((module.join_import_from(imp.name), imported_from))
[ "def", "iter_modules", "(", "self", ",", "module", "=", "None", ",", "filename", "=", "None", ",", "source", "=", "None", ",", "excludes", "=", "None", ")", ":", "if", "excludes", "is", "None", ":", "excludes", "=", "self", ".", "excludes", "if", "not", "filename", ":", "if", "not", "module", ":", "raise", "ValueError", "(", "'need either module or filename parameter'", ")", "module", "=", "self", ".", "find_module", "(", "module", ")", "if", "not", "module", ".", "filename", "or", "module", ".", "type", "==", "'native'", ":", "return", "else", ":", "module", "=", "ModuleInfo", "(", "'__main__'", ",", "filename", ",", "ModuleInfo", ".", "SRC", ")", "seen", "=", "set", "(", ")", "stack", "=", "collections", ".", "deque", "(", ")", "for", "imp", "in", "get_imports", "(", "module", ".", "filename", ",", "source", ")", ":", "stack", ".", "appendleft", "(", "(", "module", ".", "join_import_from", "(", "imp", ".", "name", ")", ",", "[", "module", ".", "name", "]", ")", ")", "yield", "module", "while", "stack", ":", "import_name", ",", "imported_from", "=", "stack", ".", "pop", "(", ")", "if", "import_name", "in", "seen", ":", "continue", "seen", ".", "add", "(", "import_name", ")", "if", "check_module_exclude", "(", "import_name", ",", "excludes", ")", ":", "continue", "module", "=", "self", ".", "find_module", "(", "import_name", ")", "module", ".", "imported_from", "[", ":", "]", "=", "imported_from", "yield", "module", "if", "module", ".", "type", "==", "ModuleInfo", ".", "SRC", ":", "imported_from", "=", "[", "module", ".", "name", "]", "+", "imported_from", "for", "imp", "in", "get_imports", "(", "module", ".", "filename", ")", ":", "stack", ".", "append", "(", "(", "module", ".", "join_import_from", "(", "imp", ".", "name", ")", ",", "imported_from", ")", ")" ]
32.232558
21.813953
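The traversal keeps a `deque` of pending modules and a `seen` set while threading an `imported_from` chain through each step; a toy version over a plain dictionary of imports (module names invented) shows the same shape without the module-resolution machinery.

import collections

# Hypothetical import graph: module -> modules it imports.
GRAPH = {'app': ['utils', 'models'], 'models': ['utils'], 'utils': []}

def walk_imports(root, graph=GRAPH):
    seen = set()
    stack = collections.deque([(root, [])])
    while stack:
        name, imported_from = stack.pop()
        if name in seen:
            continue
        seen.add(name)
        yield name, imported_from
        for dep in graph.get(name, []):
            # Record how we got here, mirroring module.imported_from above.
            stack.append((dep, [name] + imported_from))

print(list(walk_imports('app')))
# [('app', []), ('models', ['app']), ('utils', ['models', 'app'])]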
def not_storable(_type): """ Helper for tagging unserializable types. Arguments: _type (type): type to be ignored. Returns: Storable: storable instance that does not poke. """ return Storable(_type, handlers=StorableHandler(poke=fake_poke, peek=fail_peek(_type)))
[ "def", "not_storable", "(", "_type", ")", ":", "return", "Storable", "(", "_type", ",", "handlers", "=", "StorableHandler", "(", "poke", "=", "fake_poke", ",", "peek", "=", "fail_peek", "(", "_type", ")", ")", ")" ]
21.071429
24.357143
def indexXY(self, index): """Coordinates for the test row at *index* Re-implemented from :meth:`AbstractDragView<sparkle.gui.abstract_drag_view.AbstractDragView.indexXY>` """ # just want the top left of row selected row = index.row() if row == -1: row = self.model().rowCount() y = self.rowHeight(0)*row return 0, y
[ "def", "indexXY", "(", "self", ",", "index", ")", ":", "# just want the top left of row selected", "row", "=", "index", ".", "row", "(", ")", "if", "row", "==", "-", "1", ":", "row", "=", "self", ".", "model", "(", ")", ".", "rowCount", "(", ")", "y", "=", "self", ".", "rowHeight", "(", "0", ")", "*", "row", "return", "0", ",", "y" ]
34.727273
17.727273
def updateAxes(self, maxAxis): """Ensures that there are entries for max_axis axes in the menu (selected by default).""" if maxAxis > len(self._axisId): for i in range(len(self._axisId) + 1, maxAxis + 1, 1): menuId =wx.NewId() self._axisId.append(menuId) self._menu.Append(menuId, "Axis %d" % i, "Select axis %d" % i, True) self._menu.Check(menuId, True) bind(self, wx.EVT_MENU, self._onMenuItemSelected, id=menuId) elif maxAxis < len(self._axisId): for menuId in self._axisId[maxAxis:]: self._menu.Delete(menuId) self._axisId = self._axisId[:maxAxis] self._toolbar.set_active(range(maxAxis))
[ "def", "updateAxes", "(", "self", ",", "maxAxis", ")", ":", "if", "maxAxis", ">", "len", "(", "self", ".", "_axisId", ")", ":", "for", "i", "in", "range", "(", "len", "(", "self", ".", "_axisId", ")", "+", "1", ",", "maxAxis", "+", "1", ",", "1", ")", ":", "menuId", "=", "wx", ".", "NewId", "(", ")", "self", ".", "_axisId", ".", "append", "(", "menuId", ")", "self", ".", "_menu", ".", "Append", "(", "menuId", ",", "\"Axis %d\"", "%", "i", ",", "\"Select axis %d\"", "%", "i", ",", "True", ")", "self", ".", "_menu", ".", "Check", "(", "menuId", ",", "True", ")", "bind", "(", "self", ",", "wx", ".", "EVT_MENU", ",", "self", ".", "_onMenuItemSelected", ",", "id", "=", "menuId", ")", "elif", "maxAxis", "<", "len", "(", "self", ".", "_axisId", ")", ":", "for", "menuId", "in", "self", ".", "_axisId", "[", "maxAxis", ":", "]", ":", "self", ".", "_menu", ".", "Delete", "(", "menuId", ")", "self", ".", "_axisId", "=", "self", ".", "_axisId", "[", ":", "maxAxis", "]", "self", ".", "_toolbar", ".", "set_active", "(", "range", "(", "maxAxis", ")", ")" ]
50
10.666667
def generate_common_reg_log_config(json_value): """Generate common logtail config from loaded json value :param json_value: :return: """ input_detail = copy.deepcopy(json_value['inputDetail']) output_detail = json_value['outputDetail'] logSample = json_value.get('logSample', '') config_name = json_value['configName'] logstore_name = output_detail['logstoreName'] endpoint = output_detail.get('endpoint', '') log_path = input_detail['logPath'] file_pattern = input_detail['filePattern'] time_format = input_detail['timeFormat'] log_begin_regex = input_detail.get('logBeginRegex', '') log_parse_regex = input_detail.get('regex', '') reg_keys = input_detail['key'] topic_format = input_detail['topicFormat'] filter_keys = input_detail['filterKey'] filter_keys_reg = input_detail['filterRegex'] log_type = input_detail.get('logType') for item in ('logPath', 'filePattern', 'timeFormat', 'logBeginRegex', 'regex', 'key', 'topicFormat', 'filterKey', 'filterRegex', 'logType'): if item in input_detail: del input_detail[item] config = CommonRegLogConfigDetail(config_name, logstore_name, endpoint, log_path, file_pattern, time_format, log_begin_regex, log_parse_regex, reg_keys, topic_format, filter_keys, filter_keys_reg, logSample, log_type, **input_detail) return config
[ "def", "generate_common_reg_log_config", "(", "json_value", ")", ":", "input_detail", "=", "copy", ".", "deepcopy", "(", "json_value", "[", "'inputDetail'", "]", ")", "output_detail", "=", "json_value", "[", "'outputDetail'", "]", "logSample", "=", "json_value", ".", "get", "(", "'logSample'", ",", "''", ")", "config_name", "=", "json_value", "[", "'configName'", "]", "logstore_name", "=", "output_detail", "[", "'logstoreName'", "]", "endpoint", "=", "output_detail", ".", "get", "(", "'endpoint'", ",", "''", ")", "log_path", "=", "input_detail", "[", "'logPath'", "]", "file_pattern", "=", "input_detail", "[", "'filePattern'", "]", "time_format", "=", "input_detail", "[", "'timeFormat'", "]", "log_begin_regex", "=", "input_detail", ".", "get", "(", "'logBeginRegex'", ",", "''", ")", "log_parse_regex", "=", "input_detail", ".", "get", "(", "'regex'", ",", "''", ")", "reg_keys", "=", "input_detail", "[", "'key'", "]", "topic_format", "=", "input_detail", "[", "'topicFormat'", "]", "filter_keys", "=", "input_detail", "[", "'filterKey'", "]", "filter_keys_reg", "=", "input_detail", "[", "'filterRegex'", "]", "log_type", "=", "input_detail", ".", "get", "(", "'logType'", ")", "for", "item", "in", "(", "'logPath'", ",", "'filePattern'", ",", "'timeFormat'", ",", "'logBeginRegex'", ",", "'regex'", ",", "'key'", ",", "'topicFormat'", ",", "'filterKey'", ",", "'filterRegex'", ",", "'logType'", ")", ":", "if", "item", "in", "input_detail", ":", "del", "input_detail", "[", "item", "]", "config", "=", "CommonRegLogConfigDetail", "(", "config_name", ",", "logstore_name", ",", "endpoint", ",", "log_path", ",", "file_pattern", ",", "time_format", ",", "log_begin_regex", ",", "log_parse_regex", ",", "reg_keys", ",", "topic_format", ",", "filter_keys", ",", "filter_keys_reg", ",", "logSample", ",", "log_type", ",", "*", "*", "input_detail", ")", "return", "config" ]
47
20.382353
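The function's main trick is pulling the keys it consumes out of a deep copy of `inputDetail` and forwarding whatever remains as keyword arguments; a tiny stand-alone sketch of that pattern with a stub class and made-up keys.

import copy

class StubConfig(object):
    # Stand-in for CommonRegLogConfigDetail: captures one known key plus extras.
    def __init__(self, log_path, **extra):
        self.log_path, self.extra = log_path, extra

detail = {'logPath': '/var/log/app', 'maxDepth': 10, 'preserve': True}
input_detail = copy.deepcopy(detail)
log_path = input_detail['logPath']
for consumed in ('logPath',):
    if consumed in input_detail:
        del input_detail[consumed]

cfg = StubConfig(log_path, **input_detail)
print(cfg.log_path, cfg.extra)   # /var/log/app {'maxDepth': 10, 'preserve': True}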
def get_search_fields(cls): """ Returns search fields in sfdict """ sfdict = {} for klass in tuple(cls.__bases__) + (cls, ): if hasattr(klass, 'search_fields'): sfdict.update(klass.search_fields) return sfdict
[ "def", "get_search_fields", "(", "cls", ")", ":", "sfdict", "=", "{", "}", "for", "klass", "in", "tuple", "(", "cls", ".", "__bases__", ")", "+", "(", "cls", ",", ")", ":", "if", "hasattr", "(", "klass", ",", "'search_fields'", ")", ":", "sfdict", ".", "update", "(", "klass", ".", "search_fields", ")", "return", "sfdict" ]
30.777778
9.222222
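A quick usage sketch: `search_fields` dictionaries declared on the base classes and on the class itself are merged, with the most derived definition winning on key collisions. The classes below are invented for illustration, and the method body is repeated so the snippet runs on its own.

class Base(object):
    search_fields = {'name': 'icontains'}

    @classmethod
    def get_search_fields(cls):
        # Same merge logic as above: direct bases first, then the class itself.
        sfdict = {}
        for klass in tuple(cls.__bases__) + (cls, ):
            if hasattr(klass, 'search_fields'):
                sfdict.update(klass.search_fields)
        return sfdict

class Article(Base):
    search_fields = {'title': 'icontains', 'name': 'iexact'}

print(Article.get_search_fields())   # {'name': 'iexact', 'title': 'icontains'}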
def _filter_cluster_data(self): """ Filter the cluster data catalog into the filtered_data catalog, which is what is shown in the H-R diagram. Filter on the values of the sliders, as well as the lasso selection in the skyviewer. """ min_temp = self.temperature_range_slider.value[0] max_temp = self.temperature_range_slider.value[1] temp_mask = np.logical_and( self.cluster.catalog['temperature'] >= min_temp, self.cluster.catalog['temperature'] <= max_temp ) min_lum = self.luminosity_range_slider.value[0] max_lum = self.luminosity_range_slider.value[1] lum_mask = np.logical_and( self.cluster.catalog['luminosity'] >= min_lum, self.cluster.catalog['luminosity'] <= max_lum ) selected_mask = np.isin(self.cluster.catalog['id'], self.selection_ids) filter_mask = temp_mask & lum_mask & selected_mask self.filtered_data = self.cluster.catalog[filter_mask].data self.source.data = { 'id': list(self.filtered_data['id']), 'temperature': list(self.filtered_data['temperature']), 'luminosity': list(self.filtered_data['luminosity']), 'color': list(self.filtered_data['color']) } logging.debug("Selected data is now: %s", self.filtered_data)
[ "def", "_filter_cluster_data", "(", "self", ")", ":", "min_temp", "=", "self", ".", "temperature_range_slider", ".", "value", "[", "0", "]", "max_temp", "=", "self", ".", "temperature_range_slider", ".", "value", "[", "1", "]", "temp_mask", "=", "np", ".", "logical_and", "(", "self", ".", "cluster", ".", "catalog", "[", "'temperature'", "]", ">=", "min_temp", ",", "self", ".", "cluster", ".", "catalog", "[", "'temperature'", "]", "<=", "max_temp", ")", "min_lum", "=", "self", ".", "luminosity_range_slider", ".", "value", "[", "0", "]", "max_lum", "=", "self", ".", "luminosity_range_slider", ".", "value", "[", "1", "]", "lum_mask", "=", "np", ".", "logical_and", "(", "self", ".", "cluster", ".", "catalog", "[", "'luminosity'", "]", ">=", "min_lum", ",", "self", ".", "cluster", ".", "catalog", "[", "'luminosity'", "]", "<=", "max_lum", ")", "selected_mask", "=", "np", ".", "isin", "(", "self", ".", "cluster", ".", "catalog", "[", "'id'", "]", ",", "self", ".", "selection_ids", ")", "filter_mask", "=", "temp_mask", "&", "lum_mask", "&", "selected_mask", "self", ".", "filtered_data", "=", "self", ".", "cluster", ".", "catalog", "[", "filter_mask", "]", ".", "data", "self", ".", "source", ".", "data", "=", "{", "'id'", ":", "list", "(", "self", ".", "filtered_data", "[", "'id'", "]", ")", ",", "'temperature'", ":", "list", "(", "self", ".", "filtered_data", "[", "'temperature'", "]", ")", ",", "'luminosity'", ":", "list", "(", "self", ".", "filtered_data", "[", "'luminosity'", "]", ")", ",", "'color'", ":", "list", "(", "self", ".", "filtered_data", "[", "'color'", "]", ")", "}", "logging", ".", "debug", "(", "\"Selected data is now: %s\"", ",", "self", ".", "filtered_data", ")" ]
38.971429
21.771429
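The filtering reduces to ANDing three boolean masks over NumPy arrays; a compact illustration with made-up temperatures, luminosities and ids.

import numpy as np

temperature = np.array([5000, 6500, 9000])
luminosity = np.array([0.5, 1.2, 30.0])
ids = np.array([1, 2, 3])

# Range masks for temperature and luminosity, plus a membership mask for ids.
temp_mask = np.logical_and(temperature >= 5500, temperature <= 10000)
lum_mask = np.logical_and(luminosity >= 1.0, luminosity <= 50.0)
selected_mask = np.isin(ids, [2, 3])

print(ids[temp_mask & lum_mask & selected_mask])   # [2 3]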
def _elect_dest_broker(self, victim_partition): """Select first under loaded brokers preferring not having partition of same topic as victim partition. """ under_loaded_brokers = sorted( [ broker for broker in self._brokers if (victim_partition not in broker.partitions and not broker.inactive and not broker.decommissioned) ], key=lambda b: len(b.partitions) ) if not under_loaded_brokers: return None broker_topic_partition_cnt = [ (broker, broker.count_partitions(victim_partition.topic)) for broker in under_loaded_brokers if victim_partition not in broker.partitions ] min_count_pair = min( broker_topic_partition_cnt, key=lambda ele: ele[1], ) return min_count_pair[0]
[ "def", "_elect_dest_broker", "(", "self", ",", "victim_partition", ")", ":", "under_loaded_brokers", "=", "sorted", "(", "[", "broker", "for", "broker", "in", "self", ".", "_brokers", "if", "(", "victim_partition", "not", "in", "broker", ".", "partitions", "and", "not", "broker", ".", "inactive", "and", "not", "broker", ".", "decommissioned", ")", "]", ",", "key", "=", "lambda", "b", ":", "len", "(", "b", ".", "partitions", ")", ")", "if", "not", "under_loaded_brokers", ":", "return", "None", "broker_topic_partition_cnt", "=", "[", "(", "broker", ",", "broker", ".", "count_partitions", "(", "victim_partition", ".", "topic", ")", ")", "for", "broker", "in", "under_loaded_brokers", "if", "victim_partition", "not", "in", "broker", ".", "partitions", "]", "min_count_pair", "=", "min", "(", "broker_topic_partition_cnt", ",", "key", "=", "lambda", "ele", ":", "ele", "[", "1", "]", ",", ")", "return", "min_count_pair", "[", "0", "]" ]
34.555556
13.481481
def satosa_logging(logger, level, message, state, **kwargs): """ Adds a session ID to the message. :type logger: logging :type level: int :type message: str :type state: satosa.state.State :param logger: Logger to use :param level: Logger level (ex: logging.DEBUG/logging.WARN/...) :param message: Message :param state: The current state :param kwargs: set exc_info=True to get an exception stack trace in the log """ if state is None: session_id = "UNKNOWN" else: try: session_id = state[LOGGER_STATE_KEY] except KeyError: session_id = uuid4().urn state[LOGGER_STATE_KEY] = session_id logger.log(level, "[{id}] {msg}".format(id=session_id, msg=message), **kwargs)
[ "def", "satosa_logging", "(", "logger", ",", "level", ",", "message", ",", "state", ",", "*", "*", "kwargs", ")", ":", "if", "state", "is", "None", ":", "session_id", "=", "\"UNKNOWN\"", "else", ":", "try", ":", "session_id", "=", "state", "[", "LOGGER_STATE_KEY", "]", "except", "KeyError", ":", "session_id", "=", "uuid4", "(", ")", ".", "urn", "state", "[", "LOGGER_STATE_KEY", "]", "=", "session_id", "logger", ".", "log", "(", "level", ",", "\"[{id}] {msg}\"", ".", "format", "(", "id", "=", "session_id", ",", "msg", "=", "message", ")", ",", "*", "*", "kwargs", ")" ]
31.791667
17.458333
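A hypothetical call sequence, using a plain dict in place of `satosa.state.State` (the function only needs item access) and assuming `satosa_logging` and the module-level `LOGGER_STATE_KEY` are in scope; the generated session ID is reused on every later line logged against the same state.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("satosa_demo")

state = {}   # stands in for satosa.state.State in this sketch
satosa_logging(logger, logging.INFO, "handling request", state)
satosa_logging(logger, logging.INFO, "handling response", state)   # same [urn:uuid:...] prefix
satosa_logging(logger, logging.WARNING, "no state available", None) # logged with id UNKNOWN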
def _add_tc_script(self): """ generates tc_script.sh and adds it to included files """ # fill context context = dict(tc_options=self.config.get('tc_options', [])) # import pdb; pdb.set_trace() contents = self._render_template('tc_script.sh', context) self.config.setdefault('files', []) # file list might be empty # add tc_script.sh to list of included files self._add_unique_file({ "path": "/tc_script.sh", "contents": contents, "mode": "755" })
[ "def", "_add_tc_script", "(", "self", ")", ":", "# fill context", "context", "=", "dict", "(", "tc_options", "=", "self", ".", "config", ".", "get", "(", "'tc_options'", ",", "[", "]", ")", ")", "# import pdb; pdb.set_trace()", "contents", "=", "self", ".", "_render_template", "(", "'tc_script.sh'", ",", "context", ")", "self", ".", "config", ".", "setdefault", "(", "'files'", ",", "[", "]", ")", "# file list might be empty", "# add tc_script.sh to list of included files", "self", ".", "_add_unique_file", "(", "{", "\"path\"", ":", "\"/tc_script.sh\"", ",", "\"contents\"", ":", "contents", ",", "\"mode\"", ":", "\"755\"", "}", ")" ]
37.133333
14.466667
def from_dict(cls, fields, mapping): """ Create a Record from a dictionary of field mappings. The *fields* object is used to determine the column indices of fields in the mapping. Args: fields: the Relation schema for the table of this record mapping: a dictionary or other mapping from field names to column values Returns: a :class:`Record` object """ iterable = [None] * len(fields) for key, value in mapping.items(): try: index = fields.index(key) except KeyError: raise ItsdbError('Invalid field name(s): ' + key) iterable[index] = value return cls(fields, iterable)
[ "def", "from_dict", "(", "cls", ",", "fields", ",", "mapping", ")", ":", "iterable", "=", "[", "None", "]", "*", "len", "(", "fields", ")", "for", "key", ",", "value", "in", "mapping", ".", "items", "(", ")", ":", "try", ":", "index", "=", "fields", ".", "index", "(", "key", ")", "except", "KeyError", ":", "raise", "ItsdbError", "(", "'Invalid field name(s): '", "+", "key", ")", "iterable", "[", "index", "]", "=", "value", "return", "cls", "(", "fields", ",", "iterable", ")" ]
34.181818
15.272727
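The core of the classmethod is mapping field names to column positions; a stripped-down sketch with a plain list of field names standing in for the Relation object, and `ValueError` standing in for `ItsdbError`.

# Hypothetical column names for a profile table.
FIELDS = ['i-id', 'i-input', 'i-length']

def row_from_dict(fields, mapping):
    # Build a row with one slot per field, filling only the mapped columns.
    row = [None] * len(fields)
    for key, value in mapping.items():
        if key not in fields:
            raise ValueError('Invalid field name(s): ' + key)
        row[fields.index(key)] = value
    return row

print(row_from_dict(FIELDS, {'i-id': 10, 'i-input': 'the dog barks'}))
# [10, 'the dog barks', None]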
def get_nameid_data(request, key=None): """ Gets the NameID Data of the Logout Request :param request: Logout Request Message :type request: string|DOMDocument :param key: The SP key :type key: string :return: Name ID Data (Value, Format, NameQualifier, SPNameQualifier) :rtype: dict """ elem = OneLogin_Saml2_XML.to_etree(request) name_id = None encrypted_entries = OneLogin_Saml2_XML.query(elem, '/samlp:LogoutRequest/saml:EncryptedID') if len(encrypted_entries) == 1: if key is None: raise OneLogin_Saml2_Error( 'Private Key is required in order to decrypt the NameID, check settings', OneLogin_Saml2_Error.PRIVATE_KEY_NOT_FOUND ) encrypted_data_nodes = OneLogin_Saml2_XML.query(elem, '/samlp:LogoutRequest/saml:EncryptedID/xenc:EncryptedData') if len(encrypted_data_nodes) == 1: encrypted_data = encrypted_data_nodes[0] name_id = OneLogin_Saml2_Utils.decrypt_element(encrypted_data, key) else: entries = OneLogin_Saml2_XML.query(elem, '/samlp:LogoutRequest/saml:NameID') if len(entries) == 1: name_id = entries[0] if name_id is None: raise OneLogin_Saml2_ValidationError( 'NameID not found in the Logout Request', OneLogin_Saml2_ValidationError.NO_NAMEID ) name_id_data = { 'Value': OneLogin_Saml2_XML.element_text(name_id) } for attr in ['Format', 'SPNameQualifier', 'NameQualifier']: if attr in name_id.attrib: name_id_data[attr] = name_id.attrib[attr] return name_id_data
[ "def", "get_nameid_data", "(", "request", ",", "key", "=", "None", ")", ":", "elem", "=", "OneLogin_Saml2_XML", ".", "to_etree", "(", "request", ")", "name_id", "=", "None", "encrypted_entries", "=", "OneLogin_Saml2_XML", ".", "query", "(", "elem", ",", "'/samlp:LogoutRequest/saml:EncryptedID'", ")", "if", "len", "(", "encrypted_entries", ")", "==", "1", ":", "if", "key", "is", "None", ":", "raise", "OneLogin_Saml2_Error", "(", "'Private Key is required in order to decrypt the NameID, check settings'", ",", "OneLogin_Saml2_Error", ".", "PRIVATE_KEY_NOT_FOUND", ")", "encrypted_data_nodes", "=", "OneLogin_Saml2_XML", ".", "query", "(", "elem", ",", "'/samlp:LogoutRequest/saml:EncryptedID/xenc:EncryptedData'", ")", "if", "len", "(", "encrypted_data_nodes", ")", "==", "1", ":", "encrypted_data", "=", "encrypted_data_nodes", "[", "0", "]", "name_id", "=", "OneLogin_Saml2_Utils", ".", "decrypt_element", "(", "encrypted_data", ",", "key", ")", "else", ":", "entries", "=", "OneLogin_Saml2_XML", ".", "query", "(", "elem", ",", "'/samlp:LogoutRequest/saml:NameID'", ")", "if", "len", "(", "entries", ")", "==", "1", ":", "name_id", "=", "entries", "[", "0", "]", "if", "name_id", "is", "None", ":", "raise", "OneLogin_Saml2_ValidationError", "(", "'NameID not found in the Logout Request'", ",", "OneLogin_Saml2_ValidationError", ".", "NO_NAMEID", ")", "name_id_data", "=", "{", "'Value'", ":", "OneLogin_Saml2_XML", ".", "element_text", "(", "name_id", ")", "}", "for", "attr", "in", "[", "'Format'", ",", "'SPNameQualifier'", ",", "'NameQualifier'", "]", ":", "if", "attr", "in", "name_id", ".", "attrib", ":", "name_id_data", "[", "attr", "]", "=", "name_id", ".", "attrib", "[", "attr", "]", "return", "name_id_data" ]
40.272727
21.636364
def combine(self, name_all=None, out_ndx=None, operation='|', defaultgroups=False): """Combine individual groups into a single one and write output. :Keywords: name_all : string Name of the combined group, ``None`` generates a name. [``None``] out_ndx : filename Name of the output file that will contain the individual groups and the combined group. If ``None`` then default from the class constructor is used. [``None``] operation : character Logical operation that is used to generate the combined group from the individual groups: "|" (OR) or "&" (AND); if set to ``False`` then no combined group is created and only the individual groups are written. ["|"] defaultgroups : bool ``True``: append everything to the default groups produced by :program:`make_ndx` (or rather, the groups provided in the ndx file on initialization --- if this was ``None`` then these are truly default groups); ``False``: only use the generated groups :Returns: ``(combinedgroup_name, output_ndx)``, a tuple showing the actual group name and the name of the file; useful when all names are autogenerated. .. Warning:: The order of the atom numbers in the combined group is *not* guaranteed to be the same as the selections on input because ``make_ndx`` sorts them ascending. Thus you should be careful when using these index files for calculations of angles and dihedrals. Use :class:`gromacs.formats.NDX` in these cases. .. SeeAlso:: :meth:`IndexBuilder.write`. """ if not operation in ('|', '&', False): raise ValueError("Illegal operation {0!r}, only '|' (OR) and '&' (AND) or False allowed.".format( operation)) if name_all is None and operation: name_all = self.name_all or operation.join(self.indexfiles) if out_ndx is None: out_ndx = self.output if defaultgroups: # make a default file (using the original ndx where provided!!) fd, default_ndx = tempfile.mkstemp(suffix='.ndx', prefix='default__') try: self.make_ndx(o=default_ndx, input=['q']) except: utilities.unlink_gmx(default_ndx) raise ndxfiles = [default_ndx] else: ndxfiles = [] ndxfiles.extend(self.indexfiles.values()) if operation: # combine multiple selections and name them try: fd, tmp_ndx = tempfile.mkstemp(suffix='.ndx', prefix='combined__') # combine all selections by loading ALL temporary index files operation = ' '+operation.strip()+' ' cmd = [operation.join(['"{0!s}"'.format(gname) for gname in self.indexfiles]), '', 'q'] rc,out,err = self.make_ndx(n=ndxfiles, o=tmp_ndx, input=cmd) if self._is_empty_group(out): warnings.warn("No atoms found for {cmd!r}".format(**vars()), category=BadParameterWarning) # second pass for naming, sigh (or: use NDX ?) groups = parse_ndxlist(out) last = groups[-1] # name this group name_cmd = ["name {0:d} {1!s}".format(last['nr'], name_all), 'q'] rc,out,err = self.make_ndx(n=tmp_ndx, o=out_ndx, input=name_cmd) # For debugging, look at out and err or set stdout=True, stderr=True # TODO: check out if at least 1 atom selected ##print "DEBUG: combine()" ##print out finally: utilities.unlink_gmx(tmp_ndx) if defaultgroups: utilities.unlink_gmx(default_ndx) else: # just write individual groups in one file (name_all --> None) rc,out,err = self.make_ndx(n=ndxfiles, o=out_ndx, input=['','q']) return name_all, out_ndx
[ "def", "combine", "(", "self", ",", "name_all", "=", "None", ",", "out_ndx", "=", "None", ",", "operation", "=", "'|'", ",", "defaultgroups", "=", "False", ")", ":", "if", "not", "operation", "in", "(", "'|'", ",", "'&'", ",", "False", ")", ":", "raise", "ValueError", "(", "\"Illegal operation {0!r}, only '|' (OR) and '&' (AND) or False allowed.\"", ".", "format", "(", "operation", ")", ")", "if", "name_all", "is", "None", "and", "operation", ":", "name_all", "=", "self", ".", "name_all", "or", "operation", ".", "join", "(", "self", ".", "indexfiles", ")", "if", "out_ndx", "is", "None", ":", "out_ndx", "=", "self", ".", "output", "if", "defaultgroups", ":", "# make a default file (using the original ndx where provided!!)", "fd", ",", "default_ndx", "=", "tempfile", ".", "mkstemp", "(", "suffix", "=", "'.ndx'", ",", "prefix", "=", "'default__'", ")", "try", ":", "self", ".", "make_ndx", "(", "o", "=", "default_ndx", ",", "input", "=", "[", "'q'", "]", ")", "except", ":", "utilities", ".", "unlink_gmx", "(", "default_ndx", ")", "raise", "ndxfiles", "=", "[", "default_ndx", "]", "else", ":", "ndxfiles", "=", "[", "]", "ndxfiles", ".", "extend", "(", "self", ".", "indexfiles", ".", "values", "(", ")", ")", "if", "operation", ":", "# combine multiple selections and name them", "try", ":", "fd", ",", "tmp_ndx", "=", "tempfile", ".", "mkstemp", "(", "suffix", "=", "'.ndx'", ",", "prefix", "=", "'combined__'", ")", "# combine all selections by loading ALL temporary index files", "operation", "=", "' '", "+", "operation", ".", "strip", "(", ")", "+", "' '", "cmd", "=", "[", "operation", ".", "join", "(", "[", "'\"{0!s}\"'", ".", "format", "(", "gname", ")", "for", "gname", "in", "self", ".", "indexfiles", "]", ")", ",", "''", ",", "'q'", "]", "rc", ",", "out", ",", "err", "=", "self", ".", "make_ndx", "(", "n", "=", "ndxfiles", ",", "o", "=", "tmp_ndx", ",", "input", "=", "cmd", ")", "if", "self", ".", "_is_empty_group", "(", "out", ")", ":", "warnings", ".", "warn", "(", "\"No atoms found for {cmd!r}\"", ".", "format", "(", "*", "*", "vars", "(", ")", ")", ",", "category", "=", "BadParameterWarning", ")", "# second pass for naming, sigh (or: use NDX ?)", "groups", "=", "parse_ndxlist", "(", "out", ")", "last", "=", "groups", "[", "-", "1", "]", "# name this group", "name_cmd", "=", "[", "\"name {0:d} {1!s}\"", ".", "format", "(", "last", "[", "'nr'", "]", ",", "name_all", ")", ",", "'q'", "]", "rc", ",", "out", ",", "err", "=", "self", ".", "make_ndx", "(", "n", "=", "tmp_ndx", ",", "o", "=", "out_ndx", ",", "input", "=", "name_cmd", ")", "# For debugging, look at out and err or set stdout=True, stderr=True", "# TODO: check out if at least 1 atom selected", "##print \"DEBUG: combine()\"", "##print out", "finally", ":", "utilities", ".", "unlink_gmx", "(", "tmp_ndx", ")", "if", "defaultgroups", ":", "utilities", ".", "unlink_gmx", "(", "default_ndx", ")", "else", ":", "# just write individual groups in one file (name_all --> None)", "rc", ",", "out", ",", "err", "=", "self", ".", "make_ndx", "(", "n", "=", "ndxfiles", ",", "o", "=", "out_ndx", ",", "input", "=", "[", "''", ",", "'q'", "]", ")", "return", "name_all", ",", "out_ndx" ]
48.367816
25.321839
def r_num(obj): """Read list of numbers.""" if isinstance(obj, (list, tuple)): it = iter else: it = LinesIterator dataset = Dataset([Dataset.FLOAT]) return dataset.load(it(obj))
[ "def", "r_num", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ")", ")", ":", "it", "=", "iter", "else", ":", "it", "=", "LinesIterator", "dataset", "=", "Dataset", "(", "[", "Dataset", ".", "FLOAT", "]", ")", "return", "dataset", ".", "load", "(", "it", "(", "obj", ")", ")" ]
25.75
13.125
def fill(self, doc_contents): """ Fill the content of the document with the information in doc_contents. This is different from the TextDocument fill function, because this will check for symbools in the values of `doc_content` and replace them to good XML codes before filling the template. Parameters ---------- doc_contents: dict Set of values to set the template document. Returns ------- filled_doc: str The content of the document with the template information filled. """ for key, content in doc_contents.items(): doc_contents[key] = replace_chars_for_svg_code(content) return super(SVGDocument, self).fill(doc_contents=doc_contents)
[ "def", "fill", "(", "self", ",", "doc_contents", ")", ":", "for", "key", ",", "content", "in", "doc_contents", ".", "items", "(", ")", ":", "doc_contents", "[", "key", "]", "=", "replace_chars_for_svg_code", "(", "content", ")", "return", "super", "(", "SVGDocument", ",", "self", ")", ".", "fill", "(", "doc_contents", "=", "doc_contents", ")" ]
38.2
23.15
def get_traceback_stxt(): """ Result is (bytes) str type on Python 2 and (unicode) str type on Python 3. """ #/ exc_cls, exc_obj, tb_obj = sys.exc_info() #/ txt_s = traceback.format_exception(exc_cls, exc_obj, tb_obj) #/ res = ''.join(txt_s) return res
[ "def", "get_traceback_stxt", "(", ")", ":", "#/", "exc_cls", ",", "exc_obj", ",", "tb_obj", "=", "sys", ".", "exc_info", "(", ")", "#/", "txt_s", "=", "traceback", ".", "format_exception", "(", "exc_cls", ",", "exc_obj", ",", "tb_obj", ")", "#/", "res", "=", "''", ".", "join", "(", "txt_s", ")", "return", "res" ]
24.142857
23
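Typical usage is inside an except block; on Python 3 the same text is also available from `traceback.format_exc()`, shown next to the explicit three-argument form used above.

import sys
import traceback

try:
    1 / 0
except ZeroDivisionError:
    exc_cls, exc_obj, tb_obj = sys.exc_info()
    explicit = ''.join(traceback.format_exception(exc_cls, exc_obj, tb_obj))
    shorthand = traceback.format_exc()
    print(explicit == shorthand)   # True: both render the same traceback text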
def _update_page_resources(*, page, font, font_key, procset): """Update this page's fonts with a reference to the Glyphless font""" if '/Resources' not in page: page['/Resources'] = pikepdf.Dictionary({}) resources = page['/Resources'] try: fonts = resources['/Font'] except KeyError: fonts = pikepdf.Dictionary({}) if font_key is not None and font_key not in fonts: fonts[font_key] = font resources['/Font'] = fonts # Reassign /ProcSet to one that just lists everything - ProcSet is # obsolete and doesn't matter but recommended for old viewer support resources['/ProcSet'] = procset
[ "def", "_update_page_resources", "(", "*", ",", "page", ",", "font", ",", "font_key", ",", "procset", ")", ":", "if", "'/Resources'", "not", "in", "page", ":", "page", "[", "'/Resources'", "]", "=", "pikepdf", ".", "Dictionary", "(", "{", "}", ")", "resources", "=", "page", "[", "'/Resources'", "]", "try", ":", "fonts", "=", "resources", "[", "'/Font'", "]", "except", "KeyError", ":", "fonts", "=", "pikepdf", ".", "Dictionary", "(", "{", "}", ")", "if", "font_key", "is", "not", "None", "and", "font_key", "not", "in", "fonts", ":", "fonts", "[", "font_key", "]", "=", "font", "resources", "[", "'/Font'", "]", "=", "fonts", "# Reassign /ProcSet to one that just lists everything - ProcSet is", "# obsolete and doesn't matter but recommended for old viewer support", "resources", "[", "'/ProcSet'", "]", "=", "procset" ]
37.764706
16.882353
def expensive_task_gen(num=8700): r""" Runs a task that takes some time Args: num (int): (default = 8700) CommandLine: python -m utool.util_alg expensive_task_gen --show Example: >>> # DISABLE_DOCTEST >>> from utool.util_alg import * # NOQA >>> import utool as ut >>> #num = 8700 >>> num = 40000 >>> with ut.Timer('expanesive task'): >>> time_list = list(ut.expensive_task_gen(num)) >>> print(sum(time_list)) >>> ut.quit_if_noshow() >>> import plottool as pt >>> #pt.plot(time_list) >>> from scipy.optimize import curve_fit >>> def func(x, a, b, c, d): >>> return a * np.exp(-c * x) + d >>> #a*x**3 + b*x**2 +c*x + d >>> y = np.array(time_list) >>> y = np.array(ut.cumsum(y)) >>> x = np.arange(len(y)) >>> #popt, pcov = curve_fit(func, x, y, p0=(1, 1e-6, 1)) >>> #print('pcov = %r' % (pcov,)) >>> #print('popt = %r' % (popt,)) >>> # http://stackoverflow.com/questions/3433486/-curve-fitting-in-python >>> pt.plt.plot(x[::num//50], y[::num//50], 'rx', label='measured data') >>> #x2 = np.arange(len(y) * 2) >>> #pt.plt.plot(x2, func(x2, *popt), 'b', label="Fitted Curve") #same as line above \/ >>> #pt.plt.legend(loc='upper left') >>> ut.show_if_requested() """ import utool as ut #time_list = [] for x in range(0, num): with ut.Timer(verbose=False) as t: ut.is_prime(x) yield t.ellapsed
[ "def", "expensive_task_gen", "(", "num", "=", "8700", ")", ":", "import", "utool", "as", "ut", "#time_list = []", "for", "x", "in", "range", "(", "0", ",", "num", ")", ":", "with", "ut", ".", "Timer", "(", "verbose", "=", "False", ")", "as", "t", ":", "ut", ".", "is_prime", "(", "x", ")", "yield", "t", ".", "ellapsed" ]
34.466667
14.4
def weekly_growth(self, weeks): """Calculate the weekly growth in percentage, and rounds to one digit. Parameters ---------- weeks Number of weeks to calculate growth over. Returns ------- growth_factor A real number such that start * growth_factor** weeks = end. Examples ------- >>> bench = DynamicExercise('Bench press', 100, 120, 3, 8) >>> bench.weekly_growth(8) 2.3 >>> bench.weekly_growth(4) 4.7 """ start, end = self.start_weight, self.final_weight growth_factor = ((end / start) ** (1 / weeks) - 1) * 100 return round(growth_factor, 1)
[ "def", "weekly_growth", "(", "self", ",", "weeks", ")", ":", "start", ",", "end", "=", "self", ".", "start_weight", ",", "self", ".", "final_weight", "growth_factor", "=", "(", "(", "end", "/", "start", ")", "**", "(", "1", "/", "weeks", ")", "-", "1", ")", "*", "100", "return", "round", "(", "growth_factor", ",", "1", ")" ]
27.307692
21.115385
def auto_init(autofile, force_init=False): """ Initialize a repo-specific configuration file to execute dgit Parameters ---------- autofile: Repo-specific configuration file (dgit.json) force_init: Flag to force to re-initialization of the configuration file """ if os.path.exists(autofile) and not force_init: try: autooptions = json.loads(open(autofile).read()) return autooptions except: print("Error in dgit.json configuration file") traceback.print_exc() raise Exception("Invalid configuration file") config = get_config() pluginmgr = plugins_get_mgr() print("Repo configuration file missing or corrupted. Creating one") print("Let us know a few details about your data repository") # Get the username username = getpass.getuser() revised = input("Please specify username [{}]".format(username)) if revised not in ["", None]: username = revised # Get the reponame thisdir = os.path.abspath(os.getcwd()) reponame = os.path.basename(thisdir) revised = input("Please specify repo name [{}]".format(reponame)) if revised not in ["", None]: reponame = revised # Get the default backend URL keys = pluginmgr.search('backend') keys = keys['backend'] keys = [k for k in keys if k[0] != "local"] remoteurl = "" backend = None if len(keys) > 0: backend = pluginmgr.get_by_key('backend', keys[0]) candidate = backend.url(username, reponame) revised = input("Please specify remote URL [{}]".format(candidate)) if revised in ["", None]: remoteurl = candidate else: remoteurl = revised # Get title... title = "" while title == "": title = input("One line summary of your repo:") if title == "": print("The repo requires a one line summary") else: break # Get description description = "" while description == "": description = input("Detailed description:") if description == "": print("The repo requires some text as well") else: break autooptions = OrderedDict([ ("username", username), ("reponame", reponame), ("remoteurl", remoteurl), ("title", title), ("description", description), ("working-directory", "."), ('track' ,OrderedDict([ ('includes', ['*.csv', '*.tsv', '*.txt','*.json', '*.xls', '*.xlsx', "*.sql", "*.hql"]), ('excludes', ['.git', '.svn', os.path.basename(autofile)]), ])), ('auto-push', False), ('pipeline' ,OrderedDict([])), ('import' ,OrderedDict([ ('directory-mapping' ,OrderedDict([ ('.', '') ])) ])), ('dependencies' ,OrderedDict([])) ]) # Gather options from each of the enabled plugins for p in ['validator', 'transformer']: keys = pluginmgr.search(p) keys = keys[p] options = OrderedDict() for k in keys: if k.name in options: continue mgr = pluginmgr.get_by_key(p, k) options[k.name] = mgr.autooptions() autooptions[p] = options keys = pluginmgr.search('metadata') keys = keys['metadata'] if len(keys) > 0: # => Select domains that be included. servers = [] for k in keys: server = pluginmgr.get_by_key('metadata', k) server = server.url.split("/")[2] servers.append(server) # Specify what should be included. Some of these should go ino # the metadata modules autooptions.update(OrderedDict([ ('metadata-management', OrderedDict([ ('servers', servers), ('include-code-history', find_executable_files()), ('include-preview', OrderedDict([ ('length', 512), ('files', ['*.txt', '*.csv', '*.tsv']) ])), ('include-data-history', True), ('include-action-history', True), ('include-validation', True), ('include-dependencies', True), ('include-schema', True), ('include-tab-diffs', True), ('include-platform', True), ]))])) with open(autofile, 'w') as fd: fd.write(json.dumps(autooptions, indent=4)) print("") print("Updated dataset specific config file: {}".format(autofile)) print("Please edit it and rerun dgit auto.") print("Tip: Consider committing dgit.json to the code repository.") #if platform.system() == "Linux": # subprocess.call(["xdg-open", autofile]) sys.exit()
[ "def", "auto_init", "(", "autofile", ",", "force_init", "=", "False", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "autofile", ")", "and", "not", "force_init", ":", "try", ":", "autooptions", "=", "json", ".", "loads", "(", "open", "(", "autofile", ")", ".", "read", "(", ")", ")", "return", "autooptions", "except", ":", "print", "(", "\"Error in dgit.json configuration file\"", ")", "traceback", ".", "print_exc", "(", ")", "raise", "Exception", "(", "\"Invalid configuration file\"", ")", "config", "=", "get_config", "(", ")", "pluginmgr", "=", "plugins_get_mgr", "(", ")", "print", "(", "\"Repo configuration file missing or corrupted. Creating one\"", ")", "print", "(", "\"Let us know a few details about your data repository\"", ")", "# Get the username", "username", "=", "getpass", ".", "getuser", "(", ")", "revised", "=", "input", "(", "\"Please specify username [{}]\"", ".", "format", "(", "username", ")", ")", "if", "revised", "not", "in", "[", "\"\"", ",", "None", "]", ":", "username", "=", "revised", "# Get the reponame", "thisdir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "getcwd", "(", ")", ")", "reponame", "=", "os", ".", "path", ".", "basename", "(", "thisdir", ")", "revised", "=", "input", "(", "\"Please specify repo name [{}]\"", ".", "format", "(", "reponame", ")", ")", "if", "revised", "not", "in", "[", "\"\"", ",", "None", "]", ":", "reponame", "=", "revised", "# Get the default backend URL", "keys", "=", "pluginmgr", ".", "search", "(", "'backend'", ")", "keys", "=", "keys", "[", "'backend'", "]", "keys", "=", "[", "k", "for", "k", "in", "keys", "if", "k", "[", "0", "]", "!=", "\"local\"", "]", "remoteurl", "=", "\"\"", "backend", "=", "None", "if", "len", "(", "keys", ")", ">", "0", ":", "backend", "=", "pluginmgr", ".", "get_by_key", "(", "'backend'", ",", "keys", "[", "0", "]", ")", "candidate", "=", "backend", ".", "url", "(", "username", ",", "reponame", ")", "revised", "=", "input", "(", "\"Please specify remote URL [{}]\"", ".", "format", "(", "candidate", ")", ")", "if", "revised", "in", "[", "\"\"", ",", "None", "]", ":", "remoteurl", "=", "candidate", "else", ":", "remoteurl", "=", "revised", "# Get title...", "title", "=", "\"\"", "while", "title", "==", "\"\"", ":", "title", "=", "input", "(", "\"One line summary of your repo:\"", ")", "if", "title", "==", "\"\"", ":", "print", "(", "\"The repo requires a one line summary\"", ")", "else", ":", "break", "# Get description", "description", "=", "\"\"", "while", "description", "==", "\"\"", ":", "description", "=", "input", "(", "\"Detailed description:\"", ")", "if", "description", "==", "\"\"", ":", "print", "(", "\"The repo requires some text as well\"", ")", "else", ":", "break", "autooptions", "=", "OrderedDict", "(", "[", "(", "\"username\"", ",", "username", ")", ",", "(", "\"reponame\"", ",", "reponame", ")", ",", "(", "\"remoteurl\"", ",", "remoteurl", ")", ",", "(", "\"title\"", ",", "title", ")", ",", "(", "\"description\"", ",", "description", ")", ",", "(", "\"working-directory\"", ",", "\".\"", ")", ",", "(", "'track'", ",", "OrderedDict", "(", "[", "(", "'includes'", ",", "[", "'*.csv'", ",", "'*.tsv'", ",", "'*.txt'", ",", "'*.json'", ",", "'*.xls'", ",", "'*.xlsx'", ",", "\"*.sql\"", ",", "\"*.hql\"", "]", ")", ",", "(", "'excludes'", ",", "[", "'.git'", ",", "'.svn'", ",", "os", ".", "path", ".", "basename", "(", "autofile", ")", "]", ")", ",", "]", ")", ")", ",", "(", "'auto-push'", ",", "False", ")", ",", "(", "'pipeline'", ",", "OrderedDict", "(", "[", "]", ")", ")", 
",", "(", "'import'", ",", "OrderedDict", "(", "[", "(", "'directory-mapping'", ",", "OrderedDict", "(", "[", "(", "'.'", ",", "''", ")", "]", ")", ")", "]", ")", ")", ",", "(", "'dependencies'", ",", "OrderedDict", "(", "[", "]", ")", ")", "]", ")", "# Gather options from each of the enabled plugins", "for", "p", "in", "[", "'validator'", ",", "'transformer'", "]", ":", "keys", "=", "pluginmgr", ".", "search", "(", "p", ")", "keys", "=", "keys", "[", "p", "]", "options", "=", "OrderedDict", "(", ")", "for", "k", "in", "keys", ":", "if", "k", ".", "name", "in", "options", ":", "continue", "mgr", "=", "pluginmgr", ".", "get_by_key", "(", "p", ",", "k", ")", "options", "[", "k", ".", "name", "]", "=", "mgr", ".", "autooptions", "(", ")", "autooptions", "[", "p", "]", "=", "options", "keys", "=", "pluginmgr", ".", "search", "(", "'metadata'", ")", "keys", "=", "keys", "[", "'metadata'", "]", "if", "len", "(", "keys", ")", ">", "0", ":", "# => Select domains that be included.", "servers", "=", "[", "]", "for", "k", "in", "keys", ":", "server", "=", "pluginmgr", ".", "get_by_key", "(", "'metadata'", ",", "k", ")", "server", "=", "server", ".", "url", ".", "split", "(", "\"/\"", ")", "[", "2", "]", "servers", ".", "append", "(", "server", ")", "# Specify what should be included. Some of these should go ino", "# the metadata modules", "autooptions", ".", "update", "(", "OrderedDict", "(", "[", "(", "'metadata-management'", ",", "OrderedDict", "(", "[", "(", "'servers'", ",", "servers", ")", ",", "(", "'include-code-history'", ",", "find_executable_files", "(", ")", ")", ",", "(", "'include-preview'", ",", "OrderedDict", "(", "[", "(", "'length'", ",", "512", ")", ",", "(", "'files'", ",", "[", "'*.txt'", ",", "'*.csv'", ",", "'*.tsv'", "]", ")", "]", ")", ")", ",", "(", "'include-data-history'", ",", "True", ")", ",", "(", "'include-action-history'", ",", "True", ")", ",", "(", "'include-validation'", ",", "True", ")", ",", "(", "'include-dependencies'", ",", "True", ")", ",", "(", "'include-schema'", ",", "True", ")", ",", "(", "'include-tab-diffs'", ",", "True", ")", ",", "(", "'include-platform'", ",", "True", ")", ",", "]", ")", ")", "]", ")", ")", "with", "open", "(", "autofile", ",", "'w'", ")", "as", "fd", ":", "fd", ".", "write", "(", "json", ".", "dumps", "(", "autooptions", ",", "indent", "=", "4", ")", ")", "print", "(", "\"\"", ")", "print", "(", "\"Updated dataset specific config file: {}\"", ".", "format", "(", "autofile", ")", ")", "print", "(", "\"Please edit it and rerun dgit auto.\"", ")", "print", "(", "\"Tip: Consider committing dgit.json to the code repository.\"", ")", "#if platform.system() == \"Linux\":", "# subprocess.call([\"xdg-open\", autofile])", "sys", ".", "exit", "(", ")" ]
31.610738
18.134228
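A minimal usage sketch for the auto_init function above, assuming it is importable in the current scope; the dgit.json path follows the tool's convention and everything else is illustrative:

    import os

    autofile = os.path.join(os.getcwd(), "dgit.json")

    # First run (file missing, or force_init=True): prompts interactively for
    # username, repo name, remote URL, title and description, writes dgit.json
    # and then calls sys.exit().
    # Later runs: returns the parsed options dictionary from dgit.json.
    options = auto_init(autofile, force_init=False)
    print(options["reponame"], options["remoteurl"])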
def to_example(dictionary): """Helper: build tf.Example from (string -> int/float/str list) dictionary.""" features = {} for (k, v) in six.iteritems(dictionary): if not v: raise ValueError("Empty generated field: %s" % str((k, v))) if isinstance(v[0], six.integer_types): features[k] = tf.train.Feature(int64_list=tf.train.Int64List(value=v)) elif isinstance(v[0], float): features[k] = tf.train.Feature(float_list=tf.train.FloatList(value=v)) elif isinstance(v[0], six.string_types): if not six.PY2: # Convert in python 3. v = [bytes(x, "utf-8") for x in v] features[k] = tf.train.Feature(bytes_list=tf.train.BytesList(value=v)) elif isinstance(v[0], bytes): features[k] = tf.train.Feature(bytes_list=tf.train.BytesList(value=v)) else: raise ValueError("Value for %s is not a recognized type; v: %s type: %s" % (k, str(v[0]), str(type(v[0])))) return tf.train.Example(features=tf.train.Features(feature=features))
[ "def", "to_example", "(", "dictionary", ")", ":", "features", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "dictionary", ")", ":", "if", "not", "v", ":", "raise", "ValueError", "(", "\"Empty generated field: %s\"", "%", "str", "(", "(", "k", ",", "v", ")", ")", ")", "if", "isinstance", "(", "v", "[", "0", "]", ",", "six", ".", "integer_types", ")", ":", "features", "[", "k", "]", "=", "tf", ".", "train", ".", "Feature", "(", "int64_list", "=", "tf", ".", "train", ".", "Int64List", "(", "value", "=", "v", ")", ")", "elif", "isinstance", "(", "v", "[", "0", "]", ",", "float", ")", ":", "features", "[", "k", "]", "=", "tf", ".", "train", ".", "Feature", "(", "float_list", "=", "tf", ".", "train", ".", "FloatList", "(", "value", "=", "v", ")", ")", "elif", "isinstance", "(", "v", "[", "0", "]", ",", "six", ".", "string_types", ")", ":", "if", "not", "six", ".", "PY2", ":", "# Convert in python 3.", "v", "=", "[", "bytes", "(", "x", ",", "\"utf-8\"", ")", "for", "x", "in", "v", "]", "features", "[", "k", "]", "=", "tf", ".", "train", ".", "Feature", "(", "bytes_list", "=", "tf", ".", "train", ".", "BytesList", "(", "value", "=", "v", ")", ")", "elif", "isinstance", "(", "v", "[", "0", "]", ",", "bytes", ")", ":", "features", "[", "k", "]", "=", "tf", ".", "train", ".", "Feature", "(", "bytes_list", "=", "tf", ".", "train", ".", "BytesList", "(", "value", "=", "v", ")", ")", "else", ":", "raise", "ValueError", "(", "\"Value for %s is not a recognized type; v: %s type: %s\"", "%", "(", "k", ",", "str", "(", "v", "[", "0", "]", ")", ",", "str", "(", "type", "(", "v", "[", "0", "]", ")", ")", ")", ")", "return", "tf", ".", "train", ".", "Example", "(", "features", "=", "tf", ".", "train", ".", "Features", "(", "feature", "=", "features", ")", ")" ]
50.05
19.05
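A short usage sketch for to_example, assuming the TensorFlow 1.x-style tf.train API used in the function; the feature names and values are illustrative:

    import tensorflow as tf

    example = to_example({
        "inputs": [1, 2, 3],     # int list   -> Int64List
        "score": [0.5],          # float list -> FloatList
        "label": ["positive"],   # str list   -> BytesList (utf-8 encoded on Python 3)
    })
    serialized = example.SerializeToString()  # ready to write into a TFRecord file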
def env_set(context): """Set $ENVs to specified strings from the pypyr context. Args: context: is dictionary-like. context is mandatory. context['env']['set'] must exist. It's a dictionary. Values are strings to write to $ENV. Keys are the names of the $ENV values to which to write. For example, say input context is: key1: value1 key2: value2 key3: value3 env: set: MYVAR1: {key1} MYVAR2: before_{key3}_after MYVAR3: arbtexthere This will result in the following $ENVs: $MYVAR1 = value1 $MYVAR2 = before_value3_after $MYVAR3 = arbtexthere Note that the $ENVs are not persisted system-wide, they only exist for pypyr sub-processes, and as such for the following steps during this pypyr pipeline execution. If you set an $ENV here, don't expect to see it in your system environment variables after the pipeline finishes running. """ env_set = context['env'].get('set', None) exists = False if env_set: logger.debug("started") for k, v in env_set.items(): logger.debug(f"setting ${k} to context[{v}]") os.environ[k] = context.get_formatted_string(v) logger.info(f"set {len(env_set)} $ENVs from context.") exists = True logger.debug("done") return exists
[ "def", "env_set", "(", "context", ")", ":", "env_set", "=", "context", "[", "'env'", "]", ".", "get", "(", "'set'", ",", "None", ")", "exists", "=", "False", "if", "env_set", ":", "logger", ".", "debug", "(", "\"started\"", ")", "for", "k", ",", "v", "in", "env_set", ".", "items", "(", ")", ":", "logger", ".", "debug", "(", "f\"setting ${k} to context[{v}]\"", ")", "os", ".", "environ", "[", "k", "]", "=", "context", ".", "get_formatted_string", "(", "v", ")", "logger", ".", "info", "(", "f\"set {len(env_set)} $ENVs from context.\"", ")", "exists", "=", "True", "logger", ".", "debug", "(", "\"done\"", ")", "return", "exists" ]
31.022222
22.177778
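A minimal sketch of the env_set step in use, assuming a pypyr Context object (which provides get_formatted_string); the keys mirror the docstring example:

    import os
    from pypyr.context import Context  # assumed import path

    context = Context({
        'key1': 'value1',
        'env': {'set': {'MYVAR1': '{key1}', 'MYVAR3': 'arbtexthere'}},
    })
    if env_set(context):
        assert os.environ['MYVAR1'] == 'value1'
        assert os.environ['MYVAR3'] == 'arbtexthere'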
def new(self): # type: () -> None ''' A method to create a new UDF Logical Volume Implementation Use. Parameters: None. Returns: Nothing. ''' if self._initialized: raise pycdlibexception.PyCdlibInternalError('UDF Logical Volume Implementation Use already initialized') self.impl_id = UDFEntityID() self.impl_id.new(0, b'*pycdlib') self.num_files = 0 self.num_dirs = 1 self.min_udf_read_revision = 258 self.min_udf_write_revision = 258 self.max_udf_write_revision = 258 self.impl_use = b'\x00' * 378 # FIXME: let the user set this self._initialized = True
[ "def", "new", "(", "self", ")", ":", "# type: () -> None", "if", "self", ".", "_initialized", ":", "raise", "pycdlibexception", ".", "PyCdlibInternalError", "(", "'UDF Logical Volume Implementation Use already initialized'", ")", "self", ".", "impl_id", "=", "UDFEntityID", "(", ")", "self", ".", "impl_id", ".", "new", "(", "0", ",", "b'*pycdlib'", ")", "self", ".", "num_files", "=", "0", "self", ".", "num_dirs", "=", "1", "self", ".", "min_udf_read_revision", "=", "258", "self", ".", "min_udf_write_revision", "=", "258", "self", ".", "max_udf_write_revision", "=", "258", "self", ".", "impl_use", "=", "b'\\x00'", "*", "378", "# FIXME: let the user set this", "self", ".", "_initialized", "=", "True" ]
27.76
23.28
def route(cls, path): """A decorator to indicate that a method should be a routable HTTP endpoint. .. code-block:: python from compactor.process import Process class WebProcess(Process): @Process.route('/hello/world') def hello_world(self, handler): return handler.write('<html><title>hello world</title></html>') The handler passed to the method is a tornado RequestHandler. WARNING: This interface is alpha and may change in the future if or when we remove tornado as a compactor dependency. :param path: The endpoint to route to this method. :type path: ``str`` """ if not path.startswith('/'): raise ValueError('Routes must start with "/"') def wrap(fn): setattr(fn, cls.ROUTE_ATTRIBUTE, path) return fn return wrap
[ "def", "route", "(", "cls", ",", "path", ")", ":", "if", "not", "path", ".", "startswith", "(", "'/'", ")", ":", "raise", "ValueError", "(", "'Routes must start with \"/\"'", ")", "def", "wrap", "(", "fn", ")", ":", "setattr", "(", "fn", ",", "cls", ".", "ROUTE_ATTRIBUTE", ",", "path", ")", "return", "fn", "return", "wrap" ]
27.931034
21.965517
def c32ToB58(c32string, version=-1): """ >>> c32ToB58('SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7') '1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d' >>> c32ToB58('SM2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQVX8X0G') '3GgUssdoWh5QkoUDXKqT6LMESBDf8aqp2y' >>> c32ToB58('ST2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQYAC0RQ') 'mvWRFPELmpCHSkFQ7o9EVdCd9eXeUTa9T8' >>> c32ToB58('SN2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKP6D2ZK9') '2N8EgwcZq89akxb6mCTTKiHLVeXRpxjuy98' >>> c32ToB58('SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7', 0) '1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d' >>> c32ToB58('SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7', 5) '3GgUssdoWh5QkoUDXKqT6LMESBDf8aqp2y' >>> c32ToB58('SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7', 111) 'mvWRFPELmpCHSkFQ7o9EVdCd9eXeUTa9T8' >>> c32ToB58('SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7', 196) '2N8EgwcZq89akxb6mCTTKiHLVeXRpxjuy98' """ addr_version, addr_hash160 = c32addressDecode(c32string) bitcoin_version = None if version < 0: bitcoin_version = addr_version if ADDR_STACKS_TO_BITCOIN.get(addr_version) is not None: bitcoin_version = ADDR_STACKS_TO_BITCOIN[addr_version] else: bitcoin_version = version return keylib.b58check.b58check_encode(addr_hash160.decode('hex'), bitcoin_version)
[ "def", "c32ToB58", "(", "c32string", ",", "version", "=", "-", "1", ")", ":", "addr_version", ",", "addr_hash160", "=", "c32addressDecode", "(", "c32string", ")", "bitcoin_version", "=", "None", "if", "version", "<", "0", ":", "bitcoin_version", "=", "addr_version", "if", "ADDR_STACKS_TO_BITCOIN", ".", "get", "(", "addr_version", ")", "is", "not", "None", ":", "bitcoin_version", "=", "ADDR_STACKS_TO_BITCOIN", "[", "addr_version", "]", "else", ":", "bitcoin_version", "=", "version", "return", "keylib", ".", "b58check", ".", "b58check_encode", "(", "addr_hash160", ".", "decode", "(", "'hex'", ")", ",", "bitcoin_version", ")" ]
41.225806
16.193548
def get_audits(): """Get OS hardening Secure TTY audits. :returns: dictionary of audits """ audits = [] audits.append(TemplatedFile('/etc/securetty', SecureTTYContext(), template_dir=TEMPLATES_DIR, mode=0o0400, user='root', group='root')) return audits
[ "def", "get_audits", "(", ")", ":", "audits", "=", "[", "]", "audits", ".", "append", "(", "TemplatedFile", "(", "'/etc/securetty'", ",", "SecureTTYContext", "(", ")", ",", "template_dir", "=", "TEMPLATES_DIR", ",", "mode", "=", "0o0400", ",", "user", "=", "'root'", ",", "group", "=", "'root'", ")", ")", "return", "audits" ]
33.3
19.6
def fig_network_input_structure(fig, params, bottom=0.1, top=0.9, transient=200, T=[800, 1000], Df= 0., mlab= True, NFFT=256, srate=1000, window=plt.mlab.window_hanning, noverlap=256*3/4, letters='abcde', flim=(4, 400), show_titles=True, show_xlabels=True, show_CSD=False): ''' This figure is the top part for plotting a comparison between the PD-model and the modified-PD model ''' #load spike as database networkSim = CachedNetwork(**params.networkSimParams) if analysis_params.bw: networkSim.colors = phlp.get_colors(len(networkSim.X)) # ana_params.set_PLOS_2column_fig_style(ratio=ratio) # fig = plt.figure() # fig.subplots_adjust(left=0.06, right=0.94, bottom=0.09, top=0.92, wspace=0.5, hspace=0.2) #use gridspec to get nicely aligned subplots througout panel gs1 = gridspec.GridSpec(5, 5, bottom=bottom, top=top) ############################################################################ # A part, full dot display ############################################################################ ax0 = fig.add_subplot(gs1[:, 0]) phlp.remove_axis_junk(ax0) phlp.annotate_subplot(ax0, ncols=5, nrows=1, letter=letters[0], linear_offset=0.065) x, y = networkSim.get_xy(T, fraction=1) networkSim.plot_raster(ax0, T, x, y, markersize=0.2, marker='_', alpha=1., legend=False, pop_names=True, rasterized=False) ax0.set_ylabel('population', labelpad=0.) ax0.set_xticks([800,900,1000]) if show_titles: ax0.set_title('spiking activity',va='center') if show_xlabels: ax0.set_xlabel(r'$t$ (ms)', labelpad=0.) else: ax0.set_xlabel('') ############################################################################ # B part, firing rate spectra ############################################################################ # Get the firing rate from Potjan Diesmann et al network activity #collect the spikes x is the times, y is the id of the cell. T_all=[transient, networkSim.simtime] bins = np.arange(transient, networkSim.simtime+1) x, y = networkSim.get_xy(T_all, fraction=1) # create invisible axes to position labels correctly ax_ = fig.add_subplot(gs1[:, 1]) phlp.annotate_subplot(ax_, ncols=5, nrows=1, letter=letters[1], linear_offset=0.065) if show_titles: ax_.set_title('firing rate PSD', va='center') ax_.axis('off') colors = phlp.get_colors(len(params.Y))+['k'] COUNTER = 0 label_set = False tits = ['L23E/I', 'L4E/I', 'L5E/I', 'L6E/I', 'TC'] if x['TC'].size > 0: TC = True else: TC = False BAxes = [] for i, X in enumerate(networkSim.X): if i % 2 == 0: ax1 = fig.add_subplot(gs1[COUNTER, 1]) phlp.remove_axis_junk(ax1) if x[X].size > 0: ax1.text(0.05, 0.85, tits[COUNTER], horizontalalignment='left', verticalalignment='bottom', transform=ax1.transAxes) BAxes.append(ax1) #firing rate histogram hist = np.histogram(x[X], bins=bins)[0].astype(float) hist -= hist.mean() if mlab: Pxx, freqs=plt.mlab.psd(hist, NFFT=NFFT, Fs=srate, noverlap=noverlap, window=window) else: [freqs, Pxx] = hlp.powerspec([hist], tbin= 1., Df=Df, pointProcess=False) mask = np.where(freqs >= 0.) freqs = freqs[mask] Pxx = Pxx.flatten() Pxx = Pxx[mask] Pxx = Pxx/(T_all[1]-T_all[0])**2 if x[X].size > 0: ax1.loglog(freqs[1:], Pxx[1:], label=X, color=colors[i], clip_on=True) ax1.axis(ax1.axis('tight')) ax1.set_ylim([5E-4,5E2]) ax1.set_yticks([1E-3,1E-1,1E1]) if label_set == False: ax1.set_ylabel(r'(s$^{-2}$/Hz)', labelpad=0.) label_set = True if i > 1: ax1.set_yticklabels([]) if i >= 6 and not TC and show_xlabels or X == 'TC' and TC and show_xlabels: ax1.set_xlabel('$f$ (Hz)', labelpad=0.) 
if TC and i < 8 or not TC and i < 6: ax1.set_xticklabels([]) else: ax1.axis('off') ax1.set_xlim(flim) if i % 2 == 0: COUNTER += 1 ax1.yaxis.set_minor_locator(plt.NullLocator()) ############################################################################ # c part, LFP traces and CSD color plots ############################################################################ ax2 = fig.add_subplot(gs1[:, 2]) phlp.annotate_subplot(ax2, ncols=5, nrows=1, letter=letters[2], linear_offset=0.065) phlp.remove_axis_junk(ax2) plot_signal_sum(ax2, params, fname=os.path.join(params.savefolder, 'LFPsum.h5'), unit='mV', T=T, ylim=[-1600, 40], rasterized=False) # CSD background colorplot if show_CSD: im = plot_signal_sum_colorplot(ax2, params, os.path.join(params.savefolder, 'CSDsum.h5'), unit=r'($\mu$Amm$^{-3}$)', T=[800, 1000], colorbar=False, ylim=[-1600, 40], fancy=False, cmap=plt.cm.get_cmap('bwr_r', 21), rasterized=False) cb = phlp.colorbar(fig, ax2, im, width=0.05, height=0.4, hoffset=-0.05, voffset=0.3) cb.set_label('($\mu$Amm$^{-3}$)', labelpad=0.1) ax2.set_xticks([800,900,1000]) ax2.axis(ax2.axis('tight')) if show_titles: if show_CSD: ax2.set_title('LFP & CSD', va='center') else: ax2.set_title('LFP', va='center') if show_xlabels: ax2.set_xlabel(r'$t$ (ms)', labelpad=0.) else: ax2.set_xlabel('') ############################################################################ # d part, LFP power trace for each layer ############################################################################ freqs, PSD = calc_signal_power(params, fname=os.path.join(params.savefolder, 'LFPsum.h5'), transient=transient, Df=Df, mlab=mlab, NFFT=NFFT, noverlap=noverlap, window=window) channels = [0, 3, 7, 11, 13] # create invisible axes to position labels correctly ax_ = fig.add_subplot(gs1[:, 3]) phlp.annotate_subplot(ax_, ncols=5, nrows=1, letter=letters[3], linear_offset=0.065) if show_titles: ax_.set_title('LFP PSD',va='center') ax_.axis('off') for i, ch in enumerate(channels): ax = fig.add_subplot(gs1[i, 3]) phlp.remove_axis_junk(ax) if i == 0: ax.set_ylabel('(mV$^2$/Hz)', labelpad=0) ax.loglog(freqs[1:],PSD[ch][1:], color='k') ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left') if i < 4: ax.set_xticklabels([]) ax.text(0.75, 0.85,'ch. %i' %(channels[i]+1), horizontalalignment='left', verticalalignment='bottom', fontsize=6, transform=ax.transAxes) ax.tick_params(axis='y', which='minor', bottom='off') ax.axis(ax.axis('tight')) ax.yaxis.set_minor_locator(plt.NullLocator()) ax.set_xlim(flim) ax.set_ylim(1E-7,2E-4) if i != 0 : ax.set_yticklabels([]) if show_xlabels: ax.set_xlabel('$f$ (Hz)', labelpad=0.) ############################################################################ # e part signal power ############################################################################ ax4 = fig.add_subplot(gs1[:, 4]) phlp.annotate_subplot(ax4, ncols=5, nrows=1, letter=letters[4], linear_offset=0.065) fname=os.path.join(params.savefolder, 'LFPsum.h5') im = plot_signal_power_colorplot(ax4, params, fname=fname, transient=transient, Df=Df, mlab=mlab, NFFT=NFFT, window=window, cmap=plt.cm.get_cmap('gray_r', 12), vmin=1E-7, vmax=1E-4) phlp.remove_axis_junk(ax4) ax4.set_xlim(flim) cb = phlp.colorbar(fig, ax4, im, width=0.05, height=0.5, hoffset=-0.05, voffset=0.5) cb.set_label('(mV$^2$/Hz)', labelpad=0.1) if show_titles: ax4.set_title('LFP PSD', va='center') if show_xlabels: ax4.set_xlabel(r'$f$ (Hz)', labelpad=0.) else: ax4.set_xlabel('') return fig
[ "def", "fig_network_input_structure", "(", "fig", ",", "params", ",", "bottom", "=", "0.1", ",", "top", "=", "0.9", ",", "transient", "=", "200", ",", "T", "=", "[", "800", ",", "1000", "]", ",", "Df", "=", "0.", ",", "mlab", "=", "True", ",", "NFFT", "=", "256", ",", "srate", "=", "1000", ",", "window", "=", "plt", ".", "mlab", ".", "window_hanning", ",", "noverlap", "=", "256", "*", "3", "/", "4", ",", "letters", "=", "'abcde'", ",", "flim", "=", "(", "4", ",", "400", ")", ",", "show_titles", "=", "True", ",", "show_xlabels", "=", "True", ",", "show_CSD", "=", "False", ")", ":", "#load spike as database", "networkSim", "=", "CachedNetwork", "(", "*", "*", "params", ".", "networkSimParams", ")", "if", "analysis_params", ".", "bw", ":", "networkSim", ".", "colors", "=", "phlp", ".", "get_colors", "(", "len", "(", "networkSim", ".", "X", ")", ")", "# ana_params.set_PLOS_2column_fig_style(ratio=ratio)", "# fig = plt.figure()", "# fig.subplots_adjust(left=0.06, right=0.94, bottom=0.09, top=0.92, wspace=0.5, hspace=0.2)", "#use gridspec to get nicely aligned subplots througout panel", "gs1", "=", "gridspec", ".", "GridSpec", "(", "5", ",", "5", ",", "bottom", "=", "bottom", ",", "top", "=", "top", ")", "############################################################################ ", "# A part, full dot display", "############################################################################", "ax0", "=", "fig", ".", "add_subplot", "(", "gs1", "[", ":", ",", "0", "]", ")", "phlp", ".", "remove_axis_junk", "(", "ax0", ")", "phlp", ".", "annotate_subplot", "(", "ax0", ",", "ncols", "=", "5", ",", "nrows", "=", "1", ",", "letter", "=", "letters", "[", "0", "]", ",", "linear_offset", "=", "0.065", ")", "x", ",", "y", "=", "networkSim", ".", "get_xy", "(", "T", ",", "fraction", "=", "1", ")", "networkSim", ".", "plot_raster", "(", "ax0", ",", "T", ",", "x", ",", "y", ",", "markersize", "=", "0.2", ",", "marker", "=", "'_'", ",", "alpha", "=", "1.", ",", "legend", "=", "False", ",", "pop_names", "=", "True", ",", "rasterized", "=", "False", ")", "ax0", ".", "set_ylabel", "(", "'population'", ",", "labelpad", "=", "0.", ")", "ax0", ".", "set_xticks", "(", "[", "800", ",", "900", ",", "1000", "]", ")", "if", "show_titles", ":", "ax0", ".", "set_title", "(", "'spiking activity'", ",", "va", "=", "'center'", ")", "if", "show_xlabels", ":", "ax0", ".", "set_xlabel", "(", "r'$t$ (ms)'", ",", "labelpad", "=", "0.", ")", "else", ":", "ax0", ".", "set_xlabel", "(", "''", ")", "############################################################################", "# B part, firing rate spectra", "############################################################################", "# Get the firing rate from Potjan Diesmann et al network activity", "#collect the spikes x is the times, y is the id of the cell.", "T_all", "=", "[", "transient", ",", "networkSim", ".", "simtime", "]", "bins", "=", "np", ".", "arange", "(", "transient", ",", "networkSim", ".", "simtime", "+", "1", ")", "x", ",", "y", "=", "networkSim", ".", "get_xy", "(", "T_all", ",", "fraction", "=", "1", ")", "# create invisible axes to position labels correctly", "ax_", "=", "fig", ".", "add_subplot", "(", "gs1", "[", ":", ",", "1", "]", ")", "phlp", ".", "annotate_subplot", "(", "ax_", ",", "ncols", "=", "5", ",", "nrows", "=", "1", ",", "letter", "=", "letters", "[", "1", "]", ",", "linear_offset", "=", "0.065", ")", "if", "show_titles", ":", "ax_", ".", "set_title", "(", "'firing rate PSD'", ",", "va", "=", "'center'", ")", "ax_", ".", "axis", "(", 
"'off'", ")", "colors", "=", "phlp", ".", "get_colors", "(", "len", "(", "params", ".", "Y", ")", ")", "+", "[", "'k'", "]", "COUNTER", "=", "0", "label_set", "=", "False", "tits", "=", "[", "'L23E/I'", ",", "'L4E/I'", ",", "'L5E/I'", ",", "'L6E/I'", ",", "'TC'", "]", "if", "x", "[", "'TC'", "]", ".", "size", ">", "0", ":", "TC", "=", "True", "else", ":", "TC", "=", "False", "BAxes", "=", "[", "]", "for", "i", ",", "X", "in", "enumerate", "(", "networkSim", ".", "X", ")", ":", "if", "i", "%", "2", "==", "0", ":", "ax1", "=", "fig", ".", "add_subplot", "(", "gs1", "[", "COUNTER", ",", "1", "]", ")", "phlp", ".", "remove_axis_junk", "(", "ax1", ")", "if", "x", "[", "X", "]", ".", "size", ">", "0", ":", "ax1", ".", "text", "(", "0.05", ",", "0.85", ",", "tits", "[", "COUNTER", "]", ",", "horizontalalignment", "=", "'left'", ",", "verticalalignment", "=", "'bottom'", ",", "transform", "=", "ax1", ".", "transAxes", ")", "BAxes", ".", "append", "(", "ax1", ")", "#firing rate histogram", "hist", "=", "np", ".", "histogram", "(", "x", "[", "X", "]", ",", "bins", "=", "bins", ")", "[", "0", "]", ".", "astype", "(", "float", ")", "hist", "-=", "hist", ".", "mean", "(", ")", "if", "mlab", ":", "Pxx", ",", "freqs", "=", "plt", ".", "mlab", ".", "psd", "(", "hist", ",", "NFFT", "=", "NFFT", ",", "Fs", "=", "srate", ",", "noverlap", "=", "noverlap", ",", "window", "=", "window", ")", "else", ":", "[", "freqs", ",", "Pxx", "]", "=", "hlp", ".", "powerspec", "(", "[", "hist", "]", ",", "tbin", "=", "1.", ",", "Df", "=", "Df", ",", "pointProcess", "=", "False", ")", "mask", "=", "np", ".", "where", "(", "freqs", ">=", "0.", ")", "freqs", "=", "freqs", "[", "mask", "]", "Pxx", "=", "Pxx", ".", "flatten", "(", ")", "Pxx", "=", "Pxx", "[", "mask", "]", "Pxx", "=", "Pxx", "/", "(", "T_all", "[", "1", "]", "-", "T_all", "[", "0", "]", ")", "**", "2", "if", "x", "[", "X", "]", ".", "size", ">", "0", ":", "ax1", ".", "loglog", "(", "freqs", "[", "1", ":", "]", ",", "Pxx", "[", "1", ":", "]", ",", "label", "=", "X", ",", "color", "=", "colors", "[", "i", "]", ",", "clip_on", "=", "True", ")", "ax1", ".", "axis", "(", "ax1", ".", "axis", "(", "'tight'", ")", ")", "ax1", ".", "set_ylim", "(", "[", "5E-4", ",", "5E2", "]", ")", "ax1", ".", "set_yticks", "(", "[", "1E-3", ",", "1E-1", ",", "1E1", "]", ")", "if", "label_set", "==", "False", ":", "ax1", ".", "set_ylabel", "(", "r'(s$^{-2}$/Hz)'", ",", "labelpad", "=", "0.", ")", "label_set", "=", "True", "if", "i", ">", "1", ":", "ax1", ".", "set_yticklabels", "(", "[", "]", ")", "if", "i", ">=", "6", "and", "not", "TC", "and", "show_xlabels", "or", "X", "==", "'TC'", "and", "TC", "and", "show_xlabels", ":", "ax1", ".", "set_xlabel", "(", "'$f$ (Hz)'", ",", "labelpad", "=", "0.", ")", "if", "TC", "and", "i", "<", "8", "or", "not", "TC", "and", "i", "<", "6", ":", "ax1", ".", "set_xticklabels", "(", "[", "]", ")", "else", ":", "ax1", ".", "axis", "(", "'off'", ")", "ax1", ".", "set_xlim", "(", "flim", ")", "if", "i", "%", "2", "==", "0", ":", "COUNTER", "+=", "1", "ax1", ".", "yaxis", ".", "set_minor_locator", "(", "plt", ".", "NullLocator", "(", ")", ")", "############################################################################", "# c part, LFP traces and CSD color plots", "############################################################################", "ax2", "=", "fig", ".", "add_subplot", "(", "gs1", "[", ":", ",", "2", "]", ")", "phlp", ".", "annotate_subplot", "(", "ax2", ",", "ncols", "=", "5", ",", "nrows", "=", "1", ",", "letter", "=", "letters", "[", "2", 
"]", ",", "linear_offset", "=", "0.065", ")", "phlp", ".", "remove_axis_junk", "(", "ax2", ")", "plot_signal_sum", "(", "ax2", ",", "params", ",", "fname", "=", "os", ".", "path", ".", "join", "(", "params", ".", "savefolder", ",", "'LFPsum.h5'", ")", ",", "unit", "=", "'mV'", ",", "T", "=", "T", ",", "ylim", "=", "[", "-", "1600", ",", "40", "]", ",", "rasterized", "=", "False", ")", "# CSD background colorplot", "if", "show_CSD", ":", "im", "=", "plot_signal_sum_colorplot", "(", "ax2", ",", "params", ",", "os", ".", "path", ".", "join", "(", "params", ".", "savefolder", ",", "'CSDsum.h5'", ")", ",", "unit", "=", "r'($\\mu$Amm$^{-3}$)'", ",", "T", "=", "[", "800", ",", "1000", "]", ",", "colorbar", "=", "False", ",", "ylim", "=", "[", "-", "1600", ",", "40", "]", ",", "fancy", "=", "False", ",", "cmap", "=", "plt", ".", "cm", ".", "get_cmap", "(", "'bwr_r'", ",", "21", ")", ",", "rasterized", "=", "False", ")", "cb", "=", "phlp", ".", "colorbar", "(", "fig", ",", "ax2", ",", "im", ",", "width", "=", "0.05", ",", "height", "=", "0.4", ",", "hoffset", "=", "-", "0.05", ",", "voffset", "=", "0.3", ")", "cb", ".", "set_label", "(", "'($\\mu$Amm$^{-3}$)'", ",", "labelpad", "=", "0.1", ")", "ax2", ".", "set_xticks", "(", "[", "800", ",", "900", ",", "1000", "]", ")", "ax2", ".", "axis", "(", "ax2", ".", "axis", "(", "'tight'", ")", ")", "if", "show_titles", ":", "if", "show_CSD", ":", "ax2", ".", "set_title", "(", "'LFP & CSD'", ",", "va", "=", "'center'", ")", "else", ":", "ax2", ".", "set_title", "(", "'LFP'", ",", "va", "=", "'center'", ")", "if", "show_xlabels", ":", "ax2", ".", "set_xlabel", "(", "r'$t$ (ms)'", ",", "labelpad", "=", "0.", ")", "else", ":", "ax2", ".", "set_xlabel", "(", "''", ")", "############################################################################", "# d part, LFP power trace for each layer", "############################################################################", "freqs", ",", "PSD", "=", "calc_signal_power", "(", "params", ",", "fname", "=", "os", ".", "path", ".", "join", "(", "params", ".", "savefolder", ",", "'LFPsum.h5'", ")", ",", "transient", "=", "transient", ",", "Df", "=", "Df", ",", "mlab", "=", "mlab", ",", "NFFT", "=", "NFFT", ",", "noverlap", "=", "noverlap", ",", "window", "=", "window", ")", "channels", "=", "[", "0", ",", "3", ",", "7", ",", "11", ",", "13", "]", "# create invisible axes to position labels correctly", "ax_", "=", "fig", ".", "add_subplot", "(", "gs1", "[", ":", ",", "3", "]", ")", "phlp", ".", "annotate_subplot", "(", "ax_", ",", "ncols", "=", "5", ",", "nrows", "=", "1", ",", "letter", "=", "letters", "[", "3", "]", ",", "linear_offset", "=", "0.065", ")", "if", "show_titles", ":", "ax_", ".", "set_title", "(", "'LFP PSD'", ",", "va", "=", "'center'", ")", "ax_", ".", "axis", "(", "'off'", ")", "for", "i", ",", "ch", "in", "enumerate", "(", "channels", ")", ":", "ax", "=", "fig", ".", "add_subplot", "(", "gs1", "[", "i", ",", "3", "]", ")", "phlp", ".", "remove_axis_junk", "(", "ax", ")", "if", "i", "==", "0", ":", "ax", ".", "set_ylabel", "(", "'(mV$^2$/Hz)'", ",", "labelpad", "=", "0", ")", "ax", ".", "loglog", "(", "freqs", "[", "1", ":", "]", ",", "PSD", "[", "ch", "]", "[", "1", ":", "]", ",", "color", "=", "'k'", ")", "ax", ".", "xaxis", ".", "set_ticks_position", "(", "'bottom'", ")", "ax", ".", "yaxis", ".", "set_ticks_position", "(", "'left'", ")", "if", "i", "<", "4", ":", "ax", ".", "set_xticklabels", "(", "[", "]", ")", "ax", ".", "text", "(", "0.75", ",", "0.85", ",", "'ch. 
%i'", "%", "(", "channels", "[", "i", "]", "+", "1", ")", ",", "horizontalalignment", "=", "'left'", ",", "verticalalignment", "=", "'bottom'", ",", "fontsize", "=", "6", ",", "transform", "=", "ax", ".", "transAxes", ")", "ax", ".", "tick_params", "(", "axis", "=", "'y'", ",", "which", "=", "'minor'", ",", "bottom", "=", "'off'", ")", "ax", ".", "axis", "(", "ax", ".", "axis", "(", "'tight'", ")", ")", "ax", ".", "yaxis", ".", "set_minor_locator", "(", "plt", ".", "NullLocator", "(", ")", ")", "ax", ".", "set_xlim", "(", "flim", ")", "ax", ".", "set_ylim", "(", "1E-7", ",", "2E-4", ")", "if", "i", "!=", "0", ":", "ax", ".", "set_yticklabels", "(", "[", "]", ")", "if", "show_xlabels", ":", "ax", ".", "set_xlabel", "(", "'$f$ (Hz)'", ",", "labelpad", "=", "0.", ")", "############################################################################", "# e part signal power", "############################################################################", "ax4", "=", "fig", ".", "add_subplot", "(", "gs1", "[", ":", ",", "4", "]", ")", "phlp", ".", "annotate_subplot", "(", "ax4", ",", "ncols", "=", "5", ",", "nrows", "=", "1", ",", "letter", "=", "letters", "[", "4", "]", ",", "linear_offset", "=", "0.065", ")", "fname", "=", "os", ".", "path", ".", "join", "(", "params", ".", "savefolder", ",", "'LFPsum.h5'", ")", "im", "=", "plot_signal_power_colorplot", "(", "ax4", ",", "params", ",", "fname", "=", "fname", ",", "transient", "=", "transient", ",", "Df", "=", "Df", ",", "mlab", "=", "mlab", ",", "NFFT", "=", "NFFT", ",", "window", "=", "window", ",", "cmap", "=", "plt", ".", "cm", ".", "get_cmap", "(", "'gray_r'", ",", "12", ")", ",", "vmin", "=", "1E-7", ",", "vmax", "=", "1E-4", ")", "phlp", ".", "remove_axis_junk", "(", "ax4", ")", "ax4", ".", "set_xlim", "(", "flim", ")", "cb", "=", "phlp", ".", "colorbar", "(", "fig", ",", "ax4", ",", "im", ",", "width", "=", "0.05", ",", "height", "=", "0.5", ",", "hoffset", "=", "-", "0.05", ",", "voffset", "=", "0.5", ")", "cb", ".", "set_label", "(", "'(mV$^2$/Hz)'", ",", "labelpad", "=", "0.1", ")", "if", "show_titles", ":", "ax4", ".", "set_title", "(", "'LFP PSD'", ",", "va", "=", "'center'", ")", "if", "show_xlabels", ":", "ax4", ".", "set_xlabel", "(", "r'$f$ (Hz)'", ",", "labelpad", "=", "0.", ")", "else", ":", "ax4", ".", "set_xlabel", "(", "''", ")", "return", "fig" ]
33.895911
21.828996
def colorbar(height, length, colormap): """Return the channels of a colorbar. """ cbar = np.tile(np.arange(length) * 1.0 / (length - 1), (height, 1)) cbar = (cbar * (colormap.values.max() - colormap.values.min()) + colormap.values.min()) return colormap.colorize(cbar)
[ "def", "colorbar", "(", "height", ",", "length", ",", "colormap", ")", ":", "cbar", "=", "np", ".", "tile", "(", "np", ".", "arange", "(", "length", ")", "*", "1.0", "/", "(", "length", "-", "1", ")", ",", "(", "height", ",", "1", ")", ")", "cbar", "=", "(", "cbar", "*", "(", "colormap", ".", "values", ".", "max", "(", ")", "-", "colormap", ".", "values", ".", "min", "(", ")", ")", "+", "colormap", ".", "values", ".", "min", "(", ")", ")", "return", "colormap", ".", "colorize", "(", "cbar", ")" ]
36.75
13.5
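A brief sketch of calling colorbar, assuming a trollimage-style Colormap that exposes .values and .colorize() as the function requires; the specific colormap is illustrative:

    import numpy as np
    from trollimage.colormap import rdbu  # assumed colormap with .values and .colorize()

    # a 20-pixel-high, 256-pixel-wide bar spanning the colormap's value range
    channels = colorbar(20, 256, rdbu)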
def doigrf(lon, lat, alt, date, **kwargs): """ Calculates the interpolated (<2015) or extrapolated (>2015) main field and secular variation coefficients and passes them to the Malin and Barraclough routine (function pmag.magsyn) to calculate the field from the coefficients. Parameters: ----------- lon : east longitude in degrees (0 to 360 or -180 to 180) lat : latitude in degrees (-90 to 90) alt : height above mean sea level in km (itype = 1 assumed) date : Required date in years and decimals of a year (A.D.) Optional Parameters: ----------- coeffs : if True, then return the gh coefficients mod : model to use ('arch3k','cals3k','pfm9k','hfm10k','cals10k.2','cals10k.1b','shadif14k') arch3k (Korte et al., 2009) cals3k (Korte and Constable, 2011) cals10k.1b (Korte et al., 2011) pfm9k (Nilsson et al., 2014) hfm.OL1.A1 (Constable et al., 2016) cals10k.2 (Constable et al., 2016) shadif14k (Pavon-Carrasco et al. (2014) NB : the first four of these models, are constrained to agree with gufm1 (Jackson et al., 2000) for the past four centuries Return ----------- x : north component of the magnetic field in nT y : east component of the magnetic field in nT z : downward component of the magnetic field in nT f : total magnetic field in nT By default, igrf12 coefficients are used between 1900 and 2020 from http://www.ngdc.noaa.gov/IAGA/vmod/igrf.html. To check the results you can run the interactive program at the NGDC www.ngdc.noaa.gov/geomag-web """ from . import coefficients as cf gh, sv = [], [] colat = 90. - lat #! convert to colatitude for MB routine if lon < 0: lon = lon + 360. # ensure all positive east longitudes itype = 1 models, igrf12coeffs = cf.get_igrf12() if 'mod' in list(kwargs.keys()): if kwargs['mod'] == 'arch3k': psvmodels, psvcoeffs = cf.get_arch3k() # use ARCH3k coefficients elif kwargs['mod'] == 'cals3k': # use CALS3K_4b coefficients between -1000,1940 psvmodels, psvcoeffs = cf.get_cals3k() elif kwargs['mod'] == 'pfm9k': # use PFM9k (Nilsson et al., 2014), coefficients from -7000 to 1900 psvmodels, psvcoeffs = cf.get_pfm9k() elif kwargs['mod'] == 'hfm10k': # use HFM.OL1.A1 (Constable et al., 2016), coefficients from -8000 # to 1900 psvmodels, psvcoeffs = cf.get_hfm10k() elif kwargs['mod'] == 'cals10k.2': # use CALS10k.2 (Constable et al., 2016), coefficients from -8000 # to 1900 psvmodels, psvcoeffs = cf.get_cals10k_2() elif kwargs['mod'] == 'shadif14k': # use CALS10k.2 (Constable et al., 2016), coefficients from -8000 # to 1900 psvmodels, psvcoeffs = cf.get_shadif14k() else: # Korte and Constable, 2011; use prior to -1000, back to -8000 psvmodels, psvcoeffs = cf.get_cals10k() # use geodetic coordinates if 'models' in kwargs: if 'mod' in list(kwargs.keys()): return psvmodels, psvcoeffs else: return models, igrf12coeffs if date < -12000: print('too old') return if 'mod' in list(kwargs.keys()) and kwargs['mod'] == 'shadif14k': if date < -10000: incr = 100 else: incr = 50 model = date - date % incr gh = psvcoeffs[psvmodels.index(int(model))] sv = old_div( (psvcoeffs[psvmodels.index(int(model + incr))] - gh), float(incr)) x, y, z, f = magsyn(gh, sv, model, date, itype, alt, colat, lon) elif date < -1000: incr = 10 model = date - date % incr gh = psvcoeffs[psvmodels.index(int(model))] sv = old_div( (psvcoeffs[psvmodels.index(int(model + incr))] - gh), float(incr)) x, y, z, f = magsyn(gh, sv, model, date, itype, alt, colat, lon) elif date < 1900: if kwargs['mod'] == 'cals10k': incr = 50 else: incr = 10 model = date - date % incr gh = psvcoeffs[psvmodels.index(model)] if model + incr < 
1900: sv = old_div( (psvcoeffs[psvmodels.index(model + incr)] - gh), float(incr)) else: field2 = igrf12coeffs[models.index(1940)][0:120] sv = old_div((field2 - gh), float(1940 - model)) x, y, z, f = magsyn(gh, sv, model, date, itype, alt, colat, lon) else: model = date - date % 5 if date < 2015: gh = igrf12coeffs[models.index(model)] sv = old_div((igrf12coeffs[models.index(model + 5)] - gh), 5.) x, y, z, f = magsyn(gh, sv, model, date, itype, alt, colat, lon) else: gh = igrf12coeffs[models.index(2015)] sv = igrf12coeffs[models.index(2015.20)] x, y, z, f = magsyn(gh, sv, model, date, itype, alt, colat, lon) if 'coeffs' in list(kwargs.keys()): return gh else: return x, y, z, f
[ "def", "doigrf", "(", "lon", ",", "lat", ",", "alt", ",", "date", ",", "*", "*", "kwargs", ")", ":", "from", ".", "import", "coefficients", "as", "cf", "gh", ",", "sv", "=", "[", "]", ",", "[", "]", "colat", "=", "90.", "-", "lat", "#! convert to colatitude for MB routine", "if", "lon", "<", "0", ":", "lon", "=", "lon", "+", "360.", "# ensure all positive east longitudes", "itype", "=", "1", "models", ",", "igrf12coeffs", "=", "cf", ".", "get_igrf12", "(", ")", "if", "'mod'", "in", "list", "(", "kwargs", ".", "keys", "(", ")", ")", ":", "if", "kwargs", "[", "'mod'", "]", "==", "'arch3k'", ":", "psvmodels", ",", "psvcoeffs", "=", "cf", ".", "get_arch3k", "(", ")", "# use ARCH3k coefficients", "elif", "kwargs", "[", "'mod'", "]", "==", "'cals3k'", ":", "# use CALS3K_4b coefficients between -1000,1940", "psvmodels", ",", "psvcoeffs", "=", "cf", ".", "get_cals3k", "(", ")", "elif", "kwargs", "[", "'mod'", "]", "==", "'pfm9k'", ":", "# use PFM9k (Nilsson et al., 2014), coefficients from -7000 to 1900", "psvmodels", ",", "psvcoeffs", "=", "cf", ".", "get_pfm9k", "(", ")", "elif", "kwargs", "[", "'mod'", "]", "==", "'hfm10k'", ":", "# use HFM.OL1.A1 (Constable et al., 2016), coefficients from -8000", "# to 1900", "psvmodels", ",", "psvcoeffs", "=", "cf", ".", "get_hfm10k", "(", ")", "elif", "kwargs", "[", "'mod'", "]", "==", "'cals10k.2'", ":", "# use CALS10k.2 (Constable et al., 2016), coefficients from -8000", "# to 1900", "psvmodels", ",", "psvcoeffs", "=", "cf", ".", "get_cals10k_2", "(", ")", "elif", "kwargs", "[", "'mod'", "]", "==", "'shadif14k'", ":", "# use CALS10k.2 (Constable et al., 2016), coefficients from -8000", "# to 1900", "psvmodels", ",", "psvcoeffs", "=", "cf", ".", "get_shadif14k", "(", ")", "else", ":", "# Korte and Constable, 2011; use prior to -1000, back to -8000", "psvmodels", ",", "psvcoeffs", "=", "cf", ".", "get_cals10k", "(", ")", "# use geodetic coordinates", "if", "'models'", "in", "kwargs", ":", "if", "'mod'", "in", "list", "(", "kwargs", ".", "keys", "(", ")", ")", ":", "return", "psvmodels", ",", "psvcoeffs", "else", ":", "return", "models", ",", "igrf12coeffs", "if", "date", "<", "-", "12000", ":", "print", "(", "'too old'", ")", "return", "if", "'mod'", "in", "list", "(", "kwargs", ".", "keys", "(", ")", ")", "and", "kwargs", "[", "'mod'", "]", "==", "'shadif14k'", ":", "if", "date", "<", "-", "10000", ":", "incr", "=", "100", "else", ":", "incr", "=", "50", "model", "=", "date", "-", "date", "%", "incr", "gh", "=", "psvcoeffs", "[", "psvmodels", ".", "index", "(", "int", "(", "model", ")", ")", "]", "sv", "=", "old_div", "(", "(", "psvcoeffs", "[", "psvmodels", ".", "index", "(", "int", "(", "model", "+", "incr", ")", ")", "]", "-", "gh", ")", ",", "float", "(", "incr", ")", ")", "x", ",", "y", ",", "z", ",", "f", "=", "magsyn", "(", "gh", ",", "sv", ",", "model", ",", "date", ",", "itype", ",", "alt", ",", "colat", ",", "lon", ")", "elif", "date", "<", "-", "1000", ":", "incr", "=", "10", "model", "=", "date", "-", "date", "%", "incr", "gh", "=", "psvcoeffs", "[", "psvmodels", ".", "index", "(", "int", "(", "model", ")", ")", "]", "sv", "=", "old_div", "(", "(", "psvcoeffs", "[", "psvmodels", ".", "index", "(", "int", "(", "model", "+", "incr", ")", ")", "]", "-", "gh", ")", ",", "float", "(", "incr", ")", ")", "x", ",", "y", ",", "z", ",", "f", "=", "magsyn", "(", "gh", ",", "sv", ",", "model", ",", "date", ",", "itype", ",", "alt", ",", "colat", ",", "lon", ")", "elif", "date", "<", "1900", ":", "if", "kwargs", "[", "'mod'", "]", "==", "'cals10k'", ":", 
"incr", "=", "50", "else", ":", "incr", "=", "10", "model", "=", "date", "-", "date", "%", "incr", "gh", "=", "psvcoeffs", "[", "psvmodels", ".", "index", "(", "model", ")", "]", "if", "model", "+", "incr", "<", "1900", ":", "sv", "=", "old_div", "(", "(", "psvcoeffs", "[", "psvmodels", ".", "index", "(", "model", "+", "incr", ")", "]", "-", "gh", ")", ",", "float", "(", "incr", ")", ")", "else", ":", "field2", "=", "igrf12coeffs", "[", "models", ".", "index", "(", "1940", ")", "]", "[", "0", ":", "120", "]", "sv", "=", "old_div", "(", "(", "field2", "-", "gh", ")", ",", "float", "(", "1940", "-", "model", ")", ")", "x", ",", "y", ",", "z", ",", "f", "=", "magsyn", "(", "gh", ",", "sv", ",", "model", ",", "date", ",", "itype", ",", "alt", ",", "colat", ",", "lon", ")", "else", ":", "model", "=", "date", "-", "date", "%", "5", "if", "date", "<", "2015", ":", "gh", "=", "igrf12coeffs", "[", "models", ".", "index", "(", "model", ")", "]", "sv", "=", "old_div", "(", "(", "igrf12coeffs", "[", "models", ".", "index", "(", "model", "+", "5", ")", "]", "-", "gh", ")", ",", "5.", ")", "x", ",", "y", ",", "z", ",", "f", "=", "magsyn", "(", "gh", ",", "sv", ",", "model", ",", "date", ",", "itype", ",", "alt", ",", "colat", ",", "lon", ")", "else", ":", "gh", "=", "igrf12coeffs", "[", "models", ".", "index", "(", "2015", ")", "]", "sv", "=", "igrf12coeffs", "[", "models", ".", "index", "(", "2015.20", ")", "]", "x", ",", "y", ",", "z", ",", "f", "=", "magsyn", "(", "gh", ",", "sv", ",", "model", ",", "date", ",", "itype", ",", "alt", ",", "colat", ",", "lon", ")", "if", "'coeffs'", "in", "list", "(", "kwargs", ".", "keys", "(", ")", ")", ":", "return", "gh", "else", ":", "return", "x", ",", "y", ",", "z", ",", "f" ]
39.818898
18.685039
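A short usage sketch for doigrf, following the docstring's parameter order; the coordinates and date are illustrative:

    # Field components at 30 E, 55 N, sea level, mid-2005 (default IGRF-12 branch).
    x, y, z, f = doigrf(30., 55., 0., 2005.5)

    # Same call, but return the Gauss coefficients instead of the field components.
    gh = doigrf(30., 55., 0., 2005.5, coeffs=True)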
def path(self, which=None): """Extend ``nailgun.entity_mixins.Entity.path``. The format of the returned path depends on the value of ``which``: add_subscriptions /hosts/<id>/add_subscriptions remove_subscriptions /hosts/<id>/remove_subscriptions ``super`` is called otherwise. """ if which in ( 'add_subscriptions', 'remove_subscriptions'): return '{0}/{1}'.format( super(HostSubscription, self).path(which='base'), which ) return super(HostSubscription, self).path(which)
[ "def", "path", "(", "self", ",", "which", "=", "None", ")", ":", "if", "which", "in", "(", "'add_subscriptions'", ",", "'remove_subscriptions'", ")", ":", "return", "'{0}/{1}'", ".", "format", "(", "super", "(", "HostSubscription", ",", "self", ")", ".", "path", "(", "which", "=", "'base'", ")", ",", "which", ")", "return", "super", "(", "HostSubscription", ",", "self", ")", ".", "path", "(", "which", ")" ]
30.095238
16.904762
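A sketch of what the override above returns, on a hypothetical HostSubscription entity (a nailgun entity normally also takes a ServerConfig, omitted here):

    host_sub = HostSubscription(host=42)  # hypothetical host id
    host_sub.path('add_subscriptions')
    # -> the entity's base path with '/add_subscriptions' appended
    host_sub.path('base')
    # -> handled by the parent class, as with any other value of `which`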
def assign_objective_requisite(self, objective_id, requisite_objective_id): """Creates a requirement dependency between two ``Objectives``. arg: objective_id (osid.id.Id): the ``Id`` of the dependent ``Objective`` arg: requisite_objective_id (osid.id.Id): the ``Id`` of the required ``Objective`` raise: AlreadyExists - ``objective_id`` already mapped to ``requisite_objective_id`` raise: NotFound - ``objective_id`` or ``requisite_objective_id`` not found raise: NullArgument - ``objective_id`` or ``requisite_objective_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ requisite_type = Type(**Relationship().get_type_data('OBJECTIVE.REQUISITE')) ras = self._get_provider_manager( 'RELATIONSHIP').get_relationship_admin_session_for_family( self.get_objective_bank_id(), proxy=self._proxy) rfc = ras.get_relationship_form_for_create(objective_id, requisite_objective_id, []) rfc.set_display_name('Objective Requisite') rfc.set_description('An Objective Requisite created by the ObjectiveRequisiteAssignmentSession') rfc.set_genus_type(requisite_type) ras.create_relationship(rfc)
[ "def", "assign_objective_requisite", "(", "self", ",", "objective_id", ",", "requisite_objective_id", ")", ":", "requisite_type", "=", "Type", "(", "*", "*", "Relationship", "(", ")", ".", "get_type_data", "(", "'OBJECTIVE.REQUISITE'", ")", ")", "ras", "=", "self", ".", "_get_provider_manager", "(", "'RELATIONSHIP'", ")", ".", "get_relationship_admin_session_for_family", "(", "self", ".", "get_objective_bank_id", "(", ")", ",", "proxy", "=", "self", ".", "_proxy", ")", "rfc", "=", "ras", ".", "get_relationship_form_for_create", "(", "objective_id", ",", "requisite_objective_id", ",", "[", "]", ")", "rfc", ".", "set_display_name", "(", "'Objective Requisite'", ")", "rfc", ".", "set_description", "(", "'An Objective Requisite created by the ObjectiveRequisiteAssignmentSession'", ")", "rfc", ".", "set_genus_type", "(", "requisite_type", ")", "ras", ".", "create_relationship", "(", "rfc", ")" ]
51.321429
21.035714
def _nameFromHeaderInfo(headerInfo, isDecoy, decoyTag): """Generates a protein name from headerInfo. If "isDecoy" is True, the "decoyTag" is added to beginning of the generated protein name. :param headerInfo: dict, must contain a key "name" or "id" :param isDecoy: bool, determines if the "decoyTag" is added or not. :param decoyTag: str, a tag that identifies decoy / reverse protein entries. :returns: str, protein name """ if 'name' in headerInfo: proteinName = headerInfo['name'] else: proteinName = headerInfo['id'] if isDecoy: proteinName = ''.join((decoyTag, proteinName)) return proteinName
[ "def", "_nameFromHeaderInfo", "(", "headerInfo", ",", "isDecoy", ",", "decoyTag", ")", ":", "if", "'name'", "in", "headerInfo", ":", "proteinName", "=", "headerInfo", "[", "'name'", "]", "else", ":", "proteinName", "=", "headerInfo", "[", "'id'", "]", "if", "isDecoy", ":", "proteinName", "=", "''", ".", "join", "(", "(", "decoyTag", ",", "proteinName", ")", ")", "return", "proteinName" ]
38.411765
19.176471
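Two illustrative calls to _nameFromHeaderInfo with made-up header dictionaries:

    header = {'id': 'sp|P12345|TEST_HUMAN', 'name': 'TEST_HUMAN'}

    _nameFromHeaderInfo(header, isDecoy=False, decoyTag='[decoy]_')
    # -> 'TEST_HUMAN'   ('name' is preferred over 'id')

    _nameFromHeaderInfo({'id': 'sp|P12345|TEST_HUMAN'}, True, '[decoy]_')
    # -> '[decoy]_sp|P12345|TEST_HUMAN'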
async def stream_frames(self, frames="allframes", components=None, on_packet=None): """Stream measured frames from QTM until :func:`~qtm.QRTConnection.stream_frames_stop` is called. :param frames: Which frames to receive, possible values are 'allframes', 'frequency:n' or 'frequencydivisor:n' where n should be desired value. :param components: A list of components to receive, could be 'all' or any combination of '2d', '2dlin', '3d', '3dres', '3dnolabels', '3dnolabelsres', 'force', 'forcesingle', '6d', '6dres', '6deuler', '6deulerres', 'gazevector', 'image', 'timecode', 'skeleton', 'skeleton:global' :rtype: The string 'Ok' if successful """ if components is None: components = ["all"] else: _validate_components(components) self._protocol.set_on_packet(on_packet) cmd = "streamframes %s %s" % (frames, " ".join(components)) return await asyncio.wait_for( self._protocol.send_command(cmd), timeout=self._timeout )
[ "async", "def", "stream_frames", "(", "self", ",", "frames", "=", "\"allframes\"", ",", "components", "=", "None", ",", "on_packet", "=", "None", ")", ":", "if", "components", "is", "None", ":", "components", "=", "[", "\"all\"", "]", "else", ":", "_validate_components", "(", "components", ")", "self", ".", "_protocol", ".", "set_on_packet", "(", "on_packet", ")", "cmd", "=", "\"streamframes %s %s\"", "%", "(", "frames", ",", "\" \"", ".", "join", "(", "components", ")", ")", "return", "await", "asyncio", ".", "wait_for", "(", "self", ".", "_protocol", ".", "send_command", "(", "cmd", ")", ",", "timeout", "=", "self", ".", "_timeout", ")" ]
41.074074
25.111111
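A minimal asyncio sketch of driving stream_frames, assuming the qtm package's connect() coroutine returns a QRTConnection; the host address and component list are placeholders:

    import asyncio
    import qtm  # assumed package exposing qtm.connect()

    def on_packet(packet):
        print("frame", packet.framenumber)  # called once per streamed packet

    async def main():
        connection = await qtm.connect("127.0.0.1")  # placeholder host
        await connection.stream_frames(components=["3d"], on_packet=on_packet)
        await asyncio.sleep(5)
        await connection.stream_frames_stop()

    asyncio.get_event_loop().run_until_complete(main())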
def _onDeviceStatus(self, client, userdata, pahoMessage): """ Internal callback for device status messages, parses source device from topic string and passes the information on to the registered device status callback """ try: status = Status(pahoMessage) self.logger.debug("Received %s action from %s" % (status.action, status.clientId)) if self.deviceStatusCallback: self.deviceStatusCallback(status) except InvalidEventException as e: self.logger.critical(str(e))
[ "def", "_onDeviceStatus", "(", "self", ",", "client", ",", "userdata", ",", "pahoMessage", ")", ":", "try", ":", "status", "=", "Status", "(", "pahoMessage", ")", "self", ".", "logger", ".", "debug", "(", "\"Received %s action from %s\"", "%", "(", "status", ".", "action", ",", "status", ".", "clientId", ")", ")", "if", "self", ".", "deviceStatusCallback", ":", "self", ".", "deviceStatusCallback", "(", "status", ")", "except", "InvalidEventException", "as", "e", ":", "self", ".", "logger", ".", "critical", "(", "str", "(", "e", ")", ")" ]
47.166667
16.666667
def pipe_tail(context=None, _INPUT=None, conf=None, **kwargs): """Returns a specified number of items from the bottom of a feed. Parameters ---------- context : pipe2py.Context object _INPUT : pipe2py.modules pipe like object (iterable of items) kwargs -- terminal, if the truncation value is wired in conf : count -- length of the truncated feed, if specified literally Yields ------ _OUTPUT : items """ conf = DotDict(conf) limit = conf.get('count', func=int, **kwargs) for item in deque(_INPUT, limit): yield item
[ "def", "pipe_tail", "(", "context", "=", "None", ",", "_INPUT", "=", "None", ",", "conf", "=", "None", ",", "*", "*", "kwargs", ")", ":", "conf", "=", "DotDict", "(", "conf", ")", "limit", "=", "conf", ".", "get", "(", "'count'", ",", "func", "=", "int", ",", "*", "*", "kwargs", ")", "for", "item", "in", "deque", "(", "_INPUT", ",", "limit", ")", ":", "yield", "item" ]
29.736842
21.315789
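A quick illustration of pipe_tail's deque-based truncation, assuming a plain dict is acceptable for conf (it is wrapped in DotDict internally); the items are placeholders:

    items = [{'title': 'item %d' % i} for i in range(10)]

    # keep only the last three items of the feed
    tail = list(pipe_tail(_INPUT=iter(items), conf={'count': 3}))
    # -> the dicts for items 7, 8 and 9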
def write_summary(page, args, ifos, skyError=None, ipn=False, ipnError=False): """ Write summary of information to markup.page object page """ from pylal import antenna from lal.gpstime import gps_to_utc, LIGOTimeGPS gps = args.start_time grbdate = gps_to_utc(LIGOTimeGPS(gps))\ .strftime("%B %d %Y, %H:%M:%S %ZUTC") page.h3() page.add('Basic information') page.h3.close() if ipn: ra = [] dec = [] td1 = [] td2 = [] td3 = [] timedelay = {} search_file = '../../../S5IPN_GRB%s_search_180deg.txt' % args.grb_name for line in open(search_file): ra.append(line.split()[0]) dec.append(line.split()[1]) th1 = [ 'GPS', 'Date', 'Error Box (sq.deg.)', 'IFOs' ] td1 = [ gps, grbdate, ipnError, ifos ] th2 = [ 'RA', 'DEC' ] th3 = ['Timedelays (ms)', '', '' ] for ra_i,dec_i in zip(ra,dec): td_i = [ ra_i, dec_i ] td2.append(td_i) ifo_list = [ ifos[i*2:(i*2)+2] for i in range(int(len(ifos)/2)) ] for j in td2: for p in range(0, len(ifo_list)): for q in range(0, len(ifo_list)): pairs = [ifo_list[p], ifo_list[q]] ifo_pairs = "".join(pairs) timedelay[ifo_pairs] = antenna.timeDelay(int(gps), float(j[0]), float(j[1]), 'degree', ifo_list[p], ifo_list[q]) timedelay[ifo_pairs]="%.4f" % timedelay[ifo_pairs] if ifos == 'H1H2L1': td3.append(['H1L1: %f' % float(timedelay['H1L1'])]) if ifos == 'H1H2L1V1': td3.append(['H1L1: %f' % float(timedelay['H1L1']), 'H1V1: %f' % float(timedelay['H1V1']), 'L1V1: %f' % float(timedelay['L1V1'])]) if ifos == 'L1V1': td3.append(['L1V1: %f' % float(timedelay['L1V1'])]) page = write_table(page, th1, td1) page = write_table(page, th2, td2) page = write_table(page, th3, td3) else: ra = args.ra dec = args.dec if skyError: th = [ 'GPS', 'Date', 'RA', 'DEC', 'Sky Error', 'IFOs' ] td = [ gps, grbdate, ra, dec, skyError, ifos ] else: th = [ 'GPS', 'Date', 'RA', 'DEC', 'IFOs' ] td = [ gps, grbdate, ra, dec, ifos ] page = write_table(page, th, td) return page
[ "def", "write_summary", "(", "page", ",", "args", ",", "ifos", ",", "skyError", "=", "None", ",", "ipn", "=", "False", ",", "ipnError", "=", "False", ")", ":", "from", "pylal", "import", "antenna", "from", "lal", ".", "gpstime", "import", "gps_to_utc", ",", "LIGOTimeGPS", "gps", "=", "args", ".", "start_time", "grbdate", "=", "gps_to_utc", "(", "LIGOTimeGPS", "(", "gps", ")", ")", ".", "strftime", "(", "\"%B %d %Y, %H:%M:%S %ZUTC\"", ")", "page", ".", "h3", "(", ")", "page", ".", "add", "(", "'Basic information'", ")", "page", ".", "h3", ".", "close", "(", ")", "if", "ipn", ":", "ra", "=", "[", "]", "dec", "=", "[", "]", "td1", "=", "[", "]", "td2", "=", "[", "]", "td3", "=", "[", "]", "timedelay", "=", "{", "}", "search_file", "=", "'../../../S5IPN_GRB%s_search_180deg.txt'", "%", "args", ".", "grb_name", "for", "line", "in", "open", "(", "search_file", ")", ":", "ra", ".", "append", "(", "line", ".", "split", "(", ")", "[", "0", "]", ")", "dec", ".", "append", "(", "line", ".", "split", "(", ")", "[", "1", "]", ")", "th1", "=", "[", "'GPS'", ",", "'Date'", ",", "'Error Box (sq.deg.)'", ",", "'IFOs'", "]", "td1", "=", "[", "gps", ",", "grbdate", ",", "ipnError", ",", "ifos", "]", "th2", "=", "[", "'RA'", ",", "'DEC'", "]", "th3", "=", "[", "'Timedelays (ms)'", ",", "''", ",", "''", "]", "for", "ra_i", ",", "dec_i", "in", "zip", "(", "ra", ",", "dec", ")", ":", "td_i", "=", "[", "ra_i", ",", "dec_i", "]", "td2", ".", "append", "(", "td_i", ")", "ifo_list", "=", "[", "ifos", "[", "i", "*", "2", ":", "(", "i", "*", "2", ")", "+", "2", "]", "for", "i", "in", "range", "(", "int", "(", "len", "(", "ifos", ")", "/", "2", ")", ")", "]", "for", "j", "in", "td2", ":", "for", "p", "in", "range", "(", "0", ",", "len", "(", "ifo_list", ")", ")", ":", "for", "q", "in", "range", "(", "0", ",", "len", "(", "ifo_list", ")", ")", ":", "pairs", "=", "[", "ifo_list", "[", "p", "]", ",", "ifo_list", "[", "q", "]", "]", "ifo_pairs", "=", "\"\"", ".", "join", "(", "pairs", ")", "timedelay", "[", "ifo_pairs", "]", "=", "antenna", ".", "timeDelay", "(", "int", "(", "gps", ")", ",", "float", "(", "j", "[", "0", "]", ")", ",", "float", "(", "j", "[", "1", "]", ")", ",", "'degree'", ",", "ifo_list", "[", "p", "]", ",", "ifo_list", "[", "q", "]", ")", "timedelay", "[", "ifo_pairs", "]", "=", "\"%.4f\"", "%", "timedelay", "[", "ifo_pairs", "]", "if", "ifos", "==", "'H1H2L1'", ":", "td3", ".", "append", "(", "[", "'H1L1: %f'", "%", "float", "(", "timedelay", "[", "'H1L1'", "]", ")", "]", ")", "if", "ifos", "==", "'H1H2L1V1'", ":", "td3", ".", "append", "(", "[", "'H1L1: %f'", "%", "float", "(", "timedelay", "[", "'H1L1'", "]", ")", ",", "'H1V1: %f'", "%", "float", "(", "timedelay", "[", "'H1V1'", "]", ")", ",", "'L1V1: %f'", "%", "float", "(", "timedelay", "[", "'L1V1'", "]", ")", "]", ")", "if", "ifos", "==", "'L1V1'", ":", "td3", ".", "append", "(", "[", "'L1V1: %f'", "%", "float", "(", "timedelay", "[", "'L1V1'", "]", ")", "]", ")", "page", "=", "write_table", "(", "page", ",", "th1", ",", "td1", ")", "page", "=", "write_table", "(", "page", ",", "th2", ",", "td2", ")", "page", "=", "write_table", "(", "page", ",", "th3", ",", "td3", ")", "else", ":", "ra", "=", "args", ".", "ra", "dec", "=", "args", ".", "dec", "if", "skyError", ":", "th", "=", "[", "'GPS'", ",", "'Date'", ",", "'RA'", ",", "'DEC'", ",", "'Sky Error'", ",", "'IFOs'", "]", "td", "=", "[", "gps", ",", "grbdate", ",", "ra", ",", "dec", ",", "skyError", ",", "ifos", "]", "else", ":", "th", "=", "[", "'GPS'", ",", "'Date'", ",", "'RA'", ",", "'DEC'", 
",", "'IFOs'", "]", "td", "=", "[", "gps", ",", "grbdate", ",", "ra", ",", "dec", ",", "ifos", "]", "page", "=", "write_table", "(", "page", ",", "th", ",", "td", ")", "return", "page" ]
36.588235
18.411765
def btc_script_deserialize(script): """ Given a script (hex or bin), decode it into its list of opcodes and data. Return a list of strings and ints. Based on code in pybitcointools (https://github.com/vbuterin/pybitcointools) by Vitalik Buterin """ if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script): script = binascii.unhexlify(script) # output buffer out = [] pos = 0 while pos < len(script): # next script op... code = encoding.from_byte_to_int(script[pos]) if code == 0: # empty (OP_0) out.append(None) pos += 1 elif code <= 75: # literal numeric constant, followed by a slice of data. # push the slice of data. out.append(script[pos+1:pos+1+code]) pos += 1 + code elif code <= 78: # OP_PUSHDATA1, OP_PUSHDATA2, OP_PUSHDATA4, followed by length and data # push the data itself szsz = pow(2, code - 76) sz = encoding.decode(script[pos+szsz: pos:-1], 256) out.append(script[pos + 1 + szsz : pos + 1 + szsz + sz]) pos += 1 + szsz + sz elif code <= 96: # OP_1NEGATE, OP_RESERVED, OP_1 thru OP_16 # pass -1 for OP_1NEGATE # pass 0 for OP_RESERVED (shouldn't be used anyway) # pass 1 thru 16 for OP_1 thru OP_16 out.append(code - 80) pos += 1 else: # raw opcode out.append(code) pos += 1 # make sure each string is hex'ed out = encoding.json_changebase(out, lambda x: encoding.safe_hexlify(x)) return out
[ "def", "btc_script_deserialize", "(", "script", ")", ":", "if", "isinstance", "(", "script", ",", "str", ")", "and", "re", ".", "match", "(", "'^[0-9a-fA-F]*$'", ",", "script", ")", ":", "script", "=", "binascii", ".", "unhexlify", "(", "script", ")", "# output buffer", "out", "=", "[", "]", "pos", "=", "0", "while", "pos", "<", "len", "(", "script", ")", ":", "# next script op...", "code", "=", "encoding", ".", "from_byte_to_int", "(", "script", "[", "pos", "]", ")", "if", "code", "==", "0", ":", "# empty (OP_0)", "out", ".", "append", "(", "None", ")", "pos", "+=", "1", "elif", "code", "<=", "75", ":", "# literal numeric constant, followed by a slice of data.", "# push the slice of data.", "out", ".", "append", "(", "script", "[", "pos", "+", "1", ":", "pos", "+", "1", "+", "code", "]", ")", "pos", "+=", "1", "+", "code", "elif", "code", "<=", "78", ":", "# OP_PUSHDATA1, OP_PUSHDATA2, OP_PUSHDATA4, followed by length and data", "# push the data itself", "szsz", "=", "pow", "(", "2", ",", "code", "-", "76", ")", "sz", "=", "encoding", ".", "decode", "(", "script", "[", "pos", "+", "szsz", ":", "pos", ":", "-", "1", "]", ",", "256", ")", "out", ".", "append", "(", "script", "[", "pos", "+", "1", "+", "szsz", ":", "pos", "+", "1", "+", "szsz", "+", "sz", "]", ")", "pos", "+=", "1", "+", "szsz", "+", "sz", "elif", "code", "<=", "96", ":", "# OP_1NEGATE, OP_RESERVED, OP_1 thru OP_16", "# pass -1 for OP_1NEGATE", "# pass 0 for OP_RESERVED (shouldn't be used anyway)", "# pass 1 thru 16 for OP_1 thru OP_16", "out", ".", "append", "(", "code", "-", "80", ")", "pos", "+=", "1", "else", ":", "# raw opcode", "out", ".", "append", "(", "code", ")", "pos", "+=", "1", "# make sure each string is hex'ed", "out", "=", "encoding", ".", "json_changebase", "(", "out", ",", "lambda", "x", ":", "encoding", ".", "safe_hexlify", "(", "x", ")", ")", "return", "out" ]
30.109091
20.763636
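A minimal usage sketch for the deserializer above, assuming btc_script_deserialize and its encoding helpers are importable from the surrounding module; the script hex is a standard pay-to-pubkey-hash output with a placeholder 20-byte hash.
# Hypothetical usage: decode a standard P2PKH scriptPubKey (placeholder hash160 of zeros).
p2pkh_hex = '76a914' + '00' * 20 + '88ac'   # OP_DUP OP_HASH160 <push 20 bytes> OP_EQUALVERIFY OP_CHECKSIG
ops = btc_script_deserialize(p2pkh_hex)
# Expected shape: [118, 169, '00...00' (40 hex chars), 136, 172] -- raw opcodes stay ints, pushed data is hex-encoded.
print(ops)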
def _representative_structure_setter(self, structprop, keep_chain, clean=True, keep_chemicals=None, out_suffix='_clean', outdir=None, force_rerun=False): """Set the representative structure by 1) cleaning it and 2) copying over attributes of the original structure. The structure is copied because the chains stored may change, and cleaning it makes a new PDB file. Args: structprop (StructProp): StructProp object to set as representative keep_chain (str): Chain ID to keep clean (bool): If the PDB file should be cleaned (see ssbio.structure.utils.cleanpdb) keep_chemicals (str, list): Keep specified chemical names out_suffix (str): Suffix to append to clean PDB file outdir (str): Path to output directory Returns: StructProp: representative structure """ # Set output directory for cleaned PDB file if not outdir: outdir = self.structure_dir if not outdir: raise ValueError('Output directory must be specified') # Create new ID for this representative structure, it cannot be the same as the original one new_id = 'REP-{}'.format(structprop.id) # Remove the previously set representative structure if set to force rerun if self.structures.has_id(new_id): if force_rerun: existing = self.structures.get_by_id(new_id) self.structures.remove(existing) # If the structure is to be cleaned, and which chain to keep if clean: final_pdb = structprop.clean_structure(outdir=outdir, out_suffix=out_suffix, keep_chemicals=keep_chemicals, keep_chains=keep_chain, force_rerun=force_rerun) log.debug('{}: cleaned structure and saved new file at {}'.format(structprop.id, final_pdb)) else: final_pdb = structprop.structure_path self.representative_structure = StructProp(ident=new_id, chains=keep_chain, mapped_chains=keep_chain, structure_path=final_pdb, file_type='pdb') self.representative_chain = keep_chain self.representative_structure.update(structprop.get_dict_with_chain(chain=keep_chain), only_keys=self.__representative_structure_attributes, overwrite=True) # Save the original structure ID as an extra attribute self.representative_structure.original_structure_id = structprop.id # Also need to parse the clean structure and save its sequence.. self.representative_structure.parse_structure() # And finally add it to the list of structures self.structures.append(self.representative_structure)
[ "def", "_representative_structure_setter", "(", "self", ",", "structprop", ",", "keep_chain", ",", "clean", "=", "True", ",", "keep_chemicals", "=", "None", ",", "out_suffix", "=", "'_clean'", ",", "outdir", "=", "None", ",", "force_rerun", "=", "False", ")", ":", "# Set output directory for cleaned PDB file", "if", "not", "outdir", ":", "outdir", "=", "self", ".", "structure_dir", "if", "not", "outdir", ":", "raise", "ValueError", "(", "'Output directory must be specified'", ")", "# Create new ID for this representative structure, it cannot be the same as the original one", "new_id", "=", "'REP-{}'", ".", "format", "(", "structprop", ".", "id", ")", "# Remove the previously set representative structure if set to force rerun", "if", "self", ".", "structures", ".", "has_id", "(", "new_id", ")", ":", "if", "force_rerun", ":", "existing", "=", "self", ".", "structures", ".", "get_by_id", "(", "new_id", ")", "self", ".", "structures", ".", "remove", "(", "existing", ")", "# If the structure is to be cleaned, and which chain to keep", "if", "clean", ":", "final_pdb", "=", "structprop", ".", "clean_structure", "(", "outdir", "=", "outdir", ",", "out_suffix", "=", "out_suffix", ",", "keep_chemicals", "=", "keep_chemicals", ",", "keep_chains", "=", "keep_chain", ",", "force_rerun", "=", "force_rerun", ")", "log", ".", "debug", "(", "'{}: cleaned structure and saved new file at {}'", ".", "format", "(", "structprop", ".", "id", ",", "final_pdb", ")", ")", "else", ":", "final_pdb", "=", "structprop", ".", "structure_path", "self", ".", "representative_structure", "=", "StructProp", "(", "ident", "=", "new_id", ",", "chains", "=", "keep_chain", ",", "mapped_chains", "=", "keep_chain", ",", "structure_path", "=", "final_pdb", ",", "file_type", "=", "'pdb'", ")", "self", ".", "representative_chain", "=", "keep_chain", "self", ".", "representative_structure", ".", "update", "(", "structprop", ".", "get_dict_with_chain", "(", "chain", "=", "keep_chain", ")", ",", "only_keys", "=", "self", ".", "__representative_structure_attributes", ",", "overwrite", "=", "True", ")", "# Save the original structure ID as an extra attribute", "self", ".", "representative_structure", ".", "original_structure_id", "=", "structprop", ".", "id", "# Also need to parse the clean structure and save its sequence..", "self", ".", "representative_structure", ".", "parse_structure", "(", ")", "# And finally add it to the list of structures", "self", ".", "structures", ".", "append", "(", "self", ".", "representative_structure", ")" ]
49.542373
31.305085
def set_writer(self, writer): """ Changes the writer function to handle writing to the text edit. A writer function must have the following prototype: .. code-block:: python def write(text_edit, text, color) :param writer: write function as described above. """ if self._writer != writer and self._writer: self._writer = None if writer: self._writer = writer
[ "def", "set_writer", "(", "self", ",", "writer", ")", ":", "if", "self", ".", "_writer", "!=", "writer", "and", "self", ".", "_writer", ":", "self", ".", "_writer", "=", "None", "if", "writer", ":", "self", ".", "_writer", "=", "writer" ]
27.9375
18.9375
def run_analysis(self, argv): """Run this analysis""" args = self._parser.parse_args(argv) if not HAVE_ST: raise RuntimeError( "Trying to run fermipy analysis, but don't have ST") workdir = os.path.dirname(args.config) _config_file = self._clone_config_and_srcmaps(args.config, args.seed) gta = GTAnalysis(_config_file, logging={'verbosity': 3}, fileio={'workdir_regex': '\.xml$|\.npy$'}) gta.load_roi(args.roi_baseline) simfile = os.path.join(workdir, 'sim_%s_%s.yaml' % (args.sim, args.sim_profile)) mcube_file = "%s_%s_%06i" % (args.sim, args.sim_profile, args.seed) sim_config = utils.load_yaml(simfile) injected_source = sim_config.get('injected_source', None) if injected_source is not None: src_dict = injected_source['source_model'] src_dict['ra'] = gta.config['selection']['ra'] src_dict['dec'] = gta.config['selection']['dec'] injected_name = injected_source['name'] gta.add_source(injected_name, src_dict) gta.write_model_map(mcube_file) mc_spec_dict = dict(true_counts=gta.model_counts_spectrum(injected_name), energies=gta.energies, model=src_dict) mcspec_file = os.path.join(workdir, "mcspec_%s_%06i.yaml" % (mcube_file, args.seed)) utils.write_yaml(mc_spec_dict, mcspec_file) else: injected_name = None gta.write_roi('sim_baseline_%06i' % args.seed) test_sources = [] for profile in args.profiles: profile_path = os.path.join(workdir, 'profile_%s.yaml' % profile) test_source = load_yaml(profile_path) test_sources.append(test_source) first = args.seed last = first + args.nsims for seed in range(first, last): self._run_simulation(gta, args.roi_baseline, injected_name, test_sources, first, seed, non_null_src=args.non_null_src)
[ "def", "run_analysis", "(", "self", ",", "argv", ")", ":", "args", "=", "self", ".", "_parser", ".", "parse_args", "(", "argv", ")", "if", "not", "HAVE_ST", ":", "raise", "RuntimeError", "(", "\"Trying to run fermipy analysis, but don't have ST\"", ")", "workdir", "=", "os", ".", "path", ".", "dirname", "(", "args", ".", "config", ")", "_config_file", "=", "self", ".", "_clone_config_and_srcmaps", "(", "args", ".", "config", ",", "args", ".", "seed", ")", "gta", "=", "GTAnalysis", "(", "_config_file", ",", "logging", "=", "{", "'verbosity'", ":", "3", "}", ",", "fileio", "=", "{", "'workdir_regex'", ":", "'\\.xml$|\\.npy$'", "}", ")", "gta", ".", "load_roi", "(", "args", ".", "roi_baseline", ")", "simfile", "=", "os", ".", "path", ".", "join", "(", "workdir", ",", "'sim_%s_%s.yaml'", "%", "(", "args", ".", "sim", ",", "args", ".", "sim_profile", ")", ")", "mcube_file", "=", "\"%s_%s_%06i\"", "%", "(", "args", ".", "sim", ",", "args", ".", "sim_profile", ",", "args", ".", "seed", ")", "sim_config", "=", "utils", ".", "load_yaml", "(", "simfile", ")", "injected_source", "=", "sim_config", ".", "get", "(", "'injected_source'", ",", "None", ")", "if", "injected_source", "is", "not", "None", ":", "src_dict", "=", "injected_source", "[", "'source_model'", "]", "src_dict", "[", "'ra'", "]", "=", "gta", ".", "config", "[", "'selection'", "]", "[", "'ra'", "]", "src_dict", "[", "'dec'", "]", "=", "gta", ".", "config", "[", "'selection'", "]", "[", "'dec'", "]", "injected_name", "=", "injected_source", "[", "'name'", "]", "gta", ".", "add_source", "(", "injected_name", ",", "src_dict", ")", "gta", ".", "write_model_map", "(", "mcube_file", ")", "mc_spec_dict", "=", "dict", "(", "true_counts", "=", "gta", ".", "model_counts_spectrum", "(", "injected_name", ")", ",", "energies", "=", "gta", ".", "energies", ",", "model", "=", "src_dict", ")", "mcspec_file", "=", "os", ".", "path", ".", "join", "(", "workdir", ",", "\"mcspec_%s_%06i.yaml\"", "%", "(", "mcube_file", ",", "args", ".", "seed", ")", ")", "utils", ".", "write_yaml", "(", "mc_spec_dict", ",", "mcspec_file", ")", "else", ":", "injected_name", "=", "None", "gta", ".", "write_roi", "(", "'sim_baseline_%06i'", "%", "args", ".", "seed", ")", "test_sources", "=", "[", "]", "for", "profile", "in", "args", ".", "profiles", ":", "profile_path", "=", "os", ".", "path", ".", "join", "(", "workdir", ",", "'profile_%s.yaml'", "%", "profile", ")", "test_source", "=", "load_yaml", "(", "profile_path", ")", "test_sources", ".", "append", "(", "test_source", ")", "first", "=", "args", ".", "seed", "last", "=", "first", "+", "args", ".", "nsims", "for", "seed", "in", "range", "(", "first", ",", "last", ")", ":", "self", ".", "_run_simulation", "(", "gta", ",", "args", ".", "roi_baseline", ",", "injected_name", ",", "test_sources", ",", "first", ",", "seed", ",", "non_null_src", "=", "args", ".", "non_null_src", ")" ]
43.137255
20.019608
def namedb_get_namespace_by_preorder_hash( cur, preorder_hash, include_history=True ): """ Get a namespace by its preorder hash (regardless of whether or not it was expired.) """ select_query = "SELECT * FROM namespaces WHERE preorder_hash = ?;" namespace_rows = namedb_query_execute( cur, select_query, (preorder_hash,)) namespace_row = namespace_rows.fetchone() if namespace_row is None: # no such namespace return None namespace = {} namespace.update( namespace_row ) if include_history: hist = namedb_get_history( cur, namespace['namespace_id'] ) namespace['history'] = hist namespace = op_decanonicalize(op_get_opcode_name(namespace['op']), namespace) return namespace
[ "def", "namedb_get_namespace_by_preorder_hash", "(", "cur", ",", "preorder_hash", ",", "include_history", "=", "True", ")", ":", "select_query", "=", "\"SELECT * FROM namespaces WHERE preorder_hash = ?;\"", "namespace_rows", "=", "namedb_query_execute", "(", "cur", ",", "select_query", ",", "(", "preorder_hash", ",", ")", ")", "namespace_row", "=", "namespace_rows", ".", "fetchone", "(", ")", "if", "namespace_row", "is", "None", ":", "# no such namespace ", "return", "None", "namespace", "=", "{", "}", "namespace", ".", "update", "(", "namespace_row", ")", "if", "include_history", ":", "hist", "=", "namedb_get_history", "(", "cur", ",", "namespace", "[", "'namespace_id'", "]", ")", "namespace", "[", "'history'", "]", "=", "hist", "namespace", "=", "op_decanonicalize", "(", "op_get_opcode_name", "(", "namespace", "[", "'op'", "]", ")", ",", "namespace", ")", "return", "namespace" ]
33.590909
24.772727
def is_stalemate(self) -> bool: """Checks if the current position is a stalemate.""" if self.is_check(): return False if self.is_variant_end(): return False return not any(self.generate_legal_moves())
[ "def", "is_stalemate", "(", "self", ")", "->", "bool", ":", "if", "self", ".", "is_check", "(", ")", ":", "return", "False", "if", "self", ".", "is_variant_end", "(", ")", ":", "return", "False", "return", "not", "any", "(", "self", ".", "generate_legal_moves", "(", ")", ")" ]
27.777778
16.888889
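A quick illustration with the python-chess package, on the assumption that is_stalemate is the Board method it appears to be; the FEN below is a textbook queen-versus-king stalemate with Black to move.
import chess

# Black king h8, White king f7, White queen g6, Black to move: not in check, no legal moves.
board = chess.Board("7k/5K2/6Q1/8/8/8/8/8 b - - 0 1")
print(board.is_stalemate())   # True
print(board.is_checkmate())   # False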
def join(self): """Wait until grid finishes computing.""" self._future = False self._job.poll() self._job = None
[ "def", "join", "(", "self", ")", ":", "self", ".", "_future", "=", "False", "self", ".", "_job", ".", "poll", "(", ")", "self", ".", "_job", "=", "None" ]
28
13.8
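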
def _get_text(self): """ Get the current metadata """ device = self._get_device() if device is None: return (UNKNOWN_DEVICE, self.py3.COLOR_BAD) if not device["isReachable"] or not device["isTrusted"]: return ( self.py3.safe_format( self.format_disconnected, {"name": device["name"]} ), self.py3.COLOR_BAD, ) battery = self._get_battery() (charge, bat_status, color) = self._get_battery_status(battery) notif = self._get_notifications() (notif_size, notif_status) = self._get_notifications_status(notif) return ( self.py3.safe_format( self.format, dict( name=device["name"], charge=charge, bat_status=bat_status, notif_size=notif_size, notif_status=notif_status, ), ), color, )
[ "def", "_get_text", "(", "self", ")", ":", "device", "=", "self", ".", "_get_device", "(", ")", "if", "device", "is", "None", ":", "return", "(", "UNKNOWN_DEVICE", ",", "self", ".", "py3", ".", "COLOR_BAD", ")", "if", "not", "device", "[", "\"isReachable\"", "]", "or", "not", "device", "[", "\"isTrusted\"", "]", ":", "return", "(", "self", ".", "py3", ".", "safe_format", "(", "self", ".", "format_disconnected", ",", "{", "\"name\"", ":", "device", "[", "\"name\"", "]", "}", ")", ",", "self", ".", "py3", ".", "COLOR_BAD", ",", ")", "battery", "=", "self", ".", "_get_battery", "(", ")", "(", "charge", ",", "bat_status", ",", "color", ")", "=", "self", ".", "_get_battery_status", "(", "battery", ")", "notif", "=", "self", ".", "_get_notifications", "(", ")", "(", "notif_size", ",", "notif_status", ")", "=", "self", ".", "_get_notifications_status", "(", "notif", ")", "return", "(", "self", ".", "py3", ".", "safe_format", "(", "self", ".", "format", ",", "dict", "(", "name", "=", "device", "[", "\"name\"", "]", ",", "charge", "=", "charge", ",", "bat_status", "=", "bat_status", ",", "notif_size", "=", "notif_size", ",", "notif_status", "=", "notif_status", ",", ")", ",", ")", ",", "color", ",", ")" ]
29.485714
17.142857
def search_regexp(self): """ Define the regexp used for the search """ if ((self.season == "") and (self.episode == "")): # Find serie try: print("%s has %s seasons (the serie is %s)" % (self.tvdb.data['seriesname'], self.tvdb.get_season_number(), self.tvdb.data['status'].lower())) # print self.tvdb.data except: pass regexp = '^%s.*' % self.title.lower() elif (self.episode == ""): # Find season try: print("%s has %s episodes in season %s" % (self.tvdb.data['seriesname'], self.tvdb.get_episode_number(int(self.season)), self.season)) except: pass regexp = '^%s.*(s[0]*%s|season[\s\_\-\.]*%s).*' % (self.title.lower(), self.season, self.season) else: # Find season and episode try: print("%s S%sE%s name is \"%s\"" % (self.tvdb.data['seriesname'], self.season, self.episode, self.tvdb.get_episode(int(self.season), int(self.episode))['episodename'])) except: pass regexp = '^%s.*((s[0]*%s.*e[0]*%s)|[0]*%sx[0]*%s).*' % (self.title.lower(), self.season, self.episode, self.season, self.episode) return regexp
[ "def", "search_regexp", "(", "self", ")", ":", "if", "(", "(", "self", ".", "season", "==", "\"\"", ")", "and", "(", "self", ".", "episode", "==", "\"\"", ")", ")", ":", "# Find serie", "try", ":", "print", "(", "\"%s has %s seasons (the serie is %s)\"", "%", "(", "self", ".", "tvdb", ".", "data", "[", "'seriesname'", "]", ",", "self", ".", "tvdb", ".", "get_season_number", "(", ")", ",", "self", ".", "tvdb", ".", "data", "[", "'status'", "]", ".", "lower", "(", ")", ")", ")", "# print self.tvdb.data", "except", ":", "pass", "regexp", "=", "'^%s.*'", "%", "self", ".", "title", ".", "lower", "(", ")", "elif", "(", "self", ".", "episode", "==", "\"\"", ")", ":", "# Find season", "try", ":", "print", "(", "\"%s has %s episodes in season %s\"", "%", "(", "self", ".", "tvdb", ".", "data", "[", "'seriesname'", "]", ",", "self", ".", "tvdb", ".", "get_episode_number", "(", "int", "(", "self", ".", "season", ")", ")", ",", "self", ".", "season", ")", ")", "except", ":", "pass", "regexp", "=", "'^%s.*(s[0]*%s|season[\\s\\_\\-\\.]*%s).*'", "%", "(", "self", ".", "title", ".", "lower", "(", ")", ",", "self", ".", "season", ",", "self", ".", "season", ")", "else", ":", "# Find season and episode", "try", ":", "print", "(", "\"%s S%sE%s name is \\\"%s\\\"\"", "%", "(", "self", ".", "tvdb", ".", "data", "[", "'seriesname'", "]", ",", "self", ".", "season", ",", "self", ".", "episode", ",", "self", ".", "tvdb", ".", "get_episode", "(", "int", "(", "self", ".", "season", ")", ",", "int", "(", "self", ".", "episode", ")", ")", "[", "'episodename'", "]", ")", ")", "except", ":", "pass", "regexp", "=", "'^%s.*((s[0]*%s.*e[0]*%s)|[0]*%sx[0]*%s).*'", "%", "(", "self", ".", "title", ".", "lower", "(", ")", ",", "self", ".", "season", ",", "self", ".", "episode", ",", "self", ".", "season", ",", "self", ".", "episode", ")", "return", "regexp" ]
48
32.296296
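To make the generated patterns concrete, here is a hedged sketch of the season-and-episode branch with hypothetical title, season, and episode values; only the pattern shape comes from the method above.
import re

title, season, episode = 'doctor who', '12', '3'   # placeholder values
pattern = '^%s.*((s[0]*%s.*e[0]*%s)|[0]*%sx[0]*%s).*' % (title, season, episode, season, episode)
print(bool(re.match(pattern, 'doctor who s12e03 720p x264')))   # True
print(bool(re.match(pattern, 'doctor who 12x03 hdtv')))         # True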
def _find_frame_imports(name, frame): """ Detect imports in the frame, with the required *name*. Such imports can be considered assignments. Returns True if an import for the given name was found. """ imports = frame.nodes_of_class((astroid.Import, astroid.ImportFrom)) for import_node in imports: for import_name, import_alias in import_node.names: # If the import uses an alias, check only that. # Otherwise, check only the import name. if import_alias: if import_alias == name: return True elif import_name and import_name == name: return True return None
[ "def", "_find_frame_imports", "(", "name", ",", "frame", ")", ":", "imports", "=", "frame", ".", "nodes_of_class", "(", "(", "astroid", ".", "Import", ",", "astroid", ".", "ImportFrom", ")", ")", "for", "import_node", "in", "imports", ":", "for", "import_name", ",", "import_alias", "in", "import_node", ".", "names", ":", "# If the import uses an alias, check only that.", "# Otherwise, check only the import name.", "if", "import_alias", ":", "if", "import_alias", "==", "name", ":", "return", "True", "elif", "import_name", "and", "import_name", "==", "name", ":", "return", "True", "return", "None" ]
40.117647
12.352941
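A small sketch of how this helper behaves on an astroid module frame; it assumes _find_frame_imports is in scope (it is a private pylint-style helper) and uses astroid.parse to build the frame.
import astroid

frame = astroid.parse("import os as operating_system\nfrom sys import path\n")
print(_find_frame_imports('operating_system', frame))   # True  (matched via the alias)
print(_find_frame_imports('path', frame))               # True  (matched via the import name)
print(_find_frame_imports('os', frame))                 # None  (the alias shadows the raw name)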
def read_file(self, container_name, blob_name, **kwargs): """ Read a file from Azure Blob Storage and return as a string. :param container_name: Name of the container. :type container_name: str :param blob_name: Name of the blob. :type blob_name: str :param kwargs: Optional keyword arguments that `BlockBlobService.get_blob_to_text()` takes. :type kwargs: object """ return self.connection.get_blob_to_text(container_name, blob_name, **kwargs).content
[ "def", "read_file", "(", "self", ",", "container_name", ",", "blob_name", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "connection", ".", "get_blob_to_text", "(", "container_name", ",", "blob_name", ",", "*", "*", "kwargs", ")", ".", "content" ]
42.133333
15.466667
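A hypothetical call, assuming this method lives on an Azure WASB-style hook whose connection wraps BlockBlobService; the hook class name, connection id, container, and blob names are placeholders.
hook = WasbHook(wasb_conn_id='wasb_default')   # assumed hook class exposing read_file()
content = hook.read_file('my-container', 'reports/2019-01-01.txt')
print(content[:200])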
def format(self): """PixelFormat: The raw format of the texture. The actual format may differ, but pixel transfers will use this format. """ fmt = ffi.new('Uint32 *') check_int_err(lib.SDL_QueryTexture(self._ptr, fmt, ffi.NULL, ffi.NULL, ffi.NULL)) return PixelFormat(fmt[0])
[ "def", "format", "(", "self", ")", ":", "fmt", "=", "ffi", ".", "new", "(", "'Uint32 *'", ")", "check_int_err", "(", "lib", ".", "SDL_QueryTexture", "(", "self", ".", "_ptr", ",", "fmt", ",", "ffi", ".", "NULL", ",", "ffi", ".", "NULL", ",", "ffi", ".", "NULL", ")", ")", "return", "PixelFormat", "(", "fmt", "[", "0", "]", ")" ]
47.571429
13.428571
def _fixup_perms2(self, remote_paths, remote_user=None, execute=True): """ Mitogen always executes ActionBase helper methods in the context of the target user account, so it is never necessary to modify permissions except to ensure the execute bit is set if requested. """ LOG.debug('_fixup_perms2(%r, remote_user=%r, execute=%r)', remote_paths, remote_user, execute) if execute and self._task.action not in self.FIXUP_PERMS_RED_HERRING: return self._remote_chmod(remote_paths, mode='u+x') return self.COMMAND_RESULT.copy()
[ "def", "_fixup_perms2", "(", "self", ",", "remote_paths", ",", "remote_user", "=", "None", ",", "execute", "=", "True", ")", ":", "LOG", ".", "debug", "(", "'_fixup_perms2(%r, remote_user=%r, execute=%r)'", ",", "remote_paths", ",", "remote_user", ",", "execute", ")", "if", "execute", "and", "self", ".", "_task", ".", "action", "not", "in", "self", ".", "FIXUP_PERMS_RED_HERRING", ":", "return", "self", ".", "_remote_chmod", "(", "remote_paths", ",", "mode", "=", "'u+x'", ")", "return", "self", ".", "COMMAND_RESULT", ".", "copy", "(", ")" ]
55.181818
20.454545
def _incr_exceptions(self, conn): """Increment the number of exceptions for the current connection. :param psycopg2.extensions.connection conn: the psycopg2 connection """ self._pool_manager.get_connection(self.pid, conn).exceptions += 1
[ "def", "_incr_exceptions", "(", "self", ",", "conn", ")", ":", "self", ".", "_pool_manager", ".", "get_connection", "(", "self", ".", "pid", ",", "conn", ")", ".", "exceptions", "+=", "1" ]
37.857143
22.142857
def buildProtocol(self, addr): """Get a new LLRP client protocol object. Consult self.antenna_dict to look up antennas to use. """ self.resetDelay() # reset reconnection backoff state clargs = self.client_args.copy() # optionally configure antennas from self.antenna_dict, which looks # like {'10.0.0.1:5084': {'1': 'ant1', '2': 'ant2'}} hostport = '{}:{}'.format(addr.host, addr.port) logger.debug('Building protocol for %s', hostport) if hostport in self.antenna_dict: clargs['antennas'] = [ int(x) for x in self.antenna_dict[hostport].keys()] elif addr.host in self.antenna_dict: clargs['antennas'] = [ int(x) for x in self.antenna_dict[addr.host].keys()] logger.debug('Antennas in buildProtocol: %s', clargs.get('antennas')) logger.debug('%s start_inventory: %s', hostport, clargs.get('start_inventory')) if self.start_first and not self.protocols: # this is the first protocol, so let's start it inventorying clargs['start_inventory'] = True proto = LLRPClient(factory=self, **clargs) # register state-change callbacks with new client for state, cbs in self._state_callbacks.items(): for cb in cbs: proto.addStateCallback(state, cb) # register message callbacks with new client for msg_type, cbs in self._message_callbacks.items(): for cb in cbs: proto.addMessageCallback(msg_type, cb) return proto
[ "def", "buildProtocol", "(", "self", ",", "addr", ")", ":", "self", ".", "resetDelay", "(", ")", "# reset reconnection backoff state", "clargs", "=", "self", ".", "client_args", ".", "copy", "(", ")", "# optionally configure antennas from self.antenna_dict, which looks", "# like {'10.0.0.1:5084': {'1': 'ant1', '2': 'ant2'}}", "hostport", "=", "'{}:{}'", ".", "format", "(", "addr", ".", "host", ",", "addr", ".", "port", ")", "logger", ".", "debug", "(", "'Building protocol for %s'", ",", "hostport", ")", "if", "hostport", "in", "self", ".", "antenna_dict", ":", "clargs", "[", "'antennas'", "]", "=", "[", "int", "(", "x", ")", "for", "x", "in", "self", ".", "antenna_dict", "[", "hostport", "]", ".", "keys", "(", ")", "]", "elif", "addr", ".", "host", "in", "self", ".", "antenna_dict", ":", "clargs", "[", "'antennas'", "]", "=", "[", "int", "(", "x", ")", "for", "x", "in", "self", ".", "antenna_dict", "[", "addr", ".", "host", "]", ".", "keys", "(", ")", "]", "logger", ".", "debug", "(", "'Antennas in buildProtocol: %s'", ",", "clargs", ".", "get", "(", "'antennas'", ")", ")", "logger", ".", "debug", "(", "'%s start_inventory: %s'", ",", "hostport", ",", "clargs", ".", "get", "(", "'start_inventory'", ")", ")", "if", "self", ".", "start_first", "and", "not", "self", ".", "protocols", ":", "# this is the first protocol, so let's start it inventorying", "clargs", "[", "'start_inventory'", "]", "=", "True", "proto", "=", "LLRPClient", "(", "factory", "=", "self", ",", "*", "*", "clargs", ")", "# register state-change callbacks with new client", "for", "state", ",", "cbs", "in", "self", ".", "_state_callbacks", ".", "items", "(", ")", ":", "for", "cb", "in", "cbs", ":", "proto", ".", "addStateCallback", "(", "state", ",", "cb", ")", "# register message callbacks with new client", "for", "msg_type", ",", "cbs", "in", "self", ".", "_message_callbacks", ".", "items", "(", ")", ":", "for", "cb", "in", "cbs", ":", "proto", ".", "addMessageCallback", "(", "msg_type", ",", "cb", ")", "return", "proto" ]
41.842105
18.684211
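For reference, the antenna_dict shape consulted above (taken from the comment in the code) keys on either 'host:port' or the bare host, with antenna numbers as string keys:
antenna_dict = {
    '10.0.0.1:5084': {'1': 'ant1', '2': 'ant2'},   # matched first, via host:port
    '10.0.0.2': {'1': 'dock-door'},                # fallback match on the bare host
}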
def cache_clean_handler(min_age_hours=1): """This periodically cleans up the ~/.astrobase cache to save us from disk-space doom. Parameters ---------- min_age_hours : int Files older than this number of hours from the current time will be deleted. Returns ------- Nothing. """ # find the files to delete cmd = ( "find ~ec2-user/.astrobase -type f -mmin +{mmin} -exec rm -v '{{}}' \;" ) mmin = '%.1f' % (min_age_hours*60.0) cmd = cmd.format(mmin=mmin) try: proc = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE) ndeleted = len(proc.stdout.decode().split('\n')) LOGWARNING('cache clean: %s files older than %s hours deleted' % (ndeleted, min_age_hours)) except Exception as e: LOGEXCEPTION('cache clean: could not delete old files')
[ "def", "cache_clean_handler", "(", "min_age_hours", "=", "1", ")", ":", "# find the files to delete", "cmd", "=", "(", "\"find ~ec2-user/.astrobase -type f -mmin +{mmin} -exec rm -v '{{}}' \\;\"", ")", "mmin", "=", "'%.1f'", "%", "(", "min_age_hours", "*", "60.0", ")", "cmd", "=", "cmd", ".", "format", "(", "mmin", "=", "mmin", ")", "try", ":", "proc", "=", "subprocess", ".", "run", "(", "cmd", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "ndeleted", "=", "len", "(", "proc", ".", "stdout", ".", "decode", "(", ")", ".", "split", "(", "'\\n'", ")", ")", "LOGWARNING", "(", "'cache clean: %s files older than %s hours deleted'", "%", "(", "ndeleted", ",", "min_age_hours", ")", ")", "except", "Exception", "as", "e", ":", "LOGEXCEPTION", "(", "'cache clean: could not delete old files'", ")" ]
26.65625
24.65625
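For example, calling the handler with a two-hour threshold assembles and runs the following shell command (the home-directory path is hard-coded above):
cache_clean_handler(min_age_hours=2)
# runs: find ~ec2-user/.astrobase -type f -mmin +120.0 -exec rm -v '{}' \;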
def abort(self): """ Abort a running command. @return: A new ApiCommand object with the updated information. """ if self.id == ApiCommand.SYNCHRONOUS_COMMAND_ID: return self path = self._path() + '/abort' resp = self._get_resource_root().post(path) return ApiCommand.from_json_dict(resp, self._get_resource_root())
[ "def", "abort", "(", "self", ")", ":", "if", "self", ".", "id", "==", "ApiCommand", ".", "SYNCHRONOUS_COMMAND_ID", ":", "return", "self", "path", "=", "self", ".", "_path", "(", ")", "+", "'/abort'", "resp", "=", "self", ".", "_get_resource_root", "(", ")", ".", "post", "(", "path", ")", "return", "ApiCommand", ".", "from_json_dict", "(", "resp", ",", "self", ".", "_get_resource_root", "(", ")", ")" ]
28.583333
18.25
def open_tensorboard(log_dir='/tmp/tensorflow', port=6006): """Open Tensorboard. Parameters ---------- log_dir : str Directory where your tensorboard logs are saved port : int TensorBoard port you want to open, 6006 is tensorboard default """ text = "[TL] Open tensorboard, go to localhost:" + str(port) + " to access" text2 = " not yet supported by this function (tl.ops.open_tb)" if not tl.files.exists_or_mkdir(log_dir, verbose=False): tl.logging.info("[TL] Log reportory was created at %s" % log_dir) if _platform == "linux" or _platform == "linux2": raise NotImplementedError() elif _platform == "darwin": tl.logging.info('OS X: %s' % text) subprocess.Popen( sys.prefix + " | python -m tensorflow.tensorboard --logdir=" + log_dir + " --port=" + str(port), shell=True ) # open tensorboard in localhost:6006/ or whatever port you chose elif _platform == "win32": raise NotImplementedError("this function is not supported on the Windows platform") else: tl.logging.info(_platform + text2)
[ "def", "open_tensorboard", "(", "log_dir", "=", "'/tmp/tensorflow'", ",", "port", "=", "6006", ")", ":", "text", "=", "\"[TL] Open tensorboard, go to localhost:\"", "+", "str", "(", "port", ")", "+", "\" to access\"", "text2", "=", "\" not yet supported by this function (tl.ops.open_tb)\"", "if", "not", "tl", ".", "files", ".", "exists_or_mkdir", "(", "log_dir", ",", "verbose", "=", "False", ")", ":", "tl", ".", "logging", ".", "info", "(", "\"[TL] Log reportory was created at %s\"", "%", "log_dir", ")", "if", "_platform", "==", "\"linux\"", "or", "_platform", "==", "\"linux2\"", ":", "raise", "NotImplementedError", "(", ")", "elif", "_platform", "==", "\"darwin\"", ":", "tl", ".", "logging", ".", "info", "(", "'OS X: %s'", "%", "text", ")", "subprocess", ".", "Popen", "(", "sys", ".", "prefix", "+", "\" | python -m tensorflow.tensorboard --logdir=\"", "+", "log_dir", "+", "\" --port=\"", "+", "str", "(", "port", ")", ",", "shell", "=", "True", ")", "# open tensorboard in localhost:6006/ or whatever port you chose", "elif", "_platform", "==", "\"win32\"", ":", "raise", "NotImplementedError", "(", "\"this function is not supported on the Windows platform\"", ")", "else", ":", "tl", ".", "logging", ".", "info", "(", "_platform", "+", "text2", ")" ]
39.428571
24.821429
def start_parent(): """ Start the parent that will simply run the child forever until stopped. """ while True: args = [sys.executable] + sys.argv new_environ = environ.copy() new_environ["_IN_CHILD"] = 'yes' ret = subprocess.call(args, env=new_environ) if ret != settings.CODE_RELOAD_EXIT: return ret
[ "def", "start_parent", "(", ")", ":", "while", "True", ":", "args", "=", "[", "sys", ".", "executable", "]", "+", "sys", ".", "argv", "new_environ", "=", "environ", ".", "copy", "(", ")", "new_environ", "[", "\"_IN_CHILD\"", "]", "=", "'yes'", "ret", "=", "subprocess", ".", "call", "(", "args", ",", "env", "=", "new_environ", ")", "if", "ret", "!=", "settings", ".", "CODE_RELOAD_EXIT", ":", "return", "ret" ]
27.538462
15.384615
def _get_types_from_sample(result_vars, sparql_results_json): """Return types if homogeneous within sample Compare up to 10 rows of results to determine homogeneity. DESCRIBE and CONSTRUCT queries, for example, may return heterogeneous bindings. :param result_vars: list of result variable names to inspect :param sparql_results_json: parsed SPARQL results JSON document """ total_bindings = len(sparql_results_json['results']['bindings']) homogeneous_types = {} for result_var in result_vars: var_types = set() var_datatypes = set() for i in range(0, min(total_bindings, 10)): binding = sparql_results_json['results']['bindings'][i] rdf_term = binding.get(result_var) if rdf_term is not None: # skip missing values var_types.add(rdf_term.get('type')) var_datatypes.add(rdf_term.get('datatype')) if len(var_types) > 1 or len(var_datatypes) > 1: return None # Heterogeneous types else: homogeneous_types[result_var] = { 'type': var_types.pop() if var_types else None, 'datatype': var_datatypes.pop() if var_datatypes else None } return homogeneous_types
[ "def", "_get_types_from_sample", "(", "result_vars", ",", "sparql_results_json", ")", ":", "total_bindings", "=", "len", "(", "sparql_results_json", "[", "'results'", "]", "[", "'bindings'", "]", ")", "homogeneous_types", "=", "{", "}", "for", "result_var", "in", "result_vars", ":", "var_types", "=", "set", "(", ")", "var_datatypes", "=", "set", "(", ")", "for", "i", "in", "range", "(", "0", ",", "min", "(", "total_bindings", ",", "10", ")", ")", ":", "binding", "=", "sparql_results_json", "[", "'results'", "]", "[", "'bindings'", "]", "[", "i", "]", "rdf_term", "=", "binding", ".", "get", "(", "result_var", ")", "if", "rdf_term", "is", "not", "None", ":", "# skip missing values", "var_types", ".", "add", "(", "rdf_term", ".", "get", "(", "'type'", ")", ")", "var_datatypes", ".", "add", "(", "rdf_term", ".", "get", "(", "'datatype'", ")", ")", "if", "len", "(", "var_types", ")", ">", "1", "or", "len", "(", "var_datatypes", ")", ">", "1", ":", "return", "None", "# Heterogeneous types", "else", ":", "homogeneous_types", "[", "result_var", "]", "=", "{", "'type'", ":", "var_types", ".", "pop", "(", ")", "if", "var_types", "else", "None", ",", "'datatype'", ":", "var_datatypes", ".", "pop", "(", ")", "if", "var_datatypes", "else", "None", "}", "return", "homogeneous_types" ]
37.733333
18.466667
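A hedged sketch of the input and output shapes, using a two-row SPARQL results document with a single homogeneous variable; the function itself is assumed to be in scope.
sample = {
    'results': {'bindings': [
        {'s': {'type': 'uri', 'value': 'http://example.org/a'}},
        {'s': {'type': 'uri', 'value': 'http://example.org/b'}},
    ]}
}
print(_get_types_from_sample(['s'], sample))
# -> {'s': {'type': 'uri', 'datatype': None}}; a mix of types would return None instead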
def wait_for_tasks(self): """ Wait for one or more tasks to finish, or return an empty list if we are done. Starts new tasks if we have fewer than task_at_once currently running. :return: [(Task,object)]: list of (task,result) for finished tasks """ finished_tasks_and_results = [] while len(finished_tasks_and_results) == 0: if self.is_done(): break self.start_tasks() self.process_all_messages_in_queue() finished_tasks_and_results = self.get_finished_results() return finished_tasks_and_results
[ "def", "wait_for_tasks", "(", "self", ")", ":", "finished_tasks_and_results", "=", "[", "]", "while", "len", "(", "finished_tasks_and_results", ")", "==", "0", ":", "if", "self", ".", "is_done", "(", ")", ":", "break", "self", ".", "start_tasks", "(", ")", "self", ".", "process_all_messages_in_queue", "(", ")", "finished_tasks_and_results", "=", "self", ".", "get_finished_results", "(", ")", "return", "finished_tasks_and_results" ]
43.357143
15.357143
def fetchall(self, sql: str, *args) -> Sequence[Sequence[Any]]: """Executes SQL; returns all rows, or [].""" self.ensure_db_open() cursor = self.db.cursor() self.db_exec_with_cursor(cursor, sql, *args) try: rows = cursor.fetchall() return rows except: # nopep8 log.exception("fetchall: SQL was: " + sql) raise
[ "def", "fetchall", "(", "self", ",", "sql", ":", "str", ",", "*", "args", ")", "->", "Sequence", "[", "Sequence", "[", "Any", "]", "]", ":", "self", ".", "ensure_db_open", "(", ")", "cursor", "=", "self", ".", "db", ".", "cursor", "(", ")", "self", ".", "db_exec_with_cursor", "(", "cursor", ",", "sql", ",", "*", "args", ")", "try", ":", "rows", "=", "cursor", ".", "fetchall", "(", ")", "return", "rows", "except", ":", "# nopep8", "log", ".", "exception", "(", "\"fetchall: SQL was: \"", "+", "sql", ")", "raise" ]
36
14
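A hypothetical call against the wrapper above; the table, column names, and qmark parameter style are placeholders chosen for illustration.
rows = dbconn.fetchall("SELECT id, surname FROM patient WHERE dob >= ?", "1970-01-01")
for pk, surname in rows:
    print(pk, surname)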
def p_end_function(p): """ top : top END_FUNCTION """ p[0] = p[1] p[0].append(node.return_stmt(ret=ret_expr)) p[0].append(node.comment_stmt("\nif __name__ == '__main__':\n pass"))
[ "def", "p_end_function", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]", "p", "[", "0", "]", ".", "append", "(", "node", ".", "return_stmt", "(", "ret", "=", "ret_expr", ")", ")", "p", "[", "0", "]", ".", "append", "(", "node", ".", "comment_stmt", "(", "\"\\nif __name__ == '__main__':\\n pass\"", ")", ")" ]
28.571429
14.285714
def summarize(text, sent_limit=None, char_limit=None, imp_require=None, debug=False, **lexrank_params): ''' Args: text: text to be summarized (unicode string) sent_limit: summary length (the number of sentences) char_limit: summary length (the number of characters) imp_require: cumulative LexRank score [0.0-1.0] Returns: list of extracted sentences ''' debug_info = {} sentences = list(tools.sent_splitter_ja(text)) scores, sim_mat = lexrank(sentences, **lexrank_params) sum_scores = sum(scores.itervalues()) acc_scores = 0.0 indexes = set() num_sent, num_char = 0, 0 for i in sorted(scores, key=lambda i: scores[i], reverse=True): num_sent += 1 num_char += len(sentences[i]) if sent_limit is not None and num_sent > sent_limit: break if char_limit is not None and num_char > char_limit: break if imp_require is not None and acc_scores / sum_scores >= imp_require: break indexes.add(i) acc_scores += scores[i] if len(indexes) > 0: summary_sents = [sentences[i] for i in sorted(indexes)] else: summary_sents = sentences if debug: debug_info.update({ 'sentences': sentences, 'scores': scores }) return summary_sents, debug_info
[ "def", "summarize", "(", "text", ",", "sent_limit", "=", "None", ",", "char_limit", "=", "None", ",", "imp_require", "=", "None", ",", "debug", "=", "False", ",", "*", "*", "lexrank_params", ")", ":", "debug_info", "=", "{", "}", "sentences", "=", "list", "(", "tools", ".", "sent_splitter_ja", "(", "text", ")", ")", "scores", ",", "sim_mat", "=", "lexrank", "(", "sentences", ",", "*", "*", "lexrank_params", ")", "sum_scores", "=", "sum", "(", "scores", ".", "itervalues", "(", ")", ")", "acc_scores", "=", "0.0", "indexes", "=", "set", "(", ")", "num_sent", ",", "num_char", "=", "0", ",", "0", "for", "i", "in", "sorted", "(", "scores", ",", "key", "=", "lambda", "i", ":", "scores", "[", "i", "]", ",", "reverse", "=", "True", ")", ":", "num_sent", "+=", "1", "num_char", "+=", "len", "(", "sentences", "[", "i", "]", ")", "if", "sent_limit", "is", "not", "None", "and", "num_sent", ">", "sent_limit", ":", "break", "if", "char_limit", "is", "not", "None", "and", "num_char", ">", "char_limit", ":", "break", "if", "imp_require", "is", "not", "None", "and", "acc_scores", "/", "sum_scores", ">=", "imp_require", ":", "break", "indexes", ".", "add", "(", "i", ")", "acc_scores", "+=", "scores", "[", "i", "]", "if", "len", "(", "indexes", ")", ">", "0", ":", "summary_sents", "=", "[", "sentences", "[", "i", "]", "for", "i", "in", "sorted", "(", "indexes", ")", "]", "else", ":", "summary_sents", "=", "sentences", "if", "debug", ":", "debug_info", ".", "update", "(", "{", "'sentences'", ":", "sentences", ",", "'scores'", ":", "scores", "}", ")", "return", "summary_sents", ",", "debug_info" ]
31.761905
20.857143
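A short usage sketch, assuming summarize is importable alongside the Japanese sentence splitter it relies on; the sample text is arbitrary.
text = u'吾輩は猫である。名前はまだ無い。どこで生れたかとんと見当がつかぬ。'
summary_sents, debug_info = summarize(text, sent_limit=2)
for sent in summary_sents:
    print(sent)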
def get_format_extension(fmt): ''' Returns the recommended extension for a given format ''' if fmt is None: return 'dict' fmt = fmt.lower() if fmt not in _converter_map: raise RuntimeError('Unknown basis set format "{}"'.format(fmt)) return _converter_map[fmt]['extension']
[ "def", "get_format_extension", "(", "fmt", ")", ":", "if", "fmt", "is", "None", ":", "return", "'dict'", "fmt", "=", "fmt", ".", "lower", "(", ")", "if", "fmt", "not", "in", "_converter_map", ":", "raise", "RuntimeError", "(", "'Unknown basis set format \"{}\"'", ".", "format", "(", "fmt", ")", ")", "return", "_converter_map", "[", "fmt", "]", "[", "'extension'", "]" ]
23.692308
24
def set_passport_data_errors(self, user_id, errors): """ Informs a user that some of the Telegram Passport elements they provided contains errors. The user will not be able to re-submit their Passport to you until the errors are fixed (the contents of the field for which you returned the error must change). Returns True on success. Use this if the data submitted by the user doesn't satisfy the standards your service requires for any reason. For example, if a birthday date seems invalid, a submitted document is blurry, a scan shows evidence of tampering, etc. Supply some details in the error message to make sure the user knows how to correct the issues. https://core.telegram.org/bots/api#setpassportdataerrors Parameters: :param user_id: User identifier :type user_id: int :param errors: A JSON-serialized array describing the errors :type errors: list of pytgbot.api_types.sendable.passport.PassportElementError Returns: :return: Returns True on success :rtype: bool """ from pytgbot.api_types.sendable.passport import PassportElementError assert_type_or_raise(user_id, int, parameter_name="user_id") assert_type_or_raise(errors, list, parameter_name="errors") result = self.do("setPassportDataErrors", user_id=user_id, errors=errors) if self.return_python_objects: logger.debug("Trying to parse {data}".format(data=repr(result))) try: return from_array_list(bool, result, list_level=0, is_builtin=True) except TgApiParseException: logger.debug("Failed parsing as primitive bool", exc_info=True) # end try # no valid parsing so far raise TgApiParseException("Could not parse result.") # See debug log for details! # end if return_python_objects return result
[ "def", "set_passport_data_errors", "(", "self", ",", "user_id", ",", "errors", ")", ":", "from", "pytgbot", ".", "api_types", ".", "sendable", ".", "passport", "import", "PassportElementError", "assert_type_or_raise", "(", "user_id", ",", "int", ",", "parameter_name", "=", "\"user_id\"", ")", "assert_type_or_raise", "(", "errors", ",", "list", ",", "parameter_name", "=", "\"errors\"", ")", "result", "=", "self", ".", "do", "(", "\"setPassportDataErrors\"", ",", "user_id", "=", "user_id", ",", "errors", "=", "errors", ")", "if", "self", ".", "return_python_objects", ":", "logger", ".", "debug", "(", "\"Trying to parse {data}\"", ".", "format", "(", "data", "=", "repr", "(", "result", ")", ")", ")", "try", ":", "return", "from_array_list", "(", "bool", ",", "result", ",", "list_level", "=", "0", ",", "is_builtin", "=", "True", ")", "except", "TgApiParseException", ":", "logger", ".", "debug", "(", "\"Failed parsing as primitive bool\"", ",", "exc_info", "=", "True", ")", "# end try", "# no valid parsing so far", "raise", "TgApiParseException", "(", "\"Could not parse result.\"", ")", "# See debug log for details!", "# end if return_python_objects", "return", "result" ]
49.275
36.925
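A heavily hedged usage sketch: the error class name and constructor keywords below follow the Telegram Bot API field names, but the exact pytgbot signature is an assumption, as is the bot instance.
from pytgbot.api_types.sendable.passport import PassportElementErrorDataField   # assumed class path

err = PassportElementErrorDataField(
    type='personal_details', field_name='birth_date',
    data_hash='<data_hash from the submitted element>',
    message='Birth date seems invalid',
)
bot.set_passport_data_errors(user_id=12345678, errors=[err])   # returns True on success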
def _validate_auths(self, path, obj, app): """ Make sure that apiKey and basicAuth are empty lists in the Operation object. """ errs = [] for k, v in six.iteritems(obj.authorizations or {}): if k not in app.raw.authorizations: errs.append('auth {0} not found in resource list'.format(k)) if app.raw.authorizations[k].type in ('basicAuth', 'apiKey') and v != []: errs.append('auth {0} should be an empty list'.format(k)) return path, obj.__class__.__name__, errs
[ "def", "_validate_auths", "(", "self", ",", "path", ",", "obj", ",", "app", ")", ":", "errs", "=", "[", "]", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "obj", ".", "authorizations", "or", "{", "}", ")", ":", "if", "k", "not", "in", "app", ".", "raw", ".", "authorizations", ":", "errs", ".", "append", "(", "'auth {0} not found in resource list'", ".", "format", "(", "k", ")", ")", "if", "app", ".", "raw", ".", "authorizations", "[", "k", "]", ".", "type", "in", "(", "'basicAuth'", ",", "'apiKey'", ")", "and", "v", "!=", "[", "]", ":", "errs", ".", "append", "(", "'auth {0} should be an empty list'", ".", "format", "(", "k", ")", ")", "return", "path", ",", "obj", ".", "__class__", ".", "__name__", ",", "errs" ]
39.285714
21.928571
def download(self, job_id, destination=None, timeout=DEFAULT_TIMEOUT, retries=DEFAULT_RETRIES): """ Downloads all screenshots for the given job_id to the `destination` folder. If `destination` is None, screenshots are saved in the current directory. """ self._retries_num = 0 sleep(timeout) self.save_many(job_id, destination, timeout, retries) return self._cache
[ "def", "download", "(", "self", ",", "job_id", ",", "destination", "=", "None", ",", "timeout", "=", "DEFAULT_TIMEOUT", ",", "retries", "=", "DEFAULT_RETRIES", ")", ":", "self", ".", "_retries_num", "=", "0", "sleep", "(", "timeout", ")", "self", ".", "save_many", "(", "job_id", ",", "destination", ",", "timeout", ",", "retries", ")", "return", "self", ".", "_cache" ]
46.222222
22.222222
def fcomplete(text, state): """Readline completion function: Filenames""" text = os.path.expanduser(text) head, tail = os.path.split(text) search_dir = os.path.join('.', head) candidates = [s for s in os.listdir(search_dir) if s.startswith(tail)] if state >= len(candidates): return None if len(candidates) == 1: fn = os.path.join(head, candidates[0]) if not os.path.isdir(fn): return fn + ' ' return fn + '/' return os.path.join(head, candidates[state])
[ "def", "fcomplete", "(", "text", ",", "state", ")", ":", "text", "=", "os", ".", "path", ".", "expanduser", "(", "text", ")", "head", ",", "tail", "=", "os", ".", "path", ".", "split", "(", "text", ")", "search_dir", "=", "os", ".", "path", ".", "join", "(", "'.'", ",", "head", ")", "candidates", "=", "[", "s", "for", "s", "in", "os", ".", "listdir", "(", "search_dir", ")", "if", "s", ".", "startswith", "(", "tail", ")", "]", "if", "state", ">=", "len", "(", "candidates", ")", ":", "return", "None", "if", "len", "(", "candidates", ")", "==", "1", ":", "fn", "=", "os", ".", "path", ".", "join", "(", "head", ",", "candidates", "[", "0", "]", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "fn", ")", ":", "return", "fn", "+", "' '", "return", "fn", "+", "'/'", "return", "os", ".", "path", ".", "join", "(", "head", ",", "candidates", "[", "state", "]", ")" ]
27.210526
18.315789
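To wire the completer into an interactive session, something like the following should work (readline behaviour varies by platform, so treat this as a sketch):
import readline

readline.set_completer(fcomplete)
readline.set_completer_delims(' \t\n')
readline.parse_and_bind('tab: complete')
# Now input() / raw_input() will tab-complete filenames via fcomplete.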